import gql from "graphql-tag";
export default gql(`
mutation(
  $id: ID!
  $customerId: String!
  $partnerId: String
  $cost: Int!
  $when: String!
  $discount: Int!
  $services: String!
) {
createOrder(input:
{
id: $id
customerId: $customerId
partnerId: $partnerId
cost: $cost
when: $when
discount: $discount
services: $services
}
) {
id
customerId
partnerId
cost
when
discount
services
}
}`);
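// Usage sketch (assumptions: an Apollo-style client exposing `mutate`, and
// hypothetical variable values; adapt to the actual client in use):
//
//   import createOrder from './createOrder'
//   client.mutate({
//     mutation: createOrder,
//     variables: { id, customerId, partnerId, cost, when, discount, services },
//   })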
|
#include "ai_tool_runtime.h"
int * _loop_counters;
int _loop_count;
char* _ofilename;
static void allocAndCopyStr(char** dest, const char* src)
{
*dest = (char*) malloc((strlen(src)+1)*sizeof(char));
strcpy (*dest, src);
assert (strlen(src) == strlen (*dest));
}
void ai_runtime_init(char* outputfile_name, int loop_count)
{
// allocate and initialize counters
assert (loop_count>0);
_loop_count = loop_count;
int _i;
_loop_counters = (int*) malloc (loop_count *sizeof(int));
for ( _i =0; _i< loop_count ; _i++)
{
_loop_counters[_i] = 0;
}
// copy the file name
allocAndCopyStr (&_ofilename, outputfile_name);
}
void ai_runtime_terminate()
{
// write results into a file
FILE* o_file;
o_file = fopen (_ofilename, "a+");
if(o_file != NULL)
{
int ii;
for (ii=0; ii< _loop_count; ii++)
{
fprintf(o_file, "loop %d : iteration count: %d\n", ii, _loop_counters[ii]);
}
    fclose (o_file);
  }
  else
    assert (0);
// terminate the monitoring
free (_loop_counters);
}
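/*
 * Usage sketch (an assumption about how tool-instrumented code would drive this
 * runtime; "loop_counts.txt" and the loop indices are hypothetical):
 *
 *   ai_runtime_init("loop_counts.txt", 2);          // two instrumented loops
 *   for (i = 0; i < n; i++) _loop_counters[0]++;    // loop 0 body
 *   for (j = 0; j < m; j++) _loop_counters[1]++;    // loop 1 body
 *   ai_runtime_terminate();  // appends per-loop iteration counts, frees memory
 */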
|
import 'babel-polyfill'
import configureMockStore from 'redux-mock-store'
import thunk from 'redux-thunk'
import { push } from 'react-router-redux'
import { navigateTo } from 'actions/navigation'
const mockStore = configureMockStore([thunk])
describe('action navigate', () => {
let store
beforeEach(() => {
store = mockStore({})
})
it('dispatches a push event from react-router-redux', () => {
store.dispatch(navigateTo('/foo'))
const actions = store.getActions()
expect(actions[0]).toEqual(push('/foo'))
})
})
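// A minimal sketch of what `actions/navigation` likely exports, inferred from the
// assertion above (assumption: the action simply delegates to react-router-redux):
//
//   import { push } from 'react-router-redux'
//   export const navigateTo = path => dispatch => dispatch(push(path))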
|
#!/bin/bash
# |
# watch kubectl get gitrepository -A |
# |
# --------------------------------------+
# |
# watch kubectl get kustomization -A | watch kubectl get pods -A
# |
# --------------------------------------+
# |
# kubectl apply -f podinfo.yaml |
# |
# Create app
kubectl apply -f podinfo.yaml
kubectl get gitrepository podinfo
kubectl get kustomization podinfo-dev
# Wait for deployment
watch kubectl get pods -A
# Check backend in browser
kubectl -n dev port-forward service/backend 9898:9898
# http://localhost:9898
# Check frontend in browser
kubectl -n dev port-forward service/frontend 8080:80
# http://localhost:8080
|
/* kernel/modules/gpu/mali450/kernel_mode/driver/src/devicedrv/mali/platform/arm/arm.c */
/*
* Copyright (C) 2010, 2012-2015 ARM Limited. All rights reserved.
*
* This program is free software and is provided to you under the terms of the GNU General Public License version 2
* as published by the Free Software Foundation, and any use by you of this program is subject to the terms of such GNU licence.
*
* A copy of the licence is included with the program, and can also be obtained from Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
/**
* @file mali_platform.c
* Platform specific Mali driver functions for:
* - Realview Versatile platforms with ARM11 Mpcore and virtex 5.
* - Versatile Express platforms with ARM Cortex-A9 and virtex 6.
*/
#include <linux/platform_device.h>
#include <linux/version.h>
#include <linux/pm.h>
#ifdef CONFIG_PM_RUNTIME
#include <linux/pm_runtime.h>
#endif
#include <asm/io.h>
#include <linux/mali/mali_utgard.h>
#include "mali_kernel_common.h"
#include <linux/dma-mapping.h>
#include <linux/moduleparam.h>
#include "arm_core_scaling.h"
#include "mali_executor.h"
static int mali_core_scaling_enable = 0;
void mali_gpu_utilization_callback(struct mali_gpu_utilization_data *data);
static u32 mali_read_phys(u32 phys_addr);
#if defined(CONFIG_ARCH_REALVIEW)
static void mali_write_phys(u32 phys_addr, u32 value);
#endif
#ifndef CONFIG_MALI_DT
static void mali_platform_device_release(struct device *device);
#if defined(CONFIG_ARCH_VEXPRESS)
#if defined(CONFIG_ARM64)
/* Juno + Mali-450 MP6 in V7 FPGA */
static struct resource mali_gpu_resources_m450_mp6[] = {
MALI_GPU_RESOURCES_MALI450_MP6_PMU(0x6F040000, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200)
};
#else
static struct resource mali_gpu_resources_m450_mp8[] = {
MALI_GPU_RESOURCES_MALI450_MP8_PMU(0xFC040000, -1, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 68)
};
static struct resource mali_gpu_resources_m450_mp6[] = {
MALI_GPU_RESOURCES_MALI450_MP6_PMU(0xFC040000, -1, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 68)
};
static struct resource mali_gpu_resources_m450_mp4[] = {
MALI_GPU_RESOURCES_MALI450_MP4_PMU(0xFC040000, -1, 70, 70, 70, 70, 70, 70, 70, 70, 70, 68)
};
#endif /* CONFIG_ARM64 */
#elif defined(CONFIG_ARCH_REALVIEW)
static struct resource mali_gpu_resources_m300[] = {
MALI_GPU_RESOURCES_MALI300_PMU(0xC0000000, -1, -1, -1, -1)
};
static struct resource mali_gpu_resources_m400_mp1[] = {
MALI_GPU_RESOURCES_MALI400_MP1_PMU(0xC0000000, -1, -1, -1, -1)
};
static struct resource mali_gpu_resources_m400_mp2[] = {
MALI_GPU_RESOURCES_MALI400_MP2_PMU(0xC0000000, -1, -1, -1, -1, -1, -1)
};
#endif
#endif
static struct mali_gpu_device_data mali_gpu_data = {
#ifndef CONFIG_MALI_DT
.pmu_switch_delay = 0xFF, /* do not have to be this high on FPGA, but it is good for testing to have a delay */
.max_job_runtime = 60000, /* 60 seconds */
#if defined(CONFIG_ARCH_VEXPRESS)
.shared_mem_size = 256 * 1024 * 1024, /* 256MB */
#endif
#endif
#if defined(CONFIG_ARCH_REALVIEW)
.dedicated_mem_start = 0x80000000, /* Physical start address (use 0xD0000000 for old indirect setup) */
.dedicated_mem_size = 0x10000000, /* 256MB */
#endif
#if defined(CONFIG_ARM64)
.fb_start = 0x5f000000,
.fb_size = 0x91000000,
#else
.fb_start = 0xe0000000,
.fb_size = 0x01000000,
#endif
.control_interval = 1000, /* 1000ms */
.utilization_callback = mali_gpu_utilization_callback,
.get_clock_info = NULL,
.get_freq = NULL,
.set_freq = NULL,
};
#ifndef CONFIG_MALI_DT
static struct platform_device mali_gpu_device = {
.name = MALI_GPU_NAME_UTGARD,
.id = 0,
.dev.release = mali_platform_device_release,
.dev.dma_mask = &mali_gpu_device.dev.coherent_dma_mask,
.dev.coherent_dma_mask = DMA_BIT_MASK(32),
.dev.platform_data = &mali_gpu_data,
#if defined(CONFIG_ARM64)
.dev.archdata.dma_ops = &noncoherent_swiotlb_dma_ops,
#endif
};
int mali_platform_device_register(void)
{
int err = -1;
int num_pp_cores = 0;
#if defined(CONFIG_ARCH_REALVIEW)
u32 m400_gp_version;
#endif
MALI_DEBUG_PRINT(4, ("mali_platform_device_register() called\n"));
/* Detect present Mali GPU and connect the correct resources to the device */
#if defined(CONFIG_ARCH_VEXPRESS)
#if defined(CONFIG_ARM64)
if (mali_read_phys(0x6F000000) == 0x40601450) {
MALI_DEBUG_PRINT(4, ("Registering Mali-450 MP6 device\n"));
num_pp_cores = 6;
mali_gpu_device.num_resources = ARRAY_SIZE(mali_gpu_resources_m450_mp6);
mali_gpu_device.resource = mali_gpu_resources_m450_mp6;
}
#else
if (mali_read_phys(0xFC000000) == 0x00000450) {
MALI_DEBUG_PRINT(4, ("Registering Mali-450 MP8 device\n"));
num_pp_cores = 8;
mali_gpu_device.num_resources = ARRAY_SIZE(mali_gpu_resources_m450_mp8);
mali_gpu_device.resource = mali_gpu_resources_m450_mp8;
} else if (mali_read_phys(0xFC000000) == 0x40600450) {
MALI_DEBUG_PRINT(4, ("Registering Mali-450 MP6 device\n"));
num_pp_cores = 6;
mali_gpu_device.num_resources = ARRAY_SIZE(mali_gpu_resources_m450_mp6);
mali_gpu_device.resource = mali_gpu_resources_m450_mp6;
} else if (mali_read_phys(0xFC000000) == 0x40400450) {
MALI_DEBUG_PRINT(4, ("Registering Mali-450 MP4 device\n"));
num_pp_cores = 4;
mali_gpu_device.num_resources = ARRAY_SIZE(mali_gpu_resources_m450_mp4);
mali_gpu_device.resource = mali_gpu_resources_m450_mp4;
}
#endif /* CONFIG_ARM64 */
#elif defined(CONFIG_ARCH_REALVIEW)
m400_gp_version = mali_read_phys(0xC000006C);
if ((m400_gp_version & 0xFFFF0000) == 0x0C070000) {
MALI_DEBUG_PRINT(4, ("Registering Mali-300 device\n"));
num_pp_cores = 1;
mali_gpu_device.num_resources = ARRAY_SIZE(mali_gpu_resources_m300);
mali_gpu_device.resource = mali_gpu_resources_m300;
mali_write_phys(0xC0010020, 0xA); /* Enable direct memory mapping for FPGA */
} else if ((m400_gp_version & 0xFFFF0000) == 0x0B070000) {
u32 fpga_fw_version = mali_read_phys(0xC0010000);
if (fpga_fw_version == 0x130C008F || fpga_fw_version == 0x110C008F) {
/* Mali-400 MP1 r1p0 or r1p1 */
MALI_DEBUG_PRINT(4, ("Registering Mali-400 MP1 device\n"));
num_pp_cores = 1;
mali_gpu_device.num_resources = ARRAY_SIZE(mali_gpu_resources_m400_mp1);
mali_gpu_device.resource = mali_gpu_resources_m400_mp1;
mali_write_phys(0xC0010020, 0xA); /* Enable direct memory mapping for FPGA */
} else if (fpga_fw_version == 0x130C000F) {
/* Mali-400 MP2 r1p1 */
MALI_DEBUG_PRINT(4, ("Registering Mali-400 MP2 device\n"));
num_pp_cores = 2;
mali_gpu_device.num_resources = ARRAY_SIZE(mali_gpu_resources_m400_mp2);
mali_gpu_device.resource = mali_gpu_resources_m400_mp2;
mali_write_phys(0xC0010020, 0xA); /* Enable direct memory mapping for FPGA */
}
}
#endif
/* Register the platform device */
err = platform_device_register(&mali_gpu_device);
if (0 == err) {
#ifdef CONFIG_PM_RUNTIME
#if (LINUX_VERSION_CODE >= KERNEL_VERSION(2, 6, 37))
pm_runtime_set_autosuspend_delay(&(mali_gpu_device.dev), 1000);
pm_runtime_use_autosuspend(&(mali_gpu_device.dev));
#endif
pm_runtime_enable(&(mali_gpu_device.dev));
#endif
MALI_DEBUG_ASSERT(0 < num_pp_cores);
mali_core_scaling_init(num_pp_cores);
return 0;
}
return err;
}
void mali_platform_device_unregister(void)
{
MALI_DEBUG_PRINT(4, ("mali_platform_device_unregister() called\n"));
mali_core_scaling_term();
platform_device_unregister(&mali_gpu_device);
platform_device_put(&mali_gpu_device);
#if defined(CONFIG_ARCH_REALVIEW)
mali_write_phys(0xC0010020, 0x9); /* Restore default (legacy) memory mapping */
#endif
}
static void mali_platform_device_release(struct device *device)
{
MALI_DEBUG_PRINT(4, ("mali_platform_device_release() called\n"));
}
#else /* CONFIG_MALI_DT */
int mali_platform_device_init(struct platform_device *device)
{
int num_pp_cores;
int err = -1;
#if defined(CONFIG_ARCH_REALVIEW)
u32 m400_gp_version;
#endif
/* Detect present Mali GPU and connect the correct resources to the device */
#if defined(CONFIG_ARCH_VEXPRESS)
#if defined(CONFIG_ARM64)
if (mali_read_phys(0x6F000000) == 0x40601450) {
MALI_DEBUG_PRINT(4, ("Registering Mali-450 MP6 device\n"));
num_pp_cores = 6;
}
#else
if (mali_read_phys(0xFC000000) == 0x00000450) {
MALI_DEBUG_PRINT(4, ("Registering Mali-450 MP8 device\n"));
num_pp_cores = 8;
} else if (mali_read_phys(0xFC000000) == 0x40400450) {
MALI_DEBUG_PRINT(4, ("Registering Mali-450 MP4 device\n"));
num_pp_cores = 4;
}
#endif
#elif defined(CONFIG_ARCH_REALVIEW)
m400_gp_version = mali_read_phys(0xC000006C);
if ((m400_gp_version & 0xFFFF0000) == 0x0C070000) {
MALI_DEBUG_PRINT(4, ("Registering Mali-300 device\n"));
num_pp_cores = 1;
mali_write_phys(0xC0010020, 0xA); /* Enable direct memory mapping for FPGA */
} else if ((m400_gp_version & 0xFFFF0000) == 0x0B070000) {
u32 fpga_fw_version = mali_read_phys(0xC0010000);
if (fpga_fw_version == 0x130C008F || fpga_fw_version == 0x110C008F) {
/* Mali-400 MP1 r1p0 or r1p1 */
MALI_DEBUG_PRINT(4, ("Registering Mali-400 MP1 device\n"));
num_pp_cores = 1;
mali_write_phys(0xC0010020, 0xA); /* Enable direct memory mapping for FPGA */
} else if (fpga_fw_version == 0x130C000F) {
/* Mali-400 MP2 r1p1 */
MALI_DEBUG_PRINT(4, ("Registering Mali-400 MP2 device\n"));
num_pp_cores = 2;
mali_write_phys(0xC0010020, 0xA); /* Enable direct memory mapping for FPGA */
}
}
#endif
err = platform_device_add_data(device, &mali_gpu_data, sizeof(mali_gpu_data));
if (0 == err) {
#ifdef CONFIG_PM_RUNTIME
#if (LINUX_VERSION_CODE >= KERNEL_VERSION(2, 6, 37))
pm_runtime_set_autosuspend_delay(&(device->dev), 1000);
pm_runtime_use_autosuspend(&(device->dev));
#endif
pm_runtime_enable(&(device->dev));
#endif
MALI_DEBUG_ASSERT(0 < num_pp_cores);
mali_core_scaling_init(num_pp_cores);
}
return err;
}
int mali_platform_device_deinit(struct platform_device *device)
{
MALI_IGNORE(device);
MALI_DEBUG_PRINT(4, ("mali_platform_device_deinit() called\n"));
mali_core_scaling_term();
#if defined(CONFIG_ARCH_REALVIEW)
mali_write_phys(0xC0010020, 0x9); /* Restore default (legacy) memory mapping */
#endif
return 0;
}
#endif /* CONFIG_MALI_DT */
static u32 mali_read_phys(u32 phys_addr)
{
u32 phys_addr_page = phys_addr & 0xFFFFE000;
u32 phys_offset = phys_addr & 0x00001FFF;
u32 map_size = phys_offset + sizeof(u32);
u32 ret = 0xDEADBEEF;
void *mem_mapped = ioremap_nocache(phys_addr_page, map_size);
if (NULL != mem_mapped) {
ret = (u32)ioread32(((u8 *)mem_mapped) + phys_offset);
iounmap(mem_mapped);
}
return ret;
}
#if defined(CONFIG_ARCH_REALVIEW)
static void mali_write_phys(u32 phys_addr, u32 value)
{
u32 phys_addr_page = phys_addr & 0xFFFFE000;
u32 phys_offset = phys_addr & 0x00001FFF;
u32 map_size = phys_offset + sizeof(u32);
void *mem_mapped = ioremap_nocache(phys_addr_page, map_size);
if (NULL != mem_mapped) {
iowrite32(value, ((u8 *)mem_mapped) + phys_offset);
iounmap(mem_mapped);
}
}
#endif
static int param_set_core_scaling(const char *val, const struct kernel_param *kp)
{
int ret = param_set_int(val, kp);
if (1 == mali_core_scaling_enable) {
mali_core_scaling_sync(mali_executor_get_num_cores_enabled());
}
return ret;
}
static struct kernel_param_ops param_ops_core_scaling = {
.set = param_set_core_scaling,
.get = param_get_int,
};
module_param_cb(mali_core_scaling_enable, &param_ops_core_scaling, &mali_core_scaling_enable, 0644);
MODULE_PARM_DESC(mali_core_scaling_enable, "1 means to enable core scaling policy, 0 means to disable core scaling policy");
void mali_gpu_utilization_callback(struct mali_gpu_utilization_data *data)
{
if (1 == mali_core_scaling_enable) {
mali_core_scaling_update(data);
}
}
|
#!/bin/bash
set -euo pipefail
function contains() {
    local value=$1
    shift
    for element in "$@"; do
        if [ "$element" = "$value" ]; then
            return 0
        fi
    done
    return 1
}
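# Example: returns 0 (success) when the value is present in the array, 1 otherwise:
#   contains "1.4.6" "${CENTOS7_VERSIONS[@]}" && echo "supported"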
function init_manifest_file() {
    local version=$1
    mkdir -p /tmp/containerd/$version
    local file=/tmp/containerd/$version/Manifest
    cat <<EOT > $file
yum libzstd
asset runc https://github.com/opencontainers/runc/releases/download/v1.0.0-rc95/runc.amd64
EOT
}
function add_supported_os_to_manifest_file() {
local version=$1
local os=$2
local dockerfile=$3
local file=/tmp/containerd/$version/Manifest
cat <<EOT >> $file
dockerout $os addons/containerd/template/$dockerfile $version
EOT
}
function init_preflight_file() {
local version=$1
mkdir -p /tmp/containerd/$version
local file=/tmp/containerd/$version/host-preflight.yaml
cat <<EOT > $file
apiVersion: troubleshoot.sh/v1beta2
kind: HostPreflight
metadata:
name: kurl-builtin
spec:
collectors:
- hostOS: {}
analyzers:
- hostOS:
outcomes:
EOT
}
function add_unsupported_os_to_preflight_file() {
local version=$1
local os_distro=$2
local os_version=$3
local file=/tmp/containerd/$version/host-preflight.yaml
cat <<EOT >> $file
- fail:
when: "$os_distro = $os_version"
message: "containerd addon does not support $os_distro $os_version"
EOT
}
function add_supported_os_to_preflight_file() {
local version=$1
local os_distro=$2
local os_version=$3
local file=/tmp/containerd/$version/host-preflight.yaml
cat <<EOT >> $file
- pass:
when: "$os_distro = $os_version"
message: "containerd addon supports $os_distro $os_version"
EOT
}
function copy_generated_files() {
local version=$1
local src=/tmp/containerd/$version/host-preflight.yaml
local dst=../$version/host-preflight.yaml
if [ -f $src ]; then
mv -f $src $dst
fi
local src=/tmp/containerd/$version/Manifest
local dst=../$version/Manifest
if [ -f $src ]; then
mv -f $src $dst
fi
}
VERSIONS=()
function find_common_versions() {
docker build -t centos7 -f Dockerfile.centos7 .
docker build -t centos8 -f Dockerfile.centos8 .
docker build -t ubuntu16 -f Dockerfile.ubuntu16 .
docker build -t ubuntu18 -f Dockerfile.ubuntu18 .
docker build -t ubuntu20 -f Dockerfile.ubuntu20 .
CENTOS7_VERSIONS=($(docker run --rm -i centos7 yum list --showduplicates containerd.io | grep -Eo '1\.[[:digit:]]+\.[[:digit:]]+' | grep -vE '1\.[012]\.' | sort -rV | uniq))
echo "Found ${#CENTOS7_VERSIONS[*]} containerd versions for CentOS 7: ${CENTOS7_VERSIONS[*]}"
CENTOS8_VERSIONS=($(docker run --rm -i centos8 yum list --showduplicates containerd.io | grep -Eo '1\.[[:digit:]]+\.[[:digit:]]+' | grep -vE '1\.[012]\.' | sort -rV | uniq))
echo "Found ${#CENTOS8_VERSIONS[*]} containerd versions for CentOS 8: ${CENTOS8_VERSIONS[*]}"
UBUNTU16_VERSIONS=($(docker run --rm -i ubuntu16 apt-cache madison containerd.io | grep -Eo '1\.[[:digit:]]+\.[[:digit:]]+' | grep -vE '1\.[012]\.' | sort -rV | uniq))
echo "Found ${#UBUNTU16_VERSIONS[*]} containerd versions for Ubuntu 16: ${UBUNTU16_VERSIONS[*]}"
UBUNTU18_VERSIONS=($(docker run --rm -i ubuntu18 apt-cache madison containerd.io | grep -Eo '1\.[[:digit:]]+\.[[:digit:]]+' | grep -vE '1\.[012]\.' | sort -rV | uniq))
echo "Found ${#UBUNTU18_VERSIONS[*]} containerd versions for Ubuntu 18: ${UBUNTU18_VERSIONS[*]}"
UBUNTU20_VERSIONS=($(docker run --rm -i ubuntu20 apt-cache madison containerd.io | grep -Eo '1\.[[:digit:]]+\.[[:digit:]]+' | grep -vE '1\.[012]\.' | sort -rV | uniq))
echo "Found ${#UBUNTU20_VERSIONS[*]} containerd versions for Ubuntu 20: ${UBUNTU20_VERSIONS[*]}"
# Get the intersection of versions available for all operating systems
local ALL_VERSIONS=("${CENTOS7_VERSIONS[@]}" "${CENTOS8_VERSIONS[@]}" "${UBUNTU16_VERSIONS[@]}" "${UBUNTU18_VERSIONS[@]}" "${UBUNTU20_VERSIONS[@]}")
ALL_VERSIONS=($(echo "${ALL_VERSIONS[@]}" | tr ' ' '\n' | sort -rV | uniq -d | tr '\n' ' ')) # remove duplicates
for version in ${ALL_VERSIONS[@]}; do
init_preflight_file $version
init_manifest_file $version
if ! contains "$version" ${CENTOS7_VERSIONS[*]}; then
echo "CentOS 7 lacks version $version"
add_unsupported_os_to_preflight_file $version "centos" "7"
else
add_supported_os_to_preflight_file $version "centos" "7"
add_supported_os_to_manifest_file $version "rhel-7" "Dockerfile.centos7"
add_supported_os_to_manifest_file $version "rhel-7-force" "Dockerfile.centos7-force"
fi
if ! contains "$version" ${CENTOS8_VERSIONS[*]}; then
echo "CentOS 8 lacks version $version"
add_unsupported_os_to_preflight_file $version "centos" "8"
else
add_supported_os_to_preflight_file $version "centos" "8"
add_supported_os_to_manifest_file $version "rhel-8" "Dockerfile.centos8"
fi
if ! contains "$version" ${UBUNTU16_VERSIONS[*]}; then
echo "Ubuntu 16 lacks version $version"
add_unsupported_os_to_preflight_file $version "ubuntu" "16.04"
else
add_supported_os_to_preflight_file $version "ubuntu" "16.04"
add_supported_os_to_manifest_file $version "ubuntu-16.04" "Dockerfile.ubuntu16"
fi
if ! contains "$version" ${UBUNTU18_VERSIONS[*]}; then
echo "Ubuntu 18 lacks version $version"
add_unsupported_os_to_preflight_file $version "ubuntu" "18.04"
else
add_supported_os_to_preflight_file $version "ubuntu" "18.04"
add_supported_os_to_manifest_file $version "ubuntu-18.04" "Dockerfile.ubuntu18"
fi
if ! contains "$version" ${UBUNTU20_VERSIONS[*]}; then
echo "Ubuntu 20 lacks version $version"
add_unsupported_os_to_preflight_file $version "ubuntu" "20.04"
else
add_supported_os_to_preflight_file $version "ubuntu" "20.04"
add_supported_os_to_manifest_file $version "ubuntu-20.04" "Dockerfile.ubuntu20"
fi
VERSIONS+=("$version")
done
    # Move the default version to the front of the list
    local DEFAULT_VERSION="1.4.6"
    VERSIONS=("$DEFAULT_VERSION" "${VERSIONS[@]/$DEFAULT_VERSION}")
echo "Found ${#VERSIONS[*]} containerd versions >=1.3 available for all operating systems: ${VERSIONS[*]}"
}
function generate_version() {
mkdir -p "../$version"
cp -r ./base/* "../$version"
sed -i "s/__version__/$version/g" "../$version/install.sh"
copy_generated_files $version
# Containerd overrides the pod sandbox image with pause:3.1 for 1.3.x and pause:3.2 for 1.4+.
# The Kubernetes airgap package only includes the default pause image specified by kubeadm for the
# version, so the correct pause image used by containerd must be included in its bundle.
if echo "$version" | grep -qE "1\.3\."; then
echo "image pause k8s.gcr.io/pause:3.1" >> "../$version/Manifest"
else
echo "image pause k8s.gcr.io/pause:3.2" >> "../$version/Manifest"
fi
}
function update_available_versions() {
local v=""
for version in ${VERSIONS[@]}; do
v="${v}\"${version}\", "
done
sed -i "/cron-containerd-update/c\ containerd: [${v}\"1.2.13\"], \/\/ cron-containerd-update" ../../../web/src/installers/versions.js
}
function main() {
find_common_versions
for version in ${VERSIONS[*]}; do
generate_version "$version"
done
echo "::set-output name=containerd_version::$VERSIONS"
update_available_versions
}
main
|
/*\
title: $:/plugins/sq/streams/streams-edit
type: application/javascript
module-type: widget-subclass
\*/
exports.baseClass = "edit";
exports.name = "streams-edit";
exports.constructor = function(parseTreeNode,options) {
this.initialise(parseTreeNode,options);
}
exports.prototype = {};
// Prefix used by the core edit widget to map content types to editor modules
var EDITOR_MAPPING_PREFIX = "$:/config/EditorTypeMappings/";
exports.prototype.getEditorType = function() {
var tiddler = this.wiki.getTiddler(this.editTitle);
var type = (tiddler && tiddler.fields.type) || "text/vnd.tiddlywiki";
var editorType;
if(type === "text/vnd.tiddlywiki") {
editorType = this.wiki.getTiddlerText("$:/config/sq/streams/editor-engine");
if((!$tw.wiki.getTiddler("$:/plugins/tiddlywiki/codemirror") || $tw.wiki.getTiddlerText("$:/config/Plugins/Disabled/$:/plugins/tiddlywiki/codemirror","no") === "yes" || !$tw.modules.titles["$:/plugins/tiddlywiki/codemirror/edit-codemirror.js"]) && (editorType === "codemirror") ) {
editorType = "text";
}
return editorType;
}
editorType = this.wiki.getTiddlerText(EDITOR_MAPPING_PREFIX + type);
if(!editorType) {
var typeInfo = $tw.config.contentTypeInfo[type];
if(typeInfo && typeInfo.encoding === "base64") {
editorType = "binary";
} else {
editorType = "text";
}
}
return editorType;
};
|
echo "### CONTROLLER LOCAL INSTALL SCRIPT"
INTERNAL_IP=$(curl -s -H "Metadata-Flavor: Google" \
http://metadata.google.internal/computeMetadata/v1/instance/network-interfaces/0/ip)
K8S_VERSION=1.10.6
echo " # INTERNAL_IP=${INTERNAL_IP}"
echo " # K8S_VERSION=${K8S_VERSION}"
echo " # Move certificates and config to correct location (/var/lib/kubernetes/)"
sudo mkdir -p /var/lib/kubernetes/
sudo mv ca.pem ca-key.pem kubernetes-key.pem kubernetes.pem \
service-account-key.pem service-account.pem \
encryption-config.yaml /var/lib/kubernetes/
ls -lath /var/lib/kubernetes/
echo " ## DOWNLOAD BINARIES"
wget -q --show-progress --https-only --timestamping \
"https://storage.googleapis.com/kubernetes-release/release/v${K8S_VERSION}/bin/linux/amd64/kube-apiserver" \
"https://storage.googleapis.com/kubernetes-release/release/v${K8S_VERSION}/bin/linux/amd64/kube-controller-manager" \
"https://storage.googleapis.com/kubernetes-release/release/v${K8S_VERSION}/bin/linux/amd64/kube-scheduler" \
"https://storage.googleapis.com/kubernetes-release/release/v${K8S_VERSION}/bin/linux/amd64/kubectl"
echo " # Move the binaries to the proper location (/usr/local/bin/)"
chmod +x kube-apiserver kube-controller-manager kube-scheduler kubectl
sudo mv kube-apiserver kube-controller-manager kube-scheduler kubectl /usr/local/bin/
ls -lath /usr/local/bin/kube*
echo " ## CONFIGURE API SERVER"
echo " # create api server systemd service definition (/etc/systemd/system/kube-apiserver.service)"
cat <<EOF | sudo tee /etc/systemd/system/kube-apiserver.service
[Unit]
Description=Kubernetes API Server
Documentation=https://github.com/kubernetes/kubernetes
[Service]
ExecStart=/usr/local/bin/kube-apiserver \\
--advertise-address=${INTERNAL_IP} \\
--allow-privileged=true \\
--apiserver-count=3 \\
--audit-log-maxage=30 \\
--audit-log-maxbackup=3 \\
--audit-log-maxsize=100 \\
--audit-log-path=/var/log/audit.log \\
--authorization-mode=Node,RBAC \\
--bind-address=0.0.0.0 \\
--client-ca-file=/var/lib/kubernetes/ca.pem \\
--enable-admission-plugins=Initializers,NamespaceLifecycle,NodeRestriction,LimitRanger,ServiceAccount,DefaultStorageClass,ResourceQuota \\
--enable-swagger-ui=true \\
--etcd-cafile=/var/lib/kubernetes/ca.pem \\
--etcd-certfile=/var/lib/kubernetes/kubernetes.pem \\
--etcd-keyfile=/var/lib/kubernetes/kubernetes-key.pem \\
--etcd-servers=https://10.240.0.10:2379,https://10.240.0.11:2379,https://10.240.0.12:2379 \\
--event-ttl=1h \\
--experimental-encryption-provider-config=/var/lib/kubernetes/encryption-config.yaml \\
--kubelet-certificate-authority=/var/lib/kubernetes/ca.pem \\
--kubelet-client-certificate=/var/lib/kubernetes/kubernetes.pem \\
--kubelet-client-key=/var/lib/kubernetes/kubernetes-key.pem \\
--kubelet-https=true \\
--runtime-config=api/all \\
--service-account-key-file=/var/lib/kubernetes/service-account.pem \\
--service-cluster-ip-range=10.32.0.0/24 \\
--service-node-port-range=30000-32767 \\
--tls-cert-file=/var/lib/kubernetes/kubernetes.pem \\
--tls-private-key-file=/var/lib/kubernetes/kubernetes-key.pem \\
--v=2
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
echo " ## CONFIGURE CONTROLLER MANAGER"
echo " # move kubeconfig to expected location(/var/lib/kubernetes/)"
sudo mv kube-controller-manager.kubeconfig /var/lib/kubernetes/
ls -lath /var/lib/kubernetes/kube-controller-manager.kubeconfig
echo " # create controller manager systemd service definition file ()"
cat <<EOF | sudo tee /etc/systemd/system/kube-controller-manager.service
[Unit]
Description=Kubernetes Controller Manager
Documentation=https://github.com/kubernetes/kubernetes
[Service]
ExecStart=/usr/local/bin/kube-controller-manager \\
--address=0.0.0.0 \\
--cluster-cidr=10.200.0.0/16 \\
--cluster-name=kubernetes \\
--cluster-signing-cert-file=/var/lib/kubernetes/ca.pem \\
--cluster-signing-key-file=/var/lib/kubernetes/ca-key.pem \\
--kubeconfig=/var/lib/kubernetes/kube-controller-manager.kubeconfig \\
--leader-elect=true \\
--root-ca-file=/var/lib/kubernetes/ca.pem \\
--service-account-private-key-file=/var/lib/kubernetes/service-account-key.pem \\
--service-cluster-ip-range=10.32.0.0/24 \\
--use-service-account-credentials=true \\
--v=2
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
echo " ## CONFIGURE KUBE-SCHEDULER"
echo " # move kubeconfig to expected location (/var/lib/kubernetes/)"
sudo mv kube-scheduler.kubeconfig /var/lib/kubernetes/
ls -lath /var/lib/kubernetes/kube-scheduler.kubeconfig
echo " # prepare config folder"
sudo mkdir -p /etc/kubernetes/config
echo " # create kube-schedule config yaml (/etc/kubernetes/config/kube-scheduler.yaml)"
cat <<EOF | sudo tee /etc/kubernetes/config/kube-scheduler.yaml
apiVersion: componentconfig/v1alpha1
kind: KubeSchedulerConfiguration
clientConnection:
kubeconfig: "/var/lib/kubernetes/kube-scheduler.kubeconfig"
leaderElection:
leaderElect: true
EOF
ls -lath /etc/kubernetes/config/kube-scheduler.yaml
echo " # create kube-scheduler systemd service definition file (/etc/systemd/system/kube-scheduler.service)"
cat <<EOF | sudo tee /etc/systemd/system/kube-scheduler.service
[Unit]
Description=Kubernetes Scheduler
Documentation=https://github.com/kubernetes/kubernetes
[Service]
ExecStart=/usr/local/bin/kube-scheduler \\
--config=/etc/kubernetes/config/kube-scheduler.yaml \\
--v=2
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
echo " ## LOAD AND START SYSTEMD SERVICES"
echo " # kube-apiserver, kube-controller-manager, kube-scheduler"
sudo systemctl daemon-reload
sudo systemctl enable kube-apiserver kube-controller-manager kube-scheduler
sudo systemctl start kube-apiserver kube-controller-manager kube-scheduler
echo " ## ENABLE HTTP HEALTH CHECKS"
echo " # install nginx"
sudo apt-get install -y nginx
echo " # configure nginx"
cat > kubernetes.default.svc.cluster.local <<EOF
server {
listen 80;
server_name kubernetes.default.svc.cluster.local;
location /healthz {
proxy_pass https://127.0.0.1:6443/healthz;
proxy_ssl_trusted_certificate /var/lib/kubernetes/ca.pem;
}
}
EOF
sudo mv kubernetes.default.svc.cluster.local /etc/nginx/sites-available/kubernetes.default.svc.cluster.local
sudo ln -s /etc/nginx/sites-available/kubernetes.default.svc.cluster.local /etc/nginx/sites-enabled/
ls -lath /etc/nginx/sites-enabled/
echo " # start nginx service"
sudo systemctl restart nginx
sudo systemctl enable nginx
echo " # verification (waiting a few seconds to make sure they're running)"
sleep 15
kubectl get componentstatuses --kubeconfig admin.kubeconfig
curl -H "Host: kubernetes.default.svc.cluster.local" -i http://127.0.0.1/healthz
echo " ## CREATE RBAC CONFIGURATION"
echo " # generate ClusterRole"
cat <<EOF | kubectl apply --kubeconfig admin.kubeconfig -f -
apiVersion: rbac.authorization.k8s.io/v1beta1
kind: ClusterRole
metadata:
annotations:
rbac.authorization.kubernetes.io/autoupdate: "true"
labels:
kubernetes.io/bootstrapping: rbac-defaults
name: system:kube-apiserver-to-kubelet
rules:
- apiGroups:
- ""
resources:
- nodes/proxy
- nodes/stats
- nodes/log
- nodes/spec
- nodes/metrics
verbs:
- "*"
EOF
echo " # create ClusterRoleBinding"
cat <<EOF | kubectl apply --kubeconfig admin.kubeconfig -f -
apiVersion: rbac.authorization.k8s.io/v1beta1
kind: ClusterRoleBinding
metadata:
name: system:kube-apiserver
namespace: ""
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: system:kube-apiserver-to-kubelet
subjects:
- apiGroup: rbac.authorization.k8s.io
kind: User
name: kubernetes
EOF
|
// test/unit/lib/path_test.js
/**
* Copyright 2014 Skytap Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
var path = require('path'),
should = require('should'),
sinon = require('sinon'),
Backhoe = require('backhoe'),
Module = require('../../../lib/path');
describe('lib/path.js', function () {
beforeEach(function () {
Backhoe.clear();
});
describe('isCoffeescriptFile', function () {
[
{
file : '',
expected : false
},
{
file : 'coffeescript.js',
expected : false
},
{
file : 'foo.notcoffee',
expected : false
},
{
file : '.coffee',
expected : false
},
{
file : 'foo.coffee',
expected : true
}
].forEach(function (testCase) {
it('returns correct value for: ' + JSON.stringify(testCase), function () {
var module = new Module(testCase.file);
module.isCoffeescriptFile().should.eql(testCase.expected);
});
});
});
describe('isJavascriptFile', function () {
[
{
file : '',
expected : false
},
{
file : 'js.coffee',
expected : false
},
{
file : 'foo.notjs',
expected : false
},
{
file : '.js',
expected : false
},
{
file : 'foo.js',
expected : true
}
].forEach(function (testCase) {
it('returns correct value for: ' + JSON.stringify(testCase), function () {
var module = new Module(testCase.file);
module.isJavascriptFile().should.eql(testCase.expected);
});
});
});
});
|
const { NotImplementedError } = require('../extensions/index.js');
/**
* Given a singly linked list of integers l and an integer k,
* remove all elements from list l that have a value equal to k.
*
* @param {List} l
* @param {Number} k
* @return {List}
*
* @example
* For l = [3, 1, 2, 3, 4, 5] and k = 3,
* the output should be [1, 2, 4, 5]
 */
function ListNode(x) {
this.value = x;
this.next = null;
}
module.exports = function removeKFromList(head, val) {
let array = transformToArray(head);
array = array.filter((item) => item !== val);
const res = transformToList(array)
return res;
}
// Transform the singly linked list into an array
function transformToArray(head) {
  const resArray = [];
  let node = head;
  while (node) {
    resArray.push(node.value);
    node = node.next;
  }
  return resArray;
}
// Build a linked list from the array
function transformToList(array) {
  return array.reduceRight((acc, cur) => {
    const node = new ListNode(cur);
    node.next = acc;
    return node;
  }, null);
}
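// Usage example with the helpers above:
//   const list = transformToList([3, 1, 2, 3, 4, 5]);
//   transformToArray(removeKFromList(list, 3)); // => [1, 2, 4, 5]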
|
import logging
import time
from functools import wraps
LOGGER = logging.getLogger(__name__)
def log_execution_time(func):
@wraps(func)
def wrapper(*args, **kwargs):
start_time = time.time()
result = func(*args, **kwargs)
end_time = time.time()
execution_time_ms = (end_time - start_time) * 1000
LOGGER.info(f"Function '{func.__name__}' executed in {execution_time_ms:.2f} milliseconds")
return result
return wrapper
@log_execution_time
def sample_function(a, b):
return a + b
# Demonstrate the functionality of the log_execution_time decorator.
# Configure logging so the INFO-level timing message is actually emitted.
logging.basicConfig(level=logging.INFO)
sample_result = sample_function(3, 5)
print(sample_result)  # Output: 8
# Check the logs to see the execution time of sample_function
|
# pma.py --maxTransitions 100 synchronous msocket
# 77 states, 100 transitions, 1 accepting states, 0 unsafe states, 0 finished and 0 deadend states
# actions here are just labels, but must be symbols with __name__ attribute
def send_return(): pass
def send_call(): pass
def recv_call(): pass
def recv_return(): pass
# states, key of each state here is its number in graph etc. below
states = {
0 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': ''}},
1 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': ''}},
2 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': ''}},
3 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'a'}},
4 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bb'}},
5 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'b'}},
6 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'a'}},
7 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'bb'}},
8 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'b'}},
9 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'b'}},
10 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'b'}},
11 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'b'}},
12 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'ba'}},
13 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbb'}},
14 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'ba'}},
15 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'bbb'}},
16 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'a'}},
17 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bb'}},
18 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'a'}},
19 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'a'}},
20 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'bb'}},
21 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'bb'}},
22 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'aa'}},
23 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'abb'}},
24 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'ab'}},
25 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bba'}},
26 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbbb'}},
27 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'aa'}},
28 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'abb'}},
29 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'ab'}},
30 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'bba'}},
31 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'bbbb'}},
32 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'ba'}},
33 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbb'}},
34 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'ba'}},
35 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'ba'}},
36 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'bbb'}},
37 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'bbb'}},
38 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'baa'}},
39 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'babb'}},
40 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bab'}},
41 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbba'}},
42 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbbbb'}},
43 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'baa'}},
44 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'babb'}},
45 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'bab'}},
46 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'bbba'}},
47 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'bbbbb'}},
48 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'aa'}},
49 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'abb'}},
50 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'ab'}},
51 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bba'}},
52 : {'synchronous': 0, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbbb'}},
53 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'aa'}},
54 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'aa'}},
55 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'abb'}},
56 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'abb'}},
57 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'ab'}},
58 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'ab'}},
59 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'bba'}},
60 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'bba'}},
61 : {'synchronous': 1, 'msocket': {'send_arg': 'a', 'recv_arg': 0, 'buffers': 'bbbb'}},
62 : {'synchronous': 1, 'msocket': {'send_arg': 'bb', 'recv_arg': 0, 'buffers': 'bbbb'}},
63 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'aaa'}},
64 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'aabb'}},
65 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'aab'}},
66 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'abba'}},
67 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'abbbb'}},
68 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'abbb'}},
69 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'aba'}},
70 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbaa'}},
71 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbabb'}},
72 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbab'}},
73 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbbba'}},
74 : {'synchronous': 2, 'msocket': {'send_arg': '', 'recv_arg': 0, 'buffers': 'bbbbbb'}},
75 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'aaa'}},
76 : {'synchronous': 3, 'msocket': {'send_arg': '', 'recv_arg': 4, 'buffers': 'aabb'}},
}
# initial state, accepting states, unsafe states, frontier states, finished and deadend states, run starts
initial = 0
accepting = [0]
unsafe = []
frontier = [65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76]
finished = []
deadend = []
runstarts = [0]
# finite state machine, list of tuples: (current, (action, args, result), next)
graph = (
(0, (send_call, ('a',), None), 1),
(0, (send_call, ('bb',), None), 2),
(1, (send_return, (1,), None), 3),
(2, (send_return, (2,), None), 4),
(2, (send_return, (1,), None), 5),
(3, (recv_call, (4,), None), 6),
(4, (recv_call, (4,), None), 7),
(5, (recv_call, (4,), None), 8),
(6, (recv_return, ('a',), None), 0),
(7, (recv_return, ('bb',), None), 0),
(7, (recv_return, ('b',), None), 9),
(8, (recv_return, ('b',), None), 0),
(9, (send_call, ('a',), None), 10),
(9, (send_call, ('bb',), None), 11),
(10, (send_return, (1,), None), 12),
(11, (send_return, (2,), None), 13),
(11, (send_return, (1,), None), 4),
(12, (recv_call, (4,), None), 14),
(13, (recv_call, (4,), None), 15),
(14, (recv_return, ('ba',), None), 0),
(14, (recv_return, ('b',), None), 16),
(15, (recv_return, ('bb',), None), 9),
(15, (recv_return, ('b',), None), 17),
(16, (send_call, ('a',), None), 18),
(16, (send_call, ('bb',), None), 19),
(17, (send_call, ('a',), None), 20),
(17, (send_call, ('bb',), None), 21),
(18, (send_return, (1,), None), 22),
(19, (send_return, (2,), None), 23),
(19, (send_return, (1,), None), 24),
(20, (send_return, (1,), None), 25),
(21, (send_return, (2,), None), 26),
(21, (send_return, (1,), None), 13),
(22, (recv_call, (4,), None), 27),
(23, (recv_call, (4,), None), 28),
(24, (recv_call, (4,), None), 29),
(25, (recv_call, (4,), None), 30),
(26, (recv_call, (4,), None), 31),
(27, (recv_return, ('aa',), None), 0),
(27, (recv_return, ('a',), None), 16),
(28, (recv_return, ('a',), None), 17),
(28, (recv_return, ('ab',), None), 9),
(29, (recv_return, ('a',), None), 9),
(29, (recv_return, ('ab',), None), 0),
(30, (recv_return, ('bb',), None), 16),
(30, (recv_return, ('b',), None), 32),
(31, (recv_return, ('bb',), None), 17),
(31, (recv_return, ('b',), None), 33),
(32, (send_call, ('a',), None), 34),
(32, (send_call, ('bb',), None), 35),
(33, (send_call, ('a',), None), 36),
(33, (send_call, ('bb',), None), 37),
(34, (send_return, (1,), None), 38),
(35, (send_return, (2,), None), 39),
(35, (send_return, (1,), None), 40),
(36, (send_return, (1,), None), 41),
(37, (send_return, (2,), None), 42),
(37, (send_return, (1,), None), 26),
(38, (recv_call, (4,), None), 43),
(39, (recv_call, (4,), None), 44),
(40, (recv_call, (4,), None), 45),
(41, (recv_call, (4,), None), 46),
(42, (recv_call, (4,), None), 47),
(43, (recv_return, ('ba',), None), 16),
(43, (recv_return, ('b',), None), 48),
(44, (recv_return, ('ba',), None), 17),
(44, (recv_return, ('b',), None), 49),
(45, (recv_return, ('ba',), None), 9),
(45, (recv_return, ('b',), None), 50),
(46, (recv_return, ('bb',), None), 32),
(46, (recv_return, ('b',), None), 51),
(47, (recv_return, ('bb',), None), 33),
(47, (recv_return, ('b',), None), 52),
(48, (send_call, ('a',), None), 53),
(48, (send_call, ('bb',), None), 54),
(49, (send_call, ('a',), None), 55),
(49, (send_call, ('bb',), None), 56),
(50, (send_call, ('a',), None), 57),
(50, (send_call, ('bb',), None), 58),
(51, (send_call, ('a',), None), 59),
(51, (send_call, ('bb',), None), 60),
(52, (send_call, ('a',), None), 61),
(52, (send_call, ('bb',), None), 62),
(53, (send_return, (1,), None), 63),
(54, (send_return, (2,), None), 64),
(54, (send_return, (1,), None), 65),
(55, (send_return, (1,), None), 66),
(56, (send_return, (2,), None), 67),
(56, (send_return, (1,), None), 68),
(57, (send_return, (1,), None), 69),
(58, (send_return, (2,), None), 68),
(58, (send_return, (1,), None), 23),
(59, (send_return, (1,), None), 70),
(60, (send_return, (2,), None), 71),
(60, (send_return, (1,), None), 72),
(61, (send_return, (1,), None), 73),
(62, (send_return, (2,), None), 74),
(62, (send_return, (1,), None), 42),
(63, (recv_call, (4,), None), 75),
(64, (recv_call, (4,), None), 76),
)
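# A small usage sketch (not part of the generated output): list the transitions
# enabled in a given state by scanning the `graph` tuple table.
#
#   def enabled(state):
#       return [(action.__name__, args, next_state)
#               for (current, (action, args, result), next_state) in graph
#               if current == state]
#
#   enabled(initial)  # => [('send_call', ('a',), 1), ('send_call', ('bb',), 2)]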
|
import React from 'react'
import { rgba } from 'polished'
import styled from 'styled-components'
import { Container, Row, Col } from 'react-bootstrap'
import { Section, Title, Text, Span, Box } from '../../components/Core'
import ContactForm from '../../components/ContactForm'
import { device } from '../../utils'
const ContactCard = styled.div`
border-radius: 10px;
background-color: ${({ theme }) => rgba(theme.colors.primary, 0.1)};
margin-top: 3rem;
@media ${device.lg} {
margin-top: 120px;
}
`
const Contact = ({ hero = true, bg = 'dark', ...rest }) => {
return (
<>
<Section hero={hero} bg={bg} {...rest}>
<Container>
<Row className="justify-content-center">
<Col lg="6">
<ContactCard className="p-5 ms-lg-5">
<div>
<Text color="light">Call me at</Text>
<a
className="text-primary"
href="tel:+34653071080"
className="fw-bold"
>
<span className="text-white">
+34 653071080
</span>
</a>
</div>
{/* <div className="mt-5">
<Text color="light">Call me at</Text>
<div>
<a href="tel:+1-402-4983" className="fw-bold">
<Span color="primary">+1-402-4983</Span>
</a>
</div>
</div> */}
</ContactCard>
{/* <Box className="pr-lg-5">
<Title color="light" variant="secSm" mb="2rem">
Contact now
</Title>
<Text color="light" mb="2.75rem">
Call me: <a className="text-white" href="tel:+34653071080">653071080</a>
</Text>
<ContactForm theme="dark" />
</Box> */}
</Col>
<Col lg="6">
<ContactCard className="p-5 ms-lg-5">
<div>
<Text color="light">Email me at</Text>
<a
className="text-primary"
href="mailto:<EMAIL>"
className="fw-bold"
>
<span className="text-white">
<EMAIL>
</span>
</a>
</div>
{/* <div className="mt-5">
<Text color="light">Call me at</Text>
<div>
<a href="tel:+1-402-4983" className="fw-bold">
<Span color="primary">+1-402-4983</Span>
</a>
</div>
</div> */}
</ContactCard>
</Col>
</Row>
</Container>
</Section>
</>
)
}
export default Contact
|
SELECT TOP 10 * FROM Customers ORDER BY birth_date ASC;
|
(defn reverse-string [s]
(apply str (reverse (seq s))))
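;; Example: (reverse-string "hello") ;=> "olleh"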
|
package main
import (
"fmt"
"reflect"
"testing"
)
func assertErrorIsNil(t *testing.T, err error) {
t.Helper()
if err != nil {
t.Errorf("got an error: %v", err)
}
}
func assertIntListIsEqual(t *testing.T, got, want []int) {
t.Helper()
if !reflect.DeepEqual(got, want) {
t.Errorf("got %v want %v", got, want)
}
}
func assertEqualInts(t *testing.T, got, want int) {
t.Helper()
if got != want {
t.Errorf("got %v want %v", got, want)
}
}
func Test_PrepareInput(t *testing.T) {
tests := []struct {
input []int
want []int
}{
{[]int{1}, []int{0, 1, 4}},
{[]int{1, 2, 3}, []int{0, 1, 2, 3, 6}},
{[]int{4, 2, 1}, []int{0, 1, 2, 4, 7}},
}
for _, test := range tests {
t.Run("PrepareInput("+fmt.Sprint(test.input)+")", func(t *testing.T) {
got := PrepareInput(test.input)
assertIntListIsEqual(t, got, test.want)
})
}
}
func Test_GetNextAdapter(t *testing.T) {
adapters := PrepareInput([]int{16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4})
tests := []struct {
input int
want int
}{
{0, 1},
{1, 4},
{4, 5},
{5, 6},
{6, 7},
{7, 10},
{10, 11},
{11, 12},
{12, 15},
{15, 16},
{16, 19},
}
for _, test := range tests {
t.Run("GetNextAdapter("+fmt.Sprint(test.input)+")", func(t *testing.T) {
got, err := GetNextAdapter(test.input, adapters)
assertErrorIsNil(t, err)
assertEqualInts(t, got, test.want)
})
}
t.Run("GetNextAdapter returns an error if no adapter matches", func(t *testing.T) {
_, err := GetNextAdapter(adapters[len(adapters)-1], adapters)
if err == nil {
t.Errorf("GetNextAdapter() should return an error if there is no bigger adapter")
}
})
t.Run("GetNextAdapter returns an error if no adapter can be found (difference of jolts > 3)", func(t *testing.T) {
brokenAdapters := []int{1, 5}
_, err := GetNextAdapter(1, brokenAdapters)
if err == nil {
t.Errorf("GetNextAdapter() should return an error if there is no bigger adapter")
}
})
}
func Test_GetDifferences(t *testing.T) {
tests := []struct {
input []int
wantedDifferencesOf1 int
wantedDifferencesOf3 int
}{
{[]int{1}, 1, 1},
{[]int{1, 2}, 2, 1},
{[]int{16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4}, 7, 5},
{[]int{28, 33, 18, 42, 31, 14, 46, 20, 48, 47, 24, 23, 49, 45, 19, 38, 39, 11, 1, 32, 25, 35, 8, 17, 7, 9, 4, 2, 34, 10, 3}, 22, 10},
}
for _, test := range tests {
t.Run("GetDifferences("+fmt.Sprint(test.input)+")", func(t *testing.T) {
input := PrepareInput(test.input)
got1, got3, err := GetDifferences(input)
assertErrorIsNil(t, err)
assertEqualInts(t, got1, test.wantedDifferencesOf1)
assertEqualInts(t, got3, test.wantedDifferencesOf3)
})
}
}
func Test_CountPossibleCombinations(t *testing.T) {
tests := []struct {
adapters []int
startingPoint int
cache map[int]int
want int
}{
{[]int{1}, 0, map[int]int{}, 1},
{[]int{1, 2, 3, 4}, 2, map[int]int{2: 99}, 99},
{[]int{16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4}, 0, map[int]int{}, 8},
{[]int{28, 33, 18, 42, 31, 14, 46, 20, 48, 47, 24, 23, 49, 45, 19, 38, 39, 11, 1, 32, 25, 35, 8, 17, 7, 9, 4, 2, 34, 10, 3}, 0, map[int]int{}, 19208},
}
for _, test := range tests {
t.Run("CountPossibleCombinations("+fmt.Sprint(test.adapters)+", "+fmt.Sprint(test.startingPoint)+", "+fmt.Sprint(test.cache)+")", func(t *testing.T) {
adapters := PrepareInput(test.adapters)
got := CountPossibleCombinations(adapters, test.startingPoint, test.cache)
assertEqualInts(t, got, test.want)
})
}
}
|
// Documentation/_permute_8cpp.js
var _permute_8cpp =
[
[ "Permute", "_permute_8cpp.xhtml#af3c74017185773dd61d8ca6662d65d43", null ],
[ "Permuted", "_permute_8cpp.xhtml#abeaf4f6785039866fd075f4569ba8e84", null ],
[ "Permuted", "_permute_8cpp.xhtml#a2ba6f6f40c7382b61b00ac02f961ba22", null ]
];
|
#!/usr/bin/env bash
set -e
echo
echo "ANALYZE QUERY PLAN - GET STREAM MESSAGES CORRELATED"
echo "==================================================="
echo "- Write 3 messages to an entity stream"
echo "- Retrieve a batch of messages from the stream matching the correlation category"
echo
source test/_controls.sh
correlation=$(category)
correlation_stream_name=$(stream-name $correlation)
echo "Correlation:"
echo $correlation
echo
stream_name=$(stream-name)
echo "Stream Name:"
echo $stream_name
echo
write-message-correlated $stream_name 1
write-message-correlated $stream_name 2 $correlation_stream_name
cmd="
LOAD 'auto_explain';
SET auto_explain.log_min_duration = 0;
SET auto_explain.log_nested_statements=on;
EXPLAIN ANALYZE SELECT * FROM get_stream_messages('$stream_name', correlation => '$correlation');
"
echo "Command:"
echo "$cmd"
echo
psql message_store -P pager=off -x -c "$cmd"
echo "= = ="
echo
|
def generate_unique_id(arr):
# create an empty dictionary
ids = {}
    # counter for the next unique id to assign
    id = 0
# loop through the list and generate unique ids
for item in arr:
if item not in ids:
ids[item] = id
id += 1
return ids
if __name__ == '__main__':
arr = [1, 2, 3]
print(generate_unique_id(arr))
|
def spam(divideBy):
try:
return 42 / divideBy
except ZeroDivisionError:
print('Error: Invalid argument.')
print(spam(2))
print(spam(12))
print(spam(0))
|
import { IsNotEmpty, IsString, MaxLength, Min, MinLength } from 'class-validator';
import { ObjectType, Field, ID, InputType, PartialType } from '@nestjs/graphql';
@ObjectType()
export class DeveloperType {
@Field(type => ID, { nullable: true })
_id?: string;
@Field()
name: string;
@Field()
fullName: string;
@Field()
userName: string;
@Field()
password: string;
@Field({ nullable: true })
createdAt?: Date;
@Field({ nullable: true })
updatedAt?: Date;
}
@InputType()
export class CreateDeveloperInput {
_id?: string;
@Field()
@IsNotEmpty()
@IsString()
@MaxLength(100, {message:"Firstname is too long. 100 Characters Only"})
name?: string;
@Field()
@IsNotEmpty()
@IsString()
@MaxLength(100, {message:"Lastname is too long. 100 Characters Only"})
fullName?: string;
@Field()
@IsNotEmpty()
@IsString()
@MinLength(5, {message:"Username is too short. 5 Characters is the minimum"})
@MaxLength(100, {message:"Username is too long. 100 Characters Only"})
userName?: string;
@Field()
@IsNotEmpty()
@IsString()
@MinLength(5, {message:"Password is too short. 5 Characters is the minimum"})
  @MaxLength(1500, {message:"Password is too long. 1500 Characters Only"})
password?: string;
}
|
/* global browser */
// TEMP DEV
let mockUrlToCheck = 'https://mockurl.example.com/a-path?fake-query=sure'
let mockAssets = [
{
fileUrl: 'bingbong.com.js',
assetType: 'js',
forPatch: 'bingbong.com',
},
{
fileUrl: 'wimwam.flam,bingbong.com,hiphop.stop,bingobango.bongo,hothere.stranger.css',
    assetType: 'css',
forPatch: 'wimwam.flam,bingbong.com,hiphop.stop,bingobango.bongo,hothere.stranger',
},
{
fileUrl: 'wimwam.flam,bingbong.com,hiphop.stop,bingobango.bongo,hothere.stranger.js',
assetType: 'js',
forPatch: 'wimwam.flam,bingbong.com,hiphop.stop,bingobango.bongo,hothere.stranger',
}
]
let mockPatches = [
{
id: mockAssets[1].forPatch,
matchList: mockAssets[1].forPatch.split(','),
assets: [mockAssets[1], mockAssets[2]],
options: {
on: true
}
},
{
id: mockAssets[0].forPatch,
matchList: mockAssets[0].forPatch.split(','),
assets: [mockAssets[0]],
options: {
on: false
}
}
]
export let last = arr => arr[arr.length - 1] // avoid Array.prototype.reverse, which mutates its input
export let fileExtension = path => last(path.split('.'))
export const getActiveAssets = (app) => {
// TODO - DEV ONLY
if (!app.weApiAvailable && !app.chromeWeApiAvailable){
// Mock real asset dep-invocations
return mockAssets
}
let els = [...document.querySelectorAll('head [data-mane-match-list]')]
let assets = els.map(el => {
let src = el.href || el.src
return {
fileUrl: src,
assetType: fileExtension(src),
forPatch: el.dataset.maneId
}
})
return assets
}
// Note: `Patch` is assumed to be provided by the importing environment; it is not declared in this file.
export const makePatchMapFromStrings = matchListStrings => new Map(matchListStrings.map(matchListString => {
let patch = new Patch(matchListString)
return [patch.id, patch]
}))
export const assetsToPatchMap = assets => {
let patches = new Map()
assets.forEach(asset => {
let id = asset.forPatch
delete asset.forPatch
    let extant = patches.get(id)
    if (extant){
      extant.assets.push(asset)
    } else {
      extant = {
        id,
        matchList: id.split(','),
        assets: [asset]
      }
    }
    patches.set(id, extant)
})
return patches
}
export const getActivePatches = (app) => assetsToPatchMap(getActiveAssets(app))
export const getActiveTabUrl = async (app) => {
// TODO - DEV ONLY
if (!app.weApiAvailable && !app.chromeWeApiAvailable){
// Mock a real URL
return mockUrlToCheck
}
return new Promise((res, rej) => {
let onTabsFound = tabs => {
if (tabs[0]){
res(tabs[0].url)
} else {
rej(Error('No tabs found'))
}
}
if (app.chromeWeApiAvailable) {
browser.tabs.query({
active: true,
currentWindow: true
}, onTabsFound)
} else if (app.weApiAvailable){
return browser.tabs.query({
active: true,
currentWindow: true
}).then(onTabsFound, err => {
console.error({err})
})
}
})
}
// TODO: Why is our CORS not working here?
export const getMatchingPatches = async (url, {app}) => {
if (!app.weApiAvailable && !app.chromeWeApiAvailable){
return mockPatches
}
if (!url){
url = await getActiveTabUrl(app)
}
// We need to encode to escape all the special URI characters
let patchRequestPath = `${app.cfg.maneServerHostname}:${app.cfg.maneServerPort}/patches-for/${encodeURIComponent(url)}`
let response = await fetch(patchRequestPath, {
mode: 'cors'
})
if (response.ok) {
let patchArr = await response.json()
for (let patch of patchArr){
if (!patch.options){
console.error('Matching patch had no options; filling with defaults', patch)
patch.options = {}
}
let {
on = true,
whenToRun = 'dom'
} = patch.options
patch.options = { on, whenToRun }
}
return patchArr
} else {
throw response
}
}
export const resolveIn = waitMs => new Promise(resolve => setTimeout(resolve, waitMs))
export const rejectIn = waitMs => new Promise((res, reject) => setTimeout(reject, waitMs))
// From https://stackoverflow.com/a/35385518
export const htmlToElement = html => {
var template = document.createElement('template')
template.innerHTML = html.trim()
return template.content.firstChild
}
|
#!/bin/bash
unamestr=$(uname)
if [[ "$unamestr" == "Darwin" ]]; then
LIBRARY_NAME_SUFFIX=dylib
else
LIBRARY_NAME_SUFFIX=dll
fi
# make sure that we are under project folder
mkdir -p build
pushd build
|
require 'etengine/scenario_migration'
class HouseholdBatteryVolume < ActiveRecord::Migration[5.2]
include ETEngine::ScenarioMigration
P2P_KEY = "households_flexibility_p2p_electricity_market_penetration"
# old volume divided by new volume
ADJUSTMENT_FACTOR = 0.0198 / 0.0097
def up
migrate_scenarios do |scenario|
if scenario.user_values.key?(P2P_KEY)
new_value = scenario.user_values[P2P_KEY] * ADJUSTMENT_FACTOR
if new_value > 100.0
new_value = 100.0
end
scenario.user_values[P2P_KEY] = new_value
end
end
end
end
|
#!/bin/bash
set -ex
mkdir build
cd build
cmake -G "Unix Makefiles" \
-DCMAKE_INSTALL_PREFIX:PATH="${PREFIX}" \
-DCMAKE_BUILD_TYPE:STRING=Release \
-DENABLE_TESTS=OFF \
-DCMAKE_LIBRARY_PATH="${PREFIX}/lib" \
-DCMAKE_INCLUDE_PATH="${PREFIX}/include" \
..
# CircleCI offers two cores.
make -j $CPU_COUNT
make install
|
def sort_ascending(lst):
for i in range(len(lst)-1):
min_index = i
for j in range(i+1, len(lst)):
if lst[j] < lst[min_index]:
min_index = j
lst[i], lst[min_index] = lst[min_index], lst[i]
return lst
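# Usage sketch (illustrative): selection sort; O(n^2) comparisons, sorts in place.
# sort_ascending([5, 2, 9, 1])  ->  [1, 2, 5, 9]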
|
import Vue from 'vue'
import moxios from 'moxios'
import * as sinon from 'sinon'
import { fn as momentProto } from 'moment'
import New from '@/components/App/Sample/New'
import VueRouter from 'vue-router'
import { HTTP } from '@/utils/http-common'
const sandbox = sinon.sandbox.create()
const router = new VueRouter()
describe('Sample/New.vue', () => {
let vm = null
const LIGAND_COMPONENTS = [
{
'id': 1,
'finalConcentration': 4,
'finalDrop': null,
'type': 'G',
'aliquot': {
'id': 1,
        'slug': 'ligand-no-1',
        'label': 'Ligand no.1',
'tubeLabel': 'tube no.1',
'concentration': null,
'conceptId': 'CONCEPT5',
'solvent': 'solvent no.1'
}
}]
const PROTEIN_COMPONENTS = [{
'id': 1,
'finalConcentration': 1.3,
'finalDrop': null,
'type': 'PUR',
'aliquot': {
'id': 1,
'slug': 'protein-no-1',
'label': 'PROTEIN no.1',
'tubeLabel': 'tube no.1',
'concentration': null,
'purificationId': 'PUR1AH1'
}
}]
const DATA = {
projectSlugOrId: 'project-1',
sample: {
id: 1,
slug: 'sample-label',
label: 'Sample Label',
incubationTime: 13,
incubationTemperature: 137,
otherBufferComponents: 'other components',
ligandComponent: LIGAND_COMPONENTS,
proteinComponent: PROTEIN_COMPONENTS
}
}
const DATA_2 = {
projectSlugOrId: 'project-1',
sample: {
id: null,
slug: null,
version: null,
label: 'Sample 17-08-01 08:13',
incubationTime: 13,
incubationTemperature: 137,
otherBufferComponents: 'other components',
availableForGridMaking: true,
ligandComponent: LIGAND_COMPONENTS.map(c => {
c.id = null
c.slug = null
c.version = null
return c
}),
proteinComponent: PROTEIN_COMPONENTS.map(c => {
c.id = null
c.slug = null
c.version = null
return c
})
}
}
const PROJECTS = [
{
id: 1,
slug: 'project-1',
label: 'Project 1',
samples: [DATA.sample]
},
{id: 2, label: 'Project 2', slug: 'project-2'},
{id: 3, label: 'Project 3', slug: 'project-3'}
]
beforeEach(() => {
sandbox.stub(router, 'push')
const Constructor = Vue.extend(New)
vm = new Constructor({router})
moxios.install(HTTP)
moxios.stubRequest(process.env.API_URL + 'sample/' + DATA.sample.slug, {
status: 200,
response: DATA.sample
})
moxios.stubRequest(process.env.API_URL + 'sample/' + DATA.sample.slug + '/projects', {
status: 200,
response: PROJECTS
})
moxios.stubRequest(process.env.API_URL + 'project/', {
status: 200,
response: PROJECTS
})
moxios.stubRequest(process.env.API_URL + 'sample/' + DATA.projectSlugOrId, {
status: 200,
response: DATA.sample
})
sandbox.stub(momentProto, 'format')
momentProto.format.withArgs('YY-MM-DD HH:mm').returns('17-08-01 08:13')
vm.id = DATA.sample.slug
vm.projectId = DATA.projectSlugOrId
vm = vm.$mount()
})
afterEach(() => {
moxios.uninstall(HTTP)
sandbox.restore()
vm.$destroy()
vm = null
})
it('should create new sample form have save and cancel buttons enabled', done => {
Vue.nextTick().then(() => {
expect([...vm.$el.querySelectorAll('.actions-header__buttons > button')]
.map(x => x.textContent.trim()))
.to.be.deep.equal(['Cancel', 'Save'])
expect(vm.$el.querySelector('.actions-header__action-buttons__submit').disabled).to.be.eql(false)
expect(vm.$el.querySelector('.actions-header__action-buttons__cancel').disabled).to.be.eql(false)
}).then(done, done)
})
it('should send a create sample event on save button', done => {
// Need to copy DATA. Would be modified otherwise.
vm.sample = Object.assign({}, DATA.sample)
vm.sample.components = [
...LIGAND_COMPONENTS,
...PROTEIN_COMPONENTS
]
Vue.nextTick()
.then(() => {
vm.$el.querySelector('.actions-header__action-buttons__submit').click()
vm._watcher.run()
return new Promise((resolve, reject) => moxios.wait(resolve))
}).then(() => {
const request = moxios.requests.mostRecent()
      expect(JSON.parse(request.config.data)).to.be.deep.equal(DATA_2.sample)
}).then(done, done)
})
it('should update url query upon new sample creation', done => {
// Need to copy DATA. Would be modified otherwise.
vm.sample = Object.assign({}, DATA.sample)
vm.sample.components = [
...LIGAND_COMPONENTS,
...PROTEIN_COMPONENTS
]
Vue.nextTick().then(() => {
vm.$el.querySelector('.actions-header__action-buttons__submit').click()
return new Promise((resolve, reject) => moxios.wait(resolve))
}).then(() => {
const expectedPush = {
name: 'sample-view',
params: {
id: DATA.sample.slug,
projectId: DATA.projectSlugOrId
}
}
expect(router.push).to.have.been.calledOnce
expect(router.push).to.have.been.calledWith(expectedPush)
}).then(done, done)
})
it('should not send update signal due to form validation', done => {
vm.projectId = '3'
Vue.nextTick().then(() => {
vm.sample.label = ''
vm.saveForm = sandbox.spy()
vm.submitBaseFormBy('sample')
return Vue.nextTick()
}).then(() => {
expect(vm.saveForm).to.not.have.been.called
}).then(done, done)
})
it('should not send update signal due to incubation time validation', done => {
vm.sample = DATA.sample
vm.saveForm = sandbox.spy()
Vue.nextTick().then(() => {
vm.sample.incubationTime = 'non number'
vm.submitBaseFormBy('sample')
vm.sample.incubationTime = 0
vm.submitBaseFormBy('sample')
vm.sample.incubationTime = -15
vm.submitBaseFormBy('sample')
}).then(() => {
expect(vm.saveForm).to.not.have.been.called
}).then(done, done)
})
it('should not send update signal due to incubation temperature validation', done => {
vm.sample = DATA.sample
vm.saveForm = sandbox.spy()
Vue.nextTick().then(() => {
vm.sample.incubationTemperature = 'non number'
vm.submitBaseFormBy('sample')
vm.sample.incubationTemperature = -273.15
vm.submitBaseFormBy('sample')
vm.sample.incubationTemperature = -460
vm.submitBaseFormBy('sample')
}).then(() => {
expect(vm.saveForm).to.not.have.been.called
}).then(done, done)
})
it('should not update sample with no components', done => {
vm.saveForm = sandbox.spy()
Vue.nextTick().then(() => {
vm.sample.label = 'My sample name'
}).then(() => {
vm.submitBaseFormBy('sample')
}).then(() => {
expect(vm.saveForm).to.not.have.been.called
vm.sample.components.push({aliquot: {label: 'my label', finalConcentration: '5'}})
vm.submitBaseFormBy('sample')
}).then(() => {
expect(vm.saveForm).to.have.been.calledOnce
}).then(done, done)
})
it('should show error messages on server error', done => {
let errors = {
incubationTime: 'error',
incubationTemperature: 'error'
}
vm.$events.$emit('validationError', errors)
Vue.nextTick().then(() => {
const errorMessages = vm.$el.querySelectorAll('.el-form-item__error')
errorMessages.should.have.lengthOf(2)
}).then(done, done)
})
})
|
$(document).ready(function() {
  // Each h4#itemN heading toggles its matching p.class-pN paragraph (items 1-19).
  for (var i = 1; i <= 19; i++) {
    (function(n) {
      $("h4#item" + n).click(function() {
        $("p.class-p" + n).toggle();
      });
    })(i);
  }
})
|
<filename>src/app/components/fonctionnalite/fonctionnalite.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { FonctionnaliteComponent } from './fonctionnalite.component';
describe('FonctionnaliteComponent', () => {
let component: FonctionnaliteComponent;
let fixture: ComponentFixture<FonctionnaliteComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ FonctionnaliteComponent ]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(FonctionnaliteComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});
|
package tree.declarations;
/** Declaration node that wraps a static-assert declaration (SAD). */
public class TDeclarationSAD extends TDeclaration {
public TDeclarationSAD(TDeclarationSAD node) {
super(node);
}
public TStaticAssertDeclaration getStaticAssertDeclaration() {
return (TStaticAssertDeclaration) getChild(0);
}
public TDeclarationSAD(TStaticAssertDeclaration stassdec) {
addChild(stassdec);
}
}
|
def rotate_left(array):
    # Rotate a square matrix a quarter-turn counterclockwise (runnable version of the original pseudocode).
    n = len(array)
    return [[array[col][n - row - 1] for col in range(n)] for row in range(n)]
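# Example (illustrative):
# rotate_left([[1, 2], [3, 4]])  ->  [[2, 4], [1, 3]]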
|
import React, { memo } from 'react';
// Assets
import logo from 'assets/img/logo.png';
// Helpers
import { useSessionContext } from 'shared/view/contexts';
import { clickOnEnter } from './helpers';
// Styles
import { Container, LobbyTitle, LoggedText, LogoContainer } from './styles';
const Navbar: React.FC = () => {
const { userData, logout } = useSessionContext();
return (
<Container>
<LogoContainer>
<img src={logo} alt="GameTask" />
<LobbyTitle>
Lobby <span>Entre ou crie um jogo</span>
</LobbyTitle>
</LogoContainer>
<LoggedText>
Você está logado como <strong>{userData.name}</strong>.{' '}
<span
onClick={logout}
role="button"
tabIndex={0}
onKeyUp={clickOnEnter}
>
Sair
</span>
</LoggedText>
</Container>
);
};
export default memo(Navbar);
|
#!/usr/bin/env bash
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Due to the GCE custom metadata size limit, we split the entire script into two
# files configure.sh and configure-helper.sh. The functionality of downloading
# kubernetes configuration, manifests, docker images, and binary files are
# put in configure.sh, which is uploaded via GCE custom metadata.
set -o errexit
set -o nounset
set -o pipefail
### Hardcoded constants
DEFAULT_CNI_VERSION="v0.6.0"
DEFAULT_CNI_SHA1="d595d3ded6499a64e8dac02466e2f5f2ce257c9f"
DEFAULT_NPD_VERSION="v0.5.0"
DEFAULT_NPD_SHA1="650ecfb2ae495175ee43706d0bd862a1ea7f1395"
DEFAULT_CRICTL_VERSION="v1.11.1"
DEFAULT_CRICTL_SHA1="527fca5a0ecef6a8e6433e2af9cf83f63aff5694"
DEFAULT_MOUNTER_TAR_SHA="8003b798cf33c7f91320cd6ee5cec4fa22244571"
###
# Use --retry-connrefused opt only if it's supported by curl.
CURL_RETRY_CONNREFUSED=""
if curl --help | grep -q -- '--retry-connrefused'; then
CURL_RETRY_CONNREFUSED='--retry-connrefused'
fi
function set-broken-motd {
cat > /etc/motd <<EOF
Broken (or in progress) Kubernetes node setup! Check the cluster initialization status
using the following commands.
Master instance:
- sudo systemctl status kube-master-installation
- sudo systemctl status kube-master-configuration
Node instance:
- sudo systemctl status kube-node-installation
- sudo systemctl status kube-node-configuration
EOF
}
function download-kube-env {
# Fetch kube-env from GCE metadata server.
(
umask 077
local -r tmp_kube_env="/tmp/kube-env.yaml"
curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
-H "X-Google-Metadata-Request: True" \
-o "${tmp_kube_env}" \
http://metadata.google.internal/computeMetadata/v1/instance/attributes/kube-env
# Convert the yaml format file into a shell-style file.
eval $(python -c '''
import pipes,sys,yaml
for k,v in yaml.load(sys.stdin).iteritems():
print("readonly {var}={value}".format(var = k, value = pipes.quote(str(v))))
''' < "${tmp_kube_env}" > "${KUBE_HOME}/kube-env")
rm -f "${tmp_kube_env}"
)
}
function download-kubelet-config {
local -r dest="$1"
echo "Downloading Kubelet config file, if it exists"
# Fetch kubelet config file from GCE metadata server.
(
umask 077
local -r tmp_kubelet_config="/tmp/kubelet-config.yaml"
if curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
-H "X-Google-Metadata-Request: True" \
-o "${tmp_kubelet_config}" \
http://metadata.google.internal/computeMetadata/v1/instance/attributes/kubelet-config; then
# only write to the final location if curl succeeds
mv "${tmp_kubelet_config}" "${dest}"
elif [[ "${REQUIRE_METADATA_KUBELET_CONFIG_FILE:-false}" == "true" ]]; then
echo "== Failed to download required Kubelet config file from metadata server =="
exit 1
fi
)
}
function download-kube-master-certs {
# Fetch kube-env from GCE metadata server.
(
umask 077
local -r tmp_kube_master_certs="/tmp/kube-master-certs.yaml"
curl --fail --retry 5 --retry-delay 3 ${CURL_RETRY_CONNREFUSED} --silent --show-error \
-H "X-Google-Metadata-Request: True" \
-o "${tmp_kube_master_certs}" \
http://metadata.google.internal/computeMetadata/v1/instance/attributes/kube-master-certs
# Convert the yaml format file into a shell-style file.
eval $(python -c '''
import pipes,sys,yaml
for k,v in yaml.load(sys.stdin).iteritems():
print("readonly {var}={value}".format(var = k, value = pipes.quote(str(v))))
''' < "${tmp_kube_master_certs}" > "${KUBE_HOME}/kube-master-certs")
rm -f "${tmp_kube_master_certs}"
)
}
function validate-hash {
local -r file="$1"
local -r expected="$2"
actual=$(sha1sum ${file} | awk '{ print $1 }') || true
if [[ "${actual}" != "${expected}" ]]; then
echo "== ${file} corrupted, sha1 ${actual} doesn't match expected ${expected} =="
return 1
fi
}
# Retry a download until we get it. Takes a hash and a set of URLs.
#
# $1 is the sha1 of the URL. Can be "" if the sha1 is unknown.
# $2+ are the URLs to download.
function download-or-bust {
local -r hash="$1"
shift 1
local -r urls=( $* )
while true; do
for url in "${urls[@]}"; do
local file="${url##*/}"
rm -f "${file}"
if ! curl -f --ipv4 -Lo "${file}" --connect-timeout 20 --max-time 300 --retry 6 --retry-delay 10 ${CURL_RETRY_CONNREFUSED} "${url}"; then
echo "== Failed to download ${url}. Retrying. =="
elif [[ -n "${hash}" ]] && ! validate-hash "${file}" "${hash}"; then
echo "== Hash validation of ${url} failed. Retrying. =="
else
if [[ -n "${hash}" ]]; then
echo "== Downloaded ${url} (SHA1 = ${hash}) =="
else
echo "== Downloaded ${url} =="
fi
return
fi
done
done
}
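# Usage sketch (illustrative; reuses the CNI constants defined above):
#   download-or-bust "${DEFAULT_CNI_SHA1}" \
#     "https://storage.googleapis.com/kubernetes-release/network-plugins/cni-plugins-amd64-${DEFAULT_CNI_VERSION}.tgz"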
function is-preloaded {
local -r key=$1
local -r value=$2
grep -qs "${key},${value}" "${KUBE_HOME}/preload_info"
}
function split-commas {
echo $1 | tr "," "\n"
}
function remount-flexvolume-directory {
local -r flexvolume_plugin_dir=$1
mkdir -p $flexvolume_plugin_dir
mount --bind $flexvolume_plugin_dir $flexvolume_plugin_dir
mount -o remount,exec $flexvolume_plugin_dir
}
function install-gci-mounter-tools {
CONTAINERIZED_MOUNTER_HOME="${KUBE_HOME}/containerized_mounter"
local -r mounter_tar_sha="${DEFAULT_MOUNTER_TAR_SHA}"
if is-preloaded "mounter" "${mounter_tar_sha}"; then
echo "mounter is preloaded."
return
fi
echo "Downloading gci mounter tools."
mkdir -p "${CONTAINERIZED_MOUNTER_HOME}"
chmod a+x "${CONTAINERIZED_MOUNTER_HOME}"
mkdir -p "${CONTAINERIZED_MOUNTER_HOME}/rootfs"
download-or-bust "${mounter_tar_sha}" "https://storage.googleapis.com/kubernetes-release/gci-mounter/mounter.tar"
cp "${KUBE_HOME}/kubernetes/server/bin/mounter" "${CONTAINERIZED_MOUNTER_HOME}/mounter"
chmod a+x "${CONTAINERIZED_MOUNTER_HOME}/mounter"
mv "${KUBE_HOME}/mounter.tar" /tmp/mounter.tar
tar xf /tmp/mounter.tar -C "${CONTAINERIZED_MOUNTER_HOME}/rootfs"
rm /tmp/mounter.tar
mkdir -p "${CONTAINERIZED_MOUNTER_HOME}/rootfs/var/lib/kubelet"
}
# Install node problem detector binary.
function install-node-problem-detector {
if [[ -n "${NODE_PROBLEM_DETECTOR_VERSION:-}" ]]; then
local -r npd_version="${NODE_PROBLEM_DETECTOR_VERSION}"
local -r npd_sha1="${NODE_PROBLEM_DETECTOR_TAR_HASH}"
else
local -r npd_version="${DEFAULT_NPD_VERSION}"
local -r npd_sha1="${DEFAULT_NPD_SHA1}"
fi
local -r npd_tar="node-problem-detector-${npd_version}.tar.gz"
if is-preloaded "${npd_tar}" "${npd_sha1}"; then
echo "node-problem-detector is preloaded."
return
fi
echo "Downloading node problem detector."
local -r npd_release_path="https://storage.googleapis.com/kubernetes-release"
download-or-bust "${npd_sha1}" "${npd_release_path}/node-problem-detector/${npd_tar}"
local -r npd_dir="${KUBE_HOME}/node-problem-detector"
mkdir -p "${npd_dir}"
tar xzf "${KUBE_HOME}/${npd_tar}" -C "${npd_dir}" --overwrite
mv "${npd_dir}/bin"/* "${KUBE_BIN}"
chmod a+x "${KUBE_BIN}/node-problem-detector"
rmdir "${npd_dir}/bin"
rm -f "${KUBE_HOME}/${npd_tar}"
}
function install-cni-binaries {
local -r cni_tar="cni-plugins-amd64-${DEFAULT_CNI_VERSION}.tgz"
local -r cni_sha1="${DEFAULT_CNI_SHA1}"
if is-preloaded "${cni_tar}" "${cni_sha1}"; then
echo "${cni_tar} is preloaded."
return
fi
echo "Downloading cni binaries"
download-or-bust "${cni_sha1}" "https://storage.googleapis.com/kubernetes-release/network-plugins/${cni_tar}"
local -r cni_dir="${KUBE_HOME}/cni"
mkdir -p "${cni_dir}/bin"
tar xzf "${KUBE_HOME}/${cni_tar}" -C "${cni_dir}/bin" --overwrite
mv "${cni_dir}/bin"/* "${KUBE_BIN}"
rmdir "${cni_dir}/bin"
rm -f "${KUBE_HOME}/${cni_tar}"
}
# Install crictl binary.
function install-crictl {
if [[ -n "${CRICTL_VERSION:-}" ]]; then
local -r crictl_version="${CRICTL_VERSION}"
local -r crictl_sha1="${CRICTL_TAR_HASH}"
else
local -r crictl_version="${DEFAULT_CRICTL_VERSION}"
local -r crictl_sha1="${DEFAULT_CRICTL_SHA1}"
fi
local -r crictl="crictl-${crictl_version}-linux-amd64"
if is-preloaded "${crictl}" "${crictl_sha1}"; then
echo "crictl is preloaded"
return
fi
echo "Downloading crictl"
local -r crictl_path="https://storage.googleapis.com/kubernetes-release/crictl"
download-or-bust "${crictl_sha1}" "${crictl_path}/${crictl}"
mv "${KUBE_HOME}/${crictl}" "${KUBE_BIN}/crictl"
chmod a+x "${KUBE_BIN}/crictl"
# Create crictl config file.
cat > /etc/crictl.yaml <<EOF
runtime-endpoint: ${CONTAINER_RUNTIME_ENDPOINT:-unix:///var/run/dockershim.sock}
EOF
}
function install-exec-auth-plugin {
if [[ ! "${EXEC_AUTH_PLUGIN_URL:-}" ]]; then
return
fi
local -r plugin_url="${EXEC_AUTH_PLUGIN_URL}"
local -r plugin_sha1="${EXEC_AUTH_PLUGIN_SHA1}"
echo "Downloading gke-exec-auth-plugin binary"
download-or-bust "${plugin_sha1}" "${plugin_url}"
mv "${KUBE_HOME}/gke-exec-auth-plugin" "${KUBE_BIN}/gke-exec-auth-plugin"
chmod a+x "${KUBE_BIN}/gke-exec-auth-plugin"
}
function install-kube-manifests {
# Put kube-system pods manifests in ${KUBE_HOME}/kube-manifests/.
local dst_dir="${KUBE_HOME}/kube-manifests"
mkdir -p "${dst_dir}"
local -r manifests_tar_urls=( $(split-commas "${KUBE_MANIFESTS_TAR_URL}") )
local -r manifests_tar="${manifests_tar_urls[0]##*/}"
if [ -n "${KUBE_MANIFESTS_TAR_HASH:-}" ]; then
local -r manifests_tar_hash="${KUBE_MANIFESTS_TAR_HASH}"
else
echo "Downloading k8s manifests sha1 (not found in env)"
download-or-bust "" "${manifests_tar_urls[@]/.tar.gz/.tar.gz.sha1}"
local -r manifests_tar_hash=$(cat "${manifests_tar}.sha1")
fi
if is-preloaded "${manifests_tar}" "${manifests_tar_hash}"; then
echo "${manifests_tar} is preloaded."
return
fi
echo "Downloading k8s manifests tar"
download-or-bust "${manifests_tar_hash}" "${manifests_tar_urls[@]}"
tar xzf "${KUBE_HOME}/${manifests_tar}" -C "${dst_dir}" --overwrite
local -r kube_addon_registry="${KUBE_ADDON_REGISTRY:-k8s.gcr.io}"
if [[ "${kube_addon_registry}" != "k8s.gcr.io" ]]; then
find "${dst_dir}" -name \*.yaml -or -name \*.yaml.in | \
xargs sed -ri "s@(image:\s.*)k8s.gcr.io@\1${kube_addon_registry}@"
find "${dst_dir}" -name \*.manifest -or -name \*.json | \
xargs sed -ri "s@(image\":\s+\")k8s.gcr.io@\1${kube_addon_registry}@"
fi
cp "${dst_dir}/kubernetes/gci-trusty/gci-configure-helper.sh" "${KUBE_BIN}/configure-helper.sh"
if [[ -e "${dst_dir}/kubernetes/gci-trusty/gke-internal-configure-helper.sh" ]]; then
cp "${dst_dir}/kubernetes/gci-trusty/gke-internal-configure-helper.sh" "${KUBE_BIN}/"
fi
cp "${dst_dir}/kubernetes/gci-trusty/health-monitor.sh" "${KUBE_BIN}/health-monitor.sh"
rm -f "${KUBE_HOME}/${manifests_tar}"
rm -f "${KUBE_HOME}/${manifests_tar}.sha1"
}
# A helper function for loading a docker image. It keeps trying up to 5 times.
#
# $1: Full path of the docker image
function try-load-docker-image {
local -r img=$1
echo "Try to load docker image file ${img}"
# Temporarily turn off errexit, because we don't want to exit on first failure.
set +e
local -r max_attempts=5
local -i attempt_num=1
until timeout 30 ${LOAD_IMAGE_COMMAND:-docker load -i} "${img}"; do
if [[ "${attempt_num}" == "${max_attempts}" ]]; then
echo "Fail to load docker image file ${img} after ${max_attempts} retries. Exit!!"
exit 1
else
attempt_num=$((attempt_num+1))
sleep 5
fi
done
# Re-enable errexit.
set -e
}
# Loads kube-system docker images. It is better to do it before starting kubelet,
# as kubelet will restart docker daemon, which may interfere with loading images.
function load-docker-images {
echo "Start loading kube-system docker images"
local -r img_dir="${KUBE_HOME}/kube-docker-files"
if [[ "${KUBERNETES_MASTER:-}" == "true" ]]; then
try-load-docker-image "${img_dir}/kube-apiserver.tar"
try-load-docker-image "${img_dir}/kube-controller-manager.tar"
try-load-docker-image "${img_dir}/kube-scheduler.tar"
else
try-load-docker-image "${img_dir}/kube-proxy.tar"
fi
}
# Downloads kubernetes binaries and kube-system manifest tarball, unpacks them,
# and places them into suitable directories. Files are placed in /home/kubernetes.
function install-kube-binary-config {
cd "${KUBE_HOME}"
local -r server_binary_tar_urls=( $(split-commas "${SERVER_BINARY_TAR_URL}") )
local -r server_binary_tar="${server_binary_tar_urls[0]##*/}"
if [[ -n "${SERVER_BINARY_TAR_HASH:-}" ]]; then
local -r server_binary_tar_hash="${SERVER_BINARY_TAR_HASH}"
else
echo "Downloading binary release sha1 (not found in env)"
download-or-bust "" "${server_binary_tar_urls[@]/.tar.gz/.tar.gz.sha1}"
local -r server_binary_tar_hash=$(cat "${server_binary_tar}.sha1")
fi
if is-preloaded "${server_binary_tar}" "${server_binary_tar_hash}"; then
echo "${server_binary_tar} is preloaded."
else
echo "Downloading binary release tar"
download-or-bust "${server_binary_tar_hash}" "${server_binary_tar_urls[@]}"
tar xzf "${KUBE_HOME}/${server_binary_tar}" -C "${KUBE_HOME}" --overwrite
# Copy docker_tag and image files to ${KUBE_HOME}/kube-docker-files.
local -r src_dir="${KUBE_HOME}/kubernetes/server/bin"
local dst_dir="${KUBE_HOME}/kube-docker-files"
mkdir -p "${dst_dir}"
cp "${src_dir}/"*.docker_tag "${dst_dir}"
if [[ "${KUBERNETES_MASTER:-}" == "false" ]]; then
cp "${src_dir}/kube-proxy.tar" "${dst_dir}"
else
cp "${src_dir}/kube-apiserver.tar" "${dst_dir}"
cp "${src_dir}/kube-controller-manager.tar" "${dst_dir}"
cp "${src_dir}/kube-scheduler.tar" "${dst_dir}"
cp -r "${KUBE_HOME}/kubernetes/addons" "${dst_dir}"
fi
load-docker-images
mv "${src_dir}/kubelet" "${KUBE_BIN}"
mv "${src_dir}/kubectl" "${KUBE_BIN}"
mv "${KUBE_HOME}/kubernetes/LICENSES" "${KUBE_HOME}"
mv "${KUBE_HOME}/kubernetes/kubernetes-src.tar.gz" "${KUBE_HOME}"
fi
if [[ "${KUBERNETES_MASTER:-}" == "false" ]] && \
[[ "${ENABLE_NODE_PROBLEM_DETECTOR:-}" == "standalone" ]]; then
install-node-problem-detector
fi
if [[ "${NETWORK_PROVIDER:-}" == "kubenet" ]] || \
[[ "${NETWORK_PROVIDER:-}" == "cni" ]]; then
install-cni-binaries
fi
# Put kube-system pods manifests in ${KUBE_HOME}/kube-manifests/.
install-kube-manifests
chmod -R 755 "${KUBE_BIN}"
# Install gci mounter related artifacts to allow mounting storage volumes in GCI
install-gci-mounter-tools
# Remount the Flexvolume directory with the "exec" option, if needed.
if [[ "${REMOUNT_VOLUME_PLUGIN_DIR:-}" == "true" && -n "${VOLUME_PLUGIN_DIR:-}" ]]; then
remount-flexvolume-directory "${VOLUME_PLUGIN_DIR}"
fi
# Install crictl on each node.
install-crictl
if [[ "${KUBERNETES_MASTER:-}" == "false" ]]; then
install-exec-auth-plugin
fi
# Clean up.
rm -rf "${KUBE_HOME}/kubernetes"
rm -f "${KUBE_HOME}/${server_binary_tar}"
rm -f "${KUBE_HOME}/${server_binary_tar}.sha1"
}
######### Main Function ##########
echo "Start to install kubernetes files"
# if install fails, message-of-the-day (motd) will warn at login shell
set-broken-motd
KUBE_HOME="/home/kubernetes"
KUBE_BIN="${KUBE_HOME}/bin"
# download and source kube-env
download-kube-env
source "${KUBE_HOME}/kube-env"
download-kubelet-config "${KUBE_HOME}/kubelet-config.yaml"
# master certs
if [[ "${KUBERNETES_MASTER:-}" == "true" ]]; then
download-kube-master-certs
fi
# binaries and kube-system manifests
install-kube-binary-config
echo "Done for installing kubernetes files"
|
/**
* Copyright (c) 2010 MongoDB, Inc. <http://mongodb.com>
* Copyright (c) 2009, 2010 Novus Partners, Inc. <http://novus.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For questions and comments about this product, please see the project page at:
*
* http://github.com/mongodb/casbah
*
*/
package com.mongodb.casbah.test.core
import scala.util.Random
import com.mongodb.casbah.Imports._
import scala.collection.mutable
import com.mongodb.casbah.Cursor
class CoreWrappersSpec extends CasbahDBTestSpecification {
"Casbah behavior between Scala and Java versions of Objects" should {
lazy val javaConn = new com.mongodb.MongoClient() // Java connection
"provide working .asScala methods on the Java version of the objects" in {
"Connection objects" in {
val scalaConn = javaConn.asScala
scalaConn.underlying must beEqualTo(javaConn)
}
val javaDb = javaConn.getDB("test")
"DB objects" in {
val scalaDb = javaDb.asScala
scalaDb.underlying must beEqualTo(javaDb)
}
val javaCollection = javaDb.getCollection("test")
"Collection objects" in {
val scalaCollection = javaCollection.asScala
scalaCollection.underlying must beEqualTo(javaCollection)
}
}
"be directly instantiable, with working apply methods" in {
lazy val conn: MongoClient = MongoClient()
lazy val db: MongoDB = conn("casbahTest")
lazy val coll: MongoCollection = database("collection.in")
"MongoClient" in {
"direct instantiation" in {
conn.underlying must haveClass[com.mongodb.MongoClient]
}
"the apply method works" in {
database.underlying must haveSuperclass[com.mongodb.DB]
}
}
"MongoDB" in {
"has a working apply method" in {
coll.underlying must beAnInstanceOf[com.mongodb.DBCollection]
}
}
}
"allow indexes to work as expected" in {
collection.drop()
collection.insert(MongoDBObject("foo" -> "bar"))
collection.indexInfo.length must beEqualTo(1)
collection.ensureIndex(MongoDBObject("uid" -> 1), "user_index", unique = true)
collection.indexInfo.length must beEqualTo(2)
      collection.indexInfo(1)("key") must beEqualTo(MongoDBObject("uid" -> 1))
}
"check query failure exception" in {
collection.drop()
collection += MongoDBObject("loc" -> List(0, 0))
val near = "loc" $near(0, 0)
collection.findOne(near) must throwAn[MongoException]
}
"Renaming a collection successfully tracks the rename in MongoCollection" in {
database("collection").drop()
val coll = database("collectoin")
coll.drop()
coll.insert(MongoDBObject("foo" -> "bar"))
coll must beAnInstanceOf[com.mongodb.casbah.MongoCollection]
coll.name must beEqualTo("collectoin")
val newColl = coll.rename("collection")
newColl must beAnInstanceOf[com.mongodb.casbah.MongoCollection]
newColl.name must beEqualTo("collection")
// no mutability in the old collection
coll.name must beEqualTo("collectoin")
// collection should be gone so rename fails
newColl.rename("collection") must throwA[MongoException]
}
}
"findOne operations" should {
"Not fail as reported by <NAME> in CASBAH-11" in {
collection.drop()
collection.insert(MongoDBObject("foo" -> "bar"))
val basicFind = collection.find(MongoDBObject("foo" -> "bar"))
basicFind.size must beEqualTo(1)
val findOne = collection.findOne()
findOne must beSome
val findOneMatch = collection.findOne(MongoDBObject("foo" -> "bar"))
findOneMatch must beSome
}
}
"Cursor Operations" should {
"load some test data first" in {
collection.drop()
for (i <- 1 to 100)
collection += MongoDBObject("foo" -> "bar", "x" -> Random.nextDouble())
success
}
"Behave in chains" in {
val cur = collection.find(MongoDBObject("foo" -> "bar")) skip 5
cur must beAnInstanceOf[MongoCursor]
val cur2 = collection.find(MongoDBObject("foo" -> "bar")) limit 25 skip 12
cur2 must beAnInstanceOf[MongoCursor]
}
}
"Distinct operations" should {
"load some test data first" in {
collection.drop()
for (i <- 1 to 99)
collection += MongoDBObject("_id" -> i, "x" -> i % 10)
success
}
"except just a key" in {
val l = collection.distinct("x")
l.size must beEqualTo(10)
}
"except key and query" in {
val l = collection.distinct("x", "_id" $gt 95)
l.size must beEqualTo(4)
}
"except key and readPref" in {
val l = collection.distinct("x", readPrefs = ReadPreference.Primary)
l.size must beEqualTo(10)
}
"except key, query and readPref" in {
val l = collection.distinct("x", "_id" $gt 95, ReadPreference.Primary)
l.size must beEqualTo(4)
}
}
"Aggregation operations" should {
"load some test data first" in {
collection.drop()
for (i <- 1 to 99)
collection += MongoDBObject("_id" -> i, "score" -> i % 10)
success
}
"except just a single op" in {
val cursor: AggregationOutput = collection.aggregate(MongoDBObject("$match" -> ("score" $gte 7)))
cursor.results.size must beEqualTo(30)
}
"except multiple ops" in {
val cursor: AggregationOutput = collection.aggregate(
MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1))
)
cursor.results.size must beEqualTo(30)
}
"except list of ops" in {
val cursor: AggregationOutput = collection.aggregate(
List(MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1)))
)
cursor.results.size must beEqualTo(30)
}
"return a cursor when options are supplied" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.6")
val aggregationOptions = AggregationOptions(allowDiskUse=true, outputMode=AggregationOptions.CURSOR)
val cursor: CommandCursor = collection.aggregate(
List(MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1))),
aggregationOptions
)
cursor.toList.size must beEqualTo(30)
}
"test allowDiskUse isn't included by default" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.6")
val profileCollection = database("system.profile")
val profileLevel = database.command(MongoDBObject("profile" -> -1)).as[Int]("was")
database.command(MongoDBObject("profile" -> 0))
profileCollection.drop()
database.command(MongoDBObject("profile" -> 2))
collection.aggregate(
List(MongoDBObject("$match" -> ("score" $gte 7))),
AggregationOptions(outputMode=AggregationOptions.CURSOR)
)
val profile = profileCollection.findOne().get.as[MongoDBObject]("command")
database.command(MongoDBObject("profile" -> profileLevel))
profile.contains("allowDiskUse") must beFalse
}
"test allowDiskUse is included if set" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.6")
val profileCollection = database("system.profile")
val profileLevel = database.command(MongoDBObject("profile" -> -1)).as[Int]("was")
database.command(MongoDBObject("profile" -> 0))
profileCollection.drop()
database.command(MongoDBObject("profile" -> 2))
collection.aggregate(
List(MongoDBObject("$match" -> ("score" $gte 7))),
AggregationOptions(allowDiskUse=true, outputMode=AggregationOptions.CURSOR)
)
val profile = profileCollection.findOne().get.as[MongoDBObject]("command")
database.command(MongoDBObject("profile" -> profileLevel))
profile.contains("allowDiskUse") must beTrue
}
"test explainAggregate" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.6")
val aggregationOptions = AggregationOptions(AggregationOptions.CURSOR)
val explaination = collection.explainAggregate(
List(MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1))),
aggregationOptions
)
explaination("ok") must beEqualTo(1.0)
explaination.keys must contain("stages")
}
"return a cursor when options are supplied even if inline" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.5")
val aggregationOptions = AggregationOptions(AggregationOptions.INLINE)
val cursor: CommandCursor = collection.aggregate(
List(MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1))),
aggregationOptions
)
cursor.size must beEqualTo(30)
}
"handle $out in multiple ops" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.5")
val outCollection = database("outCollection")
outCollection.drop()
val cursor: AggregationOutput = collection.aggregate(
MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1)),
MongoDBObject("$out" -> outCollection.name)
)
cursor.results.iterator.hasNext must beFalse
outCollection.count() must beEqualTo(30)
}
"handle $out in list of ops" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.5")
val outCollection = database("outCollection")
outCollection.drop()
val cursor: AggregationOutput = collection.aggregate(List(
MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1)),
MongoDBObject("$out" -> outCollection.name)
))
cursor.results.iterator.hasNext must beFalse
outCollection.count() must beEqualTo(30)
}
"handle $out with options INLINE" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.5")
val outCollection = database("outCollection")
outCollection.drop()
val aggregationOptions = AggregationOptions(AggregationOptions.INLINE)
val cursor: CommandCursor = collection.aggregate(
List(
MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1)),
MongoDBObject("$out" -> outCollection.name)
),
aggregationOptions
)
cursor.size must beEqualTo(30)
outCollection.count() must beEqualTo(30)
}
"handle $out with options CURSOR" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.5")
lazy val outCollection = database("outCollection")
outCollection.drop()
val aggregationOptions = AggregationOptions(AggregationOptions.CURSOR)
val cursor: CommandCursor = collection.aggregate(
List(
MongoDBObject("$match" -> ("score" $gte 7)),
MongoDBObject("$project" -> MongoDBObject("score" -> 1)),
MongoDBObject("$out" -> outCollection.name)
),
aggregationOptions
)
cursor.size must beEqualTo(30)
outCollection.count() must beEqualTo(30)
}
}
"Collection" should {
"support parallel scan" in {
serverIsAtLeastVersion(2, 5) must beTrue.orSkip("Needs server >= 2.5")
isSharded must beFalse.orSkip("Currently doesn't work with mongos")
collection.drop()
val ids = (1 to 2000 by 1).toSet
for(i <- ids) collection += MongoDBObject("_id" -> i)
val numCursors = 10
val cursors: mutable.Buffer[Cursor] = collection.parallelScan(ParallelScanOptions(numCursors, 1000))
cursors.size must beLessThanOrEqualTo(numCursors)
var cursorIds = Set[Int]()
for (cursor <- cursors) {
while (cursor.hasNext) {
cursorIds += cursor.next().get("_id").asInstanceOf[Int]
}
}
cursorIds must beEqualTo(ids)
}
}
}
|
<reponame>cugg/BusinessParameters
package be.kwakeroni.scratch;
import be.kwakeroni.scratch.env.Environment;
import be.kwakeroni.scratch.env.es.ElasticSearchTestData;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import javax.swing.*;
import java.util.Arrays;
import java.util.function.Function;
/**
* (C) 2017 <NAME>
*/
@Ignore
public class ElasticSearchTest {
@ClassRule
public static Environment environment = new Environment(ElasticSearchTestData::new);
@Test
public void test() throws Exception {
System.out.println("Started test");
dump("/_cat/indices?v");
        JOptionPane.showConfirmDialog(null, "Click OK to stop server", "ElasticSearch server running...", JOptionPane.DEFAULT_OPTION);
}
private String resolve(String base, String path) {
return base + ((path.startsWith("/")) ? "" : "/") + path;
}
public WebResource call(String path) {
return new Client().resource(resolve("http://127.0.0.1:9200", path));
}
// http://127.0.0.1:9200/_cat/indices?v
private String dump(String path) {
return dump(call(path).get(ClientResponse.class));
}
private String dump(ClientResponse response) {
String str = format(response,
"[%s] %s",
ClientResponse::getStatus,
r -> r.getEntity(String.class)
);
System.out.println(str);
return str;
}
@SafeVarargs
private final <T> String format(T t, String pattern, Function<T, ?>... args) {
return String.format(pattern,
Arrays.stream(args).map(func -> func.apply(t)).toArray()
);
}
// c:\Projects\elasticsearch-5.2.1\bin>elasticsearch.bat
// "C:\Program Files\Java\jdk1.8.0_25\bin\java.exe" -Xms2g -Xmx2g -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFracti
// on=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+DisableExplicitGC -XX:+AlwaysPreTouch -server -Xss1m -Djava.awt.headless=t
// rue -Dfile.encoding=UTF-8 -Djna.nosys=true -Djdk.io.permissionsUseCanonicalPath=true -Dio.netty.noUnsafe=true -Dio.netty
// .noKeySetOptimization=true -Dio.netty.recycler.maxCapacityPerThread=0 -Dlog4j.shutdownHookEnabled=false -Dlog4j2.disable
// .jmx=true -Dlog4j.skipJansi=true -XX:+HeapDumpOnOutOfMemoryError -Delasticsearch -Des.path.home="c:\Projects\elasticsea
// rch-5.2.1" -cp "c:\Projects\elasticsearch-5.2.1/lib/elasticsearch-5.2.1.jar;c:\Projects\elasticsearch-5.2.1/lib/*" "org.
//elasticsearch.bootstrap.Elasticsearch"
}
|
const names = [
"Gerard",
"Tania",
"Richard",
"Antonio",
"Joe"
];
const getRandomName = () => {
const name = names[Math.floor(Math.random() * names.length)];
console.log(`Welcome ${name}`);
};
// Export function
module.exports = { getRandomName }
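// Usage sketch (illustrative; the require path is hypothetical):
// const { getRandomName } = require('./getRandomName')
// getRandomName() // logs e.g. "Welcome Tania"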
|
import { metrics, SignificanceLevel } from "../../src/analysis";
import { Document, PackageBenchmarkSummary, config } from "../../src/common";
describe("analysis", () => {
describe("metrics", () => {
test("proportionalTo significance", () => {
const significance1 = metrics.typeCount.getSignificance(
6,
1000,
6000,
{ body: { testIdentifierCount: 1000, typeCount: 1000 } } as Document<PackageBenchmarkSummary>,
{ body: { testIdentifierCount: 6000, typeCount: 6000 } } as Document<PackageBenchmarkSummary>
);
expect(significance1).toBe(undefined);
const significance2 = metrics.typeCount.getSignificance(
6,
1000,
6000,
{ body: { testIdentifierCount: 1000, typeCount: 1000 } } as Document<PackageBenchmarkSummary>,
{ body: { testIdentifierCount: 1000, typeCount: 6000 } } as Document<PackageBenchmarkSummary>
);
expect(significance2).toBe(SignificanceLevel.Warning);
const significance3 = metrics.typeCount.getSignificance(
config.comparison.percentDiffWarningThreshold + 0.01,
1000,
200,
{ body: { testIdentifierCount: 1000, typeCount: 1000 } } as Document<PackageBenchmarkSummary>,
{ body: { testIdentifierCount: 5000, typeCount: 2000 } } as Document<PackageBenchmarkSummary>
);
expect(significance3).toBe(undefined);
});
test("withThreshold significance", () => {
const significance1 = metrics.typeCount.getSignificance(
6,
100,
600,
{ body: { testIdentifierCount: 100, typeCount: 100 } } as Document<PackageBenchmarkSummary>,
{ body: { testIdentifierCount: 600, typeCount: 600 } } as Document<PackageBenchmarkSummary>
);
expect(significance1).toBe(undefined);
});
});
});
|
<filename>elasta-composer/src/main/java/elasta/composer/message/handlers/builder/impl/DeleteAllMessageHandlerBuilderImpl.java
package elasta.composer.message.handlers.builder.impl;
import elasta.composer.converter.FlowToJsonArrayMessageHandlerConverter;
import elasta.composer.flow.holder.DeleteAllFlowHolder;
import elasta.composer.message.handlers.JsonArrayMessageHandler;
import elasta.composer.message.handlers.builder.DeleteAllMessageHandlerBuilder;
import java.util.Objects;
/**
* Created by sohan on 5/21/2017.
*/
final public class DeleteAllMessageHandlerBuilderImpl implements DeleteAllMessageHandlerBuilder {
final DeleteAllFlowHolder deleteAllFlowHolder;
final FlowToJsonArrayMessageHandlerConverter flowToJsonArrayMessageHandlerConverter;
public DeleteAllMessageHandlerBuilderImpl(DeleteAllFlowHolder deleteAllFlowHolder, FlowToJsonArrayMessageHandlerConverter flowToJsonArrayMessageHandlerConverter) {
Objects.requireNonNull(deleteAllFlowHolder);
Objects.requireNonNull(flowToJsonArrayMessageHandlerConverter);
this.deleteAllFlowHolder = deleteAllFlowHolder;
this.flowToJsonArrayMessageHandlerConverter = flowToJsonArrayMessageHandlerConverter;
}
@Override
public JsonArrayMessageHandler build() {
return flowToJsonArrayMessageHandlerConverter.convert(deleteAllFlowHolder.getFlow());
}
}
|
/*-------------------------------------------------------------------------
*
* pgtime.h
* PostgreSQL internal timezone library
*
 * Portions Copyright (c) 1996-2017, PostgreSQL Global Development Group
*
* IDENTIFICATION
* src/include/pgtime.h
*
*-------------------------------------------------------------------------
*/
#pragma once
#include <cstdint>
/*
* The API of this library is generally similar to the corresponding
* C library functions, except that we use pg_time_t which (we hope) is
* 64 bits wide, and which is most definitely signed not unsigned.
*/
typedef int64_t pg_time_t;
struct pg_tm
{
int tm_sec;
int tm_min;
int tm_hour;
int tm_mday;
int tm_mon; /* origin 1, not 0! */
int tm_year; /* relative to 1900 */
int tm_wday;
int tm_yday;
int tm_isdst;
long int tm_gmtoff;
const char *tm_zone;
};
typedef struct pg_tz pg_tz;
typedef struct pg_tzenum pg_tzenum;
/* Maximum length of a timezone name (not including trailing null) */
#define TZ_STRLEN_MAX 255
/* these functions are in localtime.c */
extern struct pg_tm *pg_localtime(const pg_time_t *timep, const pg_tz *tz);
extern struct pg_tm *pg_gmtime(const pg_time_t *timep);
extern int pg_next_dst_boundary(const pg_time_t *timep,
long int *before_gmtoff,
int *before_isdst,
pg_time_t *boundary,
long int *after_gmtoff,
int *after_isdst,
const pg_tz *tz);
extern bool pg_interpret_timezone_abbrev(const char *abbrev,
const pg_time_t *timep,
long int *gmtoff,
int *isdst,
const pg_tz *tz);
extern bool pg_get_timezone_offset(const pg_tz *tz, long int *gmtoff);
extern const char *pg_get_timezone_name(pg_tz *tz);
extern bool pg_tz_acceptable(pg_tz *tz);
/* these functions and variables are in pgtz.c */
extern pg_tz *session_timezone;
extern pg_tz *log_timezone;
extern void pg_timezone_initialize(void);
extern pg_tz *pg_tzset(const char *tzname);
extern pg_tz *pg_tzset_offset(long gmtoffset);
extern pg_tzenum *pg_tzenumerate_start(void);
extern pg_tz *pg_tzenumerate_next(pg_tzenum *dir);
extern void pg_tzenumerate_end(pg_tzenum *dir);
|
import React, { useContext } from 'react'
import { View, StyleSheet } from 'react-native'
import { AppContext } from '../provider/AppProvider'
import DeckList from '../components/DeckList'
export default function MyDecksScreen(props) {
  const state = useContext(AppContext)
return (
<View style={styles.container}>
<DeckList decks={state.user.decks}/>
</View>
)
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#fff',
alignItems: 'center',
justifyContent: 'center',
},
});
|
package com.efei.proxy.channelHandler;
import com.efei.proxy.common.Constant;
import com.efei.proxy.common.bean.ProxyTcpProtocolBean;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
import io.netty.util.concurrent.GenericFutureListener;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Date;
@ChannelHandler.Sharable
@Component
@Slf4j
public class HeartBeatClientHandler extends ChannelInboundHandlerAdapter {
private volatile int time = 0;
    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof IdleStateEvent){
IdleStateEvent event = (IdleStateEvent)evt;
if (event.state()== IdleState.WRITER_IDLE){
ProxyTcpProtocolBean heartMsg = new ProxyTcpProtocolBean(Constant.MSG_HEART,Constant.MSG_RQ,"123456",1,Constant.CONTENT_HEART);
ByteBuf buf = ctx.alloc().buffer();
heartMsg.toByteBuf(buf);
ctx.writeAndFlush(buf);
} else if(event.state() == IdleState.READER_IDLE) {
if(time <= 2){
ProxyTcpProtocolBean heartMsg = new ProxyTcpProtocolBean(Constant.MSG_HEART,Constant.MSG_RQ,"123456",1,Constant.CONTENT_HEART);
ByteBuf buf = ctx.alloc().buffer();
heartMsg.toByteBuf(buf);
ctx.writeAndFlush(buf);
time++;
} else {
time = 0;
ctx.close();
}
}
} else {
super.userEventTriggered(ctx,evt);
}
}
}
|
<reponame>lsm5/crio-deb
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package options
import (
"fmt"
"net"
"strings"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apiserver/pkg/admission"
utilfeature "k8s.io/apiserver/pkg/util/feature"
utilflag "k8s.io/apiserver/pkg/util/flag"
// add the generic feature gates
_ "k8s.io/apiserver/pkg/features"
"github.com/spf13/pflag"
)
// ServerRunOptions contains the options while running a generic api server.
type ServerRunOptions struct {
AdmissionControl string
AdmissionControlConfigFile string
AdvertiseAddress net.IP
CorsAllowedOriginList []string
// To enable protobuf as storage format, it is enough
// to set it to "application/vnd.kubernetes.protobuf".
DefaultStorageMediaType string
DeleteCollectionWorkers int
AuditLogPath string
AuditLogMaxAge int
AuditLogMaxBackups int
AuditLogMaxSize int
EnableGarbageCollection bool
EnableProfiling bool
EnableContentionProfiling bool
EnableSwaggerUI bool
EnableWatchCache bool
ExternalHost string
MaxRequestsInFlight int
MaxMutatingRequestsInFlight int
MinRequestTimeout int
RuntimeConfig utilflag.ConfigurationMap
TargetRAMMB int
WatchCacheSizes []string
}
func NewServerRunOptions() *ServerRunOptions {
return &ServerRunOptions{
AdmissionControl: "AlwaysAdmit",
DefaultStorageMediaType: "application/json",
DeleteCollectionWorkers: 1,
EnableGarbageCollection: true,
EnableProfiling: true,
EnableContentionProfiling: false,
EnableWatchCache: true,
MaxRequestsInFlight: 400,
MaxMutatingRequestsInFlight: 200,
MinRequestTimeout: 1800,
RuntimeConfig: make(utilflag.ConfigurationMap),
}
}
// DefaultAdvertiseAddress sets the field AdvertiseAddress if
// unset. The field will be set based on the SecureServingOptions. If
// the SecureServingOptions is not present, DefaultExternalAddress
// will fall back to the insecure ServingOptions.
func (s *ServerRunOptions) DefaultAdvertiseAddress(secure *SecureServingOptions, insecure *ServingOptions) error {
if s.AdvertiseAddress == nil || s.AdvertiseAddress.IsUnspecified() {
switch {
case secure != nil:
hostIP, err := secure.ServingOptions.DefaultExternalAddress()
if err != nil {
return fmt.Errorf("Unable to find suitable network address.error='%v'. "+
"Try to set the AdvertiseAddress directly or provide a valid BindAddress to fix this.", err)
}
s.AdvertiseAddress = hostIP
case insecure != nil:
hostIP, err := insecure.DefaultExternalAddress()
if err != nil {
return fmt.Errorf("Unable to find suitable network address.error='%v'. "+
"Try to set the AdvertiseAddress directly or provide a valid BindAddress to fix this.", err)
}
s.AdvertiseAddress = hostIP
}
}
return nil
}
// AddFlags adds flags for a specific APIServer to the specified FlagSet
func (s *ServerRunOptions) AddUniversalFlags(fs *pflag.FlagSet) {
// Note: the weird ""+ in below lines seems to be the only way to get gofmt to
// arrange these text blocks sensibly. Grrr.
fs.StringVar(&s.AdmissionControl, "admission-control", s.AdmissionControl, ""+
"Ordered list of plug-ins to do admission control of resources into cluster. "+
"Comma-delimited list of: "+strings.Join(admission.GetPlugins(), ", ")+".")
fs.StringVar(&s.AdmissionControlConfigFile, "admission-control-config-file", s.AdmissionControlConfigFile,
"File with admission control configuration.")
fs.IPVar(&s.AdvertiseAddress, "advertise-address", s.AdvertiseAddress, ""+
"The IP address on which to advertise the apiserver to members of the cluster. This "+
"address must be reachable by the rest of the cluster. If blank, the --bind-address "+
"will be used. If --bind-address is unspecified, the host's default interface will "+
"be used.")
fs.StringSliceVar(&s.CorsAllowedOriginList, "cors-allowed-origins", s.CorsAllowedOriginList, ""+
"List of allowed origins for CORS, comma separated. An allowed origin can be a regular "+
"expression to support subdomain matching. If this list is empty CORS will not be enabled.")
fs.StringVar(&s.DefaultStorageMediaType, "storage-media-type", s.DefaultStorageMediaType, ""+
"The media type to use to store objects in storage. Defaults to application/json. "+
"Some resources may only support a specific media type and will ignore this setting.")
fs.IntVar(&s.DeleteCollectionWorkers, "delete-collection-workers", s.DeleteCollectionWorkers,
"Number of workers spawned for DeleteCollection call. These are used to speed up namespace cleanup.")
fs.StringVar(&s.AuditLogPath, "audit-log-path", s.AuditLogPath,
"If set, all requests coming to the apiserver will be logged to this file.")
	fs.IntVar(&s.AuditLogMaxAge, "audit-log-maxage", s.AuditLogMaxAge,
"The maximum number of days to retain old audit log files based on the timestamp encoded in their filename.")
fs.IntVar(&s.AuditLogMaxBackups, "audit-log-maxbackup", s.AuditLogMaxBackups,
"The maximum number of old audit log files to retain.")
fs.IntVar(&s.AuditLogMaxSize, "audit-log-maxsize", s.AuditLogMaxSize,
"The maximum size in megabytes of the audit log file before it gets rotated. Defaults to 100MB.")
fs.BoolVar(&s.EnableGarbageCollection, "enable-garbage-collector", s.EnableGarbageCollection, ""+
"Enables the generic garbage collector. MUST be synced with the corresponding flag "+
"of the kube-controller-manager.")
fs.BoolVar(&s.EnableProfiling, "profiling", s.EnableProfiling,
"Enable profiling via web interface host:port/debug/pprof/")
fs.BoolVar(&s.EnableContentionProfiling, "contention-profiling", s.EnableContentionProfiling,
"Enable contention profiling. Requires --profiling to be set to work.")
fs.BoolVar(&s.EnableSwaggerUI, "enable-swagger-ui", s.EnableSwaggerUI,
"Enables swagger ui on the apiserver at /swagger-ui")
// TODO: enable cache in integration tests.
fs.BoolVar(&s.EnableWatchCache, "watch-cache", s.EnableWatchCache,
"Enable watch caching in the apiserver")
fs.IntVar(&s.TargetRAMMB, "target-ram-mb", s.TargetRAMMB,
"Memory limit for apiserver in MB (used to configure sizes of caches, etc.)")
fs.StringVar(&s.ExternalHost, "external-hostname", s.ExternalHost,
"The hostname to use when generating externalized URLs for this master (e.g. Swagger API Docs).")
// TODO: remove post-1.6
fs.String("long-running-request-regexp", "", ""+
"A regular expression matching long running requests which should "+
"be excluded from maximum inflight request handling.")
fs.MarkDeprecated("long-running-request-regexp", "regular expression matching of long-running requests is no longer supported")
deprecatedMasterServiceNamespace := metav1.NamespaceDefault
fs.StringVar(&deprecatedMasterServiceNamespace, "master-service-namespace", deprecatedMasterServiceNamespace, ""+
"DEPRECATED: the namespace from which the kubernetes master services should be injected into pods.")
fs.IntVar(&s.MaxRequestsInFlight, "max-requests-inflight", s.MaxRequestsInFlight, ""+
"The maximum number of non-mutating requests in flight at a given time. When the server exceeds this, "+
"it rejects requests. Zero for no limit.")
fs.IntVar(&s.MaxMutatingRequestsInFlight, "max-mutating-requests-inflight", s.MaxMutatingRequestsInFlight, ""+
"The maximum number of mutating requests in flight at a given time. When the server exceeds this, "+
"it rejects requests. Zero for no limit.")
fs.IntVar(&s.MinRequestTimeout, "min-request-timeout", s.MinRequestTimeout, ""+
"An optional field indicating the minimum number of seconds a handler must keep "+
"a request open before timing it out. Currently only honored by the watch request "+
"handler, which picks a randomized value above this number as the connection timeout, "+
"to spread out load.")
fs.Var(&s.RuntimeConfig, "runtime-config", ""+
"A set of key=value pairs that describe runtime configuration that may be passed "+
"to apiserver. apis/<groupVersion> key can be used to turn on/off specific api versions. "+
"apis/<groupVersion>/<resource> can be used to turn on/off specific resources. api/all and "+
"api/legacy are special keys to control all and legacy api versions respectively.")
fs.StringSliceVar(&s.WatchCacheSizes, "watch-cache-sizes", s.WatchCacheSizes, ""+
"List of watch cache sizes for every resource (pods, nodes, etc.), comma separated. "+
"The individual override format: resource#size, where size is a number. It takes effect "+
"when watch-cache is enabled.")
utilfeature.DefaultFeatureGate.AddFlag(fs)
}
|
<filename>gcs_inspector/file_processor.py
import os, os.path, pathlib, errno, json
from gcs_inspector.custom_logging import print_log
# File Read/Write
def is_path_exist(filepath):
return os.path.isfile(filepath)
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
def safe_open_w(path):
# Open "path" for writing, creating any parent directories as needed.
mkdir_p(os.path.dirname(path))
return open(path, "w")
def write_file(content, filename, type="json", rel_out_folder="outputs"):
out_folder = str(pathlib.Path(__file__).parent.absolute())+"/"+rel_out_folder+"/"
file_path = out_folder+filename
try:
with safe_open_w(file_path) as f:
if type=="json":
json.dump(content, f, ensure_ascii=False, indent=4)
else:
print_log("File format is not valid", type="error")
return
except Exception as e:
print_log("An error has occured when writing the output file", type="error")
print_log(e, type="error")
return
def read_file(filename, type="raw", is_root=False):
file_path = "/"
if not(is_root):
file_path = str(pathlib.Path(__file__).parent.absolute())+"/"+filename
try:
with open(file_path, "r") as f:
data = f.read()
if type=="json":
return json.loads(data)
elif type=="raw":
return data
else:
print_log("File format is not valid", type="error")
return
except Exception as e:
print_log("An error has occured when opening the file: "+filename, type="error")
print_log(e, type="error")
return
def get_latest_json_filename(foldername, format="-all_scopes.json", last=0):
path = str(pathlib.Path(__file__).parent.absolute())+"/"+foldername+"/"
files = []
# r=root, d=directories, f = files
for r, d, f in os.walk(path):
for file in f:
if format in file:
files.append(file)
return sorted(files)[-last:]
def remove_oldest_jsons(foldername, format="-all_scopes.json", spared_amount=3):
filenames = get_latest_json_filename(foldername, format)
out_folder = str(pathlib.Path(__file__).parent.absolute())+"/"+foldername+"/"
for i in range(len(filenames)-spared_amount):
path = out_folder+filenames[i]
os.remove(path)
def get_latest_jsons(foldername, format="-all_scopes.json", last=2):
json_all_name = get_latest_json_filename(foldername, format, last)
json_all = []
for name in json_all_name:
json_all.append(read_file(foldername+"/"+name, "json"))
return json_all
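
# --- Usage sketch (illustration only, not part of the original module) ---
# The filename and payload below are hypothetical; write_file() stores JSON under
# <module dir>/outputs/, and the two helpers below rotate and reload scan files.
if __name__ == "__main__":
    payload = {"buckets": ["example-bucket"], "public_objects": 0}
    write_file(payload, "2024-01-01-all_scopes.json")
    remove_oldest_jsons("outputs")            # keep only the 3 newest scans
    latest = get_latest_jsons("outputs", last=2)
    print("Loaded %d scan file(s)" % len(latest))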
|
#!/bin/sh -x
# Create Oracle user and groups
groupadd -g 54321 oinstall
groupadd -g 54322 dba
groupadd -g 54323 oper
useradd -m -c "Oracle" -u 54321 -g oinstall -G dba,oper oracle
# Setup Oracle environment settings for user oracle
cat /vagrant/oracle/environment.sh >> /home/oracle/.bashrc
# Add vagrant user to Oracle group
usermod -a -G oinstall,dba,oper vagrant
# Add alias for quick db connect
echo 'alias sysdba="sqlplus / as sysdba"' >> /home/oracle/.bashrc
# Add alias for quick sudo to oracle user
echo 'alias so="sudo -i -u oracle"' >> /home/vagrant/.bashrc
# Source Oracle environment settings
source /vagrant/oracle/environment.sh
# Create Oracle installation directories
mkdir -p ${ORACLE_HOME}
chown -R oracle:oinstall /u01
chmod -R 775 /u01
# Unzip distribution zip file
( cd /tmp; unzip /vagrant/oracle/linuxx64_12201_database.zip )
# Run Oracle Universal Installer
sudo -i -u oracle /tmp/database/runInstaller \
-waitForCompletion -showProgress -silent \
-responseFile /tmp/database/response/db_install.rsp \
ORACLE_HOSTNAME=vagrant.localdomain \
UNIX_GROUP_NAME=oinstall \
INVENTORY_LOCATION=/u01/app/oraInventory \
SELECTED_LANGUAGES=en \
ORACLE_HOME=${ORACLE_HOME} \
ORACLE_BASE=${ORACLE_BASE} \
oracle.install.option=INSTALL_DB_SWONLY \
oracle.install.db.InstallEdition=EE \
oracle.install.db.OSDBA_GROUP=dba \
oracle.install.db.OSBACKUPDBA_GROUP=dba \
oracle.install.db.OSDGDBA_GROUP=dba \
oracle.install.db.OSKMDBA_GROUP=dba \
oracle.install.db.OSRACDBA_GROUP=dba \
SECURITY_UPDATES_VIA_MYORACLESUPPORT=false \
DECLINE_SECURITY_UPDATES=true
# Execute root scripts
/u01/app/oraInventory/orainstRoot.sh
/u01/app/oracle/product/12.2.0.1/db_1/root.sh
# Run Net Configuration Assistant
sudo -i -u oracle $ORACLE_HOME/bin/netca \
-silent -responsefile /vagrant/oracle/netca.rsp
# Running Database Configuration Assistant
sudo -i -u oracle $ORACLE_HOME/bin/dbca \
-silent -createDatabase \
-templateName General_Purpose.dbc \
-responseFile NO_VALUE \
-gdbname ${ORACLE_UNQNAME} \
-sid ${ORACLE_SID} \
-characterSet AL32UTF8 \
-emConfiguration DBEXPRESS \
-emExpressPort 5500 \
-sampleSchema true \
-sysPassword Welcome01 \
-systemPassword Welcome01
# Copy management scripts
cp -r /vagrant/oracle/scripts /home/oracle
chown -R oracle:oinstall /home/oracle/scripts
# Configure automatic restart of database instance
sed -e 's/:N$/:Y/' -i /etc/oratab
# Add init script
cp /vagrant/oracle/init/dbora /etc/init.d/dbora
chmod 750 /etc/init.d/dbora
chkconfig --add dbora
# Start database
service dbora start
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
export SUPERSET_CONFIG=${SUPERSET_CONFIG:-tests.superset_test_config}
echo "Superset config module: $SUPERSET_CONFIG"
superset db upgrade
superset init
nosetests --stop tests/load_examples_test.py
nosetests --stop --exclude=load_examples_test tests
|
// Get global library/lang parameters from the script.
var library;
var lang;
var city;
var consortium;
var largeSchedules = false;
// Get parameters from iframe url.
function getParamValue(paramName)
{
var url = window.location.search.substring(1); //get rid of "?" in querystring
var qArray = url.split('&'); //get key-value pairs
for (var i = 0; i < qArray.length; i++)
{
var pArr = qArray[i].split('='); //split key and value
if (pArr[0] == paramName)
return pArr[1]; //return value
}
}
library = getParamValue('lib');
lang = getParamValue('lang');
city = getParamValue('city');
consortium = getParamValue('consortium');
/* Large schedules are used in iDiD info screens. */
if(getParamValue('large') === 'true') {
largeSchedules = true;
}
/* Old method, to be removed */
if(getParamValue('font') == 'l' || getParamValue('font') == 'xl') {
largeSchedules = true;
}
/* Alternative: <script data-library="85111" data-lang="fi" src="../../js/main.js" type="text/javascript"></script>*/
// If lang and lib are undefined (not used in iframe)
if(lang == undefined && library == undefined){
var scripts = document.getElementsByTagName('script');
var scriptName = scripts[scripts.length-1];
    library = scriptName.getAttribute('data-library');
    lang = scriptName.getAttribute('data-lang');
}
// Setup the translations.
var i18n = $('body').translate({lang: lang, t: dict}); // Use the correct language
$("html").attr("lang", lang);
// Get referrer url (Iframe parent). If Library name is set, use that as the default.
var refUrl = (window.location != window.parent.location)
? document.referrer
: document.location.href;
refUrl = refUrl.toLocaleLowerCase();
// We use ? to navigate right to library X, # is not passed in url.
// Check the libraries of JKL, by default the main library is used. (lib param from iframe)
if(refUrl.indexOf("?halssila") > -1) {
library = 85305;
}
else if(refUrl.indexOf("?huhtasuo") > -1) {
library = 85533;
}
else if(refUrl.indexOf("?keljo") > -1) {
library = 85516;
}
else if(refUrl.indexOf("?keltin") > -1) {
library = 85754;
}
else if(refUrl.indexOf("?korpi") > -1) {
library = 85116;
}
else if(refUrl.indexOf("?korte") > -1) {
library = 85160;
}
else if(refUrl.indexOf("?kuokka") > -1) {
library = 86583;
}
else if(refUrl.indexOf("?lohi") > -1) {
library = 85909;
}
else if(refUrl.indexOf("?palok") > -1) {
library = 85732;
}
else if(refUrl.indexOf("?saynat") > -1 || refUrl.indexOf("s%c3%a4yn%c3%a4t") > -1 ||
refUrl.indexOf("s%C3%A4yn%C3%A4tsalo") > -1 || refUrl.indexOf("?säynät") > -1) {
library = 85117;
}
else if(refUrl.indexOf("?tikka") > -1) {
library = 85111;
}
else if(refUrl.indexOf("?vaaja") > -1) {
library = 85573;
}
else if(refUrl.indexOf("?vesan") > -1) {
library = 85306;
}
// If no library parameter was provided.
if(library === undefined || library === null || library === '') {
library = 85159;
}
// Navigate to contacts or services, if parameter is in the url.
// Active tab: 0 = info, 1 = contact details, 2 = services.
var activeTab = 0;
if(refUrl.indexOf("yhteys") > -1 || refUrl.indexOf("contact") > -1) {
activeTab = 1;
}
else if(refUrl.indexOf("palvelu") > -1 || refUrl.indexOf("service") > -1) {
activeTab = 2;
}
|
#!/bin/bash
test_non_existing_command() {
echo "stuff on stdout"
echo "stuff on stderr" 1>&2
return 96
}
|
<reponame>thetruefixit2/Sky<filename>app/src/main/java/com/dabe/skyapp/utils/RandomUtils.java<gh_stars>0
package com.dabe.skyapp.utils;
import java.util.Random;
/**
* Created by <NAME> on 28.01.2017 0:27.
* Project: SkyApp; Skype: pandamoni1
*/
public class RandomUtils {
public static int getMockRandomDelay() {
Random r = new Random();
return r.nextInt(5 - 2) + 2;
}
public static int getMockRandomUnavailable() {
Random r = new Random();
return r.nextInt(5 - 2) + 2;
}
}
|
<filename>api/index.js
'use strict'
exports.policies = require('./policies')
|
<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.socialStumbleupon = void 0;
var socialStumbleupon = {
"viewBox": "0 0 512 512",
"children": [{
"name": "path",
"attribs": {
"d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M270.219,228.578l16.625,11.734l26.031-11.734l-0.156-13.062C311.812,186.031,286.812,160,256,160\r\n\tc-30.719,0-55.672,25.844-56.75,55.188v79.203c0,7.141-6.031,12.953-13.438,12.953c-7.453,0-15.141-3.031-15.141-10.219V256H128\r\n\tc0,0,0,40.734,0,41.125C128,327.422,153.5,352,184.891,352c31.141,0,56.438-24.969,56.891-54.875v-82.281\r\n\tc0-7.156,6.781-13.688,14.219-13.688c7.422,0,14.219,6.531,14.219,13.688V228.578z M341.344,256v41.125\r\n\tc0,7.172-7.719,11.781-15.156,11.781s-13.469-5.797-13.469-12.969L312.875,256l-26.031,12.953L270.219,256v41.125\r\n\tC270.5,327.203,295.875,352,327.125,352C358.5,352,384,327.422,384,297.125c0-0.391,0-41.125,0-41.125H341.344z"
},
"children": []
}]
};
exports.socialStumbleupon = socialStumbleupon;
|
#!/bin/bash
set -o errexit
set -o nounset
set -o pipefail
source $(dirname $0)/lib/utils.sh
# UPGRADE_MAP maps gravity version -> space separated list of linux distros to upgrade from
declare -A UPGRADE_MAP
# Use a fixed tag until we cut our first non-pre-release, as recommended_upgrade_tag skips pre-releases
# UPGRADE_MAP[$(recommended_upgrade_tag $(branch 9.0.x))]="redhat:8.2 centos:7.9 ubuntu:18 ubuntu:20"
UPGRADE_MAP[9.0.0-beta.2]="redhat:8.2 centos:7.9 ubuntu:18 ubuntu:20"
UPGRADE_MAP[8.0.0-beta.1]="redhat:7.9 centos:8.2 ubuntu:18"
UPGRADE_MAP[7.1.0-alpha.6]="ubuntu:20"
function build_upgrade_suite {
local size='"flavor":"three","nodes":3,"role":"node"'
local to_tarball=${INSTALLER_URL}
local suite=''
for release in ${!UPGRADE_MAP[@]}; do
local from_tarball=$(tag_to_image $release)
for os in ${UPGRADE_MAP[$release]}; do
suite+=$(build_upgrade_step $from_tarball $to_tarball $os $size)
suite+=' '
done
done
echo -n $suite
}
function build_resize_suite {
local suite=$(cat <<EOF
resize={"installer_url":"${INSTALLER_URL}","nodes":1,"to":3,"flavor":"one","role":"node","state_dir":"/var/lib/telekube","os":"ubuntu:18"}
shrink={"installer_url":"${INSTALLER_URL}","nodes":3,"flavor":"three","role":"node","os":"redhat:7.9"}
EOF
)
echo -n $suite
}
function build_ops_suite {
local suite=$(cat <<EOF
install={"installer_url":"${OPSCENTER_URL}","nodes":1,"flavor":"standalone","role":"node","os":"ubuntu:18","ops_advertise_addr":"example.com:443"}
EOF
)
echo -n $suite
}
function build_telekube_suite {
local suite=$(cat <<EOF
install={"installer_url":"${TELEKUBE_URL}","nodes":3,"flavor":"three","role":"node","os":"ubuntu:18"}
EOF
)
echo -n $suite
}
function build_install_suite {
local suite=''
local oses="redhat:8.3 redhat:7.9 centos:8.2 centos:7.9 sles:12-sp5 sles:15-sp2 ubuntu:16 ubuntu:18 ubuntu:20 debian:9 debian:10"
local cluster_size='"flavor":"one","nodes":1,"role":"node"'
for os in $oses; do
suite+=$(cat <<EOF
install={"installer_url":"${INSTALLER_URL}",${cluster_size},"os":"${os}"}
EOF
)
done
suite+=' '
echo -n $suite
}
if [[ ${1} == "upgradeversions" ]] ; then
UPGRADE_VERSIONS=${!UPGRADE_MAP[@]}
echo "$UPGRADE_VERSIONS"
elif [[ ${1} == "configuration" ]] ; then
SUITE=""
SUITE+=" $(build_telekube_suite)"
SUITE+=" $(build_ops_suite)"
SUITE+=" $(build_install_suite)"
SUITE+=" $(build_resize_suite)"
SUITE+=" $(build_upgrade_suite)"
echo "$SUITE"
else
echo "Unknown parameter: $1"
exit 1
fi
|
<reponame>ansell/pipelines
package org.gbif.pipelines.transforms.metadata;
import java.time.Instant;
import java.util.Optional;
import java.util.Set;
import org.gbif.pipelines.core.Interpretation;
import org.gbif.pipelines.core.interpreters.metadata.MetadataInterpreter;
import org.gbif.pipelines.io.avro.ExtendedRecord;
import org.gbif.pipelines.io.avro.MetadataRecord;
import org.gbif.pipelines.parsers.ws.client.metadata.MetadataServiceClient;
import org.gbif.pipelines.transforms.SerializableConsumer;
import org.gbif.pipelines.transforms.SerializableSupplier;
import org.gbif.pipelines.transforms.Transform;
import org.gbif.pipelines.transforms.common.CheckTransforms;
import org.apache.beam.sdk.transforms.DoFn.Setup;
import org.apache.beam.sdk.transforms.DoFn.Teardown;
import org.apache.beam.sdk.values.PCollection;
import lombok.Builder;
import lombok.extern.slf4j.Slf4j;
import static org.gbif.pipelines.common.PipelinesVariables.Metrics.METADATA_RECORDS_COUNT;
import static org.gbif.pipelines.common.PipelinesVariables.Pipeline.Interpretation.RecordType.METADATA;
import static org.gbif.pipelines.transforms.common.CheckTransforms.checkRecordType;
/**
* Beam level transformations for the GBIF metadata, reads an avro, writes an avro, maps from value to keyValue and
 * transforms from {@link ExtendedRecord} to {@link MetadataRecord}.
* <p>
* ParDo runs sequence of interpretations for {@link MetadataRecord} using {@link ExtendedRecord}
* as a source and {@link MetadataInterpreter} as interpretation steps
* <p>
*/
@Slf4j
public class MetadataTransform extends Transform<String, MetadataRecord> {
private Integer attempt;
private String endpointType;
private SerializableSupplier<MetadataServiceClient> clientSupplier;
private MetadataServiceClient client;
@Builder(buildMethodName = "create")
private MetadataTransform(
Integer attempt,
String endpointType,
SerializableSupplier<MetadataServiceClient> clientSupplier,
MetadataServiceClient client) {
super(MetadataRecord.class, METADATA, MetadataTransform.class.getName(), METADATA_RECORDS_COUNT);
this.attempt = attempt;
this.endpointType = endpointType;
this.clientSupplier = clientSupplier;
this.client = client;
}
public MetadataTransform counterFn(SerializableConsumer<String> counterFn) {
setCounterFn(counterFn);
return this;
}
/** Beam @Setup initializes resources */
@Setup
public void setup() {
if (client == null && clientSupplier != null) {
log.info("Initialize MetadataServiceClient");
client = clientSupplier.get();
}
}
/** Beam @Teardown closes initialized resources */
@Teardown
public void tearDown() {
if (client != null) {
log.info("Close MetadataServiceClient");
client.close();
}
}
@Override
public Optional<MetadataRecord> convert(String source) {
return Interpretation.from(source)
.to(id -> MetadataRecord.newBuilder().setId(id).setCreated(Instant.now().toEpochMilli()).build())
.via(MetadataInterpreter.interpret(client))
.via(MetadataInterpreter.interpretCrawlId(attempt))
.via(MetadataInterpreter.interpretEndpointType(endpointType))
.get();
}
/**
* Checks if list contains {@link MetadataTransform#getRecordType()}, else returns empty
* {@link PCollection<MetadataRecord>}
*/
public CheckTransforms<MetadataRecord> checkMetadata(Set<String> types) {
return CheckTransforms.create(MetadataRecord.class, checkRecordType(types, getRecordType()));
}
/**
* Checks if list contains metadata type only
*/
public boolean metadataOnly(Set<String> types) {
return types.size() == 1 && types.contains(getRecordType().name());
}
}
|
<gh_stars>1-10
from django.contrib import admin
from library.models import Author, Book, Genre, Review
admin.site.register(Author)
admin.site.register(Book)
admin.site.register(Genre)
admin.site.register(Review)
|
def sum_of_multiples(limit):
    total = 0
    for i in range(limit):
        if i % 3 == 0 or i % 5 == 0:
            total += i
    return total
print(sum_of_multiples(1000))
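
# Equivalent one-liner using sum() with a generator expression:
def sum_of_multiples_oneliner(limit):
    return sum(i for i in range(limit) if i % 3 == 0 or i % 5 == 0)

print(sum_of_multiples_oneliner(1000))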
|
import nextConnect from 'next-connect';
import middleware from '../../../middlewares/middleware';
const handler = nextConnect();
handler.use(middleware);
handler.post(async (req, res) => {
const id = req.body.id
if (!id) {
res.status(400).send('Missing field(s)');
return;
}
  req.db.collection('users').deleteOne({"_id": id}, function(err, result) {
    if (err) {
      res.status(500).json({error: true, message: 'Error deleting User'});
      return;
    }
    // deleteOne yields a result object, not the document; deletedCount tells us whether anything matched.
    if (!result || result.deletedCount === 0)
      res.status(404).json({error: true, message: 'User not found'});
    else
      res.status(200).json(result);
})
})
export default handler;
|
package main
import (
"fmt"
"os"
"strings"
"github.com/mbauhardt/moneyflow/parse"
"github.com/mbauhardt/moneyflow/persistence"
)
func main() {
env, err := persistence.Env()
if err != nil {
panic(err)
}
argsWithoutProg := os.Args[1:]
// new db
doc, dberr := persistence.NewDatabaseDocument(env)
if dberr != nil {
panic(dberr)
}
	// parse
	input := strings.Join(argsWithoutProg, " ")
	tags := parse.ParseTags(input)
	money, merr := parse.ParseMoney(input)
	if merr != nil {
		panic(merr)
	}
	desc, derr := parse.ParseDescription(input)
if derr != nil {
panic(derr)
}
// save
persistence.SaveDescription(env, doc, desc)
persistence.SaveTags(env, doc, tags)
if money != nil {
persistence.SaveMoney(env, doc, money)
}
fmt.Println("Added new doc[" + doc.Id + "]")
}
|
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.storage.app.service;
import org.opencb.opencga.core.common.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Properties;
/**
* Created by jacobo on 23/10/14.
*/
public class OpenCGAStorageServiceMain {
public static Properties properties;
private static String opencgaHome;
private static Logger logger = LoggerFactory.getLogger(OpenCGAStorageServiceMain.class);
public static void main(String args[]) throws IOException {
OptionParser optionParser = new OptionParser();
String command = optionParser.parse(args);
OptionParser.GeneralOptions opts = optionParser.getGeneralOptions();
//Get properties
String propertyAppHome = System.getProperty("app.home");
if (propertyAppHome != null) {
opencgaHome = System.getProperty("app.home");
} else {
String envAppHome = System.getenv("OPENCGA_HOME");
if (envAppHome != null) {
opencgaHome = envAppHome;
} else {
opencgaHome = Paths.get("opencga-storage-app", "build").toString(); //If it has not been run from the shell script (debug)
}
}
Config.setOpenCGAHome(opencgaHome);
File configFile;
if(opts.conf.isEmpty()) {
configFile = Paths.get(opencgaHome, "conf", "storage-service.properties").toFile();
} else {
configFile = Paths.get(opts.conf).toFile();
}
if (!configFile.exists()) {
throw new FileNotFoundException("File " + configFile.toString() + " not found");
}
properties = new Properties();
properties.load(new FileInputStream(configFile));
if (opts.port != 0) {
properties.setProperty(OpenCGAStorageService.PORT, Integer.toString(opts.port));
}
int status;
try {
OpenCGAStorageService storageService = OpenCGAStorageService.newInstance(properties);
storageService.start();
status = storageService.join();
} catch (Exception e) {
e.printStackTrace();
status = 3;
}
System.exit(status);
}
}
|
<reponame>ac-dc87/ruby-plugin
require 'ruby/plugin/integrations/prism_edc/request'
require 'json'
RSpec.describe Ruby::Plugin::Integrations::PrismEdc::Request do
let(:request) { nil }
before do
# Simulating mapping not provided
$config = {
mapping: {
'data' => {}
}
}
Ruby::Plugin::Integrations::PrismEdc::Request.base_uri 'https://esource.nextrials.com/esource-toolkit'
end
subject { described_class.call(request) }
describe '#send_form_event', :vcr do
let(:request) do
{
action: 'send_form_event',
verb: 'POST',
body: {
'formAbstractId' => '3083372917',
'formFields' => {
'IT.109cdc9572cf3732a8024c1a69d06eff7bcc32a5' => '12:12',
'IT.20f745f2240052fa6ae7b0631d3179a5ee6cd808' => 'BREAKFAST',
'IT.590f2722f3b1b05aa327654611621866d07e785f' => '01 MAY 2019'
}
},
original_payload: {}
}
end
context 'meal event' do
it 'returns 200 OK (there is not a way to verify this from prism EDC API yet)' do
expect(subject).to eq('OK')
end
end
    context 'error case we need to handle (unknown, details to come from PrismEDC)' do
skip
end
end
describe '#edit_event', :vcr do
let(:request) do
{
action: 'edit_event',
verb: 'POST',
body: {
'formId' => '3154341004',
'formFields' => { 'IT.109cdc9572cf3732a8024c1a69d06eff7bcc32a5' => '10:25'}
}
}
end
context 'edit meal event' do
it 'returns 200 OK (there is not a way to verify this from prism EDC API yet)' do
expect(subject).to eq('OK')
end
end
end
describe '#enroll_subject', :vcr do
let(:request) do
{
action: 'enroll_subject',
verb: 'POST',
body: {
'ntiCode' => 'demo008',
'firstName' => 'Test1',
'lastName' => 'Subject2',
'orgId' => '2890682056',
'dateOfBirth' => '1987-02-02',
'gender' => '0'
}
}
end
context 'edit meal event' do
it 'returns "subjectId" in the response body' do
parsed_response = JSON.parse(subject)
expect(parsed_response.key?('subjectId')).to be_truthy
end
end
end
end
|
#!/bin/bash
# Notarize dmg with Apple
xcrun altool --notarize-app -t osx -f Tippy.dmg --primary-bundle-id "com.nervos.tippy" -u "$APPLE_ID" -p "$APPLE_ID_PASSWORD" --output-format xml | tee notarize_result
request_id="$(cat notarize_result | grep -A1 "RequestUUID" | sed -n 's/\s*<string>\([^<]*\)<\/string>/\1/p' | xargs)"
echo "Notarization in progress, request id: $request_id"
echo "Waiting for approval..."
while true; do
echo -n "."
sleep 10 # We need to wait 10 sec, even for the first loop because Apple might still not have their own data...
xcrun altool --notarization-info "$request_id" -u "$APPLE_ID" -p "$APPLE_ID_PASSWORD" > notarization_progress
if grep -q "Status: success" notarization_progress; then
echo ""
cat notarization_progress
echo "Notarization succeed"
break
elif grep -q "Status: in progress" notarization_progress; then
continue
else
cat notarization_progress
echo "Notarization failed"
exit 1
fi
done
|
#!/usr/bin/env bash
# Create directories if they don't exist with user privileges.
# Otherwise docker might create them with root privileges.
DIRS="$HOME/.composer"
DIRS="$DIRS $HOME/.npm"
DIRS="$DIRS $PWD/vendor/shopware/platform/src/Administration/Resources/app/administration/test/e2e"
DIRS="$DIRS $PWD/vendor/shopware/platform/src/Storefront/Resources/app/storefront/test/e2e"
for dir in $DIRS; do
mkdir -p $dir || true
done
if [[ "$OSTYPE" == "darwin"* ]]; then
for dir in $DIRS; do
(cd "$dir") || {
echo "$dir is not accessible"
exit 1
}
if [[ $(stat -f '%Su' "$dir") == 'root' ]]; then
err_msg="Error: The owner of $dir is root. This can cause problems with your docker setup. Please change the owner/group of these folders."
echo $err_msg;
exit 1
fi
done
elif [[ "$OSTYPE" == "linux"* ]]; then
for dir in $DIRS; do
(cd "$dir") || {
echo "$dir is not accessible"
exit 1
}
if [[ $(stat -c '%U' "$dir") == 'root' ]]; then
err_msg="Error: The owner of $dir is root. This can cause problems with your docker setup. Please change the owner/group of these folders."
echo $err_msg;
exit 1
fi
done
fi
|
//
// Animation Viewer
//
//
// Copyright (C) 2016 <NAME>
//
#pragma once
#include "documentapi.h"
#include "animationview.h"
#include "animationproperties.h"
#include <QMainWindow>
#include <QToolBar>
#include <QLabel>
#include <QSlider>
//-------------------------- AnimationViewer --------------------------------
//---------------------------------------------------------------------------
class AnimationViewer : public QMainWindow
{
Q_OBJECT
public:
AnimationViewer(QWidget *parent = nullptr);
virtual ~AnimationViewer();
public slots:
QToolBar *toolbar() const;
void view(Studio::Document *document);
void edit(Studio::Document *document);
private:
QToolBar *m_toolbar;
AnimationView *m_view;
AnimationProperties *m_properties;
};
|
#!/bin/bash
kubectl apply -f k8s/deployment.yaml && \
kubectl apply -f k8s/service.yaml
|
<reponame>minuk8932/Algorithm_BaekJoon
package minimumcost_spanning_tree;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.*;
/**
*
* @author exponential-e
* 백준 5818번: SPIJUNI
*
* @see https://www.acmicpc.net/problem/5818
*
*/
public class Boj5818 {
private static int[] parent;
private static int[] message;
private static Queue<Spy> pq;
private static class Spy {
int node1;
int node2;
int cost;
public Spy(int node1, int node2, int cost) {
this.node1 = node1;
this.node2 = node2;
this.cost = cost;
}
}
public static void main(String[] args) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
int N = Integer.parseInt(br.readLine());
init(N);
pq = new PriorityQueue<>(Comparator.comparingInt(spy -> spy.cost));
for (int i = 0; i < N; i++) {
StringTokenizer st = new StringTokenizer(br.readLine());
for (int j = 0; j < N; j++) {
int cost = Integer.parseInt(st.nextToken());
if (i == j) continue;
pq.offer(new Spy(i, j, cost));
}
}
StringTokenizer st = new StringTokenizer(br.readLine());
for (int i = 0; i < N; i++) {
message[i] = Integer.parseInt(st.nextToken());
pq.offer(new Spy(N, i, message[i]));
}
System.out.println(kruskal());
}
private static void init(int n) {
message = new int[n];
parent = new int[n + 1];
Arrays.fill(parent, -1);
}
private static int kruskal() {
int cost = 0;
while (!pq.isEmpty()) {
Spy current = pq.poll();
if (merged(current.node1, current.node2)) continue;
cost += current.cost;
}
return cost;
}
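    // Union-Find: parent[x] < 0 marks a root and stores -(size of its component).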
private static int find(int x) {
if (parent[x] < 0) return x;
return parent[x] = find(parent[x]);
}
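    // Unions the two components (union by size); returns true when the nodes were
    // already connected, which makes kruskal() skip that edge.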
private static boolean merged(int x, int y) {
x = find(x);
y = find(y);
if (x == y) return true;
if (parent[x] < parent[y]) {
parent[x] += parent[y];
parent[y] = x;
} else {
parent[y] += parent[x];
parent[x] = y;
}
return false;
}
}
|
<filename>Decorator/src/Decorator2.java<gh_stars>0
/**
 * Concrete decorator class 2
*/
public class Decorator2 extends AbstractDecorator{
public Decorator2(Component component){
super(component);
}
@Override
public void show() {
System.out.println("装饰类 2");
super.show();
}
}
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/cache/jdbc/EnsureIndicesTask.java<gh_stars>10-100
package io.opensphere.core.cache.jdbc;
import java.sql.Connection;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import io.opensphere.core.cache.CacheException;
import io.opensphere.core.cache.jdbc.ConnectionAppropriator.ConnectionUser;
import io.opensphere.core.cache.matcher.GeometryMatcher;
import io.opensphere.core.cache.matcher.PropertyMatcher;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.StringUtilities;
/**
* Database task that ensures that the data tables for some groups have indices
* for columns associated with some property matchers.
*/
public class EnsureIndicesTask extends DatabaseTask implements ConnectionUser<Void>
{
/** The group ids. */
private final int[] myGroupIds;
/** The property matchers. */
private final Collection<? extends PropertyMatcher<?>> myPropertyMatchers;
/** The number of created indices. */
private transient int myCreatedIndexCount;
/**
* Constructor.
*
* @param groupIds The group ids.
* @param propertyMatchers The property matchers.
* @param databaseTaskFactory The database task factory.
*/
public EnsureIndicesTask(int[] groupIds, Collection<? extends PropertyMatcher<?>> propertyMatchers,
DatabaseTaskFactory databaseTaskFactory)
{
super(databaseTaskFactory);
Utilities.checkNull(groupIds, "groupIds");
Utilities.checkNull(propertyMatchers, "propertyMatchers");
myGroupIds = groupIds.clone();
myPropertyMatchers = propertyMatchers;
}
@Override
public String getTimingMessage()
{
return "Time to ensure indices on " + getGroupIds().length + " groups (created " + myCreatedIndexCount + " indices): ";
}
@Override
public Void run(Connection conn) throws CacheException
{
// First determine what columns are to be indexed.
List<String> columnsToBeIndexed = New.list();
for (PropertyMatcher<?> matcher : getPropertyMatchers())
{
// Do not use an ordinary index for a geometry matcher.
if (!(matcher instanceof GeometryMatcher))
{
List<String> columnNames = getTypeMapper().getColumnNames(matcher.getPropertyDescriptor());
columnsToBeIndexed.addAll(columnNames);
}
}
if (!columnsToBeIndexed.isEmpty())
{
if (columnsToBeIndexed.size() > 1)
{
Collections.sort(columnsToBeIndexed);
}
for (int groupId : getGroupIds())
{
String tableName = TableNames.getDataTableName(groupId);
StringBuilder indexName = new StringBuilder(32).append("\"INDEX_").append(tableName).append('_');
StringUtilities.join(indexName, "_", columnsToBeIndexed).append('"');
StringBuilder columns = new StringBuilder(32).append('"');
StringUtilities.join(columns, "\", \"", columnsToBeIndexed).append('"');
String indexNameStr = indexName.toString();
if (getDatabaseState().getCreatedIndices().add(indexNameStr))
{
getCacheUtilities().execute(getSQLGenerator().generateCreateIndex(indexNameStr, tableName, false,
New.array(columnsToBeIndexed, String.class)), conn);
myCreatedIndexCount++;
}
}
}
return null;
}
/**
* Accessor for the groupIds.
*
* @return The groupIds.
*/
protected int[] getGroupIds()
{
return myGroupIds;
}
/**
* Accessor for the propertyMatchers.
*
* @return The propertyMatchers.
*/
protected Collection<? extends PropertyMatcher<?>> getPropertyMatchers()
{
return myPropertyMatchers;
}
}
|
public class BreastRadiologyDocument
{
public string EncounterReference { get; set; }
public string SubjectReference { get; set; }
}
public class ClinicalImpression
{
public string Encounter { get; set; }
public string Subject { get; set; }
}
public class ClinicalImpressionBase
{
    // Settable properties: Init() assigns them outside the constructor, so they cannot be get-only.
    private BreastRadiologyDocument Doc { get; set; }
    private ClinicalImpression Resource { get; set; }
public ClinicalImpressionBase(BreastRadiologyDocument doc) : base()
{
this.Init(doc, new ClinicalImpression());
}
public void Init(BreastRadiologyDocument doc, ClinicalImpression impression)
{
this.Doc = doc;
this.Resource = impression;
}
public void Write()
{
this.Resource.Encounter = this.Doc.EncounterReference;
this.Resource.Subject = this.Doc.SubjectReference;
}
}
|
public static int getRandomNumber(){
// create instance of Random class
Random rand = new Random();
    // Generate a random integer in the range 1 to 10
int rand_int = rand.nextInt(10) + 1;
return rand_int;
}
|
<filename>app/src/main/java/com/h5190067/mahmut_mirza_kutlu_final/adaptor/GolfAdaptor.java
package com.h5190067.mahmut_mirza_kutlu_final.adaptor;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.h5190067.mahmut_mirza_kutlu_final.R;
import com.h5190067.mahmut_mirza_kutlu_final.models.GolfModel;
import com.h5190067.mahmut_mirza_kutlu_final.utilities.GlideUtil;
import java.util.List;
public class GolfAdaptor extends RecyclerView.Adapter<GolfViewHolder> {
List<GolfModel> golfModels;
Context context;
OnItemClickListener onItemClickListener;
public interface OnItemClickListener{
void onClick(int position);
}
public GolfAdaptor(List<GolfModel> golfModelList, Context context, OnItemClickListener onItemClickListener) {
this.golfModels = golfModelList;
this.context = context;
this.onItemClickListener=onItemClickListener;
}
@NonNull
@Override
public GolfViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View itemView = LayoutInflater.from(parent.getContext()).inflate(R.layout.card_view_golfs,parent,false);
return new GolfViewHolder(itemView,onItemClickListener);
}
@Override
public void onBindViewHolder(@NonNull GolfViewHolder viewHolder, int position) {
String hp = context.getString(R.string.hp);
String weight = context.getString(R.string.weight);
String year = context.getString(R.string.year);
viewHolder.txtGolfName.setText(golfModels.get(position).getModel());
viewHolder.txtGolfInfo1.setText(hp + golfModels.get(position).getBeygirGucu());
viewHolder.txtGolfInfo2.setText(weight + golfModels.get(position).getAgirlik());
viewHolder.txtGolfInfo3.setText(year + golfModels.get(position).getUretimYili());
GlideUtil.glideImage(context,golfModels.get(position).getKartResim(),viewHolder.golfImage);
}
@Override
public int getItemCount() {
return golfModels.size();
}
}
|
#!/bin/bash
set -e
root=$(dirname "${BASH_SOURCE[0]}")
# shellcheck disable=SC1091
source .env.local
bash "$root/redeploy-dev-resources.sh"
dlv --listen=:2345 --headless=true --api-version=2 debug main.go -- --zap-devel=true
|
<filename>lib/speakout/survey.rb
module Speakout
class Survey
def initialize(api, id = nil)
@api = api
@id = id
end
def attributes
if @id
response, status = @api.get("surveys/#{@id}")
return response
else
nil
end
end
def update(attributes)
attributes.delete('id')
# transform here so it fits straight into Rails' nested attributes structure
attributes['question_blocks_attributes'] = attributes['question_blocks'] || []
attributes.delete('question_blocks')
attributes['question_blocks_attributes'] = attributes['question_blocks_attributes'].map do |key, question_block|
question_block['questions_attributes'] = question_block['questions'].map{|k,v| v}
question_block.delete('questions')
question_block
end
puts "Attributes being sent: #{attributes.inspect}"
response, status = @api.put("surveys/#{@id}", attributes)
if status < 400
return true
else
return false, response['errors']
end
end
    # Copies the survey and returns a Speakout::Survey wrapping the new survey
def clone
response, status = @api.post("surveys/#{@id}/clone")
if status < 400
new_id = response['id']
return Speakout::Survey.new(@api, new_id)
else
return false
end
end
def results
end
end
end
|
#!/bin/bash
#MSUB -A p20519
#MSUB -l walltime=24:00:00
#MSUB -l nodes=1:ppn=1
#MSUB -M jiawu@u.northwestern.edu
#MSUB -j oe
#MSUB -o /projects/p20519/jia_output/Roller_error.txt
#MSUB -m bae
#MSUB -q normal
#MSUB -N RF_window_scan_janes
#MSUB -V
nwindows=${MOAB_JOBARRAYINDEX}
workon seqgen
module load python/anaconda3
cd /projects/p20519/Roller
python run_pipeline_RF_window_scan_janes.py ${nwindows}
|
#!/usr/bin/env bash
#
# OpenVPN helper to add DHCP information into systemd-resolved via DBus.
# Copyright (C) 2016, Jonathan Wright <jon@than.io>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This script will parse DHCP options set via OpenVPN (dhcp-option) to update
# systemd-resolved directly via DBus, instead of updating /etc/resolv.conf. To
# install, set as the 'up' and 'down' script in your OpenVPN configuration file
# or via the command-line arguments, alongside setting the 'down-pre' option to
# run the 'down' script before the device is closed. For example:
# up /etc/openvpn/scripts/update-systemd-resolved
# down /etc/openvpn/scripts/update-systemd-resolved
# down-pre
# Define what needs to be called via DBus
DBUS_DEST="org.freedesktop.resolve1"
DBUS_NODE="/org/freedesktop/resolve1"
SCRIPT_NAME="${BASH_SOURCE[0]##*/}"
log() {
logger -s -t "$SCRIPT_NAME" "$@"
}
for level in emerg err warning info debug; do
printf -v functext -- '%s() { log -p user.%s -- "$@" ; }' "$level" "$level"
eval "$functext"
done
usage() {
err "${1:?${1}. }. Usage: ${SCRIPT_NAME} up|down device_name."
}
busctl_call() {
# Preserve busctl's exit status
busctl call "$DBUS_DEST" "$DBUS_NODE" "${DBUS_DEST}.Manager" "$@" || {
local -i status=$?
emerg "'busctl' exited with status $status"
return $status
}
}
get_link_info() {
dev="$1"
shift
link=''
link="$(ip link show dev "$dev")" || return $?
echo "$dev" "${link%%:*}"
}
dhcp_settings() {
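  # OpenVPN publishes pushed options as environment variables named
  # foreign_option_1, foreign_option_2, ...; walk them via indirect
  # expansion and emit the payload of each "dhcp-option" entry.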
for foreign_option in "${!foreign_option_@}"; do
foreign_option_value="${!foreign_option}"
[[ "$foreign_option_value" == *dhcp-option* ]] \
&& echo "${foreign_option_value#dhcp-option }"
done
}
up() {
local link="$1"
shift
local if_index="$1"
shift
info "Link '$link' coming up"
# Preset values for processing -- will be altered in the various process_*
# functions.
local -a dns_servers=() dns_domain=() dns_search=() dns_routed=()
local -i dns_server_count=0 dns_domain_count=0 dns_search_count=0 dns_routed_count=0
local dns_sec=""
while read -r setting; do
setting_type="${setting%% *}"
setting_value="${setting#* }"
process_setting_function="${setting_type,,}"
process_setting_function="process_${process_setting_function//-/_}"
if declare -f "$process_setting_function" &>/dev/null; then
"$process_setting_function" "$setting_value" || return $?
else
warning "Not a recognized DHCP setting: '${setting}'"
fi
done < <(dhcp_settings)
if [[ "${#dns_servers[*]}" -gt 0 ]]; then
busctl_params=("$if_index" "$dns_server_count" "${dns_servers[@]}")
info "SetLinkDNS(${busctl_params[*]})"
busctl_call SetLinkDNS 'ia(iay)' "${busctl_params[@]}" || return $?
fi
if [[ "${#dns_domain[*]}" -gt 0 \
|| "${#dns_search[*]}" -gt 0 \
|| "${#dns_routed[*]}" -gt 0 ]]; then
dns_count=$((dns_domain_count+dns_search_count+dns_routed_count))
busctl_params=("$if_index" "$dns_count")
if [[ "${#dns_domain[*]}" -gt 0 ]]; then
busctl_params+=("${dns_domain[@]}")
fi
if [[ "${#dns_search[*]}" -gt 0 ]]; then
busctl_params+=("${dns_search[@]}")
fi
if [[ "${#dns_routed[*]}" -gt 0 ]]; then
busctl_params+=("${dns_routed[@]}")
fi
info "SetLinkDomains(${busctl_params[*]})"
busctl_call SetLinkDomains 'ia(sb)' "${busctl_params[@]}" || return $?
fi
if [[ -n "${dns_sec}" ]]; then
if [[ "${dns_sec}" == "default" ]]; then
# We need to provide an empty string to use the default settings
info "SetLinkDNSSEC($if_index '')"
busctl_call SetLinkDNSSEC 'is' "$if_index" "" || return $?
else
info "SetLinkDNSSEC($if_index ${dns_sec})"
busctl_call SetLinkDNSSEC 'is' "$if_index" "${dns_sec}" || return $?
fi
fi
}
down() {
local link="$1"
shift
local if_index="$1"
shift
info "Link '$link' going down"
if [[ "$(whoami 2>/dev/null)" != "root" ]]; then
# Cleanly handle the privilege dropped case by not calling RevertLink
info "Privileges dropped in the client: Cannot call RevertLink."
else
busctl_call RevertLink i "$if_index"
fi
}
process_dns() {
address="$1"
shift
if looks_like_ipv6 "$address"; then
process_dns_ipv6 "$address" || return $?
elif looks_like_ipv4 "$address"; then
process_dns_ipv4 "$address" || return $?
else
err "Not a valid IPv6 or IPv4 address: '$address'"
return 1
fi
}
looks_like_ipv4() {
[[ -n "$1" ]] && {
local dots="${1//[^.]}"
(( ${#dots} == 3 ))
}
}
looks_like_ipv6() {
[[ -n "$1" ]] && {
local colons="${1//[^:]}"
(( ${#colons} >= 2 ))
}
}
process_dns_ipv4() {
local address="$1"
shift
info "Adding IPv4 DNS Server ${address}"
(( dns_server_count += 1 ))
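  # Wire format for SetLinkDNS: 2 = AF_INET, 4 = byte count, then the four dotted-quad octets.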
dns_servers+=(2 4 ${address//./ })
}
# Enforces RFC 5952:
# 1. Don't shorten a single 0 field to '::'
# 2. Only longest run of zeros should be compressed
# 3. If there are multiple longest runs, the leftmost should be compressed
# 4. Address must be maximally compressed, so no all-zero runs next to '::'
#
# ...
#
# Thank goodness we don't have to handle port numbers, though :)
parse_ipv6() {
local raw_address="$1"
log_invalid_ipv6() {
local message="'$raw_address' is not a valid IPv6 address"
emerg "${message}: $*"
}
trap -- 'unset -f log_invalid_ipv6' RETURN
if [[ "$raw_address" == *::*::* ]]; then
log_invalid_ipv6 "address cannot contain more than one '::'"
return 1
elif [[ "$raw_address" =~ :0+:: ]] || [[ "$raw_address" =~ ::0+: ]]; then
log_invalid_ipv6 "address contains a 0-group adjacent to '::' and is not maximally shortened"
return 1
fi
local -i length=8
local -a raw_segments=()
IFS=$':' read -r -a raw_segments <<<"$raw_address"
local -i raw_length="${#raw_segments[@]}"
if (( raw_length > length )); then
log_invalid_ipv6 "expected ${length} segments, got ${raw_length}"
return 1
fi
# Store zero-runs keyed to their sizes, storing all non-zero segments prefixed
# with a token marking them as such.
local nonzero_prefix=$'!'
local -i zero_run_i=0 compressed_i=0
local -a tokenized_segments=()
local decimal_segment='' next_decimal_segment=''
for (( i = 0 ; i < raw_length ; i++ )); do
raw_segment="${raw_segments[i]}"
printf -v decimal_segment -- '%d' "0x${raw_segment:-0}"
# We're in the compressed group. The length of this run should be
# enough to bring the total number of segments to 8.
if [[ -z "$raw_segment" ]]; then
(( compressed_i = zero_run_i ))
# `+ 1' because the length of the current segment is counted in
# `raw_length'.
(( tokenized_segments[zero_run_i] = ((length - raw_length) + 1) ))
# If we have an address like `::1', skip processing the next group to
# avoid double-counting the zero-run, and increment the number of
# 0-groups to add since the second empty group is counted in
# `raw_length'.
if [[ -z "${raw_segments[i + 1]}" ]]; then
(( i++ ))
(( tokenized_segments[zero_run_i]++ ))
fi
(( zero_run_i++ ))
elif (( decimal_segment == 0 )); then
(( tokenized_segments[zero_run_i]++ ))
# The run is over if the next segment is not 0, so increment the
# tracking index.
printf -v next_decimal_segment -- '%d' "0x${raw_segments[i + 1]}"
(( next_decimal_segment != 0 )) && (( zero_run_i++ ))
else
# Prefix the raw segment with `nonzero_prefix' to mark this as a
# non-zero field.
tokenized_segments[zero_run_i]="${nonzero_prefix}${decimal_segment}"
(( zero_run_i++ ))
fi
done
if [[ "$raw_address" == *::* ]]; then
if (( ${#tokenized_segments[*]} == length )); then
log_invalid_ipv6 "single '0' fields should not be compressed"
return 1
else
local -i largest_run_i=0 largest_run=0
for (( i = 0 ; i < ${#tokenized_segments[@]}; i ++ )); do
# Skip groups that aren't zero-runs
[[ "${tokenized_segments[i]:0:1}" == "$nonzero_prefix" ]] && continue
if (( tokenized_segments[i] > largest_run )); then
(( largest_run_i = i ))
largest_run="${tokenized_segments[i]}"
fi
done
local -i compressed_run="${tokenized_segments[compressed_i]}"
if (( largest_run > compressed_run )); then
log_invalid_ipv6 "the compressed run of all-zero fields is smaller than the largest such run"
return 1
elif (( largest_run == compressed_run )) && (( largest_run_i < compressed_i )); then
log_invalid_ipv6 "only the leftmost largest run of all-zero fields should be compressed"
return 1
fi
fi
fi
for segment in "${tokenized_segments[@]}"; do
if [[ "${segment:0:1}" == "$nonzero_prefix" ]]; then
printf -- '%04x\n' "${segment#${nonzero_prefix}}"
else
for (( n = 0 ; n < segment ; n++ )); do
echo 0000
done
fi
done
}
process_dns_ipv6() {
local address="$1"
shift
info "Adding IPv6 DNS Server ${address}"
local -a segments=()
segments=($(parse_ipv6 "$address")) || return $?
# Add AF_INET6 and byte count
dns_servers+=(10 16)
for segment in "${segments[@]}"; do
dns_servers+=("$((16#${segment:0:2}))" "$((16#${segment:2:2}))")
done
(( dns_server_count += 1 ))
}
process_domain() {
local domain="$1"
shift
info "Setting DNS Domain ${domain}"
(( dns_domain_count = 1 ))
dns_domain=("${domain}" false)
}
process_adapter_domain_suffix() {
# This enables support for ADAPTER_DOMAIN_SUFFIX which is a Microsoft standard
# which works in the same way as DOMAIN to set the primary search domain on
# this specific link.
process_domain "$@"
}
process_domain_search() {
local domain="$1"
shift
info "Adding DNS Search Domain ${domain}"
(( dns_search_count += 1 ))
dns_search+=("${domain}" false)
}
process_domain_route() {
local domain="$1"
shift
info "Adding DNS Routed Domain ${domain}"
(( dns_routed_count += 1 ))
dns_routed+=("${domain}" true)
}
process_dnssec() {
local option="$1" setting=""
shift
case "${option,,}" in
yes|true)
setting="yes" ;;
no|false)
setting="no" ;;
default)
setting="default" ;;
allow-downgrade)
setting="allow-downgrade" ;;
*)
local message="'$option' is not a valid DNSSEC option"
emerg "${message}"
return 1 ;;
esac
info "Setting DNSSEC to ${setting}"
dns_sec="${setting}"
}
main() {
local script_type="$1"
shift
local dev="$1"
shift
if [[ -z "$script_type" ]]; then
usage 'No script type specified'
return 1
elif [[ -z "$dev" ]]; then
usage 'No device name specified'
return 1
elif ! declare -f "${script_type}" &>/dev/null; then
usage "Invalid script type: '${script_type}'"
return 1
else
if ! read -r link if_index _ < <(get_link_info "$dev"); then
usage "Invalid device name: '$dev'"
return 1
fi
"$script_type" "$link" "$if_index" "$@"
fi
}
if [[ "${BASH_SOURCE[0]}" == "$0" ]] || [[ "$AUTOMATED_TESTING" == 1 ]]; then
set -o nounset
main "${script_type:-}" "${dev:-}" "$@"
fi
|
package net.b07z.sepia.server.teach.server;
import static spark.Spark.get;
import static spark.Spark.halt;
import static spark.Spark.port;
import static spark.Spark.post;
import static spark.Spark.secure;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.b07z.sepia.server.core.data.Answer;
import net.b07z.sepia.server.core.data.CmdMap;
import net.b07z.sepia.server.core.data.Command;
import net.b07z.sepia.server.core.data.Defaults;
import net.b07z.sepia.server.core.data.Language;
import net.b07z.sepia.server.core.data.Role;
import net.b07z.sepia.server.core.data.SentenceBuilder;
import net.b07z.sepia.server.core.endpoints.CoreEndpoints;
import net.b07z.sepia.server.core.server.ConfigDefaults;
import net.b07z.sepia.server.core.server.RequestGetOrFormParameters;
import net.b07z.sepia.server.core.server.RequestParameters;
import net.b07z.sepia.server.core.server.SparkJavaFw;
import net.b07z.sepia.server.core.server.Validate;
import net.b07z.sepia.server.core.tools.ClassBuilder;
import net.b07z.sepia.server.core.tools.Connectors;
import net.b07z.sepia.server.core.tools.Converters;
import net.b07z.sepia.server.core.tools.DateTime;
import net.b07z.sepia.server.core.tools.Debugger;
import net.b07z.sepia.server.core.tools.Is;
import net.b07z.sepia.server.core.tools.JSON;
import net.b07z.sepia.server.core.tools.Timer;
import net.b07z.sepia.server.core.users.Account;
import net.b07z.sepia.server.teach.database.TeachDatabase;
import net.b07z.sepia.server.teach.database.TeachUiDataLoader;
import net.b07z.sepia.server.teach.data.Vote;
import net.b07z.sepia.server.teach.database.ElasticsearchLogger;
import net.b07z.sepia.server.teach.database.Feedback;
import spark.Request;
import spark.Response;
public final class Start {
private static final Logger log = LoggerFactory.getLogger(Start.class);
private static String startGMT = "";
private static TeachDatabase db = null;
private static TeachDatabase getDatabase(){
return db == null ? (TeachDatabase) ClassBuilder.construct(Config.teachDbModule) : db;
}
public static final String LIVE_SERVER = "live";
public static final String TEST_SERVER = "test";
public static final String CUSTOM_SERVER = "custom";
public static String serverType = "";
public static boolean isSSL = false;
private static String keystorePwd = "<PASSWORD>";
private Start() {}
public static void main(String[] args) {
//load settings
serverType = TEST_SERVER;
for (String arg : args){
if (arg.equals("--test")){
//Test system
serverType = TEST_SERVER;
}else if (arg.equals("--live")){
//Local test system
serverType = LIVE_SERVER;
}else if (arg.equals("--my") || arg.equals("--custom")){
//Custom system
serverType = CUSTOM_SERVER;
}else if (arg.equals("--local")){
//Local test system
serverType = "local";
}else if (arg.equals("--ssl")){
//SSL
isSSL = true;
}else if (arg.startsWith("keystorePwd=")){
//Java key-store password - TODO: maybe not the best way to load the pwd ...
keystorePwd = arg.replaceFirst(".*?=", "").trim();
}
}
//set security
if (isSSL){
secure("Xtensions/SSL/ssl-keystore.jks", keystorePwd, null, null);
}
//load settings
if (serverType.equals(TEST_SERVER)){
log.info("--- Running " + Config.SERVERNAME + " with TEST settings ---");
Config.configFile = "Xtensions/teach.test.properties";
}else if (serverType.equals(LIVE_SERVER)){
log.info("--- Running " + Config.SERVERNAME + " with LIVE settings ---");
Config.configFile = "Xtensions/teach.properties";
}else if (serverType.equals(CUSTOM_SERVER)){
log.info("--- Running " + Config.SERVERNAME + " with CUSTOM settings ---");
Config.configFile = "Xtensions/teach.custom.properties";
}
Config.loadSettings(Config.configFile);
//SETUP CORE-TOOLS
JSONObject coreToolsConfig;
//part 1
coreToolsConfig = JSON.make(
"defaultAssistAPI", Config.assistAPI,
"defaultTeachAPI", Config.endpointUrl,
"clusterKey", Config.clusterKey,
"privacyPolicy", Config.privacyPolicyLink
);
ConfigDefaults.setupCoreTools(coreToolsConfig);
//part 2
long clusterTic = Timer.tic();
JSONObject assistApiClusterData = ConfigDefaults.getAssistantClusterData();
if (assistApiClusterData == null){
throw new RuntimeException("Core-tools are NOT set properly! AssistAPI could not be reached!");
}else{
log.info("Received cluster-data from AssistAPI after " + Timer.toc(clusterTic) + "ms");
}
coreToolsConfig = JSON.make(
"defaultAssistantUserId", JSON.getString(assistApiClusterData, "assistantUserId")
);
//common micro-services API-Keys
//...JSON.put(coreToolsConfig, "...ApiKey", ...);
ConfigDefaults.setupCoreTools(coreToolsConfig);
//Check core-tools settings
if (!ConfigDefaults.areCoreToolsSet()){
throw new RuntimeException("Core-tools are NOT set properly!");
}
Debugger.println("Starting Teach-API server " + Config.apiVersion + " (" + serverType + ")", 3);
startGMT = DateTime.getGMT(new Date(), "dd.MM.yyyy' - 'HH:mm:ss' - GMT'");
Debugger.println("Date: " + startGMT, 3);
//int maxThreads = 8;
//threadPool(maxThreads);
try {
port(Integer.valueOf(System.getenv("PORT")));
Debugger.println("Server running on port: " + Integer.valueOf(System.getenv("PORT")), 3);
}catch (Exception e){
int port = Config.serverPort;
port(port);
Debugger.println("Server running on port " + port, 3);
}
//set access-control headers to enable CORS
if (Config.enableCORS){
SparkJavaFw.enableCORS("*", "*", "*");
}
//Authenticate user and store basic account info
/*
before((request, response) -> {
authenticate(request, response);
});
*/
get("/online", (request, response) -> CoreEndpoints.onlineCheck(request, response));
get("/ping", (request, response) -> CoreEndpoints.ping(request, response, Config.SERVERNAME));
get("/validate", (request, response) -> CoreEndpoints.validateServer(request, response, Config.SERVERNAME,
Config.apiVersion, Config.localName, Config.localSecret));
post("/hello", Start::helloWorld);
post("/getTeachUiServices", Start::getTeachUiServices);
post("/getCustomCommandMappings", Start::getCustomCommandMappings);
post("/setCustomCommandMappings", Start::setCustomCommandMappings);
post("/getPersonalCommands", Start::getPersonalCommands);
post("/getAllPersonalCommands", Start::getAllPersonalCommands);
post("/getAllCustomAssistantCommands", Start::getAllCustomAssistantCommands);
post("/getPersonalCommandsByIds", Start::getPersonalCommandsByIds);
post("/deletePersonalCommand", Start::deletePersonalCommand);
post("/submitPersonalCommand", Start::submitPersonalCommand);
// e.g. /submitPersonalCommand?language=en&sentence=This is the command&command=search&public=yes&reply=reply one&reply=reply two&KEY=...'
post("/getAllCustomSentencesAsTrainingData", Start::getAllCustomSentencesAsTrainingData);
post("/addSentence", Start::addSentence); // To add a variation that is not a direct translation of an existing sentence.
// e.g. /addSentence?id=ABCD&language=de&text=new sentenceKEY=...
post("/voteSentence", Start::voteSentence);
// e.g. /voteSentence?id=ABC12345&vote=positive|negative&text=This is the sentence&language=en&KEY=...'
post("/feedback", Start::feedback);
//TODO: add more tests for answers!
post("/addAnswer", Start::addAnswer);
post("/deleteAnswerById", Start::deleteAnswerById);
post("/modifyAnswer", Start::modifyAnswer);
post("/getAnswersByType", Start::getAnswersByType);
post("/voteAnswer", Start::voteAnswer);
post("/getLogs", Start::getLogs);
//Error handling
SparkJavaFw.handleError();
spark.Spark.awaitInitialization();
Debugger.println("Initialization complete, lets go!", 3);
}
private static Account authenticate(RequestParameters params, Request request, Response response){
//statistics a
long tic = System.currentTimeMillis();
Statistics.add_API_hit();
Account userAccount = new Account();
//check for intra-API call that does not require authentication again
boolean isInternalCall = Config.allowInternalCalls &&
Validate.validateInternalCall(request, params.getString("sKey"), Config.clusterKey);
if (isInternalCall){
//user data must be submitted in this case
//TODO: this should not be sent in GET calls and maybe only with SSL!
//TODO: we might need to add a white-list of endpoints that allow internal calls.
//It also is a potential risk if someone hacks the secure key and uses any user ID he wants :-(
String accountS = params.getString("userData");
JSONObject accountJS = JSON.parseString(accountS);
if (accountJS == null){
log.warn("Invalid internal API call from " + request.ip());
halt(SparkJavaFw.returnResult(request, response, "{\"result\":\"fail\",\"error\":\"401 not authorized - invalid userData\"}", 401));
}else{
//log.info("successful internal API call from " + request.ip()); //activate this?
userAccount.importJSON(accountJS);
}
//else do database authentication
}else if (!userAccount.authenticate(params)){
haltWithAuthError(request, response);
}
request.attribute(Defaults.ACCOUNT_ATTR, userAccount); //Note: keep this for testing and role-checks
//statistics b
if (!isInternalCall){
Statistics.add_API_hit_authenticated();
Statistics.save_Auth_total_time(tic);
}else{
Statistics.add_API_hit_internal();
}
return userAccount;
}
private static void haltWithAuthError(Request request, Response response) {
halt(SparkJavaFw.returnResult(request, response, "{\"result\":\"fail\",\"error\":\"401 not authorized\"}", 401));
}
//------------------ ENDPOINTS -------------------
//hello and statistics end-point
private static String helloWorld(Request request, Response response){
//Test Authentication
/*
Account userAccount = request.attribute(ACCOUNT_ATTR);
System.out.println("User ID: " + userAccount.getUserID());
System.out.println("User Name Data: " + userAccount.userName);
System.out.println("User Name Short: " + userAccount.userNameShort);
System.out.println("Access Level: " + userAccount.getAccessLevel());
System.out.println("Account Language: " + userAccount.language);
System.out.println("User Roles: " + userAccount.userRoles.toString());
*/
//time now
Date date = new Date();
String nowGMT = DateTime.getGMT(date, "dd.MM.yyyy' - 'HH:mm:ss' - GMT'");
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
//msg
Account userAccount = authenticate(params, request, response);
String reply;
if (userAccount.hasRole(Role.developer.name())){
//stats
reply = "Hello World!"
+ "<br><br>"
+ "Stats:<br>" +
"<br>api: " + Config.apiVersion +
"<br>started: " + startGMT +
"<br>now: " + nowGMT +
"<br>host: " + request.host() +
"<br>url: " + request.url() + "<br><br>" +
Statistics.getInfo();
}else{
reply = "Hello World!";
}
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "success");
JSON.add(msg, "reply", reply);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//-- Teach-UI DATA --
static String getTeachUiServices(Request request, Response response) {
//we could use an account-dependent list of services
//RequestParameters params = new RequestGetOrFormParameters(request);
//Account account = authenticate(params, request, response);
long tic = System.currentTimeMillis();
String servicesJson;
try{
servicesJson = TeachUiDataLoader.getServices(null); //NOTE: add account here?
//statistics
Statistics.addOtherApiHit("getTeachUiServices");
Statistics.addOtherApiTime("getTeachUiServices", tic);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", servicesJson);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}catch (Exception e){
//statistics
Statistics.addOtherApiHit("getTeachUiServices ERROR");
Statistics.addOtherApiTime("getTeachUiServices ERROR", tic);
Debugger.printStackTrace(e, 3);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "Could not load data, check teach-server logs for more info.");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
}
//-- COMMAND MAPPINGS (CMD -> SERVICE) --
static String getCustomCommandMappings(Request request, Response response) {
long tic = System.currentTimeMillis();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
//get account
Account account = authenticate(params, request, response);
String customOrSystem = getOrDefault("customOrSystem", CmdMap.CUSTOM, params);
String userId = account.getUserID();
TeachDatabase db = getDatabase();
List<CmdMap> map = db.getCustomCommandMappings(userId, customOrSystem, null);
JSONObject msg = new JSONObject();
JSONArray data = new JSONArray();
for (CmdMap cm : map){
JSON.add(data, cm.getJSON());
}
JSON.add(msg, "result", data);
//statistics
Statistics.addOtherApiHit("getCustomCommandMappings");
Statistics.addOtherApiTime("getCustomCommandMappings", tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
static String setCustomCommandMappings(Request request, Response response){
long tic = System.currentTimeMillis();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
//get account
Account account = authenticate(params, request, response);
String customOrSystem = getOrDefault("customOrSystem", CmdMap.CUSTOM, params);
boolean overwrite = getOrDefault("overwrite", false, params);
String userId = account.getUserID();
String mapArrayAsString = getOrDefault("mappings", "", params);
if (mapArrayAsString.isEmpty()){
throw new RuntimeException("required parameter 'mappings' is missing or empty!");
}
Set<CmdMap> cmSet;
try{
JSONArray mapArray = JSON.parseStringToArrayOrFail(mapArrayAsString);
cmSet = new HashSet<>();
for (Object o : mapArray){
cmSet.add(new CmdMap((JSONObject) o));
}
}catch (Exception e){
throw new RuntimeException("parsing parameter 'mappings' failed with error: " + e.getMessage());
}
HashMap<String, Object> filters = new HashMap<>();
filters.put("overwrite", overwrite);
TeachDatabase db = getDatabase();
db.setCustomCommandMappings(userId, customOrSystem, cmSet, filters);
//statistics
Statistics.addOtherApiHit("setCustomCommandMappings");
Statistics.addOtherApiTime("setCustomCommandMappings", tic);
return SparkJavaFw.sendSuccessResponse(request, response);
}
//-- COMMANDS --
/**
* Checks for duplicates when "overwriteExisting" is set, otherwise it assumes the user only ends up here
* if there's no similar sentence already.
*/
static String submitPersonalCommand(Request request, Response response) {
//statistics a
long tic = System.currentTimeMillis();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
//get account
Account account = authenticate(params, request, response);
//get required parameters:
Language language = getLanguageOrFail(params);
String environment = getOrDefault("environment", "all", params);
String deviceId = params.getString("device_id");
String sentence = getOrFail("sentence", params);
String command = getOrFail("command", params);
String publicStr = getOrFail("public", params);
if (!(publicStr.equals("no") || publicStr.equals("yes"))) {
throw new RuntimeException("required parameter 'public' must be 'yes' or 'no')");
}
boolean isPublic = publicStr.equals("yes");
String localStr = getOrFail("local", params);
if (!(localStr.equals("no") || localStr.equals("yes"))) {
throw new RuntimeException("required parameter 'local' must be 'yes' or 'no')");
}
boolean isLocal = localStr.equals("yes");
String explicitStr = getOrDefault("explicit", "no", params);
boolean isExplicit = explicitStr.equals("yes");
boolean overwriteExisting = getOrDefault("overwriteExisting", false, params);
//get optional parameters:
String taggedSentence = params.getString("tagged_sentence");
JSONObject paramsJson = JSON.parseString(params.getString("params"));
String cmdSummary = params.getString("cmd_summary");
if ((cmdSummary == null || cmdSummary.isEmpty()) && (paramsJson != null && !paramsJson.isEmpty())){
cmdSummary = Converters.makeCommandSummary(command, paramsJson);
}
String userLocation = params.getString("user_location"); //TODO: The client should set this as detailed or vague as required
String[] repliesArr = params.getStringArray("reply");
List<String> replies = repliesArr == null ? new ArrayList<>() : Arrays.asList(repliesArr);
//custom button data and stuff
JSONObject dataJson;
String dataJsonString = params.getString("data");
if (Is.notNullOrEmpty(dataJsonString)){
dataJson = JSON.parseString(dataJsonString);
}else{
dataJson = new JSONObject(); //NOTE: If no data is submitted it will kill all previous data info (anyway the whole object is overwritten)
}
//build sentence - Note: Commands support sentence arrays but we use only one entry
List<Command.Sentence> sentenceList = new ArrayList<>();
Command.Sentence sentenceObj = new SentenceBuilder(sentence, account.getUserID(), "community") //TODO: add user role check to switch from "community" to "developer"?
.setLanguage(Language.valueOf(language.name().toUpperCase()))
.setParams(paramsJson)
.setCmdSummary(cmdSummary)
.setTaggedText(taggedSentence)
.setPublic(isPublic)
.setLocal(isLocal)
.setExplicit(isExplicit)
.setEnvironment(environment)
.setDeviceId(deviceId)
.setUserLocation(userLocation)
.setData(dataJson)
//TODO: keep it or remove it? The general answers should be stored in an index called "answers"
				//and the connector is the command. For chats, custom answers are inside parameter "reply". But I think it's still useful here ...
.setReplies(new ArrayList<>(replies))
.build();
sentenceList.add(sentenceObj);
//build command
Command cmd = new Command(command);
cmd.add(sentenceList);
//System.out.println(cmd.toJson()); //debug
//submit to DB
TeachDatabase db = getDatabase();
//get ID if sentence exists
if (overwriteExisting){
//search existing:
String itemId = "";
if (taggedSentence != null && !taggedSentence.isEmpty()){
itemId = db.getIdOfCommand(account.getUserID(), language.toValue(), taggedSentence);
}else if (!sentence.isEmpty()){
itemId = db.getIdOfCommand(account.getUserID(), language.toValue(), sentence);
}
if (itemId == null || itemId.isEmpty()){
//not found
db.submitCommand(cmd);
}else{
//overwrite
db.submitCommand(cmd, itemId);
}
}else{
db.submitCommand(cmd);
}
//log
logDB(request, "submitted command");
//statistics b
Statistics.add_teach_hit();
Statistics.save_teach_total_time(tic);
//answer to client
return SparkJavaFw.sendSuccessResponse(request, response);
}
static String getPersonalCommands(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
//statistics a
long tic = System.currentTimeMillis();
Language language = getLanguageOrFail(params);
boolean includePublic = getOrDefault("include_public", true, params); //default is with public now
String searchText = getOrDefault("searchText", "", params); //in case we only want certain results matching the search text
HashMap<String, Object> filters = new HashMap<>();
//String userOrSelf = getOrDefault("user", userAccount.getUserID(), request);
//note: list function for user has been removed here since the assist-API has its own version now
//filters.put("userIds", userOrSelf);
filters.put("userIds", userAccount.getUserID());
filters.put("language", language.name());
filters.put("includePublic", includePublic);
filters.put("searchText", searchText);
JSONArray output = getDatabase().getPersonalCommands(filters);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", output);
//statistics b
Statistics.addOtherApiHit("getPersonalCommands");
Statistics.addOtherApiTime("getPersonalCommands", tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
static String getAllPersonalCommands(Request request, Response response){
//statistics a
long tic = System.currentTimeMillis();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
String userId = userAccount.getUserID();
if (userId == null || userId.isEmpty()){
throw new RuntimeException("Cannot load commands, userId is invalid!");
}
JSONArray output = getSpecificPersonalCommands(userAccount.getUserID(), userAccount.getPreferredLanguage(), params);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", output);
//statistics b
Statistics.addOtherApiHit("getAllPersonalCommands");
Statistics.addOtherApiTime("getAllPersonalCommands", tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
static String getAllCustomAssistantCommands(Request request, Response response){
//statistics a
long tic = System.currentTimeMillis();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
String userId = userAccount.getUserID();
if (userId == null || userId.isEmpty()){
throw new RuntimeException("Cannot load commands, userId is invalid!");
}
if (userId.equals(ConfigDefaults.defaultAssistantUserId)){
throw new RuntimeException("User ID and assistant ID are identical. Use 'getAllPersonalCommands' instead!");
}
String language = userAccount.getPreferredLanguage(); //NOTE: by default it will use USER language, but can be overwritten via "language" parameter
JSONArray output = getSpecificPersonalCommands(ConfigDefaults.defaultAssistantUserId, language, params);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", output);
//statistics b
Statistics.addOtherApiHit("getAllCustomAssistantCommands");
Statistics.addOtherApiTime("getAllCustomAssistantCommands", tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
private static JSONArray getSpecificPersonalCommands(String userId, String userLanguage, RequestParameters params){
String language = getOrDefault("language", userLanguage, params);
String from = getOrDefault("from", "0", params);
String size = getOrDefault("size", "10", params);
String withButtonOnly = getOrDefault("button", null, params);
boolean sortByDateNewest = getOrDefault("sortByDate", false, params);
HashMap<String, Object> filters = new HashMap<>();
filters.put("userId", userId);
filters.put("language", language);
filters.put("from", from);
filters.put("size", size);
if (withButtonOnly != null){
filters.put("button", true); //Its either true or not included
}
filters.put("sortByDate", sortByDateNewest);
TeachDatabase db = getDatabase();
JSONArray output = db.getAllPersonalCommands(filters);
return output;
}
static String getPersonalCommandsByIds(Request request, Response response) {
//statistics a
long tic = System.currentTimeMillis();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
JSONArray ids = params.getJsonArray("ids");
if (Is.nullOrEmpty(ids)){
throw new RuntimeException("Missing or empty 'ids' parameter");
}
HashMap<String, Object> filters = new HashMap<>();
filters.put("userId", userAccount.getUserID()); //to make sure that this can only be used by the authenticated user
JSONArray output = getDatabase().getPersonalCommandsByIds(Converters.jsonArrayToStringList(ids), filters);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", output);
//statistics b
Statistics.addOtherApiHit("getPersonalCommandsByIds");
Statistics.addOtherApiTime("getPersonalCommandsByIds", tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
static String deletePersonalCommand(Request request, Response response) {
long tic = Debugger.tic();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
String id = getOrFail("id", params);
Account userAccount = authenticate(params, request, response);
String userId = userAccount.getUserID();
if (userId == null || userId.isEmpty()){
throw new RuntimeException("Cannot delete command, userId is invalid!");
}
TeachDatabase db = getDatabase();
JSONObject res = db.deleteCommand(id, userId);
//JSON.printJSONpretty(res); //DEBUG
JSONObject msg;
if (Connectors.httpSuccess(res)){
long deleted = JSON.getLongOrDefault(res, "deleted", 0);
//if (deleted > 0){ } //log it?
msg = JSON.make("result", JSON.make("deleted", deleted));
//statistics
logDB(request, "deleted personal command with id: " + id);
Statistics.addOtherApiHit("deleteCommandFromDB");
Statistics.addOtherApiTime("deleteCommandFromDB", tic);
}else{
msg = JSON.make("result", "fail");
Statistics.addOtherApiHit("deleteCommandFromDB ERROR");
Statistics.addOtherApiTime("deleteCommandFromDB ERROR", tic);
}
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//-- SENTENCES (of commands) --
static String getAllCustomSentencesAsTrainingData(Request request, Response response) {
//allow request?
RequestParameters params = new RequestGetOrFormParameters(request);
authenticate(params, request, response);
requireRole(request, Role.superuser);
Language language = getLanguageOrFail(params);
JSONArray sentencesForTraining = getDatabase().getAllCustomSentencesAsTrainingData(language.toValue());
return SparkJavaFw.returnResult(request, response, JSON.make(
"result", JSON.make(
"sentences", sentencesForTraining
)
).toJSONString(), 200);
}
static String addSentence(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
requireTranslatorRole(request);
String id = getOrFail("id", params);
Language language = getLanguageOrFail(params);
String text = getOrFail("text", params);
getDatabase().addSentence(id, language, text, userAccount);
logDB(request, "added sentence", language, null, text, getDatabase());
return SparkJavaFw.sendSuccessResponse(request, response);
}
static String voteSentence(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
String docId = getOrFail("id", params);
Language votedLanguage = getLanguageOrFail(params);
String votedSentence = getOrFail("text", params);
TeachDatabase db = getDatabase();
Vote vote = Vote.valueOf(getOrFail("vote", params));
db.voteSentence(docId, votedSentence, votedLanguage, vote, userAccount);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "success");
logDB(request, "voted sentence '" + votedSentence + "' as " + vote, votedLanguage, db);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//-- ANSWERS --
static String getAnswersByType(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
String userOrSelf = getOrDefault("user", userAccount.getUserID(), params); //note: user can be given as a list here, e.g. "assistant,<EMAIL>"
//require role when requested user is not user who asks
		if (!userOrSelf.equalsIgnoreCase(userAccount.getUserID())){
requireSeniorDeveloperRole(request);
//TODO: add an "all" tag to get answers of all users? or leave that to the "browser" for now?
}
String answerType = getOrFail("type", params);
String languageOrNull = getOrDefault("language", null, params);
JSONObject json = getDatabase()
.getAnswersByType(Config.DB_ANSWERS, Answer.ANSWERS_TYPE, answerType, languageOrNull, userOrSelf);
return SparkJavaFw.returnResult(request, response, json.toJSONString(), 200);
}
static String addAnswer(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
//TODO: we should allow every user to add and edit answers for himself, by default the assistant reads only Defaults.USER answers
Account userAccount = authenticate(params, request, response);
requireDeveloperRole(request);
Language language = getLanguageOrFail(params);
String type = getOrFail("type", params);
String text = getOrFail("text", params);
List<Answer.Character> characters = new ArrayList<>();
if (getOrDefault("neutral", false, params)) {
characters.add(Answer.Character.neutral);
}
if (getOrDefault("cool", false, params)) {
characters.add(Answer.Character.cool);
}
if (getOrDefault("polite", false, params)) {
characters.add(Answer.Character.polite);
}
if (getOrDefault("rude", false, params)) {
characters.add(Answer.Character.rude);
}
int repetition = Integer.parseInt(getOrFail("repetition", params));
int mood = Integer.parseInt(getOrFail("mood", params));
String source = "assistant-tools";
boolean isPublic = getOrDefault("public", true, params);
boolean isLocal = getOrDefault("local", false, params);
boolean isExplicit = getOrDefault("explicit", false, params);
String[] tagsArray = getOrDefault("tags", "", params).split(",\\s*");
List<String> tags = Arrays.asList(tagsArray).stream().filter(f -> !f.isEmpty()).collect(Collectors.toList());
//check if the answer should be saved as a default system answer
boolean makeSystemDefault = getOrDefault("systemdefault", false, params);
String savedUser = userAccount.getUserID();
if (makeSystemDefault){
requireSeniorDeveloperRole(request);
savedUser = ConfigDefaults.defaultAssistantUserId;
}
Answer answer = new Answer(language, type, text, characters, repetition, mood, savedUser, source,
isPublic, isLocal, isExplicit, null, false, null, "", tags, null);
getDatabase().addAnswer(answer, userAccount);
logDB(request, "added answer", language, null, text, getDatabase());
return SparkJavaFw.sendSuccessResponse(request, response);
}
static String deleteAnswerById(Request request, Response response) {
long tic = Debugger.tic();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
String id = getOrFail("id", params);
Account userAccount = authenticate(params, request, response);
String userId = userAccount.getUserID();
if (userId == null || userId.isEmpty()){
throw new RuntimeException("Cannot delete answer, userId is invalid!");
}
JSONObject msg;
//get document to check user
TeachDatabase db = getDatabase();
JSONObject answerRes = db.getAnswerById(id);
boolean found = JSON.getBoolean(answerRes, "found");
if (!found){
msg = JSON.make("result", "fail", "error", "ID not found!");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
JSONObject answerResSource = JSON.getJObject(answerRes, "_source");
//check user
String foundUser = JSON.getString(answerResSource, "user");
if (foundUser == null || foundUser.isEmpty()){
log.warn("deleteAnswerById - ID '" + id + "' has invalid data! Needs clean-up!");
Statistics.addOtherApiHit("deleteAnswerById ERROR");
Statistics.addOtherApiTime("deleteAnswerById ERROR", tic);
msg = JSON.make("result", "fail", "error", "ID has invalid data!");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}else if (foundUser.equals(userId)){
//clear for deletion - every user can delete his own answers
JSONObject result = db.deleteAnswerById(id, userId);
msg = JSON.make("result", "success", "info", result);
logDB(request, "deleted answer with id: " + id);
Statistics.addOtherApiHit("deleteAnswerById");
Statistics.addOtherApiTime("deleteAnswerById", tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}else{
//check user role
requireSeniorDeveloperRole(request);
//clear for deletion
JSONObject result = db.deleteAnswerById(id, foundUser);
msg = JSON.make("result", "success", "info", result);
logDB(request, "deleted answer with id: " + id);
Statistics.addOtherApiHit("deleteAnswerById");
Statistics.addOtherApiTime("deleteAnswerById", tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
}
static String modifyAnswer(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
authenticate(params, request, response);
requireTranslatorRole(request);
String id = getOrFail("id", params);
Language language = getLanguageOrFail(params);
String oldText = getOrFail("oldText", params);
String newText = getOrFail("newText", params);
getDatabase().modifyAnswer(id, language, oldText, newText);
logDB(request, "modify answer", language, oldText, newText, getDatabase());
return SparkJavaFw.sendSuccessResponse(request, response);
}
static String voteAnswer(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
Account userAccount = authenticate(params, request, response);
String docId = getOrFail("id", params);
// id would be enough, but just to be sure (and to be similar to voteSentence), we also check text and language:
Language votedLanguage = getLanguageOrFail(params);
String votedSentence = getOrFail("text", params);
TeachDatabase db = getDatabase();
Vote vote = Vote.valueOf(getOrFail("vote", params));
db.voteAnswer(docId, votedSentence, votedLanguage, vote, userAccount);
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "success");
logDB(request, "voted answer '" + votedSentence + "' as " + vote, votedLanguage, db);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//--FEEDBACK--
static String feedback(Request request, Response response) {
//statistics a
long tic = System.currentTimeMillis();
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
//get account
Account account = authenticate(params, request, response);
//TODO: add role check to see if user is allowed to retrieve feedback or write only
//get action (submit, retrieve)
String action = params.getString("action");
//no action
if (action == null || action.trim().isEmpty()){
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "action attribute missing or invalid! Use e.g. submit, retrieve.");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//get default database for feedback data
TeachDatabase db = getDatabase();
//submit feedback
if (action.trim().equals("submit")){
//info (like, report, deprecated: dislike)
String info = params.getString("info");
String dataStr = params.getString("data");
JSONObject data = JSON.parseString(dataStr);
//no info?
if (info == null || info.trim().isEmpty()){
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "info attribute missing or invalid! Use e.g. like, report.");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//no data?
if (data == null || data.isEmpty()){
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "data attribute missing or invalid!");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}else{
//add user id, time stamp and info
JSON.add(data, "user", account.getUserID());
JSON.add(data, "timestamp", System.currentTimeMillis());
if (!data.containsKey("info")){
JSON.add(data, "info", info);
}
}
//make id from cleaned text
String itemId = JSON.getString(data, "text");
if (itemId.isEmpty()){
//make id of user and time-stamp
itemId = account.getUserID() + "_" + System.currentTimeMillis();
}else{
itemId = Converters.makeIDfromSentence(itemId);
}
//check
if (itemId.isEmpty()){
String msg = "{\"result\":\"fail\",\"error\":\"no valid data\"}";
return SparkJavaFw.returnResult(request, response, msg, 200);
}
if (info.equals("like")){
Feedback.saveAsync(db, Feedback.INDEX, Feedback.TYPE_LIKE, itemId, data); //set and forget ^^
//System.out.println("DB SENT: " + Feedback.INDEX + "/" + Feedback.TYPE_LIKE + "/" + item_id + " - Data: " + data.toJSONString()); //debug
}
else if (info.equals("dislike")){
Feedback.saveAsync(db, Feedback.INDEX, Feedback.TYPE_DISLIKE, itemId, data); //set and forget ^^
//System.out.println("DB SENT: " + Feedback.INDEX + "/" + Feedback.TYPE_DISLIKE + "/" + item_id + " - Data: " + data.toJSONString()); //debug
}
else if (info.equals("report")){
Feedback.saveAsync(db, Feedback.INDEX, Feedback.TYPE_REPORT, itemId, data); //set and forget ^^
//System.out.println("DB SENT: " + Feedback.INDEX + "/" + Feedback.TYPE_REPORT + "/" + item_id + " - Data: " + data.toJSONString()); //debug
}
else if (info.equals("nps")){
//we can also use "lang" and "client" parameters to get more details
log.info("NPS - " + "id: " + account.getUserID() + " - score: " + data.get("score") + " - comment: " + data.get("comment") + " - TS: " + System.currentTimeMillis());
}
else{
//invalid info
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "info attribute missing or invalid! Use e.g. like, report.");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
			//if we get here everything has been submitted :-) It might not have been successful though, as we don't wait for feedback
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "processing");
JSON.add(msg, "duration_ms", Timer.toc(tic));
//statistics b
Statistics.add_feedback_hit();
Statistics.save_feedback_total_time(tic);
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//get feedback
else if (action.trim().equals("retrieve")){
//some parameters to match
String language = params.getString("language");
String user = params.getString("user");
String info = params.getString("info"); //like, report
			int from = Integer.parseInt(getOrFail("from", params));
			int size = Integer.parseInt(getOrFail("size", params));
//no info?
if (info == null || info.trim().isEmpty()){
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "info attribute missing or invalid! Use e.g. like, dislike, report.");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
//build match filters
HashMap<String, Object> filters = new HashMap<>();
if (language != null) filters.put("language", language);
if (user != null) filters.put("user", user);
JSONArray feedback;
if (info.equals("report")){
feedback = db.getReportedFeedback(filters, from, size);
}else if (info.equals("like")){
feedback = db.getLikedFeedback(filters, from, size);
}else{
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "info attribute missing or invalid! Use e.g. like, dislike, report.");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
JSONObject result = new JSONObject();
JSON.add(result, "result", feedback);
//statistics b
Statistics.add_KDB_read_hit();
Statistics.save_KDB_read_total_time(tic);
return SparkJavaFw.returnResult(request, response, result.toJSONString(), 200);
}
//invalid request
else{
//no valid action
JSONObject msg = new JSONObject();
JSON.add(msg, "result", "fail");
JSON.add(msg, "error", "action attributes missing or invalid! Use e.g. submit, retrieve.");
return SparkJavaFw.returnResult(request, response, msg.toJSONString(), 200);
}
}
//------------------------------------------------------------------------------
private static void logDB(Request request, String message) {
logDB(request, message, null, null, null, getDatabase());
}
private static void logDB(Request request, String message, Language language, TeachDatabase db) {
logDB(request, message, language, null, null, db);
}
private static void logDB(Request request, String message, Language language, String oldValue, String newValue, TeachDatabase db) {
Account userAccount = request.attribute(Defaults.ACCOUNT_ATTR);
if (Config.useDatabaseLog){
new ElasticsearchLogger(db).log(userAccount, message, language, oldValue, newValue);
}else{
String logInfo = "EVENT-LOG"
+ " - id: " + userAccount.getUserID()
+ " - msg: " + message;
if (language != null){
logInfo += (" - lang: " + language);
}
if (oldValue != null){
logInfo += (" - old: " + oldValue);
}
if (newValue != null){
logInfo += (" - new: " + newValue);
}
log.info(logInfo);
}
}
static String getLogs(Request request, Response response) {
//prepare parameters
RequestParameters params = new RequestGetOrFormParameters(request);
authenticate(params, request, response);
requireSeniorDeveloperRole(request);
String from = getOrFail("from", params);
String size = getOrFail("size", params);
JSONObject json = getDatabase().getLogs(Config.DB_LOGS, ElasticsearchLogger.LOGS_TYPE, Integer.parseInt(from), Integer.parseInt(size));
return SparkJavaFw.returnResult(request, response, json.toJSONString(), 200);
}
//------------------------------------------------------------------------------
/*
private static Set<String> getSetOrFail(String paramName, Request request) {
String[] values = request.queryParamsValues(paramName);
if (values == null) {
throw new RuntimeException("Missing '" + paramName + "' parameter");
}
Set<String> result = new HashSet<>();
for (String value : values) {
validate(paramName, value);
result.add(value);
}
return result;
}
*/
private static String getOrFail(String paramName, RequestParameters params) {
String val = params.getString(paramName);
validate(paramName, val);
return val;
}
private static String getOrDefault(String paramName, String defaultValue, RequestParameters params) {
String val = params.getString(paramName);
if (val != null) {
return val;
} else {
return defaultValue;
}
}
private static boolean getOrDefault(String paramName, boolean defaultBoolean, RequestParameters params) {
String val = params.getString(paramName);
if (val != null) {
return Boolean.parseBoolean(val) || val.equals("on"); // 'on' is what jquery gives for val() for turned-on checkboxes
} else {
return defaultBoolean;
}
}
private static void validate(String paramName, String val) {
if (val == null) {
throw new RuntimeException("Missing '" + paramName + "' parameter");
}
if (val.trim().isEmpty()) {
throw new RuntimeException("Parameter '" + paramName + "' is empty or whitespace only");
}
}
private static Language getLanguageOrFail(RequestParameters params) {
String code = getOrFail("language", params);
return Language.forValue(code);
}
private static void requireTranslatorRole(Request request) {
requireRole(request, Role.translator);
}
/*private static void requireTesterRole(Request request) {
requireRole(request, Role.tester);
}*/
private static void requireDeveloperRole(Request request) {
requireRole(request, Role.developer);
}
private static void requireSeniorDeveloperRole(Request request) {
requireRole(request, Role.seniordev);
}
private static void requireRole(Request request, Role role) {
String requiredRole = role.name();
Account userAccount = request.attribute(Defaults.ACCOUNT_ATTR);
if (!userAccount.hasRole(requiredRole)) {
//TODO: now that we can assign roles in the JUnit test we could remove the authRequired and add roles to all tests as needed (and can check against missing roles as well)
throw new RuntimeException("User '" + userAccount.getUserID() + "' doesn't have the required role '" + requiredRole + "'. User's roles: " + userAccount.getUserRoles());
}
}
}
|
import { Component, OnDestroy } from '@angular/core';
import { connectState, ConnectState } from 'src';
import { interval } from 'rxjs';
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.scss']
})
@ConnectState()
export class AppComponent implements OnDestroy {
state = connectState(this, {
counter: interval(1000),
counter2: interval(500),
counter3: interval(250),
});
ngOnDestroy() {
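		// Intentionally left empty: presumably the @ConnectState() decorator needs an
		// existing ngOnDestroy hook to patch so it can tear down the connectState
		// subscriptions (an assumption about the 'src' library, not confirmed here).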
}
}
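// Template sketch (assumed contents of app.component.html, which is not part of
// this file): connectState presumably exposes the latest emission of each
// observable as a plain property on `state`, so the template can read them
// directly instead of chaining async pipes.
//
//   <p>{{ state.counter }} / {{ state.counter2 }} / {{ state.counter3 }}</p>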
|
import '@brightspace-ui-labs/grade-result/d2l-grade-result.js';
import './consistent-evaluation-right-panel-block';
import { Grade, GradeType } from '@brightspace-ui-labs/grade-result/src/controller/Grade';
import { html, LitElement } from 'lit-element';
import { Debouncer } from '@polymer/polymer/lib/utils/debounce.js';
import { ifDefined } from 'lit-html/directives/if-defined.js';
import { LocalizeConsistentEvaluation } from '../../lang/localize-consistent-evaluation.js';
import { timeOut } from '@polymer/polymer/lib/utils/async.js';
export class ConsistentEvaluationGradeResult extends LocalizeConsistentEvaluation(LitElement) {
static get properties() {
return {
grade: {
attribute: false,
type: Object
},
gradeItemInfo: {
attribute: false,
type: Object
},
customManualOverrideText: {
attribute: 'custom-manual-override-text',
type: String
},
customManualOverrideClearText: {
				attribute: 'custom-manual-override-clear-text',
type: String
},
readOnly: {
attribute: 'read-only',
type: Boolean
},
hideTitle: {
attribute: 'hide-title',
type: Boolean
},
_manuallyOverriddenGrade: { type: Object },
_hasUnsavedChanged: { type: Boolean },
_gradeButtonTooltip: { type: String },
_reportsButtonTooltip: { type: String },
_isGradeAutoCompleted: { type: Boolean },
_gradeSummaryInfo: { type: String }
};
}
constructor() {
super();
this.grade = new Grade(GradeType.Number, 0, 0, null, null, null);
this.gradeItemInfo = {};
this.customManualOverrideText = undefined;
this.customManualOverrideClearText = undefined;
this.readOnly = false;
this.hideTitle = false;
this._gradeButtonUrl = '';
this._reportsButtonUrl = '';
this._debounceJobs = {};
this.flush = this.flush.bind(this);
		// hard-coded as disabled because manual override is not yet supported by the API
this._manuallyOverriddenGrade = undefined;
this._hasUnsavedChanged = false;
this._gradeButtonTooltip = undefined;
this._reportsButtonTooltip = undefined;
this._isGradeAutoCompleted = false;
}
connectedCallback() {
super.connectedCallback();
window.addEventListener('d2l-flush', this.flush);
}
disconnectedCallback() {
window.removeEventListener('d2l-flush', this.flush);
super.disconnectedCallback();
}
flush() {
if (this._debounceJobs.grade && this._debounceJobs.grade.isActive()) {
this._debounceJobs.grade.flush();
}
}
onGradeChanged(e) {
const score = e.detail.value;
this._debounceJobs.grade = Debouncer.debounce(
this._debounceJobs.grade,
timeOut.after(800),
() => this._emitGradeChangeEvent(score)
);
}
_emitGradeChangeEvent(score) {
this.grade.setScore(score);
this.dispatchEvent(new CustomEvent('on-d2l-consistent-eval-grade-changed', {
composed: true,
bubbles: true,
detail: {
grade: this.grade
}
}));
}
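	// Consumer sketch (hypothetical host page code, not part of this component):
	// the event is dispatched with composed and bubbles set, so it can be
	// observed all the way up at the document.
	//
	//   document.addEventListener('on-d2l-consistent-eval-grade-changed', e => {
	//     console.log('debounced grade:', e.detail.grade.getScore());
	//   });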
_setGradeSummaryInfo(gradeType, score, scoreOutOf) {
let summary = '';
if (score === null || score === '') {
summary = this.localize('noGradeSummary');
} else if (gradeType === GradeType.Letter) {
summary = score;
} else {
summary = this.localize('gradeSummary', { grade: score, outOf: scoreOutOf });
}
this._gradeSummaryInfo = summary;
}
render() {
const gradeType = this.grade.getScoreType();
let score = this.grade.getScore();
const scoreOutOf = this.grade.getScoreOutOf();
// handle when grade is not yet initialized on the server
if (gradeType === GradeType.Number && score === null) {
score = 0;
} else if (gradeType === GradeType.Letter && score === null) {
score = '';
}
this._setGradeSummaryInfo(gradeType, score, scoreOutOf);
return html`
<d2l-consistent-evaluation-right-panel-block
supportingInfo=${ifDefined(this._gradeSummaryInfo)}
title="${this.localize('overallGrade')}">
<d2l-labs-d2l-grade-result-presentational
.gradeType=${gradeType}
scoreNumerator=${score}
scoreDenominator=${scoreOutOf}
.letterGradeOptions=${scoreOutOf}
selectedLetterGrade=${score}
.customManualOverrideText=${this.customManualOverrideText}
.customManualOverrideClearText=${this.customManualOverrideClearText}
gradeButtonTooltip=${this.localize('attachedGradeItem', 'gradeItemName', this.gradeItemInfo && this.gradeItemInfo.gradeItemName)}
reportsButtonTooltip=${this.localize('statistics')}
?includeGradeButton=${this.gradeItemInfo && this.gradeItemInfo.evaluationUrl}
?includeReportsButton=${this.gradeItemInfo && this.gradeItemInfo.statsUrl}
?isGradeAutoCompleted=${this._isGradeAutoCompleted}
?isManualOverrideActive=${this._manuallyOverriddenGrade !== undefined}
?readOnly=${this.readOnly}
?hideTitle=${this.hideTitle}
@d2l-grade-result-reports-button-click=${this._openGradeStatisticsDialog}
@d2l-grade-result-grade-button-click=${this._openGradeEvaluationDialog}
@d2l-grade-result-grade-change=${this.onGradeChanged}
@d2l-grade-result-letter-score-selected=${this.onGradeChanged}
@d2l-grade-result-manual-override-click=${this._handleManualOverrideClick}
@d2l-grade-result-manual-override-clear-click=${this._handleManualOverrideClearClick}
></d2l-labs-d2l-grade-result-presentational>
</d2l-consistent-evaluation-right-panel-block>
`;
}
_openGradeEvaluationDialog() {
const dialogUrl = this.gradeItemInfo && this.gradeItemInfo.evaluationUrl;
if (!dialogUrl) {
			console.error('Consistent-Eval: Expected grade item evaluation dialog URL, but none found');
return;
}
const location = new D2L.LP.Web.Http.UrlLocation(dialogUrl);
const buttons = [
{
Key: 'save',
Text: this.localize('saveBtn'),
ResponseType: 1, // D2L.Dialog.ResponseType.Positive
IsPrimary: true,
IsEnabled: true
},
{
Text: this.localize('cancelBtn'),
ResponseType: 2, // D2L.Dialog.ResponseType.Negative
IsPrimary: false,
IsEnabled: true
}
];
D2L.LP.Web.UI.Legacy.MasterPages.Dialog.Open(
/* opener: */ document.body,
/* location: */ location,
/* srcCallback: */ 'SrcCallback',
/* resizeCallback: */ '',
/* responseDataKey: */ 'result',
/* width: */ 1920,
/* height: */ 1080,
/* closeText: */ this.localize('closeBtn'),
/* buttons: */ buttons,
/* forceTriggerOnCancel: */ false
);
}
_openGradeStatisticsDialog() {
const dialogUrl = this.gradeItemInfo && this.gradeItemInfo.statsUrl;
if (!dialogUrl) {
console.error('Consistent-Eval: Expected grade item statistics dialog URL, but none found');
return;
}
const location = new D2L.LP.Web.Http.UrlLocation(dialogUrl);
const buttons = [
{
Key: 'close',
Text: this.localize('closeBtn'),
ResponseType: 1, // D2L.Dialog.ResponseType.Positive
IsPrimary: true,
IsEnabled: true
}
];
D2L.LP.Web.UI.Legacy.MasterPages.Dialog.Open(
/* opener: */ document.body,
/* location: */ location,
/* srcCallback: */ 'SrcCallback',
/* resizeCallback: */ '',
/* responseDataKey: */ 'result',
/* width: */ 1920,
/* height: */ 1080,
/* closeText: */ this.localize('closeBtn'),
/* buttons: */ buttons,
/* forceTriggerOnCancel: */ false
);
}
}
customElements.define('d2l-consistent-evaluation-grade-result', ConsistentEvaluationGradeResult);
|
using System;
using System.Collections.Generic;
namespace UserAclDesc
{
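	/// <summary>
	/// Minimal in-memory role-based ACL helper: maps role names to sets of
	/// permission strings. Not thread-safe; synchronize externally for
	/// concurrent use.
	/// </summary>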
public class Helper
{
private Dictionary<string, HashSet<string>> rolePermissions;
public Helper()
{
rolePermissions = new Dictionary<string, HashSet<string>>();
}
public void AddRole(string roleName)
{
if (!rolePermissions.ContainsKey(roleName))
{
rolePermissions.Add(roleName, new HashSet<string>());
}
else
{
throw new ArgumentException("Role already exists");
}
}
public void RemoveRole(string roleName)
{
if (rolePermissions.ContainsKey(roleName))
{
rolePermissions.Remove(roleName);
}
else
{
throw new ArgumentException("Role does not exist");
}
}
public void AssignPermission(string roleName, string permission)
{
if (rolePermissions.ContainsKey(roleName))
{
rolePermissions[roleName].Add(permission);
}
else
{
throw new ArgumentException("Role does not exist");
}
}
public void RemovePermission(string roleName, string permission)
{
if (rolePermissions.ContainsKey(roleName))
{
rolePermissions[roleName].Remove(permission);
}
else
{
throw new ArgumentException("Role does not exist");
}
}
public bool HasPermission(string roleName, string permission)
{
if (rolePermissions.ContainsKey(roleName))
{
return rolePermissions[roleName].Contains(permission);
}
else
{
throw new ArgumentException("Role does not exist");
}
}
}
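	// Minimal usage sketch (hypothetical driver, not part of the original class):
	internal static class HelperDemo
	{
		internal static void Run()
		{
			var acl = new Helper();
			acl.AddRole("editor");
			acl.AssignPermission("editor", "documents.write");
			bool canWrite = acl.HasPermission("editor", "documents.write"); // true
			acl.RemovePermission("editor", "documents.write");
			acl.RemoveRole("editor"); // removing it a second time would throw ArgumentException
		}
	}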
}
|
<reponame>lacendarko/bluetooth
package bluetooth
import (
"errors"
"time"
"github.com/godbus/dbus/v5"
)
var (
errScanning = errors.New("bluetooth: a scan is already in progress")
errNotScanning = errors.New("bluetooth: there is no scan in progress")
errAdvertisementPacketTooBig = errors.New("bluetooth: advertisement packet overflows")
)
// MACAddress contains a Bluetooth address which is a MAC address.
type MACAddress struct {
// MAC address of the Bluetooth device.
MAC
isRandom bool
}
// IsRandom if the address is randomly created.
func (mac MACAddress) IsRandom() bool {
return mac.isRandom
}
// SetRandom sets whether the address is random.
// Note: pointer receiver, so the change is visible to the caller
// (a value receiver would only mutate a copy).
func (mac *MACAddress) SetRandom(val bool) {
	mac.isRandom = val
}
// Set parses val and stores it as the MAC address; invalid input is ignored.
func (mac *MACAddress) Set(val string) {
	m, err := ParseMAC(val)
	if err != nil {
		return
	}
	mac.MAC = m
}
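// Usage sketch (hypothetical address value), showing the pointer-receiver
// setters in use:
//
//	var a MACAddress
//	a.Set("E4:5F:01:2A:3B:4C")
//	a.SetRandom(false)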
// AdvertisementOptions configures an advertisement instance. More options may
// be added over time.
type AdvertisementOptions struct {
// The (complete) local name that will be advertised. Optional, omitted if
// this is a zero-length string.
LocalName string
// ServiceUUIDs are the services (16-bit or 128-bit) that are broadcast as
// part of the advertisement packet, in data types such as "complete list of
// 128-bit UUIDs".
ServiceUUIDs []UUID
// Interval in BLE-specific units. Create an interval by using NewDuration.
Interval Duration
}
// Duration is the unit of time used in BLE, in 0.625ms units (the conversion
// below divides by 625µs). This unit of time is used throughout the BLE stack.
type Duration uint16
// NewDuration returns a new Duration, in units of 0.625ms. It is used both for
// advertisement intervals and for connection parameters.
func NewDuration(interval time.Duration) Duration {
	// Convert an interval to units of 0.625ms.
return Duration(uint64(interval / (625 * time.Microsecond)))
}
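// For example, a 100ms advertising interval is 100ms / 0.625ms = 160 units
// (a worked check of the conversion above, not code from the original file):
//
//	d := NewDuration(100 * time.Millisecond) // d == Duration(160)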
// Connection is a numeric identifier that indicates a connection handle.
type Connection uint16
// Addresser contains a Bluetooth address, which is a MAC address plus some extra
// information.
type Addresser interface {
// String of the address
String() string
// Set the address
Set(val string)
// Is this address a random address?
// Bluetooth addresses are roughly split in two kinds: public
// (IEEE-assigned) addresses and random (not IEEE assigned) addresses.
// "Random" here doesn't mean it is exactly random but at least it looks
// random. Sometimes, it contains a hash.
// For more information:
// https://www.novelbits.io/bluetooth-address-privacy-ble/
	// SetRandom marks the address as random (or public).
SetRandom(bool)
IsRandom() bool
}
// ScanResult contains information from when an advertisement packet was
// received. It is passed as a parameter to the callback of the Scan method.
type ScanResult struct {
// Bluetooth address of the scanned device.
Address Addresser
// RSSI the last time a packet from this device has been received.
RSSI int16
// The data obtained from the advertisement data, which may contain many
// different properties.
// Warning: this data may only stay valid until the next event arrives. If
// you need any of the fields to stay alive until after the callback
// returns, copy them.
AdvertisementPayload
}
// AdvertisementPayload contains information obtained during a scan (see
// ScanResult). It is provided as an interface as there are two possible
// implementations: an implementation that works with raw data (usually on
// low-level BLE stacks) and an implementation that works with structured data.
type AdvertisementPayload interface {
// LocalName is the (complete or shortened) local name of the device.
// Please note that many devices do not broadcast a local name, but may
// broadcast other data (e.g. manufacturer data or service UUIDs) with which
// they may be identified.
LocalName() string
// HasServiceUUID returns true whether the given UUID is present in the
// advertisement payload as a Service Class UUID. It checks both 16-bit
// UUIDs and 128-bit UUIDs.
HasServiceUUID(UUID) bool
// Bytes returns the raw advertisement packet, if available. It returns nil
// if this data is not available.
Bytes() []byte
// GetManufacturerData returns raw packet
GetManufacturerData(key uint16) []byte
// GetServiceData returns raw packet
GetServiceData(key string) ([]byte, map[string]interface{})
}
// AdvertisementFields contains advertisement fields in structured form.
type AdvertisementFields struct {
// The LocalName part of the advertisement (either the complete local name
// or the shortened local name).
LocalName string
// ServiceUUIDs are the services (16-bit or 128-bit) that are broadcast as
// part of the advertisement packet, in data types such as "complete list of
// 128-bit UUIDs".
ServiceUUIDs []UUID
// ManufacturerData package
ManufacturerData map[uint16]interface{}
// ServiceData package
ServiceData map[string]interface{}
}
// advertisementFields wraps AdvertisementFields to implement the
// AdvertisementPayload interface. The methods to implement the interface (such
// as LocalName) cannot be implemented on AdvertisementFields because they would
// conflict with field names.
type advertisementFields struct {
AdvertisementFields
}
// LocalName returns the underlying LocalName field.
func (p *advertisementFields) LocalName() string {
return p.AdvertisementFields.LocalName
}
// HasServiceUUID returns true whether the given UUID is present in the
// advertisement payload as a Service Class UUID.
func (p *advertisementFields) HasServiceUUID(uuid UUID) bool {
for _, u := range p.AdvertisementFields.ServiceUUIDs {
if u == uuid {
return true
}
}
return false
}
// GetManufacturerData returns the raw manufacturer-data payload for the given
// company ID, or nil if the key is absent.
func (p *advertisementFields) GetManufacturerData(key uint16) []byte {
if p.ManufacturerData[key] != nil {
temp := p.ManufacturerData[key].(dbus.Variant)
return temp.Value().([]byte)
}
return nil
}
// GetServiceData returns the raw service-data payload for the given UUID key
// (or nil), plus the full service-data map.
func (p *advertisementFields) GetServiceData(key string) ([]byte, map[string]interface{}) {
if p.ServiceData[key] != nil {
temp := p.ServiceData[key].(dbus.Variant)
return temp.Value().([]byte), p.ServiceData
}
return nil, p.ServiceData
}
// Bytes returns nil, as structured advertisement data does not have the
// original raw advertisement data available.
func (p *advertisementFields) Bytes() []byte {
return nil
}
// rawAdvertisementPayload encapsulates a raw advertisement packet. Methods to
// get the data (such as LocalName()) will parse just the needed field. Scanning
// the data should be fast as most advertisement packets only have a very small
// (3 or so) amount of fields.
type rawAdvertisementPayload struct {
data [31]byte
len uint8
}
// Bytes returns the raw advertisement packet as a byte slice.
func (buf *rawAdvertisementPayload) Bytes() []byte {
return buf.data[:buf.len]
}
// findField returns the data of a specific field in the advertisement packet.
//
// See this list of field types:
// https://www.bluetooth.com/specifications/assigned-numbers/generic-access-profile/
func (buf *rawAdvertisementPayload) findField(fieldType byte) []byte {
data := buf.Bytes()
for len(data) >= 2 {
fieldLength := data[0]
if int(fieldLength)+1 > len(data) {
// Invalid field length.
return nil
}
if fieldType == data[1] {
return data[2 : fieldLength+1]
}
data = data[fieldLength+1:]
}
return nil
}
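// Annotated example payload for the parser above (hypothetical bytes):
//
//	02 01 06                // length=2, type=0x01 (Flags), value 0x06
//	05 09 48 65 6C 70       // length=5, type=0x09 (Complete Local Name), "Help"
//
// findField(0x09) on this packet returns the four name bytes ("Help").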
// LocalName returns the local name (complete or shortened) in the advertisement
// payload.
func (buf *rawAdvertisementPayload) LocalName() string {
b := buf.findField(9) // Complete Local Name
if len(b) != 0 {
return string(b)
}
b = buf.findField(8) // Shortened Local Name
if len(b) != 0 {
return string(b)
}
return ""
}
// HasServiceUUID returns true whether the given UUID is present in the
// advertisement payload as a Service Class UUID. It checks both 16-bit UUIDs
// and 128-bit UUIDs.
func (buf *rawAdvertisementPayload) HasServiceUUID(uuid UUID) bool {
if uuid.Is16Bit() {
b := buf.findField(0x03) // Complete List of 16-bit Service Class UUIDs
if len(b) == 0 {
b = buf.findField(0x02) // Incomplete List of 16-bit Service Class UUIDs
}
uuid := uuid.Get16Bit()
for i := 0; i < len(b)/2; i++ {
foundUUID := uint16(b[i*2]) | (uint16(b[i*2+1]) << 8)
if uuid == foundUUID {
return true
}
}
return false
} else {
b := buf.findField(0x07) // Complete List of 128-bit Service Class UUIDs
if len(b) == 0 {
b = buf.findField(0x06) // Incomplete List of 128-bit Service Class UUIDs
}
uuidBuf1 := uuid.Bytes()
for i := 0; i < len(b)/16; i++ {
uuidBuf2 := b[i*16 : i*16+16]
match := true
for i, c := range uuidBuf1 {
if c != uuidBuf2[i] {
match = false
break
}
}
if match {
return true
}
}
return false
}
}
// reset restores this buffer to the original state.
func (buf *rawAdvertisementPayload) reset() {
// The data is not reset (only the length), because with a zero length the
// data is undefined.
buf.len = 0
}
// addFromOptions constructs a new advertisement payload (assumed to be empty
// before the call) from the advertisement options. It returns true if it fits,
// false otherwise.
func (buf *rawAdvertisementPayload) addFromOptions(options AdvertisementOptions) (ok bool) {
buf.addFlags(0x06)
if options.LocalName != "" {
if !buf.addCompleteLocalName(options.LocalName) {
return false
}
}
// TODO: if there are multiple 16-bit UUIDs, they should be listed in
// one field.
// This is not possible for 128-bit service UUIDs (at least not in
// legacy advertising) because of the 31-byte advertisement packet
// limit.
for _, uuid := range options.ServiceUUIDs {
if !buf.addServiceUUID(uuid) {
return false
}
}
return true
}
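// Sketch of how the field builders compose (hypothetical caller, not part of
// this file):
//
//	var buf rawAdvertisementPayload
//	ok := buf.addFromOptions(AdvertisementOptions{LocalName: "sensor"})
//	// ok == false would mean the 31-byte legacy advertising limit was exceeded.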
// addFlags adds a flags field to the advertisement buffer. It returns true on
// success (the flags can be added) and false on failure.
func (buf *rawAdvertisementPayload) addFlags(flags byte) (ok bool) {
if int(buf.len)+3 > len(buf.data) {
return false // flags don't fit
}
buf.data[buf.len] = 2 // length of field (including type)
buf.data[buf.len+1] = 0x01 // type, 0x01 means Flags
buf.data[buf.len+2] = flags // the flags
buf.len += 3
return true
}
// addCompleteLocalName adds the Complete Local Name field to the advertisement
// buffer. It returns true on success (the name fits) and false on failure.
func (buf *rawAdvertisementPayload) addCompleteLocalName(name string) (ok bool) {
if int(buf.len)+len(name)+2 > len(buf.data) {
return false // name doesn't fit
}
buf.data[buf.len] = byte(len(name) + 1) // length of field (including type)
buf.data[buf.len+1] = 9 // type, 0x09 means Complete Local name
copy(buf.data[buf.len+2:], name) // copy the name into the buffer
buf.len += byte(len(name) + 2)
return true
}
// addServiceUUID adds a Service Class UUID (16-bit or 128-bit). It has
// currently only been designed for adding single UUIDs: multiple UUIDs are
// stored in separate fields without joining them together in one field.
func (buf *rawAdvertisementPayload) addServiceUUID(uuid UUID) (ok bool) {
// Don't bother with 32-bit UUID support, it doesn't seem to be used in
// practice.
if uuid.Is16Bit() {
if int(buf.len)+4 > len(buf.data) {
return false // UUID doesn't fit.
}
shortUUID := uuid.Get16Bit()
buf.data[buf.len+0] = 3 // length of field, including type
buf.data[buf.len+1] = 0x03 // type, 0x03 means "Complete List of 16-bit Service Class UUIDs"
buf.data[buf.len+2] = byte(shortUUID)
buf.data[buf.len+3] = byte(shortUUID >> 8)
buf.len += 4
return true
} else {
if int(buf.len)+18 > len(buf.data) {
return false // UUID doesn't fit.
}
buf.data[buf.len+0] = 17 // length of field, including type
buf.data[buf.len+1] = 0x07 // type, 0x07 means "Complete List of 128-bit Service Class UUIDs"
rawUUID := uuid.Bytes()
copy(buf.data[buf.len+2:], rawUUID[:])
buf.len += 18
return true
}
}
// ConnectionParams are used when connecting to a peripherals.
type ConnectionParams struct {
// The timeout for the connection attempt. Not used during the rest of the
// connection. If no duration is specified, a default timeout will be used.
ConnectionTimeout Duration
// Minimum and maximum connection interval. The shorter the interval, the
// faster data can travel between both devices but also the more power they
// will draw. If no intervals are specified, a default connection interval
// will be used.
MinInterval Duration
MaxInterval Duration
}
|
#!/bin/bash
# ========== Experiment Seq. Idx. 1130 / 56.3.1 / N. 56/2/1 - _S=56.3.1 D1_N=56 a=1 b=-1 c=-1 d=-1 e=1 f=1 D3_N=2 g=-1 h=1 i=-1 D4_N=1 j=1 ==========
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 1130 / 56.3.1 / N. 56/2/1 - _S=56.3.1 D1_N=56 a=1 b=-1 c=-1 d=-1 e=1 f=1 D3_N=2 g=-1 h=1 i=-1 D4_N=1 j=1 ==========\n\n'
if [[ "Yes" == "No" ]]; then
echo 'FATAL: This treatment did not include an SVM layer.'>&2
echo ' Something very wrong happened!'>&2
exit 161
fi
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
DATASET_DIR="$JBHI_DIR/data/challenge-val-seg.305.tfr"
MODEL_DIR="$JBHI_DIR/models/deep.56"
SVM_DIR="$JBHI_DIR/svm-models"
SVM_PREFIX="$SVM_DIR/deep.56.layer.2.svm"
SVM_PATH="$SVM_PREFIX.pkl"
FEATURES_DIR="$JBHI_DIR/features"
TEST_FEATURES_PREFIX="$FEATURES_DIR/deep.56.layer.2.test.1.index.2899.test"
TEST_FEATURES_PATH="$TEST_FEATURES_PREFIX.feats.pkl"
RESULTS_DIR="$JBHI_DIR/results"
RESULTS_PREFIX="$RESULTS_DIR/deep.56.layer.2.test.1.index.2899.svm"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODEL_DIR/finish.txt:$SVM_PREFIX.finish.txt"
START_PATH="$RESULTS_PREFIX.start.txt"
FINISH_PATH="$RESULTS_PREFIX.finish.txt"
LOCK_PATH="$RESULTS_PREFIX.running.lock"
LAST_OUTPUT="$RESULTS_PATH"
# ...creates mid-way checkpoint after the expensive test features extraction
SEMIFINISH_PATH="$TEST_FEATURES_PREFIX.finish.txt"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$FEATURES_DIR"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables were initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
echo 'FATAL: there are uncommitted changes in your git sources file' >&2
echo ' for reproducibility, experiments only run on committed changes' >&2
echo >&2
echo ' Git status returned:'>&2
echo "$GIT_STATUS" >&2
exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
if [[ -e "$FINISH_PATH" ]]; then
echo 'INFO: this experiment has already finished' >&2
exit 163
fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
	IFS=':' read -r -a tokens_of_input <<< "$LIST_OF_INPUTS"
	input_missing=No
	for input_to_check in "${tokens_of_input[@]}"; do
if [[ ! -e "$input_to_check" ]]; then
echo "ERROR: input $input_to_check missing for this experiment" >&2
input_missing=Yes
fi
done
if [[ "$input_missing" != No ]]; then
exit 164
fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
function finish_trap {
if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
rmdir "$LOCK_PATH" &> /dev/null
fi
if [[ "$FINISH_STATUS" == "165" ]]; then
echo 'WARNING: experiment discontinued because other process holds its lock' >&2
else
if [[ "$FINISH_STATUS" == "160" ]]; then
echo 'INFO: experiment finished successfully' >&2
else
[[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
echo 'ERROR: an error occurred while executing the experiment' >&2
fi
fi
exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
LOCK_SUCCESS=Yes
else
echo 'WARNING: this experiment is already being executed elsewhere' >&2
FINISH_STATUS="165"
exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
if [[ -e "$START_PATH" ]]; then
echo 'WARNING: this experiment is being restarted' >&2
STARTED_BEFORE=Yes
fi
#...marks start
date -u >> "$START_PATH"
echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
#...gets closest checkpoint file
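#   (the ls/sed pipeline lists all available checkpoint step numbers; the awk
#    program then keeps the step whose distance to the target t=20000 is smallest)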
MODEL_CHECKPOINT=$(ls "$MODEL_DIR/"model.ckpt-*.index | \
sed 's/.*ckpt-\([0-9]*\)\..*/\1/' | \
sort -n | \
awk -v c=1 -v t=20000 \
'NR==1{d=$c-t;d=d<0?-d:d;v=$c;next}{m=$c-t;m=m<0?-m:m}m<d{d=m;v=$c}END{print v}')
MODEL_PATH="$MODEL_DIR/model.ckpt-$MODEL_CHECKPOINT"
echo "$MODEL_PATH" >> "$START_PATH"
if [[ ! -f "$SEMIFINISH_PATH" ]]; then
#...performs preliminary feature extraction
echo Extracting SVM test features with "$MODEL_PATH"
python \
"$SOURCES_GIT_DIR/predict_image_classifier.py" \
--model_name="inception_v4_seg" \
--checkpoint_path="$MODEL_PATH" \
--dataset_name=skin_lesions \
--task_name=label \
--dataset_split_name=test \
--preprocessing_name=dermatologic \
--aggressive_augmentation="False" \
--add_rotations="False" \
--minimum_area_to_crop="0.05" \
--normalize_per_image="1" \
--batch_size=1 \
--id_field_name=id \
--pool_features=avg \
--extract_features \
--output_format=pickle \
--add_scores_to_features=none \
--eval_replicas="1" \
--output_file="$TEST_FEATURES_PATH" \
--dataset_dir="$DATASET_DIR"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
if [[ "$EXPERIMENT_STATUS" != "0" || ! -e "$TEST_FEATURES_PATH" ]]; then
exit
fi
date -u >> "$SEMIFINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$SEMIFINISH_PATH"
else
echo Reloading features from "$TEST_FEATURES_PATH"
fi
#...performs prediction with SVM model
python \
"$SOURCES_GIT_DIR/predict_svm_layer.py" \
--output_file "$RESULTS_PATH" \
--input_test "$TEST_FEATURES_PATH" \
--input_model "$SVM_PATH"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...marks successful finish
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
if [[ "$FINISH_PATH" != "-" ]]; then
date -u >> "$FINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
fi
FINISH_STATUS="160"
fi
fi
|
#!/bin/bash
set -euo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd)"
"$DIR/drop-tables.js"
"$DIR/create-tables.js"
"$DIR/add-test-data.js"
|
#!/usr/bin/env bash
set -e
set -x
CURRENT_BRANCH="master"
function split()
{
    SHA1=$(./bin/splitsh-lite --prefix="$1")
    git push "$2" "$SHA1:refs/heads/$CURRENT_BRANCH" -f
}
function remote()
{
    git remote add "$1" "$2" || true
}
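# Each `split '<prefix>' <remote>` call below extracts the standalone history of
# one src/ subdirectory with splitsh-lite and force-pushes it to the matching
# component repository's $CURRENT_BRANCH.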
git pull origin $CURRENT_BRANCH
remote amqp git@github.com:hyperf-cloud/amqp.git
remote async-queue git@github.com:hyperf-cloud/async-queue.git
remote cache git@github.com:hyperf-cloud/cache.git
remote circuit-breaker git@github.com:hyperf-cloud/circuit-breaker.git
remote command git@github.com:hyperf-cloud/command.git
remote config git@github.com:hyperf-cloud/config.git
remote config-aliyun-acm git@github.com:hyperf-cloud/config-aliyun-acm.git
remote config-apollo git@github.com:hyperf-cloud/config-apollo.git
remote constants git@github.com:hyperf-cloud/constants.git
remote consul git@github.com:hyperf-cloud/consul.git
remote contract git@github.com:hyperf-cloud/contract.git
remote database git@github.com:hyperf-cloud/database.git
remote db-connection git@github.com:hyperf-cloud/db-connection.git
remote devtool git@github.com:hyperf-cloud/devtool.git
remote di git@github.com:hyperf-cloud/di.git
remote dispatcher git@github.com:hyperf-cloud/dispatcher.git
remote elasticsearch git@github.com:hyperf-cloud/elasticsearch.git
remote event git@github.com:hyperf-cloud/event.git
remote exception-handler git@github.com:hyperf-cloud/exception-handler.git
remote framework git@github.com:hyperf-cloud/framework.git
remote grpc git@github.com:hyperf-cloud/grpc.git
remote grpc-client git@github.com:hyperf-cloud/grpc-client.git
remote grpc-server git@github.com:hyperf-cloud/grpc-server.git
remote guzzle git@github.com:hyperf-cloud/guzzle.git
remote http-message git@github.com:hyperf-cloud/http-message.git
remote http-server git@github.com:hyperf-cloud/http-server.git
remote json-rpc git@github.com:hyperf-cloud/json-rpc.git
remote load-balancer git@github.com:hyperf-cloud/load-balancer.git
remote logger git@github.com:hyperf-cloud/logger.git
remote memory git@github.com:hyperf-cloud/memory.git
remote model-cache git@github.com:hyperf-cloud/model-cache.git
remote paginator git@github.com:hyperf-cloud/paginator.git
remote pool git@github.com:hyperf-cloud/pool.git
remote process git@github.com:hyperf-cloud/process.git
remote rate-limit git@github.com:hyperf-cloud/rate-limit.git
remote redis git@github.com:hyperf-cloud/redis.git
remote rpc git@github.com:hyperf-cloud/rpc.git
remote rpc-client git@github.com:hyperf-cloud/rpc-client.git
remote rpc-server git@github.com:hyperf-cloud/rpc-server.git
remote server git@github.com:hyperf-cloud/server.git
remote service-governance git@github.com:hyperf-cloud/service-governance.git
remote swagger git@github.com:hyperf-cloud/swagger.git
remote testing git@github.com:hyperf-cloud/testing.git
remote tracer git@github.com:hyperf-cloud/tracer.git
remote utils git@github.com:hyperf-cloud/utils.git
remote websocket-server git@github.com:hyperf-cloud/websocket-server.git
split 'src/amqp' amqp
split 'src/async-queue' async-queue
split 'src/cache' cache
split 'src/circuit-breaker' circuit-breaker
split 'src/command' command
split 'src/config' config
split 'src/config-aliyun-acm' config-aliyun-acm
split 'src/config-apollo' config-apollo
split 'src/constants' constants
split 'src/consul' consul
split 'src/contract' contract
split 'src/database' database
split 'src/db-connection' db-connection
split 'src/devtool' devtool
split 'src/di' di
split 'src/dispatcher' dispatcher
split 'src/elasticsearch' elasticsearch
split 'src/event' event
split 'src/exception-handler' exception-handler
split 'src/framework' framework
split 'src/grpc' grpc
split 'src/grpc-client' grpc-client
split 'src/grpc-server' grpc-server
split 'src/guzzle' guzzle
split 'src/http-message' http-message
split 'src/http-server' http-server
split 'src/json-rpc' json-rpc
split 'src/load-balancer' load-balancer
split 'src/logger' logger
split 'src/memory' memory
split 'src/model-cache' model-cache
split 'src/paginator' paginator
split 'src/pool' pool
split 'src/process' process
split 'src/rate-limit' rate-limit
split 'src/redis' redis
split 'src/rpc' rpc
split 'src/rpc-client' rpc-client
split 'src/rpc-server' rpc-server
split 'src/server' server
split 'src/service-governance' service-governance
split 'src/testing' testing
split 'src/tracer' tracer
split 'src/swagger' swagger
split 'src/utils' utils
split 'src/websocket-server' websocket-server
|
<filename>lib/util/mxEventObject.d.ts
declare module 'mxgraph' {
class mxEventObject {
constructor(name: string, ...args: any[]);
/**
* Variable: name
*
* Holds the name.
*/
name: string;
/**
* Variable: properties
*
* Holds the properties as an associative array.
*/
properties: any[];
/**
* Variable: consumed
*
* Holds the consumed state. Default is false.
*/
consumed: boolean;
/**
* Function: getName
*
* Returns <name>.
*/
getName(): string;
/**
* Function: getProperties
*
* Returns <properties>.
*/
getProperties(): any[];
/**
* Function: getProperty
*
* Returns the property for the given key.
*/
getProperty(key: string): any;
/**
* Function: isConsumed
*
* Returns true if the event has been consumed.
*/
isConsumed(): boolean;
/**
* Function: consume
*
* Consumes the event.
*/
consume(): void;
}
}
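// Usage sketch (illustrative, not part of the generated typings): properties
// are supplied to the constructor as alternating key/value arguments.
//   const evt = new mxEventObject('change', 'cell', cell);
//   evt.getProperty('cell'); // cell
//   evt.isConsumed();        // false until evt.consume() is called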
|
#!/bin/bash
# ----------------------------------------------------------------------------
#
# Package       : gcsio
# Version       : master
# Source repo   : https://github.com/GoogleCloudDataproc/hadoop-connectors
# Tested on     : UBI: 8.5
# Language      : Java
# Travis-Check  : True
# Script License: Apache License 2.0
# Maintainer's  : Balavva Mirji <Balavva.Mirji@ibm.com>
#
#
# Disclaimer: This script has been tested in root mode on given
# ==========  platform using the mentioned version of the package.
#             It may not work as expected with newer versions of the
#             package and/or distribution. In such case, please
#             contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
set -e
PACKAGE_NAME=gcsio
PACKAGE_VERSION=${1:-master}
PACKAGE_URL=https://github.com/GoogleCloudDataproc/hadoop-connectors
# install dependencies
yum install -y git wget java-1.8.0-openjdk-devel
# install maven
wget https://dlcdn.apache.org/maven/maven-3/3.8.4/binaries/apache-maven-3.8.4-bin.tar.gz
tar -zxvf apache-maven-3.8.4-bin.tar.gz
mv apache-maven-3.8.4 /opt/maven
export M2_HOME=/opt/maven
export PATH=${M2_HOME}/bin:${PATH}
# clone package
git clone $PACKAGE_URL
cd hadoop-connectors
git checkout $PACKAGE_VERSION
mvn -P hadoop1 clean package
|
from mesa import Model, Agent
from mesa.time import SimultaneousActivation
from mesa.space import SingleGrid
from mesa.datacollection import DataCollector
class VehicleAgent(Agent):
"""
Vehicle agent
"""
def __init__(self, pos, model, max_speed):
"""
Create a new vehicle agent.
Args:
pos: Agent initial position in x, y.
model: The model the agent is associated with.
max_speed: The maximum number of cells an agent can move in a single step
"""
super().__init__(pos, model)
self.pos = pos
self.speed = 0
self.max_speed = max_speed
self._next_pos = None
def step(self):
"""
Calculates the next position of the agent based on several factors:
- Current Speed
- Max Speed
- Proximity of agent ahead of it
- Random chance of deceleration
"""
# STEP 1: ACCELERATION
if self.speed < self.max_speed:
self.speed += 1
# STEP 2: DECELERATION
distance_to_next = 0
(x, y) = self.pos
for distance in range(self.max_speed):
distance += 1
test_x = x + distance
test_pos = self.model.grid.torus_adj((test_x, y))
if self.model.grid.is_cell_empty(test_pos):
distance_to_next += 1
if distance_to_next == self.speed:
break
else:
break
self.speed = distance_to_next
# STEP 3: RANDOMISATION
if self.random.random() < 0.3 and self.speed > 0:
self.speed -= 1
# STEP 4: MOVEMENT
self._next_pos = self.pos
(x, y) = self._next_pos
x += self.speed
self._next_pos = self.model.grid.torus_adj((x, y))
self.model.total_speed = self.model.total_speed + self.speed
def advance(self):
"""
Moves the agent to its next position.
"""
self.model.grid.move_agent(self, self._next_pos)
class NaSchTraffic(Model):
"""
Model class for the Nagel and Schreckenberg traffic model.
"""
def __init__(self, height=1, width=60, vehicle_quantity=5, general_max_speed=4, seed=None):
""""""
super().__init__(seed=seed)
self.height = height
self.width = width
self.vehicle_quantity = vehicle_quantity
self.general_max_speed = general_max_speed
self.schedule = SimultaneousActivation(self)
self.grid = SingleGrid(width, height, torus=True)
self.average_speed = 0.0
self.averages = []
self.total_speed = 0
self.datacollector = DataCollector(
model_reporters={"Average_Speed": "average_speed"}, # Model-level count of average speed of all agents
# For testing purposes, agent's individual x position and speed
agent_reporters={
"PosX": lambda x: x.pos[0],
"Speed": lambda x: x.speed,
},
)
# Set up agents
# We use a grid iterator that returns
# the coordinates of a cell as well as
# its contents. (coord_iter)
cells = list(self.grid.coord_iter())
self.random.shuffle(cells)
for vehicle_iter in range(0, self.vehicle_quantity):
cell = cells[vehicle_iter]
(content, x, y) = cell
agent = VehicleAgent((x, y), self, general_max_speed)
            self.grid.position_agent(agent, x, y)
self.schedule.add(agent)
self.running = True
self.datacollector.collect(self)
def step(self):
"""
Run one step of the model. Calculate current average speed of all agents.
"""
if self.schedule.steps == 100:
self.running = False
self.total_speed = 0
# Step all agents, then advance all agents
self.schedule.step()
if self.schedule.get_agent_count() > 0:
self.average_speed = self.total_speed / self.schedule.get_agent_count()
else:
self.average_speed = 0
self.averages.append(self.average_speed)
# collect data
self.datacollector.collect(self)
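# Minimal usage sketch (not part of the original module; assumes mesa is
# installed and the classes above import as written):
if __name__ == "__main__":
    model = NaSchTraffic(height=1, width=60, vehicle_quantity=5, general_max_speed=4, seed=42)
    while model.running:
        model.step()
    print(model.averages[-5:])  # last few average-speed samples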
|
#!/usr/bin/env bashio
set -e
DIRSFIRST=$(bashio::config 'dirsfirst')
ENFORCE_BASEPATH=$(bashio::config 'enforce_basepath')
IGNORE_PATTERN="$(bashio::jq "/data/options.json" ".ignore_pattern")"
WAIT_PIDS=()
# Setup and run Frontend
sed -i "s/%%PORT%%/8080/g" /etc/nginx/nginx-ingress.conf
sed -i "s/%%PORT_INGRESS%%/8099/g" /etc/nginx/nginx-ingress.conf
nginx -c /etc/nginx/nginx-ingress.conf &
WAIT_PIDS+=($!)
# Setup and run configurator
sed -i "s/%%TOKEN%%/${HASSIO_TOKEN}/g" /etc/configurator.conf
sed -i "s/%%DIRSFIRST%%/${DIRSFIRST}/g" /etc/configurator.conf
sed -i "s/%%ENFORCE_BASEPATH%%/${ENFORCE_BASEPATH}/g" /etc/configurator.conf
sed -i "s/%%IGNORE_PATTERN%%/${IGNORE_PATTERN}/g" /etc/configurator.conf
hass-configurator /etc/configurator.conf &
WAIT_PIDS+=($!)
# Register stop
function stop_addon() {
bashio::log.debug "Kill Processes..."
kill -15 "${WAIT_PIDS[@]}"
wait "${WAIT_PIDS[@]}"
bashio::log.debug "Done."
}
trap "stop_addon" SIGTERM SIGHUP
# Wait until all is done
bashio::log.info "Add-on running"
wait "${WAIT_PIDS[@]}"
|
package com.google.sps.data;
/** A message to the comment section. */
public final class Message {
private final long id;
private final String content;
private final long timestamp;
private final String userEmail;
/**
* @param id datastore-generated unique id for this comment.
* @param content main text of this message.
* @param timestamp comment creation date in ms.
   * @param userEmail email used by the user to log in.
*/
public Message(long id, String content, long timestamp, String userEmail) {
this.id = id;
this.content = content;
this.timestamp = timestamp;
this.userEmail = userEmail;
}
}
|
(defn random-string [length]
  (apply str (repeatedly length
                         #(let [n (rand-int 36)]
                            (char (if (>= n 10) (+ n 87) (+ n 48)))))))

(random-string 8)
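;; Example (one possible result): (random-string 8) => "k3x9q0ab"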
|
<filename>server/routes/projects/publish.js
"use strict";
var request = require("request");
var utils = require("../utils");
var HttpError = require("../../lib/http-error");
module.exports = function(config, req, res, next) {
var project = req.project;
project.description = req.body.description;
// Uncomment the line below once https://github.com/mozilla/publish.webmaker.org/issues/98 is done
// project.public = req.body.public;
project.date_updated = req.body.dateUpdated;
utils.updateProject(config, req.user, project, function(
err,
status,
project
) {
if (err) {
res.status(status);
next(HttpError.format(err, req));
return;
}
var publishURL = config.publishURL + "/projects/" + project.id + "/publish";
request(
{
method: "PUT",
uri: publishURL,
headers: {
Authorization: "token " + req.user.token
}
},
function(err, response, body) {
var failure = false;
if (err) {
res.status(500);
failure = {
message: "Failed to send request to " + publishURL,
context: err
};
} else if (response.statusCode !== 200) {
res.status(response.statusCode);
failure = {
message:
"Request to " +
publishURL +
" returned a status of " +
response.statusCode,
context: response.body
};
}
if (failure) {
next(HttpError.format(failure, req));
return;
}
var project;
try {
project = JSON.parse(body);
} catch (e) {
res.status(500);
next(
HttpError.format(
{
message:
"Project sent by calling function was in an invalid format. Failed to run `JSON.parse`",
context: e.message,
stack: e.stack
},
req
)
);
return;
}
res.status(200).send({ link: project.publish_url });
}
);
});
};
|
package com.testvagrant.ekam.reports.interceptors;
import com.google.inject.Inject;
import com.testvagrant.ekam.commons.Toggles;
import com.testvagrant.ekam.commons.interceptors.InvocationInterceptor;
import com.testvagrant.ekam.reports.annotations.Step;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
public class ScreenshotInterceptor extends InvocationInterceptor implements MethodInterceptor {
@Inject ScreenshotTaker screenshotTaker;
@Override
public Object invoke(MethodInvocation invocation) throws Throwable {
try {
AtomicReference<Object> proceed = invokeMethod(invocation);
initScreenshot(invocation);
return proceed.get();
} catch (Throwable e) {
initScreenshot(invocation);
throw e;
}
}
private void initScreenshot(MethodInvocation invocation) {
Step step = invocation.getMethod().getAnnotation(Step.class);
if (Objects.isNull(step) && Toggles.TIMELINE.isActive()) screenshotTaker.saveScreenshot();
}
}
|
<gh_stars>1-10
/*
* HMPPS Offender Assessment API
* OASys Data API.
*
* OpenAPI spec version: 2020-09-02
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
*
* Swagger Codegen version: 2.4.15
*
* Do not edit the class manually.
*
*/
(function(root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['ApiClient'], factory);
} else if (typeof module === 'object' && module.exports) {
// CommonJS-like environments that support module.exports, like Node.
module.exports = factory(require('../ApiClient'));
} else {
// Browser globals (root is window)
if (!root.HmppsOffenderAssessmentApi) {
root.HmppsOffenderAssessmentApi = {};
}
root.HmppsOffenderAssessmentApi.AuthorisationDto = factory(root.HmppsOffenderAssessmentApi.ApiClient);
}
}(this, function(ApiClient) {
'use strict';
/**
* The AuthorisationDto model module.
* @module model/AuthorisationDto
* @version 1.0.0
*/
/**
* Constructs a new <code>AuthorisationDto</code>.
* @alias module:model/AuthorisationDto
* @class
*/
var exports = function() {
};
/**
* Constructs a <code>AuthorisationDto</code> from a plain JavaScript object, optionally creating a new instance.
* Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
* @param {Object} data The plain JavaScript object bearing properties of interest.
* @param {module:model/AuthorisationDto} obj Optional instance to populate.
* @return {module:model/AuthorisationDto} The populated <code>AuthorisationDto</code> instance.
*/
exports.constructFromObject = function(data, obj) {
if (data) {
obj = obj || new exports();
if (data.hasOwnProperty('oasysOffenderId'))
obj.oasysOffenderId = ApiClient.convertToType(data['oasysOffenderId'], 'Number');
if (data.hasOwnProperty('oasysUserCode'))
obj.oasysUserCode = ApiClient.convertToType(data['oasysUserCode'], 'String');
if (data.hasOwnProperty('offenderPermissionLevel'))
obj.offenderPermissionLevel = ApiClient.convertToType(data['offenderPermissionLevel'], 'String');
if (data.hasOwnProperty('offenderPermissionResource'))
obj.offenderPermissionResource = ApiClient.convertToType(data['offenderPermissionResource'], 'String');
}
return obj;
}
/**
* @member {Number} oasysOffenderId
*/
exports.prototype.oasysOffenderId = undefined;
/**
* @member {String} oasysUserCode
*/
exports.prototype.oasysUserCode = undefined;
/**
* @member {module:model/AuthorisationDto.OffenderPermissionLevelEnum} offenderPermissionLevel
*/
exports.prototype.offenderPermissionLevel = undefined;
/**
* @member {module:model/AuthorisationDto.OffenderPermissionResourceEnum} offenderPermissionResource
*/
exports.prototype.offenderPermissionResource = undefined;
/**
* Allowed values for the <code>offenderPermissionLevel</code> property.
* @enum {String}
* @readonly
*/
exports.OffenderPermissionLevelEnum = {
/**
* value: "UNAUTHORISED"
* @const
*/
UNAUTHORISED: "UNAUTHORISED",
/**
* value: "READ_ONLY"
* @const
*/
READ_ONLY: "READ_ONLY",
/**
* value: "WRITE"
* @const
*/
WRITE: "WRITE"
};
/**
* Allowed values for the <code>offenderPermissionResource</code> property.
* @enum {String}
* @readonly
*/
exports.OffenderPermissionResourceEnum = {
/**
* value: "SENTENCE_PLAN"
* @const
*/
SENTENCE_PLAN: "SENTENCE_PLAN"
};
return exports;
}));
|
<filename>src/api/index.js
import { version } from '../../package.json';
import { Router } from 'express';
import tweets from './tweets';
import config from '../config.json';
import { OAuth } from 'oauth';
const REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token';
const ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token';
const OAUTH_VERSION = '1.0';
const HASH_VERSION = 'HMAC-SHA1';
const consumer_key = config.consumer_key;
const consumer_secret = config.consumer_secret;
const callback_url = config.callback_url;
const oa = new OAuth(REQUEST_TOKEN_URL, ACCESS_TOKEN_URL, consumer_key, consumer_secret, OAUTH_VERSION, callback_url, HASH_VERSION);
let _oauth_access_token_secret;
export default ({ config, db }) => {
let api = Router();
// mount the facets resource
api.use('/tweets', tweets({ config, db }));
// perhaps expose some API metadata at the root
api.get('/', (req, res) => {
res.json({ version });
});
// TODO: move elsewhere
api.get('/request-token', (req, res) => {
oa.getOAuthRequestToken((err, oauth_token, oauth_token_secret) => {
if (err) {
// throw new Error(([err.statusCode, err.data].join(': ')));
return console.error([err.statusCode, err.data].join(': '));
}
_oauth_access_token_secret = oauth_token_secret;
// res.redirect('https://api.twitter.com/oauth/authenticate?oauth_token=' + oauth_token);
res.json({ redirect_url: 'https://api.twitter.com/oauth/authenticate?oauth_token=' + oauth_token })
});
});
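  // Flow sketch (illustrative): a client calls GET /request-token and follows
  // redirect_url to Twitter; after authorization Twitter redirects back to
  // callback_url with oauth_token and oauth_verifier query parameters, which
  // the /access-token route below exchanges for access tokens.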
api.get("/access-token", function (req, res) {
var oauth_token = req.query.oauth_token,
verifier = req.query.oauth_verifier;
oa.getOAuthAccessToken(oauth_token, _oauth_access_token_secret, verifier,
function (err, oauth_access_token, oauth_access_token_secret) {
      if (err) {
        if (parseInt(err.statusCode, 10) === 401) {
          console.error('The pin number you have entered is incorrect');
        }
        // Bail out here rather than calling verify_credentials with undefined tokens
        return res.status(parseInt(err.statusCode, 10) || 500).json({ error: 'Failed to obtain an access token' });
      }
// console.log('Your OAuth Access Token: ', oauth_access_token);
// console.log('Your OAuth Token Secret: ', oauth_access_token_secret);
// console.log('Now, save these two values, along with your original consumer secret and key and use these in your twitter app');
oa.get('https://api.twitter.com/1.1/account/verify_credentials.json', oauth_access_token, oauth_access_token_secret, (err, data) => {
        if (err) { return res.status(500).json({ error: err.data }); }
// console.log('user data:');
var user = JSON.parse(data);
// console.log(user);
// TODO: DO NOT SEND SENSITIVE INFO TO CLIENT SIDE, REMOVE THESE!!!;
user.access_token = oauth_access_token;
user.access_token_secret = oauth_access_token_secret;
res.json(user);
});
});
});
return api;
}
|
<gh_stars>1-10
/*
* Copyright (c) 2018 Ahome' Innovation Technologies. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ait.lienzo.ks.client.views.components;
import static com.ait.lienzo.client.core.AttributeOp.all;
import static com.ait.lienzo.client.core.AttributeOp.any;
import static com.ait.lienzo.client.core.AttributeOp.has;
import static com.ait.lienzo.client.core.animation.AnimationProperty.Properties.ROTATION_DEGREES;
import static com.ait.lienzo.client.core.animation.AnimationProperty.Properties.SCALE;
import static com.ait.tooling.common.api.flow.Flows.andOp;
import java.util.LinkedHashMap;
import com.ait.lienzo.client.core.Attribute;
import com.ait.lienzo.client.core.animation.AnimationCallback;
import com.ait.lienzo.client.core.animation.AnimationProperties;
import com.ait.lienzo.client.core.animation.AnimationTweener;
import com.ait.lienzo.client.core.animation.IAnimation;
import com.ait.lienzo.client.core.animation.IAnimationHandle;
import com.ait.lienzo.client.core.event.AnimationFrameAttributesChangedBatcher;
import com.ait.lienzo.client.core.event.AttributesChangedEvent;
import com.ait.lienzo.client.core.event.AttributesChangedHandler;
import com.ait.lienzo.client.core.event.DeferredAttributesChangedBatcher;
import com.ait.lienzo.client.core.event.FinallyAttributesChangedBatcher;
import com.ait.lienzo.client.core.event.IAttributesChangedBatcher;
import com.ait.lienzo.client.core.event.ImmediateAttributesChangedBatcher;
import com.ait.lienzo.client.core.shape.Layer;
import com.ait.lienzo.client.core.shape.Rectangle;
import com.ait.lienzo.client.core.shape.Text;
import com.ait.lienzo.client.core.types.LinearGradient;
import com.ait.lienzo.client.core.types.Point2D;
import com.ait.lienzo.client.core.types.Shadow;
import com.ait.lienzo.ks.client.ui.components.KSButton;
import com.ait.lienzo.ks.client.ui.components.KSComboBox;
import com.ait.lienzo.ks.client.views.AbstractToolBarViewComponent;
import com.ait.lienzo.shared.core.types.ColorName;
import com.ait.lienzo.shared.core.types.DragConstraint;
import com.ait.lienzo.shared.core.types.LineJoin;
import com.ait.lienzo.shared.core.types.TextBaseLine;
import com.ait.tooling.common.api.flow.Flows.BooleanOp;
import com.ait.tooling.nativetools.client.event.HandlerRegistrationManager;
import com.ait.toolkit.sencha.ext.client.events.button.ClickEvent;
import com.ait.toolkit.sencha.ext.client.events.button.ClickHandler;
import com.ait.toolkit.sencha.ext.client.events.form.ChangeEvent;
import com.ait.toolkit.sencha.ext.client.events.form.ChangeHandler;
public class AttributesChangedBatcherViewComponent extends AbstractToolBarViewComponent
{
private final KSButton m_scaled = new KSButton("Scale");
private final KSButton m_rotate = new KSButton("Rotate");
private final KSButton m_doboth = new KSButton("Both");
private final KSButton m_srshow = new KSButton("Remove S/R");
private final KSButton m_xyshow = new KSButton("Remove X/Y");
private final KSButton m_dshorz = new KSButton("Horizontal");
private final KSButton m_dsvert = new KSButton("Vertical");
private final KSButton m_dsnone = new KSButton("None");
private long m_maxrot = 0;
private final HandlerRegistrationManager m_srlist = new HandlerRegistrationManager();
private final HandlerRegistrationManager m_xylist = new HandlerRegistrationManager();
private IAttributesChangedBatcher m_batcher = new ImmediateAttributesChangedBatcher();
public AttributesChangedBatcherViewComponent()
{
final BooleanOp andhas = andOp(has(Attribute.SCALE), has(Attribute.ROTATION));
final BooleanOp anyhas = any(Attribute.SCALE, Attribute.ROTATION);
final BooleanOp allhas = all(Attribute.SCALE, Attribute.ROTATION);
final Layer layer = new Layer();
final Text text = new Text("Push an amimate button").setFillColor(ColorName.BLACK).setX(400).setY(100).setFontSize(20).setTextBaseLine(TextBaseLine.TOP);
final Text labl = new Text(m_batcher.getName()).setFillColor(ColorName.BLACK).setX(400).setY(150).setFontSize(20).setTextBaseLine(TextBaseLine.TOP);
final Text json = new Text("{}").setFillColor(ColorName.BLACK).setX(400).setY(200).setFontSize(20).setTextBaseLine(TextBaseLine.TOP);
final Text posn = new Text("{}").setFillColor(ColorName.BLACK).setX(400).setY(250).setFontSize(20).setTextBaseLine(TextBaseLine.TOP);
final LinearGradient lgradient = new LinearGradient(0, 0, 200, 0);
lgradient.addColorStop(0.0, ColorName.WHITE);
lgradient.addColorStop(0.1, ColorName.SALMON);
lgradient.addColorStop(0.9, ColorName.DARKRED);
lgradient.addColorStop(1.0, ColorName.WHITE);
final Rectangle rectangle = new Rectangle(200, 300).setX(100).setY(100).setFillGradient(lgradient).setDraggable(true).setShadow(new Shadow(ColorName.BLACK, 10, 5, 5)).setStrokeColor(ColorName.BLACK).setStrokeWidth(10).setLineJoin(LineJoin.ROUND);
rectangle.setOffset(100, 150);
rectangle.setAttributesChangedBatcher(m_batcher);
final LinkedHashMap<String, String> pick = new LinkedHashMap<>();
pick.put("Immediate", "Immediate");
pick.put("Deferred", "Deferred");
pick.put("AnimationFrame", "AnimationFrame");
pick.put("Finally", "Finally");
final KSComboBox cbox = new KSComboBox(pick);
cbox.addChangeHandler(new ChangeHandler()
{
@Override
public void onChange(final ChangeEvent event)
{
final String value = pick.get(event.getNewValue());
if ("Immediate".equals(value))
{
m_batcher = new ImmediateAttributesChangedBatcher();
rectangle.setAttributesChangedBatcher(m_batcher);
labl.setText(m_batcher.getName());
layer.draw();
}
else if ("Deferred".equals(value))
{
m_batcher = new DeferredAttributesChangedBatcher();
rectangle.setAttributesChangedBatcher(m_batcher);
labl.setText(m_batcher.getName());
layer.draw();
}
else if ("AnimationFrame".equals(value))
{
m_batcher = new AnimationFrameAttributesChangedBatcher();
rectangle.setAttributesChangedBatcher(m_batcher);
labl.setText(m_batcher.getName());
layer.draw();
}
else if ("Finally".equals(value))
{
m_batcher = new FinallyAttributesChangedBatcher();
rectangle.setAttributesChangedBatcher(m_batcher);
labl.setText(m_batcher.getName());
layer.draw();
}
}
});
getToolBarContainer().add(cbox);
m_scaled.setWidth(90);
getToolBarContainer().add(m_scaled);
m_scaled.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
m_maxrot = 0;
cbox.disable();
rectangle.animate(AnimationTweener.BOUNCE, AnimationProperties.toPropertyList(SCALE(0.25, 0.25)), 2000, new AnimationCallback()
{
@Override
public void onClose(final IAnimation animation, final IAnimationHandle handle)
{
rectangle.animate(AnimationTweener.BOUNCE, AnimationProperties.toPropertyList(SCALE(1, 1)), 2000, new AnimationCallback()
{
@Override
public void onClose(final IAnimation animation, final IAnimationHandle handle)
{
cbox.enable();
}
});
}
});
}
});
m_rotate.setWidth(90);
getToolBarContainer().add(m_rotate);
m_rotate.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
m_maxrot = 0;
cbox.disable();
rectangle.animate(AnimationTweener.LINEAR, AnimationProperties.toPropertyList(ROTATION_DEGREES(360)), 2000, new AnimationCallback()
{
@Override
public void onClose(final IAnimation animation, final IAnimationHandle handle)
{
rectangle.animate(AnimationTweener.LINEAR, AnimationProperties.toPropertyList(ROTATION_DEGREES(0)), 2000, new AnimationCallback()
{
@Override
public void onClose(final IAnimation animation, final IAnimationHandle handle)
{
cbox.enable();
}
});
}
});
}
});
m_doboth.setWidth(90);
getToolBarContainer().add(m_doboth);
m_doboth.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
m_maxrot = 0;
cbox.disable();
rectangle.animate(AnimationTweener.LINEAR, AnimationProperties.toPropertyList(ROTATION_DEGREES(360), SCALE(0.25, 0.25)), 2000, new AnimationCallback()
{
@Override
public void onClose(final IAnimation animation, final IAnimationHandle handle)
{
rectangle.animate(AnimationTweener.LINEAR, AnimationProperties.toPropertyList(ROTATION_DEGREES(0), SCALE(1, 1)), 2000, new AnimationCallback()
{
@Override
public void onClose(final IAnimation animation, final IAnimationHandle handle)
{
cbox.enable();
}
});
}
});
}
});
final AttributesChangedHandler xyhandler = new AttributesChangedHandler()
{
@Override
public void onAttributesChanged(final AttributesChangedEvent event)
{
json.setText(event.toJSONString());
posn.setText(new Point2D(rectangle.getX(), rectangle.getY()).toJSONString());
layer.batch();
}
};
m_xyshow.setWidth(90);
getToolBarContainer().add(m_xyshow);
m_xyshow.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
if (m_xylist.size() == 0)
{
m_xyshow.setText("Remove X/Y");
m_xylist.register(rectangle.addAttributesChangedHandler(Attribute.X, xyhandler));
m_xylist.register(rectangle.addAttributesChangedHandler(Attribute.Y, xyhandler));
}
else
{
m_xyshow.setText("Register X/Y");
m_xylist.destroy();
}
}
});
final AttributesChangedHandler srhandler = new AttributesChangedHandler()
{
@Override
public void onAttributesChanged(final AttributesChangedEvent event)
{
final Point2D scale = rectangle.getScale();
final long r = Math.round(rectangle.getRotationDegrees());
if (r > m_maxrot)
{
m_maxrot = r;
}
if (null != scale)
{
text.setText("AND:" + event.evaluate(andhas) + ":ANY:" + event.evaluate(anyhas) + ":ALL:" + event.evaluate(allhas) + ":ROTATION:" + event.has(Attribute.ROTATION) + ":" + r + ":" + m_maxrot + ":SCALE:" + event.has(Attribute.SCALE) + ":" + scale.toJSONString());
}
else
{
text.setText("AND:" + event.evaluate(andhas) + ":ANY:" + event.evaluate(anyhas) + ":ALL:" + event.evaluate(allhas) + ":ROTATION:" + event.has(Attribute.ROTATION) + ":" + r + ":" + m_maxrot + ":SCALE:" + event.has(Attribute.SCALE) + ":{none}");
}
json.setText(event.toJSONString());
layer.batch();
}
};
m_srshow.setWidth(90);
getToolBarContainer().add(m_srshow);
m_srshow.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
if (m_srlist.size() == 0)
{
m_srshow.setText("Remove S/R");
m_srlist.register(rectangle.addAttributesChangedHandler(Attribute.SCALE, srhandler));
m_srlist.register(rectangle.addAttributesChangedHandler(Attribute.ROTATION, srhandler));
}
else
{
m_srshow.setText("Register S/R");
m_srlist.destroy();
}
}
});
m_dsnone.setWidth(90);
getToolBarContainer().add(m_dsnone);
m_dsnone.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
rectangle.setDragConstraint(DragConstraint.NONE);
}
});
m_dshorz.setWidth(90);
getToolBarContainer().add(m_dshorz);
m_dshorz.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
rectangle.setDragConstraint(DragConstraint.HORIZONTAL);
}
});
m_dsvert.setWidth(90);
getToolBarContainer().add(m_dsvert);
m_dsvert.addClickHandler(new ClickHandler()
{
@Override
public void onClick(final ClickEvent event)
{
rectangle.setDragConstraint(DragConstraint.VERTICAL);
}
});
m_srlist.register(rectangle.addAttributesChangedHandler(Attribute.SCALE, srhandler));
m_srlist.register(rectangle.addAttributesChangedHandler(Attribute.ROTATION, srhandler));
m_xylist.register(rectangle.addAttributesChangedHandler(Attribute.X, xyhandler));
m_xylist.register(rectangle.addAttributesChangedHandler(Attribute.Y, xyhandler));
layer.add(rectangle);
layer.add(text);
layer.add(labl);
layer.add(json);
layer.add(posn);
getLienzoPanel().add(layer);
getLienzoPanel().setBackgroundLayer(getBackgroundLayer());
getWorkingContainer().add(getLienzoPanel());
}
}
|
<gh_stars>0
import {Component, OnInit} from "@angular/core";
import {ComplaintService} from "../../../@core/services/complaint.service";
import {ActivatedRoute} from "@angular/router";
import {Complaint} from "../../../@core/model/complaint";
@Component({
selector: 'complaint-view',
styleUrls: ['complaint-view.component.scss'],
templateUrl: 'complaint-view.component.html',
})
export class ComplaintViewComponent implements OnInit {
complaint: Complaint;
constructor(private complaintService: ComplaintService, private route: ActivatedRoute) {
}
ngOnInit() {
this.route.params.subscribe(params => {
let complaintId = params['id'];
this.complaintService.getComplaint(complaintId).subscribe(res => this.complaint = res);
});
}
}
|
import parse, { testables } from "../../engine/parser"
import tokenize from "../../engine/lexer"
import { isDuplicateIdentifier } from "../../engine/parser/identifiers"
const {
parseAttributes,
parseRelBody,
parseEntity,
parseWeakEntity,
parseRel,
parseIdenRel,
} = testables
describe("tests for parser", () => {
it("should throw a syntax error", () => {
expect(() => parse(tokenize("FOO"))).toThrow(SyntaxError)
})
it("should return empty nodes array for empty tokens array", () => {
expect(parse([])).toEqual([])
})
})
describe("tests for parseAttributes", () => {
it("should throw a syntax error", () => {
const tokens = tokenize(`ENTITY Foo {
COMPOSITE "full_name" {
MULTIVALUED "first_name"
}
}`)
expect(() => parseAttributes(tokens, 3, tokens.length - 2)).toThrow(
SyntaxError
)
})
it("should parse valid tokens", () => {
const tokens = tokenize(`ENTITY Bar {
PRIMARY "SSN",
SIMPLE "salary",
SIMPLE "DoB",
DERIVED "age",
COMPOSITE "full_name" {
SIMPLE "first_name",
SIMPLE "last_name"
}
}`)
expect(parseAttributes(tokens, 3, tokens.length - 3)).toMatchSnapshot()
})
})
describe("tests for parseRelBody", () => {
it("should throw a syntax error", () => {
isDuplicateIdentifier("Part_entity_1")
isDuplicateIdentifier("Part_entity_2")
const tokens = tokenize(`REL Rel_name {
ATTRIBUTES {
SIMPLE "hours"
},
Part_entity_1 <PARTIAL, N>,
Part_entity_2 <TOTAL, N>,
ATTRIBUTES {
SIMPLE "hours"
}
}`)
expect(() => parseRelBody(tokens, 3, tokens.length - 2)).toThrow(
SyntaxError
)
})
it("should parse valid tokens", () => {
isDuplicateIdentifier("Part_entity_1")
isDuplicateIdentifier("Part_entity_2")
const tokens = tokenize(`REL Rel_name {
Part_entity_1 <PARTIAL, N>,
Part_entity_2 <TOTAL, N>,
ATTRIBUTES {
SIMPLE "hours"
}, // Trailing comma is allowed
}`)
expect(parseRelBody(tokens, 3, tokens.length - 3)).toMatchSnapshot()
})
})
describe("test for parseEntity", () => {
it("should parse valid tokens", () => {
const tokens = tokenize(`ENTITY User {
PRIMARY "SSN",
SIMPLE "salary",
SIMPLE "DoB",
DERIVED "age",
COMPOSITE "full_name" {
SIMPLE "first_name",
SIMPLE "last_name"
}
}`)
expect(parseEntity(tokens, 0)).toMatchSnapshot()
})
})
describe("test for parseWeakEntity", () => {
it("should parse valid tokens", () => {
isDuplicateIdentifier("Father")
const tokens = tokenize(`WEAK ENTITY Son OWNER Father {
COMPOSITE "key" {
SIMPLE "name",
SIMPLE "DoB"
},
SIMPLE "relationship",
SIMPLE "gender"
}`)
expect(parseWeakEntity(tokens, 0)).toMatchSnapshot()
})
})
describe("test for parseRel", () => {
it("should parse valid tokens", () => {
isDuplicateIdentifier("Foo")
isDuplicateIdentifier("Bar")
const tokens = tokenize(`REL Rel_name {
Foo <PARTIAL, 1>,
Bar <TOTAL, 1>,
ATTRIBUTES {
SIMPLE "start_date"
}
}`)
expect(parseRel(tokens, 0)).toMatchSnapshot()
})
})
describe("test for parseIdenRel", () => {
it("should parse valid tokens", () => {
isDuplicateIdentifier("Fizz")
isDuplicateIdentifier("Buzz")
const tokens = tokenize(`IDEN REL Iden_rel_name {
Fizz <PARTIAL, 1>,
Buzz <TOTAL, N>
}`)
expect(parseIdenRel(tokens, 0)).toMatchSnapshot()
})
})
|
<reponame>jonaslu/thatswhatsup
package getchange.version2;
import java.util.ArrayList;
import java.util.List;
public class GetChange2 {
public static int getNumberOfWays(List<Integer> denominators, int sum) {
if (denominators.isEmpty()) {
return 0;
}
if (sum < 0) {
return 0;
}
if (sum == 0) {
return 1;
}
int head = denominators.get(0);
List<Integer> tail = new ArrayList<>(denominators.subList(1, denominators.size()));
return getNumberOfWays(tail, sum) + getNumberOfWays(new ArrayList<>(denominators), sum - head);
}
}
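// Example (illustrative): getNumberOfWays(Arrays.asList(1, 2, 3), 4) returns 4,
// counting 1+1+1+1, 1+1+2, 2+2 and 1+3.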
|
#!/bin/sh
test_description='git archive attribute tests'
. ./test-lib.sh
SUBSTFORMAT='%H (%h)%n'
test_expect_exists() {
test_expect_${2:-success} " $1 exists" "test -e $1"
}
test_expect_missing() {
test_expect_${2:-success} " $1 does not exist" "test ! -e $1"
}
extract_tar_to_dir () {
(mkdir "$1" && cd "$1" && "$TAR" xf -) <"$1.tar"
}
test_expect_success 'setup' '
echo ignored >ignored &&
echo ignored export-ignore >>.git/info/attributes &&
git add ignored &&
echo ignored by tree >ignored-by-tree &&
echo ignored-by-tree export-ignore >.gitattributes &&
mkdir ignored-by-tree.d &&
>ignored-by-tree.d/file &&
echo ignored-by-tree.d export-ignore >>.gitattributes &&
git add ignored-by-tree ignored-by-tree.d .gitattributes &&
echo ignored by worktree >ignored-by-worktree &&
echo ignored-by-worktree export-ignore >.gitattributes &&
git add ignored-by-worktree &&
mkdir excluded-by-pathspec.d &&
>excluded-by-pathspec.d/file &&
git add excluded-by-pathspec.d &&
printf "A\$Format:%s\$O" "$SUBSTFORMAT" >nosubstfile &&
printf "A\$Format:%s\$O" "$SUBSTFORMAT" >substfile1 &&
printf "A not substituted O" >substfile2 &&
echo "substfile?" export-subst >>.git/info/attributes &&
git add nosubstfile substfile1 substfile2 &&
git commit -m. &&
git clone --bare . bare &&
cp .git/info/attributes bare/info/attributes
'
test_expect_success 'git archive' '
git archive HEAD >archive.tar &&
(mkdir archive && cd archive && "$TAR" xf -) <archive.tar
'
test_expect_missing archive/ignored
test_expect_missing archive/ignored-by-tree
test_expect_missing archive/ignored-by-tree.d
test_expect_missing archive/ignored-by-tree.d/file
test_expect_exists archive/ignored-by-worktree
test_expect_exists archive/excluded-by-pathspec.d
test_expect_exists archive/excluded-by-pathspec.d/file
test_expect_success 'git archive with pathspec' '
git archive HEAD ":!excluded-by-pathspec.d" >archive-pathspec.tar &&
extract_tar_to_dir archive-pathspec
'
test_expect_missing archive-pathspec/ignored
test_expect_missing archive-pathspec/ignored-by-tree
test_expect_missing archive-pathspec/ignored-by-tree.d
test_expect_missing archive-pathspec/ignored-by-tree.d/file
test_expect_exists archive-pathspec/ignored-by-worktree
test_expect_missing archive-pathspec/excluded-by-pathspec.d
test_expect_missing archive-pathspec/excluded-by-pathspec.d/file
test_expect_success 'git archive with wildcard pathspec' '
git archive HEAD ":!excluded-by-p*" >archive-pathspec-wildcard.tar &&
extract_tar_to_dir archive-pathspec-wildcard
'
test_expect_missing archive-pathspec-wildcard/ignored
test_expect_missing archive-pathspec-wildcard/ignored-by-tree
test_expect_missing archive-pathspec-wildcard/ignored-by-tree.d
test_expect_missing archive-pathspec-wildcard/ignored-by-tree.d/file
test_expect_exists archive-pathspec-wildcard/ignored-by-worktree
test_expect_missing archive-pathspec-wildcard/excluded-by-pathspec.d
test_expect_missing archive-pathspec-wildcard/excluded-by-pathspec.d/file
test_expect_success 'git archive with worktree attributes' '
git archive --worktree-attributes HEAD >worktree.tar &&
(mkdir worktree && cd worktree && "$TAR" xf -) <worktree.tar
'
test_expect_missing worktree/ignored
test_expect_exists worktree/ignored-by-tree
test_expect_missing worktree/ignored-by-worktree
test_expect_success 'git archive --worktree-attributes option' '
git archive --worktree-attributes --worktree-attributes HEAD >worktree.tar &&
(mkdir worktree2 && cd worktree2 && "$TAR" xf -) <worktree.tar
'
test_expect_missing worktree2/ignored
test_expect_exists worktree2/ignored-by-tree
test_expect_missing worktree2/ignored-by-worktree
test_expect_success 'git archive vs. bare' '
(cd bare && git archive HEAD) >bare-archive.tar &&
test_cmp_bin archive.tar bare-archive.tar
'
test_expect_success 'git archive with worktree attributes, bare' '
(cd bare && git archive --worktree-attributes HEAD) >bare-worktree.tar &&
(mkdir bare-worktree && cd bare-worktree && "$TAR" xf -) <bare-worktree.tar
'
test_expect_missing bare-worktree/ignored
test_expect_exists bare-worktree/ignored-by-tree
test_expect_exists bare-worktree/ignored-by-worktree
test_expect_success 'export-subst' '
git log "--pretty=format:A${SUBSTFORMAT}O" HEAD >substfile1.expected &&
test_cmp nosubstfile archive/nosubstfile &&
test_cmp substfile1.expected archive/substfile1 &&
test_cmp substfile2 archive/substfile2
'
test_done
|
<filename>src/main/java/io/prestok8s/baseapp/AppModule.java
package io.prestok8s.baseapp;
import com.google.inject.AbstractModule;
import io.dropwizard.Configuration;
import lombok.Getter;
@Getter
public abstract class AppModule<T extends Configuration, E> extends AbstractModule {
private final T configuration;
private final E environment;
public AppModule(T config, E env) {
this.configuration = config;
this.environment = env;
}
@Override
protected void configure() {}
}
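// Sketch of a concrete module (hypothetical names, not part of this file):
// public class ServerModule extends AppModule<ServerConfig, Environment> {
//     public ServerModule(ServerConfig config, Environment env) { super(config, env); }
//     @Override protected void configure() { /* bind application services here */ }
// }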
|
import {COALITIONS_GETTED} from "../actions/coalitions";
const initialState = {
coalitions: []
};
const coalitions = (state = initialState, {type, payload}) => {
switch (type) {
case COALITIONS_GETTED:
return {
...state,
coalitions: [...Object.values(payload)]
};
default:
return state;
}
};
export default coalitions;
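// Illustrative dispatch: coalitions(undefined, {type: COALITIONS_GETTED,
// payload: {a: {id: 1}, b: {id: 2}}}) returns {coalitions: [{id: 1}, {id: 2}]}.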
|
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies the dSYM of a vendored framework
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/CZPicker/CZPicker.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Charts/Charts.framework"
install_framework "${BUILT_PRODUCTS_DIR}/JSONHelper/JSONHelper.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Siesta/Siesta.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/CZPicker/CZPicker.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Charts/Charts.framework"
install_framework "${BUILT_PRODUCTS_DIR}/JSONHelper/JSONHelper.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Siesta/Siesta.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
import { NdiControllerConnection } from '../NdiController/ndiControllerClient'
import { CompanionActionEvent, CompanionActions, CompanionAction } from '../../../../instance_skel_types'
export enum ActionId {
SetCrossPoint = 'set_crosspoint',
}
type CompanionActionWithCallback = CompanionAction & Required<Pick<CompanionAction, 'callback'>>
export function GetActionsList(ndiConnection: NdiControllerConnection): CompanionActions {
const entityOnOff = (opt: CompanionActionEvent['options']): void => {
const srcNumber = parseInt(opt.srcNumber as string)
const trgNumber = parseInt(opt.trgNumber as string)
ndiConnection.setMtxConnection(srcNumber, trgNumber)
}
const actions: { [id in ActionId]: CompanionActionWithCallback | undefined } = {
[ActionId.SetCrossPoint]: {
label: 'Set Cross Point',
options: [
{
type: 'number',
label: 'Source number',
id: 'srcNumber',
min: 1,
max: 100,
step: 1,
required: true,
default: 1,
},
{
type: 'number',
label: 'Target number',
id: 'trgNumber',
min: 1,
max: 100,
step: 1,
required: true,
default: 1,
},
],
callback: (evt): void => entityOnOff(evt.options),
},
}
return actions
}
|
(function() {
'use strict';
angular
.module('app.usuario')
.factory('UsuarioModel', usuarioModel);
usuarioModel.$inject = ['UsuarioService', 'Notificacao'];
function usuarioModel(UsuarioService, Notificacao) {
var service = {
create : create,
find : find,
findAll : findAll,
update : update,
remove : remove
};
return service;
///////////////////
function findAll() {
var _handleFindAll = {
success: function(result) {
Notificacao.notify('usuario:find_all_success', result.data);
},
error: function(error){
Notificacao.notify('usuario:find_all_error', error.data);
console.error('UserModel : User find all error');
}
};
UsuarioService.findAll().then(_handleFindAll.success, _handleFindAll.error);
}
function find(id) {
var _handleFind = {
success: function(result){
Notificacao.notify('usuario:find_success', result.data);
},
error: function(error){
Notificacao.notify('usuario:find_error', error.data);
console.error('UserModel : User find error');
}
};
UsuarioService.find(id).then(_handleFind.success, _handleFind.error);
}
function create(data) {
var _handleCreate = {
success: function(result){
Notificacao.notify('usuario:create_success', result.data);
},
error: function(error){
Notificacao.notify('usuario:create_error', error.data);
console.error('UserModel : User create error');
}
};
UsuarioService.create(data).then(_handleCreate.success, _handleCreate.error);
}
function update(data) {
var _handleUpdate = {
success: function(result){
Notificacao.notify('usuario:update_success', result.data);
},
error: function(error){
Notificacao.notify('usuario:update_error', error.data);
console.error('UserModel : User update error');
}
};
UsuarioService.update(data).then(_handleUpdate.success, _handleUpdate.error);
}
function remove(id) {
var _handleRemove = {
success: function(result){
Notificacao.notify('usuario:remove_success', result.data);
},
error: function(error){
Notificacao.notify('usuario:remove_error', error.data);
console.error('UserModel : User remove error');
}
};
UsuarioService.remove(id).then(_handleRemove.success, _handleRemove.error);
}
}
})();
|