// lib/options.js
const path = require('path')
const metadata = require('read-metadata')
const exists = require('fs').existsSync
/**
* Read prompts metadata.
*
* @param {String} name
* @param {String} dir
* @return {Object}
*/
module.exports = function options (name, dir) {
const opts = getMetadata(name, dir)
return opts
}
/**
* Gets the metadata from either a meta.json or meta.js file.
*
* @param {String} name
* @param {String} dir
* @return {Object}
*/
function getMetadata (name, dir) {
const json = path.join(dir, `idea.${name}.meta.json`)
const js = path.join(dir, `idea.${name}.meta.js`)
let opts = {}
if (exists(json)) {
opts = metadata.sync(json)
} else if (exists(js)) {
const req = require(path.resolve(js))
if (req !== Object(req)) {
throw new Error(`idea.${name}.meta.js needs to expose an object`)
}
opts = req
}
return opts
}
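// Usage sketch (name and directory are illustrative): for name "demo" and dir
// "./templates", this resolves ./templates/idea.demo.meta.json first, falling
// back to ./templates/idea.demo.meta.js.
// const options = require('./lib/options')
// const opts = options('demo', './templates')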
|
import Foundation
struct Record {
var uid: String
var createdTime: Date
var recordId: String
var recordName: String
var recordRef: String
// Defaulted initializer: a custom init suppresses the memberwise one,
// and the labeled calls below (recordName:, recordRef:) need it.
init(uid: String = "", createdTime: Date = Date(), recordId: String = "",
     recordName: String = "", recordRef: String = "") {
    self.uid = uid
    self.createdTime = createdTime
    self.recordId = recordId
    self.recordName = recordName
    self.recordRef = recordRef
}
func checkRecordValidity() -> String {
if recordName.isEmpty && recordRef.isEmpty {
return "Record name and reference are empty."
} else if recordName.isEmpty || recordRef.isEmpty {
return "Record name/reference is empty."
} else {
return "Record is valid."
}
}
}
// Test the checkRecordValidity method
let emptyRecord = Record()
print(emptyRecord.checkRecordValidity()) // Output: Record name and reference are empty.
let invalidRecord = Record(recordName: "Name", recordRef: "")
print(invalidRecord.checkRecordValidity()) // Output: Record name/reference is empty.
let validRecord = Record(recordName: "Name", recordRef: "Reference")
print(validRecord.checkRecordValidity()) // Output: Record is valid.
|
// Custom exception class
class UnknownReaderTypeException extends IllegalArgumentException {
public UnknownReaderTypeException(String unknownType) {
super(String.format("Unable to create a Reader: Unknown Reader type in Source specification: %s", unknownType));
}
}
// Reader class
class Reader {
public void createReader(String sourceSpec) {
// Check for unknown reader type in source specification
if (sourceSpec.contains("unknownType")) {
throw new UnknownReaderTypeException("unknownType");
} else {
// Create the reader
System.out.println("Reader created successfully for source specification: " + sourceSpec);
}
}
}
// Main program to demonstrate custom exception usage
public class Main {
public static void main(String[] args) {
Reader reader = new Reader();
try {
// Attempt to create a reader with a known type
reader.createReader("knownType");
// Attempt to create a reader with an unknown type
reader.createReader("unknownType");
} catch (UnknownReaderTypeException e) {
System.out.println("Custom exception caught: " + e.getMessage());
}
}
}
|
#!/bin/bash
#params:
# - ref dpnd location
# - tested dpnd location
# - ref blockchain folder location
# - tested blockchain folder location
# - path to directory, where non-empty logs should be generated
# - stop replay at block
# - number of jobs (optional)
# - --dont-copy-config (optional), if passed config.init files are not copied from test directories
#
# WARNING: use absolute paths instead of relative!
#
# sudo ./docker_build_and_run.sh ~/dpnit/dpn/build/Release/programs/dpnd ~/dpnit/dpn/build/Release/programs/dpnd ~/dpnit/dpnd_data/dpnnet ~/dpnit/dpnd_data/dpnnet ~/dpnit/logs 5000000 12
if [ $# -lt 6 ] || [ $# -gt 8 ]
then
echo "Usage: reference_dpnd_location tested_dpnd_location ref_blockchain_folder_location tested_blockchain_folder_location"
echo " logs_dir stop_replay_at_block [jobs [--dont-copy-config]"
echo "Example: ~/dpnit/ref_dpnd ~/dpnit/dpn/build/Release/programs/dpnd ~/dpnit/dpnnet ~/dpnit/testnet"
echo " ~/dpnit/logs 5000000 12"
echo " if <jobs> not passed, <nproc> will be used."
exit -1
fi
echo $*
JOBS=0
if [ $# -ge 7 ]
then
JOBS=$7
fi
docker build -t smoketest ../ -f Dockerfile
[ $? -ne 0 ] && echo docker build FAILED && exit -1
docker system prune -f
if [ -e $5 ]; then
rm -rf $5/*
else
mkdir -p $5
fi
docker run -v $1:/reference -v $2:/tested -v $3:/ref_blockchain -v $4:/tested_blockchain -v $5:/logs_dir -v /run:/run \
-e STOP_REPLAY_AT_BLOCK=$6 -e JOBS=$JOBS -e COPY_CONFIG=$8 -p 8090:8090 -p 8091:8091 smoketest:latest
|
// ilariom/wildcat
#ifndef _WKT_SCENE_GRAPH_H
#define _WKT_SCENE_GRAPH_H
#include "managers/ECSContext.h"
#include "components/Node.h"
#include "graphics/SurfaceCache.h"
#include "graphics/Camera.h"
#include "graphics/Director.h"
#include "systems/RenderSystem.h"
#include "systems/TransformUpdateSystem.h"
#include "config.h"
#include <memory>
namespace wkt {
namespace scene
{
class SceneGraph final : public wkt::managers::ECSContext
{
public:
inline SceneGraph();
SceneGraph(const SceneGraph&) = delete;
SceneGraph(SceneGraph&&) = default;
~SceneGraph() = default;
SceneGraph& operator=(const SceneGraph&) = delete;
SceneGraph& operator=(SceneGraph&&) = default;
public:
void setRoot(std::shared_ptr<wkt::components::Node> node) { this->node = node; }
std::shared_ptr<wkt::components::Node> root() const { return this->node; }
void setDirector(wkt::gph::Director* dir) { this->dir = dir; }
wkt::gph::Director* director() const { return this->dir; }
wkt::gph::Camera& camera() { return this->cam; }
inline void runAllSystems();
inline void render();
void setActive(bool enable) { this->active = enable; }
bool isActive() const { return this->active; }
private:
std::shared_ptr<wkt::components::Node> node;
std::unique_ptr<wkt::systems::RenderSystem> renderSystem;
wkt::gph::Camera cam;
wkt::gph::Director* dir = nullptr;
bool active = true;
};
inline SceneGraph::SceneGraph()
{
// this->cam.setSize({640.f, 480.f});
// this->cam.setPosition({300.f, 0.f});
this->cam.setSize({
static_cast<float>(getStartupConfig().windowWidth),
static_cast<float>(getStartupConfig().windowHeight)
});
this->renderSystem = std::make_unique<wkt::systems::RenderSystem>();
auto& sm = systemsManager();
sm += std::make_unique<wkt::systems::TransformUpdateSystem>();
}
inline void SceneGraph::runAllSystems()
{
if(!isActive())
return;
runComponentSystems();
if(root())
{
systemsManager().run(*root());
}
}
inline void SceneGraph::render()
{
if(!root() || !isActive())
return;
this->dir->setCamera(&this->cam);
this->renderSystem->setDirector(this->dir);
this->renderSystem->bindRoot(*root());
this->renderSystem->run();
}
}}
#endif
|
SELECT u.user_id, u.username, a.first_name, a.last_name
FROM user u
INNER JOIN address a ON u.user_id = a.user_id;
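-- Hypothetical minimal schema the query above assumes (table and column names
-- inferred from the join; not part of the original):
-- CREATE TABLE user (user_id INT PRIMARY KEY, username VARCHAR(50));
-- CREATE TABLE address (address_id INT PRIMARY KEY,
--                       user_id INT REFERENCES user(user_id),
--                       first_name VARCHAR(50), last_name VARCHAR(50));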
|
import random
def coin_toss():
head = 0
tail = 0
print("Let's simulate a coin toss...")
for _ in range(5):
toss = random.randint(0,1)
if toss == 0:
head += 1
print("It's Heads")
else:
tail += 1
print("It's Tails")
print(f"Heads: {head}")
print(f"Tails: {tail}")
|
#!/usr/bin/env bash
#
# Copyright (c) 2019-2020 The Beans Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8
export HOST=s390x-linux-gnu
# The host arch is unknown, so we run the tests through qemu.
# If the host is s390x and wants to run the tests natively, it can set QEMU_USER_CMD to the empty string.
if [ -z ${QEMU_USER_CMD+x} ]; then export QEMU_USER_CMD="${QEMU_USER_CMD:-"qemu-s390x"}"; fi
export PACKAGES="python3-zmq"
if [ -n "$QEMU_USER_CMD" ]; then
# Likely cross-compiling, so install the needed gcc and qemu-user
export DPKG_ADD_ARCH="s390x"
export PACKAGES="$PACKAGES g++-s390x-linux-gnu qemu-user libc6:s390x libstdc++6:s390x libfontconfig1:s390x libxcb1:s390x"
fi
# Use debian to avoid 404 apt errors
export CONTAINER_NAME=ci_s390x
export DOCKER_NAME_TAG="debian:buster"
export RUN_UNIT_TESTS=true
export TEST_RUNNER_ENV="LC_ALL=C"
export RUN_FUNCTIONAL_TESTS=true
export GOAL="install"
export BEANS_CONFIG="--enable-reduce-exports --with-incompatible-bdb --enable-external-signer"
|
const fs = require("fs");
const path = require("path");
const { promises: fsPromises } = fs;
const contactsPath = path.join(__dirname, "./db/contacts.json");
let currentId = 10;
async function listContacts() {
await fsPromises
.readFile(contactsPath, "utf-8")
.then((contacts) => {
console.table(JSON.parse(contacts));
})
.catch((err) => console.warn(err));
}
async function getContactById(contactId) {
await fsPromises
.readFile(contactsPath, "utf-8")
.then((contacts) =>
JSON.parse(contacts).find((contact) => contact.id === contactId)
)
.then((contact) => console.table(contact))
.catch((err) => console.warn(err));
}
async function removeContact(contactId) {
await fsPromises
.readFile(contactsPath, "utf-8")
.then((contacts) => JSON.parse(contacts))
.then((contacts) => contacts.filter((contact) => contact.id !== contactId))
.then((contacts) => JSON.stringify(contacts))
.then((contacts) => fsPromises.writeFile(contactsPath, contacts))
.then(() => fsPromises.readFile(contactsPath, "utf-8"))
.then((contacts) => console.table(JSON.parse(contacts)))
.catch((err) => console.warn(err));
}
async function addContact(name, email, phone) {
currentId += 1;
await fsPromises
.readFile(contactsPath, "utf-8")
.then((contacts) => JSON.parse(contacts))
.then((contacts) => [
...contacts,
{
id: currentId,
name,
email,
phone,
},
])
.then((contacts) => JSON.stringify(contacts))
.then((contacts) => fsPromises.writeFile(contactsPath, contacts))
.then(() => fsPromises.readFile(contactsPath, "utf-8"))
.then((contacts) => console.table(JSON.parse(contacts)))
.catch((err) => console.warn(err));
}
module.exports = {
listContacts,
getContactById,
removeContact,
addContact,
};
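// Usage sketch, assuming ./db/contacts.json exists and holds an array of
// { id, name, email, phone } records (results are printed via console.table):
// const contacts = require("./contacts");
// contacts.listContacts();
// contacts.addContact("Jane Doe", "jane@example.com", "123-45-67");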
|
import React from "react"
import renderer from "react-test-renderer"
import { SectionContent } from "./section"
describe("sectionContent", () => {
it("should not add mb2 className when children is only a text string", () => {
const tree = renderer.create(<SectionContent>aaaa</SectionContent>).toJSON()
expect(tree).toMatchInlineSnapshot(`
.emotion-0 {
line-height: 1.5em;
margin-bottom: 1.45rem;
}
<div
className=" f5 section-content tj emotion-0"
>
aaaa
</div>
`)
})
it("should not add mb2 className on p when it is the last element", () => {
const tree = renderer
.create(
<SectionContent>
aaaa
<p>Test</p>
</SectionContent>
)
.toJSON()
expect(tree).toMatchInlineSnapshot(`
.emotion-0 {
line-height: 1.5em;
margin-bottom: 1.45rem;
}
<div
className=" f5 section-content tj emotion-0"
>
aaaa
<p>
Test
</p>
</div>
`)
})
it("should add mb2 className on first p", () => {
const tree = renderer
.create(
<SectionContent>
<p>Abc</p>
<p>Test</p>
</SectionContent>
)
.toJSON()
expect(tree).toMatchInlineSnapshot(`
.emotion-0 {
line-height: 1.5em;
margin-bottom: 1.45rem;
}
<div
className=" f5 section-content tj emotion-0"
>
<p
className="mb2 "
>
Abc
</p>
<p>
Test
</p>
</div>
`)
})
it("should add mb2 className and keep other provided className on first p", () => {
const tree = renderer
.create(
<SectionContent>
<p className="foo">Abc</p>
<p>Test</p>
</SectionContent>
)
.toJSON()
expect(tree).toMatchInlineSnapshot(`
.emotion-0 {
line-height: 1.5em;
margin-bottom: 1.45rem;
}
<div
className=" f5 section-content tj emotion-0"
>
<p
className="mb2 foo"
>
Abc
</p>
<p>
Test
</p>
</div>
`)
})
it("should add mb2 className on all p but last one", () => {
const tree = renderer
.create(
<SectionContent>
<p>Abc</p>
<p>345</p>
<p>678</p>
<p>Test</p>
</SectionContent>
)
.toJSON()
expect(tree).toMatchInlineSnapshot(`
.emotion-0 {
line-height: 1.5em;
margin-bottom: 1.45rem;
}
<div
className=" f5 section-content tj emotion-0"
>
<p
className="mb2 "
>
Abc
</p>
<p
className="mb2 "
>
345
</p>
<p
className="mb2 "
>
678
</p>
<p>
Test
</p>
</div>
`)
})
it("should add mb2 className on first ul", () => {
const tree = renderer
.create(
<SectionContent>
<ul>Abc</ul>
<p>Test</p>
</SectionContent>
)
.toJSON()
expect(tree).toMatchInlineSnapshot(`
.emotion-0 {
line-height: 1.5em;
margin-bottom: 1.45rem;
}
<div
className=" f5 section-content tj emotion-0"
>
<ul
className="mb2 "
>
Abc
</ul>
<p>
Test
</p>
</div>
`)
})
})
|
#!/usr/bin/env bash
#Build logos.json before commit
(cd ./source && ./buildLogos.js)
if [[ $? -ne 0 ]]; then
echo "Error building logos.json";
exit 1;
fi;
#Run directory.js before commit
node directory.js
if [[ $? -ne 0 ]]; then
echo "Error running directory.js";
exit 1;
fi;
# Build buildInfo.json
ID="$(git rev-list --count HEAD)";
cd ./static && echo "{\"buildId\": $ID}" > buildInfo.json;
# Commit
COMMIT_NAME=$(openssl dgst -sha256 buildInfo.json | sed 's/^.* //');
git add .
git commit -a -m "Commit id: $COMMIT_NAME"
|
"""
Develop a code that takes a text string as input and returns the longest word in the text string
"""
def longest_word(text):
words = text.split()
longest_word = ""
max_length = 0
for word in words:
if len(word) > max_length:
max_length = len(word)
longest_word = word
return longest_word
if __name__ == '__main__':
text = "Hello this is a random text"
print(longest_word(text))
|
#!/bin/bash
unameOut="$(uname -s)"
case "${unameOut}" in
Linux*) machine=Linux;;
Darwin*) machine=Mac;;
CYGWIN*) machine=Cygwin;;
MINGW*) machine=MinGw;;
*) machine="UNKNOWN:${unameOut}"
esac
# make sure node is installed
if ! command -v node;then
echo "Install node and npm first then re-run script"
echo "Go to https://nodejs.org/en/download/ to download and install"
exit
fi
# if yarn isn't installed install it
if ! command -v yarn;then
npm -g install yarn
fi
git clone https://github.com/elmsln/HAXcms.git
cd HAXcms/
# install docker if not installed
if ! command -v docker;then
curl -fsSL https://get.docker.com -o get-docker.sh
sudo sh get-docker.sh
fi
windows_ddev() {
# make sure chocolatey is installed
if ! command -v choco;then
echo "Please install Chocolatey then run again script again"
echo "(https://chocolatey.org/install)"
else
choco install ddev git
fi
}
linux_ddev() {
if ! command -v ddev;then
curl -LO https://raw.githubusercontent.com/drud/ddev/master/scripts/install_ddev.sh && bash install_ddev.sh
fi
}
if [ "${machine}" == "Cygwin" ]; then
windows_ddev
elif [ "${machine}" == "MinGw" ]; then
windows_ddev
else
if ! command -v docker-compose;then
sudo curl -L "https://github.com/docker/compose/releases/download/1.29.1/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
sudo ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
fi
linux_ddev
fi
|
from typing import List, Dict, Any
import instaloader
def download_instagram_posts(usernames: List[str]) -> Dict[str, Any]:
result = {}
L = instaloader.Instaloader(download_videos=False, download_video_thumbnails=False, download_geotags=False, download_comments=False, save_metadata=True, post_metadata_txt_pattern="")
for username in usernames:
try:
profile = instaloader.Profile.from_username(L.context, username)
posts = profile.get_posts()
user_posts = []
for post in posts:
post_data = {
'id': post.shortcode,
'timestamp': post.date_utc.strftime('%Y-%m-%d %H:%M:%S'),
'likes': post.likes,
'comments': post.comments,
'caption': post.caption
}
user_posts.append(post_data)
result[username] = user_posts
except instaloader.exceptions.ProfileNotExistsException:
result[username] = "Error: Profile not found"
except instaloader.exceptions.QueryReturnedNotFoundException:
result[username] = "Error: Profile not found"
except instaloader.exceptions.ConnectionException:
result[username] = "Error: Connection error"
except Exception as e:
result[username] = f"Error: {str(e)}"
return result
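# Usage sketch (requires network access; the username below is a placeholder):
if __name__ == "__main__":
    results = download_instagram_posts(["some_public_profile"])
    for user, posts in results.items():
        print(user, posts if isinstance(posts, str) else f"{len(posts)} posts")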
|
sudo apt-get update -y
sudo apt-get install git python-pip python-dev -y
vagrant_pkg_url=https://dl.bintray.com/mitchellh/vagrant/vagrant_1.7.2_x86_64.deb
wget ${vagrant_pkg_url}
sudo dpkg -i $(basename ${vagrant_pkg_url})
sudo apt-get install libxslt-dev libxml2-dev libvirt-dev build-essential qemu-utils qemu-kvm libvirt-bin virtinst -y
sudo service libvirt-bin restart
vagrant plugin install vagrant-libvirt
vagrant plugin install vagrant-mutate
precise_box_vb_url=https://cloud-images.ubuntu.com/vagrant/precise/current/precise-server-cloudimg-amd64-vagrant-disk1.box
precise_box_vb_filename=$(basename ${precise_box_vb_url})
centos65_box_vb_url=https://developer.nrel.gov/downloads/vagrant-boxes/CentOS-6.5-x86_64-v20140504.box
centos65_box_vb_filename=$(basename ${centos65_box_vb_url})
wget ${precise_box_vb_url}
wget ${centos65_box_vb_url}
mv ${precise_box_vb_filename} precise64.box
mv ${centos65_box_vb_filename} centos65.box
vagrant mutate precise64.box libvirt
vagrant mutate centos65.box libvirt
sudo pip install ansible
git clone http://git.openstack.org/openstack/compass-install
cd compass-install
function join { local IFS="$1"; shift; echo "$*"; }
if [[ ! -z $VIRT_NUMBER ]]; then
mac_array=$(ci/mac_generator.sh $VIRT_NUMBER)
mac_list=$(join , $mac_array)
echo "pxe_boot_macs: [${mac_list}]" >> install/group_vars/all
echo "test: true" >> install/group_vars/all
fi
sudo vagrant up compass_vm
if [[ $? != 0 ]]; then
sudo vagrant provision compass_vm
if [[ $? != 0 ]]; then
echo "provisioning of compass failed"
exit 1
fi
fi
echo "compass is up"
if [[ -n $mac_array ]]; then
echo "bringing up pxe boot vms"
i=0
for mac in "$mac_array"; do
virsh list |grep pxe${i}
if [[ $? == 0 ]]; then
virsh destroy pxe${i}
virsh undefine pxe${i}
fi
virsh list --all |grep pxe${i}
if [[ $? == 0 ]]; then
virsh undefine pxe${i}
fi
echo "creating vm disk for instance pxe${i}"
sudo qemu-img create -f raw /home/pxe${i}.raw ${VIRT_DISK}
sudo virt-install --accelerate --hvm --connect qemu:///system \
--name pxe$i --ram=$VIRT_MEM --pxe --disk /home/pxe$i.raw,format=raw \
--vcpus=$VIRT_CPUS --graphics vnc,listen=0.0.0.0 \
--network=bridge:virbr2,mac=$mac \
--network=bridge:virbr2 \
--network=bridge:virbr2 \
--network=bridge:virbr2 \
--noautoconsole --autostart --os-type=linux --os-variant=rhel6
if [[ $? != 0 ]]; then
echo "launching pxe${i} failed"
exit 1
fi
echo "checking pxe${i} state"
state=$(virsh domstate pxe${i})
if [[ "$state" == "running" ]]; then
echo "pxe${i} is running"
sudo virsh destroy pxe${i}
fi
echo "add network boot option and make pxe${i} reboot if failing"
sudo sed -i "/<boot dev='hd'\/>/ a\ <boot dev='network'\/>" /etc/libvirt/qemu/pxe${i}.xml
sudo sed -i "/<boot dev='network'\/>/ a\ <bios useserial='yes' rebootTimeout='0'\/>" /etc/libvirt/qemu/pxe${i}.xml
sudo virsh define /etc/libvirt/qemu/pxe${i}.xml
sudo virsh start pxe${i}
let i=i+1
done
fi
rm -rf compass-core
git clone http://git.openstack.org/openstack/compass-core -b dev/experimental
cd compass-core
virtualenv venv
source venv/bin/activate
pip install -e .
if [[ ! -d /var/log/compass ]]; then
sudo mkdir /var/log/compass
sudo chmod -R 777 /var/log/compass
fi
if [[ ! -d /etc/compass ]]; then
sudo mkdir /etc/compass
sudo cp -rf conf/setting /etc/compass/.
fi
cp bin/switch_virtualenv.py.template bin/switch_virtualenv.py
sed -i "s|\$PythonHome|$VIRTUAL_ENV|g" /opt/compass/bin/switch_virtualenv.py
source ../compass-install/ci/allinone.conf
bin/client.py --logfile= --loglevel=debug --logdir= --compass_server="${COMPASS_SERVER_URL}" \
--compass_user_email="${COMPASS_USER_EMAIL}" --compass_user_password="${COMPASS_USER_PASSWORD}" \
--cluster_name="${CLUSTER_NAME}" --language="${LANGUAGE}" --timezone="${TIMEZONE}" \
--hostnames="${HOSTNAMES}" --partitions="${PARTITIONS}" --subnets="${SUBNETS}" \
--adapter_os_pattern="${ADAPTER_OS_PATTERN}" --adapter_name="${ADAPTER_NAME}" \
--adapter_flavor_pattern="${ADAPTER_FLAVOR_PATTERN}" \
--http_proxy="${PROXY}" --https_proxy="${PROXY}" --no_proxy="${IGNORE_PROXY}" \
--ntp_server="${NTP_SERVER}" --dns_servers="${NAMESERVERS}" --domain="${DOMAIN}" \
--search_path="${SEARCH_PATH}" --default_gateway="${GATEWAY}" \
--server_credential="${SERVER_CREDENTIAL}" --local_repo_url="${LOCAL_REPO_URL}" \
--os_config_json_file="${OS_CONFIG_FILENAME}" --service_credentials="${SERVICE_CREDENTIALS}" \
--console_credentials="${CONSOLE_CREDENTIALS}" --host_networks="${HOST_NETWORKS}" \
--network_mapping="${NETWORK_MAPPING}" --package_config_json_file="${PACKAGE_CONFIG_FILENAME}" \
--host_roles="${HOST_ROLES}" --default_roles="${DEFAULT_ROLES}" --switch_ips="${SWITCH_IPS}" \
--machines="${machines}" --switch_credential="${SWITCH_CREDENTIAL}" \
--deployment_timeout="${DEPLOYMENT_TIMEOUT}" --${POLL_SWITCHES_FLAG} --dashboard_url="${DASHBOARD_URL}"
#sudo vagrant up regtest_vm
#if [[ $? != 0 ]]; then
# sudo vagrant provision regtest_vm
# if [[ $? != 0 ]]; then
# echo "deployment of cluster failed"
# exit 1
# fi
#fi
#echo "deployment of cluster complete"
|
#include <string>
#include <system_error>
namespace detail {
template<typename String>
struct string_traits {
static const char* c_str(const String& str) {
return str.c_str();
}
};
}
void binder(const char* host, unsigned port, std::error_code& ec) {
// Implementation of the binder function is not provided
// Assume it binds the network socket to the specified host and port
// and sets the error code if any error occurs
}
template<typename String>
void bind(const String& host, unsigned port, std::error_code& ec) noexcept {
binder(detail::string_traits<String>::c_str(host), port, ec);
}
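// Minimal usage sketch: binder above is a stub, so this only demonstrates the
// call shape. Any string type exposing c_str() works through string_traits.
#include <iostream>
int main() {
    std::error_code ec;
    std::string host = "127.0.0.1"; // placeholder host for illustration
    bind(host, 8080, ec);
    std::cout << (ec ? ec.message() : "bound") << "\n";
    return 0;
}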
|
import {
GET_ALL_EMPLOYEE_START,
GET_ALL_EMPLOYEE_SUCCESS,
GET_ALL_EMPLOYEE_FAIL,
GET_ALL_EMPLOYEE_RESOLVE,
GET_EMPLOYEE_START,
GET_EMPLOYEE_SUCCESS,
GET_EMPLOYEE_FAIL,
GET_EMPLOYEE_RESOLVE,
ADD_EMPLOYEE_START,
ADD_EMPLOYEE_SUCCESS,
ADD_EMPLOYEE_FAIL,
ADD_EMPLOYEE_RESOLVE,
EDIT_EMPLOYEE_START,
EDIT_EMPLOYEE_SUCCESS,
EDIT_EMPLOYEE_FAIL,
EDIT_EMPLOYEE_RESOLVE,
DELETE_EMPLOYEE_START,
DELETE_EMPLOYEE_SUCCESS,
DELETE_EMPLOYEE_FAIL,
DELETE_EMPLOYEE_RESOLVE,
} from '../actions/employeeActions';
// Initial Employee State
export const initialEmployeeState = {
employees: [],
employee: null,
status: 'Resolved',
change: '',
error: '',
};
export const employeeReducer = (state = initialEmployeeState, action) => {
switch (action.type) {
//Get All Employees
case GET_ALL_EMPLOYEE_START:
return {
...state,
status: 'Pending...',
};
case GET_ALL_EMPLOYEE_SUCCESS:
// console.log(action.payload, 'payload inside reducer');
return {
...state,
employees: action.payload,
status: 'Success',
};
case GET_ALL_EMPLOYEE_FAIL:
return {
...state,
status: 'Failed',
error: action.payload,
};
case GET_ALL_EMPLOYEE_RESOLVE:
// console.log(state.employees, 'state inside reducer');
return {
...state,
status: 'Resolved',
};
// Get Employee By Id
case GET_EMPLOYEE_START:
return {
...state,
status: 'Pending...',
};
case GET_EMPLOYEE_SUCCESS:
return {
...state,
employee: action.payload,
status: 'Success',
};
case GET_EMPLOYEE_FAIL:
return {
...state,
status: 'Failed',
error: action.payload,
};
case GET_EMPLOYEE_RESOLVE:
return {
...state,
status: 'Resolved',
};
//Add Employee
case ADD_EMPLOYEE_START:
return {
...state,
status: 'Pending...',
};
case ADD_EMPLOYEE_SUCCESS:
return {
...state,
status: 'Success',
change: 'added',
};
case ADD_EMPLOYEE_FAIL:
return {
...state,
status: 'Failed',
error: action.payload,
};
case ADD_EMPLOYEE_RESOLVE:
return {
...state,
status: 'Resolved',
change: '',
};
//Edit Employee
case EDIT_EMPLOYEE_START:
return {
...state,
status: 'Pending...',
};
case EDIT_EMPLOYEE_SUCCESS:
return {
...state,
status: 'Success',
change: 'edited',
};
case EDIT_EMPLOYEE_FAIL:
return {
...state,
status: 'Failed',
error: action.payload,
};
case EDIT_EMPLOYEE_RESOLVE:
return {
...state,
status: 'Resolved',
change: '',
};
// Delete Employee
case DELETE_EMPLOYEE_START:
return {
...state,
status: 'Pending...',
};
case DELETE_EMPLOYEE_SUCCESS:
return {
...state,
status: 'Success',
change: 'deleted',
};
case DELETE_EMPLOYEE_FAIL:
return {
...state,
status: 'Failed',
error: action.payload,
};
case DELETE_EMPLOYEE_RESOLVE:
return {
...state,
status: 'Resolved',
change: '',
};
//Default
default:
return state;
}
};
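// Usage sketch: the reducer is pure, so a transition can be exercised without a
// store (payload shape assumed for illustration):
// let state = employeeReducer(undefined, { type: GET_ALL_EMPLOYEE_START });
// state = employeeReducer(state, { type: GET_ALL_EMPLOYEE_SUCCESS, payload: [{ id: 1 }] });
// console.log(state.status); // 'Success'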
|
import {_, Autowired, Component, PostConstruct} from "@ag-grid-community/core";
import {ChartMenu} from "./menu/chartMenu";
import {Chart} from "ag-charts-community";
import {ChartTranslator} from "./chartTranslator";
import {ChartProxy} from "./chartProxies/chartProxy";
type BBox = { x: number; y: number; width: number; height: number };
export class TitleEdit extends Component {
private static TEMPLATE = /* html */
`<input
class="ag-chart-title-edit"
style="padding:0; border:none; border-radius: 0; min-height: 0; text-align: center;" />
`;
@Autowired('chartTranslator') private chartTranslator: ChartTranslator;
private chartProxy: ChartProxy<Chart, any>;
private destroyableChartListeners: (() => void)[];
constructor(private readonly chartMenu: ChartMenu) {
super(TitleEdit.TEMPLATE);
}
@PostConstruct
public init(): void {
this.addManagedListener(this.getGui(), 'keypress', (e: KeyboardEvent) => {
if (e.key === 'Enter') {
this.endEditing();
}
});
this.addManagedListener(this.getGui(), 'blur', this.endEditing.bind(this));
}
/* should be called when the containing component changes to a new chart proxy */
public setChartProxy(chartProxy: ChartProxy<Chart, any>) {
if (this.chartProxy) {
for (let i = 0; i < this.destroyableChartListeners.length; i++) {
this.destroyableChartListeners[i]();
}
this.destroyableChartListeners = [];
}
this.chartProxy = chartProxy;
const chart = this.chartProxy.getChart();
const canvas = chart.scene.canvas.element;
const destroyDbleClickListener = this.addManagedListener(canvas, 'dblclick', event => {
const { title } = chart;
if (title && title.node.containsPoint(event.offsetX, event.offsetY)) {
const bbox = title.node.computeBBox();
const xy = title.node.inverseTransformPoint(bbox.x, bbox.y);
this.startEditing({ ...bbox, ...xy });
}
});
const destroyMouseMoveListener = this.addManagedListener(canvas, 'mousemove', event => {
const { title } = chart;
const inTitle = title && title.node.containsPoint(event.offsetX, event.offsetY);
canvas.style.cursor = inTitle ? 'pointer' : '';
});
this.destroyableChartListeners = [
destroyDbleClickListener,
destroyMouseMoveListener
];
}
private startEditing(titleBBox: BBox): void {
if (this.chartMenu && this.chartMenu.isVisible()) {
// currently we ignore requests to edit the chart title while the chart menu is showing
// because the click to edit the chart will also close the chart menu, making the position
// of the title change.
return;
}
const minimumTargetInputWidth: number = 300;
const maximumInputWidth: number = this.chartProxy.getChart().width;
const inputWidth = Math.max(Math.min(titleBBox.width + 20, maximumInputWidth), minimumTargetInputWidth);
const inputElement = this.getGui() as HTMLInputElement;
_.addCssClass(inputElement, 'currently-editing');
const inputStyle = inputElement.style;
// match style of input to title that we're editing
inputStyle.fontFamily = this.chartProxy.getTitleOption('fontFamily');
inputStyle.fontWeight = this.chartProxy.getTitleOption('fontWeight');
inputStyle.fontStyle = this.chartProxy.getTitleOption('fontStyle');
inputStyle.fontSize = this.chartProxy.getTitleOption('fontSize') + 'px';
inputStyle.color = this.chartProxy.getTitleOption('color');
// populate the input with the title, unless the title is the placeholder:
const oldTitle = this.chartProxy.getTitleOption('text');
const inputValue = oldTitle === this.chartTranslator.translate('titlePlaceholder') ? '' : oldTitle;
inputElement.value = inputValue;
const inputRect = inputElement.getBoundingClientRect();
inputStyle.left = Math.round(titleBBox.x + titleBBox.width / 2 - inputWidth / 2) + 'px';
inputStyle.top = Math.round(titleBBox.y + titleBBox.height / 2 - inputRect.height / 2) + 'px';
inputStyle.width = Math.round(inputWidth) + 'px';
inputElement.focus();
}
private endEditing(): void {
const value = (this.getGui() as HTMLInputElement).value;
this.chartProxy.setTitleOption('text', value);
this.eventService.dispatchEvent({'type': 'chartTitleEdit'});
_.removeCssClass(this.getGui(), 'currently-editing');
}
}
|
#!/bin/bash
# Copyright 2018-2020 Daniel Povey
# 2018-2020 Yiming Wang
# This recipe uses E2E LF-MMI training which doesn't require GMM training to obtain alignments.
# Its performance is slightly better than those based on alignments (cross-entropy or regular LF-MMI)
# on this dataset.
stage=0
. ./cmd.sh
. ./path.sh
. utils/parse_options.sh
set -euo pipefail
if [ $stage -le 0 ]; then
local/mobvoi_data_download.sh
echo "$0: Extracted all datasets into data/download/"
fi
if [ $stage -le 1 ]; then
echo "$0: Splitting datasets..."
local/split_datasets.sh
echo "$0: text and utt2spk have been generated in data/{train|dev|eval}."
fi
if [ $stage -le 2 ]; then
echo "$0: Preparing wav.scp..."
local/prepare_wav.py data
echo "wav.scp has been generated in data/{train|dev|eval}."
fi
if [ $stage -le 3 ]; then
echo "$0: Extracting MFCC..."
for folder in train dev eval; do
dir=data/$folder
utils/fix_data_dir.sh $dir
steps/make_mfcc.sh --cmd "$train_cmd" --nj 16 $dir
steps/compute_cmvn_stats.sh $dir
utils/fix_data_dir.sh $dir
utils/data/get_utt2dur.sh $dir
utils/validate_data_dir.sh $dir
done
fi
if [ $stage -le 4 ]; then
echo "$0: Post processing transcripts..."
for folder in train dev eval; do
dir=data/$folder
export LC_ALL=en_US.UTF-8
cat $dir/text | awk '{if ($2=="嗨小问" || $2=="嗨小问嗨小问") {print $1,"嗨小问";} else {print $1,"FREETEXT"}}' > $dir/text.tmp || exit 1
export LC_ALL=C
cat $dir/text.tmp > $dir/text || exit 1
rm -f $dir/text.tmp 2>/dev/null || true
done
fi
if [ $stage -le 5 ]; then
echo "$0: Preparing dictionary and lang..."
local/prepare_dict.sh
utils/prepare_lang.sh --num-sil-states 1 --num-nonsil-states 4 --sil-prob 0.5 \
--position-dependent-phones false \
data/local/dict "<sil>" data/lang/temp data/lang
fi
if [ $stage -le 6 ]; then
id_sil=`cat data/lang/words.txt | grep "<sil>" | awk '{print $2}'`
id_freetext=`cat data/lang/words.txt | grep "FREETEXT" | awk '{print $2}'`
export LC_ALL=en_US.UTF-8
id_word=`cat data/lang/words.txt | grep "嗨小问" | awk '{print $2}'`
export LC_ALL=C
mkdir -p data/lang/lm
cat <<EOF > data/lang/lm/fst.txt
0 1 $id_sil $id_sil
0 4 $id_sil $id_sil 7.0
1 4 $id_freetext $id_freetext 0.0
4 0 $id_sil $id_sil
1 2 $id_word $id_word 1.1
2 0 $id_sil $id_sil
0
EOF
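# Each fst.txt line is OpenFst text format: "src dst ilabel olabel [cost]";
# the trailing line with a lone state id marks that state as final.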
fstcompile data/lang/lm/fst.txt data/lang/G.fst
set +e
fstisstochastic data/lang/G.fst
set -e
utils/validate_lang.pl data/lang
fi
if [ $stage -le 7 ]; then
echo "$0: subsegmenting for the training data..."
srcdir=data/train
utils/data/convert_data_dir_to_whole.sh $srcdir ${srcdir}_whole
utils/data/get_segments_for_data.sh $srcdir > ${srcdir}_whole/segments
utils/filter_scp.pl <(awk '{if ($2 == "FREETEXT") print $1}' ${srcdir}_whole/text) \
${srcdir}_whole/segments >${srcdir}_whole/neg_segments
utils/filter_scp.pl --exclude ${srcdir}_whole/neg_segments ${srcdir}_whole/segments \
>${srcdir}_whole/pos_segments
utils/filter_scp.pl ${srcdir}_whole/pos_segments ${srcdir}_whole/utt2dur >${srcdir}_whole/pos_utt2dur
local/get_random_subsegments.py --overlap-duration=0.3 --max-remaining-duration=0.3 \
${srcdir}_whole/neg_segments ${srcdir}_whole/pos_utt2dur | \
cat ${srcdir}_whole/pos_segments - | sort >${srcdir}_whole/sub_segments
utils/data/subsegment_data_dir.sh ${srcdir}_whole \
${srcdir}_whole/sub_segments data/train_segmented
awk '{print $1,$2}' ${srcdir}_whole/sub_segments | \
utils/apply_map.pl -f 2 ${srcdir}_whole/text >data/train_segmented/text
utils/data/extract_wav_segments_data_dir.sh --nj 50 --cmd "$train_cmd" \
data/train_segmented data/train_shorter
steps/compute_cmvn_stats.sh data/train_shorter
utils/fix_data_dir.sh data/train_shorter
utils/validate_data_dir.sh data/train_shorter
fi
# In this section, we augment the training data with reverberation,
# noise, music, and babble, and combine it with the clean data.
if [ $stage -le 8 ]; then
utils/data/get_utt2dur.sh data/train_shorter
cp data/train_shorter/utt2dur data/train_shorter/reco2dur
# Download the package that includes the real RIRs, simulated RIRs, isotropic noises and point-source noises
[ ! -f rirs_noises.zip ] && wget --no-check-certificate http://www.openslr.org/resources/28/rirs_noises.zip
[ ! -d "RIRS_NOISES" ] && unzip rirs_noises.zip
# Make a version with reverberated speech
rvb_opts=()
rvb_opts+=(--rir-set-parameters "0.5, RIRS_NOISES/simulated_rirs/smallroom/rir_list")
rvb_opts+=(--rir-set-parameters "0.5, RIRS_NOISES/simulated_rirs/mediumroom/rir_list")
# Make a reverberated version of the train_shorter list. Note that we don't add any
# additive noise here.
steps/data/reverberate_data_dir.py \
"${rvb_opts[@]}" \
--speech-rvb-probability 1 \
--prefix "rev" \
--pointsource-noise-addition-probability 0 \
--isotropic-noise-addition-probability 0 \
--num-replications 1 \
--source-sampling-rate 16000 \
data/train_shorter data/train_shorter_reverb
cat data/train_shorter/utt2dur | awk -v name=rev1 '{print name"-"$0}' >data/train_shorter_reverb/utt2dur
# Prepare the MUSAN corpus, which consists of music, speech, and noise
# suitable for augmentation.
steps/data/make_musan.sh /export/corpora/JHU/musan data
# Get the duration of the MUSAN recordings. This will be used by the
# script augment_data_dir.py.
for name in speech noise music; do
utils/data/get_utt2dur.sh data/musan_${name}
cp data/musan_${name}/utt2dur data/musan_${name}/reco2dur
done
# Augment with musan_noise
export LC_ALL=en_US.UTF-8
steps/data/augment_data_dir.py --utt-prefix "noise" --modify-spk-id true --fg-interval 1 --fg-snrs "15:10:5:0" --fg-noise-dir "data/musan_noise" data/train_shorter data/train_shorter_noise
# Augment with musan_music
steps/data/augment_data_dir.py --utt-prefix "music" --modify-spk-id true --bg-snrs "15:10:8:5" --num-bg-noises "1" --bg-noise-dir "data/musan_music" data/train_shorter data/train_shorter_music
# Augment with musan_speech
steps/data/augment_data_dir.py --utt-prefix "babble" --modify-spk-id true --bg-snrs "20:17:15:13" --num-bg-noises "3:4:5:6:7" --bg-noise-dir "data/musan_speech" data/train_shorter data/train_shorter_babble
export LC_ALL=C
fi
if [ $stage -le 9 ]; then
# Now make MFCC features
for name in reverb noise music babble; do
steps/make_mfcc.sh --nj 16 --cmd "$train_cmd" \
data/train_shorter_${name} || exit 1;
steps/compute_cmvn_stats.sh data/train_shorter_${name}
utils/fix_data_dir.sh data/train_shorter_${name}
utils/validate_data_dir.sh data/train_shorter_${name}
done
fi
combined_train_set=train_shorter_combined
aug_affix="reverb noise music babble"
if [ $stage -le 10 ]; then
eval utils/combine_data.sh data/${combined_train_set} data/train_shorter_{$(echo $aug_affix | sed 's/ /,/g')}
fi
fi
if [ -f data/${combined_train_set}_spe2e_hires/feats.scp ]; then
echo "$0: It seems that features for the perturbed training data already exist."
echo "If you want to extract them anyway, remove them first and run this"
echo "stage again. Skipping this stage..."
else
if [ $stage -le 11 ]; then
echo "$0: perturbing the training data to allowed lengths..."
utils/data/get_utt2dur.sh data/${combined_train_set} # necessary for the next command
# 12 in the following command means the allowed lengths are spaced
# by 12% change in length.
utils/data/perturb_speed_to_allowed_lengths.py --speed-perturb false 12 data/${combined_train_set} \
data/${combined_train_set}_e2e_hires
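# E.g. for a base length l, the allowed lengths are roughly l, 1.12*l,
# 1.12^2*l, ..., so each utterance is perturbed to the nearest allowed length.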
cat data/${combined_train_set}_e2e_hires/utt2dur | \
awk '{print $1 " " substr($1,5)}' >data/${combined_train_set}_e2e_hires/utt2uniq.tmp
utils/apply_map.pl -f 2 data/${combined_train_set}/utt2uniq \
<data/${combined_train_set}_e2e_hires/utt2uniq.tmp >data/${combined_train_set}_e2e_hires/utt2uniq
rm -f data/${combined_train_set}_e2e_hires/utt2uniq.tmp 2>/dev/null || true
utils/fix_data_dir.sh data/${combined_train_set}_e2e_hires
utils/data/get_utt2dur.sh data/train_shorter # necessary for the next command
utils/data/perturb_speed_to_allowed_lengths.py 12 data/train_shorter data/train_shorter_spe2e_hires
cat data/train_shorter_spe2e_hires/utt2dur | \
awk '{print $1 " " substr($1,5)}' >data/train_shorter_spe2e_hires/utt2uniq
utils/fix_data_dir.sh data/train_shorter_spe2e_hires
utils/combine_data.sh data/${combined_train_set}_spe2e_hires data/${combined_train_set}_e2e_hires data/train_shorter_spe2e_hires
cat data/train_shorter_spe2e_hires/allowed_lengths.txt >data/${combined_train_set}_spe2e_hires/allowed_lengths.txt
fi
if [ $stage -le 12 ]; then
echo "$0: extracting MFCC features for the training data..."
mfccdir=data/${combined_train_set}_spe2e_hires/data
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $mfccdir/storage ]; then
utils/create_split_dir.pl /export/b0{5,6,7,8}/$USER/kaldi-data/egs/mobvoi-$(date +'%m_%d_%H_%M')/v1/$mfccdir/storage $mfccdir/storage
fi
steps/make_mfcc.sh --nj 50 --mfcc-config conf/mfcc_hires.conf \
--cmd "$train_cmd" \
data/${combined_train_set}_spe2e_hires || exit 1;
steps/compute_cmvn_stats.sh data/${combined_train_set}_spe2e_hires || exit 1;
utils/fix_data_dir.sh data/${combined_train_set}_spe2e_hires
utils/validate_data_dir.sh data/${combined_train_set}_spe2e_hires
fi
fi
if [ $stage -le 13 ]; then
if [ -f data/eval_hires/feats.scp ]; then
echo "$0: It seems that features for the test sets already exist."
echo "skipping this stage..."
else
echo "$0: extracting MFCC features for the test sets"
for datadir in dev eval; do
utils/copy_data_dir.sh data/$datadir data/${datadir}_hires
steps/make_mfcc.sh --nj 50 --mfcc-config conf/mfcc_hires.conf \
--cmd "$train_cmd" data/${datadir}_hires || exit 1;
steps/compute_cmvn_stats.sh data/${datadir}_hires || exit 1;
utils/fix_data_dir.sh data/${datadir}_hires || exit 1;
done
fi
fi
if [ $stage -le 14 ]; then
local/chain/run_e2e_tdnn.sh --train-set ${combined_train_set}_spe2e
fi
combined_train_set=train_shorter_sp_combined
if [ -f data/${combined_train_set}_hires/feats.scp ]; then
echo "$0: It seems that features for the perturbed training data already exist."
echo "If you want to extract them anyway, remove them first and run this"
echo "stage again. Skipping this stage..."
else
if [ $stage -le 15 ]; then
echo "$0: preparing for speed-perturbed data"
utils/data/perturb_data_dir_speed_3way.sh data/train_shorter data/train_shorter_sp_hires
echo "$0: creating high-resolution MFCC features for speed-perturbed data"
mfccdir=data/train_shorter_sp_hires/data
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $mfccdir/storage ]; then
utils/create_split_dir.pl /export/b0{5,6,7,8}/$USER/kaldi-data/egs/mobvoi-$(date +'%m_%d_%H_%M')/v1/$mfccdir/storage $mfccdir/storage
fi
# do volume-perturbation on the training data prior to extracting hires
# features; this helps make trained nnets more invariant to test data volume.
utils/data/perturb_data_dir_volume.sh data/train_shorter_sp_hires || exit 1;
steps/make_mfcc.sh --nj 50 --mfcc-config conf/mfcc_hires.conf \
--cmd "$train_cmd" data/train_shorter_sp_hires || exit 1;
steps/compute_cmvn_stats.sh data/train_shorter_sp_hires || exit 1;
utils/fix_data_dir.sh data/train_shorter_sp_hires || exit 1;
fi
if [ $stage -le 16 ]; then
for name in $aug_affix; do
echo "$0: creating high-resolution MFCC features for train_shorter_${name}"
mfccdir=data/train_shorter_${name}_hires/data
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $mfccdir/storage ]; then
utils/create_split_dir.pl /export/b0{5,6,7,8}/$USER/kaldi-data/egs/mobvoi-$(date +'%m_%d_%H_%M')/v1/$mfccdir/storage $mfccdir/storage
fi
utils/copy_data_dir.sh data/train_shorter_${name} data/train_shorter_${name}_hires
steps/make_mfcc.sh --nj 50 --mfcc-config conf/mfcc_hires.conf \
--cmd "$train_cmd" data/train_shorter_${name}_hires || exit 1;
steps/compute_cmvn_stats.sh data/train_shorter_${name}_hires || exit 1;
utils/fix_data_dir.sh data/train_shorter_${name}_hires || exit 1;
done
eval utils/combine_data.sh data/${combined_train_set}_hires data/train_shorter_sp_hires \
data/train_shorter_{$(echo $aug_affix | sed 's/ /,/g')}_hires
fi
fi
if [ $stage -le 17 ]; then
echo "$0: Aligning the training data using the e2e chain model..."
steps/nnet3/align.sh --nj 50 --cmd "$train_cmd" \
--use-gpu false \
--scale-opts '--transition-scale=1.0 --self-loop-scale=1.0 --acoustic-scale=1.0' \
data/${combined_train_set}_hires data/lang exp/chain/e2e_tdnn_1a exp/chain/e2e_ali_${combined_train_set}
fi
if [ $stage -le 18 ]; then
echo "$0: Building a tree and training a regular chain model using the e2e alignments..."
local/chain/run_tdnn_e2eali.sh --train-set ${combined_train_set} --e2echain-model-dir exp/chain/e2e_tdnn_1a
fi
exit 0
|
#!/bin/sh
docker-compose up -d
sleep 10
sensible-browser http://localhost:18080/zap
|
package ru.job4j.user;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Set;
import java.util.TreeSet;
import java.util.List;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* SortUserTest
*
* @author <NAME> (<EMAIL>)
* @version $Id$
* @since 0.1
*/
public class SortUserTest {
@Test
public void whenNoAnyUsersThenEmptyTreeSet() {
List<User> users = new ArrayList<User>();
Set<User> tree = new TreeSet<>(new SortUser().sort(users));
assertThat(tree.isEmpty(), is(true));
}
@Test
public void whenOneUserThenOneElementInTreeSet() {
List<User> users = new ArrayList<User>();
users.add(new User("Maria", 21));
Set<User> tree = new TreeSet<>(new SortUser().sort(users));
Set<User> expected = Set.of(
new User("Maria", 21)
);
assertThat(expected.equals(tree), is(true));
}
@Test
public void whenTwoAgeSortedUsersThenTwoAgeSortedUsersInTreeSet() {
List<User> users = new ArrayList<User>();
users.add(new User("Maria", 21));
users.add(new User("Ivan", 27));
Set<User> tree = new TreeSet<>(new SortUser().sort(users));
Set<User> expected = Set.of(
new User("Maria", 21),
new User("Ivan", 27)
);
assertThat(expected.equals(tree), is(true));
}
@Test
public void whenTwoNonAgeSortedUsersThenTwoAgeSortedUsersInTreeSet() {
List<User> users = new ArrayList<User>();
users.add(new User("Ivan", 27));
users.add(new User("Maria", 21));
Set<User> tree = new TreeSet<>(new SortUser().sort(users));
Set<User> expected = Set.of(
new User("Maria", 21),
new User("Ivan", 27)
);
assertThat(expected.equals(tree), is(true));
}
@Test
public void whenThreeNonAgeSortedUsersThenThreeAgeSortedUsersInTreeSet() {
List<User> users = new ArrayList<User>();
users.add(new User("Ivan", 27));
users.add(new User("Maria", 21));
users.add(new User("Nick", 32));
Set<User> tree = new TreeSet<>(new SortUser().sort(users));
Set<User> expected = Set.of(
new User("Maria", 21),
new User("Ivan", 27),
new User("Nick", 32)
);
assertThat(expected.equals(tree), is(true));
}
@Test
public void whenTwoUsersWithEqualAgeThenReturnedInSetOnlyFirstUser() {
List<User> users = new ArrayList<>();
users.add(new User("Jim", 30));
users.add(new User("Vlad", 30));
Set<User> tree = new TreeSet<>(new SortUser().sort(users));
Set<User> expected = Set.of(
new User("Jim", 30)
);
assertThat(expected.equals(tree), is(true));
}
@Test
public void whenThreeUsersWithDifferentNamesThenSortedByNameLength() {
List<User> users = new ArrayList<User>();
users.add(new User("Karina"));
users.add(new User("Jim"));
users.add(new User("Vlad"));
new SortUser().sortNameLength(users);
List<User> expected = List.of(
new User("Jim"),
new User("Vlad"),
new User("Karina")
);
assertThat(expected.equals(users), is(true));
}
@Test
public void whenFourUsersWithDifferentNamesAndAgesThenSortedByAllFields() {
List<User> users = new ArrayList<User>();
users.add(new User("Sergey", 25));
users.add(new User("Ivan", 30));
users.add(new User("Sergey", 20));
users.add(new User("Ivan", 25));
new SortUser().sortByAllFields(users);
List<User> expected = List.of(
new User("Ivan", 25),
new User("Ivan", 30),
new User("Sergey", 20),
new User("Sergey", 25)
);
assertThat(expected.equals(users), is(true));
}
}
|
cd
cd documents/github/striper-snake/run
open engine_run.command
open snake_run.command
open http://0.0.0.0:3010
|
package org.egovframe.rte.fdl.cmmn.aspectj;
import java.util.Date;
public class Order {
public int orderId;
public String orderStatus;
public String securityCode;
public String description;
public Date orderDate;
public int getOrderId() {
return orderId;
}
public void setOrderId(int orderId) {
this.orderId = orderId;
}
public String getOrderStatus() {
return orderStatus;
}
public void setOrderStatus(String orderStatus) {
this.orderStatus = orderStatus;
}
public String getSecurityCode() {
return securityCode;
}
public void setSecurityCode(String securityCode) {
this.securityCode = securityCode;
}
public Date getOrderDate() {
return orderDate;
}
public void setOrderDate(Date orderDate) {
this.orderDate = orderDate;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
|
package dbr.antoine.pixviewer.features.common;
/**
* Created by antoine on 7/7/17.
*/
public interface Presenter {
void register();
void unregister();
}
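// Illustrative implementation sketch (EmployeePresenter is hypothetical):
// class EmployeePresenter implements Presenter {
//     @Override public void register() { /* attach view, subscribe to events */ }
//     @Override public void unregister() { /* detach view, avoid leaks */ }
// }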
|
#!/bin/bash
set -e
export GTEST_COLOR=1
export CTEST_OUTPUT_ON_FAILURE=true
CMAKE_LINKER_OPTS="-DCMAKE_EXE_LINKER_FLAGS='-fuse-ld=gold'"
CMAKE_CONFIG_OPTS="-DHUNTER_CONFIGURATION_TYPES=Debug -DCMAKE_BUILD_TYPE=Debug"
CMAKE_TOOLCHAIN_OPTS="-DCMAKE_TOOLCHAIN_FILE='`pwd`/tools/polly/gcc-pic-cxx17.cmake'"
CMAKE_OPTS="$CMAKE_LINKER_OPTS $CMAKE_CONFIG_OPTS $CMAKE_TOOLCHAIN_OPTS"
cmake -H. -B_builds $CMAKE_OPTS -DBUILD_COVERAGE=ON
cmake --build _builds
cmake --build _builds --target test
cmake --build _builds --target gcov
cmake --build _builds --target lcov
gcovr -r .
|
package com.kafka.consumer.avro;
import java.io.ByteArrayInputStream;
import java.util.Map;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.kafka.common.serialization.Deserializer;
public class AvroValueDeserializer implements Deserializer<GenericRecord> {
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
}
@Override
public GenericRecord deserialize(String topic, byte[] data) {
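// NOTE: without a writer schema, GenericDatumReader cannot decode raw binary
// Avro; a real deployment would supply the schema (e.g. in configure()) first.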
GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
GenericRecord record = null;
try (ByteArrayInputStream bis = new ByteArrayInputStream(data)) {
BinaryDecoder binaryDecoder = DecoderFactory.get().binaryDecoder(bis, null);
record = datumReader.read(null, binaryDecoder);
return record;
} catch (Exception e) {
e.printStackTrace();
}
return record;
}
@Override
public void close() {
}
}
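// Wiring sketch using the standard Kafka client API (topic name is illustrative):
// Properties props = new Properties();
// props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroValueDeserializer.class.getName());
// KafkaConsumer<String, GenericRecord> consumer = new KafkaConsumer<>(props);
// consumer.subscribe(Collections.singletonList("avro-topic"));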
|
document.addEventListener("DOMContentLoaded", function () {
$(".top-menu-tools-settingbutton").on("click", function () {
var req = new XMLHttpRequest();
req.onerror = function () {
};
req.onload = function () {
if (req.readyState === 4) {
window.location = "http://localhost/ExchangeWebsite/backend/login.php";
}
};
req.open("GET","http://localhost/ExchangeWebsite/backend/auth.php?command=logout");
req.send();
});
$('.bottom-menu-item').on("click", function () {
$('.bottom-menu-item').removeClass("bottom-menu-item-active");
$(this).toggleClass("bottom-menu-item-active");
});
});
function validurl(url) {
try {
new URL(url); // the URL constructor throws on invalid input
return true;
} catch {
return false;
}
}
|
/*
TITLE Rectangle and Polygone Chapter12Exercise1.cpp
Bjarne Stroustrup "Programming: Principles and Practice Using C++"
COMMENT
Objective: Draw a rectangle using class Rectangle (red lines)
and class Polygon (blue lines).
Input: -
Output: Graph on screen.
Author: <NAME>
Date: 17. 08. 2015
*/
#include <iostream>
#include "Simple_window.h"
int main()
{
try
{
// create a window in the center of the screen
int windowHeight = 600;
int windowWidth = 600;
Point centerOfScreen(x_max()/2 - windowWidth / 2, y_max()/2 - windowHeight / 2);
Simple_window sw(centerOfScreen, windowWidth, windowHeight, "Chapter12 Exercise 1");
// draw a rectangle
Point rectUpperLeft(sw.x_max()/2 - 150, sw.y_max()/2 - 250);
Graph_lib::Rectangle rect(rectUpperLeft, 300, 200);
rect.set_color(Color::dark_red);
sw.attach(rect);
// draw a polygon: points added clockwise
Graph_lib::Polygon poly;
// upper left
poly.add(Point(sw.x_max()/2 - 150, sw.y_max()/2 - 250));
// upper right
poly.add(Point(sw.x_max()/2 + 150, sw.y_max()/2 - 250));
// lower right
poly.add(Point(sw.x_max()/2 + 150, sw.y_max()/2 - 50));
// lower left
poly.add(Point(sw.x_max()/2 - 150, sw.y_max()/2 - 50));
poly.set_color(Color::dark_blue);
sw.attach(poly);
sw.wait_for_button();
}
catch(std::exception& e)
{
std::cerr << e.what() << std::endl;
}
catch(...)
{
std::cerr << "Default exception!" << std::endl;
}
}
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { AgenciesRoutingModule } from './agencies-routing.module';
import { AgenciesComponent } from './agencies.component';
import { ReactiveFormsModule, FormsModule } from '@angular/forms';
import { NgxPaginationModule } from 'ngx-pagination';
import { NgSelectModule } from '@ng-select/ng-select';
import { RatingModule } from 'ng-starrating';
import { EditAgencyComponent } from '../../modals/edit-agency/edit-agency.component';
import { NgbModule } from '@ng-bootstrap/ng-bootstrap';
import { EditAgencyModule } from '../../modals/edit-agency/edit-agency.module';
import {SharedModule} from '../../modals/shared.module';
import {RatingsReviewComponent} from '../../modals/ratings-review/ratings-review.component';
@NgModule({
declarations: [AgenciesComponent],
imports: [
CommonModule,
AgenciesRoutingModule,
ReactiveFormsModule,
FormsModule,
NgxPaginationModule,
NgSelectModule,
SharedModule,
RatingModule,
EditAgencyModule,
NgbModule.forRoot(),
],
entryComponents: [EditAgencyComponent, RatingsReviewComponent]
})
export class AgenciesModule { }
|
Object.defineProperty(exports, "__esModule", { value: true });
var lie_ts_1 = require("lie-ts");
exports.Promise = (function () {
return typeof window !== "undefined" && window["Promise"] ? window["Promise"] : typeof global !== "undefined" && global["Promise"] ? global["Promise"] : lie_ts_1.Promise;
})();
/**
* Deep-clone an object via JSON serialization (unlike Object.assign, nested values are copied too).
*
* @param {*} obj
* @returns
*/
exports._assign = function (obj) {
return obj ? JSON.parse(JSON.stringify(obj)) : null;
};
/**
* Quickly and efficiently fire asynchronous operations in sequence, returns once all operations complete.
*
* @param {any[]} items
* @param {(item: any, i: number, next: (result?: any) => void) => void} callback
* @returns {Promise<any[]>}
*/
exports.fastCHAIN = function (items, callback) {
return new exports.Promise(function (res, rej) {
if (!items || !items.length) {
res([]);
return;
}
var results = [];
var step = function () {
if (results.length < items.length) {
callback(items[results.length], results.length, function (result) {
results.push(result);
lie_ts_1.setFast(step);
});
}
else {
res(results);
}
};
step();
});
};
/**
* Quickly and efficiently fire asynchronous operations in parallel, returns once any operation completes.
*
* @param {any[]} items
* @param {(item: any, i: number, next: (result?: any) => void) => void} callback
* @returns {Promise<any[]>}
*/
exports.fastRACE = function (items, callback) {
return new exports.Promise(function (res, rej) {
if (!items || !items.length) {
res([]);
return;
}
var resolved = false;
var counter = 0;
var step = function () {
if (counter < items.length) {
callback(items[counter], counter, function (result) {
if (!resolved) {
resolved = true;
res([result]);
}
});
counter++;
step();
}
};
step();
});
};
/**
* Quickly and efficiently fire asynchronous operations in parallel, returns once all operations are complete.
*
* @param {any[]} items
* @param {(item: any, i: number, done: (result?: any) => void) => void} callback
* @returns {Promise<any[]>}
*/
exports.fastALL = function (items, callback) {
return exports.Promise.all((items || []).map(function (item, i) {
return new exports.Promise(function (res, rej) {
callback(item, i, function (result) {
res(result);
});
});
}));
};
var ua = typeof window === "undefined" ? "" : (navigator.userAgent || "");
// Detects iOS device OR Safari running on desktop
exports.isSafari = ua.length === 0 ? false : (/^((?!chrome|android).)*safari/i.test(ua)) || (/iPad|iPhone|iPod/.test(ua) && !window["MSStream"]);
// Detect Edge or Internet Explorer
exports.isMSBrowser = ua.length === 0 ? false : ua.indexOf("MSIE ") > 0 || ua.indexOf("Trident/") > 0 || ua.indexOf("Edge/") > 0;
// Detect Android Device
exports.isAndroid = /Android/.test(ua);
/**
* Generate a random 16 bit number using strongest crypto available.
*
* @returns {number}
*/
exports.random16Bits = function () {
if (typeof crypto === "undefined") {
return Math.round(Math.random() * Math.pow(2, 16)); // Less random fallback.
}
else {
if (crypto.getRandomValues) { // Browser crypto
var buf = new Uint16Array(1);
crypto.getRandomValues(buf);
return buf[0];
}
else if (typeof global !== "undefined" && global._crypto.randomBytes) { // NodeJS crypto
return global._crypto.randomBytes(2).reduce(function (prev, cur) { return cur * prev; });
}
else {
return Math.round(Math.random() * Math.pow(2, 16)); // Less random fallback.
}
}
};
/**
* Generate a TimeID for use in the database.
*
* @param {boolean} [ms]
* @returns {string}
*/
exports.timeid = function (ms) {
var time = Math.round((new Date().getTime()) / (ms ? 1 : 1000)).toString();
while (time.length < (ms ? 13 : 10)) {
time = "0" + time;
}
return time + "-" + (exports.random16Bits() + exports.random16Bits()).toString(16);
};
/**
* See if two arrays intersect.
*
* @param {any[]} arr1
* @param {any[]} arr2
* @returns {boolean}
*/
exports.intersect = function (arr1, arr2) {
if (!arr1 || !arr2)
return false;
if (!arr1.length || !arr2.length)
return false;
return (arr1 || []).filter(function (item) { return (arr2 || []).indexOf(item) !== -1; }).length > 0;
};
/**
* Generates a valid V4 UUID using the strongest crypto available.
*
* @returns {string}
*/
exports.uuid = function () {
var r, s, b = "";
return [b, b, b, b, b, b, b, b].reduce(function (prev, cur, i) {
r = exports.random16Bits();
s = (i === 3 ? 4 : (i === 4 ? (r % 16 & 0x3 | 0x8).toString(16) : b));
r = r.toString(16);
while (r.length < 4)
r = "0" + r;
return prev + ([2, 3, 4, 5].indexOf(i) > -1 ? "-" : b) + (s + r).slice(0, 4);
}, b);
};
var idTypes = {
"int": function (value) { return value; },
"uuid": exports.uuid,
"timeId": function () { return exports.timeid(); },
"timeIdms": function () { return exports.timeid(true); }
};
/**
* A quick and dirty hashing function, turns a string into a short hex hash (djb2-style, not cryptographic).
* stolen from https://github.com/darkskyapp/string-hash
*
* @param {string} str
* @returns {string}
*/
exports.hash = function (str) {
var hash = 5381, i = str.length;
while (i) {
hash = (hash * 33) ^ str.charCodeAt(--i);
}
return (hash >>> 0).toString(16);
};
/**
* Generate a row ID given the primary key type.
*
* @param {string} primaryKeyType
* @param {number} [incrimentValue]
* @returns {*}
*/
exports.generateID = function (primaryKeyType, incrimentValue) {
return idTypes[primaryKeyType] ? idTypes[primaryKeyType](incrimentValue || 1) : "";
};
/**
* Clean the arguments from an object given an array of arguments and their types.
*
* @param {string[]} argDeclarations
* @param {StdObject<any>} args
* @returns {StdObject<any>}
*/
exports.cleanArgs = function (argDeclarations, args) {
var a = {};
var i = argDeclarations.length;
while (i--) {
var k2 = argDeclarations[i].split(":");
if (k2.length > 1) {
a[k2[0]] = exports.cast(k2[1], args[k2[0]] || undefined);
}
else {
a[k2[0]] = args[k2[0]] || undefined;
}
}
return a;
};
/**
* Determine if a given value is a javascript object or not. Excludes Arrays, Functions, Null, Undefined, etc.
*
* @param {*} val
* @returns {boolean}
*/
exports.isObject = function (val) {
return Object.prototype.toString.call(val) === "[object Object]";
};
/**
* Cast a javascript variable to a given type. Supports typescript primitives and more specific types.
*
* @param {string} type
* @param {*} [val]
* @returns {*}
*/
exports.cast = function (type, val) {
if (type === "any" || type === "blob")
return val;
var t = typeof val;
if (t === "undefined" || val === null) {
return val;
}
var entityMap = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
    "\"": "&quot;",
    "'": "&#39;",
    "/": "&#x2F;",
    "`": "&#x60;",
    "=": "&#x3D;"
};
var types = function (type, val) {
switch (type) {
case "safestr": return types("string", val).replace(/[&<>"'`=\/]/gmi, function (s) { return entityMap[s]; });
case "int": return (t !== "number" || val % 1 !== 0) ? parseInt(val || 0) : val;
case "number":
case "float": return t !== "number" ? parseFloat(val || 0) : val;
case "any[]":
case "array": return Array.isArray(val) ? val : [];
case "uuid":
case "timeId":
case "timeIdms":
case "string": return t !== "string" ? String(val) : val;
case "object":
case "obj":
case "map": return exports.isObject(val) ? val : {};
case "boolean":
case "bool": return val === true;
}
return val;
};
var newVal = types(String(type || "").toLowerCase(), val);
if (type.indexOf("[]") !== -1) {
var arrayOf_1 = type.slice(0, type.lastIndexOf("[]"));
return (val || []).map(function (v) {
return exports.cast(arrayOf_1, v);
});
}
else if (newVal !== undefined) {
if (["int", "float", "number"].indexOf(type) > -1) {
return isNaN(newVal) ? 0 : newVal;
}
else {
return newVal;
}
}
return undefined;
};
/**
* Insert a value into a sorted array, efficiently guarantees records stay sorted on insert.
*
* @param {any[]} arr
* @param {*} value
* @param {number} [startVal]
* @param {number} [endVal]
* @returns {any[]}
*/
exports.sortedInsert = function (arr, value, startVal, endVal) {
if (arr.length) {
arr.splice(exports.binarySearch(arr, value), 0, value);
return arr;
}
else {
arr.push(value);
return arr;
}
};
/**
* Given a sorted array and a value, find where that value fits into the array.
*
* @param {any[]} arr
* @param {*} value
* @param {number} [startVal]
* @param {number} [endVal]
* @returns {number}
*/
exports.binarySearch = function (arr, value, startVal, endVal) {
var length = arr.length;
var start = startVal || 0;
var end = endVal !== undefined ? endVal : length - 1;
if (length === 0) {
return 0;
}
if (value > arr[end]) {
return end + 1;
}
if (value < arr[start]) {
return start;
}
if (start >= end) {
return 0;
}
var m = start + Math.floor((end - start) / 2);
if (value < arr[m]) {
return exports.binarySearch(arr, value, start, m - 1);
}
if (value > arr[m]) {
return exports.binarySearch(arr, value, m + 1, end);
}
return 0;
};
/**
* Quickly removes duplicates from a sorted array.
*
* @param {any[]} arr
* @returns {any[]}
*/
exports.removeDuplicates = function (arr) {
if (!arr.length)
return [];
var newarr = [arr[0]];
for (var i = 1; i < arr.length; i++) {
if (arr[i] !== arr[i - 1])
newarr.push(arr[i]);
}
return newarr;
};
/**
* Recursively freeze a javascript object to prevent it from being modified.
*
* @param {*} obj
* @returns
*/
exports.deepFreeze = function (obj) {
Object.getOwnPropertyNames(obj || {}).forEach(function (name) {
var prop = obj[name];
if (typeof prop === "object" && prop !== null) {
obj[name] = exports.deepFreeze(prop);
}
});
// Freeze self (no-op if already frozen)
return Object.freeze(obj);
};
var objectPathCache = {};
/**
* Take an object and a string describing a path like "value.length" or "val[length]" and safely get that value in the object.
*
* @param {string} pathQuery
* @param {*} object
* @param {boolean} [ignoreFirstPath]
* @returns {*}
*/
exports.objQuery = function (pathQuery, object, ignoreFirstPath) {
var val;
var safeGet = function (getPath, pathIdx, object) {
if (!getPath[pathIdx] || !object)
return object;
return safeGet(getPath, pathIdx + 1, object[getPath[pathIdx]]);
};
var cacheKey = pathQuery + (ignoreFirstPath ? "1" : "0");
// cached path arrays, skips subsequent identical path requests.
if (objectPathCache[cacheKey]) {
return safeGet(objectPathCache[cacheKey], 0, object);
}
var path = [];
// need to turn path into array of strings, ie value[hey][there].length => [value, hey, there, length];
path = pathQuery.indexOf("[") > -1 ?
// handle complex mix of dots and brackets like "users.value[meta][value].length"
[].concat.apply([], pathQuery.split(".").map(function (v) { return v.match(/([^\[]+)|\[([^\]]+)\]\[/gmi) || v; })).map(function (v) { return v.replace(/\[|\]/gmi, ""); }) :
// handle simple dot paths like "users.meta.value.length"
pathQuery.split(".");
// handle joins where each row is defined as table.column
if (ignoreFirstPath) {
var firstPath = path.shift() + "." + path.shift();
path.unshift(firstPath);
}
objectPathCache[cacheKey] = path;
return safeGet(objectPathCache[cacheKey], 0, object);
};
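/*
 * Usage sketch (illustrative addition, not part of the original module):
 *   exports.cast("int", "5.7");                                   // -> 5
 *   exports.cast("safestr", "<b>hi</b>");                         // -> "&lt;b&gt;hi&lt;&#x2F;b&gt;"
 *   exports.objQuery("users[0].name", {users: [{name: "Ada"}]});  // -> "Ada"
 */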
|
#!/bin/bash -f
xv_path="/opt/Xilinx/Vivado/2015.3"
ExecStep()
{
"$@"
RETVAL=$?
if [ $RETVAL -ne 0 ]
then
exit $RETVAL
fi
}
ExecStep $xv_path/bin/xelab -wto e3a711b46ac549c798dc1c692e5c281e -m64 --debug typical --relax --mt 8 --maxdelay -L xil_defaultlib -L simprims_ver -L secureip --snapshot fourBitCLASim_time_impl -transport_int_delays -pulse_r 0 -pulse_int_r 0 xil_defaultlib.fourBitCLASim xil_defaultlib.glbl -log elaborate.log
|
<gh_stars>1-10
name "btsync"
maintainer "<NAME>"
maintainer_email "<EMAIL>"
license "GPL 3.0"
description "Installs/Configures Bittorrent P2P Synchronization Service"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.1"
%w{ ubuntu debian }.each do |os|
supports os
end
attribute "btsync/bootstrap",
:display_name => "Bootstrap server for Btsync",
:description => "True if this machine is the seeder for the torrent",
:type => 'string',
:default => "false"
|
#!/usr/bin/env bats
load test_helper
@test "autoon: file with explicity entered env" {
rm -f ./.envirius
run nv autoon test_env1
assert_success
# file should be created
[ -e ./.envirius ]
  # file should contain the environment name
assert_equal "test_env1" "`cat ./.envirius`"
rm ./.envirius
}
@test "autoon: show help if env not activated, file not created" {
rm -f ./.envirius
run nv autoon
assert_success
# file should not be created
[ ! -e ./.envirius ]
[ "${lines[0]}" = "`nv_bold Usage`: nv autoon [<env-name>]" ]
[ "${lines[1]}" = "`nv_bold Description`: Mark current directory for environment auto activating" ]
[ "${lines[2]}" = " If environment's name is not entered then used current" ]
[ "${lines[3]}" = " (active) environment. If environment is not activated" ]
[ "${lines[4]}" = " then environment's name is required." ]
rm -f ./.envirius
}
@test "autoon: file with activated environment name" {
rm -f ./.envirius
nv mk empty_env
nv on empty_env --same-shell
nv autoon
# file should be created
[ -e ./.envirius ]
  # file should contain the environment name
assert_equal "empty_env" "`cat ./.envirius`"
rm ./.envirius
}
|
//dependencies
const express = require("express");
const path = require("path");
const fs = require("fs");
//create express server
const app = express();
//sets initial port for listeners
const PORT = process.env.PORT || 8000;
const database = require("./db/db.json");
const { dirname } = require("path");
// Sets up the Express app to handle data parsing
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
app.use(express.static("public"));
// * GET `/notes` - Should return the `notes.html` file.
app.get("/notes", function(req, res){
res.sendFile(path.join(__dirname, "./public/notes.html"))
})
// //* GET `/api/notes` - Should read the `db.json` file and return all saved notes as JSON.
app.get("/api/notes", function(req, res) {
return res.json(database);
});
app.get("/api/notes/:id", function(req, res) {
let savedNotes = JSON.parse(fs.readFileSync(database, "utf8"));
res.json(savedNotes[Number(req.params.id)]);
});
//* GET `*` - Should return the `index.html` file
app.get("*", function(req,res){
res.sendFile(path.join(__dirname, "./public/index.html"))
})
// * POST `/api/notes` - Should receive a new note to save on the request body,
// add it to the `db.json` file, and then return the new note to the client.
app.post("/api/notes",function (req,res) {
    let newNote = req.body;
    // note: using the array length as the id can collide after deletions; acceptable for this simple demo
    let newNoteId = database.length;
    newNote.id = newNoteId;
    database.push(newNote);
fs.writeFile("./db/db.json", JSON.stringify(database), function (err) {
if (err) {
return console.log(err);
}
console.log("Note saved to db.json. Content: ", newNote);
});
res.json(newNote)
})
//* DELETE `/api/notes/:id` - Should receive a query parameter containing the id of a note to delete.
//This means you'll need to find a way to give each note a unique `id` when it's saved.
//In order to delete a note, you'll need to read all notes from the `db.json` file,
//remove the note with the given `id` property, and then rewrite the notes to the `db.json` file.
app.delete("/api/notes/:id", function (req, res) {
// request to delete note by id.
for (let i = 0; i < database.length; i++) {
if (database[i].id == req.params.id) {
// Splice takes i position, and then deletes the 1 note.
database.splice(i, 1);
break;
}
}
// Write the db.json file again.
fs.writeFileSync("./db/db.json", JSON.stringify(database), function (err) {
if (err) {
return console.log(err);
}
console.log("Your note was deleted!");
});
res.json(database);
});
// Starts the server to begin listening
// =============================================================
app.listen(PORT, function () {
console.log("App listening on PORT " + PORT);
});
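// Illustrative usage from a shell (assumes the server is running locally on the default port):
//   curl http://localhost:8000/api/notes
//   curl -X POST -H "Content-Type: application/json" \
//        -d '{"title":"todo","text":"buy milk"}' http://localhost:8000/api/notes
//   curl -X DELETE http://localhost:8000/api/notes/0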
|
# Runs prior to every test
setup() {
# Load our script file.
source ./src/scripts/install.sh
}
@test '1: test CPU detection' {
# Mock environment variables or functions by exporting them (after the script has been sourced)
# export PARAM_TO="World"
  # Capture the output of the "get_cpu" function
  result=$(get_cpu)
[ "$result" == "amd64" ]
}
@test '2: test Arch detection' {
result=$(get_arch)
[ "$result" == "linux" ]
}
|
# Stop and exit on error
set -e
VERSION="1.3.0"
cd ..
sed 's/$VERSION/'$VERSION'/g' tools/README.template.md > README.md
# Generate documentation
dub --build=docs
mkdir docs/$VERSION
mv docs/weather_forecast.html docs/$VERSION/index.html
git add docs/$VERSION/
# Create release
git commit -a -m "Release $VERSION"
git push
# Create and push tag
git tag v$VERSION -m "Release $VERSION"
git push --tags
|
#!/usr/bin/env bash
#
# Created by vcernomschi on 10/06/2015
#
path=$(cd $(dirname $0); pwd -P)
npm=`which npm`
eslint=`which eslint`
tslint=`which tslint`
if [ -z ${eslint} ]; then
${npm} -g install eslint
fi
if [ -z ${tslint} ]; then
${npm} -g install tslint
fi
if [ -f ${path}/../.git/hooks/pre-commit ]; then
cp ${path}/../.git/hooks/pre-commit ${path}/../.git/hooks/pre-commit_$(date +%F-%H%M%S).bak
fi
cp ${path}/pre-commit ${path}/../.git/hooks/.
|
/*
* Copyright (c) 2021 Target Brands, Inc. All rights reserved.
* Use of this source code is governed by the LICENSE file in this repository.
*/
context('Deployment', () => {
context('server returning deployment', () => {
beforeEach(() => {
cy.server();
cy.route(
'GET',
'*api/v1/secrets/native/repo/github/octocat/password*',
'fixture:secret_repo.json',
);
cy.route(
'POST',
'*api/v1/deployments/github/octocat',
'fixture:deployment.json',
);
cy.login('/github/octocat/add-deployment');
});
it('Add Parameter button should be disabled', () => {
cy.get('[data-test=add-parameter-button]')
.should('exist')
.should('not.be.enabled')
.contains('Add');
});
it('Add Parameter should work as intended', () => {
cy.get('[data-test=parameters-list]')
.should('exist')
.children()
.first()
.should('contain.text', 'No Parameters defined');
cy.get('[data-test=parameter-key-input]').should('exist').type('key1');
cy.get('[data-test=parameter-value-input]').should('exist').type('val1');
cy.get('[data-test=add-parameter-button]')
.should('exist')
.should('be.enabled')
.contains('Add')
.click();
    // toast should show (plain assertion: an it() block must not be nested inside another it())
    cy.get('[data-test=alerts]').should('exist').contains('Success');
cy.get('[data-test=parameters-list]')
.should('exist')
.children()
.first()
.children()
.first()
.should('contain.text', 'key1=val1');
cy.get('[data-test=parameter-key-input]')
.should('exist')
.should('have.value', '');
cy.get('[data-test=parameter-value-input]')
.should('exist')
.should('have.value', '');
});
});
});
|
<reponame>jloh02/valorant-chat-client
export const GAME_MODE: Map<string, string> = new Map([
["", "Custom"],
["ggteam", "Escalation"],
["onefa", "Replication"],
["Spikerush", "Spike Rush"],
]);
export const SCREEN_DEFAULTS = {
mainWidth: 1200,
mainHeight: 800,
minWidth: 750,
minHeight: 500,
};
export const LOCKFILE_POLLING_RATE = 5000; //in ms
export const NOTIFICATION_TIMEOUT = 5000; //in ms
|
<gh_stars>0
#include "GuildInfoManager.h"
#include <Core/Resource/Resource.h>
namespace Lunia {
namespace XRated {
namespace Database {
namespace Info {
void GuildInfoManager::Load(bool xml)
{
Resource::SerializerStreamReader reader;
if (xml == true) {
reader = Resource::ResourceSystemInstance().CreateSerializerXmlStreamReader(L"Database/Guild/GuildInfo.xml");
}
else {
reader = Resource::ResourceSystemInstance().CreateSerializerStructuredBinaryStreamReader(L"Database/GuildInfo.b");
}
reader->Read(L"General", general);
reader->Read(L"LevelInfos", levelInfos);
reader->Read(L"RankInfos", rankInfos);
}
void GuildInfoManager::Save(bool xml)
{
Resource::SerializerStreamWriter writer;
if (xml == true) {
writer = Resource::ResourceSystemInstance().CreateSerializerXmlStreamWriter(L"Database/Guild/GuildInfo.xml");
}
else {
writer = Resource::ResourceSystemInstance().CreateSerializerStructuredBinaryStreamWriter(L"Database/GuildInfo.b");
}
writer->Write(L"General", general);
writer->Write(L"LevelInfos", levelInfos);
writer->Write(L"RankInfos", rankInfos);
}
void GuildInfoManager::Init(bool xml)
{
Load(xml);
}
const GuildInfo::Level* GuildInfoManager::GetLevelInfo(uint8 level) const
{
if (level - 1 < 0)
{
LoggerInstance().Exception(L"Invalid Guild Level - Under 1");
}
if (levelInfos.size() > level - 1) {
return &(levelInfos.at(level - 1));
}
return NULL;
}
const GuildInfo::Rank* GuildInfoManager::GetRankInfo(uint32 rank) const
{
if (rankInfos.empty()) return NULL;
RankInfoList::const_iterator iter = rankInfos.find(rank);
if (iter != rankInfos.end())
{
return &iter->second;
}
return NULL;
}
const GuildInfo::General& GuildInfoManager::GetGeneral() const
{
return general;
}
uint32 GuildInfoManager::GetMaxGuildPoint(uint8 currentLevel) const
{
if (currentLevel - 1 < 0)
{
LoggerInstance().Exception(L"Invalid Guild Level - Under 1");
return 0;
}
return general.MaxGuildPoints.at(static_cast<int>(currentLevel) - 1);
}
uint32 GuildInfoManager::GetMaxGuildUserPerLevel(uint8 currentLevel) const
{
if (static_cast<int>(currentLevel) - 1 < 0)
{
LoggerInstance().Exception(L"Invalid Guild Level - Under 1");
return 0;
}
if (static_cast<uint32>(currentLevel) > general.MaxGuildUser.size())
{
LoggerInstance().Exception(L"Invalid Guild Level - {0}", currentLevel);
return 0;
}
return general.MaxGuildUser.at(static_cast<int>(currentLevel) - 1);
}
std::pair<bool, uint32> GuildInfoManager::GetNextLevelExp(uint8 currentLevel) const
{
if (currentLevel - 1 < 0)
{
LoggerInstance().Exception(L"Invalid Guild Level - Under 1");
return std::pair<bool, uint32>(false, 0);
}
std::pair<bool, uint32> returnValue(false, 0);
if (general.LevelUpExps.size() > currentLevel - 1) {
returnValue.first = true;
returnValue.second = general.LevelUpExps.at(currentLevel - 1);
return returnValue;
}
return returnValue;
}
DateTime GuildInfoManager::GetGuildUserExpAddTime(DateTime startTime, float userExp) const
{
if (general.MinuteForExp > userExp) {
uint32 needMinute = general.MinuteForExp - static_cast<uint32>(userExp);
startTime.Add(DateTime::Unit::Second, needMinute * 60);
return startTime;
}
return DateTime::Now();
}
float GuildInfoManager::GetShopDiscountRate(uint16 rank) const
{
if (rank < 1 || rank > general.DiscountRate.size())
return 1.0f;
return general.DiscountRate.at(rank - 1);
}
std::pair<float, uint32> GuildInfoManager::CalculdateCurrentUserExp(DateTime start, DateTime currentTime, float oldPlayTime) const
{
static uint32 secondForExp = general.MinuteForExp * 60;
float playTime = CalculatePlayTime(start, currentTime, oldPlayTime);
std::pair<float, uint32> result;
result.first = float((uint64)playTime % secondForExp);
result.second = (uint32)(playTime / secondForExp);
return result;
}
float GuildInfoManager::CalculatePlayTime(DateTime start, DateTime now, float oldPlayTime)
{
const static int secondOfDay = 60 * 60 * 24;
oldPlayTime += (float)now.GetTime().GetCumulatedSec() - (float)start.GetTime().GetCumulatedSec();
oldPlayTime += (float)(now.GetDate().GetCumulatedDay() - start.GetDate().GetCumulatedDay()) * secondOfDay;
return oldPlayTime;
}
void GuildInfoManager::SortSellItemList()
{
{
//Level
levelToSellItems.clear();
levelToSellItems.resize(levelInfos.size());
LevelToSellItemList::iterator levelToSellItemIter = levelToSellItems.begin();
LevelInfoList::const_iterator iter = levelInfos.begin();
LevelInfoList::const_iterator end = levelInfos.end();
while (iter != end) {
SellItemLists& items = *levelToSellItemIter;
ShopInfo::CategoryList::const_iterator categoryIter = (*iter).ShopCategorys.begin();
ShopInfo::CategoryList::const_iterator categoryEnd = (*iter).ShopCategorys.end();
while (categoryIter != categoryEnd) {
ShopInfo::Category::ItemList::const_iterator itemIter = (*categoryIter).Items.begin();
ShopInfo::Category::ItemList::const_iterator itemEnd = (*categoryIter).Items.end();
while (itemIter != itemEnd) {
items.push_back((*itemIter).ItemHash);
++itemIter;
}
++categoryIter;
}
++iter;
++levelToSellItemIter;
}
}
{
//Rank
rankToSellItems.clear();
RankInfoList::const_iterator rankIter = rankInfos.begin();
RankInfoList::const_iterator rankEnd = rankInfos.end();
while (rankIter != rankEnd)
{
SellItemLists items;
ShopInfo::CategoryList::const_iterator categoryIter = rankIter->second.ShopCategorys.begin();
ShopInfo::CategoryList::const_iterator categoryEnd = rankIter->second.ShopCategorys.end();
while (categoryIter != categoryEnd) {
ShopInfo::Category::ItemList::const_iterator itemIter = (*categoryIter).Items.begin();
ShopInfo::Category::ItemList::const_iterator itemEnd = (*categoryIter).Items.end();
while (itemIter != itemEnd) {
items.push_back((*itemIter).ItemHash);
++itemIter;
}
++categoryIter;
}
rankToSellItems[rankIter->first] = items;
++rankIter;
}
}
}
bool GuildInfoManager::IsShopItem(uint8 guildLevel, uint32 guildRank, uint32 hash) const
{
if (levelToSellItems.size() > guildLevel - 1) {
const SellItemLists& sellItems = (levelToSellItems.at(guildLevel - 1));
SellItemLists::const_iterator iter = std::find(sellItems.begin(), sellItems.end(), hash);
if (iter != sellItems.end()) {
return true;
}
}
if (rankToSellItems.empty()) return false;
RankToSellItemList::const_iterator iter = rankToSellItems.find(guildRank);
if (iter != rankToSellItems.end())
{
SellItemLists::const_iterator itemIter = std::find(iter->second.begin(), iter->second.end(), hash);
if (itemIter != iter->second.end()) {
return true;
}
}
return false;
}
}
}
}
}
|
<gh_stars>10-100
import { Injectable } from '@angular/core';
import { CrudService } from '../../../shared/services/crud.service';
import { Observable } from 'rxjs';
@Injectable({
providedIn: 'root'
})
export class OptionValuesService {
constructor(
private crudService: CrudService
) {
}
getListOfOptionValues(params): Observable<any> {
return this.crudService.get(`/v1/private/product/options/values`, params);
}
deleteOptionValue(id): Observable<any> {
return this.crudService.delete(`/v1/private/product/option/value/${id}`);
}
getOptionValueById(id): Observable<any> {
const params = {
lang: '_all'
};
return this.crudService.get(`/v1/private/product/option/value/${id}`, params);
}
createOptionValue(option): Observable<any> {
return this.crudService.post(`/v1/private/product/option/value`, option);
}
updateOptionValue(id, option): Observable<any> {
return this.crudService.put(`/v1/private/product/option/value/${id}`, option);
}
checkOptionValueCode(code): Observable<any> {
const params = {
'code': code,
};
return this.crudService.get(`/v1/private/product/option/value/unique`, params);
}
}
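// Usage sketch (illustrative addition; assumes a component using Angular DI):
//   constructor(private optionValues: OptionValuesService) {}
//
//   this.optionValues.getListOfOptionValues({ lang: 'en' })
//     .subscribe(values => console.log(values));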
|
<reponame>fourier11/interview<filename>javapractice/sort/MergeSortFromWiki.java
package sort;
import java.util.Arrays;
/**
 * Merge sort, a more concise version, though it uses quite a few temporary variables.
 */
public class MergeSortFromWiki {
private static void mergeSortRecursive(int[] arr, int[] result, int start, int end) {
if (start >= end) {
return;
}
int len = end - start;
int mid = start + len / 2;
int start1 = start;
int end1 = mid;
int start2 = mid + 1;
int end2 = end;
mergeSortRecursive(arr, result, start1, end1);
mergeSortRecursive(arr, result, start2, end2);
int k = start;
while (start1 <= end1 && start2 <= end2) {
result[k++] = arr[start1] < arr[start2] ? arr[start1++] : arr[start2++];
}
while (start1 <= end1) {
result[k++] = arr[start1++];
}
while (start2 <= end2) {
result[k++] = arr[start2++];
}
for (k = start; k <= end; k++) {
arr[k] = result[k];
}
}
public static void mergeSort(int[] arr) {
int len = arr.length;
        // The variable name "result" is a bit misleading: the final result is still written back to the original array.
int[] result = new int[len];
mergeSortRecursive(arr, result, 0, len - 1);
}
public static void main(String[] args) {
int[] arr = new int[] { 1, 4, 8, 2, 55, 3, 4, 8, 6, 4, 0, 11, 34, 90, 23, 54, 77, 9, 2, 9, 4, 10 };
mergeSort(arr);
System.out.println(Arrays.toString(arr));
}
}
|
TERMUX_PKG_HOMEPAGE=https://www.gnupg.org/related_software/libassuan/
TERMUX_PKG_DESCRIPTION="Library implementing the Assuan IPC protocol used between most newer GnuPG components"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_VERSION=2.5.4
TERMUX_PKG_SRCURL=https://www.gnupg.org/ftp/gcrypt/libassuan/libassuan-${TERMUX_PKG_VERSION}.tar.bz2
TERMUX_PKG_SHA256=c080ee96b3bd519edd696cfcebdecf19a3952189178db9887be713ccbcb5fbf0
TERMUX_PKG_DEPENDS="libgpg-error"
TERMUX_PKG_BREAKS="libassuan-dev"
TERMUX_PKG_REPLACES="libassuan-dev"
|
# other imports
import numpy as np
import os
from tqdm import tqdm
from sklearn.metrics import confusion_matrix
import h5py
# torch imports
import torch
import torch.nn.functional as F
import torch.utils.data
from s3dis_dataset import DatasetTrainVal as Dataset
import lightconvpoint.utils.metrics as metrics
from lightconvpoint.utils import get_network
# SACRED
from sacred import Experiment
from sacred import SETTINGS
from sacred.utils import apply_backspaces_and_linefeeds
from sacred.config import save_config_file
SETTINGS.CAPTURE_MODE = "sys" # for tqdm
ex = Experiment("S3DIS")
ex.captured_out_filter = apply_backspaces_and_linefeeds # for tqdm
ex.add_config("s3dis.yaml")
######
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
# wrap blue / green
def wblue(str):
return bcolors.OKBLUE+str+bcolors.ENDC
def wgreen(str):
return bcolors.OKGREEN+str+bcolors.ENDC
@ex.automain
def main(_run, _config):
print(_config)
savedir_root = _config['training']['savedir']
device = torch.device(_config['misc']['device'])
# save the config file
os.makedirs(savedir_root, exist_ok=True)
save_config_file(eval(str(_config)), os.path.join(
savedir_root, "config.yaml"))
# create the path to data
rootdir = os.path.join(_config['dataset']['datasetdir'], _config['dataset']['dataset'])
N_CLASSES = 13
# create the network
print("Creating the network...", end="", flush=True)
def network_function():
return get_network(
_config["network"]["model"],
in_channels=3,
out_channels=N_CLASSES,
backend_conv=_config["network"]["backend_conv"],
backend_search=_config["network"]["backend_search"],
config=_config
)
net = network_function()
net.to(device)
print("Done")
# create the filelits (train / val) according to area
print("Create filelist...", end="")
filelist_train = []
filelist_test = []
    for area_idx in range(1, 7):
folder = os.path.join(rootdir, f"Area_{area_idx}")
datasets = [os.path.join(f"Area_{area_idx}", dataset) for dataset in os.listdir(folder)]
if area_idx == _config['dataset']['area']:
filelist_test = filelist_test + datasets
else:
filelist_train = filelist_train + datasets
filelist_train.sort()
filelist_test.sort()
print(f"done, {len(filelist_train)} train files, {len(filelist_test)} test files")
print("Creating dataloader and optimizer...", end="", flush=True)
ds = Dataset(filelist_train, rootdir,
training=True, block_size=_config['dataset']['pillar_size'],
npoints=_config['dataset']['npoints'],
iteration_number=_config['training']['batchsize']*_config['training']['epoch_iter'],
jitter=_config['training']['jitter'],
scaling_param=_config['training']['scaling_param'],
rgb_dropout=_config['training']['rgb_dropout'],
rgb=_config['training']['rgb'], network_function=network_function)
train_loader = torch.utils.data.DataLoader(ds, batch_size=_config['training']['batchsize'], shuffle=True,
num_workers=_config['misc']['threads']
)
ds_val = Dataset(filelist_test, rootdir,
training=False, block_size=_config['dataset']['pillar_size'],
npoints=_config['dataset']['npoints'],
iteration_number=_config['training']['batchsize']*100,
rgb=_config['training']['rgb'],
network_function=network_function)
test_loader = torch.utils.data.DataLoader(ds_val, batch_size=_config['training']['batchsize'], shuffle=False,
num_workers=_config['misc']['threads']
)
print("Done")
print("Creating optimizer...", end="", flush=True)
optimizer = torch.optim.Adam(net.parameters(), lr=_config['training']['lr_start'])
print("done")
print("Weights")
if _config['training']['weights']: # computed on the train set
        if _config['dataset']['area']==1:
weights = torch.Tensor([0.7615, 0.3969, 0.4546, 0.2727, 6.7376, 4.1650, 1.6270, 3.2547,
2.3042, 2.1289, 17.7709, 1.1333, 6.7996])
        elif _config['dataset']['area']==2:
weights = torch.Tensor([ 0.7366, 0.4071, 0.4866, 0.2736, 4.0031, 3.3682, 1.6507, 2.5912,
2.0347, 3.0115, 17.2155, 1.1268, 5.9607])
        elif _config['dataset']['area']==3:
weights = torch.Tensor([0.7499, 0.3991, 0.4636, 0.2758, 4.4585, 3.7786, 1.6039, 2.9821,
2.2443, 2.1931, 20.1374, 1.2197, 6.2980])
        elif _config['dataset']['area']==4:
weights = torch.Tensor([0.7543, 0.3921, 0.4622, 0.2818, 3.8026, 3.8313, 1.7192, 3.0418,
2.1892, 2.1827, 19.7227, 1.2032, 5.5455])
        elif _config['dataset']['area']==5:
weights = torch.Tensor([0.7045, 0.4006, 0.4644, 0.2815, 3.1686, 3.6080, 1.4001, 3.6230,
2.3671, 1.8859, 15.7542, 1.6276, 6.0848])
        elif _config['dataset']['area']==6:
weights = torch.Tensor([0.7508, 0.3955, 0.4576, 0.2720, 5.9368, 4.1264, 1.6474, 3.0501,
2.5304, 2.2307, 18.0194, 1.1336, 6.5966])
else:
raise Exception('Unknown area')
else:
weights = torch.ones(N_CLASSES).float()
weights=weights.to(device)
print("Done")
# iterate over epochs
for epoch in range(0, _config['training']['epoch_nbr']):
#######
# training
net.train()
count=0
train_loss = 0
cm = np.zeros((N_CLASSES, N_CLASSES))
t = tqdm(train_loader, ncols=100, desc="Epoch {}".format(epoch), disable=_config['misc']['disable_tqdm'])
for data in t:
pts = data['pts'].to(device)
features = data['features'].to(device)
seg = data['target'].to(device)
net_ids = data["net_indices"]
net_pts = data["net_support"]
for i in range(len(net_ids)):
net_ids[i] = net_ids[i].to(device)
for i in range(len(net_pts)):
net_pts[i] = net_pts[i].to(device)
optimizer.zero_grad()
outputs = net(features, pts, indices=net_ids, support_points=net_pts)
loss = F.cross_entropy(outputs, seg, weight=weights)
loss.backward()
optimizer.step()
output_np = np.argmax(outputs.cpu().detach().numpy(), axis=1).copy()
target_np = seg.cpu().numpy().copy()
cm_ = confusion_matrix(target_np.ravel(), output_np.ravel(), labels=list(range(N_CLASSES)))
cm += cm_
oa = f"{metrics.stats_overall_accuracy(cm):.5f}"
aa = f"{metrics.stats_accuracy_per_class(cm)[0]:.5f}"
iou = f"{metrics.stats_iou_per_class(cm)[0]:.5f}"
train_loss += loss.detach().cpu().item()
t.set_postfix(OA=wblue(oa), AA=wblue(aa), IOU=wblue(iou), LOSS=wblue(f"{train_loss/cm.sum():.4e}"))
######
## validation
net.eval()
cm_test = np.zeros((N_CLASSES, N_CLASSES))
test_loss = 0
t = tqdm(test_loader, ncols=80, desc=" Test epoch {}".format(epoch), disable=_config['misc']['disable_tqdm'])
with torch.no_grad():
for data in t:
pts = data['pts'].to(device)
features = data['features'].to(device)
seg = data['target'].to(device)
net_ids = data["net_indices"]
net_pts = data["net_support"]
for i in range(len(net_ids)):
net_ids[i] = net_ids[i].to(device)
for i in range(len(net_pts)):
net_pts[i] = net_pts[i].to(device)
outputs = net(features, pts, indices=net_ids, support_points=net_pts)
loss = F.cross_entropy(outputs, seg)
output_np = np.argmax(outputs.cpu().detach().numpy(), axis=1).copy()
target_np = seg.cpu().numpy().copy()
cm_ = confusion_matrix(target_np.ravel(), output_np.ravel(), labels=list(range(N_CLASSES)))
cm_test += cm_
oa_val = f"{metrics.stats_overall_accuracy(cm_test):.5f}"
aa_val = f"{metrics.stats_accuracy_per_class(cm_test)[0]:.5f}"
iou_val = f"{metrics.stats_iou_per_class(cm_test)[0]:.5f}"
test_loss += loss.detach().cpu().item()
t.set_postfix(OA=wgreen(oa_val), AA=wgreen(aa_val), IOU=wgreen(iou_val), LOSS=wgreen(f"{test_loss/cm_test.sum():.4e}"))
# create the root folder
os.makedirs(savedir_root, exist_ok=True)
# save the checkpoint
torch.save({
'epoch': epoch + 1,
'state_dict': net.state_dict(),
'optimizer' : optimizer.state_dict(),
}, os.path.join(savedir_root, "checkpoint.pth"))
# write the logs
logs = open(os.path.join(savedir_root, "logs.txt"), "a+")
logs.write(f"{epoch} {oa} {aa} {iou} {oa_val} {aa_val} {iou_val}\n")
logs.close()
# log train values
_run.log_scalar("trainOA", oa, epoch)
_run.log_scalar("trainAA", aa, epoch)
_run.log_scalar("trainIoU", iou, epoch)
_run.log_scalar("testOA", oa_val, epoch)
_run.log_scalar("testAA", aa_val, epoch)
_run.log_scalar("testAIoU", iou_val, epoch)
|
import axios from 'axios';
const API_URL = 'http://localhost:8000/products';
const fetchData = async() => {
const response = await axios.get(API_URL);
return response.data;
};
const renderTable = (data) => {
    const table = document.createElement('table');
    // Guard: Object.keys(data[0]) would throw on an empty result set
    if (!Array.isArray(data) || data.length === 0) {
        return table;
    }
    const headerRow = document.createElement('tr');
    const columnNames = Object.keys(data[0]);
columnNames.forEach(name => {
const headerCell = document.createElement('th');
headerCell.textContent = name;
headerRow.appendChild(headerCell);
});
table.appendChild(headerRow);
data.forEach(rowData => {
const dataRow = document.createElement('tr');
columnNames.forEach(name => {
const dataCell = document.createElement('td');
dataCell.textContent = rowData[name];
dataRow.appendChild(dataCell);
});
table.appendChild(dataRow);
});
return table;
};
const main = async () => {
const data = await fetchData();
const table = renderTable(data);
document.body.appendChild(table);
};
main();
|
class RecordingDevice:
def __init__(self, can_record):
self._can_record = can_record
self._is_recording = False
self._recorded_audio = []
def record_audio(self, duration):
if self._can_record:
print(f"Recording audio for {duration} seconds")
self._is_recording = True
# Simulate recording audio
# Assume audio is recorded and stored in some format
# For example, appending to a list for simplicity
self._recorded_audio.append(f"Audio recorded for {duration} seconds")
else:
print("Device cannot record audio")
def stop_recording(self):
if self._is_recording:
print("Recording stopped")
self._is_recording = False
else:
print("No ongoing recording")
def playback_audio(self):
if self._recorded_audio:
print("Playing back recorded audio:")
for audio in self._recorded_audio:
print(audio)
else:
print("No recorded audio to playback")
@property
def can_record(self):
return self._can_record
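

# Usage sketch (illustrative addition, not part of the original class)
if __name__ == "__main__":
    device = RecordingDevice(can_record=True)
    device.record_audio(5)    # starts and stores a simulated 5-second clip
    device.stop_recording()
    device.playback_audio()   # prints the stored clip descriptions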
|
<filename>modules/caas/api/src/main/java/io/cattle/platform/api/instance/ContainerLogsActionHandler.java
package io.cattle.platform.api.instance;
import com.netflix.config.DynamicStringProperty;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.docker.api.model.ContainerLogs;
import io.cattle.platform.docker.api.model.HostAccess;
import io.cattle.platform.docker.util.DockerUtils;
import io.cattle.platform.hostapi.HostApiAccess;
import io.cattle.platform.hostapi.HostApiService;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.util.type.CollectionUtils;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.request.resource.ActionHandler;
import java.util.Map;
public class ContainerLogsActionHandler implements ActionHandler {
private static final DynamicStringProperty HOST_LOGS_PATH = ArchaiusUtil.getString("host.logs.path");
HostApiService apiService;
ObjectManager objectManager;
public ContainerLogsActionHandler(HostApiService apiService, ObjectManager objectManager) {
super();
this.apiService = apiService;
this.objectManager = objectManager;
}
@Override
public Object perform(Object obj, ApiRequest request) {
Host host = null;
Instance container = null;
if (obj instanceof Instance) {
container = (Instance) obj;
host = DockerUtils.getHostFromContainer(objectManager, container, null);
}
if (host == null) {
return null;
}
ContainerLogs logs = request.proxyRequestObject(ContainerLogs.class);
String dockerId = DockerUtils.getDockerIdentifier(container);
Map<String, Object> data = CollectionUtils.asMap(InstanceConstants.DOCKER_CONTAINER, dockerId, "Lines", logs.getLines(), "Follow",
logs.getFollow());
HostApiAccess apiAccess = apiService.getAccess(request, host.getId(), CollectionUtils.asMap("logs", data), HOST_LOGS_PATH.get());
if (apiAccess == null) {
return null;
}
HostAccess access = new HostAccess(apiAccess.getUrl(), apiAccess.getAuthenticationToken());
return access;
}
}
|
import random
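# random.randint is inclusive on both ends, so this always yields a 5-digit number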
rand_num = random.randint(10000, 99999)
print(rand_num)
|
/**
* There are 4 different ways to call a function
* 1) fn()
* 2) fn.call()
* 3) fn.apply()
* 4) new fn()
*/
function add(a, b) {
return a + b;
}
var x = add(1,2); //-> 3
var y = add.call(null, 1, 2); //-> 3
var z = add.apply(null, [1,2]); //-> 3
var w = new add(1,2); //-> new object (discussed later)
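
// Illustrative addition: call/apply matter when the function reads `this`.
function greet(greeting) {
  return greeting + ", " + this.name;
}
var person = { name: "Ada" };
greet.call(person, "Hello");    //-> "Hello, Ada"
greet.apply(person, ["Hi"]);    //-> "Hi, Ada"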
|
<reponame>htruong/M5Paper_FactoryTest
#include <WiFi.h>
#include <ArduinoJson.h>
#include <HTTPClient.h>
#include <MD5Builder.h>
#include "frame_feedcontent.h"
#include "frame_urlreader.h"
#include "../utils/urlencoder.h"
#define MAX_BTN_NUM 12
void key_feedcontent_feed_cb(epdgui_args_vector_t &args)
{
Frame_Base *frame = new Frame_urlReader(((EPDGUI_Button*)(args[0]))->GetCustomString());
EPDGUI_PushFrame(frame);
*((int*)(args[1])) = 0;
log_d("%s", ((EPDGUI_Button*)(args[0]))->GetCustomString().c_str());
}
void key_feedcontent_exit_cb(epdgui_args_vector_t &args)
{
EPDGUI_PopFrame(true);
*((int*)(args[0])) = 0;
}
Frame_FeedContent::Frame_FeedContent(String url)
{
_frame_name = "Frame_FeedContent";
_url = url;
uint8_t language = GetLanguage();
_canvas_title->setTextDatum(CR_DATUM);
if (language == LANGUAGE_JA)
{
exitbtn("ホーム");
}
else if (language == LANGUAGE_ZH)
{
exitbtn("主页");
}
else
{
exitbtn("Back");
}
_canvas_title->drawString("Items Index", 540 - 15, 34);
_key_exit->AddArgs(EPDGUI_Button::EVENT_RELEASED, 0, (void *)(&_is_run));
_key_exit->Bind(EPDGUI_Button::EVENT_RELEASED, &key_feedcontent_exit_cb);
}
bool Frame_FeedContent::downloadFeed() {
if (WiFi.status() != WL_CONNECTED) {
log_e("URL %s is not cached and you're not connected to the internet, gave up.", _url.c_str());
return false;
}
HTTPClient http;
http.begin("http://article-proxy.tnhh.net/feed/?url=" + URLEncoder::urlencode(_url));
int httpCode = http.GET();
if (httpCode == HTTP_CODE_OK) {
DynamicJsonDocument doc(4096);
StaticJsonDocument<200> filter;
filter["items"][0]["title"] = true;
filter["items"][0]["url"] = true;
DeserializationError error = deserializeJson(doc, http.getStream(), DeserializationOption::Filter(filter));
if (error != DeserializationError::Ok) {
log_e("URL %s cache error: %s.", _url.c_str(), error.c_str());
return false;
}
log_d("URL %s cached successfully.", _url.c_str());
/*
char buf [1000];
serializeJson(doc["items"], buf, 10000);
log_d("JSON: %s", buf);
*/
JsonArray RSSItems = doc["items"].as<JsonArray>();
    int item_count = 0;
    log_d("Feed contains %u items", (unsigned)RSSItems.size());
    for (JsonArray::iterator it=RSSItems.begin(); it!=RSSItems.end(); ++it) {
      // log by index: passing iterators to a %llu format specifier is undefined behavior
      log_d("Item %d: %s at %s", item_count++, (*it)["title"].as<char*>(), (*it)["url"].as<char*>());
struct rssItem_t item;
item.name = String((*it)["title"].as<char*>());
item.url = String((*it)["url"].as<char*>());
_feed.push_back(item);
}
} else {
log_e("URL %s caching failed.", _url.c_str());
return false;
}
return true;
}
void Frame_FeedContent::listFeeds()
{
if (! downloadFeed()) {
log_e ("Cannot download feed %s.", _url);
return;
}
for(int n = 0; n < _feed.size(); n++)
{
if(_key_feed.size() > MAX_BTN_NUM)
{
break;
}
EPDGUI_Button *btn = new EPDGUI_Button(4, 100 + _key_feed.size() * 60, 532, 61);
_key_feed.push_back(btn);
btn->CanvasNormal()->fillCanvas(0);
btn->CanvasNormal()->drawRect(0, 0, 532, 61, 15);
btn->CanvasNormal()->setTextSize(26);
btn->CanvasNormal()->setTextDatum(CL_DATUM);
btn->CanvasNormal()->setTextColor(15);
btn->CanvasNormal()->drawString(_feed[n].name, 47 + 13, 35);
btn->SetCustomString(_feed[n].url);
btn->CanvasNormal()->setTextDatum(CR_DATUM);
btn->CanvasNormal()->pushImage(15, 14, 32, 32, ImageResource_item_icon_file_floder_32x32);
btn->CanvasNormal()->pushImage(532 - 15 - 32, 14, 32, 32, ImageResource_item_icon_arrow_r_32x32);
*(btn->CanvasPressed()) = *(btn->CanvasNormal());
btn->CanvasPressed()->ReverseColor();
btn->AddArgs(EPDGUI_Button::EVENT_RELEASED, 0, btn);
btn->AddArgs(EPDGUI_Button::EVENT_RELEASED, 1, (void*)(&_is_run));
btn->Bind(EPDGUI_Button::EVENT_RELEASED, key_feedcontent_feed_cb);
}
}
Frame_FeedContent::~Frame_FeedContent(void)
{
for(int i = 0; i < _key_feed.size(); i++)
{
delete _key_feed[i];
}
}
int Frame_FeedContent::init(epdgui_args_vector_t &args)
{
_is_run = 1;
if(_key_feed.size() == 0)
{
listFeeds();
}
M5.EPD.WriteFullGram4bpp(GetWallpaper());
_canvas_title->pushCanvas(0, 8, UPDATE_MODE_NONE);
EPDGUI_AddObject(_key_exit);
for(int i = 0; i < _key_feed.size(); i++)
{
EPDGUI_AddObject(_key_feed[i]);
}
return 3;
}
|
#!/bin/bash
###################################################################################################
### Configuration
###################################################################################################
VBB_LIST=( 0.0 3.0 6.0 ) # in V
I_THR_LIST=( 51 100 ) # in DAC
V_CASN_LIST=( 50 105 135 ) # in DAC
V_CLIP_LIST=( 0 60 100 ) # in DAC
V_RESETD_LIST=( 147 170 170 ) # in DAC
# how many V_CASN values to process per back-bias voltage
V_CASN_PER_VBB=$(echo "${#V_CASN_LIST[@]} / ${#VBB_LIST[@]}" | bc)
MODE_LIST=( 0 1 ) # chip readout mode (0=triggered, 1=continuous)
STROBEB_LIST=( 4 20 0 ) # continuous integration: 0,
# different from 0: multiple of 25ns clock cycles
TRG_FREQ_LIST=( 20000 50000 100000 200000 500000 1000000 ) # in Hz
TRG_TRAIN_LENGTH=( 100 ) # number of consecutive triggers
HAMEG=/dev/ttyHAMEG0
CURR=( 0.8 0.0 0.02 ) # 5V, unconnected, Vbb
###################################################################################################
###################################################################################################
# setup environment / load functions
ROOT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) # determine where this script is located
source ${ROOT_DIR}/common/functions.sh
###################################################################################################
### determine 'run conditions'
eval $(determine_run_conditions)
###################################################################################################
### execution
if [ "$#" -lt 1 ]
then
# ask for chip ID
echo "Please enter the name of the chip followed by [ENTER]:"
read CHIPNAME
else
CHIPNAME=$1
fi
# is ROOT available?
check_root
# create output folder
FULLPATH=${ROOT_DIR}/../Data/rate_dependence_${DATE}_${GIT_INFO}${SUFFIX}
create_output_folder $FULLPATH
LOGFILE=${FULLPATH}/log.txt
# store the chip name
touch ${FULLPATH}/${CHIPNAME}
# store git diff
store_git_diff $FULLPATH >> $LOGFILE 2>&1
DATAPATH=${FULLPATH}/data
mkdir $DATAPATH
###################################################################################################
### initialise all power supplies
###################################################################################################
${ROOT_DIR}/common/pulser.py -1.0 # deactivate pulser which could lead to an input signal above VDDA and VDDD
${ROOT_DIR}/common/hameg2030.py ${HAMEG} 0 ${CURR[@]}
###################################################################################################
#### start the measurement itself
###################################################################################################
i_VBB=0
for VBB in "${VBB_LIST[@]}"
do
# set the back-bias voltage
echo "setting back-bias voltage to ${VBB}V" | tee -a $LOGFILE
${ROOT_DIR}/common/hameg2030.py ${HAMEG} 1 3 ${VBB}
VBB_OLD=${VBB}
sleep 1
${ROOT_DIR}/common/hameg2030.py ${HAMEG} 4
if [ $? -eq 1 ]
then
echo "back-bias current too high, stopping measurement"
${ROOT_DIR}/common/hameg2030.py ${HAMEG} 4
exit 1
fi
V_CLIP=${V_CLIP_LIST[${i_VBB}]}
V_RESETD=${V_RESETD_LIST[${i_VBB}]}
for I_THR in "${I_THR_LIST[@]}"
do
echo "I_THR="${I_THR}"DAC" | tee -a $LOGFILE
for i_V_CASN in $(seq 0 $((${V_CASN_PER_VBB} - 1)))
#for V_CASN in "${V_CASN_LIST[@]}"
do
V_CASN=${V_CASN_LIST[$(( i_VBB*${V_CASN_PER_VBB} + i_V_CASN ))]}
V_CASN2=$(( ${V_CASN} + 12 ))
echo "V_CASN="${V_CASN}"DAC" | tee -a $LOGFILE
for MODE in "${MODE_LIST[@]}"
do
echo "READOUT MODE=$MODE" | tee -a $LOGFILE
for TRG_FREQ in "${TRG_FREQ_LIST[@]}"
do
echo "TRG_FREQ="${TRG_FREQ}"Hz" | tee -a $LOGFILE
${ROOT_DIR}/common/pulser.py 1 ${TRG_FREQ} ${TRG_TRAIN_LENGTH} | tee -a $LOGFILE
for STROBEB in "${STROBEB_LIST[@]}"
do
if [ "${STROBEB}" -eq 0 ]
then
STROBEB=$(bc <<< "40000000/${TRG_FREQ}-10" )
echo "Continuous integration mode, STROBEB="$(bc <<< "${STROBEB}*25")"ns" | tee -a $LOGFILE
else
echo "Fixed length STROBEB="$(bc <<< "${STROBEB}*25")"ns" | tee -a $LOGFILE
fi
###########################################################################
### write config file
###########################################################################
cd ${ROOT_DIR}
cat <<EOF > Config.cfg
# First line has to be DEVICE (Values: CHIP, TELESCOPE, MODULE, STAVE, CHIPMOSAIC)
DEVICE CHIP
# as of firmware version 247e0611 the DAQboard version (v2 or v3) must be defined; 0 -> v2; 1 -> v3;
BOARDVERSION 1
ITHR ${I_THR}
VCASN2 ${V_CASN2}
VCASN ${V_CASN}
VCLIP ${V_CLIP}
VRESETD ${V_RESETD}
IDB 29
VCASP 86
IBIAS 64
STROBEDURATION ${STROBEB}
READOUTMODE ${MODE}
EOF
mv Config.cfg ../
# six format specifiers, six arguments (the undefined ${V_RST} shifted every field by one)
CURR_DATAPATH=$(printf "$DATAPATH/VBB-%0.1f/ITHR%0.3d/VCASN%0.3d/MODE%c/TRG_FREQ%0.6d/STROBEB%0.6d" \
${VBB} ${I_THR} ${V_CASN} ${MODE} ${TRG_FREQ} ${STROBEB})
SUBFOLDER=${CURR_DATAPATH}/FHR
mkdir -p ${SUBFOLDER}
cp ../Config.cfg ${SUBFOLDER}
###########################################################################
### start the acquisition
###########################################################################
cd ${ROOT_DIR}/../
./test_noiseocc_ext | tee -a $LOGFILE
# moving the data
mv -v Data/$(ls -1tr Data | grep NoiseOccupancyExt | tail -n 1) ${SUBFOLDER}
mv -v Data/$(ls -1tr Data | grep ScanConfig | tail -n 1) ${SUBFOLDER}
done
done
done
done
done
let i_VBB+=1
done
###################################################################################################
### power down everything
###################################################################################################
${ROOT_DIR}/common/pulser.py -1.0 # deactivate pulser which could lead to an input signal above VDDA and VDDD
${ROOT_DIR}/common/hameg2030.py ${HAMEG} 6 # turn off the PSU
git checkout ${ROOT_DIR}/../Config.cfg
|
from torch.optim import Optimizer
import math
import torch
import time
class AdamW(Optimizer):
"""Implements AdamW algorithm.
It has been proposed in `Fixing Weight Decay Regularization in Adam`_.
Arguments:
params (iterable): iterable of parameters to optimize or dicts defining
parameter groups
lr (float, optional): learning rate (default: 1e-3)
betas (Tuple[float, float], optional): coefficients used for computing
running averages of gradient and its square (default: (0.9, 0.999))
eps (float, optional): term added to the denominator to improve
numerical stability (default: 1e-8)
weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
.. Fixing Weight Decay Regularization in Adam:
https://arxiv.org/abs/1711.05101
"""
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,
weight_decay=0, amsgrad=False):
if not 0.0 <= lr:
raise ValueError("Invalid learning rate: {}".format(lr))
if not 0.0 <= eps:
raise ValueError("Invalid epsilon value: {}".format(eps))
if not 0.0 <= betas[0] < 1.0:
raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0]))
if not 0.0 <= betas[1] < 1.0:
raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
if not 0.0 <= weight_decay:
raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
defaults = dict(lr=lr, betas=betas, eps=eps,
weight_decay=weight_decay, amsgrad=amsgrad)
super(AdamW, self).__init__(params, defaults)
def __setstate__(self, state):
super(AdamW, self).__setstate__(state)
for group in self.param_groups:
group.setdefault('amsgrad', False)
def step(self, closure=None):
"""Performs a single optimization step.
Arguments:
closure (callable, optional): A closure that reevaluates the model
and returns the loss.
"""
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
beta1, beta2 = group['betas']
lr, eps, amsgrad = group['lr'], group['eps'], group['amsgrad']
for p in group['params']:
if p.grad is None:
continue
if group['weight_decay'] != 0:
p.data.mul_(1. - lr * group['weight_decay'])
grad = p.grad.data
if grad.is_sparse:
raise RuntimeError('AdamW does not support sparse gradients, please consider SparseAdam instead')
state = self.state[p]
# State initialization
if 'step' not in state:
state['step'] = 0
# Exponential moving average of gradient values
state['exp_avg'] = torch.zeros_like(p.data)
# Exponential moving average of learning rates
state['exp_avg_lr_1'] = 0.
state['exp_avg_lr_2'] = 0.
# Exponential moving average of squared gradient values
state['exp_avg_sq'] = torch.zeros_like(p.data)
if amsgrad:
# Maintains max of all exp. moving avg. of sq. grad. values
state['max_exp_avg_sq'] = torch.zeros_like(p.data)
state['PopArt_rescale'] = 1.
exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
if amsgrad:
if not 'max_exp_avg_sq' in state:
state['max_exp_avg_sq'] = torch.zeros_like(p.data)
max_exp_avg_sq = state['max_exp_avg_sq']
state['step'] += 1
# We incorporate the term group['lr'] into the momentum, and define the bias_correction1 such that it respects the possibly moving group['lr']
state['exp_avg_lr_1'] = state['exp_avg_lr_1'] * beta1 + (1. - beta1) * lr
state['exp_avg_lr_2'] = state['exp_avg_lr_2'] * beta2 + (1. - beta2)
#bias_correction1 = state['exp_avg_lr_1'] / group['lr'] if group['lr']!=0. else 1. #1. - beta1 ** state['step']
bias_correction2 = state['exp_avg_lr_2']
# For convenience, we directly use "sqrt_bias_correction2" and "step_size" as the following
sqrt_bias_correction2 = math.sqrt(bias_correction2)
# when state['exp_avg_lr_1'] is zero, exp_avg should also be zero and it is trivial
one_over_bias_correction1 = lr / state['exp_avg_lr_1'] if state['exp_avg_lr_1']!=0. else 0.
step_size = one_over_bias_correction1 * sqrt_bias_correction2 # instead of correcting "denom" by dividing it, we put the correction factor into "step_size" and "eps"
# Decay the first and second moment running average coefficient
rescaling = state['PopArt_rescale']
exp_avg.mul_(beta1*rescaling).add_(grad, alpha=(1. - beta1) * lr)
exp_avg_sq.mul_(beta2*rescaling**2).addcmul_(grad, grad, value=1. - beta2)
if amsgrad:
# Maintains the maximum of all 2nd moment running avg. till now
torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq)
# Use the max. for normalizing running avg. of gradient
denom = max_exp_avg_sq.sqrt().add_(eps * sqrt_bias_correction2)
# 'eps' is first multiplied by sqrt_bias_correction2 and then divided by sqrt_bias_correction2
else:
denom = exp_avg_sq.sqrt().add_(eps * sqrt_bias_correction2)
p.data.addcdiv_(exp_avg, denom, value=-step_size)
return loss
class AdamBelief(Optimizer):
"""Implements AdamW algorithm.
It has been proposed in `Fixing Weight Decay Regularization in Adam`_.
Arguments:
params (iterable): iterable of parameters to optimize or dicts defining
parameter groups
lr (float, optional): learning rate (default: 1e-3)
betas (Tuple[float, float], optional): coefficients used for computing
running averages of gradient and its square (default: (0.9, 0.999))
eps (float, optional): term added to the denominator to improve
numerical stability (default: 1e-8)
weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
.. Fixing Weight Decay Regularization in Adam:
https://arxiv.org/abs/1711.05101
"""
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,
weight_decay=0, amsgrad=False):
if not 0.0 <= lr:
raise ValueError("Invalid learning rate: {}".format(lr))
if not 0.0 <= eps:
raise ValueError("Invalid epsilon value: {}".format(eps))
if not 0.0 <= betas[0] < 1.0:
raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0]))
if not 0.0 <= betas[1] < 1.0:
raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
if not 0.0 <= weight_decay:
raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
defaults = dict(lr=lr, betas=betas, eps=eps,
weight_decay=weight_decay, amsgrad=amsgrad)
super(AdamBelief, self).__init__(params, defaults)
def __setstate__(self, state):
        super(AdamBelief, self).__setstate__(state)
for group in self.param_groups:
group.setdefault('amsgrad', False)
def step(self, closure=None):
"""Performs a single optimization step.
Arguments:
closure (callable, optional): A closure that reevaluates the model
and returns the loss.
"""
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
beta1, beta2 = group['betas']
lr, eps, amsgrad = group['lr'], group['eps'], group['amsgrad']
for p in group['params']:
if p.grad is None:
continue
if group['weight_decay'] != 0:
p.data.mul_(1. - lr * group['weight_decay'])
grad = p.grad.data
if grad.is_sparse:
                    raise RuntimeError('AdamBelief does not support sparse gradients, please consider SparseAdam instead')
state = self.state[p]
# State initialization
if 'step' not in state:
state['step'] = 0
# Exponential moving average of gradient values
state['exp_avg'] = torch.zeros_like(p.data)
# Exponential moving average of learning rates
state['exp_avg_lr_1'] = 0.
state['exp_avg_lr_2'] = 0.
# Exponential moving average of squared gradient values
state['exp_avg_var'] = torch.zeros_like(p.data)
if amsgrad:
# Maintains max of all exp. moving avg. of sq. grad. values
state['max_exp_avg_var'] = torch.zeros_like(p.data)
exp_avg, exp_avg_var = state['exp_avg'], state['exp_avg_var']
if amsgrad:
if not 'max_exp_avg_var' in state:
state['max_exp_avg_var'] = torch.zeros_like(p.data)
max_exp_avg_var = state['max_exp_avg_var']
state['step'] += 1
# We define the bias_correction1 such that it respects the possibly moving "group['lr']"
state['exp_avg_lr_1'] = state['exp_avg_lr_1'] * beta1 + (1. - beta1) * lr
state['exp_avg_lr_2'] = state['exp_avg_lr_2'] * beta2 + (1. - beta2)
#bias_correction1 = state['exp_avg_lr_1'] / group['lr'] if group['lr']!=0. else 1. #1. - beta1 ** state['step']
bias_correction2 = state['exp_avg_lr_2']
# For convenience, we directly use "sqrt_bias_correction2" and "step_size" as the following
sqrt_bias_correction2 = math.sqrt(bias_correction2)
# when state['exp_avg_lr_1'] is zero, exp_avg should also be zero and it is trivial
one_over_bias_correction1 = lr / state['exp_avg_lr_1'] if state['exp_avg_lr_1']!=0. else 0.
step_size = one_over_bias_correction1 * sqrt_bias_correction2 # instead of correcting "denom" by dividing it, we put the correction factor into "step_size" and "eps"
# Decay the first and second moment running average coefficient
diff = grad - exp_avg
exp_avg.mul_(beta1).add_(grad, alpha=(1. - beta1))
exp_avg_var.mul_(beta2).addcmul_(diff, diff, value=1. - beta2)
if amsgrad:
# Maintains the maximum of all 2nd moment running avg. till now
torch.max(max_exp_avg_var, exp_avg_var, out=max_exp_avg_var)
# Use the max. for normalizing running avg. of gradient
denom = max_exp_avg_var.sqrt().add_(eps * sqrt_bias_correction2)
# 'eps' is first multiplied by sqrt_bias_correction2 and then divided by sqrt_bias_correction2
else:
denom = exp_avg_var.sqrt().add_(eps * sqrt_bias_correction2)
p.data.addcdiv_(exp_avg, denom, value= - step_size * lr)
return loss
class LaProp(Optimizer):
def __init__(self, params, lr=4e-4, betas=(0.9, 0.999), eps=1e-15,
weight_decay=0., amsgrad=False, centered=False):
self.centered = centered
if not 0.0 <= lr:
raise ValueError("Invalid learning rate: {}".format(lr))
if not 0.0 <= eps:
raise ValueError("Invalid epsilon value: {}".format(eps))
if not 0.0 <= betas[0] < 1.0:
raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0]))
if not 0.0 <= betas[1] < 1.0:
raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
if not 0.0 <= weight_decay:
raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
defaults = dict(lr=lr, betas=betas, eps=eps,
weight_decay=weight_decay, amsgrad=amsgrad)
super(LaProp, self).__init__(params, defaults)
def __setstate__(self, state):
super(LaProp, self).__setstate__(state)
for group in self.param_groups:
group.setdefault('amsgrad', False)
def step(self, closure=None):
"""Performs a single optimization step.
Arguments:
closure (callable, optional): A closure that reevaluates the model
and returns the loss.
"""
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
for p in group['params']:
if p.grad is None:
continue
if group['weight_decay'] != 0:
p.data.mul_(1. - group['lr'] * group['weight_decay'])
grad = p.grad.data
if grad.is_sparse:
raise RuntimeError('LaProp does not support sparse gradients, please consider SparseAdam instead')
amsgrad = group['amsgrad']
state = self.state[p]
# State initialization
if 'step' not in state:
state['step'] = 0
# Exponential moving average of gradient values
state['exp_avg'] = torch.zeros_like(p.data)
# Exponential moving average of learning rates
state['exp_avg_lr_1'] = 0.
state['exp_avg_lr_2'] = 0.
# Exponential moving average of squared gradient values
state['exp_avg_sq'] = torch.zeros_like(p.data)
state['exp_mean_avg_sq'] = torch.zeros_like(p.data)
if amsgrad:
# Maintains max of all exp. moving avg. of sq. grad. values
state['max_exp_avg_sq'] = torch.zeros_like(p.data)
state['PopArt_rescale'] = 1.
state['Momentum_rescale'] = 1.
exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
if self.centered:
exp_mean_avg_sq = state['exp_mean_avg_sq']
if amsgrad:
max_exp_avg_sq = state['max_exp_avg_sq']
beta1, beta2 = group['betas']
state['step'] += 1
state['exp_avg_lr_1'] = state['exp_avg_lr_1'] * beta1 + (1. - beta1) * group['lr']
state['exp_avg_lr_2'] = state['exp_avg_lr_2'] * beta2 + (1. - beta2)
#bias_correction1 = state['exp_avg_lr_1'] / group['lr'] if group['lr']!=0. else 1. #1 - beta1 ** state['step']
bias_correction2 = state['exp_avg_lr_2']
# For convenience, we directly use "sqrt_bias_correction2" and "step_size" as the following
sqrt_bias_correction2 = math.sqrt(bias_correction2)
# when state['exp_avg_lr_1'] is zero, exp_avg should also be zero and it is trivial
one_over_bias_correction1 = group['lr'] / state['exp_avg_lr_1'] if state['exp_avg_lr_1']!=0. else 0.
step_size = one_over_bias_correction1
# Decay the first and second moment running average coefficient
rescaling = state['PopArt_rescale']
exp_avg_sq.mul_(beta2*rescaling**2).addcmul_(grad, grad, value=1. - beta2)
denom = exp_avg_sq
if self.centered:
exp_mean_avg_sq.mul_(beta2*rescaling).add_(grad, alpha=1. - beta2)
if state['step']>5:
denom = denom.addcmul(exp_mean_avg_sq, exp_mean_avg_sq, value=-1.)
if amsgrad:
if not (self.centered and state['step']<=5):
# Maintains the maximum of all (centered) 2nd moment running avg. till now
torch.max(max_exp_avg_sq, denom, out=max_exp_avg_sq)
# Use the max. for normalizing running avg. of gradient
denom = max_exp_avg_sq
denom = denom.sqrt().add_(group['eps'] * sqrt_bias_correction2) # instead of correcting "denom" by dividing it, we put the correction factor into "exp_avg" and "eps"
momentum_rescaling = state['Momentum_rescale']
exp_avg.mul_(beta1*momentum_rescaling).addcdiv_(grad, denom, value=(1. - beta1) * group['lr'] * sqrt_bias_correction2)
p.data.add_(exp_avg, alpha = -step_size)
return loss
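

if __name__ == "__main__":
    # Minimal usage sketch (illustrative addition; any small model works,
    # and AdamW/AdamBelief are drop-in replacements for LaProp here).
    model = torch.nn.Linear(10, 1)
    opt = LaProp(model.parameters(), lr=4e-4)
    for _ in range(3):
        loss = model(torch.randn(4, 10)).pow(2).mean()
        opt.zero_grad()
        loss.backward()
        opt.step()
    print("final loss:", loss.item())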
|
<filename>C2CRIBuildDir/projects/C2C-RI/src/RIGUI/src/org/fhwa/c2cri/gui/wizard/testconfig/edit/page/SelectRequirementsPage.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.gui.wizard.testconfig.edit.page;
import com.github.cjwizard.WizardPage;
import com.github.cjwizard.WizardSettings;
import java.awt.KeyboardFocusManager;
import java.awt.event.ActionEvent;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JTable;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.TableColumnModel;
import javax.swing.table.TableRowSorter;
import org.fhwa.c2cri.domain.testmodel.TestConfigurationController;
import org.fhwa.c2cri.gui.SelectionFlagEditor;
import org.fhwa.c2cri.gui.SelectionFlagListener;
import org.fhwa.c2cri.gui.SelectionFlagRenderer;
import org.fhwa.c2cri.gui.TextAreaRenderer;
import org.fhwa.c2cri.gui.wizard.C2CRIWizardPage;
import org.fhwa.c2cri.testmodel.DefaultLayerParameters;
import org.fhwa.c2cri.testmodel.ProjectRequirementsInterface;
import org.fhwa.c2cri.testmodel.Requirement;
import org.fhwa.c2cri.testmodel.UserNeedsInterface;
/**
*
* @author TransCore ITS, LLC
*/
public class SelectRequirementsPage extends C2CRIWizardPage implements java.awt.event.ActionListener {
/**
* The sorter2.
*/
private TableRowSorter<RequirementsTableModel> sorter2;
private String currentNeed;
private boolean appLayerSelected;
TestConfigurationController controller;
/**
* Creates new form SelectNeedsPage
*/
public SelectRequirementsPage(String title, String description, TestConfigurationController controller, boolean appLayerSelected, String selectedNeed) {
super(title, description);
initComponents();
this.controller = controller;
initRequirementsPanel(standardRequirementsTable, parametersTable, appLayerSelected ? controller.getAppLayerParams() : controller.getInfoLayerParams(), selectedNeed);
this.appLayerSelected = appLayerSelected;
currentNeed = selectedNeed;
requirementsClearAllButton.addActionListener(this);
}
/**
* Inits the requirements panel.
*
* Pre-Conditions: N/A Post-Conditions: N/A
*
* @param standardRequirementsTable the standard requirements table
* @param parametersTable the parameters table
* @param layerParams the layer params
* @param needId the identifier of the selected user need
*/
private void initRequirementsPanel(final JTable standardRequirementsTable, final JTable parametersTable, final DefaultLayerParameters layerParams, final String needId) {
System.out.println(" Loading the Parameter Panel ....... ");
//Ensure the tables do not currently have sorters activated
standardRequirementsTable.setRowSorter(null);
parametersTable.setRowSorter(null);
RequirementsTableModel requirementsTableModel = new RequirementsTableModel(layerParams.getNrtm(), needId);
OtherRequirementsTableModel otherRequirementsTableModel = new OtherRequirementsTableModel(layerParams.getNrtm(), needId);
standardRequirementsTable.setModel(requirementsTableModel);
parametersTable.setModel(otherRequirementsTableModel);
// To prevent tabbing between individual cells of the table disable the
// functions in the tables.
standardRequirementsTable.setFocusTraversalKeys(KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS, null);
standardRequirementsTable.setFocusTraversalKeys(KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS, null);
standardRequirementsTable.setFocusCycleRoot(false);
parametersTable.setFocusTraversalKeys(KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS, null);
parametersTable.setFocusTraversalKeys(KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS, null);
parametersTable.setFocusCycleRoot(false);
requirementsTableModel.fireTableStructureChanged();
otherRequirementsTableModel.fireTableStructureChanged();
requirementsTableModel.fireTableDataChanged();
otherRequirementsTableModel.fireTableDataChanged();
SelectionFlagListener unselectedNeedListener = new SelectionFlagListener() {
@Override
public void flagValueSetUpdate(int tableRow) {
standardRequirementsTable.setEnabled(true);
parametersTable.setEnabled(true);
}
@Override
public void flagValueClearedUpdate(int tableModelRow) {
String deselectedNeed = layerParams.getNrtm().getUserNeeds().needs.get(tableModelRow).getTitle();
List<String> selectedRequirementsList = new ArrayList<String>();
// Gather a list of optional selected requirements associated with this need
for (Requirement thisProjectRequirement : layerParams.getNrtm().getUserNeeds().getNeed(deselectedNeed).getProjectRequirements().requirements) {
if ((thisProjectRequirement.getFlagValue()) && (!thisProjectRequirement.getType().equals("M") && !thisProjectRequirement.getType().equals("Mandatory"))) {
thisProjectRequirement.setFlagValue(false);
((RequirementsTableModel) standardRequirementsTable.getModel()).fireTableDataChanged();
}
}
int currentRow = standardRequirementsTable.getSelectionModel().getLeadSelectionIndex();
standardRequirementsTable.getSelectionModel().removeSelectionInterval(currentRow, currentRow);
standardRequirementsTable.setEnabled(false);
parametersTable.setEnabled(false);
}
};
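// The listener above keeps selections consistent: deselecting a need clears
// its optional (non-mandatory) requirements and disables both tables.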
SelectionFlagEditor infoLayerFlagEditor = new SelectionFlagEditor(UserNeedsInterface.type_Header);
infoLayerFlagEditor.registerSelectionFlagListener(unselectedNeedListener);
//When selection changes, provide user with row numbers for
//both view and model.
standardRequirementsTable.getSelectionModel().addListSelectionListener(
new ListSelectionListener() {
public void valueChanged(ListSelectionEvent event) {
int viewRow = standardRequirementsTable.getSelectedRow();
if (parametersTable.isEditing()) {
parametersTable.getCellEditor().cancelCellEditing();
}
if (viewRow < 0) {
//Selection got filtered away.
} else {
Boolean requirementSelected = (Boolean) standardRequirementsTable.getValueAt(standardRequirementsTable.getSelectedRow(), standardRequirementsTable.getColumn(ProjectRequirementsInterface.flagValue_Header).getModelIndex());
if (requirementSelected) {
parametersTable.setEnabled(true);
} else {
parametersTable.setEnabled(false);
}
}
}
});
otherRequirementsTableModel.setRequirementListSelectionTable(standardRequirementsTable);
standardRequirementsTable.getSelectionModel().addListSelectionListener(otherRequirementsTableModel);
sorter2 = new TableRowSorter<RequirementsTableModel>(requirementsTableModel);
SelectionFlagListener projectRequirementSelectedListener = new SelectionFlagListener() {
@Override
public void flagValueSetUpdate(int tableRow) {
parametersTable.setEnabled(true);
}
@Override
public void flagValueClearedUpdate(int tableModelRow) {
if (parametersTable.isEditing()) {
parametersTable.getCellEditor().cancelCellEditing();
}
parametersTable.setEnabled(false);
}
};
SelectionFlagEditor projectRequirementsFlagEditor = new SelectionFlagEditor(ProjectRequirementsInterface.type_Header);
projectRequirementsFlagEditor.registerSelectionFlagListener(projectRequirementSelectedListener);
standardRequirementsTable.getColumn(ProjectRequirementsInterface.flagValue_Header).setCellRenderer(new SelectionFlagRenderer(ProjectRequirementsInterface.type_Header));
standardRequirementsTable.getColumn(ProjectRequirementsInterface.flagValue_Header).setCellEditor(projectRequirementsFlagEditor);
TableColumnModel cmodel = standardRequirementsTable.getColumnModel();
TextAreaRenderer textAreaRenderer = new TextAreaRenderer();
cmodel.getColumn(RequirementsTableModel.Text_Col).setCellRenderer(textAreaRenderer);
TableColumnModel parameterModel = parametersTable.getColumnModel();
TextAreaRenderer textAreaParameters = new TextAreaRenderer();
parameterModel.getColumn(OtherRequirementsTableModel.Text_Col).setCellRenderer(textAreaParameters);
requirementsTableModel.fireTableDataChanged();
otherRequirementsTableModel.fireTableDataChanged();
}
@Override
public void actionPerformed(ActionEvent arg0) {
for (Requirement theRequirement : appLayerSelected ? controller.getAppLayerParams().getNrtm().getUserNeeds().getNeed(currentNeed).getProjectRequirements().requirements
: controller.getInfoLayerParams().getNrtm().getUserNeeds().getNeed(currentNeed).getProjectRequirements().requirements) {
if ((!theRequirement.getType().equals("M")) && (!theRequirement.getType().equals("Mandatory"))) {
theRequirement.setFlagValue(false);
}
}
((RequirementsTableModel) standardRequirementsTable.getModel()).fireTableDataChanged();
}
@Override
public void rendering(List<WizardPage> path, WizardSettings settings) {
super.rendering(path, settings);
((RequirementsTableModel) standardRequirementsTable.getModel()).fireTableDataChanged();
}
@Override
public boolean isCheckRequiredBeforeCancel() {
return true;
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
testParametersPanel = new javax.swing.JPanel();
parametersScrollPane = new javax.swing.JScrollPane();
parametersTable = new javax.swing.JTable();
selectRequirementsPanel = new javax.swing.JPanel();
standardRequirementsScrollPane = new javax.swing.JScrollPane();
standardRequirementsTable = new javax.swing.JTable();
jPanel2 = new javax.swing.JPanel();
requirementsClearAllButton = new javax.swing.JButton();
testParametersPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Test Parameters"));
testParametersPanel.setPreferredSize(new java.awt.Dimension(678, 99));
parametersScrollPane.setPreferredSize(new java.awt.Dimension(452, 0));
parametersTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{"SubscriptionDelay", "The maximum time (in milliseconds to await a response", "1000"}
},
new String [] {
"Parameter", "Description", "Value"
}
) {
Class[] types = new Class [] {
java.lang.String.class, java.lang.String.class, java.lang.Object.class
};
boolean[] canEdit = new boolean [] {
false, false, true
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
parametersTable.setToolTipText("Set information layer standard parameters.");
parametersTable.setFillsViewportHeight(true);
parametersScrollPane.setViewportView(parametersTable);
javax.swing.GroupLayout testParametersPanelLayout = new javax.swing.GroupLayout(testParametersPanel);
testParametersPanel.setLayout(testParametersPanelLayout);
testParametersPanelLayout.setHorizontalGroup(
testParametersPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(testParametersPanelLayout.createSequentialGroup()
.addContainerGap()
.addComponent(parametersScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 576, Short.MAX_VALUE)
.addContainerGap())
);
testParametersPanelLayout.setVerticalGroup(
testParametersPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(testParametersPanelLayout.createSequentialGroup()
.addGap(18, 18, 18)
.addComponent(parametersScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 52, Short.MAX_VALUE)
.addContainerGap())
);
selectRequirementsPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Select Requirements"));
selectRequirementsPanel.setPreferredSize(new java.awt.Dimension(678, 142));
standardRequirementsScrollPane.setViewportBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(0, 0, 0)));
standardRequirementsScrollPane.setPreferredSize(new java.awt.Dimension(452, 110));
standardRequirementsTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{"3.3.1.1.1", "Subscription Request", "M", new Boolean(true)},
{"3.3.1.1.2", "DMS Inventory", "O", new Boolean(true)},
{"3.3.1.1.3", "CCTV Control", "M", new Boolean(true)},
{"3.3.1.1.4", "FE Update", "O", null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Requirement", "Description", "Project Requirements", "Selected"
}
) {
Class[] types = new Class [] {
java.lang.Object.class, java.lang.Object.class, java.lang.Object.class, java.lang.Boolean.class
};
boolean[] canEdit = new boolean [] {
false, false, false, true
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
standardRequirementsTable.setToolTipText("Select information layer standard requirements.");
standardRequirementsTable.setFillsViewportHeight(true);
standardRequirementsTable.setMinimumSize(new java.awt.Dimension(60, 60));
standardRequirementsScrollPane.setViewportView(standardRequirementsTable);
jPanel2.setPreferredSize(new java.awt.Dimension(100, 23));
requirementsClearAllButton.setText("Clear Optional");
requirementsClearAllButton.setToolTipText("De-select all optional Requirements");
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addContainerGap(227, Short.MAX_VALUE)
.addComponent(requirementsClearAllButton)
.addContainerGap(246, Short.MAX_VALUE))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(requirementsClearAllButton)
);
javax.swing.GroupLayout selectRequirementsPanelLayout = new javax.swing.GroupLayout(selectRequirementsPanel);
selectRequirementsPanel.setLayout(selectRequirementsPanelLayout);
selectRequirementsPanelLayout.setHorizontalGroup(
selectRequirementsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, selectRequirementsPanelLayout.createSequentialGroup()
.addContainerGap()
.addGroup(selectRequirementsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(standardRequirementsScrollPane, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 576, Short.MAX_VALUE)
.addComponent(jPanel2, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 576, Short.MAX_VALUE))
.addContainerGap())
);
selectRequirementsPanelLayout.setVerticalGroup(
selectRequirementsPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, selectRequirementsPanelLayout.createSequentialGroup()
.addComponent(standardRequirementsScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 163, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(testParametersPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 608, Short.MAX_VALUE)
.addComponent(selectRequirementsPanel, javax.swing.GroupLayout.Alignment.TRAILING, 0, 608, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap()
.addComponent(selectRequirementsPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 219, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(testParametersPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 103, Short.MAX_VALUE)
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JPanel jPanel2;
protected javax.swing.JScrollPane parametersScrollPane;
protected javax.swing.JTable parametersTable;
protected javax.swing.JButton requirementsClearAllButton;
private javax.swing.JPanel selectRequirementsPanel;
private javax.swing.JScrollPane standardRequirementsScrollPane;
protected javax.swing.JTable standardRequirementsTable;
private javax.swing.JPanel testParametersPanel;
// End of variables declaration//GEN-END:variables
}
|
<reponame>knofler/app<filename>app/containers/Delete/saga.js
/* eslint-disable comma-dangle */
/* eslint-disable no-console */
/*
*
* DELETE saga
*
*/
import { all, call, put, takeLatest } from "redux-saga/effects";
import { socket } from "utils/socketio-client";
import { DELETE_CONST_POST } from "./constants";
import { deleteActionPostSuccess, deleteActionPostError } from "./actions";
const herokuAPIURL = "https://aframework-api.herokuapp.com";
const model = "/api/books";
const getUrl = process.env.API_URL || herokuAPIURL;
const url = getUrl + model;
console.log("process.env.API_URL", process.env.API_URL);
console.log("herokuAPIURL is", herokuAPIURL);
console.log("url is ", url);
// Load Functions on Event Change
function* deleteSagaPost() {
yield takeLatest(DELETE_CONST_POST, fetchPostDelete);
}
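// Worker saga: performs the DELETE request and dispatches success or error actions.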
function* fetchPostDelete(action) {
try {
// CRUD_CONST_DELETE event action and api call
console.log("DELETE_CONST_POST constant's action in saga is:: ", action);
console.log(
"DELETE_CONST_POST constant's action.id in saga is:: ",
action.id
);
console.log(
"DELETE_CONST_POST constant's action.model in saga is:: ",
action.model
);
if (action.model !== undefined && action.id !== undefined) {
const deleteUrl = `${getUrl}/api/${action.model}/${action.id}`;
console.log("deleteUrl:", deleteUrl);
//
const response = yield call(fetch, deleteUrl, {
method: "DELETE",
headers: {
Accept: "application/json",
"Content-Type": "application/json"
}
});
const responseBody = yield response.json();
console.log("responseBody of DELETE_CONST_POST in saga is", responseBody);
window.localStorage.setItem("delete-data", JSON.stringify(responseBody));
yield put(deleteActionPostSuccess(responseBody));
socket.emit("delete_data", responseBody);
}
} catch (error) {
yield put(deleteActionPostError(error));
}
}
// Individual exports for testing
export default function* deleteSaga() {
// See example in containers/HomePage/saga.js
yield all([deleteSagaPost()]);
}
|
#!/bin/bash
COVER_PROFILE=coverage.txt
echo "mode: set" > $COVER_PROFILE
FAIL=0
go test -cover ./polly/cli || FAIL=1
if [ "$FAIL" -ne 0 ]; then
exit 1
fi
COVER_PKG="github.com/emccode/polly"
go test -coverpkg=$COVER_PKG -coverprofile=profile.out ./test || FAIL=1
if [ -f profile.out ]; then
grep -v "mode: set" profile.out >> $COVER_PROFILE
rm -f profile.out
fi
if [ "$FAIL" -ne 0 ]; then
exit 1
fi
if [ "$1" = "main" ]; then
rm -f $COVER_PROFILE
exit 0
fi
for DIR in $(find . -type d \
-not -path '*/.*' \
-not -path './.git*' \
-not -path '*/_*' \
-not -path './vendor/*' \
-not -path './polly/*' \
-not -path './core' \
-not -path '.'); do
if ls $DIR/*.go &> /dev/null; then
go test -coverprofile=profile.out $DIR || FAIL=1
if [ -f profile.out ]; then
grep -v "mode: set" profile.out >> $COVER_PROFILE
rm -f profile.out
fi
fi
done
if [ -n "$COVERALLS" -a "$FAIL" -eq "0" ]; then
goveralls -v -coverprofile=$COVER_PROFILE
fi
if [ -n "$CODECOV" -a "$FAIL" -eq "0" ]; then
bash <(curl -s https://codecov.io/bash)
fi
rm -f $COVER_PROFILE
exit $FAIL
|
import * as azmaps from "azure-maps-control";
import { PieChartMarkerOptions } from './PieChartMarkerOptions';
import { ExtendedHtmlMarker } from './extentions/ExtendedHtmlMarker';
/**
* A class for creating Pie Charts as HTML Markers on a map.
*/
export class PieChartMarker extends azmaps.HtmlMarker implements ExtendedHtmlMarker {
/********************
* Private Properties
********************/
private _options = <PieChartMarkerOptions>{
values: [],
radius: 40,
colors: ['#d7191c', '#fdae61', '#ffffbf', '#abdda4', '#2b83ba'],
fillColor: 'transparent',
strokeWidth: 0,
strokeColor: '#666666',
innerRadius: 0
};
/** The total of all values. */
private _total: number = 0;
/** Additional colors to use when enough haven't been specified. */
public static _moreColors: string[] = [];
/********************
* Constructor
********************/
/**
* Creates an HTML Marker in the shape of a pie chart.
* @param options Options for rendering the Pie Chart marker.
*/
constructor(options: PieChartMarkerOptions) {
super(options);
super.setOptions({
htmlContent: document.createElement('div'),
pixelOffset: [0, 0],
anchor: 'center'
});
this.setOptions(options);
}
/********************
* Public Methods
********************/
/** ID of the marker. */
public id: string;
/** Any additional properties that you want to store with the marker. */
public properties: any = {};
/**
* Gets the total value of all slices summed together.
* @returns The total value of all slices summed together.
*/
public getTotalValue(): number {
return this._total;
}
/**
* Gets the value of a slice of the pie based on its index.
* @param idx The index of the slice.
* @returns The value of a slice of the pie based on its index.
*/
public getSliceValue(idx: number): number {
const vals = this._options.values;
return (idx >= 0 && idx < vals.length) ? vals[idx] : 0;
}
/**
* Gets the percentage value of a slice of the pie based on its index.
* @param idx The index of the slice.
* @returns The percentage value of a slice of the pie based on its index.
*/
public getSlicePercentage(idx: number): number {
const self = this;
return (self._total > 0) ? Math.round(self.getSliceValue(idx) / self._total * 10000) / 100 : 0;
}
/**
* Gets the options of the pie chart marker.
* @returns The options of the pie chart marker.
*/
public getOptions(): PieChartMarkerOptions {
return Object.assign({}, super.getOptions(), this._options);
}
/**
* Sets the options of the pie chart marker.
* @param options The options to set on the marker.
*/
public setOptions(options: PieChartMarkerOptions): void {
const self = this;
const opt = self._options;
const stringify = JSON.stringify;
let rerender = false;
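//Track whether any visual option changed so the SVG is only re-rendered once.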
if (options.radius && options.radius > 0 && options.radius != opt.radius) {
opt.radius = options.radius;
rerender = true;
}
if (options.innerRadius >= 0 && options.innerRadius != opt.innerRadius) {
opt.innerRadius = options.innerRadius;
rerender = true;
}
if (options.colors && stringify(options.colors) !== stringify(opt.colors)) {
opt.colors = options.colors;
rerender = true;
}
if (options.fillColor && stringify(options.fillColor) !== stringify(opt.fillColor)) {
opt.fillColor = options.fillColor;
rerender = true;
}
if (options.strokeColor && options.strokeColor !== opt.strokeColor) {
opt.strokeColor = options.strokeColor;
rerender = true;
}
if (options.strokeWidth >= 0 && options.strokeWidth != opt.strokeWidth) {
opt.strokeWidth = options.strokeWidth;
rerender = true;
}
if (options.tooltipCallback !== undefined && opt.tooltipCallback != options.tooltipCallback) {
opt.tooltipCallback = options.tooltipCallback;
rerender = true;
}
if (options.values && stringify(options.values) !== stringify(opt.values)) {
opt.values = options.values;
rerender = true;
}
if (options.text !== undefined && options.text !== opt.text) {
//opt.text = options.text;
super.setOptions({ text: options.text });
rerender = true;
}
if (options.textClassName !== undefined && options.textClassName !== opt.textClassName) {
opt.textClassName = options.textClassName;
rerender = true;
}
if (rerender) {
self._render();
}
super.setOptions(options);
}
/********************
* Private Methods
********************/
/**
* Method that generates the SVG pie chart for the marker.
*/
private _render() {
const self = this;
const opt = self._options;
const data = opt.values;
const radius = opt.radius;
let startAngle = 0, angle = 0;
if (data) {
self._total = data.reduce((a, b) => {
return a + b;
}, 0);
//Ensure that there are enough colors defined.
const moreColors = PieChartMarker._moreColors;
const random = Math.random;
const round = Math.round;
let mIdx = 0;
while (data.length > opt.colors.length) {
//Generate additional random colors, but try to stagger them so that there is good variation between adjacent colors.
if (moreColors.length < data.length) {
moreColors.push(`hsl(${round(random() * 360)},${round(random() * 20) + 70}%,${round(random() * 40) + 30}%)`);
}
//Grab the next additional color from the global pallet.
opt.colors.push(moreColors[mIdx]);
mIdx++;
}
//Origin for cx/cy
const o = radius + opt.strokeWidth;
const svg = [`<svg xmlns="http://www.w3.org/2000/svg" width="${2 * o}px" height="${2 * o}px">`];
let tooltip = '';
let maskId: string;
if (opt.innerRadius > 0 && opt.innerRadius <= opt.radius) {
maskId = 'piechart-innercircle-' + round(random() * 10000000);
svg.push(`<defs><mask id="${maskId}"><rect width="100%" height="100%" fill="white"/><circle r="${opt.innerRadius}" cx="${o}" cy="${o}" fill="black"/></mask></defs>
<circle r="${opt.innerRadius}" cx="${o}" cy="${o}" style="fill:${opt.fillColor};stroke:${opt.strokeColor};stroke-width:${opt.strokeWidth * 2}px;"/>`);
}
if (self._total > 0) {
const ttc = opt.tooltipCallback;
const ratio = Math.PI * 2 / self._total;
for (let i = 0; i < data.length; i++) {
angle = ratio * data[i];
if (ttc) {
tooltip = ttc(self, i);
}
const c = (i < opt.colors.length) ? opt.colors[i] : moreColors[i];
svg.push(self._createSlice(o, o, radius, startAngle, angle, c, tooltip, maskId));
startAngle += angle;
}
}
const text = self.getOptions().text;
if (text) {
svg.push(`<text x="${o}" y="${(o + 7)}" style="font-size:16px;font-family:arial;fill:#000;font-weight:bold;" class="${opt.textClassName || ''}" text-anchor="middle">${text}</text>`);
}
svg.push('</svg>');
(<HTMLDivElement>super.getOptions().htmlContent).innerHTML = svg.join('');
}
}
/**
* Generates the SVG path for an arc slice of a pie.
* @param cx Center x-origin of the arc.
* @param cy Center y-origin of the arc.
* @param r Radius of arc.
* @param startAngle The start angle of the arc (0 = up, PI/2 = right, PI = down, 3/2 PI = left)
* @param angle The angle width of the arc.
* @param fillColor The fill color of the path.
* @param tooltip The tooltip text to display when hovered.
*/
private _createSlice(cx: number, cy: number, r: number, startAngle: number, angle: number, fillColor: string, tooltip: string, maskId: string): string {
const opt = this._options;
const pi = Math.PI;
let mask = '';
if(maskId){
mask = ` mask="url(#${maskId})"`;
}
if (angle > 2 * pi * 0.99) {
//If the shape is nearly a complete circle, create a circle instead of an arc.
return `<circle r="${r}" cx="${cx}" cy="${cy}" style="fill:${fillColor};stroke:${opt.strokeColor};stroke-width:${opt.strokeWidth}px;"${mask}><title>${tooltip}</title></circle>`;
}
const sin = Math.sin;
const cos = Math.cos;
const x1 = cx + r * sin(startAngle);
const y1 = cy - r * cos(startAngle);
const x2 = cx + r * sin(startAngle + angle);
const y2 = cy - r * cos(startAngle + angle);
const x21 = cx + opt.innerRadius * sin(startAngle);
const y21 = cy - opt.innerRadius * cos(startAngle);
const x22 = cx + opt.innerRadius * sin(startAngle + angle);
const y22 = cy - opt.innerRadius * cos(startAngle + angle);
//Flag for when arcs are larger than 180 degrees in radians.
let big = 0;
if (angle > pi) {
big = 1;
}
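//SVG path arc syntax: A rx,ry x-axis-rotation large-arc-flag sweep-flag x y; sweep=1 draws the slice clockwise.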
return `<path d="M${cx} ${cy} L ${x1} ${y1} A ${r},${r} 0 ${big} 1 ${x2} ${y2}z" style="fill:${fillColor};stroke:${opt.strokeColor};stroke-width:${opt.strokeWidth}px;"${mask}><title>${tooltip}</title></path>`;
}
}
|
def distinct_subsequences(s):
    """Pairwise-position DP over s: dp[i][j] is extended only when
    s[i-1] == s[j-1] with i != j, i.e. only equal characters at distinct
    positions contribute, and dp[n][n] is returned. (The intended problem
    statement is not given in the source; this documents the recurrence
    as written.)"""
    n = len(s)
    dp = [[0 for _ in range(n + 1)] for _ in range(n + 1)]
    for i in range(n + 1):
        dp[i][0] = 1
    for i in range(1, n + 1):
        for j in range(1, n + 1):
            if s[i - 1] == s[j - 1] and i != j:
                dp[i][j] = dp[i - 1][j - 1] + dp[i - 1][j]
            else:
                dp[i][j] = dp[i - 1][j]
    return dp[n][n]
|
#!/bin/sh
if [ "$#" -lt 1 ] || [ "$#" -gt 3 ]; then
echo "Usage: $0 NAME (SECURITY_PROFILE) (CERTFILE)"
exit 1
fi
CLIENT_NAME=$1
CLIENT_SECURITY_PROFILE=$2
[ -z "$CLIENT_SECURITY_PROFILE" ] && CLIENT_SECURITY_PROFILE="idsc:BASE_SECURITY_PROFILE"
CLIENT_CERT="keys/clients/$CLIENT_NAME.cert"
if [ -n "$3" ]; then
[ ! -f "$3" ] && (echo "Cert not found"; exit 1)
cert_format="DER"
openssl x509 -noout -in "$3" 2>/dev/null && cert_format="PEM"
openssl x509 -inform "$cert_format" -in "$3" -text > "$CLIENT_CERT"
else
openssl req -newkey rsa:2048 -new -batch -nodes -x509 -days 3650 -text -keyout "keys/clients/${CLIENT_NAME}.key" -out "$CLIENT_CERT"
fi
SKI="$(grep -A1 "Subject Key Identifier" "$CLIENT_CERT" | tail -n 1 | tr -d ' ')"
AKI="$(grep -A1 "Authority Key Identifier" "$CLIENT_CERT" | tail -n 1 | tr -d ' ')"
CLIENT_ID="$SKI:$AKI"
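# The client ID joins the certificate's Subject Key Identifier and Authority
# Key Identifier (openssl's AKI output already carries the "keyid:" prefix),
# following the IDS DAPS-style client registration convention.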
CLIENT_CERT_SHA="$(openssl x509 -in "$CLIENT_CERT" -noout -sha256 -fingerprint | tr '[:upper:]' '[:lower:]' | tr -d : | sed 's/.*=//')"
cat >> config/clients.yml <<EOF
- client_id: $CLIENT_ID
client_name: $CLIENT_NAME
grant_types: client_credentials
token_endpoint_auth_method: private_key_jwt
scope: idsc:IDS_CONNECTOR_ATTRIBUTES_ALL
attributes:
- key: idsc
value: IDS_CONNECTOR_ATTRIBUTES_ALL
- key: securityProfile
value: $CLIENT_SECURITY_PROFILE
- key: referringConnector
value: http://${CLIENT_NAME}.demo
- key: "@type"
value: ids:DatPayload
- key: "@context"
value: https://w3id.org/idsa/contexts/context.jsonld
- key: transportCertsSha256
value: $CLIENT_CERT_SHA
import_certfile: $CLIENT_CERT
EOF
|
#!/bin/bash
QUALITY=$(pylint --rcfile=pylint.rc transport_proxy.py yandex_transport_core/*.py | grep -oP '(?<=Your code has been rated at).*?(?=/)')
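# The lookbehind/lookahead pattern extracts just the numeric score between
# "Your code has been rated at" and the "/10" suffix of the pylint summary.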
echo "Quality : $QUALITY"
echo '"Code quality"' > code_quality.csv
echo "$QUALITY" >> code_quality.csv
SIZE_BYTES=$(docker image inspect owlsoul/ytproxy:dev --format='{{.Size}}')
SIZE_MB=$(( $SIZE_BYTES / 1024 / 1024))
echo "Docker image size (MB): $SIZE_MB"
echo '"Docker image size"' > docker_image_size.csv
echo "$SIZE_MB" >> docker_image_size.csv
|
#!/bin/sh
remove_directory="/usr/local/foglamp/python/foglamp/plugins/north/omf/"
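# Location of the legacy Python 'omf' north plugin removed during package update.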
# Remove dir if exists
if [ -d "${remove_directory}" ]; then
echo "FogLAMP package update: removing 'omf' Python north plugin ..."
rm -rf "${remove_directory}"
# Check
if [ -d "${remove_directory}" ]; then
echo "ERROR: FogLAMP plugin 'omf' not removed in '${remove_directory}'"
exit 1
else
echo "FogLAMP plugin 'omf' removed in '${remove_directory}'"
fi
fi
# The dummy C plugin has been renamed to random, remove the old plugin
dummy_directory="/usr/local/foglamp/plugins/south/dummy"
if [ -d "$dummy_directory" ]; then
echo "FogLAMP package update: removing 'dummy' South plugin"
rm -rf "$dummy_directory"
fi
# The omf C plugin has been renamed to PI_Server, remove the old plugin
omf_directory="/usr/local/foglamp/plugins/north/omf"
if [ -d "$omf_directory" ]; then
echo "FogLAMP package update: removing 'omf' North plugin"
rm -rf "$omf_directory"
fi
|
<filename>packages/web/src/components/Community/ModTools/RulesPane/RuleForm/RuleForm.test.tsx<gh_stars>0
import React from 'react';
import {
render,
fireEvent,
cleanup,
waitForElement,
} from '@testing-library/react';
import RuleForm from './RuleForm';
describe('<RuleForm />', () => {
const nameValue = 'testValue';
const descriptionValue = 'testDescription';
const cancelHandler = jest.fn();
const addRuleHandler = jest.fn();
const editRuleHandler = jest.fn();
const type = 'Add';
const editRule = {
name: 'Edit',
description: 'Rule',
scope: 'Posts Only',
date: 'date',
community: 'communityId',
user: 'userId',
_id: 'testId',
};
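// Fixture rule passed through the editRule prop when exercising the form.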
const { container, getByText, getByPlaceholderText } = render(
<RuleForm
cancelHandler={cancelHandler}
editRule={editRule}
addRuleHandler={addRuleHandler}
editRuleHandler={editRuleHandler}
type={type}
/>,
);
beforeEach(async () => {
const nameInput = await waitForElement(() => getByPlaceholderText('Name'));
fireEvent.change(nameInput, { target: { value: nameValue } });
const descriptionInput = await waitForElement(() =>
getByPlaceholderText('Description'),
);
fireEvent.change(descriptionInput, { target: { value: descriptionValue } });
const scopeButton = getByText('Posts only');
fireEvent.click(scopeButton);
const submitButton = getByText('Add Rule');
fireEvent.click(submitButton);
});
afterEach(cleanup);
it('snapshot', () => {
expect(container).toMatchSnapshot();
});
it('should call addRuleHandler once', async () => {
expect(addRuleHandler).toHaveBeenCalledTimes(1);
});
});
|
#!/bin/bash
#
# Copyright 2021 SkyAPM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
pecl package
|
#!/bin/bash
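# Rebuild sdist/wheel artifacts from scratch and upload them to PyPI via twine.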
rm -f dist/*
python setup.py sdist bdist_wheel
rm -f dist/*.egg
twine upload dist/*
|
#!/bin/bash
# Copyright (c) 2021, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl
#
export vol_name=u01
########### SIGINT handler ############
function _int() {
echo "Stopping container.."
echo "SIGINT received, shutting down servers!"
echo ""
echo "Stopping Node Manager.."
/$vol_name/oracle/user_projects/domains/$DOMAIN_NAME/bin/stopNodeManager.sh
echo "Stopping Admin Server.."
/$vol_name/oracle/container-scripts/stopAdmin.sh
exit;
}
########### SIGTERM handler ############
function _term() {
echo "Stopping container.."
echo "SIGTERM received, shutting down Servers!"
echo ""
echo "Stopping Node Manager.."
/$vol_name/oracle/user_projects/domains/$DOMAIN_NAME/bin/stopNodeManager.sh
echo "Stopping Admin Server.."
/$vol_name/oracle/container-scripts/stopAdmin.sh
exit;
}
########### SIGKILL handler ############
function _kill() {
echo "SIGKILL received, shutting down Servers!"
echo ""
echo "Stopping Node Manager.."
/$vol_name/oracle/user_projects/domains/$DOMAIN_NAME/bin/stopNodeManager.sh
echo "Stopping Admin Server.."
/$vol_name/oracle/container-scripts/stopAdmin.sh
exit;
}
# Set SIGINT handler
trap _int SIGINT
# Set SIGTERM handler
trap _term SIGTERM
# Set SIGKILL handler (note: SIGKILL cannot actually be trapped by a shell, so this is effectively a no-op)
trap _kill SIGKILL
export CONTAINERCONFIG_DIR_NAME="container-data"
export CONTAINERCONFIG_DIR="/$vol_name/oracle/user_projects/$CONTAINERCONFIG_DIR_NAME"
export CONTAINERCONFIG_LOG_DIR="$CONTAINERCONFIG_DIR/logs"
export CONTAINERCONFIG_DOMAIN_DIR="/$vol_name/oracle/user_projects/domains"
echo ""
echo "========================================================="
echo " WebCenter Content Docker Container "
echo " Admin Server "
echo " 12.2.1.4.0 "
echo "========================================================="
echo ""
echo ""
# Persistence volume location mapped to this location will need permission fixup
if [ -d $CONTAINERCONFIG_DIR ]; then
chown -R oracle:root $CONTAINERCONFIG_DOMAIN_DIR
chown -R oracle:root $CONTAINERCONFIG_DIR
chown -R oracle:root $CONTAINERCONFIG_LOG_DIR
else
mkdir -p $CONTAINERCONFIG_DIR
mkdir -p $CONTAINERCONFIG_LOG_DIR
mkdir -p $CONTAINERCONFIG_DOMAIN_DIR
chown -R oracle:root $CONTAINERCONFIG_DOMAIN_DIR
chown -R oracle:root $CONTAINERCONFIG_DIR
chown -R oracle:root $CONTAINERCONFIG_LOG_DIR
fi
echo ""
echo ""
export component=$component
echo "component=${component}"
# configuring wcc domain
sh /$vol_name/oracle/container-scripts/createWCCDomain.sh
retval=$?
if [ $retval -ne 0 ];
then
echo ""
echo ""
echo "Domain Creation failed. Exiting.."
exit 1
fi
#delimited code
IFS=',' read -r -a cmp <<< "$component"
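# Split the comma-separated component list (e.g. "IPM,CAPTURE,ADFUI") into an array.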
size=${#cmp[@]}
echo "size of component=$size"
if [ $size -gt "0" ]
then
for i in "${cmp[@]}"
do
if [ "${i^^}" == "IPM" ]
then
echo "Call IPM Implementation"
fi
if [ "${i^^}" == "CAPTURE" ]
then
echo "Not yet Implemented"
fi
if [ "${i^^}" == "ADFUI" ]
then
echo "Not yet Implemented"
fi
done
fi
echo "start admin container"
# start admin container
sh /$vol_name/oracle/container-scripts/startAdminContainer.sh
|
#!/usr/bin/bash
# Copyright (c) 2021. Huawei Technologies Co.,Ltd.ALL rights reserved.
# This program is licensed under Mulan PSL v2.
# You can use it according to the terms and conditions of the Mulan PSL v2.
# http://license.coscl.org.cn/MulanPSL2
# THIS PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
# #############################################
# @Author : doraemon2020
# @Contact : xcl_job@163.com
# @Date : 2020-05-07
# @License : Mulan PSL v2
# @Desc : Test nmcli configure MACsec
# ############################################
source ../common/net_lib.sh
function config_params() {
LOG_INFO "Start to config params of the case."
get_free_eth 1
test_eth1=${LOCAL_ETH[0]}
con_name='test-macsec+'
LOG_INFO "End to config params of the case."
}
function run_test() {
LOG_INFO "Start to run test."
nmcli connection add type macsec \
con-name ${con_name} ifname macsec0 \
connection.autoconnect no \
macsec.parent ${test_eth1} macsec.mode psk \
macsec.mka-cak 12345678901234567890123456789012 \
macsec.mka-ckn 1234567890123456789012345678901234567890123456789012345678901234 \
ip4 192.0.2.100/24
CHECK_RESULT $?
nmcli connection up ${con_name}
CHECK_RESULT $?
LOG_INFO "End to run test."
}
function post_test() {
LOG_INFO "Start to restore the test environment."
nmcli con delete ${con_name}
LOG_INFO "End to restore the test environment."
}
main "$@"
|
package no.mnemonic.commons.logging;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
public class Logging {
private static final String LOGGING_PROPERTY_FILE = "META-INF/no.mnemonic.commons.logging.Logging.properties";
private static final String LOGGING_PROPERTY_KEY = "provider.class";
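// Example provider file content (hypothetical provider class):
//   provider.class=com.example.MyLoggingProvider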
private final static AtomicReference<LoggingProvider> provider = new AtomicReference<>();
public static Logger getLogger(String name) {
if (name == null) name = "";
return getProvider().getLogger(name);
}
public static LoggingContext getLoggingContext() {
return getProvider().getLoggingContext();
}
public static Logger getLogger(Class<?> clz) {
return getLogger(clz == null ? null : clz.getName());
}
public static void setProvider(LoggingProvider implementation) {
provider.set(implementation);
}
//resolve provider
private static LoggingProvider getProvider() {
if (provider.get() != null) return provider.get();
provider.set(resolveProvider());
return provider.get();
}
private static LoggingProvider resolveProvider() {
try (InputStream propertyStream = Logging.class.getClassLoader().getResourceAsStream(LOGGING_PROPERTY_FILE)) {
if (propertyStream == null) return createDefaultProvider();
Properties props = new Properties();
props.load(propertyStream);
if (props.containsKey(LOGGING_PROPERTY_KEY)) return loadProvider(props.getProperty(LOGGING_PROPERTY_KEY));
return createDefaultProvider();
} catch (IOException e) {
return createDefaultProvider();
}
}
private static LoggingProvider loadProvider(String providerClassName) {
try {
Class<?> providerClz = Class.forName(providerClassName);
return (LoggingProvider) providerClz.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private static LoggingProvider createDefaultProvider() {
System.err.println("ERROR: no.mnemonic.commons.logging.Logging: No logging provider found, using console logger as default. Add implementation package to classpath.");
return new LoggingProvider() {
@Override
public Logger getLogger(String name) {
return new ConsoleLoggerImpl();
}
@Override
public LoggingContext getLoggingContext() {
return new LoggingContext() {
@Override
public void clear() {
// do nothing
}
@Override
public boolean containsKey(String key) {
return false;
}
@Override
public String get(String key) {
return null;
}
@Override
public void put(String key, String value) {
// do nothing
}
@Override
public void remove(String key) {
// do nothing
}
};
}
};
}
}
|