#!/bin/sh
DATA_DIR="data"
DATASET_URL="https://shapenet.cs.stanford.edu/media/modelnet40_normal_resampled.zip"
DATASET_ORIGINAL_NAME="modelnet40_normal_resampled"
DATASET_NAME="ModelNet40"
# option 1: download directly to your project folder
cd ..
mkdir $DATA_DIR && cd $DATA_DIR
wget $DATASET_URL --no-check-certificate
unzip "$DATASET_ORIGINAL_NAME.zip" && mv "$DATASET_ORIGINAL_NAME" "$DATASET_NAME"
rm "$DATASET_ORIGINAL_NAME.zip"
# option 2: download the data to another directory (e.g. xxx/dataset/ModelNet) and then create a soft link in the project's data folder
# same as above to download the data
# ln -s /media/yinchao/Mastery/dataset/ModelNet40 ./data/ModelNet40
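# Once the download finishes, the point clouds can be sanity-checked from Python. A minimal
# sketch, assuming the usual modelnet40_normal_resampled layout (one comma-separated text file
# per shape with six columns: xyz plus normals); the sample path below is illustrative and not
# part of the script above.
import numpy as np

sample = "data/ModelNet40/airplane/airplane_0001.txt"  # illustrative path, adjust to a real file
points = np.loadtxt(sample, delimiter=",").astype(np.float32)  # expected shape: (N, 6)
xyz, normals = points[:, :3], points[:, 3:]
print(xyz.shape, normals.shape)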
|
#!/usr/bin/env bash
# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.
FEEDSTOCK_ROOT=$(cd "$(dirname "$0")/.."; pwd;)
RECIPE_ROOT=$FEEDSTOCK_ROOT/recipe
docker info
config=$(cat <<CONDARC
channels:
- conda-forge
- defaults
conda-build:
root-dir: /feedstock_root/build_artefacts
show_channel_urls: true
CONDARC
)
rm -f "$FEEDSTOCK_ROOT/build_artefacts/conda-forge-build-done"
cat << EOF | docker run -i \
-v "${RECIPE_ROOT}":/recipe_root \
-v "${FEEDSTOCK_ROOT}":/feedstock_root \
-a stdin -a stdout -a stderr \
condaforge/linux-anvil \
bash || exit 1
export BINSTAR_TOKEN=${BINSTAR_TOKEN}
export PYTHONUNBUFFERED=1
echo "$config" > ~/.condarc
# A lock sometimes occurs with incomplete builds. The lock file is stored in build_artefacts.
conda clean --lock
conda install --yes --quiet conda-forge-build-setup
source run_conda_forge_build_setup
# Embarking on 1 case(s).
set -x
export CONDA_PERL=5.20.3.1
set +x
conda build /recipe_root --quiet || exit 1
upload_or_check_non_existence /recipe_root conda-forge --channel=main || exit 1
touch /feedstock_root/build_artefacts/conda-forge-build-done
EOF
# double-check that the build got to the end
# see https://github.com/conda-forge/conda-smithy/pull/337
# for a possible fix
set -x
test -f "$FEEDSTOCK_ROOT/build_artefacts/conda-forge-build-done" || exit 1
|
#!/bin/bash
# add one network card: use nmtui to set the connection name to [ens7], leave [Automatically connect] unselected, then activate it with nmtui
cd /etc/sysconfig/network-scripts/
sed -i 's/BOOTPROTO=dhcp/BOOTPROTO=none/g' ifcfg-ens7
cd -
systemctl restart network
ip a
#add floating ip port
ovs-vsctl add-br br-eth1
ovs-vsctl add-port br-eth1 ens7
ovs-vsctl show
# modify ml2_conf.ini to support VLAN
sed -i '67s/# network_vlan_ranges =/network_vlan_ranges = physnet1:1000:2999/g' /etc/neutron/plugins/ml2/ml2_conf.ini
cat << EOF >> /etc/neutron/plugins/ml2/ml2_conf.ini
[ovs]
tenant_network_type = vlan
bridge_mappings = physnet1:br-eth1
EOF
systemctl restart neutron-openvswitch-agent
systemctl status neutron-openvswitch-agent
|
import * as React from 'react';
import styled from 'styled-components';
import { NavList } from './Navbar';
import links, { icons } from '../../utils/mix';
import AniLink from 'gatsby-plugin-transition-link/AniLink';
interface Props {
title: string;
}
const StyledFooter = styled.footer`
padding: 1rem 0.5rem;
display: flex;
height: 100%;
position: relative;
align-items: center;
justify-content: space-between;
background: ${({ theme }) => theme.colors.coffee};
color: ${({ theme }) => theme.colors.black};
small {
margin-right: 2rem;
}
@media (max-width: 615px) {
display: flex;
small {
position: absolute;
right: 0;
bottom: 1.4rem;
}
}
`;
const FooterList = styled(NavList)`
li {
a {
&:hover {
background: ${({ theme }) => theme.colors.offWhite};
border-bottom: 2px solid ${({ theme }) => theme.colors.shark};
color: ${({ theme }) => theme.colors.shark};
}
}
}
@media (max-width: 615px) {
display: flex;
flex-direction: column;
}
`;
const IconList = styled(FooterList)`
margin-right: auto;
justify-content: flex-start;
li {
a {
&:hover {
background: none;
border: none;
color: ${({ theme }) => theme.colors.white};
}
}
}
@media (max-width: 615px) {
display: flex;
position: absolute;
top: 0;
right: 1rem;
li {
margin: 0.2rem 0;
}
}
`;
const Footer: React.FC<Props> = ({ title }) => {
return (
<StyledFooter>
{' '}
<div className="lists">
<FooterList>
{links.map(link => (
<li key={link.id}>
{' '}
<AniLink fade to={link.path}>
{link.text}
</AniLink>{' '}
</li>
))}
</FooterList>
<IconList>
{icons.map(icon => (
<li key={icon.id}>
{' '}
<a href={icon.url} target="_blank" rel="noopener noreferrer">
{icon.icon}
</a>{' '}
</li>
))}
</IconList>
</div>
<small>
© {new Date().getFullYear()}, {title}{' '}
</small>{' '}
</StyledFooter>
);
};
export default Footer;
|
create table B_SONET_GROUP_SUBJECT
(
ID int not null,
SITE_ID char(2 CHAR) not null,
NAME varchar2(255 CHAR) not null,
SORT int default '100' not null,
primary key (ID)
)
/
CREATE SEQUENCE SQ_B_SONET_GROUP_SUBJECT INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
CREATE TABLE B_SONET_GROUP_SUBJECT_SITE
(
SUBJECT_ID NUMBER(18) NOT NULL,
SITE_ID CHAR(2 CHAR) NOT NULL,
CONSTRAINT PK_B_SONET_GROUP_SUBJECT_SITE PRIMARY KEY (SUBJECT_ID, SITE_ID)
)
/
create table B_SONET_GROUP
(
ID int not null,
SITE_ID char(2 CHAR) not null,
NAME varchar2(255 CHAR) not null,
DESCRIPTION clob null,
DATE_CREATE date not null,
DATE_UPDATE date not null,
ACTIVE char(1 CHAR) default 'Y' not null,
VISIBLE char(1 CHAR) default 'Y' not null,
OPENED char(1 CHAR) default 'N' not null,
SUBJECT_ID int not null,
OWNER_ID int not null,
KEYWORDS varchar2(255 CHAR) null,
IMAGE_ID int null,
NUMBER_OF_MEMBERS int default 0 not null,
NUMBER_OF_MODERATORS int default 0 not null,
INITIATE_PERMS char(1 CHAR) default 'K' not null,
DATE_ACTIVITY date not null,
CLOSED char(1 CHAR) default 'N' not null,
SPAM_PERMS char(1 CHAR) default 'K' not null,
primary key (ID)
)
/
CREATE INDEX IX_B_SONET_GROUP_1 ON B_SONET_GROUP(OWNER_ID)
/
CREATE SEQUENCE SQ_B_SONET_GROUP INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
CREATE TABLE B_SONET_GROUP_SITE
(
GROUP_ID NUMBER(18) NOT NULL,
SITE_ID CHAR(2 CHAR) NOT NULL,
CONSTRAINT PK_B_SONET_GROUP_SITE PRIMARY KEY (GROUP_ID, SITE_ID)
)
/
create table B_SONET_USER2GROUP
(
ID int not null,
USER_ID int not null,
GROUP_ID int not null,
ROLE char(1 CHAR) default 'U' not null,
DATE_CREATE date not null,
DATE_UPDATE date not null,
INITIATED_BY_TYPE char(1 CHAR) default 'U' not null,
INITIATED_BY_USER_ID int not null,
MESSAGE clob null,
primary key (ID)
)
/
CREATE UNIQUE INDEX IX_B_SONET_USER2GROUP_1 ON B_SONET_USER2GROUP(USER_ID, GROUP_ID)
/
CREATE INDEX IX_B_SONET_USER2GROUP_2 ON B_SONET_USER2GROUP(USER_ID, GROUP_ID, ROLE)
/
CREATE INDEX IX_B_SONET_USER2GROUP_3 ON B_SONET_USER2GROUP(GROUP_ID, USER_ID, ROLE)
/
CREATE SEQUENCE SQ_B_SONET_USER2GROUP INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_FEATURES
(
ID int not null,
ENTITY_TYPE char(1 CHAR) default 'G' not null,
ENTITY_ID int not null,
FEATURE varchar2(50 CHAR) not null,
FEATURE_NAME varchar2(250 CHAR) null,
ACTIVE char(1 CHAR) default 'Y' not null,
DATE_CREATE date not null,
DATE_UPDATE date not null,
primary key (ID)
)
/
CREATE UNIQUE INDEX IX_SONET_GROUP_FEATURES_1 ON B_SONET_FEATURES(ENTITY_TYPE, ENTITY_ID, FEATURE)
/
CREATE SEQUENCE SQ_B_SONET_FEATURES INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_FEATURES2PERMS
(
ID int not null,
FEATURE_ID int not null,
OPERATION_ID varchar2(50 CHAR) not null,
ROLE char(1 CHAR) not null,
primary key (ID)
)
/
CREATE UNIQUE INDEX IX_SONET_FE2PERMS_1 ON B_SONET_FEATURES2PERMS(FEATURE_ID, OPERATION_ID)
/
CREATE INDEX IX_SONET_FE2PERMS_2 ON B_SONET_FEATURES2PERMS(FEATURE_ID, ROLE, OPERATION_ID)
/
CREATE SEQUENCE SQ_B_SONET_FEATURES2PERMS INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_USER_RELATIONS
(
ID int not null,
FIRST_USER_ID int not null,
SECOND_USER_ID int not null,
RELATION char(1 CHAR) default 'N' not null,
DATE_CREATE date not null,
DATE_UPDATE date not null,
MESSAGE clob null,
INITIATED_BY char(1 CHAR) default 'F' not null,
primary key (ID)
)
/
CREATE UNIQUE INDEX IX_SONET_RELATIONS_1 ON B_SONET_USER_RELATIONS(FIRST_USER_ID, SECOND_USER_ID)
/
CREATE INDEX IX_SONET_RELATIONS_2 ON B_SONET_USER_RELATIONS(FIRST_USER_ID, SECOND_USER_ID, RELATION)
/
CREATE SEQUENCE SQ_B_SONET_USER_RELATIONS INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_MESSAGES
(
ID int not null,
FROM_USER_ID int not null,
TO_USER_ID int not null,
TITLE varchar2(250 CHAR) null,
MESSAGE clob null,
DATE_CREATE date not null,
DATE_VIEW date null,
MESSAGE_TYPE char(1 CHAR) default 'P' not null,
FROM_DELETED char(1 CHAR) default 'N' not null,
TO_DELETED char(1 CHAR) default 'N' not null,
SEND_MAIL char(1 CHAR) default 'N' not null,
EMAIL_TEMPLATE varchar2(250 CHAR) null,
IS_LOG char(1 CHAR) null,
primary key (ID)
)
/
CREATE INDEX IX_SONET_MESSAGES_1 ON B_SONET_MESSAGES(FROM_USER_ID)
/
CREATE INDEX IX_SONET_MESSAGES_2 ON B_SONET_MESSAGES(TO_USER_ID)
/
CREATE SEQUENCE SQ_B_SONET_MESSAGES INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_SMILE
(
ID int not null,
SMILE_TYPE char(1 CHAR) default 'S' not null,
TYPING varchar2(100 CHAR) null,
IMAGE varchar2(128 CHAR) not null,
DESCRIPTION varchar2(50 CHAR),
CLICKABLE char(1 CHAR) default 'Y' not null,
SORT int default '150' not null,
IMAGE_WIDTH int default '0' not null,
IMAGE_HEIGHT int default '0' not null,
primary key (ID)
)
/
CREATE SEQUENCE SQ_B_SONET_SMILE INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
CREATE OR REPLACE TRIGGER B_SONET_SMILE_INSERT
BEFORE INSERT
ON B_SONET_SMILE
FOR EACH ROW
BEGIN
IF :NEW.ID IS NULL THEN
SELECT SQ_B_SONET_SMILE.NEXTVAL INTO :NEW.ID FROM dual;
END IF;
END;
/
create table B_SONET_SMILE_LANG
(
ID int not null,
SMILE_ID int default '0' not null,
LID char(2 CHAR) not null,
NAME varchar2(255 CHAR) not null,
primary key (ID)
)
/
CREATE UNIQUE INDEX IX_SONET_SMILE_K ON B_SONET_SMILE_LANG(SMILE_ID, LID)
/
CREATE SEQUENCE SQ_B_SONET_SMILE_LANG INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
CREATE OR REPLACE TRIGGER B_SONET_SMILE_LANG_INSERT
BEFORE INSERT
ON B_SONET_SMILE_LANG
FOR EACH ROW
BEGIN
IF :NEW.ID IS NULL THEN
SELECT SQ_B_SONET_SMILE_LANG.NEXTVAL INTO :NEW.ID FROM dual;
END IF;
END;
/
create table B_SONET_USER_PERMS
(
ID int not null,
USER_ID int not null,
OPERATION_ID varchar2(50 CHAR) not null,
RELATION_TYPE char(1 CHAR) not null,
primary key (ID)
)
/
CREATE INDEX IX_SONET_USER_PERMS_1 ON B_SONET_USER_PERMS(USER_ID)
/
CREATE UNIQUE INDEX IX_SONET_USER_PERMS_2 ON B_SONET_USER_PERMS(USER_ID, OPERATION_ID)
/
CREATE SEQUENCE SQ_B_SONET_USER_PERMS INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_USER_EVENTS
(
ID int not null,
USER_ID int not null,
EVENT_ID varchar2(50 CHAR) not null,
ACTIVE char(1 CHAR) default 'Y' not null,
SITE_ID char(2 CHAR) not null,
primary key (ID)
)
/
CREATE INDEX IX_SONET_USER_EVENTS_1 ON B_SONET_USER_EVENTS(USER_ID)
/
CREATE UNIQUE INDEX IX_SONET_USER_EVENTS_2 ON B_SONET_USER_EVENTS(USER_ID, EVENT_ID)
/
CREATE SEQUENCE SQ_B_SONET_USER_EVENTS INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_LOG
(
ID int not null,
ENTITY_TYPE varchar2(50 CHAR) default 'G' not null,
ENTITY_ID int not null,
EVENT_ID varchar2(50 CHAR) not null,
USER_ID int null,
LOG_DATE date not null,
TITLE_TEMPLATE varchar2(250 CHAR) null,
TITLE varchar2(250 CHAR) not null,
SITE_ID char(2 char) null,
MESSAGE clob null,
TEXT_MESSAGE clob null,
URL varchar2(500 CHAR) null,
MODULE_ID varchar2(50 CHAR) null,
CALLBACK_FUNC varchar2(250 CHAR) null,
EXTERNAL_ID varchar2(250 CHAR) null,
PARAMS clob null,
TMP_ID int null,
SOURCE_ID int null,
LOG_UPDATE date default sysdate not null,
COMMENTS_COUNT int null,
ENABLE_COMMENTS char(1 char) default 'Y',
RATING_TYPE_ID VARCHAR2(50 CHAR) null,
RATING_ENTITY_ID number(11) null,
SOURCE_TYPE VARCHAR2(50 CHAR) null,
primary key (ID)
)
/
CREATE INDEX IX_SONET_LOG_1 ON B_SONET_LOG(ENTITY_TYPE, ENTITY_ID, LOG_DATE)
/
CREATE INDEX IX_SONET_LOG_2 ON B_SONET_LOG(USER_ID, LOG_DATE, EVENT_ID)
/
CREATE INDEX IX_SONET_LOG_3 ON B_SONET_LOG(SOURCE_ID)
/
CREATE SEQUENCE SQ_B_SONET_LOG INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
CREATE TABLE B_SONET_LOG_SITE
(
LOG_ID NUMBER(18) NOT NULL,
SITE_ID CHAR(2 CHAR) NOT NULL,
CONSTRAINT PK_B_SONET_LOG_SITE PRIMARY KEY (LOG_ID, SITE_ID)
)
/
CREATE TABLE B_SONET_LOG_COMMENT
(
ID int not null,
LOG_ID int not null,
ENTITY_TYPE varchar2(50 CHAR) default 'G' not null,
ENTITY_ID int not null,
EVENT_ID varchar2(50 char) not null,
USER_ID int null,
LOG_DATE date not null,
MESSAGE clob null,
TEXT_MESSAGE clob null,
MODULE_ID varchar2(50 char) null,
SOURCE_ID int null,
URL varchar2(500 char) null,
RATING_TYPE_ID VARCHAR2(50 CHAR) null,
RATING_ENTITY_ID number(11) null,
primary key (ID)
)
/
CREATE INDEX IX_SONET_LOG_COMMENT_1 ON B_SONET_LOG_COMMENT(ENTITY_TYPE, ENTITY_ID, EVENT_ID)
/
CREATE INDEX IX_SONET_LOG_COMMENT_2 ON B_SONET_LOG_COMMENT(USER_ID, LOG_DATE, EVENT_ID)
/
CREATE SEQUENCE SQ_B_SONET_LOG_COMMENT INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
create table B_SONET_LOG_EVENTS
(
ID int not null,
USER_ID int not null,
ENTITY_TYPE varchar2(50 CHAR) default 'G' not null,
ENTITY_ID int not null,
ENTITY_CB char(1 CHAR) default 'N' not null,
ENTITY_MY char(1 CHAR) default 'N' not null,
EVENT_ID varchar2(50 CHAR) not null,
SITE_ID char(2 CHAR) null,
MAIL_EVENT char(1 CHAR) default 'N' not null,
TRANSPORT char(1 CHAR) default 'N' not null,
VISIBLE char(1 CHAR) default 'Y' not null,
primary key (ID)
)
/
CREATE INDEX IX_SONET_LOG_EVENTS_1 ON B_SONET_LOG_EVENTS(USER_ID)
/
CREATE INDEX IX_SONET_LOG_EVENTS_2 ON B_SONET_LOG_EVENTS(ENTITY_TYPE, ENTITY_ID, EVENT_ID)
/
CREATE UNIQUE INDEX IX_SONET_LOG_EVENTS_3 ON B_SONET_LOG_EVENTS(USER_ID, ENTITY_TYPE, ENTITY_ID, ENTITY_CB, ENTITY_MY, EVENT_ID, SITE_ID)
/
CREATE INDEX IX_SONET_LOG_EVENTS_4 ON B_SONET_LOG_EVENTS(USER_ID, ENTITY_CB, ENTITY_ID)
/
CREATE INDEX IX_SONET_LOG_EVENTS_5 ON B_SONET_LOG_EVENTS(USER_ID, ENTITY_MY, ENTITY_TYPE, ENTITY_ID)
/
CREATE SEQUENCE SQ_B_SONET_LOG_EVENTS INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
CREATE TABLE b_sonet_event_user_view
(
ENTITY_TYPE varchar2(50 CHAR) DEFAULT ('G') NOT NULL,
ENTITY_ID NUMBER(18) NOT NULL,
EVENT_ID VARCHAR2(50 CHAR) NOT NULL,
USER_ID NUMBER(18) DEFAULT ('0') NOT NULL,
USER_IM_ID NUMBER(18) DEFAULT ('0') NOT NULL,
USER_ANONYMOUS char(1 CHAR) DEFAULT ('N') NOT NULL,
constraint pk_b_sonet_event_user_view PRIMARY KEY (ENTITY_TYPE,ENTITY_ID,EVENT_ID,USER_ID,USER_IM_ID)
)
/
CREATE INDEX IX_SONET_EVENT_USER_VIEW_1 ON B_SONET_EVENT_USER_VIEW(USER_ID, EVENT_ID, ENTITY_TYPE, USER_ANONYMOUS)
/
CREATE TABLE B_SONET_LOG_RIGHT
(
ID int not null,
LOG_ID int not null,
GROUP_CODE varchar2(50 CHAR) not null,
primary key (ID)
)
/
CREATE INDEX IX_SONET_LOG_RIGHT_GROUP_CODE ON B_SONET_LOG_RIGHT(LOG_ID, GROUP_CODE)
/
CREATE SEQUENCE SQ_B_SONET_LOG_RIGHT INCREMENT BY 1 NOMAXVALUE NOCYCLE NOCACHE NOORDER
/
CREATE TABLE B_SONET_LOG_COUNTER
(
USER_ID int not null,
SITE_ID char(2 CHAR) default ('**') not null,
CODE varchar2(50 CHAR) default ('**') not null,
CNT int default ('0') not null,
LAST_DATE date,
PAGE_SIZE int null,
PAGE_LAST_DATE_1 date null,
CONSTRAINT PK_B_SONET_LOG_COUNTER PRIMARY KEY (USER_ID, SITE_ID, CODE)
)
/
CREATE TABLE b_sonet_log_page
(
USER_ID NUMBER(18) NOT NULL,
SITE_ID char(2 CHAR) default ('**') NOT NULL,
PAGE_SIZE NUMBER(18) NOT NULL,
PAGE_NUM NUMBER(18) default (1) NOT NULL,
PAGE_LAST_DATE date,
CONSTRAINT pk_b_sonet_log_page PRIMARY KEY (USER_ID, SITE_ID, PAGE_SIZE, PAGE_NUM)
)
/
CREATE TABLE b_sonet_log_follow
(
USER_ID NUMBER(18) NOT NULL,
CODE varchar2(50 CHAR) default ('**') not null,
TYPE CHAR(1 CHAR) default ('Y') not null,
FOLLOW_DATE date,
CONSTRAINT pk_b_sonet_log_follow PRIMARY KEY (USER_ID, CODE)
)
/
CREATE TABLE b_sonet_log_smartfilter
(
USER_ID NUMBER(18) NOT NULL,
TYPE CHAR(1 CHAR) default ('N') not null,
CONSTRAINT pk_b_sonet_log_smartfilter PRIMARY KEY (USER_ID)
)
/
CREATE TABLE B_SONET_LOG_FAVORITES
(
USER_ID int not null,
LOG_ID int not null,
CONSTRAINT PK_B_SONET_LOG_FAVORITES PRIMARY KEY (USER_ID, LOG_ID)
)
/
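-- Only B_SONET_SMILE and B_SONET_SMILE_LANG get their IDs from BEFORE INSERT triggers; for the
-- other tables the application has to pull the ID from the matching sequence itself. Below is a
-- minimal Python sketch of that pattern, assuming the cx_Oracle driver and placeholder
-- connection details; none of it comes from the schema above.
import cx_Oracle

conn = cx_Oracle.connect("user", "password", "localhost/XEPDB1")  # placeholder credentials/DSN
cur = conn.cursor()
cur.execute(
    """
    INSERT INTO B_SONET_GROUP_SUBJECT (ID, SITE_ID, NAME, SORT)
    VALUES (SQ_B_SONET_GROUP_SUBJECT.NEXTVAL, :site_id, :name, :sort)
    """,
    {"site_id": "s1", "name": "Projects", "sort": 100},  # illustrative values
)
conn.commit()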
|
package ru.stqa.pft.addressbook.tests;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.*;
import java.io.File;
import java.util.Collection;
import java.util.HashSet;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.*;
public class ContactAddInGroupTests extends TestBase{
@BeforeMethod
public void ensurePreconditions() {
if (app.db().groups().size() == 0) {
app.goTo().groupPage();
app.group().create(new GroupData().withName("test1"));
}
Groups groups = app.db().groups();
if (app.db().contacts().size() == 0) {
app.goTo().gotoHomePage();
app.contact().createContact(new ContactData()
.withFirstname("Michael").withLastname("Koval").withNickname("Hors68")
.withTitle("tester").withCompany("Cinimex").withAddress("Voronej")
.withMobilephone("123").withWorkphone("456").withHomephone("789").withSecondphone("777")
.withEmail("<EMAIL>").withEmail2("<EMAIL>").withEmail3("<EMAIL>")
.withPhoto(new File(app.getProperties().getProperty("addressbook.photo")))
.inGroup(groups.iterator().next()));
}
}
@Test
public void testAddContactInGroup() {
app.goTo().gotoHomePage();
ContactData addedContact = selectContact();
Groups before = addedContact.getGroups();
GroupData addedGroup = selectGroup(addedContact);
app.contact().addGroupToContact(addedContact, addedGroup);
Groups after = app.db().getContact(addedContact.getId()).getGroups();
System.out.println("before " + before);
System.out.println("after " + after);
assertThat(after, equalTo(before.withAdded(addedGroup)));
}
private ContactData selectContact() {
Groups groups = app.db().groups();
Contacts contacts = app.db().contacts();
for (ContactData contact: contacts) {
if (contact.getGroups().size() < groups.size()) {
return contact;
}
}
app.goTo().groupPage();
app.group().create(new GroupData().withName("test" + Math.random()));
app.goTo().gotoHomePage();
return contacts.iterator().next();
}
private GroupData selectGroup(ContactData contact) {
Groups groups = app.db().groups();
Collection<GroupData> groupsWithoutContacts = new HashSet<>(groups);
groupsWithoutContacts.removeAll(contact.getGroups());
return groupsWithoutContacts.iterator().next();
}
}
|
#!/usr/bin/env bash
declare SCRIPT_DIR
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )
source ${SCRIPT_DIR}/common.sh
declare ALL_PIPELINES=($(get_versioned_pipelines))
function pipeline_to_args() {
local -r pipeline=${1}
local -r env=${2}
local -r test=${3}
local -r truth=${4}
local -r uncached=${5}
local -r common_args="--env ${env} -t ${test} -b ${truth} ${uncached}"
case ${pipeline} in
AnnotationFiltration)
echo AnnotationFiltration -t ${test} --env ${env};;
Arrays)
echo Arrays -a Single ${common_args};;
MultiSampleArrays)
echo Arrays -a Multi ${common_args};;
ExomeGermlineSingleSample)
echo GermlineSingleSample -d Exome ${common_args};;
ExomeReprocessing)
if [[ "${test}" == "Scientific" ]]; then
echo Reprocessing -d Exome --env ${env} -t Plumbing -b ${truth} ${uncached}
else
continue
fi;;
JointGenotyping)
echo JointGenotyping -d Exome ${common_args} --papi-version PAPIv2;
echo JointGenotyping -d WGS --env ${env} -t Plumbing -b ${truth} ${uncached} --papi-version PAPIv2;;
IlluminaGenotypingArray)
echo IlluminaGenotypingArray ${common_args};;
Imputation)
echo Imputation ${common_args};;
ExternalExomeReprocessing)
if [[ "${test}" == "Scientific" ]]; then
echo ExternalReprocessing -d Exome --env ${env} -t Plumbing -b ${truth} ${uncached}
else
continue
fi;;
ExternalWholeGenomeReprocessing)
if [[ "${test}" == "Scientific" ]]; then
echo ExternalReprocessing -d WGS --env ${env} -t Plumbing -b ${truth} ${uncached}
else
continue
fi;;
WholeGenomeGermlineSingleSample)
echo GermlineSingleSample -d WGS ${common_args};;
WholeGenomeReprocessing)
if [[ "${test}" == "Scientific" ]]; then
echo Reprocessing -d WGS --env ${env} -t Plumbing -b ${truth} ${uncached}
else
continue
fi;;
ValidateChip)
echo ValidateChip ${common_args};;
ReblockGVCF)
echo ReblockGvcf -d Exome ${common_args};
echo ReblockGvcf -d WGS ${common_args};;
RNAWithUMIsPipeline)
echo RNAWithUmis ${common_args};;
TargetedSomaticSingleSample)
echo SomaticSingleSample -d Targeted ${common_args};;
CramToUnmappedBams)
echo CramToUnmappedBams ${common_args};;
JointGenotypingByChromosomePartOne)
continue;;
JointGenotypingByChromosomePartTwo)
continue;;
GDCWholeGenomeSomaticSingleSample)
echo GDCWholeGenomeSomaticSingleSample -d WGS ${common_args};;
VariantCalling)
echo VariantCalling -d Exome -t Plumbing --env ${env} -b ${truth} ${uncached};
echo VariantCalling -d WGS -t Plumbing --env ${env} -b ${truth} ${uncached};;
esac
}
function main() {
local -r gittish=${1}
local -r test_all=${2}
local -r env=${3}
local -r test=${4}
local -r truth=${5}
local -r uncached=${6}
local -a changed_pipeline_paths=()
local -a args=()
if ${test_all}; then
changed_pipeline_paths=(${ALL_PIPELINES[@]})
else
changed_pipeline_paths=("$(get_pipelines_to_test ${gittish})")
fi
for changed_pipeline_path in ${changed_pipeline_paths[*]}; do
pipeline=$(basename ${changed_pipeline_path} .wdl)
arg="$(pipeline_to_args ${pipeline} ${env} ${test} ${truth} ${uncached})"
if [[ -n ${arg} ]]; then
args+=("${arg}")
fi
done
for arg in "${args[@]}"; do
echo "${arg}"
done
}
main ${1} ${2} ${3} ${4} ${5} ${6}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2673-1
#
# Security announcement date: 2015-07-20 00:00:00 UTC
# Script generation date: 2017-01-27 21:05:58 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: i386
#
# Vulnerable packages fixed in version:
# - thunderbird:1:31.8.0+build1-0ubuntu0.12.04.1
#
# Last versions recommended by the security team:
# - thunderbird:1:45.7.0+build1-0ubuntu0.12.04.1
#
# CVE List:
# - CVE-2015-2721
# - CVE-2015-2724
# - CVE-2015-2734
# - CVE-2015-2735
# - CVE-2015-2736
# - CVE-2015-2737
# - CVE-2015-2738
# - CVE-2015-2739
# - CVE-2015-2740
# - CVE-2015-4000
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade thunderbird=1:45.7.0+build1-0ubuntu0.12.04.1 -y
|
package org.gbif.pipelines.ingest.java.transforms;
import java.util.List;
import java.util.Map;
import org.gbif.api.model.registry.MachineTag;
import org.gbif.pipelines.io.avro.ExtendedRecord;
import org.gbif.pipelines.parsers.ws.client.metadata.MetadataServiceClient;
import org.gbif.pipelines.transforms.SerializableSupplier;
import lombok.Builder;
/**
* Java transformations to use verbatim default term values defined as MachineTags in a
* MetadataRecord. Transforms from {@link ExtendedRecord} to {@link ExtendedRecord}.
*/
public class DefaultValuesTransform {
private final org.gbif.pipelines.transforms.metadata.DefaultValuesTransform transform;
@Builder(buildMethodName = "create")
private DefaultValuesTransform(
String datasetId, SerializableSupplier<MetadataServiceClient> clientSupplier) {
this.transform =
org.gbif.pipelines.transforms.metadata.DefaultValuesTransform.builder()
.clientSupplier(clientSupplier)
.datasetId(datasetId)
.create();
}
public void setup() {
transform.setup();
}
public void tearDown() {
transform.tearDown();
}
public void replaceDefaultValues(Map<String, ExtendedRecord> source) {
List<MachineTag> tags = transform.getMachineTags();
if (!tags.isEmpty()) {
source.forEach((key, value) -> source.put(key, transform.replaceDefaultValues(value, tags)));
}
}
}
|
package weixin.guanjia.account.controller;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.jeecgframework.web.system.manager.ClientManager;
import org.jeecgframework.web.system.pojo.base.TSDepart;
import org.jeecgframework.web.system.pojo.base.TSFunction;
import org.jeecgframework.web.system.pojo.base.TSRole;
import org.jeecgframework.web.system.pojo.base.TSRoleFunction;
import org.jeecgframework.web.system.pojo.base.TSUser;
import org.jeecgframework.web.system.service.SystemService;
import org.jeecgframework.web.system.service.UserService;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Restrictions;
import org.jeecgframework.core.common.hibernate.qbc.CriteriaQuery;
import org.jeecgframework.core.common.model.common.UploadFile;
import org.jeecgframework.core.common.model.json.AjaxJson;
import org.jeecgframework.core.common.model.json.ComboBox;
import org.jeecgframework.core.common.model.json.DataGrid;
import org.jeecgframework.core.common.model.json.ValidForm;
import org.jeecgframework.core.constant.Globals;
import org.jeecgframework.core.util.ListtoMenu;
import org.jeecgframework.core.util.PasswordUtil;
import org.jeecgframework.core.util.ResourceUtil;
import org.jeecgframework.core.util.RoletoJson;
import org.jeecgframework.core.util.SetListSort;
import org.jeecgframework.core.util.StringUtil;
import org.jeecgframework.core.util.oConvertUtils;
import org.jeecgframework.tag.core.easyui.TagUtil;
import org.jeecgframework.tag.vo.datatable.DataTableReturn;
import org.jeecgframework.tag.vo.datatable.DataTables;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import weixin.guanjia.account.entity.WeixinAccountEntity;
/**
* @ClassName: UserController
* @Description: TODO (merchant user assignment)
* @author
*/
@Scope("prototype")
@Controller
@RequestMapping("/weixinUserController")
public class WeixinUserController {
/**
* Logger for this class
*/
@SuppressWarnings("unused")
private static final Logger logger = Logger.getLogger(WeixinUserController.class);
private UserService userService;
private SystemService systemService;
private String message = null;
@Autowired
public void setSystemService(SystemService systemService) {
this.systemService = systemService;
}
@Autowired
public void setUserService(UserService userService) {
this.userService = userService;
}
/**
* Jump to the user list page
*
* @return
*/
@RequestMapping(params = "user")
public String user(HttpServletRequest request) {
//regular employee accounts do not have permission
TSUser users = userService.get(TSUser.class, ResourceUtil.getSessionUserName().getId());
if("0".equals(users.getType())){
return "common/403";
}
// prepare data for the department dropdown used in the query criteria
List<TSDepart> departList = systemService.getList(TSDepart.class);
if(null != departList)
request.setAttribute("departsReplace", RoletoJson.listToReplaceStr(departList, "departname", "id"));
return "weixin/account/userList";
}
/**
* EasyUI AJAX request for the user list data
* @param request
* @param response
* @param dataGrid
*/
@RequestMapping(params = "datagrid")
public void datagrid(TSUser user,HttpServletRequest request, HttpServletResponse response, DataGrid dataGrid) {
CriteriaQuery cq = new CriteriaQuery(TSUser.class, dataGrid);
//query criteria assembler
org.jeecgframework.core.extend.hqlsearch.HqlGenerateUtil.installHql(cq, user);
Short[] userstate = new Short[] { Globals.User_Normal, Globals.User_ADMIN ,Globals.User_Forbidden};
cq.in("status", userstate);
cq.eq("type", "0");
cq.eq("accountid", ResourceUtil.getSessionUserName().getAccountid());//根据所属顶级公众号隔离
cq.add();
this.systemService.getDataGridReturn(cq, true);
TagUtil.datagrid(response, dataGrid);
}
/**
* User information
*
* @return
*/
@RequestMapping(params = "userinfo")
public String userinfo(HttpServletRequest request) {
TSUser user = ResourceUtil.getSessionUserName();
request.setAttribute("user", user);
return "weixin/account/userinfo";
}
/**
* Change password
*
* @return
*/
@RequestMapping(params = "changepassword")
public String changepassword(HttpServletRequest request) {
TSUser user = ResourceUtil.getSessionUserName();
request.setAttribute("user", user);
return "weixin/account/changepassword";
}
/**
* Change password
*
* @return
*/
@RequestMapping(params = "savenewpwd")
@ResponseBody
public AjaxJson savenewpwd(HttpServletRequest request) {
AjaxJson j = new AjaxJson();
TSUser user = ResourceUtil.getSessionUserName();
String password = oConvertUtils.getString(request.getParameter("password"));
String newpassword = oConvertUtils.getString(request.getParameter("newpassword"));
String pString = PasswordUtil.encrypt(user.getUserName(), password, PasswordUtil.getStaticSalt());
if (!pString.equals(user.getPassword())) {
j.setMsg("原密码不正确");
j.setSuccess(false);
} else {
try {
user.setPassword(PasswordUtil.encrypt(user.getUserName(), newpassword, PasswordUtil.getStaticSalt()));
} catch (Exception e) {
e.printStackTrace();
}
systemService.updateEntitie(user);
j.setMsg("修改成功");
}
return j;
}
/**
* Lock (disable) an account
*
* @author Chj
*/
@RequestMapping(params = "lock")
@ResponseBody
public AjaxJson lock(String id, HttpServletRequest req) {
AjaxJson j = new AjaxJson();
TSUser user = systemService.getEntity(TSUser.class, id);
if("admin".equals(user.getUserName())){
message = "超级管理员[admin]不可锁定";
j.setMsg(message);
return j;
}
if(user.getStatus()!=Globals.User_Forbidden){
user.setStatus(Globals.User_Forbidden);
userService.updateEntitie(user);
message = "用户:" + user.getUserName() + "锁定成功";
} else {
message = "锁定账户失败";
}
j.setMsg(message);
return j;
}
/**
* Enable (unlock) an account
*
* @author Chj
*/
@RequestMapping(params = "deblocking")
@ResponseBody
public AjaxJson deblocking(String id, HttpServletRequest req) {
AjaxJson j = new AjaxJson();
TSUser user = systemService.getEntity(TSUser.class, id);
if("admin".equals(user.getUserName())){
message = "超级管理员[admin]不可操作";
j.setMsg(message);
return j;
}
if(user.getStatus() == Globals.User_Forbidden){
user.setStatus(Globals.User_Normal);
userService.updateEntitie(user);
message = "用户:" + user.getUserName() + "启用成功";
} else {
message = "启用账户失败";
}
j.setMsg(message);
return j;
}
/**
*
* Change a user's password
* @author Chj
*/
@RequestMapping(params = "changepasswordforuser")
public ModelAndView changepasswordforuser(TSUser user, HttpServletRequest req) {
if (StringUtil.isNotEmpty(user.getId())) {
user = systemService.getEntity(TSUser.class, user.getId());
req.setAttribute("user", user);
org.jeecgframework.core.util.LogUtil.info(user.getPassword()+"-----"+user.getRealName());
}
return new ModelAndView("weixin/account/adminchangepwd");
}
/**
* Save the reset password
* @param req
* @return
*/
@RequestMapping(params = "savenewpwdforuser")
@ResponseBody
public AjaxJson savenewpwdforuser(HttpServletRequest req) {
AjaxJson j = new AjaxJson();
String id = oConvertUtils.getString(req.getParameter("id"));
String password = oConvertUtils.getString(req.getParameter("password"));
if (StringUtil.isNotEmpty(id)) {
TSUser users = systemService.getEntity(TSUser.class,id);
org.jeecgframework.core.util.LogUtil.info(users.getUserName());
users.setPassword(PasswordUtil.encrypt(users.getUserName(), password, PasswordUtil.getStaticSalt()));
users.setStatus(Globals.User_Normal);
users.setActivitiSync(users.getActivitiSync());
systemService.updateEntitie(users);
message = "用户: " + users.getUserName() + "密码重置成功";
}
j.setMsg(message);
return j;
}
/**
* Delete a user account
*
* @param user
* @param req
* @return
*/
@RequestMapping(params = "del")
@ResponseBody
public AjaxJson del(TSUser user, HttpServletRequest req) {
AjaxJson j = new AjaxJson();
if("admin".equals(user.getUserName())){
message = "超级管理员[admin]不可删除";
j.setMsg(message);
return j;
}
user = systemService.getEntity(TSUser.class, user.getId());
if (!user.getStatus().equals(Globals.User_ADMIN)) {
userService.delete(user);
message = "用户:" + user.getUserName() + "删除成功";
} else {
message = "超级管理员不可删除";
}
j.setMsg(message);
return j;
}
/**
* Check whether the username already exists
*
* @param ids
* @return
*/
@RequestMapping(params = "checkUser")
@ResponseBody
public ValidForm checkUser(HttpServletRequest request) {
ValidForm v = new ValidForm();
String userName=oConvertUtils.getString(request.getParameter("param"));
String code=oConvertUtils.getString(request.getParameter("code"));
List<TSUser> roles=systemService.findByProperty(TSUser.class,"userName",userName);
if(roles.size()>0&&!code.equals(userName))
{
v.setInfo("用户名已存在");
v.setStatus("n");
}
return v;
}
/**
* Add or edit an employee account
*
* @param request
* @param response
* @param dataGrid
* @param user
*/
@RequestMapping(params = "addorupdate")
public ModelAndView addorupdate(TSUser user, HttpServletRequest req) {
if (StringUtil.isNotEmpty(user.getId())) {
user = systemService.getEntity(TSUser.class, user.getId());
req.setAttribute("user", user);
}
return new ModelAndView("weixin/account/user");
}
/**
* Save or update an employee account
*
* @param user
* @param req
* @return
*/
@RequestMapping(params = "saveUser")
@ResponseBody
public AjaxJson saveUser(HttpServletRequest req, TSUser user) {
AjaxJson j = new AjaxJson();
String password = oConvertUtils.getString(req.getParameter("password"));
//update an existing user
if (StringUtil.isNotEmpty(user.getId())) {
TSUser users = systemService.getEntity(TSUser.class, user.getId());
users.setEmail(user.getEmail());
users.setOfficePhone(user.getOfficePhone());
users.setMobilePhone(user.getMobilePhone());
users.setTSDepart(null);
users.setRealName(user.getRealName());
users.setStatus(Globals.User_Normal);
users.setActivitiSync(user.getActivitiSync());
systemService.updateEntitie(users);
message = "用户: " + users.getUserName() + "更新成功";
//systemService.addLog(message, Globals.Log_Type_UPDATE, Globals.Log_Leavel_INFO);
} else {
TSUser users = systemService.findUniqueByProperty(TSUser.class, "userName",user.getUserName());
if (users != null) {
message = "用户: " + users.getUserName() + "已经存在";
} else {
//create a new user
user.setPassword(PasswordUtil.encrypt(user.getUserName(), password, PasswordUtil.getStaticSalt()));
//role
TSUser u = userService.getEntity(TSUser.class, ResourceUtil.getSessionUserName().getId());
String roleid = oConvertUtils.getString(u.getTSRole().getId());
TSRole uRole = systemService.getEntity(TSRole.class, roleid);
user.setTSDepart(null);
user.setTSRole(uRole);//role
user.setStatus(Globals.User_Normal);
user.setType("0");//user type 0: merchant employee account
user.setAccountid(ResourceUtil.getWeiXinAccountId());
user.setTenantId(ResourceUtil.getSessionUserName().getTenantId());
systemService.save(user);
message = "用户: " + user.getUserName() + "添加成功";
//systemService.addLog(message, Globals.Log_Type_INSERT, Globals.Log_Leavel_INFO);
}
}
j.setMsg(message);
return j;
}
/**
* Save the user's signature
*
* @param user
* @param req
* @return
*/
@RequestMapping(params = "savesign", method = RequestMethod.POST)
@ResponseBody
public AjaxJson savesign(HttpServletRequest req) {
UploadFile uploadFile = new UploadFile(req);
String id = uploadFile.get("id");
TSUser user = systemService.getEntity(TSUser.class, id);
uploadFile.setRealPath("signatureFile");
uploadFile.setCusPath("signature");
uploadFile.setByteField("signature");
uploadFile.setBasePath("resources");
uploadFile.setRename(false);
uploadFile.setObject(user);
AjaxJson j = new AjaxJson();
message = user.getUserName() + "设置签名成功";
systemService.uploadFile(uploadFile);
//systemService.addLog(message, Globals.Log_Type_INSERT, Globals.Log_Leavel_INFO);
j.setMsg(message);
return j;
}
}
|
/*
Jameleon - An automation testing tool.
Copyright (C) 2006 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.bean;
import net.sf.jameleon.exception.JameleonException;
import net.sf.jameleon.util.TextMatcher;
/**
* This class is a generic regex text-matching helper for plug-ins that want the regex
* (and maybe later the XPath) functionality to match the given expression and set the
* matching text in the provided context variable.
*/
public class MatchingTextHelper{
protected TextMatcher matcher;
/**
* Creates a MatchingTextHelper with a TextMatcher that is used for the following.
* <ol>
* <li>Get the current screen as text.</li>
* <li>Get the matching regular expression.</li>
* <li>Set the variable in the context.</li>
* </ol>
* You should find that the last two are methods already implemented in the FunctionTag.
* @param matcher - The interface used to communicate with the plug-in.
*/
public MatchingTextHelper(TextMatcher matcher){
this.matcher = matcher;
}
/**
* Sets the matching text in the provided context variable.
* @param varName - The context variable name
* @param regex - The regular expression with the grouping representing the text to match
* @param regexGroup - The number of the group to match.
*/
public void setMatchingTextInContext(String varName, String regex, int regexGroup){
String screenText = matcher.getCurrentScreenAsText();
// Finding the matching pattern to extract the required data
String matched = matcher.getMatchingRegexText(screenText, regex, regexGroup);
if (matched != null) {
// Setting the matched pattern as a Context variable
matcher.setVariable(varName, matched);
}else{
throw new JameleonException("No match found for ["+regex+"]!");
}
}
}
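# A rough Python analogue of the pattern described above (not the Jameleon API): match a regular
# expression against the current screen text and store the chosen capture group under a context
# variable, failing loudly when nothing matches. The helper name and sample values are hypothetical.
import re

def set_matching_text_in_context(context, var_name, screen_text, regex, regex_group):
    match = re.search(regex, screen_text)
    if match is None:
        raise ValueError(f"No match found for [{regex}]!")
    context[var_name] = match.group(regex_group)

ctx = {}
set_matching_text_in_context(ctx, "order_id", "Order #12345 confirmed", r"Order #(\d+)", 1)
print(ctx)  # {'order_id': '12345'}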
|
package org.museautomation.ui.taskinput;
import javafx.scene.*;
import javafx.scene.control.*;
import javafx.scene.layout.*;
import javafx.scene.paint.*;
import org.museautomation.core.*;
import org.museautomation.core.task.*;
import org.museautomation.core.task.input.*;
import org.museautomation.ui.extend.glyphs.*;
import java.util.*;
/**
* @author <NAME> (see LICENSE.txt for license details)
*/
public class TaskInputValueEditorRow
{
public TaskInputValueEditorRow(MuseExecutionContext context)
{
_input_field = new ValueSourceInputField(context);
_input_field.getNode().setId(VALUE_FIELD_ID);
_input_field.addListener(this::setSatisfactionIcon);
_use_default.setId(USE_DEFAULT_ID);
_use_default.setGraphic(Glyphs.create("FA:ARROW_LEFT"));
_use_default.setTooltip(new Tooltip("Use default value"));
_use_default.setOnAction(event -> _input_field.useDefault());
}
public void addToGrid(GridPane grid, int row_index)
{
grid.add(_name, 0, row_index);
grid.add(_input_field.getNode(), 1, row_index);
GridPane.setHgrow(_input_field.getNode(), Priority.ALWAYS);
if (_input.getDefault() != null)
grid.add(_use_default, 2, row_index);
grid.add(_type, 3, row_index);
grid.add(_satisfaction_label, 4, row_index);
setSatisfactionIcon(!_input.isRequired());
}
private void setSatisfactionIcon(boolean satisfied)
{
if (satisfied)
{
if (!REQUIRED_SATISFIED_ICON_ID.equals(_satisfaction_label.getId()))
{
_satisfaction_label.setGraphic(_satisfied_icon);
_satisfaction_label.setId(REQUIRED_SATISFIED_ICON_ID);
for (SatisfactionListener listener : _listeners)
listener.satisfactionChanged(false, true);
}
}
else
{
if (!REQUIRED_NOT_SATISFIED_ICON_ID.equals(_satisfaction_label.getId()))
{
_satisfaction_label.setGraphic(_not_satisfied_icon);
_satisfaction_label.setId(REQUIRED_NOT_SATISFIED_ICON_ID);
for (SatisfactionListener listener : _listeners)
listener.satisfactionChanged(true, false);
}
}
}
public void setInput(TaskInput input)
{
_input = input;
_input_field.setTaskInput(input);
if (input.getDefault() == null)
_use_default.setDisable(true);
_name.setText(new UiFriendlyMetadata(input.metadata()).getString("label", input.getName()));
_type.setText(input.getType().getName());
}
public TaskInput getInput()
{
return _input;
}
public boolean isSatisfied()
{
return _input_field.isSatisfied();
}
public ResolvedTaskInput getResolvedInput()
{
return _input_field.getResolvedInput();
}
public void addSatisfactionChangeListener(SatisfactionListener listener)
{
_listeners.add(listener);
}
private TaskInput _input;
private final Label _name = new Label();
private TaskInputField _input_field;
private final Button _use_default = new Button();
private final Label _type = new Label();
private final Label _satisfaction_label = new Label();
private final Node _satisfied_icon = Glyphs.create("FA:CHECK_CIRCLE", Color.GREEN, 20);
private final Node _not_satisfied_icon = Glyphs.create("FA:EXCLAMATION_CIRCLE", Color.RED, 20);
private final List<SatisfactionListener> _listeners = new ArrayList<>();
public final static String VALUE_FIELD_ID = "omuit-tiver-value-field";
public final static String USE_DEFAULT_ID = "omuit-tiver-use-default";
public final static String REQUIRED_NOT_SATISFIED_ICON_ID = "omuit-tiver-not-satified-icon";
public final static String REQUIRED_SATISFIED_ICON_ID = "omuit-tiver-satified-icon";
public interface SatisfactionListener
{
void satisfactionChanged(boolean old_value, boolean new_value);
}
}
|
// Create the icon image for the navigation bar button
let icon = UIImage(named: "backIcon")
// Define the action selector for the button
let action = #selector(goback(sender:))
// Set the left bar button item with the icon, action, and target
self.navigationItem.leftBarButtonItem = UIBarButtonItem(image: icon, landscapeImagePhone: nil, style: .plain, target: self, action: action)
// Implement the goback function to navigate back
@objc func goback(sender: AnyObject? = nil) {
self.navigationController?.popViewController(animated: true)
}
|
#!/bin/sh
sudo chown -R nginx:nginx /var/www/html
sudo systemctl start nginx
|
package malte0811.controlengineering.gui.panel;
import malte0811.controlengineering.blockentity.panels.PanelDesignerBlockEntity;
import malte0811.controlengineering.controlpanels.PlacedComponent;
import malte0811.controlengineering.gui.CEContainerMenu;
import malte0811.controlengineering.gui.misc.LambdaDataSlot;
import malte0811.controlengineering.network.SimplePacket;
import malte0811.controlengineering.network.panellayout.FullSync;
import malte0811.controlengineering.network.panellayout.PanelPacket;
import malte0811.controlengineering.network.panellayout.PanelSubPacket;
import net.minecraft.world.inventory.DataSlot;
import net.minecraft.world.inventory.MenuType;
import java.util.ArrayList;
import java.util.List;
public class PanelDesignMenu extends CEContainerMenu<PanelSubPacket> {
private final DataSlot requiredLength;
private final DataSlot availableLength;
private final List<PlacedComponent> components;
public PanelDesignMenu(MenuType<?> type, int id) {
super(type, id);
this.requiredLength = addDataSlot(DataSlot.standalone());
this.availableLength = addDataSlot(DataSlot.standalone());
this.components = new ArrayList<>();
}
public PanelDesignMenu(MenuType<?> type, int id, PanelDesignerBlockEntity designer) {
super(type, id, isValidFor(designer), designer::setChanged);
this.requiredLength = addDataSlot(LambdaDataSlot.serverSide(designer::getLengthRequired));
this.availableLength = addDataSlot(LambdaDataSlot.serverSide(() -> designer.getKeypunch().getAvailable()));
this.components = designer.getComponents();
}
@Override
protected PanelSubPacket getInitialSync() {
return new FullSync(components);
}
@Override
protected SimplePacket makePacket(PanelSubPacket panelSubPacket) {
return new PanelPacket(panelSubPacket);
}
public List<PlacedComponent> getComponents() {
return components;
}
public int getRequiredTapeLength() {
return requiredLength.get();
}
public int getAvailableTapeLength() {
return availableLength.get();
}
}
|
import React, { useState,useEffect } from "react";
// @material-ui/core components
import { makeStyles } from "@material-ui/core/styles";
// @material-ui/icons
// core components
import GridContainer from "components/Grid/GridContainer.js";
import GridItem from "components/Grid/GridItem.js";
import CustomInput from "components/CustomInput/CustomInput.js";
import Button from "components/CustomButtons/Button.js";
import styles from "assets/jss/material-kit-react/views/landingPageSections/workStyle.js";
import Axios from 'axios';
import { TextareaAutosize, TextField } from "@material-ui/core";
import Form from 'react-bootstrap/Form';
const useStyles = makeStyles(styles);
export default function FormSection() {
const classes = useStyles();
const [pesanList,setPesanList] = useState([]);
useEffect(() => {
Axios.get('http://localhost:3001/getpesan')
.then((response)=> {
setPesanList(response.data);
});
}, []);
const [nama, setNama] = useState('');
const [alamat_email, setAlamatEmail] = useState('');
const [pesan, setPesan] = useState('');
const submitPesan = () => {
Axios.post("http://localhost:3001/addpesan", {
nama: nama,
alamat_email: alamat_email,
pesan: pesan,
});
setPesanList([
...pesanList,
{ nama: nama, alamat_email: alamat_email, pesan: pesan },
]);
window.location.reload(false);
};
return (
<div className={classes.section}>
<GridContainer justify="center">
<GridItem cs={12} sm={12} md={8}>
<h2 className={classes.title}>Hubungi kami</h2>
<h4 className={classes.description}>
Anda bisa menghubungi kami melalui form di bawah ini.
</h4>
<form>
<GridContainer>
<GridItem xs={12} sm={12} md={6}>
<TextField label="Masukkan nama" id="nama" variant="outlined" style={{ width: '100%' }}
onChange={(e) => setNama(e.target.value)}/>
</GridItem>
<GridItem xs={12} sm={12} md={6}>
<TextField label="Masukkan email" id="alamat_email" variant="outlined" style={{ width: '100%' }}
onChange={(e) => setAlamatEmail(e.target.value)}
/>
</GridItem>
<GridItem xs={12} sm={12} md={12}>
<TextField label="Masukkan pesan" id="pesan" type="textarea" variant="outlined" style={{ width: '100%' }}
onChange={(e) => setPesan(e.target.value)}
/>
</GridItem>
<GridItem xs={12} sm={12} md={4}>
<Button color="primary" onClick={submitPesan}>Kirim Pesan</Button>
</GridItem>
</GridContainer>
</form>
</GridItem>
</GridContainer>
</div>
);
}
|
package com.wangjiegulu.mvparchitecture.library.presenter;
import com.wangjiegulu.mvparchitecture.library.contract.OnViewerDestroyListener;
import com.wangjiegulu.mvparchitecture.library.viewer.Viewer;
/**
* In MVP, the Presenter acts as the bridge between the View and the Model: it retrieves data from the Model layer and hands it back to the View layer, and it can also decide how to interact with the View layer.
* It holds one reference to the View layer and one or more references to the Model layer.
*
* Author: wangjie
* Email: <EMAIL>
* Date: 4/13/16.
*/
public interface Presenter extends OnViewerDestroyListener {
/**
* Binds the Presenter to the Viewer's destroy state.
*
* Subclasses may also override this method as the Presenter's initialization hook, but any override must call super.bind()!
*/
void bind(Viewer bindViewer);
/**
* All task operations must stop their UI callbacks when the Viewer is destroyed (bound via the bind method above).
*/
void closeAllTask();
}
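# A rough Python sketch of the contract described in the Javadoc above, not the library's API:
# the Presenter binds to a Viewer, and when the Viewer is destroyed all running tasks are closed
# so no further UI callbacks happen. The Viewer's on_destroy hook and the task objects are
# assumptions made for illustration only.
class SketchPresenter:
    def __init__(self):
        self._viewer = None
        self._tasks = []

    def bind(self, viewer):
        # Subclasses that override this as their init hook must still call this base bind().
        self._viewer = viewer
        viewer.on_destroy(self.close_all_tasks)  # assumed Viewer callback registration

    def close_all_tasks(self):
        # Stop UI callbacks once the Viewer is gone.
        for task in self._tasks:
            task.cancel()
        self._tasks.clear()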
|
dotfiles () {
cd $PROJECTS/dotfiles/
vim .
}
|
/***
* Copyright (C) <NAME>. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project root
* for full license information.
*
* =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
*
* For related information - https://github.com/CodeWithRodi/Cutternet/
*
* Cutternet Client Source Code
*
* =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
****/
import React from 'react';
import ReactDOM from 'react-dom';
import ReportWebVitals from './ReportWebVitals';
import Application from './Utils/Application';
import { BrowserRouter } from 'react-router-dom';
import { AuthProvider } from './Services/Auth/Context';
import { LinkProvider } from './Services/Link/Context';
import { LanguageProvider } from './Services/Language/Context';
ReactDOM.render(
<BrowserRouter>
<LanguageProvider>
<LinkProvider>
<AuthProvider>
<Application />
</AuthProvider>
</LinkProvider>
</LanguageProvider>
</BrowserRouter>,
document.getElementById('CutternetApp')
);
ReportWebVitals();
|
import React from 'react'
import HeadingBase from '../HeadingBase'
const JumbotronHeading = props => (
<HeadingBase
fontSize={[1, 2, 3]}
{...props}
/>
)
export default JumbotronHeading
|
# HMM parameters for tagging the sentence "The boy went to the store"
states = ('NNP', 'NN', 'VBD', 'TO', 'DT', 'NN')
start_probability = {'NNP': 0.5, 'NN': 0.5}
transition_probability = {
'NNP': {'NN': 0.5, 'VBD': 0.5},
'NN': {'TO': 0.5, 'DT': 0.5},
'VBD': {'TO': 1.0},
'TO': {'DT': 1.0},
'DT': {'NN': 1.0},
'NN': {None: 1.0}  # end-of-sentence transition; note this duplicate 'NN' key overwrites the entry above in a Python dict
}
emission_probability = {
'NNP': {'The': 1.0},
'NN': {'boy': 1.0, 'store': 1.0},
'VBD': {'went': 1.0},
'TO': {'to': 1.0},
'DT': {'the': 1.0}
}
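# A minimal Viterbi sketch over the parameters above, assuming they are stored in the Python
# dicts shown (states, start_probability, transition_probability, emission_probability). With
# the transition table exactly as listed, any path through 'went' gets probability zero because
# there is no NN -> VBD entry, so this illustrates the algorithm rather than a complete tagger.
def viterbi(observations, states, start_p, trans_p, emit_p):
    # best[t][s] = (probability of the best path ending in state s at step t, previous state)
    best = [{s: (start_p.get(s, 0.0) * emit_p.get(s, {}).get(observations[0], 0.0), None)
             for s in states}]
    for t in range(1, len(observations)):
        column = {}
        for s in states:
            prob, prev = max(
                ((best[t - 1][p][0] * trans_p.get(p, {}).get(s, 0.0)
                  * emit_p.get(s, {}).get(observations[t], 0.0), p) for p in states),
                key=lambda pair: pair[0],
            )
            column[s] = (prob, prev)
        best.append(column)
    last = max(best[-1], key=lambda s: best[-1][s][0])  # most probable final state
    path = [last]
    for t in range(len(observations) - 1, 0, -1):
        path.insert(0, best[t][path[0]][1])
    return path

sentence = ['The', 'boy', 'went', 'to', 'the', 'store']
print(viterbi(sentence, set(states), start_probability, transition_probability, emission_probability))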
|
class Inventory:
def __init__(self):
self.inventory = {}
def add_item(self, item_name, quantity, unit_price):
self.inventory[item_name] = {'quantity': quantity, 'unit_price': unit_price}
def remove_item(self, item_name):
if item_name in self.inventory:
del self.inventory[item_name]
def update_item(self, item_name, quantity, unit_price):
if item_name in self.inventory:
self.inventory[item_name]['quantity'] = quantity
self.inventory[item_name]['unit_price'] = unit_price
def display_inventory(self):
for item, details in self.inventory.items():
print(f"{item}: {details['quantity']} - {details['unit_price']}")
def generate_report(self):
total_value = sum(details['quantity'] * details['unit_price'] for details in self.inventory.values())
return total_value
# Test case
inventory = Inventory()
inventory.add_item("apple", 10, 1.5)
inventory.add_item("banana", 20, 0.75)
inventory.display_inventory()
print(inventory.generate_report())
inventory.update_item("apple", 15, 1.75)
inventory.remove_item("banana")
inventory.display_inventory()
print(inventory.generate_report())
|
import { Router } from 'express';
import { celebrate, Segments, Joi } from 'celebrate';
import ProfileController from '../controllers/ProfileController';
import ensureAuthticated from '../middlewares/ensureAuthenticate';
const profileRouter = Router()
const profileController = new ProfileController()
profileRouter.use(ensureAuthticated)
profileRouter.put('/',celebrate({
[Segments.BODY]: Joi.object().keys({
name:Joi.string(),
email:Joi.string().email(),
//ownersId:Joi.string().uuid().allow(null),
groupsId:Joi.string().uuid(),
//branchsId:Joi.string().uuid().allow(null),
powerUser:Joi.string().allow(null),
old_password:Joi.string(),
password: Joi.when('old_password', {
is: Joi.exist(),
then: Joi.required(),
}),
password_confirmation: Joi.when('password', {
is: Joi.exist(),
then: Joi.valid(Joi.ref('password')).required(),
}),
}),
}),profileController.update);
profileRouter.get('/',profileController.show);
export default profileRouter
|
/* tslint:disable */
import { CategoryMapping } from "./category-mapping.model";
export type CategoryMappingsResponse = {
count: number;
next: string;
previous: string;
results: CategoryMapping[];
};
|
#!/bin/bash
echo "================== Help for cqlsh ========================="
echo "DESCRIBE tables : Prints all tables in the current keyspace"
echo "DESCRIBE keyspaces : Prints all keyspaces in the current cluster"
echo "DESCRIBE <table_name> : Prints table detail information"
echo "help : for more cqlsh commands"
echo "help [cqlsh command] : Gives information about cqlsh commands"
echo "quit : quit"
echo "=================================================================="
docker exec -it cassandra cqlsh
|
package com.github.muirandy.docs.living.log.restful;
import com.github.muirandy.docs.living.api.Log;
import com.github.muirandy.docs.living.api.Logs;
import javax.ws.rs.*;
import javax.ws.rs.core.Response;
@Consumes("application/json")
@Produces("application/json")
public interface LoggerClient {
@POST
@Path("/log")
Response log(Log log);
@GET
@Path("/log/read")
Logs readAll();
@GET
@Path("/log/read/{marker}")
Logs read(@PathParam("marker") String marker);
@POST
@Path("/log/markEnd/{marker}")
Response markEnd(@PathParam("marker") String marker);
}
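# A minimal sketch of exercising the endpoints declared by LoggerClient over plain HTTP with
# Python's requests library; the base URL and the shape of the Log JSON body are assumptions,
# not part of the interface above.
import requests

BASE = "http://localhost:8080"  # assumed host/port

requests.post(f"{BASE}/log", json={"message": "service started"})  # POST /log, body shape assumed
all_logs = requests.get(f"{BASE}/log/read").json()                 # GET /log/read
marked = requests.get(f"{BASE}/log/read/release-1").json()         # GET /log/read/{marker}
requests.post(f"{BASE}/log/markEnd/release-1")                     # POST /log/markEnd/{marker}
print(all_logs, marked)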
|
set -e
echo "Current version:" $(grep version package.json | sed -E 's/^.*"(4[^"]+)".*$/\1/')
echo "Enter beta version e.g., 2 will generate 4.0.0-beta.2: "
read BETA
VERSION="4.0.0-beta.$BETA"
read -p "Releasing v$VERSION - are you sure? (y/n)" -n 1 -r
echo # (optional) move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]
then
echo "Releasing v$VERSION ..."
# clear existing ts cache
rm -rf node_modules/.rts2_cache
yarn run build
yarn run build:dts
yarn run test:dts
# generate the version so that the changelog can be generated too
yarn version --no-git-tag-version --no-commit-hooks --new-version $VERSION
# changelog
yarn run changelog
yarn prettier --write CHANGELOG.md
echo "Please check the git history and the changelog and press enter"
read OKAY
# commit and tag
git add CHANGELOG.md package.json
git commit -m "release: v$VERSION"
git tag "v$VERSION"
# commit
yarn publish --tag next --new-version "$VERSION" --no-commit-hooks --no-git-tag-version
# publish
git push origin refs/tags/v$VERSION
git push
fi
|
// Copyright 2020
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package toml
import (
"fmt"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/astaxie/beego/core/config"
)
func TestConfig_Parse(t *testing.T) {
// file not found
cfg := &Config{}
_, err := cfg.Parse("invalid_file_name.txt")
assert.NotNil(t, err)
}
func TestConfig_ParseData(t *testing.T) {
data := `
name="Tom"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
}
func TestConfigContainer_Bool(t *testing.T) {
data := `
Man=true
Woman="true"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val, err := c.Bool("Man")
assert.Nil(t, err)
assert.True(t, val)
_, err = c.Bool("Woman")
assert.NotNil(t, err)
assert.Equal(t, config.InvalidValueTypeError, err)
}
func TestConfigContainer_DefaultBool(t *testing.T) {
data := `
Man=true
Woman="false"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val := c.DefaultBool("Man11", true)
assert.True(t, val)
val = c.DefaultBool("Man", false)
assert.True(t, val)
val = c.DefaultBool("Woman", true)
assert.True(t, val)
}
func TestConfigContainer_DefaultFloat(t *testing.T) {
data := `
Price=12.3
PriceInvalid="12.3"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val := c.DefaultFloat("Price", 11.2)
assert.Equal(t, 12.3, val)
val = c.DefaultFloat("Price11", 11.2)
assert.Equal(t, 11.2, val)
val = c.DefaultFloat("PriceInvalid", 11.2)
assert.Equal(t, 11.2, val)
}
func TestConfigContainer_DefaultInt(t *testing.T) {
data := `
Age=12
AgeInvalid="13"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val := c.DefaultInt("Age", 11)
assert.Equal(t, 12, val)
val = c.DefaultInt("Price11", 11)
assert.Equal(t, 11, val)
val = c.DefaultInt("PriceInvalid", 11)
assert.Equal(t, 11, val)
}
func TestConfigContainer_DefaultString(t *testing.T) {
data := `
Name="Tom"
NameInvalid=13
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val := c.DefaultString("Name", "Jerry")
assert.Equal(t, "Tom", val)
val = c.DefaultString("Name11", "Jerry")
assert.Equal(t, "Jerry", val)
val = c.DefaultString("NameInvalid", "Jerry")
assert.Equal(t, "Jerry", val)
}
func TestConfigContainer_DefaultStrings(t *testing.T) {
data := `
Name=["Tom", "Jerry"]
NameInvalid="Tom"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val := c.DefaultStrings("Name", []string{"Jerry"})
assert.Equal(t, []string{"Tom", "Jerry"}, val)
val = c.DefaultStrings("Name11", []string{"Jerry"})
assert.Equal(t, []string{"Jerry"}, val)
val = c.DefaultStrings("NameInvalid", []string{"Jerry"})
assert.Equal(t, []string{"Jerry"}, val)
}
func TestConfigContainer_DIY(t *testing.T) {
data := `
Name=["Tom", "Jerry"]
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
_, err = c.DIY("Name")
assert.Nil(t, err)
}
func TestConfigContainer_Float(t *testing.T) {
data := `
Price=12.3
PriceInvalid="12.3"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val, err := c.Float("Price")
assert.Nil(t, err)
assert.Equal(t, 12.3, val)
_, err = c.Float("Price11")
assert.Equal(t, config.KeyNotFoundError, err)
_, err = c.Float("PriceInvalid")
assert.Equal(t, config.InvalidValueTypeError, err)
}
func TestConfigContainer_Int(t *testing.T) {
data := `
Age=12
AgeInvalid="13"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val, err := c.Int("Age")
assert.Nil(t, err)
assert.Equal(t, 12, val)
_, err = c.Int("Age11")
assert.Equal(t, config.KeyNotFoundError, err)
_, err = c.Int("AgeInvalid")
assert.Equal(t, config.InvalidValueTypeError, err)
}
func TestConfigContainer_GetSection(t *testing.T) {
data := `
[servers]
# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
m, err := c.GetSection("servers")
assert.Nil(t, err)
assert.NotNil(t, m)
assert.Equal(t, 2, len(m))
}
func TestConfigContainer_String(t *testing.T) {
data := `
Name="Tom"
NameInvalid=13
[Person]
Name="Jerry"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val, err := c.String("Name")
assert.Nil(t, err)
assert.Equal(t, "Tom", val)
_, err = c.String("Name11")
assert.Equal(t, config.KeyNotFoundError, err)
_, err = c.String("NameInvalid")
assert.Equal(t, config.InvalidValueTypeError, err)
val, err = c.String("Person.Name")
assert.Nil(t, err)
assert.Equal(t, "Jerry", val)
}
func TestConfigContainer_Strings(t *testing.T) {
data := `
Name=["Tom", "Jerry"]
NameInvalid="Tom"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
val, err := c.Strings("Name")
assert.Nil(t, err)
assert.Equal(t, []string{"Tom", "Jerry"}, val)
_, err = c.Strings("Name11")
assert.Equal(t, config.KeyNotFoundError, err)
_, err = c.Strings("NameInvalid")
assert.Equal(t, config.InvalidValueTypeError, err)
}
func TestConfigContainer_Set(t *testing.T) {
data := `
Name=["Tom", "Jerry"]
NameInvalid="Tom"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
err = c.Set("Age", "11")
assert.Nil(t, err)
age, err := c.String("Age")
assert.Nil(t, err)
assert.Equal(t, "11", age)
}
func TestConfigContainer_SubAndMushall(t *testing.T) {
data := `
[servers]
# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
assert.Nil(t, err)
assert.NotNil(t, c)
sub, err := c.Sub("servers")
assert.Nil(t, err)
assert.NotNil(t, sub)
sub, err = sub.Sub("alpha")
assert.Nil(t, err)
assert.NotNil(t, sub)
ip, err := sub.String("ip")
assert.Nil(t, err)
assert.Equal(t, "10.0.0.1", ip)
svr := &Server{}
err = sub.Unmarshaler("", svr)
assert.Nil(t, err)
assert.Equal(t, "10.0.0.1", svr.Ip)
svr = &Server{}
err = c.Unmarshaler("servers.alpha", svr)
assert.Nil(t, err)
assert.Equal(t, "10.0.0.1", svr.Ip)
}
func TestConfigContainer_SaveConfigFile(t *testing.T) {
filename := "test_config.toml"
path := os.TempDir() + string(os.PathSeparator) + filename
data := `
[servers]
# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
`
cfg := &Config{}
c, err := cfg.ParseData([]byte(data))
fmt.Println(path)
assert.Nil(t, err)
assert.NotNil(t, c)
sub, err := c.Sub("servers")
assert.Nil(t, err)
err = sub.SaveConfigFile(path)
assert.Nil(t, err)
}
type Server struct {
Ip string `toml:"ip"`
}
|
# -*- coding: utf-8 -*-
# FeedCrawler
# Project by https://github.com/rix1337
# Contains code from:
# https://github.com/Gutz-Pilz/pyLoad-stuff/blob/master/SJ.py
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import urlopen, Request
import simplejson as json
from feedcrawler import internal
from feedcrawler.config import CrawlerConfig
def api_request_cutter(message, n):
for i in range(0, len(message), n):
yield message[i:i + n]
def notify(items):
notifications = CrawlerConfig('Notifications', )
homeassistant_settings = notifications.get("homeassistant").split(',')
pushbullet_token = notifications.get("pushbullet")
telegram_settings = notifications.get("telegram").split(',')
pushover_settings = notifications.get("pushover").split(',')
if len(items) > 0:
cut_items = list(api_request_cutter(items, 5))
if len(notifications.get("homeassistant")) > 0:
for cut_item in cut_items:
homassistant_url = homeassistant_settings[0]
homeassistant_password = homeassistant_settings[1]
home_assistant(cut_item, homassistant_url,
homeassistant_password)
if len(notifications.get("pushbullet")) > 0:
pushbullet(items, pushbullet_token)
if len(notifications.get("telegram")) > 0:
for cut_item in cut_items:
telegram_token = telegram_settings[0]
telegram_chatid = telegram_settings[1]
telegram(cut_item, telegram_token, telegram_chatid)
if len(notifications.get('pushover')) > 0:
for cut_item in cut_items:
pushover_user = pushover_settings[0]
pushover_token = pushover_settings[1]
pushover(cut_item, pushover_user, pushover_token)
def home_assistant(items, homassistant_url, homeassistant_password):
data = urlencode({
'title': 'FeedCrawler:',
'body': "\n\n".join(items)
}).encode("utf-8")
try:
req = Request(homassistant_url, data)
req.add_header('X-HA-Access', homeassistant_password)
req.add_header('Content-Type', 'application/json')
response = urlopen(req)
    except HTTPError:
        internal.logger.debug('ERROR - Could not reach the Home Assistant API')
        return False
    res = json.load(response)
    if res['sender_name']:
        internal.logger.debug('Home Assistant notification sent successfully')
    else:
        internal.logger.debug('ERROR - Could not send to Home Assistant')
def telegram(items, token, chatid):
data = urlencode({
'chat_id': chatid,
'text': "\n\n".join(items)
}).encode("utf-8")
try:
req = Request("https://api.telegram.org/bot" + token + "/sendMessage", data)
response = urlopen(req)
    except HTTPError:
        internal.logger.debug('ERROR - Could not reach the Telegram API')
        return False
    res = json.load(response)
    if res['ok']:
        internal.logger.debug('Telegram notification sent successfully')
    else:
        internal.logger.debug('ERROR - Could not send to Telegram')
def pushbullet(items, token):
data = urlencode({
'type': 'note',
'title': 'FeedCrawler:',
'body': "\n\n".join(items)
}).encode("utf-8")
try:
req = Request('https://api.pushbullet.com/v2/pushes', data)
req.add_header('Access-Token', token)
response = urlopen(req)
    except HTTPError:
        internal.logger.debug('ERROR - Could not reach the Pushbullet API')
        return False
    res = json.load(response)
    if res['sender_name']:
        internal.logger.debug('Pushbullet notification sent successfully')
    else:
        internal.logger.debug('ERROR - Could not send to Pushbullet')
def pushover(items, pushover_user, pushover_token):
data = urlencode({
'user': pushover_user,
'token': pushover_token,
'title': 'FeedCrawler',
'message': "\n\n".join(items)
}).encode("utf-8")
try:
req = Request('https://api.pushover.net/1/messages.json', data)
response = urlopen(req)
    except HTTPError:
        internal.logger.debug('ERROR - Could not reach the Pushover API')
        return False
    res = json.load(response)
    if res['status'] == 1:
        internal.logger.debug('Pushover notification sent successfully')
    else:
        internal.logger.debug('ERROR - Could not send to Pushover')
|
#!/bin/bash
#
# script to build HermitCore's bootstrap compiler
#
# $1 = specifies the target architecture
# $2 = specifies the installation directory
BUILDDIR=build
CLONE_DEPTH="--depth=50"
PREFIX="$2"
TARGET=$1
NJOBS=-j"$(nproc)"
PATH=$PATH:$PREFIX/bin
ARCH_OPT="-mtune=native"
export CFLAGS_FOR_TARGET="-m64 -O3 -ftree-vectorize $ARCH_OPT"
export GOFLAGS_FOR_TARGET="-m64 -O3 -ftree-vectorize $ARCH_OPT"
export FCFLAGS_FOR_TARGET="-m64 -O3 -ftree-vectorize $ARCH_OPT"
export FFLAGS_FOR_TARGET="-m64 -O3 -ftree-vectorize $ARCH_OPT"
export CXXFLAGS_FOR_TARGET="-m64 -O3 -ftree-vectorize $ARCH_OPT"
echo "Build bootstrap toolchain for $TARGET with $NJOBS jobs for $PREFIX"
sleep 1
mkdir -p $BUILDDIR
cd $BUILDDIR
if [ ! -d "binutils" ]; then
git clone $CLONE_DEPTH https://github.com/hermitcore/binutils.git
fi
if [ ! -d "gcc" ]; then
git clone $CLONE_DEPTH https://github.com/hermitcore/gcc.git
wget ftp://gcc.gnu.org/pub/gcc/infrastructure/isl-0.15.tar.bz2 -O isl-0.15.tar.bz2
tar jxf isl-0.15.tar.bz2
mv isl-0.15 gcc/isl
fi
if [ ! -d "tmp/binutils" ]; then
mkdir -p tmp/binutils
cd tmp/binutils
../../binutils/configure --target=$TARGET --prefix=$PREFIX --with-sysroot --disable-multilib --disable-shared --disable-nls --disable-gdb --disable-libdecnumber --disable-readline --disable-sim --disable-libssp --enable-tls --enable-lto --enable-plugin && make $NJOBS CFLAGS=-Wno-format-overflow && make install
cd -
fi
if [ ! -d "tmp/bootstrap" ]; then
mkdir -p tmp/bootstrap
cd tmp/bootstrap
../../gcc/configure --target=$TARGET --prefix=$PREFIX --without-headers --disable-multilib --with-isl --enable-languages=c,c++,lto --disable-nls --disable-shared --disable-libssp --disable-libgomp --enable-threads=posix --enable-tls --enable-lto --disable-symvers && make $NJOBS all-gcc && make install-gcc
cd -
fi
|
import './Prosign.css';
import { Link } from 'react-router-dom';
function Prosign() {
return (
<div className ="fullscreen-signin">
<div className ="header-signin">
</div>
<div className ="content-signin">
<div className ="boxchatlogo-signin">
<img src="/Project/Prologin/boxchat.jpg" alt ="/Project/Prologin/boxchat.jpg"
width="600px" height="450px"/>
</div>
<div className ="user-signin">
<img src="/Project/Proregister/usernameText.jpg" alt="/Project/Proregister/usernameText.jpg"
width="100px" height="50px" />
</div>
<input type="USname-sign" id="UnameSign" name="usernameSign" placeholder="Your Username..."/>
<div className ="pass-signin">
<img src="/Project/Proregister/passwordText.jpg" alt="/Project/Proregister/passwordText.jpg"
width="100px" height="50px" />
</div>
<input type="PW-sign" id="PassSign" name="passeordSign" placeholder="Your Password..."/>
<Link to="/Profriend">
<div className ="logosignin-sign">
<img src="/Project/Prologin/sign.jpg" alt ="/Project/Prologin/sign.jpg"
width="350px" height="100px"/>
</div>
</Link>
</div>
<div className ="footer-signin">
</div>
</div>
);
}
export default Prosign;
|
#include <string>
// student class
class Student {
private:
std::string name;
float grades;
public:
// constructor
Student(std::string name, float grades):
name(name), grades(grades) {}
// getters
std::string getName() const {
return name;
}
float getGrades() const {
return grades;
}
};
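
// Minimal usage sketch (assumption: this file is compiled as a standalone demo program).
#include <iostream>

int main() {
    // Construct a student and read the fields back through the getters.
    Student s("Ada", 95.5f);
    std::cout << s.getName() << " scored " << s.getGrades() << '\n';
    return 0;
}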
|
$(document).ready(function () {
var grid_selector = "#jqGrid";
var pager_selector = "#jqGridPager";
//resize to fit page size
$(window).on('resize.jqGrid', function () {
$(grid_selector).jqGrid( 'setGridWidth', $(".page-content").width() );
})
//resize on sidebar collapse/expand
var parent_column = $(grid_selector).closest('[class*="col-"]');
$(document).on('settings.ace.jqGrid' , function(ev, event_name, collapsed) {
if( event_name === 'sidebar_collapsed' || event_name === 'main_container_fixed' ) {
//setTimeout is for webkit only to give time for DOM changes and then redraw!!!
setTimeout(function() {
$(grid_selector).jqGrid( 'setGridWidth', parent_column.width() );
}, 0);
}
})
$("#jqGrid").jqGrid({
url: 'system/users/list',
mtype: "GET",
datatype: "json",
        //caption: "Role list",
colNames: ['操作', '角色名称', '角色标识', '备注'],
colModel: [
{ name: 'ID', width: 50, formatter:editFormatter, key:true, sortable:false},
{ name: 'NAME', width: 75 },
{ name: 'IDENT', width: 50 },
{ name: 'REMARK', width: 150 }
],
        page: 1, // current page (page=1)
        height: 325, // grid height
        shrinkToFit: true,
        viewrecords: true, // show the total record count
        rowNum: 10, // records per page (rows=10)
        autowidth: true, // auto-fit the grid width
        multiselect: true, // allow multi-select; true shows checkboxes
        multiselectWidth: 25, // width of the multi-select column
        sortable: true, // sortable; sort parameters are passed to the backend
        sortname: 'NAME', // sort field name (sidx=NAME)
        sortorder: "desc", // sort direction (sord=desc)
pager: "#jqGridPager",
altRows: true,
rownumbers: true,
loadComplete : function() {
var table = this;
setTimeout(function(){
styleCheckbox(table);
updateActionIcons(table);
updatePagerIcons(table);
enableTooltips(table);
}, 0);
},
});
$(window).triggerHandler('resize.jqGrid');
});
function editFormatter(cellvalue, options, rowObject){
//FIXME
return '<a href="#" data-toggle="modal" data-target=".modal" onClick="edit(\'' + cellvalue + '\')">' + '编辑' + '</a>';
}
function loadItem(id){
ajaxGet("system/roles/get/" + id, function(json){
console.log(json);
var item = json.data;
$("input[name='role.ROLE_ID']").val(item.ROLE_ID);
$("input[name='role.NAME']").val(item.NAME);
$("input[name='role.IDENT']").val(item.IDENT);
$("textarea[name='role.REMARK']").val(item.REMARK);
});
}
|
#!/bin/bash
set -eo pipefail
# Utils scripts
. ./.bash_utils
log "\nUpdating config to your PC...\n"
copy .bash_aliases $BASH_ALIASES_PATH
copy .bash_profile $BASH_PROFILE_PATH
copy .bashrc $BASHRC_PATH
copy .gitconfig $GIT_CONFIG_PATH
copy .tmux.conf $TMUX_PATH
copy .vimrc $VIM_PATH
copy .zshrc $ZSH_PATH
if [[ "$OSTYPE" == *"linux"* ]]; then
# X
copy .Xdefaults $X_DEFAULT_PATH
copy .xinitrc $X_INITRC_PATH
copy .Xmodmap $X_MODMAP_PATH
copy .xprofile $X_PROFILE_PATH
copy .Xresources $X_RESOURCE_PATH
# Apps
copy .dunstrc $DUNST_PATH
# Tools
copy_folder .local/bin ~/.local
fi
|
package org.apache.directory.api.ldap.model.entry;
public interface Value<T> {
}
|
def add_user(user, guild):
# Check if the user does not already exist in the guild
if len(get_user(user, guild, no_null=True)) == 0:
# Add the user to the guild
# Your implementation to add the user to the guild goes here
# For example, you might use an ORM to insert the user into the database
# Assuming an ORM usage, the code might look like:
# new_user = User(user_id=user, guild_id=guild)
# session.add(new_user)
# session.commit()
# Replace the above code with the appropriate database interaction based on your implementation
pass # Placeholder for the actual implementation
|
export declare function createProject(): Promise<void>;
export declare function confirmTelemetryConsent(): Promise<boolean | undefined>;
//# sourceMappingURL=project-creation.d.ts.map
|
from typing import Union, Dict
from werkzeug.exceptions import abort
def process_query_results(vis, query) -> Union[Dict, None]:
if vis is not None:
vis_dict = vis.to_dict()
else:
vis_dict = None
qr = query.latest_query_data
if qr is None:
        abort(400, description="No Results for this query")
else:
return {'vis_dict': vis_dict, 'qr': qr}
|
let multiplexing_config = {
let mut mplex_config = mplex::MplexConfig::new();
mplex_config.max_concurrent_streams(100);
mplex_config.enable_stream_prioritization();
mplex_config.max_buffer_size(1024);
mplex_config
};
|
node.override['erlang']['esl']['version'] = "18.3-1"
node.override['elixir']['version'] = "1.2.5"
if (elixir_apps = applications.select(&:elixir?)).any?
include_recipe 'elixir::default'
end
|
import sys
import logging
import time
def initiate_logger(log_path: str) -> logging.Logger:
"""
Initialize a logger with file handler and stream handler
"""
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s %(levelname)-s: %(message)s', datefmt='%H:%M:%S')
fh = logging.FileHandler(log_path)
fh.setLevel(logging.INFO)
fh.setFormatter(formatter)
logger.addHandler(fh)
sh = logging.StreamHandler(sys.stdout)
sh.setLevel(logging.INFO)
sh.setFormatter(formatter)
logger.addHandler(sh)
logger.info('===================================')
logger.info('Begin executing at {}'.format(time.ctime()))
logger.info('===================================')
return logger
|
def inverse_sort_0s_and_1s(arr):
s = sorted(arr, reverse=True)
return s
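
# Usage sketch (assumed example input): reverse-sorting puts the 1s before the 0s.
if __name__ == "__main__":
    print(inverse_sort_0s_and_1s([0, 1, 0, 1, 1, 0]))  # -> [1, 1, 1, 0, 0, 0]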
|
import { CondItem, KeyValuePair } from './src/types'
export namespace G {
export function isDefined(subject: any): boolean
export function ifDefined<T = any>(subject: any, then: (subject: T) => void): T
export function call(fn: Function): void
export function T(): void
export function hasKeys(subject: any): boolean
export function toPairs<T extends {}>(subject: KeyValuePair): Array<[string, T]>
export function fromPairs<T extends {}>(subject: Array<[string, any]>): T
export function compose<T = any>(...functions: Array<Function>): (firstArg?: any) => T
export function clearObject(subject: KeyValuePair): KeyValuePair
export function values<T extends {}>(subject: KeyValuePair): Array<T>
export function always<T = any>(value: any): () => T
export function is(type: any, subject: any): boolean
export function cond<T = any, S = any>(array: Array<CondItem<T, S>>): (item: T) => S
export function hasElements(subject: any): boolean
export function all(...args: Array<boolean>): boolean
export function compareFunctions(a: Function, b: Function): boolean
export function isEmpty(subject: any): boolean
export function splitEvery<T = [] | string>(limit: number, collection: T): Array<T>
}
|
# Import modules
import socket
from http.server import HTTPServer
from http.server import BaseHTTPRequestHandler
# Define server IP address and port number
HOST_NAME = "localhost"
PORT_NUMBER = 80
# Create the server object
server = HTTPServer((HOST_NAME, PORT_NUMBER), BaseHTTPRequestHandler)
# Start the web server
print("Starting webserver on port", PORT_NUMBER)
server.serve_forever()
|
//
// Copyright (c) 2015-2020 Microsoft Corporation and Contributors.
// SPDX-License-Identifier: Apache-2.0
//
//
// Created by maharrim on 5/18/2020.
//
#ifdef ANDROID
#include <android/log.h>
#endif
#include "common/Common.hpp"
#include "common/MockIRuntimeConfig.hpp"
#include "common/MockIOfflineStorageObserver.hpp"
#include "offline/MemoryStorage.hpp"
#ifdef ANDROID
#include "offline/OfflineStorage_Room.hpp"
#endif
#include "offline/OfflineStorage_SQLite.hpp"
#include "NullObjects.hpp"
#include <atomic>
#include <fstream>
#include <functional>
#include <memory>
#include <random>
#include <sstream>
#include <string>
#include <vector>
#ifdef ANDROID
#include <http/HttpClient_Android.hpp>
#endif
namespace MAE = ::Microsoft::Applications::Events;
using namespace testing;
enum class StorageImplementation {
Room,
SQLite,
Memory
};
std::ostream & operator<<(std::ostream &o, StorageImplementation i) {
switch (i) {
case StorageImplementation::Room:
return o << "Room";
case StorageImplementation::SQLite:
return o << "SQLite";
case StorageImplementation ::Memory:
return o << "Memory";
default:
return o << static_cast<int>(i);
}
}
class OfflineStorageTestsRoom : public TestWithParam<StorageImplementation> {
public:
StrictMock<MockIRuntimeConfig> configMock;
StrictMock<MockIOfflineStorageObserver> observerMock;
ILogManager * const logManager;
std::unique_ptr<MAE::IOfflineStorage> offlineStorage;
NullLogManager nullLogManager;
StorageImplementation implementation;
OfflineStorageTestsRoom() : logManager(&nullLogManager)
{
EXPECT_CALL(configMock, GetOfflineStorageMaximumSizeBytes()).WillRepeatedly(
Return(32 * 4096));
EXPECT_CALL(configMock, GetMaximumRetryCount()).WillRepeatedly(
Return(5));
std::ostringstream name;
implementation = GetParam();
switch (implementation) {
#ifdef ANDROID
case StorageImplementation::Room:
configMock[CFG_STR_CACHE_FILE_PATH] = "OfflineStorageTestsRoom.db";
offlineStorage = std::make_unique<MAE::OfflineStorage_Room>(nullLogManager, configMock);
EXPECT_CALL(observerMock, OnStorageOpened("Room/Init"))
.RetiresOnSaturation();
break;
#endif
case StorageImplementation::SQLite:
name << MAE::GetTempDirectory() << "OfflineStorageTestsSQLite.db";
configMock[CFG_STR_CACHE_FILE_PATH] = name.str();
offlineStorage = std::make_unique<MAE::OfflineStorage_SQLite>(nullLogManager, configMock);
EXPECT_CALL(observerMock, OnStorageOpened("SQLite/Default"))
.RetiresOnSaturation();
break;
case StorageImplementation::Memory:
offlineStorage = std::make_unique<MAE::MemoryStorage>(nullLogManager, configMock);
break;
}
offlineStorage->Initialize(observerMock);
}
~OfflineStorageTestsRoom()
{
offlineStorage->Shutdown();
}
void DeleteAllRecords() {
auto records = offlineStorage->GetRecords(true, EventLatency_Unspecified, 0);
if (records.empty()) {
return;
}
std::vector<std::string> ids;
ids.reserve(records.size());
for (auto &record : records) {
ids.emplace_back(std::move(record.id));
}
HttpHeaders h;
bool fromMemory = false;
offlineStorage->DeleteRecords(ids, h, fromMemory);
EXPECT_EQ(0, offlineStorage->GetRecordCount());
}
void SetUp() override {
DeleteAllRecords();
}
void TearDown() override {
DeleteAllRecords();
}
void PopulateRecords() {
auto now = PAL::getUtcSystemTimeMs();
for (EventLatency latency : {EventLatency_Normal, EventLatency_RealTime}) {
StorageRecordVector records;
for (size_t i = 0; i < 10; ++i) {
std::ostringstream id_stream;
id_stream << "Fred-" << i << "-" << latency;
std::string id = id_stream.str();
records.emplace_back(
id,
id,
latency,
EventPersistence_Normal,
now,
StorageBlob{1, 2, 3});
}
offlineStorage->StoreRecords(records);
}
EXPECT_EQ(20, offlineStorage->GetRecordCount(EventLatency_Unspecified));
}
void VerifyBlob(StorageBlob const & blob)
{
EXPECT_EQ(3, blob.size());
for (size_t i = 0; i < blob.size(); ++i) {
EXPECT_EQ(i+1, blob[i]);
}
}
};
TEST_P(OfflineStorageTestsRoom, TestBadFile)
{
auto path = GetTempDirectory();
switch (implementation) {
case StorageImplementation::Memory:
return;
case StorageImplementation::Room:
path = path.substr(0, path.length() - 6) + "databases/BadDatabase.db";
break;
case StorageImplementation::SQLite:
path = path + "BadDatabase.db";
break;
}
auto badFile = std::ofstream(path);
badFile << "this is a BAD database" << std::endl;
badFile.close();
std::unique_ptr<MAE::IOfflineStorage> badStorage;
switch (implementation) {
#ifdef ANDROID
case StorageImplementation::Room:
configMock[CFG_STR_CACHE_FILE_PATH] = "BadDatabase.db";
badStorage = std::make_unique<MAE::OfflineStorage_Room>(nullLogManager, configMock);
EXPECT_CALL(observerMock, OnStorageOpened("Room/Init"))
.RetiresOnSaturation();
break;
#endif
case StorageImplementation::SQLite:
configMock[CFG_STR_CACHE_FILE_PATH] = path.c_str();
badStorage = std::make_unique<MAE::OfflineStorage_SQLite>(nullLogManager, configMock);
EXPECT_CALL(observerMock, OnStorageOpened("SQLite/Clean"))
.RetiresOnSaturation();
EXPECT_CALL(observerMock, OnStorageFailed("1")).RetiresOnSaturation();
break;
default:
return;
}
badStorage->Initialize(observerMock);
std::atomic<size_t> found(0);
EXPECT_FALSE(badStorage->GetAndReserveRecords( [&found](StorageRecord && record)->bool {
found += 1;
return true;
}, 5));
badStorage->Shutdown();
}
TEST_P(OfflineStorageTestsRoom, TestStoreRecords)
{
auto now = PAL::getUtcSystemTimeMs();
StorageRecordVector records;
for (size_t i = 0; i < 10; ++i) {
std::ostringstream id_stream;
id_stream << "Fred-" << i;
std::string id = id_stream.str();
records.emplace_back(
id,
id,
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob {1, 2, 3});
}
offlineStorage->StoreRecords(records);
EXPECT_EQ(10, offlineStorage->GetRecordCount(EventLatency_Normal));
EXPECT_EQ(10, offlineStorage->GetRecordCount(EventLatency_Unspecified));
auto found = offlineStorage->GetRecords(true, EventLatency_Unspecified, 0);
EXPECT_EQ(10, found.size());
for (auto const & record : found) {
VerifyBlob(record.blob);
EXPECT_EQ(EventLatency_Normal, record.latency);
EXPECT_EQ(EventPersistence_Normal, record.persistence);
EXPECT_EQ(now, record.timestamp);
EXPECT_EQ(0, record.reservedUntil);
}
}
std::ostream & operator<<(std::ostream &os, EventLatency const &latency)
{
switch (latency) {
case EventLatency_Normal:
os << "Normal";
break;
case EventLatency_RealTime:
os << "Real-Time";
break;
default:
os << "Other-" << static_cast<int>(latency);
break;
}
return os;
}
TEST_P(OfflineStorageTestsRoom, TestGetAndReserveManyAcceptSome) {
StorageRecordVector records;
auto now = PAL::getUtcSystemTimeMs();
for (size_t i = 0; i < 500000; ++i) {
std::ostringstream id_stream;
id_stream << "Fred-" << i;
std::string id = id_stream.str();
records.emplace_back(
id,
id,
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob{1, 2, 3});
if (records.size() >= 256) {
offlineStorage->StoreRecords(records);
records.clear();
}
}
if (!records.empty()) {
offlineStorage->StoreRecords(records);
records.clear();
}
EXPECT_TRUE(offlineStorage->GetAndReserveRecords(
[&records](StorageRecord &&record) -> bool {
if (records.size() >= 256) {
return false;
}
records.push_back(record);
return true;
},
5
));
}
TEST_P(OfflineStorageTestsRoom, TestGetAndReserveAcceptAll)
{
PopulateRecords();
StorageRecordVector found;
EXPECT_TRUE(offlineStorage->GetAndReserveRecords( [&found](StorageRecord && record)->bool {
found.push_back(record);
return true;
}, 5));
EXPECT_EQ(20, found.size());
ASSERT_EQ(20, offlineStorage->LastReadRecordCount());
ASSERT_EQ(
implementation == StorageImplementation::Memory,
offlineStorage->IsLastReadFromMemory());
for (size_t i = 0; i < 10; ++i) {
EXPECT_EQ(EventLatency_RealTime, found[i].latency);
}
for (size_t i = 10; i < 20; ++i) {
EXPECT_EQ(EventLatency_Normal, found[i].latency);
}
for (auto const & record : found) {
VerifyBlob(record.blob);
}
}
TEST_P(OfflineStorageTestsRoom, TestAcceptFunctor) {
PopulateRecords();
StorageRecordVector found;
size_t calls = 0u;
EXPECT_TRUE(offlineStorage->GetAndReserveRecords(
[&found, &calls](StorageRecord && record)->bool {
++calls;
if (record.latency == EventLatency_RealTime) {
found.push_back(record);
return true;
}
return false;
}
, 5));
ASSERT_EQ(10, found.size());
ASSERT_EQ(11, calls);
}
TEST_P(OfflineStorageTestsRoom, TestSettings) {
if (implementation == StorageImplementation::Memory) {
return;
}
for (size_t i = 0; i < 10; ++i) {
std::ostringstream nameStream;
nameStream << "Fred" << i;
offlineStorage->StoreSetting(nameStream.str(), nameStream.str());
EXPECT_EQ(nameStream.str(), offlineStorage->GetSetting(nameStream.str()));
}
offlineStorage->StoreSetting("Fred3", "another value");
for (size_t i = 0; i < 10; ++i) {
std::ostringstream nameStream;
nameStream << "Fred" << i;
if (i == 3) {
EXPECT_EQ(std::string("another value"), offlineStorage->GetSetting(nameStream.str()));
}
else {
EXPECT_EQ(nameStream.str(), offlineStorage->GetSetting(nameStream.str()));
}
}
EXPECT_EQ("", offlineStorage->GetSetting(std::string("something")));
for (size_t i = 0; i < 10; ++i) {
std::ostringstream nameStream;
nameStream << "Fred" << i;
offlineStorage->StoreSetting(nameStream.str(), "");
}
for (size_t i = 0; i < 10; ++i) {
std::ostringstream nameStream;
nameStream << "Fred" << i;
EXPECT_EQ("", offlineStorage->GetSetting(nameStream.str()));
}
}
TEST_P(OfflineStorageTestsRoom, TestGetRecords) {
if (implementation == StorageImplementation::Memory) {
// For MemoryStorage, GetRecords() returns very
// different results.
return;
}
auto now = PAL::getUtcSystemTimeMs();
StorageRecordVector records;
StorageRecord x;
for (size_t i = 0; i < 20; ++i) {
std::ostringstream s;
s << "Fred-" << i;
records.emplace_back(
s.str(),
s.str(),
i < 10 ? EventLatency_Normal : EventLatency_RealTime,
EventPersistence_Normal,
now,
StorageBlob {1, 2, 3}
);
}
offlineStorage->StoreRecords(records);
auto found = offlineStorage->GetRecords(false, EventLatency_Normal, 0);
ASSERT_EQ(10, found.size());
for (StorageRecord record : found) {
ASSERT_EQ(EventLatency_Normal, record.latency);
}
auto shutdown_found = offlineStorage->GetRecords(true, EventLatency_Normal, 0);
ASSERT_EQ(20, shutdown_found.size());
for (size_t i = 0; i < 10; ++i) {
ASSERT_EQ(EventLatency_RealTime, shutdown_found[i].latency);
}
for (size_t i = 10; i < 20; ++i) {
ASSERT_EQ(EventLatency_Normal, shutdown_found[i].latency);
}
}
TEST_P(OfflineStorageTestsRoom, TestManyExpiredRecords) {
size_t count = 5000;
auto now = PAL::getUtcSystemTimeMs();
auto retries = configMock.GetMaximumRetryCount() + 1;
std::vector<StorageRecord> manyRecords;
manyRecords.reserve(count);
EXPECT_EQ(0, offlineStorage->GetRecordCount(EventLatency_Normal));
for (size_t i = 0; i < count; ++i) {
std::string thing = std::to_string(i);
manyRecords.emplace_back(
thing, // id std::string const& tenantToken, EventLatency latency, EventPersistence persistence)
thing, // token
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob {1, 2, 3}
);
}
offlineStorage->StoreRecords(manyRecords);
std::vector<StorageRecordId> manyIds;
manyIds.reserve(count);
if (implementation != StorageImplementation::Memory) {
EXPECT_CALL(observerMock, OnStorageRecordsDropped(SizeIs(count))).
WillOnce(Return());
}
for (size_t retry = 0; retry < retries; ++retry) {
manyRecords.clear();
manyIds.clear();
offlineStorage->GetAndReserveRecords(
[&manyRecords](StorageRecord &&record) -> bool {
manyRecords.emplace_back(record);
return true;
},
5000u);
EXPECT_EQ(count, manyRecords.size());
EXPECT_THAT(manyRecords, Each(Field(&StorageRecord::retryCount, Eq(retry))));
for (auto const & record : manyRecords) {
manyIds.emplace_back(std::move(record.id));
}
bool fromMemory;
offlineStorage->ReleaseRecords(manyIds, true, HttpHeaders(), fromMemory);
}
size_t remainingRecords = 0;
if (implementation == StorageImplementation::Memory) {
remainingRecords = count;
}
EXPECT_EQ(remainingRecords, offlineStorage->GetRecordCount(EventLatency_Normal));
}
TEST_P(OfflineStorageTestsRoom, LastReadRecordCount) {
size_t count = 5000;
size_t consume = 315;
std::hash<size_t> id_hash;
StorageRecordVector records;
records.reserve(count);
auto now = PAL::getUtcSystemTimeMs();
for (size_t i = 0; i < count; i++) {
auto id = id_hash(i);
auto id_string = std::to_string(id);
records.emplace_back(
id_string,
id_string,
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob {3, 1, 4, 1, 5, 9}
);
}
offlineStorage->StoreRecords(records);
records.clear();
offlineStorage->GetAndReserveRecords(
[&records, consume](StorageRecord && record)->bool
{
if (records.size() >= consume) {
return false;
}
records.emplace_back(record);
return true;
},
5000
);
EXPECT_EQ(consume, offlineStorage->LastReadRecordCount());
}
TEST_P(OfflineStorageTestsRoom, ReleaseActuallyReleases) {
auto now = PAL::getUtcSystemTimeMs();
StorageRecord r(
"Fred",
"George",
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob {1, 2, 3}
);
offlineStorage->StoreRecord(r);
offlineStorage->GetAndReserveRecords(
[](StorageRecord && record)->bool
{
return false;
},
5000
);
EXPECT_EQ(0, offlineStorage->LastReadRecordCount());
StorageRecordVector records;
offlineStorage->GetAndReserveRecords(
[&records] (StorageRecord && record)->bool
{
records.emplace_back(std::move(record));
return true;
}, 5000
);
EXPECT_EQ(1, offlineStorage->LastReadRecordCount());
EXPECT_EQ(1, records.size());
offlineStorage->GetAndReserveRecords(
[] (StorageRecord && record)->bool
{
ADD_FAILURE();
return false;
},
5000
);
}
TEST_P(OfflineStorageTestsRoom, DeleteByToken)
{
StorageRecordVector records;
auto now = PAL::getUtcSystemTimeMs();
for (size_t i = 0; i < 1000; ++i) {
auto id = std::to_string(i);
auto tenantToken = std::to_string(i % 5);
records.emplace_back(
id,
tenantToken,
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob {1, 2, static_cast<unsigned char>(i), 4, 5}
);
}
offlineStorage->StoreRecords(records);
EXPECT_EQ(1000, offlineStorage->GetRecordCount());
offlineStorage->DeleteRecords({{ "tenant_token", "0"}});
EXPECT_EQ(800, offlineStorage->GetRecordCount());
}
TEST_P(OfflineStorageTestsRoom, ResizeDB)
{
if (implementation == StorageImplementation::Memory) {
return;
}
auto now = PAL::getUtcSystemTimeMs();
StorageRecord record(
"",
"TenantFred",
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob {1, 2, 3, 4}
);
size_t index = 1;
while (offlineStorage->GetSize() <= configMock.GetOfflineStorageMaximumSizeBytes()) {
record.id = std::to_string(index);
offlineStorage->StoreRecord(record);
index += 1;
}
auto preCount = offlineStorage->GetRecordCount();
offlineStorage->ResizeDb();
auto postCount = offlineStorage->GetRecordCount();
EXPECT_GT(preCount, postCount);
}
TEST_P(OfflineStorageTestsRoom, StoreManyRecords)
{
constexpr size_t targetSize = 2 * 1024 * 1024;
constexpr size_t blobSize = 512;
constexpr size_t blockSize = 1024;
std::random_device rd; // non-deterministic generator
std::mt19937_64 gen(rd()); // to seed mersenne twister.
std::uniform_int_distribution<> randomByte(0,255);
std::uniform_int_distribution<uint64_t> randomWord(0, UINT64_MAX);
auto now = PAL::getUtcSystemTimeMs();
StorageBlob masterBlob;
masterBlob.reserve(blobSize);
while (masterBlob.size() < blobSize) {
masterBlob.push_back(randomByte(gen));
}
size_t blocks = 0;
StorageRecordVector records;
records.reserve(blockSize);
while (records.size() < blockSize) {
records.emplace_back(
"",
"Fred-Doom-Token23",
EventLatency_Normal,
EventPersistence_Normal,
now,
StorageBlob(masterBlob)
);
}
while (offlineStorage->GetSize() < targetSize) {
for (auto & record : records) {
record.id = std::to_string(randomWord(gen));
}
offlineStorage->StoreRecords(records);
++blocks;
}
EXPECT_EQ(blocks * blockSize, offlineStorage->GetRecordCount());
}
#ifdef ANDROID
auto values = Values(StorageImplementation::Room, StorageImplementation::SQLite, StorageImplementation::Memory);
#else
auto values = Values(StorageImplementation::SQLite, StorageImplementation::Memory);
#endif
INSTANTIATE_TEST_CASE_P(Storage,
OfflineStorageTestsRoom,
values,
[](const testing::TestParamInfo<OfflineStorageTestsRoom::ParamType>& info)->std::string {
std::ostringstream s;
s << info.param;
return s.str();
});
|
/*
* Copyright © 2019 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*
*/
import {
DEFAULT_NEW_BUCKET_COUNT,
DEFAULT_NEW_BUCKET_SIZE,
DEFAULT_TRIED_BUCKET_COUNT,
DEFAULT_TRIED_BUCKET_SIZE,
PeerKind,
} from '../constants';
// eslint-disable-next-line import/no-cycle
import { ExistingPeerError } from '../errors';
// eslint-disable-next-line import/no-cycle
import { P2PEnhancedPeerInfo, P2PPeerInfo, PeerLists } from '../types';
// eslint-disable-next-line import/no-cycle
import { assignInternalInfo, PEER_TYPE } from '../utils';
// eslint-disable-next-line import/no-cycle
import { NewList } from './new_list';
// eslint-disable-next-line import/no-cycle
import { TriedList } from './tried_list';
// eslint-disable-next-line import/order
import shuffle = require('lodash.shuffle');
// eslint-disable-next-line @typescript-eslint/no-require-imports
export interface PeerBookConfig {
readonly sanitizedPeerLists: PeerLists;
readonly secret: number;
}
export class PeerBook {
private readonly _newPeers: NewList;
private readonly _triedPeers: TriedList;
private readonly _bannedIPs: Set<string>;
private readonly _blacklistedIPs: Set<string>;
private readonly _seedPeers: ReadonlyArray<P2PPeerInfo>;
private readonly _fixedPeers: ReadonlyArray<P2PPeerInfo>;
private readonly _whitelistedPeers: ReadonlyArray<P2PPeerInfo>;
private readonly _unbanTimers: Array<NodeJS.Timer | undefined>;
private readonly _secret: number;
public constructor({ sanitizedPeerLists, secret }: PeerBookConfig) {
this._newPeers = new NewList({
secret,
numOfBuckets: DEFAULT_NEW_BUCKET_COUNT,
bucketSize: DEFAULT_NEW_BUCKET_SIZE,
peerType: PEER_TYPE.NEW_PEER,
});
this._triedPeers = new TriedList({
secret,
numOfBuckets: DEFAULT_TRIED_BUCKET_COUNT,
bucketSize: DEFAULT_TRIED_BUCKET_SIZE,
peerType: PEER_TYPE.TRIED_PEER,
});
this._secret = secret;
this._bannedIPs = new Set([]);
this._blacklistedIPs = new Set([...sanitizedPeerLists.blacklistedIPs]);
this._seedPeers = [...sanitizedPeerLists.seedPeers];
this._fixedPeers = [...sanitizedPeerLists.fixedPeers];
this._whitelistedPeers = [...sanitizedPeerLists.whitelisted];
this._unbanTimers = [];
// Initialize peerBook lists
const newPeersToAdd = [
...sanitizedPeerLists.fixedPeers,
...sanitizedPeerLists.whitelisted,
...sanitizedPeerLists.previousPeers,
];
// Add peers to tried peers if want to re-use previously tried peers
// According to LIP, add whitelist peers to triedPeer by upgrading them initially.
newPeersToAdd.forEach(peerInfo => {
if (!this.hasPeer(peerInfo)) {
this.addPeer(peerInfo);
}
this.upgradePeer(peerInfo);
});
}
public get newPeers(): ReadonlyArray<P2PPeerInfo> {
return this._newPeers.peerList;
}
public get triedPeers(): ReadonlyArray<P2PPeerInfo> {
return this._triedPeers.peerList;
}
public get allPeers(): ReadonlyArray<P2PPeerInfo> {
return [...this.newPeers, ...this.triedPeers];
}
public get seedPeers(): ReadonlyArray<P2PPeerInfo> {
return this._seedPeers;
}
public get fixedPeers(): ReadonlyArray<P2PPeerInfo> {
return this._fixedPeers;
}
public get whitelistedPeers(): ReadonlyArray<P2PPeerInfo> {
return this._whitelistedPeers;
}
public get bannedIPs(): Set<string> {
return new Set([...this._blacklistedIPs, ...this._bannedIPs]);
}
public cleanUpTimers(): void {
this._unbanTimers.forEach(timer => {
if (timer) {
clearTimeout(timer);
}
});
}
public getRandomizedPeerList(
minimumPeerDiscoveryThreshold: number,
maxPeerDiscoveryResponseLength: number,
): ReadonlyArray<P2PPeerInfo> {
const allPeers = [...this.newPeers, ...this.triedPeers];
const min = Math.ceil(Math.min(maxPeerDiscoveryResponseLength, allPeers.length * 0.25));
const max = Math.floor(Math.min(maxPeerDiscoveryResponseLength, allPeers.length * 0.5));
const random = Math.floor(Math.random() * (max - min + 1) + min);
const randomPeerCount = Math.max(
random,
Math.min(minimumPeerDiscoveryThreshold, allPeers.length),
);
return shuffle(allPeers).slice(0, randomPeerCount);
}
public getPeer(peerInfo: P2PPeerInfo): P2PPeerInfo | undefined {
const triedPeer = this._triedPeers.getPeer(peerInfo.peerId);
if (triedPeer) {
return triedPeer;
}
return this._newPeers.getPeer(peerInfo.peerId);
}
public hasPeer(peerInfo: P2PPeerInfo): boolean {
return this._triedPeers.hasPeer(peerInfo.peerId) || this._newPeers.hasPeer(peerInfo.peerId);
}
public addPeer(peerInfo: P2PEnhancedPeerInfo): boolean {
if (this._bannedIPs.has(peerInfo.ipAddress)) {
return false;
}
if (this._triedPeers.getPeer(peerInfo.peerId)) {
throw new ExistingPeerError(peerInfo);
}
this._newPeers.addPeer(this._assignPeerKind(peerInfo));
return true;
}
public removePeer(peerInfo: P2PPeerInfo): void {
this._newPeers.removePeer(peerInfo);
this._triedPeers.removePeer(peerInfo);
}
public updatePeer(peerInfo: P2PPeerInfo): boolean {
if (this._triedPeers.getPeer(peerInfo.peerId)) {
return this._triedPeers.updatePeer(this._assignPeerKind(peerInfo));
}
if (this._newPeers.getPeer(peerInfo.peerId)) {
return this._newPeers.updatePeer(this._assignPeerKind(peerInfo));
}
return false;
}
public upgradePeer(peerInfo: P2PEnhancedPeerInfo): boolean {
if (this._triedPeers.hasPeer(peerInfo.peerId)) {
return true;
}
if (this._newPeers.hasPeer(peerInfo.peerId)) {
this.removePeer(peerInfo);
if (this.bannedIPs.has(peerInfo.ipAddress)) {
return false;
}
this._triedPeers.addPeer(this._assignPeerKind(peerInfo));
return true;
}
return false;
}
public downgradePeer(peerInfo: P2PEnhancedPeerInfo): boolean {
if (this.isTrustedPeer(peerInfo.peerId)) {
return false;
}
if (this._newPeers.hasPeer(peerInfo.peerId)) {
return this._newPeers.failedConnectionAction(peerInfo);
}
if (this._triedPeers.hasPeer(peerInfo.peerId)) {
const failed = this._triedPeers.failedConnectionAction(peerInfo);
if (failed) {
return this.addPeer(peerInfo);
}
}
return false;
}
public isTrustedPeer(peerId: string): boolean {
const isSeedPeer = this.seedPeers.find(peer => peer.peerId === peerId);
const isWhitelistedPeer = this.whitelistedPeers.find(peer => peer.peerId === peerId);
const isFixedPeer = this.fixedPeers.find(peer => peer.peerId === peerId);
return !!isSeedPeer || !!isWhitelistedPeer || !!isFixedPeer;
}
public addBannedPeer(peerId: string, peerBanTime: number): void {
const peerIpAddress = peerId.split(':')[0];
if (this.bannedIPs.has(peerIpAddress)) {
return;
}
// Whitelisted/FixedPeers are not allowed to be banned
if (
this.fixedPeers.find(peer => peer.peerId === peerId) ||
this.whitelistedPeers.find(peer => peer.peerId === peerId)
) {
return;
}
this._bannedIPs.add(peerIpAddress);
this.allPeers.forEach((peer: P2PPeerInfo) => {
if (peer.ipAddress === peerIpAddress) {
this.removePeer(peer);
}
});
// Unban temporary bans after peerBanTime
const unbanTimeout = setTimeout(() => {
this._removeBannedPeer(peerId);
}, peerBanTime);
this._unbanTimers.push(unbanTimeout);
}
private _removeBannedPeer(peerId: string): void {
const peerIpAddress = peerId.split(':')[0];
this._bannedIPs.delete(peerIpAddress);
}
private _assignPeerKind(peerInfo: P2PPeerInfo): P2PPeerInfo {
if (this.fixedPeers.find(peer => peer.ipAddress === peerInfo.ipAddress)) {
return {
...peerInfo,
internalState: {
...assignInternalInfo(peerInfo, this._secret),
peerKind: PeerKind.FIXED_PEER,
},
};
}
if (this.whitelistedPeers.find(peer => peer.ipAddress === peerInfo.ipAddress)) {
return {
...peerInfo,
internalState: {
...assignInternalInfo(peerInfo, this._secret),
peerKind: PeerKind.WHITELISTED_PEER,
},
};
}
if (this.seedPeers.find(peer => peer.ipAddress === peerInfo.ipAddress)) {
return {
...peerInfo,
internalState: {
...assignInternalInfo(peerInfo, this._secret),
peerKind: PeerKind.SEED_PEER,
},
};
}
return {
...peerInfo,
internalState: {
...assignInternalInfo(peerInfo, this._secret),
peerKind: PeerKind.NONE,
},
};
}
}
|
import * as ts from 'typescript';
import { ICompilerOptions } from './compiler-helper';
import { ICollectedTypeInfo } from './type-collector-snippet';
export interface IApplyTypesOptions extends ICompilerOptions {
/**
* A prefix that will be added in front of each type applied. You can use a javascript comment
* to mark the automatically added types. The prefix will be added after the colon character,
* just before the actual type.
*/
prefix?: string;
}
export declare function applyTypesToFile(source: string, typeInfo: ICollectedTypeInfo, options: IApplyTypesOptions, program?: ts.Program): string;
export declare function applyTypes(typeInfo: ICollectedTypeInfo, options?: IApplyTypesOptions): void;
|
class SecuritySystem:
STATE_ALARM_DISARMED = "disarmed"
STATE_ALARM_ARMED_HOME = "armed_home"
STATE_ALARM_ARMED_AWAY = "armed_away"
STATE_ALARM_TRIGGERED = "triggered"
DEVICE_ALARM_MODE = "alarm_mode"
def __init__(self):
# Initialize the security system with a default state of "disarmed"
self._state = self.STATE_ALARM_DISARMED
def arm_home(self):
# Set the alarm mode to "home" and change the state to "armed_home"
self.set_alarm_mode("home")
self._state = self.STATE_ALARM_ARMED_HOME
def arm_away(self):
# Set the alarm mode to "away" and change the state to "armed_away"
self.set_alarm_mode("away")
self._state = self.STATE_ALARM_ARMED_AWAY
def disarm(self):
# Change the state to "disarmed"
self._state = self.STATE_ALARM_DISARMED
def trigger_alarm(self):
# Change the state to "triggered"
self._state = self.STATE_ALARM_TRIGGERED
    def set_alarm_mode(self, mode):
        # Build the device payload and keep the mode only if it is supported.
        # A real implementation would push this payload to the alarm device.
        data = {self.DEVICE_ALARM_MODE: mode}
        if data[self.DEVICE_ALARM_MODE] in ("home", "away", "off"):
            self._alarm_mode = mode
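
# Usage sketch: walk the system through a typical arm / trigger / disarm cycle.
if __name__ == "__main__":
    system = SecuritySystem()
    system.arm_away()
    print(system._state)   # armed_away
    system.trigger_alarm()
    print(system._state)   # triggered
    system.disarm()
    print(system._state)   # disarmed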
|
#!/bin/bash
set -e
NC='\033[0m' # No Color
Light_Green='\033[1;32m'
echo (){
command echo -e $1
}
STARTMSG="${Light_Green}[ENTRYPOINT_POSTFIX]${NC}"
POSTFIX_PATH="/etc/postfix"
POSTFIX_CONFIG="$POSTFIX_PATH/main.cf"
SMTP_AUTH="$POSTFIX_PATH/smtp_auth"
GENERIC="$POSTFIX_PATH/generic_misp"
# Set Environment Variables in Config
postconf myhostname="$HOSTNAME"
# Domain for Outgoing Mail
postconf mydomain="$DOMAIN"
# Relayhost to send mails
postconf relayhost="$RELAYHOST"
# Allow only MISP Docker Container Access
postconf mynetworks="127.0.0.1/32 [::1]/128 $DOCKER_NETWORK"
# If you need to get more postfix output for a specified host normally the relayhost or misp-server
# if DEBUG_PEER isn't none set debug peer:
[ "$DEBUG_PEER" == "none" ] || postconf debug_peer_list="$DEBUG_PEER"
# Sender for local postfix outgoing Mails
#mysed SENDER_ADDRESS $SENDER_ADDRESS $GENERIC
echo "root $SENDER_ADDRESS" > $GENERIC
echo "@$DOMAIN $SENDER_ADDRESS" >> $GENERIC
# RELAY User and Password
echo -e "$RELAYHOST $RELAY_USER:$RELAY_PASSWORD" > $SMTP_AUTH
# Start Postfix
postmap $SMTP_AUTH
postmap $GENERIC
/usr/lib/postfix/sbin/post-install meta_directory=/etc/postfix create-missing
/usr/lib/postfix/sbin/master
# Check Postfix configuration
postconf -c /etc/postfix/
if [[ $? != 0 ]]; then
echo "$STARTMSG GPostfix configuration error, refusing to start."
exit 1
else
echo "$STARTMSG Start Postfix..." && postfix -c /etc/postfix/ start
sleep 126144000
fi
|
TERMUX_PKG_HOMEPAGE=https://lxqt.github.io
TERMUX_PKG_DESCRIPTION="Library providing components to build desktop file managers"
TERMUX_PKG_LICENSE="LGPL-2.1"
TERMUX_PKG_MAINTAINER="Simeon Huang <symeon@librehat.com>"
TERMUX_PKG_VERSION=0.17.1
TERMUX_PKG_REVISION=2
TERMUX_PKG_SRCURL="https://github.com/lxqt/libfm-qt/releases/download/${TERMUX_PKG_VERSION}/libfm-qt-${TERMUX_PKG_VERSION}.tar.xz"
TERMUX_PKG_SHA256=aa3f8f4d8035d106ed80e0b0ae5fced5ad61d4dac3f960392f3a71fb42a521a5
TERMUX_PKG_DEPENDS="qt5-qtbase, qt5-qtx11extras, glib, libxcb, libexif, menu-cache"
TERMUX_PKG_BUILD_DEPENDS="lxqt-build-tools, qt5-qtbase-cross-tools, qt5-qttools-cross-tools"
|
/*
TITLE Colored Starlike patterns Chapter12Exercise13.cpp
Bjarne Stroustrup "Programming: Principles and Practice Using C++"
COMMENT
Objective: Draw starline patterns by connecting
points on a superellipse.
Add color.
Input: -
Output: Graph on screen.
Author: <NAME>
Date: 17. 08. 2015
*/
#define PI 3.14159265359
//------------------------------------------------------------------------------------------------------------------------------
#include <iostream>
#include <vector>
#include "Simple_window.h"
#include "Chapter12Exercise13.h"
//------------------------------------------------------------------------------------------------------------------------------
int main()
{
// Superellipse parameters
// exponents
double n = 2/3.;
double m = 2/3.;
// coefficient
double A = 200.0;
double B = 200.0;
// center point of the graph
const int centX = x_max() / 2.;
const int centY = y_max() / 2.;
// incrementation step
double di = 0.01;
try
{
SuperEllipse se(A, B, n, m, centX, centY, di);
se.drawSuperEllipse();
se.drawStar();
}
catch(std::exception& e)
{
std::cerr << e.what() << std::endl;
}
catch(...)
{
std::cerr <<"Default exception!"<< std::endl;
}
}
|
<?php

namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
class Souscategorie extends Model
{
use HasFactory;
protected $fillable = ['name'];
public function categorie()
{
return $this->belongsTo(Categorie::class);
}
}
|
#!/bin/bash
. path.sh
format=pdf # pdf svg
output=
. utils/parse_options.sh
if [ $# != 3 ]; then
echo "usage: $0 [--format pdf|svg] [--output <path-to-output>] <utt-id> <lattice-ark> <word-list>"
echo "e.g.: $0 utt-0001 \"test/lat.*.gz\" tri1/graph/words.txt"
exit 1;
fi
uttid=$1
lat=$2
words=$3
tmpdir=$(mktemp -d); trap "rm -r $tmpdir" EXIT # cleanup
gunzip -c $lat | lattice-to-fst ark:- ark,scp:$tmpdir/fst.ark,$tmpdir/fst.scp || exit 1
! grep "^$uttid " $tmpdir/fst.scp && echo "ERROR : Missing utterance '$uttid' from gzipped lattice ark '$lat'" && exit 1
fstcopy "scp:grep '^$uttid ' $tmpdir/fst.scp |" "scp:echo $uttid $tmpdir/$uttid.fst |" || exit 1
fstdraw --portrait=true --osymbols=$words $tmpdir/$uttid.fst | dot -T${format} > $tmpdir/$uttid.${format}
if [ ! -z $output ]; then
cp $tmpdir/$uttid.${format} $output
fi
[ $format == "pdf" ] && evince $tmpdir/$uttid.pdf
[ $format == "svg" ] && eog $tmpdir/$uttid.svg
exit 0
|
#!/usr/bin/env python
from PySide import QtCore, QtGui, QtXmlPatterns
import schema_rc
from ui_schema import Ui_SchemaMainWindow
try:
# Python v2.
unicode
def encode_utf8(ba):
return unicode(ba, encoding='utf8')
def decode_utf8(qs):
return QtCore.QByteArray(str(qs))
except NameError:
# Python v3.
def encode_utf8(ba):
return str(ba, encoding='utf8')
def decode_utf8(qs):
return QtCore.QByteArray(bytes(qs, encoding='utf8'))
class XmlSyntaxHighlighter(QtGui.QSyntaxHighlighter):
def __init__(self, parent=None):
super(XmlSyntaxHighlighter, self).__init__(parent)
self.highlightingRules = []
# Tag format.
format = QtGui.QTextCharFormat()
format.setForeground(QtCore.Qt.darkBlue)
format.setFontWeight(QtGui.QFont.Bold)
pattern = QtCore.QRegExp("(<[a-zA-Z:]+\\b|<\\?[a-zA-Z:]+\\b|\\?>|>|/>|</[a-zA-Z:]+>)")
self.highlightingRules.append((pattern, format))
# Attribute format.
format = QtGui.QTextCharFormat()
format.setForeground(QtCore.Qt.darkGreen)
pattern = QtCore.QRegExp("[a-zA-Z:]+=")
self.highlightingRules.append((pattern, format))
# Attribute content format.
format = QtGui.QTextCharFormat()
format.setForeground(QtCore.Qt.red)
pattern = QtCore.QRegExp("(\"[^\"]*\"|'[^']*')")
self.highlightingRules.append((pattern, format))
# Comment format.
self.commentFormat = QtGui.QTextCharFormat()
self.commentFormat.setForeground(QtCore.Qt.lightGray)
self.commentFormat.setFontItalic(True)
self.commentStartExpression = QtCore.QRegExp("<!--")
self.commentEndExpression = QtCore.QRegExp("-->")
def highlightBlock(self, text):
for pattern, format in self.highlightingRules:
expression = QtCore.QRegExp(pattern)
index = expression.indexIn(text)
while index >= 0:
length = expression.matchedLength()
self.setFormat(index, length, format)
index = expression.indexIn(text, index + length)
self.setCurrentBlockState(0)
startIndex = 0
if self.previousBlockState() != 1:
startIndex = self.commentStartExpression.indexIn(text)
while startIndex >= 0:
endIndex = self.commentEndExpression.indexIn(text, startIndex)
if endIndex == -1:
self.setCurrentBlockState(1)
commentLength = text.length() - startIndex
else:
commentLength = endIndex - startIndex + self.commentEndExpression.matchedLength()
self.setFormat(startIndex, commentLength, self.commentFormat)
startIndex = self.commentStartExpression.indexIn(text,
startIndex + commentLength)
class MessageHandler(QtXmlPatterns.QAbstractMessageHandler):
def __init__(self):
super(MessageHandler, self).__init__()
self.m_description = ""
self.m_sourceLocation = QtXmlPatterns.QSourceLocation()
def statusMessage(self):
return self.m_description
def line(self):
return self.m_sourceLocation.line()
def column(self):
return self.m_sourceLocation.column()
def handleMessage(self, type, description, identifier, sourceLocation):
self.m_description = description
self.m_sourceLocation = sourceLocation
class MainWindow(QtGui.QMainWindow, Ui_SchemaMainWindow):
def __init__(self):
QtGui.QMainWindow.__init__(self)
self.setupUi(self)
XmlSyntaxHighlighter(self.schemaView.document())
XmlSyntaxHighlighter(self.instanceEdit.document())
self.schemaSelection.addItem("Contact Schema")
self.schemaSelection.addItem("Recipe Schema")
self.schemaSelection.addItem("Order Schema")
self.instanceSelection.addItem("Valid Contact Instance")
self.instanceSelection.addItem("Invalid Contact Instance")
self.schemaSelection.currentIndexChanged[int].connect(self.schemaSelected)
self.instanceSelection.currentIndexChanged[int].connect(self.instanceSelected)
self.validateButton.clicked.connect(self.validate)
self.instanceEdit.textChanged.connect(self.textChanged)
self.validationStatus.setAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignVCenter)
self.schemaSelected(0)
self.instanceSelected(0)
def schemaSelected(self, index):
self.instanceSelection.clear()
if index == 0:
self.instanceSelection.addItem("Valid Contact Instance")
self.instanceSelection.addItem("Invalid Contact Instance")
elif index == 1:
self.instanceSelection.addItem("Valid Recipe Instance")
self.instanceSelection.addItem("Invalid Recipe Instance")
elif index == 2:
self.instanceSelection.addItem("Valid Order Instance")
self.instanceSelection.addItem("Invalid Order Instance")
self.textChanged()
schemaFile = QtCore.QFile(':/schema_%d.xsd' % index)
schemaFile.open(QtCore.QIODevice.ReadOnly)
schemaData = schemaFile.readAll()
self.schemaView.setPlainText(encode_utf8(schemaData))
self.validate()
def instanceSelected(self, index):
index += 2 * self.schemaSelection.currentIndex()
instanceFile = QtCore.QFile(':/instance_%d.xml' % index)
instanceFile.open(QtCore.QIODevice.ReadOnly)
instanceData = instanceFile.readAll()
self.instanceEdit.setPlainText(encode_utf8(instanceData))
self.validate()
def validate(self):
schemaData = decode_utf8(self.schemaView.toPlainText())
instanceData = decode_utf8(self.instanceEdit.toPlainText())
messageHandler = MessageHandler()
schema = QtXmlPatterns.QXmlSchema()
schema.setMessageHandler(messageHandler)
schema.load(schemaData, QtCore.QUrl())
errorOccurred = False
if not schema.isValid():
errorOccurred = True
else:
validator = QtXmlPatterns.QXmlSchemaValidator(schema)
if not validator.validate(instanceData):
errorOccurred = True
if errorOccurred:
self.validationStatus.setText(messageHandler.statusMessage())
self.moveCursor(messageHandler.line(), messageHandler.column())
background = QtCore.Qt.red
else:
self.validationStatus.setText("validation successful")
background = QtCore.Qt.green
styleSheet = 'QLabel {background: %s; padding: 3px}' % QtGui.QColor(background).lighter(160).name()
self.validationStatus.setStyleSheet(styleSheet)
def textChanged(self):
self.instanceEdit.setExtraSelections([])
def moveCursor(self, line, column):
self.instanceEdit.moveCursor(QtGui.QTextCursor.Start)
for i in range(1, line):
self.instanceEdit.moveCursor(QtGui.QTextCursor.Down)
for i in range(1, column):
self.instanceEdit.moveCursor(QtGui.QTextCursor.Right)
extraSelections = []
selection = QtGui.QTextEdit.ExtraSelection()
lineColor = QtGui.QColor(QtCore.Qt.red).lighter(160)
selection.format.setBackground(lineColor)
selection.format.setProperty(QtGui.QTextFormat.FullWidthSelection, True)
selection.cursor = self.instanceEdit.textCursor()
selection.cursor.clearSelection()
extraSelections.append(selection)
self.instanceEdit.setExtraSelections(extraSelections)
self.instanceEdit.setFocus()
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
|
const { defaults } = require('jest-config')
module.exports = {
preset: defaults.preset,
verbose: process.env.NODE_ENV !== 'production',
moduleFileExtensions: ['ts', 'js', 'json', 'vue'],
transform: {
'^.+\\.vue$': 'vue-jest',
'.+\\.(css|styl|less|sass|scss|svg|png|jpg|ttf|woff|woff2)$': 'jest-transform-stub',
'^.+\\.tsx?$': 'ts-jest'
},
transformIgnorePatterns: ['node_modules'],
moduleNameMapper: {
'^@/(.*)$': '<rootDir>/src/$1',
'^&/(.*)$': '<rootDir>/types/$1'
},
snapshotSerializers: ['jest-serializer-vue'],
testMatch: [process.env.TEST_MODE === 'percy' ? '**/tests/percy/index.ts' : '**/tests/unit/**/*.spec.ts'],
testURL: 'http://localhost',
watchPlugins: [
'jest-watch-typeahead/filename',
'jest-watch-typeahead/testname'
],
globals: {
'ts-jest': {
babelConfig: true
}
},
collectCoverage: process.env.TEST_MODE !== 'percy',
collectCoverageFrom: [
'src/**/*.{js,ts,vue}',
'!src/(styleguide|index|storyscript).ts',
'!src/components/index.ts',
'!src/utils/*.{js,ts}',
'!src/directives/*.ts',
'!src/store/**/*.{js,ts}',
'!src/docs/**/*.vue'
]
}
|
module Inspector
def attribute_for_inspect(attr_name)
value = self.send(attr_name)
if value.is_a?(String) && value.length > 50
"#{value[0..50]}...".inspect
elsif value.is_a?(Date) || value.is_a?(Time)
%("#{value.to_s(:db)}")
else
value.inspect
end
end
def inspect
if self.respond_to?(:inspect_include_fields)
attributes_as_nice_string = self.inspect_include_fields.collect { |name|
"#{name}: #{attribute_for_inspect(name)}"
}.compact.join(", ")
"#<#{self.class} #{attributes_as_nice_string}>"
else
super
end
end
end
|
import style from './style.module.css'
const AboutPage = () => {
return (
<>
<div className={style.wrap}>AboutPage</div>
</>
)
}
export default AboutPage
|
#!/bin/bash
hugo server --disableFastRender
|
def is_anagram(str1, str2):
# convert strings to lowercase
str1 = str1.lower()
str2 = str2.lower()
# remove whitespaces from strings
str1 = str1.replace(" ", "")
str2 = str2.replace(" ", "")
    # initialise a boolean to be returned
anagram = False
# Check if the length of the two strings is equal.
if len(str1) == len(str2):
# Sort characters in each strings
if (sorted(str1) == sorted(str2)):
anagram = True
# return boolean
return anagram
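
# Usage sketch (assumed example inputs); case and whitespace are ignored.
if __name__ == "__main__":
    print(is_anagram("Dormitory", "Dirty Room"))  # True
    print(is_anagram("hello", "world"))           # False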
|
function displayMap(){
var x = document.getElementById("mapid");
if (navigator.geolocation) {
navigator.geolocation.getCurrentPosition(showPosition);
} else {
x.innerHTML = "Geolocation is not supported by this browser.";
}
}
function showPosition(position) {
// `var zoom` is hoisted, so the original `if (!zoom)` check could never pick up
// a page-level value; read any global zoom explicitly and default to 14.
var zoom = (typeof window.zoom !== 'undefined' && window.zoom) ? window.zoom : 14;
if (typeof center !== 'undefined' ){
var mymap = L.map('mapid').setView([center.latitude,center.longitude], zoom);
}else{
var mymap = L.map('mapid').setView([position.coords.latitude,position.coords.longitude], zoom);
}
L.tileLayer('https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token={accessToken}', {
attribution: 'Map data © <a href="https://www.openstreetmap.org/">OpenStreetMap</a> contributors, <a href="https://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>',
maxZoom: 18,
id: 'mapbox.streets',
accessToken: '<KEY>'
}).addTo(mymap);
var positionIcon = L.icon({
iconUrl: '/img/position.png',
iconSize: [50, 50], // size of the icon
iconAnchor: [25, 25], // point of the icon which will correspond to marker's location
shadowAnchor: [4, 62], // the same for the shadow
popupAnchor: [-3, -76] // point from which the popup should open relative to the iconAnchor
});
L.marker([position.coords.latitude,position.coords.longitude], {icon: positionIcon}).addTo(mymap);
markers.forEach(function (marker) {
marker.addTo(mymap)
});
}
|
def list_reverse(sequence):
    if len(sequence) == 0:
        return []
    else:
        return list_reverse(sequence[1:]) + [sequence[0]]
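# Illustrative usage (added for clarity): the recursion peels off the head and
# appends it after the reversed tail, so
#     list_reverse([1, 2, 3])  # -> [3, 2, 1]
#     list_reverse([])         # -> []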
|
try:
    import pygments
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter
except ImportError:
    # Pygments is optional; fall back to plain text when it is unavailable.
    pygments = None


def can_colorize(s: str):
    """True if we can colorize the string, False otherwise."""
    if pygments is None:
        return False
    return True


def colorize_string(s: str) -> str:
    """Colorize the input string using Pygments if available, otherwise return the original string."""
    if can_colorize(s):
        lexer = get_lexer_by_name("python", stripall=True)
        formatter = TerminalFormatter()
        return highlight(s, lexer, formatter)
    else:
        return s
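# Minimal usage sketch (assumes a terminal that understands ANSI colors):
#     print(colorize_string("def add(a, b):\n    return a + b\n"))
# This prints highlighted Python source, or the plain source when Pygments is
# not installed.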
|
import java.beans.Transient;
import java.lang.annotation.*;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
public final class AnnotationTest {
public static enum TestElement {
FIRST, SECOND
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public static @interface CoolAnnotation {
String test();
boolean test1();
String[] arrayTest();
TestElement enumTest();
}
@Deprecated
private final String depField = "This field is deprecated";
public static void main(String[] args) {
try {
for (Field declaredField : AnnotationTest.class.getDeclaredFields()) {
for (Annotation annotation : declaredField.getAnnotations()) {
System.out.println(declaredField + " -> " + annotation);
}
}
for (Method declaredMethod : AnnotationTest.class.getDeclaredMethods()) {
for (Annotation annotation : declaredMethod.getAnnotations()) {
System.out.println(declaredMethod + " -> " + annotation);
}
for (Annotation[] parameterAnnotation : declaredMethod.getParameterAnnotations()) {
for (Annotation annotation : parameterAnnotation) {
System.out.println("(parameter) " + declaredMethod + " -> " + annotation);
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
@Transient(value = false)
@CoolAnnotation(test = "Wow so cool", test1 = false, arrayTest = {"element1", "element2", "element3"}, enumTest = TestElement.SECOND)
public void coolMethod(@Deprecated String param) {
System.out.println(param);
}
}
|
# Import the flask package
from flask import Flask, jsonify

# Create a new Flask instance
app = Flask(__name__)

# Create a new route for the API that accepts a search string for books
@app.route('/api/search/<string:search>', methods=['GET'])
def search(search):
    # Here would go code to query a database for books matching the search
    # string and to return the matches as JSON; an empty result list is
    # returned as a placeholder so the endpoint is runnable.
    results = []
    return jsonify(results)

# Finally, start the server
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0')
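# Example request once the server is running (Flask defaults to port 5000):
#     curl http://localhost:5000/api/search/dune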
|
package com.cumbari.dps.protocol;
public class CategoryHits {
private final int numberOfCoupons;
private final String categoryId;
public CategoryHits(String categoryId, int noHits ) {
this.categoryId = categoryId;
this.numberOfCoupons = noHits;
}
public int getNumberOfCoupons() {
return numberOfCoupons;
}
public String getCategoryId() {
return categoryId;
}
}
|
<gh_stars>0
//
// NSString+ZWTime.h
// QQMusic
//
// Created by 郑亚伟 on 2016/12/29.
// Copyright © 2016年 郑亚伟. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface NSString (ZWTime)
+ (NSString *)stringWithTime:(NSTimeInterval)time;
@end
|
#!/usr/bin/env bash
# shellcheck disable=SC2002,SC2155
# Copyright 2019 Kohl's Department Stores, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euxo pipefail
TAG_OWNER="gitopsconfig.eunomia.kohls.io/owner"
TAG_APPLIED="gitopsconfig.eunomia.kohls.io/applied"
# this is needed because we want the current namespace to be set as default if a namespace is not specified.
function setContext() {
# shellcheck disable=SC2154
$kubectl config set-context current --namespace="$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace)"
$kubectl config use-context current
}
function kube() {
$kubectl \
-s https://kubernetes.default.svc:443 \
--token "$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" \
--certificate-authority=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt \
"$@"
}
# addLabels OWNER TIMESTAMP - patches the YAML&JSON files in $MANIFEST_DIR,
# adding labels tracking the OWNER and TIMESTAMP. The labels are intended to be
# used later in function deleteByOldLabels.
function addLabels() {
local owner="$1"
local timestamp="$2"
local tmpdir="$(mktemp -d)"
# shellcheck disable=SC2044
for file in $(find "$MANIFEST_DIR" -regextype posix-extended -iregex '.*\.(ya?ml|json)'); do
cat "$file" |
yq -y -s "map(select(.!=null)|setpath([\"metadata\",\"labels\",\"$TAG_OWNER\"]; \"$owner\"))|.[]" |
yq -y -s "map(select(.!=null)|setpath([\"metadata\",\"labels\",\"$TAG_APPLIED\"]; \"$timestamp\"))|.[]" \
>"$tmpdir/labeled"
# We must use a helper file (we can't do this in a single step), as the file would be truncated if we read from and wrote to it in one pipeline
cat "$tmpdir/labeled" >"$file"
done
}
# deleteByOldLabels OWNER [TIMESTAMP] - deletes all kubernetes resources which have
# the OWNER label as provided [optional: but TIMESTAMP label different than provided].
function deleteByOldLabels() {
if [ "$DELETE_MODE" == "None" ]; then
echo "DELETE_MODE is set to None; Skipping deletion by old labels step."
exit 0
else
local owner="$1"
local timestamp="${2:-}"
local allKinds="$(kube api-resources --verbs=list,delete -o name | paste -sd, -)"
local ownedKinds="$(kube get "$allKinds" --ignore-not-found \
-l "$TAG_OWNER==$owner" \
-o jsonpath="{range .items[*]}{.kind} {.apiVersion}{'\n'}{end}" | # e.g. "Pod v1" OR "StorageClass storage.k8s.io/v1"
sort -u |
awk -F'[ /]' '{if (NF==2) {print $1} else {print $1"."$3"."$2}}' | # e.g. "Pod" OR "StorageClass.v1.storage.k8s.io"
paste -sd, -)"
if [ -z "$ownedKinds" ]; then
return
fi
local filter="${TAG_OWNER}==${owner}"
if [[ "${timestamp}" ]]; then
filter="${filter},${TAG_APPLIED}!=${timestamp}"
# Retrieve all resources owned by the GitOpsConfig that doesn't match the current jobs timestamp
# Check the timestamp on all of the resources and ONLY delete the resource if the timestamp label is older than the current job timestamp
# shellcheck disable=SC2005
echo "$(kube get "${ownedKinds}" -l "${filter}" -o yaml)" >/tmp/check_deletion.yaml
local resource_count=$(($(yq -y '.items | length' /tmp/check_deletion.yaml | head -qn 1) - 1))
if [[ "$resource_count" -ge "0" ]]; then
for i in $(seq 0 $resource_count); do
local resource_timestamp="$(yq -r -y '.items['"$i"'].metadata.labels."'$TAG_APPLIED'"' /tmp/check_deletion.yaml | head -qn 1)"
if [[ "$resource_timestamp" -lt "$timestamp" ]]; then
local delete_name=$(yq -y '.items['"$i"'].metadata.name' /tmp/check_deletion.yaml | head -qn 1)
local delete_kind=$(yq -y '.items['"$i"'].kind' /tmp/check_deletion.yaml | head -qn 1)
local delete_namespace_exists=$(yq -y '.items['"$i"'].metadata | has("namespace")' /tmp/check_deletion.yaml | head -qn 1)
if [[ "$delete_namespace_exists" =~ "false" ]]; then
kube delete --wait=false "$delete_kind" "$delete_name"
else
local delete_namespace=$(yq -y '.items['"$i"'].metadata.namespace' /tmp/check_deletion.yaml | head -qn 1)
kube delete --wait=false "$delete_kind" "$delete_name" -n "$delete_namespace"
fi
fi
done
fi
else
kube delete --wait=false "${ownedKinds}" -l "${filter}"
fi
fi
}
function createUpdateResources() {
local owner="$1"
local timestamp="$(date +%s)"
# Check if directory contains only hidden files like .gitkeep, or .gitignore.
# This would mean that the user purposefully wanted to track an empty directory in git.
# https://git.wiki.kernel.org/index.php/Git_FAQ#Can_I_add_empty_directories.3F
if [[ -z $(ls "${MANIFEST_DIR}") ]]; then
echo "Manifest directory empty, skipping"
return
elif [[ -z $(find "$MANIFEST_DIR" -regextype posix-extended -iregex '.*\.(ya?ml|json)') ]]; then
echo "ERROR - no files with .yaml, .yml, or .json extension in manifest directory"
exit 1
fi
case "$CREATE_MODE" in
Apply)
addLabels "$owner" "$timestamp"
appendResourceVersion.py
kube apply -R -f "$MANIFEST_DIR"
deleteByOldLabels "$owner" "$timestamp"
;;
Create)
kube create -R -f "$MANIFEST_DIR"
;;
Delete)
kube delete --wait=false -R -f "$MANIFEST_DIR"
;;
Patch)
kube patch -R -f "$MANIFEST_DIR"
;;
Replace)
kube replace -R -f "$MANIFEST_DIR"
;;
None) ;;
esac
}
echo "Managing Resources"
setContext
# NOTE: Kubernetes currently requires that first *and last* character of
# label values are alphanumerical - we're adding the "own" prefix & suffix to
# ensure that. Also, Kubernetes requires it to be <=63 chars long, so we're
# taking an MD5 hash of the actual name (an MD5 hex digest is 32 chars long).
# See: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#syntax-and-character-set
owner="own.$(echo "$NAMESPACE $GITOPSCONFIG_NAME" | md5sum | awk '{print$1}').own"
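# For illustration (hypothetical values): with NAMESPACE="team-a" and
# GITOPSCONFIG_NAME="web-app", the owner label value looks like
# "own.<32-hex-md5-digest>.own" - 40 characters in total, which keeps both
# ends alphanumeric and stays safely under the 63-character label limit.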
case "$ACTION" in
create) createUpdateResources "$owner" ;;
delete) deleteByOldLabels "$owner" ;;
esac
|
import { Component, OnInit } from '@angular/core';
import { Observable } from 'rxjs';
import { AuthenticationService } from 'src/app/services/authentication.service';
@Component({
selector: 'app-user',
templateUrl: './user.component.html',
styleUrls: ['./user.component.scss']
})
export class UserComponent implements OnInit {
public user$: Observable<any>;
constructor(private auth: AuthenticationService) {
this.user$ = this.auth.getOIDCUser();
}
ngOnInit(): void {
}
triggerSignout(): void {
this.auth.signout();
}
}
|
pkg_name=mysql-client
pkg_origin=core
pkg_version=5.7.32
pkg_maintainer='The Habitat Maintainers <humans@habitat.sh>'
pkg_license=('GPL-2.0')
pkg_source=http://dev.mysql.com/get/Downloads/MySQL-5.7/mysql-${pkg_version}.tar.gz
pkg_shasum=1f4b59b43f82de4ccf4ba9cfce087318a192012a752aee8f66ca16f73bb082c9
pkg_upstream_url=https://www.mysql.com/
pkg_description="MySQL Client Tools"
pkg_deps=(
core/coreutils
core/gawk
core/gcc-libs
core/glibc
core/grep
core/inetutils
core/ncurses
core/openssl
core/pcre
core/perl
core/procps-ng
core/sed
)
pkg_build_deps=(
core/boost159
core/cmake
core/diffutils
core/gcc
core/make
core/patch
)
pkg_bin_dirs=(bin)
pkg_include_dirs=(include)
pkg_lib_dirs=(lib)
pkg_dirname="mysql-${pkg_version}"
do_build() {
cmake . -DLOCAL_BOOST_DIR="$(pkg_path_for core/boost159)" \
-DBOOST_INCLUDE_DIR="$(pkg_path_for core/boost159)"/include \
-DWITH_BOOST="$(pkg_path_for core/boost159)" \
-DCURSES_LIBRARY="$(pkg_path_for core/ncurses)/lib/libcurses.so" \
-DCURSES_INCLUDE_PATH="$(pkg_path_for core/ncurses)/include" \
-DWITH_SSL=yes \
-DOPENSSL_INCLUDE_DIR="$(pkg_path_for core/openssl)/include" \
-DOPENSSL_LIBRARY="$(pkg_path_for core/openssl)/lib/libssl.so" \
-DCRYPTO_LIBRARY="$(pkg_path_for core/openssl)/lib/libcrypto.so" \
-DWITHOUT_SERVER:BOOL=ON \
-DCMAKE_INSTALL_PREFIX="$pkg_prefix"
make --jobs="$(nproc)"
}
do_install() {
do_default_install
# Remove things we don't need
rm "$pkg_prefix/lib/"*.a "$pkg_prefix/bin/mysqld_"*
fix_interpreter "$pkg_prefix/bin/mysqldumpslow" core/perl bin/perl
}
do_check() {
ctest
}
|
/*
* Copyright (c) 2019-2021 GeyserMC. http://geysermc.org
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @author GeyserMC
* @link https://github.com/GeyserMC/Floodgate
*/
package org.geysermc.floodgate.pluginmessage.channel;
import com.google.inject.Inject;
import java.nio.charset.StandardCharsets;
import java.util.UUID;
import org.geysermc.floodgate.platform.pluginmessage.PluginMessageUtils;
import org.geysermc.floodgate.pluginmessage.PluginMessageChannel;
public class TransferChannel implements PluginMessageChannel {
@Inject private PluginMessageUtils pluginMessageUtils;
@Override
public String getIdentifier() {
return "floodgate:transfer";
}
@Override
public Result handleProxyCall(
byte[] data,
UUID targetUuid,
String targetUsername,
Identity targetIdentity,
UUID sourceUuid,
String sourceUsername,
Identity sourceIdentity) {
if (sourceIdentity == Identity.SERVER) {
// send it to the client
return Result.forward();
}
if (sourceIdentity == Identity.PLAYER) {
handleServerCall(data, targetUuid, targetUsername);
}
return Result.handled();
}
@Override
public Result handleServerCall(byte[] data, UUID targetUuid, String targetUsername) {
return Result.kick("I'm sorry, I'm unable to transfer a server :(");
}
public boolean sendTransfer(UUID player, String address, int port) {
byte[] addressBytes = address.getBytes(StandardCharsets.UTF_8);
byte[] data = new byte[addressBytes.length + 4];
data[0] = (byte) (port >> 24);
data[1] = (byte) (port >> 16);
data[2] = (byte) (port >> 8);
data[3] = (byte) (port);
System.arraycopy(addressBytes, 0, data, 4, addressBytes.length);
return pluginMessageUtils.sendMessage(player, false, getIdentifier(), data);
}
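    // Illustrative payload layout (example values, not from the original source):
    // for port 25565 and address "play.example.org" the data array starts with
    // the big-endian port bytes {0x00, 0x00, 0x63, 0xDD}, followed by the UTF-8
    // bytes of the address string.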
}
|
def evenOddDifference(arr):
    # Initialize difference
    diff = 0
    # Calculate sum of all elements at even indices
    evenSum = 0
    for i in range(0, len(arr), 2):
        evenSum += arr[i]
    # Calculate sum of all elements at odd indices
    oddSum = 0
    for i in range(1, len(arr), 2):
        oddSum += arr[i]
    # compute the difference
    diff = evenSum - oddSum
    # return the absolute difference
    return abs(diff)


# Driver code
arr = [1, 2, 3, 4, 5]
result = evenOddDifference(arr)
print(result)
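# For the driver input above the even-index sum is 1 + 3 + 5 = 9 and the
# odd-index sum is 2 + 4 = 6, so the script prints 3.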
|
package splitmix64_test
import (
"testing"
"github.com/db47h/rand64/v3/splitmix64"
)
const (
SEED1 = 1387366483214
)
var values = [...]uint64{
0xDDE04155BF79DF63,
0xFCFED2E9D540B529,
0x4C5AA74B9BE7FF3E,
0xA38A0EF197E488D9,
0xEDA0BA12AA8B5343,
0x94AC0EE844BA7CB6,
0x644375EBE6F55AAF,
0xBD7DF1EF1C84093D,
0xDBDB00E0A41BE9AB,
0xC7A8EB53EB467566,
}
func TestRng(t *testing.T) {
var rng splitmix64.Rng
rng.Seed(SEED1)
for _, v := range values {
n := rng.Uint64()
if n != v {
t.Fatalf("Expected %X, got %X", v, n)
}
}
rng.Seed(SEED1)
for _, v := range values {
n := rng.Int63()
if n != int64(v>>1) {
t.Fatalf("Expected %X, got %X", int64(v>>1), n)
}
}
}
|
import math


def logarithm(number):
    if number <= 0:
        return None
    else:
        return math.log(number)
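# Illustrative usage: logarithm(math.e) is approximately 1.0, while
# logarithm(0) and logarithm(-5) both return None.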
|
/*
* Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (C) 2009 - 2011 <NAME> (i-net software)
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.font;
import java.awt.Font;
import java.lang.reflect.Method;
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import javax.swing.plaf.FontUIResource;
import cli.System.Drawing.FontFamily;
import ikvm.internal.NotYetImplementedError;
/*
* Interface between Java Fonts (java.awt.Font) and the underlying
* font files/native font resources and the Java and native font scalers.
*/
public final class FontManager {
public static final int NO_FALLBACK = 0;
public static final int PHYSICAL_FALLBACK = 1;
public static final int LOGICAL_FALLBACK = 2;
/* deprecated, unsupported hack - actually invokes a bug! */
private static boolean usePlatformFontMetrics = false;
private static ConcurrentHashMap<String, Font2D> fontNameCache = new ConcurrentHashMap<String, Font2D>();
private static final Method getFont2D;
static{
try{
getFont2D = Font.class.getDeclaredMethod("getFont2D");
getFont2D.setAccessible(true);
}catch(NoSuchMethodException ex){
NoClassDefFoundError error = new NoClassDefFoundError(ex.toString());
error.initCause(ex);
throw error;
}
}
/* Revise the implementation to in fact mean "font is a composite font.
* This ensures that Swing components will always benefit from the
* fall back fonts
*/
public static boolean fontSupportsDefaultEncoding(Font font) {
// In Java the font would have to be an instance of CompositeFont, but
// because .NET fonts are (I think) all already composite fonts, we can simply
// return true here and do not need to implement CompositeFont.
return true;
}
/**
* This method is provided for internal and exclusive use by Swing.
*
* It may be used in conjunction with fontSupportsDefaultEncoding(Font)
* In the event that a desktop properties font doesn't directly
* support the default encoding, (ie because the host OS supports
* adding support for the current locale automatically for native apps),
* then Swing calls this method to get a font which uses the specified
* font for the code points it covers, but also supports this locale
* just as the standard composite fonts do.
* Note: this will over-ride any setting where an application
* specifies it prefers locale specific composite fonts.
* The logic for this, is that this method is used only where the user or
* application has specified that the native L&F be used, and that
* we should honour that request to use the same font as native apps use.
*
* The behaviour of this method is to construct a new composite
* Font object that uses the specified physical font as its first
* component, and adds all the components of "dialog" as fall back
* components.
* The method currently assumes that only the size and style attributes
* are set on the specified font. It doesn't copy the font transform or
* other attributes because they aren't set on a font created from
* the desktop. This will need to be fixed if use is broadened.
*
* Operations such as Font.deriveFont will work properly on the
* font returned by this method for deriving a different point size.
* Additionally it tries to support a different style by calling
* getNewComposite() below. That also supports replacing slot zero
* with a different physical font but that is expected to be "rare".
* Deriving with a different style is needed because it's been shown
* that some applications try to do this for Swing FontUIResources.
* Also operations such as new Font(font.getFontName(..), Font.PLAIN, 14);
* will NOT yield the same result, as the new underlying CompositeFont
* cannot be "looked up" in the font registry.
* This returns a FontUIResource as that is the Font sub-class needed
* by Swing.
* Suggested usage is something like :
* FontUIResource fuir;
* Font desktopFont = getDesktopFont(..);
* // NOTE even if fontSupportsDefaultEncoding returns true because
* // you get Tahoma and are running in an English locale, you may
* // still want to just call getCompositeFontUIResource() anyway
* // as only then will you get fallback fonts - eg for CJK.
* if (FontManager.fontSupportsDefaultEncoding(desktopFont)) {
* fuir = new FontUIResource(..);
* } else {
* fuir = FontManager.getCompositeFontUIResource(desktopFont);
* }
* return fuir;
*/
public static FontUIResource getCompositeFontUIResource(Font font) {
throw new NotYetImplementedError();
}
public static Font2D getNewComposite(String family, int style, Font2D handle) {
throw new NotYetImplementedError();
}
/*
* return String representation of style prepended with "."
* This is useful for performance to avoid unnecessary string operations.
*/
private static String dotStyleStr(int num) {
switch(num){
case Font.BOLD:
return ".bold";
case Font.ITALIC:
return ".italic";
case Font.ITALIC | Font.BOLD:
return ".bolditalic";
default:
return ".plain";
}
}
/*
* The client supplies a name and a style.
* The name could be a family name, or a full name.
* A font may exist with the specified style, or it may
* exist only in some other style. For non-native fonts the scaler
* may be able to emulate the required style.
*/
public static Font2D findFont2D(String name, int style, int fallback){
String lowerCaseName = name.toLowerCase(Locale.ENGLISH);
String mapName = lowerCaseName + dotStyleStr(style);
Font2D font2D = fontNameCache.get(mapName);
if(font2D != null){
return font2D;
}
font2D = new PhysicalFont(name,style);
fontNameCache.put(mapName, font2D);
return font2D;
}
/**
* Create a new Font2D without caching. This is used from createFont
*
* @param family
* .NET FontFamily
* @param style
* the style
* @return a Font2D
*/
public static Font2D createFont2D( FontFamily family, int style ) {
return new PhysicalFont( family, style );
}
/* This method can be more efficient as it will only need to
* do the lookup once, and subsequent calls on the java.awt.Font
* instance can utilise the cached Font2D on that object.
* It's unfortunate it needs to be a native method, but the font2D
* variable has to be private.
*/
public static Font2D getFont2D(Font font){
try{
return (Font2D)getFont2D.invoke(font);
}catch(Exception ex){
throw new RuntimeException(ex);
}
}
/* Stuff below was in NativeFontWrapper and needed a new home */
/*
* Workaround for apps which are dependent on a font metrics bug
* in JDK 1.1. This is an unsupported win32 private setting.
*/
public static boolean usePlatformFontMetrics() {
return usePlatformFontMetrics;
}
/* This method doesn't check if alternates are selected in this app
* context. It's used by the FontMetrics caching code which in such
* a case cannot retrieve a cached metrics solely on the basis of
* the Font.equals() method since it needs to also check if the Font2D
* is the same.
* We also use non-standard composites for Swing native L&F fonts on
* Windows. In that case the policy is that the metrics reported are
* based solely on the physical font in the first slot which is the
* visible java.awt.Font. So in that case the metrics cache which tests
* the Font does what we want. In the near future when we expand the GTK
* logical font definitions we may need to revisit this if GTK reports
* combined metrics instead. For now though this test can be simple.
*/
static boolean maybeUsingAlternateCompositeFonts() {
// TODO Auto-generated method stub
return false;
}
public static synchronized void preferLocaleFonts() {
// TODO Auto-generated method stub
}
public static synchronized void preferProportionalFonts() {
// TODO Auto-generated method stub
}
public static boolean registerFont(Font font) {
/* This method should not be called with "null".
* It is the caller's responsibility to ensure that.
*/
// TODO Auto-generated method stub
return false;
}
/* This is called by Swing passing in a fontconfig family name
* such as "sans". In return Swing gets a FontUIResource instance
* that has queried fontconfig to resolve the font(s) used for this.
* Fontconfig will if asked return a list of fonts to give the largest
* possible code point coverage.
* For now we use only the first font returned by fontconfig, and
* back it up with the most closely matching JDK logical font.
* Essentially this means pre-pending what we return now with fontconfig's
* preferred physical font. This could lead to some duplication in cases,
* if we already included that font later. We probably should remove such
* duplicates, but it is not a significant problem. It can be addressed
* later as part of creating a Composite which uses more of the
* same fonts as fontconfig. At that time we also should pay more
* attention to the special rendering instructions fontconfig returns,
* such as whether we should prefer embedded bitmaps over antialiasing.
* There's no way to express that via a Font at present.
*/
public static FontUIResource getFontConfigFUIR( String fcFamily, int style, int size ) {
return new FontUIResource( fcFamily, style, size );
}
/* The following fields and methods which relate to layout
* perhaps belong in some other class but FontManager is already
* widely used as an entry point for other JDK code that needs
* access to the font system internals.
*/
/**
* Referenced by code in the JDK which wants to test for the
* minimum char code for which layout may be required.
* Note that even basic latin text can benefit from ligatures,
* eg "ffi" but we presently apply those only if explicitly
* requested with TextAttribute.LIGATURES_ON.
* The value here indicates the lowest char code for which failing
* to invoke layout would prevent acceptable rendering.
*/
public static final int MIN_LAYOUT_CHARCODE = 0x0300;
/**
* Referenced by code in the JDK which wants to test for the
* maximum char code for which layout may be required.
* Note this does not account for supplementary characters
* where the caller interprets 'layout' to mean any case where
* one 'char' (ie the java type char) does not map to one glyph
*/
public static final int MAX_LAYOUT_CHARCODE = 0x206F;
/* If the character code falls into any of a number of unicode ranges
* where we know that simple left->right layout mapping chars to glyphs
* 1:1 and accumulating advances is going to produce incorrect results,
* we want to know this so the caller can use a more intelligent layout
* approach. A caller who cares about optimum performance may want to
* check the first case and skip the method call if its in that range.
* Although there's a lot of tests in here, knowing you can skip
* CTL saves a great deal more. The rest of the checks are ordered
* so that rather than checking explicitly if (>= start & <= end)
* which would mean all ranges would need to be checked so be sure
* CTL is not needed, the method returns as soon as it recognises
* the code point is outside of a CTL ranges.
* NOTE: Since this method accepts an 'int' it is assumed to properly
* represent a CHARACTER. ie it assumes the caller has already
* converted surrogate pairs into supplementary characters, and so
* can handle this case and doesn't need to be told such a case is
* 'complex'.
*/
static boolean isComplexCharCode(int code) {
if (code < MIN_LAYOUT_CHARCODE || code > MAX_LAYOUT_CHARCODE) {
return false;
}
else if (code <= 0x036f) {
// Trigger layout for combining diacriticals 0x0300->0x036f
return true;
}
else if (code < 0x0590) {
// No automatic layout for Greek, Cyrillic, Armenian.
return false;
}
else if (code <= 0x06ff) {
// Hebrew 0590 - 05ff
// Arabic 0600 - 06ff
return true;
}
else if (code < 0x0900) {
return false; // Syriac and Thaana
}
else if (code <= 0x0e7f) {
// if Indic, assume shaping for conjuncts, reordering:
// 0900 - 097F Devanagari
// 0980 - 09FF Bengali
// 0A00 - 0A7F Gurmukhi
// 0A80 - 0AFF Gujarati
// 0B00 - 0B7F Oriya
// 0B80 - 0BFF Tamil
// 0C00 - 0C7F Telugu
// 0C80 - 0CFF Kannada
// 0D00 - 0D7F Malayalam
// 0D80 - 0DFF Sinhala
// 0E00 - 0E7F if Thai, assume shaping for vowel, tone marks
return true;
}
else if (code < 0x1780) {
return false;
}
else if (code <= 0x17ff) { // 1780 - 17FF Khmer
return true;
}
else if (code < 0x200c) {
return false;
}
else if (code <= 0x200d) { // zwj or zwnj
return true;
}
else if (code >= 0x202a && code <= 0x202e) { // directional control
return true;
}
else if (code >= 0x206a && code <= 0x206f) { // directional control
return true;
}
return false;
}
/* This is almost the same as the method above, except it takes a
* char which means it may include undecoded surrogate pairs.
* The distinction is made so that code which needs to identify all
* cases in which we do not have a simple mapping from
* char->unicode character->glyph can be identified.
* For example measurement cannot simply sum advances of 'chars',
* the caret in editable text cannot advance one 'char' at a time, etc.
* These callers really are asking for more than whether 'layout'
* needs to be run, they need to know if they can assume 1->1
* char->glyph mapping.
*/
static boolean isNonSimpleChar(char ch) {
return
isComplexCharCode(ch) ||
(ch >= CharToGlyphMapper.HI_SURROGATE_START &&
ch <= CharToGlyphMapper.LO_SURROGATE_END);
}
/**
* If there is anything in the text which triggers a case
* where char->glyph does not map 1:1 in straightforward
* left->right ordering, then this method returns true.
* Scripts which might require it but are not treated as such
* due to JDK implementations will not return true.
* ie a 'true' return is an indication of the treatment by
* the implementation.
* Whether supplementary characters should be considered is dependent
* on the needs of the caller. Since this method accepts the 'char' type
* then such chars are always represented by a pair. From a rendering
* perspective these will all (in the cases I know of) still be one
* unicode character -> one glyph. But if a caller is using this to
* discover any case where it cannot make naive assumptions about
* the number of chars, and how to index through them, then it may
* need the option to have a 'true' return in such a case.
*/
public static boolean isComplexText(char [] chs, int start, int limit) {
for (int i = start; i < limit; i++) {
if (chs[i] < MIN_LAYOUT_CHARCODE) {
continue;
}
else if (isNonSimpleChar(chs[i])) {
return true;
}
}
return false;
}
}
|
#!/bin/bash
set -ex
VERSION="$(git describe --abbrev=8)"
## Collect files
pushd package/win/elm-format
tar zxvf "elm-format-${VERSION}-win-i386.tgz"
zip "elm-format-${VERSION}-win-i386.zip" elm-format.exe
popd
cp -v package/win/elm-format/elm-format-"${VERSION}"-win-i386.zip ./
for i in elm-format-${VERSION}-{mac-x64.tgz,win-i386.zip,linux-x64.tgz}; do
keybase pgp sign --detached --infile "$i" --outfile "$i".asc
# github-release upload --user avh4 --repo elm-format --tag "$VERSION" --file "$BUILD".tgz
# github-release upload --user avh4 --repo elm-format --tag "$VERSION" --file "$BUILD".tgz.asc
done
|
#!/usr/bin/env bash
set -e
pip install -r requirements-dev.txt
python setup.py doc
mv doc/html doc/"$CI_COMMIT_REF_NAME"
|
const verifyAPIToken = require('../../middle/clusterFunctions').verifyAPIToken;
const verifyNodeToken = require('../../middle/nodeFunctions').verifyNodeToken;
const mongoose = require('mongoose');
const Meta = require('../../models/meta');
const Cluster = require('../../models/cluster');
const Boom = require('boom');
//current format = {reading_name: reading_value}
module.exports = {
method: 'POST',
path: '/api/metas',
pre: [verifyAPIToken, verifyNodeToken],
handler: async (req, res) => {
try {
await verifyAPIToken(req, res);
await verifyNodeToken(req, res);
let data = req.body.metas, keys = Object.keys(data);
let metas = [];
for (let i = 0; i < keys.length; i++) {
//check and remove dups
let dup = await Meta.findOneAndDelete({ $and: [{ reading_name: keys[i] }, { reading_value: data[keys[i]] }, { created_at: data['time'] }] });
/*if (dup) {
console.log(`Duplicate Found: ${dup._id}`);
let cluster = await Cluster.find({_id: dup.cluster_id});
let index = cluster.historical_meta.findIndex((e) => {
return mongoose.Types.ObjectId(e.meta_id).equals(mongoose.Types.ObjectId(dup._id));
});
cluster.historical_meta = historical_meta.splice(0, index).concat(historical_meta.splice(index+1, historical_meta.length));
//console.log(cluster.historical_meta)
cluster.historical_meta = cluster.historical_meta
await cluster.save();
//await Cluster.update({_id: dup.cluster_id, api_token: req.body.api_token}, {$pull: {historical_meta: {$elemMatch: {meta_id: mongoose.Types.ObjectId(dup._id)}}}});
} */
let meta = new Meta();
meta.reading_name = keys[i];
meta.reading_value = data[keys[i]];
meta.created_at = data['time'];
await res.locals.node.addMeta(meta);
// collect the saved meta so the response is not always an empty array
metas.push(meta);
}
return res.send(metas);
} catch (err) {
return res.send(Boom.badRequest(err));
}
}
}
|
import { CONFIG_GENERAL_FUNCTION_COMPONENT_KEYS } from './componentConstants.js';
import prettifyComponentConfigError from './prettifyComponentConfigError.js';
import validateKeyValues from '../util/validateKeyValues.js';
import validateComponentName from './validateComponentName.js';
import validatePropertiesConfig from './validatePropertiesConfig.js';
import validateFunctionConfig from './validateFunctionConfig.js';
export default function validateConfigForFunctionalComponent(
config, platformAdaption) {
let err =
validateKeyValues(config,
key => CONFIG_GENERAL_FUNCTION_COMPONENT_KEYS.has(key))
|| validateComponentName(config.name)
|| validatePropertiesConfig(config)
|| validateFunctionConfig(config, 'initProcess', true);
if (err) {
throw prettifyComponentConfigError(err, config);
}
return err;
}
|
<filename>node_modules/@chakra-ui/theme/dist/types/components/editable.d.ts
declare const _default: {
parts: ("preview" | "input")[];
baseStyle: Partial<Record<"preview" | "input", import("@chakra-ui/styled-system").RecursiveCSSObject<import("@chakra-ui/styled-system").CSSWithMultiValues>>>;
};
export default _default;
//# sourceMappingURL=editable.d.ts.map
|
<filename>kernel/contract/bridge/xbridge.go
package bridge
import (
"fmt"
"io"
"path/filepath"
"github.com/xuperchain/xupercore/kernel/contract"
"github.com/xuperchain/xupercore/kernel/ledger"
"github.com/xuperchain/xupercore/protos"
)
// XBridge is used to register user virtual machines and to register vm.VirtualMachine implementations that can be recognized by the Xchain Core
type XBridge struct {
ctxmgr *ContextManager
syscallService *SyscallService
basedir string
vmconfigs map[ContractType]VMConfig
creators map[ContractType]InstanceCreator
xmodel ledger.XMReader
config ContractConfig
// debugLogger *log.Logger
*contractManager
}
type XBridgeConfig struct {
Basedir string
VMConfigs map[ContractType]VMConfig
XModel ledger.XMReader
Config ContractConfig
LogWriter io.Writer
Core contract.ChainCore
}
// New creates a new XBridge
func New(cfg *XBridgeConfig) (*XBridge, error) {
ctxmgr := NewContextManager()
xbridge := &XBridge{
ctxmgr: ctxmgr,
basedir: cfg.Basedir,
vmconfigs: cfg.VMConfigs,
creators: make(map[ContractType]InstanceCreator),
xmodel: cfg.XModel,
config: cfg.Config,
}
xbridge.contractManager = &contractManager{
xbridge: xbridge,
codeProvider: newCodeProviderFromXMReader(cfg.XModel),
}
syscallService := NewSyscallService(ctxmgr, xbridge)
xbridge.syscallService = syscallService
err := xbridge.initVM()
if err != nil {
return nil, err
}
// err = xbridge.initDebugLogger(cfg)
// if err != nil {
// return nil, err
// }
return xbridge, nil
}
func (v *XBridge) initVM() error {
types := []ContractType{TypeWasm, TypeNative, TypeEvm, TypeKernel}
for _, tp := range types {
vmconfig, ok := v.vmconfigs[tp]
if !ok {
// log.Error("config for contract type not found", "type", tp)
continue
}
if !vmconfig.IsEnable() {
// log.Info("contract type disabled", "type", tp)
continue
}
creatorConfig := &InstanceCreatorConfig{
Basedir: filepath.Join(v.basedir, vmconfig.DriverName()),
SyscallService: v.syscallService,
VMConfig: vmconfig,
}
creator, err := Open(tp, vmconfig.DriverName(), creatorConfig)
if err != nil {
return err
}
v.creators[tp] = creator
}
return nil
}
// func (v *XBridge) initDebugLogger(cfg *XBridgeConfig) error {
// // If debug logging is enabled and no custom writer is provided, open the log object from the config file
// if cfg.Config.EnableDebugLog && cfg.LogWriter == nil {
// debugLogger, err := log.OpenLog(&cfg.Config.DebugLog)
// if err != nil {
// return err
// }
// v.debugLogger = &debugLogger
// return nil
// }
// w := cfg.LogWriter
// if w == nil {
// w = ioutil.Discard
// }
// logger := log15.Root().New()
// logger.SetHandler(log15.StreamHandler(w, log15.LogfmtFormat()))
// v.debugLogger = &log.Logger{Logger: logger}
// return nil
// }
func (v *XBridge) getCreator(tp ContractType) InstanceCreator {
return v.creators[tp]
}
func (v *XBridge) NewContext(ctxCfg *contract.ContextConfig) (contract.Context, error) {
var desc *protos.WasmCodeDesc
var err error
if ctxCfg.Module == string(TypeKernel) {
desc = &protos.WasmCodeDesc{
ContractType: ctxCfg.Module,
}
} else {
// test if contract exists
desc, err = newCodeProvider(ctxCfg.State).GetContractCodeDesc(ctxCfg.ContractName)
if err != nil {
return nil, err
}
}
tp, err := getContractType(desc)
if err != nil {
return nil, err
}
vm := v.getCreator(tp)
if vm == nil {
return nil, fmt.Errorf("vm for contract type %s not supported", tp)
}
var cp ContractCodeProvider
// If a contract is currently being deployed, its code is fetched from the cache.
// For contract invocations the code is taken from the model instead, to avoid the transaction carrying a reference to the contract code.
if ctxCfg.ContractCodeFromCache {
cp = newCodeProvider(ctxCfg.State)
} else {
cp = newDescProvider(v.codeProvider, desc)
}
ctx := v.ctxmgr.MakeContext()
ctx.State = ctxCfg.State
ctx.ContractName = ctxCfg.ContractName
ctx.Initiator = ctxCfg.Initiator
ctx.AuthRequire = ctxCfg.AuthRequire
ctx.ResourceLimits = ctxCfg.ResourceLimits
ctx.CanInitialize = ctxCfg.CanInitialize
// ctx.Core = ctxCfg.Core
ctx.TransferAmount = ctxCfg.TransferAmount
ctx.ContractSet = ctxCfg.ContractSet
if ctx.ContractSet == nil {
ctx.ContractSet = make(map[string]bool)
ctx.ContractSet[ctx.ContractName] = true
}
// ctx.Logger = v.xbridge.debugLogger.New("contract", ctx.ContractName, "ctxid", ctx.ID)
release := func() {
v.ctxmgr.DestroyContext(ctx)
}
instance, err := vm.CreateInstance(ctx, cp)
if err != nil {
v.ctxmgr.DestroyContext(ctx)
return nil, err
}
ctx.Instance = instance
return &vmContextImpl{
ctx: ctx,
instance: instance,
release: release,
}, nil
}
|
#!/bin/bash
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
# Set the environment variables required by the build.
source "${KUBE_ROOT}/hack/config-go.sh"
# Go to the top of the tree.
cd "${KUBE_REPO_ROOT}"
# Check for `go` binary and set ${GOPATH}.
kube::setup_go_environment
# Use eval to preserve embedded quoted strings.
eval "goflags=(${GOFLAGS:-})"
# Filter out arguments that start with "-" and move them to goflags.
targets=()
for arg; do
if [[ "${arg}" == -* ]]; then
goflags+=("${arg}")
else
targets+=("${arg}")
fi
done
if [[ "${targets[@]+set}" != "set" ]]; then
targets=("...")
fi
rc=0
# Filter silly "exit status 1" lines and send main output to stdout.
# This is tricky - pipefail means any non-zero exit in a pipeline is reported,
# and errexit exits on error. Turning that into an || expression blocks the
# errexit. But $? is still not useful because grep will return an error when it
# receives no input, which is exactly what go vet produces on success. In short,
# if go vet fails (produces output), grep will succeed, but if go vet succeeds
# (produces no output) grep will fail. Then we just look at PIPESTATUS[0] which
# is go's exit code.
go vet "${goflags[@]:+${goflags[@]}}" "${targets[@]/#/./}" 2>&1 \
| grep -v "^exit status " \
|| rc=${PIPESTATUS[0]}
exit "${rc}"
|
#!/usr/bin/env bash
# Azure Environment
export RESOURCEGROUP_NAME=<my-resourcegroup>
export WEBAPP_NAME=<my-webapp-name>
export WEBAPP_PLAN_NAME=${WEBAPP_NAME}-appservice-plan
export REGION=westus
# Supply these secrets for PostgreSQL
export POSTGRES_SERVER_NAME=<my petstore-db name>
export POSTGRES_SERVER_ADMIN_LOGIN_NAME=<my postgres admin login name>
export POSTGRES_SERVER_ADMIN_PASSWORD=<my admin password>
export POSTGRES_DATABASE_NAME=<my postgres database name>
# Secrets composed from supplied secrets for PostgreSQL
export POSTGRES_SERVER_FULL_NAME=${POSTGRES_SERVER_NAME}.postgres.database.azure.com
export POSTGRES_CONNECTION_URL=jdbc:postgresql://${POSTGRES_SERVER_FULL_NAME}:5432/${POSTGRES_DATABASE_NAME}?ssl=true
export POSTGRES_SERVER_ADMIN_FULL_NAME=${POSTGRES_SERVER_ADMIN_LOGIN_NAME}@${POSTGRES_SERVER_NAME}
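# Illustrative result (hypothetical values): with POSTGRES_SERVER_NAME=petstore-db
# and POSTGRES_DATABASE_NAME=petstore, the composed connection URL becomes
# jdbc:postgresql://petstore-db.postgres.database.azure.com:5432/petstore?ssl=true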
# Supply these secrets for MySQL
export MYSQL_SERVER_NAME=<my petstore-db name>
export MYSQL_SERVER_ADMIN_LOGIN_NAME=<my mysql admin login name>
export MYSQL_SERVER_ADMIN_PASSWORD=<my mysql admin password>
export MYSQL_DATABASE_NAME=<my petstore database name>
# Secrets composed from supplied secrets for MySQL
export MYSQL_SERVER_FULL_NAME=${MYSQL_SERVER_NAME}.mysql.database.azure.com
export MYSQL_CONNECTION_URL=jdbc:mysql://${MYSQL_SERVER_FULL_NAME}:3306/${MYSQL_DATABASE_NAME}?ssl=true\&useLegacyDatetimeCode=false\&serverTimezone=GMT
export MYSQL_SERVER_ADMIN_FULL_NAME=${MYSQL_SERVER_ADMIN_LOGIN_NAME}\@${MYSQL_SERVER_NAME}
# FTP Secrets
# Use Azure CLI to get them
# az webapp deployment list-publishing-profiles -g ${RESOURCEGROUP_NAME} -n ${WEBAPP_NAME}
export FTP_HOST=<my ftp host>
export FTP_USERNAME=<my ftp user name>
export FTP_PASSWORD=<my ftp password>
#IPCONFIG
export DEVBOX_IP_ADDRESS=<my devbox>
|
// or just
export {setCookie, getCookie, checkLoginAdmin, doLogoutAdmin, checkLogin, doLogout} from './Authentication';
//export { default as AccordionGroup } from "./AccordionGroup";
|
<filename>js/search_qbank.js
function updateQueryStringParameter(uri, key, value) {
var re = new RegExp("([?&])" + key + "=.*?(&|$)", "i");
var separator = uri.indexOf('?') !== -1 ? "&" : "?";
if (uri.match(re)) {
return uri.replace(re, '$1' + key + "=" + value + '$2');
}
else {
return uri + separator + key + "=" + value;
}
}
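// Illustrative behaviour (values are examples only):
//   updateQueryStringParameter("/quiz?page=2", "search", "math")
//     -> "/quiz?page=2&search=math"
//   updateQueryStringParameter("/quiz?search=old", "search", "new")
//     -> "/quiz?search=new"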
function search_qbank(event,e){
if(e.keyCode=='13'){
s= $(event).val();
while(s.indexOf("+")>-1)
s =s.replace("+","%2B");
while(s.indexOf(" ")>-1)
s =s.replace(" ","+");
new_url=encodeURI(updateQueryStringParameter(window.location.href,'search',s));
window.location.assign(new_url);
}
}
function search_qbank1(){
s=$('#txtSearch').val();
while(s.indexOf("+")>-1)
s =s.replace("+","%2B");
while(s.indexOf(" ")>-1)
s =s.replace(" ","+");
new_url=encodeURI(updateQueryStringParameter(window.location.href,'search',s));
window.location.assign(new_url);
}
function search_quiz(event,e,site_url){
if(e.keyCode=='13'){
s= $(event).val();
while(s.indexOf("+")>-1)
s =s.replace("+","%2B");
while(s.indexOf(" ")>-1)
s =s.replace(" ","+");
new_url=encodeURI(updateQueryStringParameter(window.location.href,'search',s));
spilit_url = new_url.split("?");
new_url1 = site_url+"/quiz/index/0/grid?"+spilit_url["1"];
window.location.assign(new_url1);
}
}
function search_quiz1(site_url){
s=$('#txtSearch').val();
while(s.indexOf("+")>-1)
s =s.replace("+","%2B");
while(s.indexOf(" ")>-1)
s =s.replace(" ","+");
new_url=encodeURI(updateQueryStringParameter(window.location.href,'search',s));
spilit_url = new_url.split("?");
new_url1 = site_url+"/quiz/index/0/grid?"+spilit_url["1"];
window.location.assign(new_url1);
}
|
<reponame>healer1064/Gimbal
import fs from 'fs';
import path from 'path';
import program from 'commander';
import readPkg from 'read-pkg';
import updateNotifier from 'update-notifier';
import { preparseOptions } from '@/command';
import audit from '@/command/audit/program';
import Config from '@/config';
import processAudits from '@/config/audits';
import processJobs from '@/config/jobs';
import Logger, { setFromConfigs } from '@/logger';
import { CHILD_GIMBAL_PROCESS } from '@/utils/constants';
(async (): Promise<void> => {
const isBuilt = path.extname(__filename) === '.js';
if (!process.env[CHILD_GIMBAL_PROCESS]) {
const gimbal = fs.readFileSync(path.join(__dirname, 'ascii_art/gimbal.txt'), 'utf8');
/* eslint-disable-next-line no-console */
console.log(gimbal);
}
const packageJson = await readPkg({
cwd: isBuilt ? path.join(__dirname, '../../..') : path.join(__dirname, '..'),
});
program
.version(packageJson.version)
.description('A CLI tool for monitoring web performance in modern web projects')
// global options all command will receive
.option('--cwd [dir]', 'The directory to work in. Defaults to where the command was executed from.', process.cwd())
.option('--config [file]', 'The file to load as the configuration file.')
.option('--no-comment', 'Set to disable commenting results on the VCS')
.option('--no-check-thresholds', 'Set to disable checking thresholds.')
.option('--output-html [file]', 'The path to write the results as HTML to.')
.option('--output-json [file]', 'The path to write the results as JSON to.')
.option('--output-markdown [file]', 'The path to write the results as Markdown to.')
.option('--verbose', 'Turn on extra logging during command executions.')
// audit options
.option(
'--build-dir <dir>',
'Directory storing the build artifacts relative to the --cwd (defaults to "build")',
'build',
)
.option('--no-size', 'Disable checking resource sizes')
.option('--no-calculate-unused-source', 'Disable calculating unused CSS and JavaScript')
.option('--no-heap-snapshot', 'Disable getting a heap snapshot')
.option('--no-lighthouse', 'Disable the lighthouse auditing')
.option('--lighthouse-output-html <file>', 'Location to output the lighthouse HTML report to.')
.option(
'--route <route>',
'Route to run tests on.',
(value: string, previous: string | string[]): string[] => {
// means previous is just the defaultValue
if (!Array.isArray(previous)) {
return [value];
}
previous.push(value);
return previous;
},
'/',
);
// backwards compat so `gimbal audit` doesn't fail, we handle it below
program.command('audit');
// need to parse the options before commander kicks off so the config file
// is loaded. This way things like plugins will be ready
const options = preparseOptions();
try {
const config = await Config.load(options.cwd, options);
setFromConfigs();
// Notify of new package
updateNotifier({ pkg: packageJson }).notify();
// kick off commander
program.parse(process.argv);
if (config) {
const { audits, jobs } = config;
if (jobs && jobs.length) {
await processJobs(jobs, options);
} else if (audits && audits.length) {
await processAudits();
} else {
// no jobs so there is nothing to execute
// so let's show the help screen
program.help();
}
} else {
await audit.run();
}
Logger.log('Finished successfully');
process.exit(0);
} catch (e) {
Logger.log(e);
Logger.log('Finished with failure');
process.exit(1);
}
})();
|
#!/usr/bin/env bash
openssl req -x509 -out fullchain.pem -keyout privkey.pem \
-newkey rsa:2048 -nodes -sha256 \
-extensions EXT -config openssl.conf
|
<reponame>Head8che/CMPUT404-project-socialdistribution<filename>src/components/LogInModal.js
import React from "react"
import { Button, Modal, Form, InputGroup } from "react-bootstrap"
import { useForm } from "react-hook-form"
import { yupResolver } from "@hookform/resolvers/yup"
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"
import { faEyeSlash, faEye } from "@fortawesome/free-solid-svg-icons"
import { useUserHandler } from "../UserContext"
import { useHistory } from "react-router-dom"
import axios from "axios"
import * as Yup from "yup"
import { getBackEndHostWithSlash } from "../utils"
export default function LogInModal({ show, onHide, closeModal }) {
// boolean for showing or hiding the password
const [passwordHidden, setPasswordHidden] = React.useState(true)
const [invalidCredentials, setInvalidCredentials] = React.useState(false)
const { setLoggedInUser } = useUserHandler()
// redirect away from the Login modal with useHistory
const history = useHistory()
// schema to validate form inputs
const validationSchema = Yup.object().shape({
username: Yup.string().required("Username is required"),
password: Yup.string().required("<PASSWORD>"),
})
// get form functions and link validation schema to form
const {
register,
handleSubmit,
reset,
setError,
formState: { errors },
} = useForm({
resolver: yupResolver(validationSchema),
})
const submitHandler = (data) => {
// remove invalid credentials error
setInvalidCredentials(false)
const host = getBackEndHostWithSlash()
// post the validated data to the backend registration service
host &&
axios
.post(`${host}service/author/login/`, data)
.then((response) => {
// close the modal
closeModal()
// empty out the form
reset()
// reset the token
localStorage.removeItem("token")
localStorage.removeItem("refresh")
localStorage.removeItem("user")
localStorage.setItem("token", response.data.token)
localStorage.setItem("refresh", response.data.refresh)
localStorage.setItem(
"user",
JSON.stringify({ ...response.data.user })
)
// set the logged in user
setLoggedInUser({ ...response.data.user })
history.push(`/stream`)
})
.catch((e) => {
// get the errors object
const errors = e.response.data
// set username errors
if (errors.username) {
setError("username", {
type: "server",
message: errors.username[0],
})
}
// set password errors
if (errors.password) {
setError("password", {
type: "server",
message: errors.password[0],
})
}
// show invalid credentials error
setInvalidCredentials(true)
// clear any existing tokens
localStorage.removeItem("token")
localStorage.removeItem("refresh")
localStorage.removeItem("user")
})
}
return (
<Modal
show={show}
onHide={onHide}
size="lg"
aria-labelledby="contained-modal-title-vcenter"
centered
>
<Modal.Header closeButton>
<Modal.Title id="contained-modal-title-vcenter">
Log in to Plurr
</Modal.Title>
</Modal.Header>
{/* show error when credentials are invalid */}
{invalidCredentials ? (
<div className="alert alert-danger mb-0 rounded-0 alert-dismissible fade show">
<strong>Error!</strong> Invalid credentials. Your account may
not have been activated yet.
</div>
) : null}
<Modal.Body>
<Form onSubmit={handleSubmit(submitHandler)}>
{/* username Form Field */}
<Form.Group className="mb-3">
<Form.Label>Username</Form.Label>
<Form.Control
defaultValue=""
name="username"
placeholder="Username"
{...register("username")}
className={`form-control ${errors.username ? "is-invalid" : ""}`}
/>
<Form.Text className="invalid-feedback">
{errors.username?.message}
</Form.Text>
</Form.Group>
{/* password Form Field */}
<Form.Group className="mb-3">
<Form.Label>Password</Form.Label>
<InputGroup>
<Form.Control
defaultValue=""
name="password"
type={passwordHidden ? "password" : "text"}
placeholder="Password"
{...register("password")}
className={`form-control ${
errors.password ? "is-invalid" : ""
}`}
/>
<InputGroup.Text
style={{ cursor: "pointer" }}
onClick={() => {
setPasswordHidden(!passwordHidden)
}}
>
<FontAwesomeIcon icon={passwordHidden ? faEyeSlash : faEye} />
</InputGroup.Text>
<Form.Text className="invalid-feedback">
{errors.password?.message}
</Form.Text>
</InputGroup>
</Form.Group>
{/* Submit Button */}
<div className="flex-row-reverse">
<Button className="pl-5" variant="primary" type="submit">
Log In
</Button>
</div>
</Form>
</Modal.Body>
</Modal>
)
}
|
"use strict";
exports.__esModule = true;
var random_1 = require("./random");
var MIN_INT_1 = require("../number/MIN_INT");
var MAX_INT_1 = require("../number/MAX_INT");
/**
* Returns random number inside range
*/
function rand(min, max) {
min = min == null ? MIN_INT_1["default"] : min;
max = max == null ? MAX_INT_1["default"] : max;
return min + (max - min) * random_1["default"]();
}
exports["default"] = rand;
|
<gh_stars>0
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.messagemanager;
import java.util.ArrayList;
/**
* The Interface MessageInterface defines all methods that a Message class must implement.
*
* @author TransCore ITS, LLC
* Last Updated: 1/8/2014
*/
public interface MessageInterface {
/**
* To xml.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*
* @return the string
*/
public String toXML();
/**
* Message specification to xml.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*
* @return the string
*/
public String messageSpecificationToXML();
/**
* Gets the message specification.
*
* @return the message specification
*/
public ArrayList<String> getMessageSpecification();
/**
* Sets the message specification.
*
* @param messageSpecification the new message specification
*/
public void setMessageSpecification(ArrayList<String> messageSpecification);
/**
* Gets the message body.
*
* @return the message body
*/
public byte[] getMessageBody();
/**
* Sets the message body.
*
* @param messageBody the new message body
*/
public void setMessageBody(byte[] messageBody);
/**
* Gets the message name.
*
* @return the message name
*/
public String getMessageName();
/**
* Sets the message name.
*
* @param messageName the new message name
*/
public void setMessageName(String messageName);
/**
* Gets the message type.
*
* @return the message type
*/
public String getMessageType();
}
|
<reponame>DossierSansTitreEx/MiaouOS
#ifndef _ISO646_H
#define _ISO646_H 1
#if !defined(__cplusplus)
# define and &&
# define and_eq &=
# define bitand &
# define bitor |
# define compl ~
# define not !
# define not_eq !=
# define or ||
# define or_eq |=
# define xor ^
# define xor_eq ^=
#endif
#endif
|
def parse_names(string):
    names = []
    for word in string.split():
        if word[0].isupper():
            names.append(word)
    return names
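# Illustrative usage: parse_names("Alice met bob and Carol in Paris")
# returns ['Alice', 'Carol', 'Paris'].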
|
# Generated by Django 3.1.1 on 2020-10-21 10:13
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('loginsignup', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='beaver',
            name='college',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='beaver',
            name='company',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='beaver',
            name='industry',
            field=models.TextField(null=True),
        ),
    ]
|
<gh_stars>1-10
package com.benmu.framework.extend.module;
import com.benmu.framework.model.WeexEventBean;
import com.taobao.weex.annotation.JSMethod;
import com.taobao.weex.bridge.JSCallback;
import com.taobao.weex.common.WXModule;
import com.benmu.framework.manager.ManagerFactory;
import com.benmu.framework.manager.impl.dispatcher.DispatchEventManager;
import com.benmu.framework.constant.WXConstant;
import java.util.ArrayList;
/**
* Created by Carry on 17/2/8.
*/
public class ModalModule extends WXModule {
@JSMethod(uiThread = true)
public void alert(String options, final JSCallback callback) {
WeexEventBean eventBean = new WeexEventBean();
eventBean.setContext(mWXSDKInstance.getContext());
eventBean.setKey(WXConstant.WXEventCenter.EVENT_MODAL_ALERT);
eventBean.setJsParams(options);
eventBean.setJscallback(callback);
ManagerFactory.getManagerService(DispatchEventManager.class).getBus().post(eventBean);
}
@JSMethod(uiThread = true)
public void confirm(String options, final JSCallback cancel, final JSCallback ok) {
WeexEventBean eventBean = new WeexEventBean();
eventBean.setContext(mWXSDKInstance.getContext());
eventBean.setKey(WXConstant.WXEventCenter.EVENT_MODAL_CONFIRM);
eventBean.setJsParams(options);
ArrayList<JSCallback> jsCallbacks = new ArrayList<>();
jsCallbacks.add(cancel);
jsCallbacks.add(ok);
eventBean.setCallbacks(jsCallbacks);
ManagerFactory.getManagerService(DispatchEventManager.class).getBus().post(eventBean);
}
@JSMethod(uiThread = true)
public void showLoading(String options, JSCallback callback) {
WeexEventBean eventBean = new WeexEventBean();
eventBean.setContext(mWXSDKInstance.getContext());
eventBean.setKey(WXConstant.WXEventCenter.EVENT_MODAL_SHOWLOADING);
eventBean.setJsParams(options);
eventBean.setJscallback(callback);
ManagerFactory.getManagerService(DispatchEventManager.class).getBus().post(eventBean);
}
@JSMethod(uiThread = true)
public void hideLoading(JSCallback callback) {
WeexEventBean eventBean = new WeexEventBean();
eventBean.setContext(mWXSDKInstance.getContext());
eventBean.setKey(WXConstant.WXEventCenter.EVENT_MODAL_DISMISSLOADING);
eventBean.setJscallback(callback);
ManagerFactory.getManagerService(DispatchEventManager.class).getBus().post(eventBean);
}
@JSMethod(uiThread = true)
public void toast(String options) {
WeexEventBean eventBean = new WeexEventBean();
eventBean.setContext(mWXSDKInstance.getContext());
eventBean.setKey(WXConstant.WXEventCenter.EVENT_MODAL_TOAST);
eventBean.setJsParams(options);
ManagerFactory.getManagerService(DispatchEventManager.class).getBus().post(eventBean);
}
}
|
<filename>blingfirecompile.library/inc/FAMultiMap_judy.h
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
#ifndef _FA_MULTI_MAP_JUDY_H_
#define _FA_MULTI_MAP_JUDY_H_
#include "FAConfig.h"
#include "FAMultiMapA.h"
#include "FAMap_judy.h"
#include "FAArray_cont_t.h"
#include "FAHeap_t.h"
#include "FASecurity.h"
namespace BlingFire
{
class FAAllocatorA;
/// Judy-based implementation of the FAMultiMapA
/// see FAMultiMapA.h for details
class FAMultiMap_judy : public FAMultiMapA {
public:
FAMultiMap_judy ();
virtual ~FAMultiMap_judy ();
public:
void SetAllocator (FAAllocatorA * pAlloc);
const int Get (
const int Key,
__out_ecount_opt(MaxCount) int * pValues,
const int MaxCount
) const;
const int GetMaxCount () const;
const int Get (const int Key, const int ** ppValues) const;
void Set (const int Key, const int * pValues, const int ValuesCount);
void Add (const int Key, const int Value);
const int Next (int * pKey, const int ** ppValues) const;
const int Prev (int * pKey, const int ** ppValues) const;
/// additional functionality
public:
    /// makes the arrays of values sorted and unique
void SortUniq ();
    /// removes the Key -> Values pair;
    /// restructures the map, so previously returned pointers become invalid!
void Remove (const int Key);
    /// resets the map to the state it had just after construction
void Clear ();
private:
inline const int GetNewIdx ();
private:
/// map: key -> idx
FAMap_judy m_key2idx;
/// map: idx -> vals
FAArray_cont_t < FAArray_cont_t < int > > m_idx2vals;
/// keeps unused indices
FAHeap_t < int > m_deleted;
/// allocator pointer
FAAllocatorA * m_pAlloc;
    /// maximum size of the array associated with the key
int m_MaxCount;
};
}
#endif
|
#!/bin/bash
# Ensure cron is running, so sentinel is run periodically
crond
if [ -z ${RPC_HOST+x} ]; then
echo "ENV RPC_HOST must be defined in order to run sentinel."
exit 1;
fi
if [ -z ${RPC_PORT+x} ]; then
echo "ENV RPC_PORT must be defined in order to run sentinel."
exit 1;
fi
if [ -z ${RPC_USER+x} ]; then
echo "ENV RPC_USER must be defined in order to run sentinel."
exit 1;
fi
if [ -z ${RPC_PASSWORD+x} ]; then
echo "ENV RPC_PASSWORD must be defined in order to run sentinel."
exit 1;
fi
echo "rpcport=80" > ${WALLET_CONF}
echo "rpcuser=${RPC_USER}" >> ${WALLET_CONF}
echo "rpcpassword=${RPC_PASSWORD}" >> ${WALLET_CONF}
echo "Forwarding TCP traffic from localhost:80 to ${RPC_HOST}:${RPC_PORT}..."
socat TCP-LISTEN:${RPC_PORT},fork TCP:${RPC_HOST}:${RPC_PORT}
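# Illustrative invocation (image name, port and credential values are placeholders, not taken from the original repo):
#   docker run -e RPC_HOST=wallet-host -e RPC_PORT=9998 -e RPC_USER=user \
#     -e RPC_PASSWORD=secret -e WALLET_CONF=/path/to/wallet.conf <sentinel-image>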
|
package io.github.rcarlosdasilva.weixin.model.response.comment;
import java.util.List;
import com.google.gson.annotations.SerializedName;
import io.github.rcarlosdasilva.weixin.model.response.comment.bean.Comment;
public class CommentListResponse {
private int total;
@SerializedName("comment")
private List<Comment> comments;
public int getTotal() {
return total;
}
public List<Comment> getComments() {
return comments;
}
}
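// Illustrative deserialization with Gson (the JSON sample is an assumption, not taken from the comment API docs):
//   CommentListResponse resp = new Gson().fromJson("{\"total\":0,\"comment\":[]}", CommentListResponse.class);
//   resp.getTotal();      // 0
//   resp.getComments();   // empty list, bound via @SerializedName("comment")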
|
CREATE TABLE bookings (
id INT PRIMARY KEY,
room_category_id INT NOT NULL,
date_from DATE NOT NULL,
date_to DATE NOT NULL
);
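-- A minimal usage sketch (example values are illustrative, not from the original schema):
INSERT INTO bookings (id, room_category_id, date_from, date_to)
VALUES (1, 2, '2024-07-01', '2024-07-05');
-- Bookings that cover a given date:
SELECT * FROM bookings WHERE date_from <= '2024-07-03' AND date_to >= '2024-07-03';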
|
/* mbed Microcontroller Library
* Copyright (c) 2006-2013 ARM Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MBED_PINNAMES_H
#define MBED_PINNAMES_H
#include "cmsis.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef enum {
PIN_INPUT,
PIN_OUTPUT
} PinDirection;
#define PIN_SHIFT 8
typedef enum {
// LPC824 Pin Names (PIN[11:8] + IOCON offset[7:0])
P0_0 = ( 0 << PIN_SHIFT) | 0x44,
P0_1 = ( 1 << PIN_SHIFT) | 0x2C,
P0_2 = ( 2 << PIN_SHIFT) | 0x18,
P0_3 = ( 3 << PIN_SHIFT) | 0x14,
P0_4 = ( 4 << PIN_SHIFT) | 0x10,
P0_5 = ( 5 << PIN_SHIFT) | 0x0C,
P0_6 = ( 6 << PIN_SHIFT) | 0x40,
P0_7 = ( 7 << PIN_SHIFT) | 0x3C,
P0_8 = ( 8 << PIN_SHIFT) | 0x38,
P0_9 = ( 9 << PIN_SHIFT) | 0x34,
P0_10 = (10 << PIN_SHIFT) | 0x20,
P0_11 = (11 << PIN_SHIFT) | 0x1C,
P0_12 = (12 << PIN_SHIFT) | 0x08,
P0_13 = (13 << PIN_SHIFT) | 0x04,
P0_14 = (14 << PIN_SHIFT) | 0x48,
P0_15 = (15 << PIN_SHIFT) | 0x28,
P0_16 = (16 << PIN_SHIFT) | 0x24,
P0_17 = (17 << PIN_SHIFT) | 0x00,
P0_18 = (18 << PIN_SHIFT) | 0x78,
P0_19 = (19 << PIN_SHIFT) | 0x74,
P0_20 = (20 << PIN_SHIFT) | 0x70,
P0_21 = (21 << PIN_SHIFT) | 0x6C,
P0_22 = (22 << PIN_SHIFT) | 0x68,
P0_23 = (23 << PIN_SHIFT) | 0x64,
P0_24 = (24 << PIN_SHIFT) | 0x60,
P0_25 = (25 << PIN_SHIFT) | 0x5C,
P0_26 = (26 << PIN_SHIFT) | 0x58,
P0_27 = (27 << PIN_SHIFT) | 0x54,
P0_28 = (28 << PIN_SHIFT) | 0x50,
D0 = P0_0,
D1 = P0_4,
D2 = P0_19,
D3 = P0_12, // LED_RED
D4 = P0_18,
D5 = P0_28,
D6 = P0_16, // LED_GREEN
D7 = P0_17,
D8 = P0_13,
D9 = P0_27, // LED_BLUE
D10 = P0_15,
D11 = P0_26,
D12 = P0_25,
D13 = P0_24,
D14 = P0_11,
D15 = P0_10,
A0 = P0_6,
A1 = P0_14,
A2 = P0_23,
A3 = P0_22,
A4 = P0_21,
A5 = P0_20,
// LPC824-MAX board
LED_RED = P0_12,
LED_GREEN = P0_16,
LED_BLUE = P0_27,
// mbed original LED naming
LED1 = LED_RED,
LED2 = LED_GREEN,
LED3 = LED_BLUE,
LED4 = LED_BLUE,
// Serial to USB pins
USBTX = P0_7,
USBRX = P0_18,
// I2C pins
SCL = P0_10,
SDA = P0_11,
I2C_SCL = P0_10,
I2C_SDA = P0_11,
// Not connected
NC = (int)0xFFFFFFFF,
} PinName;
typedef enum {
PullUp = 2,
PullDown = 1,
PullNone = 0,
Repeater = 3,
OpenDrain = 4,
PullDefault = PullDown
} PinMode;
#define STDIO_UART_TX USBTX
#define STDIO_UART_RX USBRX
typedef struct {
unsigned char n;
unsigned char offset;
} SWM_Map;
#ifdef __cplusplus
}
#endif
#endif
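/* Illustrative usage from application code, not part of this header:
 *   DigitalOut led(LED1);   // LED1 resolves to LED_RED (P0_12) on the LPC824-MAX board
 *   led = 1;                // drive the pin high
 */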
|
import requests
import json
# OpenWeatherMap API key
API_KEY = 'your_api_key'
# City Name
city_name = 'Paris'
def get_weather_data(city_name):
# URL for the OpenWeatherMap API
url = 'http://api.openweathermap.org/data/2.5/weather?q={}&appid={}'.format(city_name, API_KEY)
    # Make a request to the API and fail fast on HTTP errors (e.g. an invalid API key)
    response = requests.get(url)
    response.raise_for_status()
    content = json.loads(response.text)
# Get the required 'main' data
main = content['main']
# Return the 'main' data
return main
# Call the function and get the weather data
weather_data = get_weather_data(city_name)
# Print the data
print(weather_data)
|
package io.vertx.ext.auth.webauthn;
import io.vertx.core.json.JsonObject;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.impl.JsonUtil;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Base64;
/**
* Converter and mapper for {@link io.vertx.ext.auth.webauthn.RelyingParty}.
* NOTE: This class has been automatically generated from the {@link io.vertx.ext.auth.webauthn.RelyingParty} original class using Vert.x codegen.
*/
public class RelyingPartyConverter {
private static final Base64.Decoder BASE64_DECODER = JsonUtil.BASE64_DECODER;
private static final Base64.Encoder BASE64_ENCODER = JsonUtil.BASE64_ENCODER;
public static void fromJson(Iterable<java.util.Map.Entry<String, Object>> json, RelyingParty obj) {
for (java.util.Map.Entry<String, Object> member : json) {
switch (member.getKey()) {
case "icon":
if (member.getValue() instanceof String) {
obj.setIcon((String)member.getValue());
}
break;
case "id":
if (member.getValue() instanceof String) {
obj.setId((String)member.getValue());
}
break;
case "name":
if (member.getValue() instanceof String) {
obj.setName((String)member.getValue());
}
break;
}
}
}
public static void toJson(RelyingParty obj, JsonObject json) {
toJson(obj, json.getMap());
}
public static void toJson(RelyingParty obj, java.util.Map<String, Object> json) {
if (obj.getIcon() != null) {
json.put("icon", obj.getIcon());
}
if (obj.getId() != null) {
json.put("id", obj.getId());
}
if (obj.getName() != null) {
json.put("name", obj.getName());
}
}
}
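// Illustrative round trip (sketch only; assumes RelyingParty exposes a no-arg constructor and the setters used above):
//   RelyingParty rp = new RelyingParty();
//   RelyingPartyConverter.fromJson(new JsonObject().put("id", "example.com").put("name", "Example"), rp);
//   JsonObject out = new JsonObject();
//   RelyingPartyConverter.toJson(rp, out);   // out contains {"id":"example.com","name":"Example"}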
|