text
stringlengths 1
1.05M
|
|---|
<filename>app/src/main/java/edu/byu/cet/founderdirectory/utilities/BitmapWorkerTask.java
package edu.byu.cet.founderdirectory.utilities;
import android.app.ActivityManager;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Path;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.support.v4.util.LruCache;
import android.util.Log;
import android.widget.ImageView;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.ref.WeakReference;
/**
* See http://bit.ly/1UKT7zj for description of this class.
*
* Created by Liddle on 3/22/16.
*/
public class BitmapWorkerTask extends AsyncTask<String, Void, Bitmap> {

    /**
     * Tag for logging.
     */
    private static final String TAG = "BitmapWorkerTask";

    /** Shared in-memory bitmap cache; lazily created by the first task. */
    private static LruCache<String, Bitmap> cache = null;

    /** Shared 10 MB disk cache; lazily created, may stay null if open() fails. */
    private static DiskLruCache diskCache = null;

    /** Weak reference so the target ImageView can be garbage collected while loading. */
    private final WeakReference<ImageView> weakReference;

    /** Source location (a local file path fed to FileInputStream) this task loads. */
    private String url = "";

    /**
     * Evicts the given URL from both the memory and disk caches, e.g. after
     * the underlying image file has been replaced.
     */
    public static void clearImageFromCache(String url) {
        if (cache != null) {
            Log.d(TAG, "clearImageFromCache: removing mem " + url);
            cache.remove(url);
        }

        if (diskCache != null) {
            try {
                Log.d(TAG, "clearImageFromCache: removing disk " + url);
                diskCache.remove(url);
            } catch (IOException e) {
                // Ignore: eviction is best-effort.
            }
        }
    }

    /**
     * Creates a task bound to the given ImageView, lazily initializing the
     * shared caches on first use.
     */
    public BitmapWorkerTask(ImageView imageView) {
        // Use a WeakReference to ensure the ImageView can be garbage collected.
        if (cache == null) {
            // Size the memory cache at one third of this app's memory class.
            final int memClass = ((ActivityManager) imageView.getContext().getSystemService(
                    Context.ACTIVITY_SERVICE)).getMemoryClass();

            cache = new LruCache<String, Bitmap>(1024 * 1024 * memClass / 3) {
                @Override
                protected int sizeOf(String key, Bitmap value) {
                    // Measure entries in bytes to match the byte-sized cache limit.
                    return value.getRowBytes() * value.getHeight();
                }
            };
        }

        if (diskCache == null) {
            try {
                diskCache = DiskLruCache.open(imageView.getContext().getCacheDir(), 1, 1, 1024 * 1024 * 10);
            } catch (IOException e) {
                // Disk cache stays null; all disk-cache methods guard against that.
                e.printStackTrace();
            }
        }

        weakReference = new WeakReference<ImageView>(imageView);
    }

    @Override
    protected Bitmap doInBackground(String... strings) {
        url = strings[0];

        Bitmap bitmap = getBitmapFromCache(url);

        if (bitmap != null) {
            Log.d(TAG, "doInBackground: used cached bitmap for url " + url);
            return bitmap;
        }

        // try-with-resources closes the stream; the original leaked it.
        try (FileInputStream in = new FileInputStream(url)) {
            bitmap = BitmapFactory.decodeStream(in);
            // NOTE(review): caching of freshly-decoded bitmaps was disabled in the
            // original (`// addBitmapToCache(url, bitmap);`) — left disabled to
            // preserve behavior, but this makes the caches cold for new images.
            return bitmap;
        } catch (IOException e) {
            return null;
        }
    }

    /**
     * Looks the key up in the memory cache first, then the disk cache,
     * promoting a disk hit back into the memory cache.
     */
    public Bitmap getBitmapFromCache(String key) {
        Bitmap bitmap = getBitmapFromMemCache(key);

        if (bitmap == null) {
            bitmap = getBitmapFromDiskCache(key);

            if (bitmap != null) {
                addBitmapToCache(key, bitmap);
            }
        }

        return bitmap;
    }

    /**
     * Stores the bitmap in whichever cache layers do not already contain it.
     */
    public void addBitmapToCache(String key, Bitmap bitmap) {
        if (getBitmapFromMemCache(key) == null) {
            addBitmapToMemoryCache(key, bitmap);
        }

        if (getBitmapFromDiskCache(key) == null) {
            addBitmapToDiskCache(bitmap, key);
        }
    }

    @Override
    protected void onPostExecute(Bitmap bitmap) {
        if (isCancelled()) {
            bitmap = null;
        }

        if (bitmap != null) {
            final ImageView imageView = weakReference.get();
            final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);

            // Only set the image if this task is still the one associated with
            // the view (it may have been recycled and given a newer task).
            if (this == bitmapWorkerTask && imageView != null) {
                imageView.setImageBitmap(bitmap);
            }
        }
    }

    public void addBitmapToMemoryCache(String key, Bitmap bitmap) {
        if (bitmap != null && getBitmapFromMemCache(key) == null) {
            cache.put(key, bitmap);
        }
    }

    /**
     * Compresses the bitmap into the disk cache under the key's hash code.
     * No-op when the disk cache failed to open or the bitmap is null.
     */
    public void addBitmapToDiskCache(Bitmap bitmap, String key) {
        if (diskCache == null || bitmap == null) {
            return;
        }

        OutputStream os = null;

        try {
            DiskLruCache.Editor editor = diskCache.edit(key.hashCode() + "");

            if (editor != null) {
                os = editor.newOutputStream(0);
                bitmap.compress(Bitmap.CompressFormat.PNG, 100, os);
                editor.commit();
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // The original never closed the stream, leaking a file handle per write.
            if (os != null) {
                try {
                    os.close();
                } catch (IOException e) {
                    // Ignore close failure.
                }
            }
        }
    }

    public Bitmap getBitmapFromMemCache(String key) {
        return cache.get(key);
    }

    /**
     * Reads a bitmap back from the disk cache, or null on miss/error or when
     * the disk cache is unavailable.
     */
    public Bitmap getBitmapFromDiskCache(String key) {
        if (diskCache == null) {
            return null;
        }

        Bitmap bitmap = null;

        try {
            DiskLruCache.Snapshot snapshot = diskCache.get(key.hashCode() + "");

            if (snapshot != null) {
                bitmap = BitmapFactory.decodeStream(snapshot.getInputStream(0));
            }
        } catch (IOException e) {
            bitmap = null;
        }

        return bitmap;
    }

    /**
     * Retrieves the task attached to the view's AsyncDrawable, if any.
     */
    private static BitmapWorkerTask getBitmapWorkerTask(ImageView imageView) {
        if (imageView != null) {
            final Drawable drawable = imageView.getDrawable();

            if (drawable instanceof AsyncDrawable) {
                final AsyncDrawable asyncDrawable = (AsyncDrawable) drawable;
                return asyncDrawable.getBitmapWorkerTask();
            }
        }

        return null;
    }

    /**
     * Cancels any in-flight task for this view that is loading a different URL.
     *
     * @return true when the caller should start a new task.
     */
    public static boolean cancelPotentialWork(String url, ImageView imageView) {
        final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);

        if (bitmapWorkerTask != null) {
            Log.d(TAG, "cancelPotentialWork: task not null");
            final String bitmapData = bitmapWorkerTask.url;

            if (!bitmapData.equals(url)) {
                // Cancel previous task
                Log.d(TAG, "cancelPotentialWork: canceling previous task");
                bitmapWorkerTask.cancel(true);
            } else {
                // The same work is already in progress
                return false;
            }
        }

        // No task associated with the ImageView, or an existing task was cancelled
        return true;
    }

    public static void loadBitmapForFragment(Context context, String url, ImageView imageView) {
        loadBitmap(context, url, imageView);
    }

    /**
     * Asynchronously loads the image at {@code url} into the view, using a
     * placeholder AsyncDrawable to track the in-flight task.
     */
    public static void loadBitmap(Context context, String url, ImageView imageView) {
        if (cancelPotentialWork(url, imageView)) {
            Log.d(TAG, "loadBitmap: starting BitmapWorkerTask");
            final BitmapWorkerTask task = new BitmapWorkerTask(imageView);
            final AsyncDrawable asyncDrawable = new AsyncDrawable(context.getResources(), null, task);
            imageView.setImageDrawable(asyncDrawable);
            task.execute(url);
        }
    }

    /**
     * Like {@link #loadBitmap}, but the placeholder drawable renders as a circle.
     */
    public static void loadBitmapCircle(Context context, String url, ImageView imageView) {
        if (cancelPotentialWork(url, imageView)) {
            Log.d(TAG, "loadBitmap: starting BitmapWorkerTask");
            final BitmapWorkerTask task = new BitmapWorkerTask(imageView);
            final AsyncDrawableCircle asyncDrawableCircle = new AsyncDrawableCircle(context.getResources(), null, task);
            imageView.setImageDrawable(asyncDrawableCircle);
            task.execute(url);
        }
    }

    /** BitmapDrawable that rounds its bitmap and remembers the loading task. */
    public static class AsyncDrawableCircle extends BitmapDrawable {
        private final WeakReference<BitmapWorkerTask> bitmapWorkerTaskReference;

        public AsyncDrawableCircle(Resources res, Bitmap bitmap, BitmapWorkerTask bitmapWorkerTask) {
            // getRoundedShape tolerates null (callers pass null as a placeholder).
            super(res, getRoundedShape(bitmap, 100));
            bitmapWorkerTaskReference = new WeakReference<BitmapWorkerTask>(bitmapWorkerTask);
        }

        public BitmapWorkerTask getBitmapWorkerTask() {
            return bitmapWorkerTaskReference.get();
        }
    }

    /** BitmapDrawable that remembers the task loading its bitmap. */
    public static class AsyncDrawable extends BitmapDrawable {
        private final WeakReference<BitmapWorkerTask> bitmapWorkerTaskReference;

        public AsyncDrawable(Resources res, Bitmap bitmap, BitmapWorkerTask bitmapWorkerTask) {
            super(res, bitmap);
            bitmapWorkerTaskReference = new WeakReference<BitmapWorkerTask>(bitmapWorkerTask);
        }

        public BitmapWorkerTask getBitmapWorkerTask() {
            return bitmapWorkerTaskReference.get();
        }
    }

    /**
     * Returns a width x width circular crop of the source bitmap, or null when
     * the source is null (the original NPE'd on a null source via drawBitmap).
     */
    public static Bitmap getRoundedShape(Bitmap scaleBitmapImage, int width) {
        if (scaleBitmapImage == null) {
            return null;
        }

        int targetWidth = width;
        int targetHeight = width;
        Bitmap targetBitmap = Bitmap.createBitmap(targetWidth,
                targetHeight, Bitmap.Config.ARGB_8888);

        Canvas canvas = new Canvas(targetBitmap);
        Path path = new Path();

        // Clip to an inscribed circle, then scale the source into it.
        path.addCircle(((float) targetWidth - 1) / 2,
                ((float) targetHeight - 1) / 2,
                (Math.min(((float) targetWidth),
                        ((float) targetHeight)) / 2),
                Path.Direction.CCW);

        canvas.clipPath(path);
        Bitmap sourceBitmap = scaleBitmapImage;
        canvas.drawBitmap(sourceBitmap,
                new Rect(0, 0, sourceBitmap.getWidth(),
                        sourceBitmap.getHeight()),
                new Rect(0, 0, targetWidth,
                        targetHeight), null);
        return targetBitmap;
    }
}
|
<gh_stars>1-10
# Use this enabled script to enable the metering dashboard.
# Horizon "enabled file": dropping this module into openstack_dashboard's
# enabled/ directory registers the metering dashboard with the UI.
DASHBOARD = 'metering'
# DISABLED = True would hide the dashboard without removing this file.
DISABLED = False
# Django apps Horizon must add to INSTALLED_APPS for this dashboard.
ADD_INSTALLED_APPS = [
    'openstack_dashboard.dashboards.metering'
]
|
#!/bin/bash
# Start the VMware GUI unless an instance is already running.

# ps output is empty when no process command line ends in the vmware binary
# path (the trailing $ keeps the grep process itself from matching).
running=$(ps aux | grep "/usr/lib/vmware/bin/vmware$")

if [ -z "$running" ]; then
    /usr/lib/vmware/bin/vmware &
fi
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" phase: copies built frameworks into the
# app bundle, strips invalid architectures, and re-signs as needed.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the current platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework at $1 into the app's Frameworks directory, resolving
# symlinks, stripping invalid architectures, re-signing, and (pre-Xcode 7)
# embedding any Swift runtime dylibs the binary links against.
install_framework()
{
  # Resolve the source: absolute product path, product basename, or literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  # The executable usually lives inside Name.framework/Name; fall back to a
  # bare binary for non-framework inputs.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# Skipped when no identity is expanded or code signing is disallowed.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1""
    # Optionally sign in the background; the trailing `wait` at the end of the
    # script collects the parallel jobs.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes (in place, via lipo) every architecture in $1 that is not listed in
# VALID_ARCHS, so fat simulator/device binaries pass App Store validation.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the Protobuf framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Protobuf/Protobuf.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Protobuf/Protobuf.framework"
fi
# Wait for any backgrounded parallel codesign jobs before the phase ends.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
import { schemaComposer, ObjectTypeComposer } from "graphql-compose";
import addOrderby from "../src/orderby";
// Minimal SDL fixture: a User type whose scalar fields should each gain an
// _OrderBy entry in the generated UserOrderBy input.
const typeDefs = `
type User {
  userid: Int
  firstName: String
  lastName: String
}
`;

describe("Schema Orderby", () => {
  test("confirms order by", () => {
    const ts = schemaComposer.addTypeDefs(typeDefs);
    const tc = <ObjectTypeComposer>ts.get("User");
    addOrderby(tc, schemaComposer);
    // The User type itself must be left untouched by addOrderby.
    expect(tc.toSDL()).toMatchInlineSnapshot(`
      "type User {
        userid: Int
        firstName: String
        lastName: String
      }"
    `);
    // A UserOrderBy input is generated with one _OrderBy field per column.
    expect(schemaComposer.getAnyTC("UserOrderBy").toSDL())
      .toMatchInlineSnapshot(`
      "\\"\\"\\"Sort the field \\"\\"\\"
      input UserOrderBy {
        userid: _OrderBy
        firstName: _OrderBy
        lastName: _OrderBy
      }"
    `);
    // The shared _OrderBy enum offers asc/desc directions.
    expect(schemaComposer.getAnyTC("_OrderBy").toSDL()).toMatchInlineSnapshot(`
      "enum _OrderBy {
        \\"\\"\\"Sort ascending\\"\\"\\"
        asc
        \\"\\"\\"Sort descending\\"\\"\\"
        desc
      }"
    `);
  });
});
|
#!/usr/bin/env bash
# Cygwin wrapper for eslint: converts --rulesdir / --stdin-filename values to
# Windows paths before invoking eslint, then maps the Windows path back to the
# POSIX path in eslint's output.
# exit code:
# 01 - illegal arguments

# Fixes vs. original:
#  - `for OPT in "$@"` expands the argument list once, so `shift` inside it
#    never consumed arguments and options were processed twice; replaced with
#    a `while` loop over the positional parameters.
#  - `PARAMS+=( '--rulesdir', $VAL )` passed a literal "--rulesdir," (with a
#    comma) to eslint; commas removed and expansions quoted.
PARAMS=()
while [[ $# -gt 0 ]]; do
  case "$1" in
    '--rulesdir' )
      if [[ -z "$2" ]] || [[ "$2" =~ ^-+ ]]; then
        exit 1
      fi
      RULESDIR=$(readlink -f "$2")
      RULESDIR_WINDOWS=$(cygpath -w "$RULESDIR")
      PARAMS+=( '--rulesdir' "$RULESDIR_WINDOWS" )
      shift 2
      ;;
    '--stdin-filename' )
      if [[ -z "$2" ]] || [[ "$2" =~ ^-+ ]]; then
        exit 1
      fi
      STDIN_FILENAME=$(readlink -f "$2")
      STDIN_FILENAME_WINDOWS=$(cygpath -w "$STDIN_FILENAME")
      PARAMS+=( '--stdin-filename' "$STDIN_FILENAME_WINDOWS" )
      shift 2
      ;;
    *)
      PARAMS+=( "$1" )
      shift 1
      ;;
  esac
done

# Escape backslashes so the Windows path survives the sed pattern below.
STDIN_FILENAME_WINDOWS=$(echo "$STDIN_FILENAME_WINDOWS" | sed -e 's/\\/\\\\/g')
eslint "${PARAMS[@]}" | sed -e "s,$STDIN_FILENAME_WINDOWS,$STDIN_FILENAME,"
|
# Builds every module under src/ into mods/, then jars each module into mlib/.
# Relies on ../env.sh for JAVA_HOME, JAVAC_OPTIONS, JAR_OPTIONS, PATH_SEPARATOR.
. ../env.sh
mkdir -p mods
mkdir -p patches
mkdir -p mlib
mkdir -p patchlib
# modtest.whitebox sources are excluded; they are compiled as a patch elsewhere.
echo "javac $JAVAC_OPTIONS -d mods --module-path amlib${PATH_SEPARATOR}mlib --module-source-path src \$(find src -name \"*.java\" | grep -v modtest.whitebox)"
$JAVA_HOME/bin/javac $JAVAC_OPTIONS -d mods --module-path amlib${PATH_SEPARATOR}mlib --module-source-path src $(find src -name "*.java" | grep -v modtest.whitebox) 2>&1
# Package each compiled module directory as a modular jar in ../mlib.
pushd mods > /dev/null 2>&1
for dir in */;
do
    MODDIR=${dir%*/}
    echo "jar $JAR_OPTIONS --create --file=../mlib/${MODDIR}.jar -C ${MODDIR} ."
    $JAVA_HOME/bin/jar $JAR_OPTIONS --create --file=../mlib/${MODDIR}.jar -C ${MODDIR} . 2>&1
done
popd >/dev/null 2>&1
|
#!/bin/bash
# Provision a development box: raise the inotify watch limit, then install
# AdoptOpenJDK 11 (JRE), Node.js 13, and gcc 4.9.
# Fixes vs. original: the ppa add and the final install ran without
# --yes / -y, so an unattended run would hang waiting for confirmation.

# Configuring the environment for more file watchers
echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p

# Following https://adoptopenjdk.net/installation.html
wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | sudo apt-key add -
sudo add-apt-repository --yes https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/

# Following https://github.com/nodesource/distributions/blob/master/README.md#deb
curl -sL https://deb.nodesource.com/setup_13.x | sudo -E bash -

sudo add-apt-repository --yes ppa:ubuntu-toolchain-r/test
sudo apt-get update -y
sudo apt-get upgrade -y
sudo apt-get install -y gcc-4.9
sudo apt-get upgrade -y libstdc++6
sudo apt-get install -y adoptopenjdk-11-hotspot-jre nodejs
|
<filename>backend/src/routes/users.routes.ts<gh_stars>0
import { Router } from 'express';
import CreateUserService from '../services/CreateUserService';
import { getRepository } from 'typeorm';
import User from '../models/User';
import UserDetails from '../models/UserDetails';
import verifyAuth from '../middlewares/verifyAuth';
const usersRoutes = Router();
// Creates a user from the request body. The stored password is stripped from
// the response (the original returned it — `delete user.password` had been
// commented out). Errors surface as 400 with the service's message.
usersRoutes.post('/', async (request, response) => {
  try {
    const {
      name,
      email,
      password,
      birthdate,
      phone,
      website,
      address,
      status,
    } = request.body;

    const createUser = new CreateUserService();

    const user = await createUser.saveUser({
      name,
      email,
      password,
      birthdate,
      phone,
      website,
      address,
      status,
    });

    // Never echo credentials back to the client.
    const { password: _omitted, ...safeUser } = user as any;

    return response.json(safeUser);
  } catch (err) {
    return response.status(400).json({ error: err.message });
  }
});
// Lists every user's detail record, joined with its base user entity.
// Requires a valid auth token (verifyAuth).
usersRoutes.get('/', verifyAuth, async (request, response) => {
  const detailsRepository = getRepository(UserDetails);
  const allDetails = await detailsRepository.find({ relations: ['user'] });
  return response.json(allDetails);
});
// Fetches one user's detail record by user id, omitting the password.
// Fixes vs. original: a missing user now yields 404 instead of an empty body.
usersRoutes.get('/:id', verifyAuth, async (request, response) => {
  const userId = request.params.id;
  const detailsRepository = getRepository(UserDetails);

  const getUser = await detailsRepository.findOne({
    where: { userId },
    relations: ['user'],
  });

  if (!getUser) {
    return response.status(404).json({ error: 'User not found' });
  }

  // Never expose the stored password.
  delete getUser.user.password;

  return response.json(getUser);
});
export default usersRoutes;
|
package proto
import (
"bytes"
"path/filepath"
"reflect"
"testing"
"github.com/pachisi456/Sia/build"
"github.com/pachisi456/Sia/crypto"
"github.com/pachisi456/Sia/encoding"
"github.com/pachisi456/Sia/modules"
"github.com/pachisi456/Sia/types"
)
// TestContractUncommittedTxn tests that if a contract revision is left in an
// uncommitted state, either version of the contract can be recovered.
func TestContractUncommittedTxn(t *testing.T) {
	if testing.Short() {
		t.SkipNow()
	}
	// create contract set with one contract
	dir := build.TempDir(filepath.Join("proto", t.Name()))
	cs, err := NewContractSet(dir, modules.ProdDependencies)
	if err != nil {
		t.Fatal(err)
	}
	initialHeader := contractHeader{
		Transaction: types.Transaction{
			FileContractRevisions: []types.FileContractRevision{{
				NewRevisionNumber:    1,
				NewValidProofOutputs: []types.SiacoinOutput{{}, {}},
				UnlockConditions: types.UnlockConditions{
					PublicKeys: []types.SiaPublicKey{{}, {}},
				},
			}},
		},
	}
	initialRoots := []crypto.Hash{{1}}
	c, err := cs.managedInsertContract(initialHeader, initialRoots)
	if err != nil {
		t.Fatal(err)
	}
	// apply an update to the contract, but don't commit it
	sc := cs.mustAcquire(t, c.ID)
	revisedHeader := contractHeader{
		Transaction: types.Transaction{
			FileContractRevisions: []types.FileContractRevision{{
				NewRevisionNumber:    2,
				NewValidProofOutputs: []types.SiacoinOutput{{}, {}},
				UnlockConditions: types.UnlockConditions{
					PublicKeys: []types.SiaPublicKey{{}, {}},
				},
			}},
		},
		StorageSpending: types.NewCurrency64(7),
		UploadSpending:  types.NewCurrency64(17),
	}
	revisedRoots := []crypto.Hash{{1}, {2}}
	fcr := revisedHeader.Transaction.FileContractRevisions[0]
	newRoot := revisedRoots[1]
	storageCost := revisedHeader.StorageSpending.Sub(initialHeader.StorageSpending)
	bandwidthCost := revisedHeader.UploadSpending.Sub(initialHeader.UploadSpending)
	// recordUploadIntent writes the revision to the WAL without applying it.
	walTxn, err := sc.recordUploadIntent(fcr, newRoot, storageCost, bandwidthCost)
	if err != nil {
		t.Fatal(err)
	}
	// the state of the contract should match the initial state
	// NOTE: can't use reflect.DeepEqual for the header because it contains
	// types.Currency fields
	merkleRoots, err := sc.merkleRoots.merkleRoots()
	if err != nil {
		t.Fatal("failed to get merkle roots", err)
	}
	if !bytes.Equal(encoding.Marshal(sc.header), encoding.Marshal(initialHeader)) {
		t.Fatal("contractHeader should match initial contractHeader")
	} else if !reflect.DeepEqual(merkleRoots, initialRoots) {
		t.Fatal("Merkle roots should match initial Merkle roots")
	}
	// close and reopen the contract set
	// (simulates a restart, exercising recovery of the uncommitted WAL txn)
	cs.Close()
	cs, err = NewContractSet(dir, modules.ProdDependencies)
	if err != nil {
		t.Fatal(err)
	}
	// the uncommitted transaction should be stored in the contract
	sc = cs.mustAcquire(t, c.ID)
	if len(sc.unappliedTxns) != 1 {
		t.Fatal("expected 1 unappliedTxn, got", len(sc.unappliedTxns))
	} else if !bytes.Equal(sc.unappliedTxns[0].Updates[0].Instructions, walTxn.Updates[0].Instructions) {
		t.Fatal("WAL transaction changed")
	}
	// the state of the contract should match the initial state
	merkleRoots, err = sc.merkleRoots.merkleRoots()
	if err != nil {
		t.Fatal("failed to get merkle roots:", err)
	}
	if !bytes.Equal(encoding.Marshal(sc.header), encoding.Marshal(initialHeader)) {
		t.Fatal("contractHeader should match initial contractHeader", sc.header, initialHeader)
	} else if !reflect.DeepEqual(merkleRoots, initialRoots) {
		t.Fatal("Merkle roots should match initial Merkle roots")
	}
	// apply the uncommitted transaction
	err = sc.commitTxns()
	if err != nil {
		t.Fatal(err)
	}
	// the uncommitted transaction should be gone now
	if len(sc.unappliedTxns) != 0 {
		t.Fatal("expected 0 unappliedTxns, got", len(sc.unappliedTxns))
	}
	// the state of the contract should now match the revised state
	merkleRoots, err = sc.merkleRoots.merkleRoots()
	if err != nil {
		t.Fatal("failed to get merkle roots:", err)
	}
	if !bytes.Equal(encoding.Marshal(sc.header), encoding.Marshal(revisedHeader)) {
		t.Fatal("contractHeader should match revised contractHeader", sc.header, revisedHeader)
	} else if !reflect.DeepEqual(merkleRoots, revisedRoots) {
		t.Fatal("Merkle roots should match revised Merkle roots")
	}
}
|
#!/bin/bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Runs under Cygwin/MSYS on Windows build machines; igncr tolerates CRLF
# line endings when the shell supports the option.
(set -o igncr) 2>/dev/null && set -o igncr; # comment is needed
## Make sure McAfee is shutdown
# `net stop` is the Windows service control command (works via Cygwin).
net stop mcshield
|
def formatDate(date):
    """Render an ISO ``YYYY-MM-DD`` date as e.g. ``March 22nd, 2016``.

    The ordinal suffix follows English rules: 11-13 always take "th";
    otherwise the last digit selects st/nd/rd, defaulting to th.
    """
    month_names = (
        "January", "February", "March", "April", "May", "June",
        "July", "August", "September", "October", "November", "December",
    )
    year, month, day = (int(part) for part in date.split("-"))

    if 11 <= day <= 13:
        suffix = "th"
    else:
        suffix = {1: "st", 2: "nd", 3: "rd"}.get(day % 10, "th")

    return "{} {}{}, {}".format(month_names[month - 1], day, suffix, year)
|
/*
* Copyright 2004-2007 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.josql.incubator;
import java.util.List;
import java.util.Collection;
import java.util.Collections;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import org.josql.Query;
import org.josql.exceptions.QueryExecutionException;
import org.josql.exceptions.QueryParseException;
import org.josql.expressions.Expression;
/**
 * An ArrayList that vets every element against a JoSQL WHERE clause before
 * admitting it. Rejected elements either raise IllegalArgumentException or,
 * when noThrow is set, are silently skipped.
 *
 * Fixes vs. original:
 *  - addAll(Collection) appended at size()-1, inserting new elements BEFORE
 *    the last existing element; it now appends at size().
 *  - addAll(int, Collection) and canAdd dereferenced the WHERE clause
 *    directly and threw NullPointerException when the query had no WHERE
 *    clause; both now route through the null-safe check() logic.
 */
public class FilteredArrayList extends ArrayList
{

    /** WHERE clause used to vet additions; null means accept everything. */
    private Expression where = null;

    /** Lazily-fetched ORDER BY comparator from the query. */
    private Comparator orderByComp = null;

    /** The JoSQL query supplying the WHERE clause and comparator. */
    private Query q = null;

    /** Last QueryExecutionException raised while evaluating the WHERE clause. */
    private Exception ex = null;

    /** When true, rejected elements are dropped instead of throwing. */
    private boolean noThrow = false;

    public FilteredArrayList (String q)
                              throws QueryParseException
    {
        this (q,
              10);
    }

    public FilteredArrayList (String q,
                              int cap)
                              throws QueryParseException
    {
        super (cap);
        this.q = new Query ();
        this.q.parse (q);
        this.where = this.q.getWhereClause ();
    }

    public FilteredArrayList (String q,
                              Collection c)
                              throws QueryParseException
    {
        this (q);
        this.addAll (c);
    }

    /**
     * NOTE(review): this constructor deliberately does not extract the WHERE
     * clause from the query (matching the original), so lists built this way
     * accept all elements.
     */
    public FilteredArrayList (Query q)
    {
        this.q = q;
    }

    public FilteredArrayList (Query q,
                              Collection c)
    {
        this (q);
        this.addAll (c);
    }

    public boolean isNoThrowOnWhereFalse ()
    {
        return this.noThrow;
    }

    public void setNoThrowOnWhereFalse (boolean v)
    {
        this.noThrow = v;
    }

    public Exception getException ()
    {
        return this.ex;
    }

    public Query getQuery ()
    {
        return this.q;
    }

    /**
     * Re-sorts this list using the query's ORDER BY comparator when present,
     * otherwise by natural ordering.
     */
    public void resort ()
    {
        if (this.orderByComp == null)
        {
            this.orderByComp = this.q.getOrderByComparator ();
        }

        if (this.orderByComp != null)
        {
            Collections.sort (this,
                              this.orderByComp);
            return;
        }

        Collections.sort (this);
    }

    /**
     * Evaluates the WHERE clause for o. Returns true when o is admissible;
     * returns false (noThrow) or throws IllegalArgumentException otherwise.
     */
    private boolean check (Object o)
                           throws IllegalArgumentException
    {
        this.ex = null;

        if (this.where == null)
        {
            return true;
        }

        try
        {
            if (!this.where.isTrue (o,
                                    this.q))
            {
                if (!this.noThrow)
                {
                    throw new IllegalArgumentException ("Where clause: " +
                                                        this.where +
                                                        " evaluates to false for object cannot be added");
                }

                return false;
            }

            return true;
        } catch (QueryExecutionException e) {
            // Preserve the cause for callers via getException().
            this.ex = e;
            throw new IllegalArgumentException ("Where clause: " +
                                                this.where +
                                                " throws exception during execution, use: getException for details.");
        }
    }

    /**
     * Appends all admissible elements of c to the end of this list.
     */
    public boolean addAll (Collection c)
                           throws IllegalArgumentException
    {
        // Append at size(); the original used size()-1, which inserted the
        // new elements before the last existing element.
        return this.addAll (this.size (),
                            c);
    }

    /**
     * Inserts all admissible elements of c starting at index, preserving
     * the collection's iteration order.
     */
    public boolean addAll (int index,
                           Collection c)
                           throws IllegalArgumentException
    {
        this.ex = null;

        if (c == null)
        {
            throw new NullPointerException ("Expected collection to be non-null.");
        }

        boolean change = false;
        int st = index;

        Iterator iter = c.iterator ();

        while (iter.hasNext ())
        {
            Object o = iter.next ();

            // check() handles a null WHERE clause, the noThrow mode, and
            // wrapping QueryExecutionException consistently.
            if (this.check (o))
            {
                super.add (st,
                           o);
                st++;
                change = true;
            }
        }

        return change;
    }

    public void add (int index,
                     Object o)
                     throws IllegalArgumentException
    {
        if (!this.check (o))
        {
            return;
        }

        super.add (index,
                   o);
    }

    /**
     * Replaces the element at index when o is admissible; always returns the
     * element previously at index.
     */
    public Object set (int index,
                       Object o)
                       throws IllegalArgumentException
    {
        Object oo = this.get (index);

        if (!this.check (o))
        {
            return oo;
        }

        super.set (index,
                   o);

        return oo;
    }

    public boolean add (Object o)
                        throws IllegalArgumentException
    {
        if (!this.check (o))
        {
            return false;
        }

        return super.add (o);
    }

    /**
     * Returns whether o would pass the WHERE clause, without adding it.
     * A query with no WHERE clause admits everything.
     */
    public boolean canAdd (Object o)
                           throws QueryExecutionException
    {
        if (this.where == null)
        {
            return true;
        }

        return this.where.isTrue (o,
                                  this.q);
    }

    public Object clone ()
    {
        return new FilteredArrayList (this.q,
                                      this);
    }

    public List cloneList (Query q)
    {
        return new FilteredArrayList (q,
                                      this);
    }

    public List cloneList ()
    {
        return new FilteredArrayList (this.q,
                                      this);
    }

    public FilteredArrayList cloneSelf ()
    {
        return (FilteredArrayList) this.cloneList ();
    }

    public FilteredArrayList cloneSelf (Query q)
    {
        return (FilteredArrayList) this.cloneList (q);
    }

}
|
from RFEM.initModel import *
from RFEM.enums import BracingType
class Bracing():
    """Builds RFEM bracing members via the SOAP client model.

    Fixes vs. original: ``__init__`` referenced the undefined names
    ``bracing_type``, ``start_bracing_hinge_no`` and ``end_bracing_hinge_no``
    (guaranteed NameError); the hinge numbers are now proper keyword
    parameters (appended so positional callers keep working), and the shared
    mutable ``params={}`` defaults were replaced with ``None``.
    """

    def __init__(self,
                 no: int = 1,
                 member_type = BracingType.TYPE_HORIZONTAL,
                 start_node_no: int = 1,
                 end_node_no: int = 2,
                 rotation_angle: float = 0.0,
                 start_section_no: int = 1,
                 end_section_no: int = 1,
                 comment: str = '',
                 params: dict = None,
                 start_bracing_hinge_no: int = 0,
                 end_bracing_hinge_no: int = 0):
        """Create a bracing member and submit it to the client model."""

        # Client model | Bracing
        clientObject = clientModel.factory.create('ns0:bracing')

        # Clears object atributes | Sets all atributes to None
        clearAtributes(clientObject)

        # Bracing No.
        clientObject.no = no

        # Bracing Type (fix: original read the undefined name `bracing_type`)
        clientObject.type = member_type.name

        # Start Node No.
        clientObject.node_start = start_node_no

        # End Node No.
        clientObject.node_end = end_node_no

        # Bracing Rotation Angle beta
        clientObject.rotation_angle = rotation_angle

        # Start Section No.
        clientObject.section_start = start_section_no

        # End Section No.
        clientObject.section_end = end_section_no

        # Start Bracing Hinge No. (fix: now a real parameter)
        clientObject.bracing_hinge_start = start_bracing_hinge_no

        # End Bracing Hinge No. (fix: now a real parameter)
        clientObject.bracing_hinge_end = end_bracing_hinge_no

        # Comment
        clientObject.comment = comment

        # Adding optional parameters via dictionary
        for key in (params or {}):
            clientObject[key] = params[key]

        # Add Member to client model
        clientModel.service.set_bracing(clientObject)

    def Horizontal(self,
                   no: int = 1,
                   bracing_type = BracingType.TYPE_HORIZONTAL,
                   start_node_no: int = 1,
                   end_node_no: int = 2,
                   rotation_angle: float = 0.0,
                   start_section_no: int = 1,
                   end_section_no: int = 1,
                   start_bracing_hinge_no: int = 0,
                   end_bracing_hinge_no: int = 0,
                   comment: str = '',
                   params: dict = None):
        """Create a horizontal bracing member and submit it to the model."""

        # Client model | Bracing
        clientObject = clientModel.factory.create('ns0:bracing')

        # Clears object atributes | Sets all atributes to None
        clearAtributes(clientObject)

        # Bracing No.
        clientObject.no = no

        # Bracing Type
        clientObject.type = bracing_type.name

        # Start Node No.
        clientObject.node_start = start_node_no

        # End Node No.
        clientObject.node_end = end_node_no

        # Bracing Rotation Angle beta
        clientObject.rotation_angle = rotation_angle

        # Start Section No.
        clientObject.section_start = start_section_no

        # End Section No.
        clientObject.section_end = end_section_no

        # Start Bracing Hinge No.
        clientObject.bracing_hinge_start = start_bracing_hinge_no

        # End Bracing Hinge No.
        clientObject.bracing_hinge_end = end_bracing_hinge_no

        # Comment
        clientObject.comment = comment

        # Adding optional parameters via dictionary
        for key in (params or {}):
            clientObject[key] = params[key]

        # Add Bracing to client model
        clientModel.service.set_bracing(clientObject)

    def Vertical(self,
                 no: int = 1,
                 bracing_type = BracingType.TYPE_VERTICAL,
                 start_node_no: int = 1,
                 end_node_no: int = 2,
                 rotation_angle: float = 0.0,
                 start_section_no: int = 1,
                 end_section_no: int = 1,
                 start_bracing_hinge_no: int = 0,
                 end_bracing_hinge_no: int = 0,
                 comment: str = '',
                 params: dict = None):
        """Create a vertical bracing member and submit it to the model."""

        # Client model | Bracing
        clientObject = clientModel.factory.create('ns0:bracing')

        # Clears object atributes | Sets all atributes to None
        clearAtributes(clientObject)

        # Bracing No.
        clientObject.no = no

        # Bracing Type
        clientObject.type = bracing_type.name

        # Start Node No.
        clientObject.node_start = start_node_no

        # End Node No.
        clientObject.node_end = end_node_no

        # Bracing Rotation Angle beta
        clientObject.rotation_angle = rotation_angle

        # Start Section No.
        clientObject.section_start = start_section_no

        # End Section No.
        clientObject.section_end = end_section_no

        # Start Bracing Hinge No.
        clientObject.bracing_hinge_start = start_bracing_hinge_no

        # End Bracing Hinge No.
        clientObject.bracing_hinge_end = end_bracing_hinge_no

        # Comment
        clientObject.comment = comment

        # Adding optional parameters via dictionary
        for key in (params or {}):
            clientObject[key] = params[key]

        # Add Bracing to client model
        clientModel.service.set_bracing(clientObject)
|
#
# Copyright SecureKey Technologies Inc. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#

# Release Parameters
BASE_VERSION=0.1.6
IS_RELEASE=false

# Project Parameters
SOURCE_REPO=edv
BASE_PKG_NAME=edv
RELEASE_REPO=ghcr.io/trustbloc
SNAPSHOT_REPO=ghcr.io/trustbloc-cicd

# Snapshot builds get a "-snapshot-<short sha>" suffix and go to the CICD
# registry; releases use the bare version and the public registry.
if [ ${IS_RELEASE} = false ]
then
  EXTRA_VERSION=snapshot-$(git rev-parse --short=7 HEAD)
  PROJECT_VERSION=${BASE_VERSION}-${EXTRA_VERSION}
  PROJECT_PKG_REPO=${SNAPSHOT_REPO}
else
  PROJECT_VERSION=${BASE_VERSION}
  PROJECT_PKG_REPO=${RELEASE_REPO}
fi

# Consumed by the docker build / publish scripts.
export EDV_TAG=${PROJECT_VERSION}
export EDV_PKG=${PROJECT_PKG_REPO}/${BASE_PKG_NAME}
|
from typing import AsyncIterator, Callable, List
import pytest
import csv
import io
from aiocsv._parser import parser as fast_parser
from aiocsv.parser import parser as py_parser
from aiocsv.protocols import WithAsyncRead
Parser = Callable[[WithAsyncRead, csv.Dialect], AsyncIterator[List[str]]]
PARSERS: List[Parser] = [fast_parser, py_parser]
PARSER_NAMES: List[str] = ["fast_cython_parser", "pure_python_parser"]
class AsyncStringIO:
    """Minimal in-memory WithAsyncRead implementation backed by a string."""

    def __init__(self, data: str = "") -> None:
        # Cursor into `data`; advanced by every read() call.
        self.ptr = 0
        self.data = data

    async def read(self, size: int) -> str:
        """Return up to ``size`` characters and advance the cursor.

        Returns the empty string once the data is exhausted, mimicking a
        real file object at EOF.
        """
        begin = self.ptr
        self.ptr = begin + size
        return self.data[begin:self.ptr]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_simple(parser: Parser):
    """Both parsers must agree with the stdlib csv reader on plain quoted CSV,
    including doubled-quote escapes inside quoted fields."""
    data = 'abc,"def",ghi\r\n' \
        '"j""k""l",mno,pqr\r\n' \
        'stu,vwx,"yz"\r\n'
    csv_result = list(csv.reader(io.StringIO(data, newline="")))
    custom_result = [r async for r in parser(AsyncStringIO(data), csv.get_dialect("excel"))]

    assert csv_result == custom_result
    assert custom_result == [
        ["abc", "def", "ghi"], ['j"k"l', "mno", "pqr"], ["stu", "vwx", "yz"]
    ]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_escapes(parser: Parser):
    """Escape-character handling (escapechar='$') must match the stdlib csv
    module in strict mode.

    Fix vs. original: the data literal ended with a stray line-continuation
    backslash, which glued the ``csv_parser = …`` assignment onto the string
    expression and made the module a SyntaxError.
    """
    data = 'ab$"c,de$\nf\r\n' \
        '"$"",$$gh$"\r\n' \
        '"i\nj",k$,\r\n'
    csv_parser = csv.reader(io.StringIO(data, newline=""), escapechar="$", strict=True)
    csv_result = list(csv_parser)
    custom_result = [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]

    assert csv_result == custom_result
    assert custom_result == [
        ['ab"c', "de\nf"], ['"', '$gh"'], ['i\nj', "k,"]
    ]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_empty(parser: Parser):
    """Empty rows, empty fields and skipinitialspace must match csv.reader."""
    data = '\r\n a,,\r\n,\r\n '
    csv_parser = csv.reader(io.StringIO(data, newline=""), skipinitialspace=True, strict=True)
    csv_result = list(csv_parser)
    custom_result = [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]
    assert csv_result == custom_result
    assert custom_result == [
        [], ["a", "", ""], ["", ""], [""]
    ]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_nonnumeric(parser: Parser):
    """QUOTE_NONNUMERIC: unquoted fields become floats, quoted stay strings."""
    data = '1,2\n"a",,3.14'
    csv_parser = csv.reader(io.StringIO(data, newline=""), quoting=csv.QUOTE_NONNUMERIC,
                            strict=True)
    csv_result = list(csv_parser)
    custom_result = [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]
    assert csv_result == custom_result
    assert custom_result == [
        [1.0, 2.0], ["a", "", 3.14]
    ]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_nonnumeric_invalid(parser: Parser):
    """Both readers must raise ValueError for a non-float unquoted field."""
    data = '1,2\na,3.14\n'
    csv_parser = csv.reader(io.StringIO(data, newline=""), quoting=csv.QUOTE_NONNUMERIC,
                            strict=True)
    with pytest.raises(ValueError):
        list(csv_parser)
    with pytest.raises(ValueError):
        [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_none_quoting(parser: Parser):
    """QUOTE_NONE: quote characters are kept verbatim in the fields."""
    data = '1" hello,"2\na","3.14"'
    csv_parser = csv.reader(io.StringIO(data, newline=""), quoting=csv.QUOTE_NONE, strict=True)
    csv_result = list(csv_parser)
    custom_result = [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]
    assert csv_result == custom_result
    assert custom_result == [
        ['1" hello', '"2'], ['a"', '"3.14"']
    ]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_weird_quotes(parser: Parser):
    """Non-strict mode: malformed quoting must be tolerated like csv.reader."""
    data = 'a"b,$"cd"\r\n' \
           '"ef"g",\r\n' \
           '"$"""","e"$f"\r\n'
    csv_parser = csv.reader(io.StringIO(data, newline=""), escapechar="$", strict=False)
    csv_result = list(csv_parser)
    custom_result = [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]
    assert csv_result == custom_result
    assert custom_result == [
        ['a"b', '"cd"'], ['efg"', ""], ['""', 'e$f"']
    ]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_strict_quoting(parser: Parser):
    """Strict mode: text after a closing quote must raise csv.Error."""
    data = '"ab"c,def\r\n'
    csv_parser = csv.reader(io.StringIO(data, newline=""), strict=True)
    with pytest.raises(csv.Error):
        list(csv_parser)
    with pytest.raises(csv.Error):
        [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]
@pytest.mark.asyncio
@pytest.mark.parametrize("parser", PARSERS, ids=PARSER_NAMES)
async def test_parsing_weird_quotes_nonnumeric(parser: Parser):
    """Interaction of QUOTE_NONNUMERIC, escapes and malformed quotes."""
    data = '3.0,\r\n"1."5,"15"\r\n$2,"-4".5\r\n-5$.2,-11'
    csv_parser = csv.reader(io.StringIO(data, newline=""), quoting=csv.QUOTE_NONNUMERIC,
                            escapechar="$", strict=False)
    csv_result = list(csv_parser)
    custom_result = [r async for r in parser(AsyncStringIO(data), csv_parser.dialect)]
    assert csv_result == custom_result
    assert custom_result == [
        [3.0, ""], ["1.5", "15"], ["2", "-4.5"], [-5.2, -11.0]
    ]
|
/**
* Created by User on 21.04.2017.
*/
// Handle clicks on any <button>: send the element's data-id to the backend
// and replace the button's HTML with the response (presumably the updated
// like count — TODO confirm against the /like_treasure endpoint).
$('button').on('click', function(event){
    event.preventDefault();
    var element = $(this);
    $.ajax({
        url: '/like_treasure',
        type: 'GET',
        data: {treasure_id: element.attr("data-id")},
        success: function(response){
            // Leading space keeps any icon markup separated from the count.
            element.html(' ' + response);
        }
    });
});
|
import Container from "@material-ui/core/Container";
import Divider from "@material-ui/core/Divider";
import Grid from "@material-ui/core/Grid";
import TablePagination from "@material-ui/core/TablePagination";
import Typography from "@material-ui/core/Typography";
import { NavigateFn } from "@reach/router";
import { graphql } from "gatsby";
import TagOutline from "mdi-material-ui/TagOutline";
import React, { FC, memo, useCallback } from "react";
import BlogListItem from "../components/blogs/blog-list-item";
import Navs from "../components/navs";
import SEO from "../components/seo";
import {
MarkdownRemarkConnection,
Site,
SitePageContext,
SiteSiteMetadata,
SiteSiteMetadataSocials,
} from "../graph-types";
// Props injected by Gatsby for the per-tag listing page:
// query result (posts + site metadata), page context from gatsby-node,
// and the router's navigate function.
type TagsProps = {
  data: {
    allMarkdownRemark: MarkdownRemarkConnection;
    site: Site;
  };
  pageContext: SitePageContext;
  navigate: NavigateFn;
};
// Lists all blog posts carrying a given tag, with optional pagination.
const Tags: FC<TagsProps> = memo(({ data, pageContext, navigate }) => {
  const { tag, totalCount, limit, currentPage, numPages } = pageContext;
  // NOTE(review): pagination is rendered but inert — this handler only logs
  // the requested page and never navigates. TODO: wire it to the paged URL.
  const onChangePage = useCallback(
    (_: React.MouseEvent<HTMLButtonElement> | null, page: number) => {
      console.log(page);
    },
    []
  );
  // Stable function reference so memoized children don't re-render each pass.
  const navigateFn = useCallback(navigate, []);
  return (
    <>
      <SEO title={tag as string} />
      <Container maxWidth={"lg"}>
        <Typography variant={"h3"} gutterBottom>
          <TagOutline fontSize={"large"} /> Tag "{tag}"
        </Typography>
        <Typography variant={"h5"} gutterBottom>
          {totalCount} {totalCount === 1 ? "result" : "results"}
        </Typography>
        <Divider />
        <Grid container spacing={6}>
          {data.allMarkdownRemark.edges.map((edge, index) => {
            return (
              <Grid item key={index} xs={12} md={6}>
                <BlogListItem item={edge} navigate={navigateFn} />
              </Grid>
            );
          })}
        </Grid>
        {(numPages as number) > 1 && (
          <TablePagination
            labelRowsPerPage={null}
            rowsPerPage={limit as number}
            page={(currentPage as number) - 1}
            count={totalCount as number}
            onChangePage={onChangePage}
          />
        )}
        <Navs
          socials={
            (data.site.siteMetadata as SiteSiteMetadata)
              .socials as SiteSiteMetadataSocials[]
          }
        />
      </Container>
    </>
  );
});
export default Tags;
// Page query: non-draft posts tagged $tag, newest first, paged via
// $skip/$limit, plus the social links rendered by <Navs>.
export const tagsQuery = graphql`
  query tagsQuery($skip: Int!, $limit: Int!, $tag: String!) {
    allMarkdownRemark(
      sort: { fields: [frontmatter___date], order: [DESC] }
      filter: { frontmatter: { draft: { ne: true }, tags: { in: [$tag] } } }
      limit: $limit
      skip: $skip
    ) {
      edges {
        node {
          excerpt
          timeToRead
          frontmatter {
            date(formatString: " MM/DD/YYYY")
            tags
            title
            langs
            cover {
              childImageSharp {
                fluid(maxWidth: 1080, fit: COVER, quality: 80) {
                  ...GatsbyImageSharpFluid
                }
              }
            }
          }
          fields {
            slug
            langKey
          }
        }
      }
    }
    site {
      siteMetadata {
        socials {
          link
          type
        }
      }
    }
  }
`;
|
#!/usr/bin/env bash
#
# These are the commands available in an .envrc context
#
# ShellCheck exceptions:
#
# SC1090: Can't follow non-constant source. Use a directive to specify location.
# SC1091: Not following: (file missing)
# SC1117: Backslash is literal in "\n". Prefer explicit escaping: "\\n".
# SC2059: Don't use variables in the printf format string. Use printf "..%s.." "$foo".
# Abort .envrc evaluation on the first failing command.
set -e
# NOTE: don't touch the RHS, it gets replaced at runtime
direnv="$(command -v direnv)"
# Config, change in the direnvrc
# ${VAR-default} (no colon) keeps an explicitly-set empty value, which
# disables logging in log_status/log_error.
DIRENV_LOG_FORMAT="${DIRENV_LOG_FORMAT-direnv: %s}"
# This variable can be used by programs to detect when they are running inside
# of a .envrc evaluation context. It is ignored by the direnv diffing
# algorithm and so it won't be re-exported.
export DIRENV_IN_ENVRC=1
# Usage: direnv_layout_dir
#
# Prints the folder path that direnv should use to store layout content.
# This needs to be a function as $PWD might change during source_env/up.
#
# The output defaults to $PWD/.direnv.
direnv_layout_dir() {
echo "${direnv_layout_dir:-$PWD/.direnv}"
}
# Usage: log_status [<message> ...]
#
# Logs a status message. Acts like echo,
# but wraps output in the standard direnv log format
# (controlled by $DIRENV_LOG_FORMAT), and directs it
# to stderr rather than stdout.
#
# Example:
#
# log_status "Loading ..."
#
log_status() {
  # An empty $DIRENV_LOG_FORMAT disables logging entirely.
  if [[ -z $DIRENV_LOG_FORMAT ]]; then
    return 0
  fi
  local msg=$*
  # shellcheck disable=SC2059,SC1117
  printf "${DIRENV_LOG_FORMAT}\n" "$msg" >&2
}
# Usage: log_error [<message> ...]
#
# Logs an error message. Acts like echo,
# but wraps output in the standard direnv log format
# (controlled by $DIRENV_LOG_FORMAT), and directs it
# to stderr rather than stdout.
#
# Example:
#
# log_error "Unable to find specified directory!"
# Logs an error message in red (when the terminal supports it) using the
# standard direnv log format, to stderr.
log_error() {
  if [[ -n $DIRENV_LOG_FORMAT ]]; then
    local color_normal
    local color_error
    # tput exits non-zero on terminals without color support (e.g.
    # TERM=dumb, or TERM unset); under `set -e` that would abort the whole
    # .envrc evaluation just to print an error. Fall back to no color.
    color_normal=$(tput sgr0 2>/dev/null || true)
    color_error=$(tput setaf 1 2>/dev/null || true)
    local msg=$*
    # shellcheck disable=SC2059,SC1117
    printf "${color_error}${DIRENV_LOG_FORMAT}${color_normal}\n" "$msg" >&2
  fi
}
# Usage: has <command>
#
# Returns 0 if the <command> is available. Returns 1 otherwise. It can be a
# binary in the PATH or a shell function.
#
# Example:
#
# if has curl; then
# echo "Yes we do"
# fi
#
has() {
  # `type` resolves binaries on PATH as well as shell functions/builtins.
  type "$1" >/dev/null 2>&1
}

# Usage: join_args [args...]
#
# Joins all the passed arguments into a single string that can be evaluated by bash
#
# This is useful when one has to serialize an array of arguments back into a string
join_args() {
  # %q shell-quotes each argument; note the trailing separator space.
  printf '%q ' "$@"
}
# Usage: expand_path <rel_path> [<relative_to>]
#
# Outputs the absolute path of <rel_path> relative to <relative_to> or the
# current directory.
#
# Example:
#
# cd /usr/local/games
# expand_path ../foo
# # output: /usr/local/foo
#
expand_path() {
  # Delegates to the direnv binary captured at the top of this file.
  "$direnv" expand_path "$@"
}
# Usage: dotenv [<dotenv>]
#
# Loads a ".env" file into the current environment
#
dotenv() {
  local path=$1
  if [[ -z $path ]]; then
    # No argument: default to the .env next to the .envrc.
    path=$PWD/.env
  elif [[ -d $path ]]; then
    # Directory argument: load the .env inside it.
    path=$path/.env
  fi
  if ! [[ -f $path ]]; then
    log_error ".env at $path not found"
    return 1
  fi
  # Pass the resolved $path rather than the raw arguments: the original
  # forwarded "$@", so the directory/default resolution above was computed
  # but never actually used by `direnv dotenv`.
  eval "$("$direnv" dotenv bash "$path")"
  watch_file "$path"
}
# Usage: user_rel_path <abs_path>
#
# Transforms an absolute path <abs_path> into a user-relative path if
# possible.
#
# Example:
#
# echo $HOME
# # output: /home/user
# user_rel_path /home/user/my/project
# # output: ~/my/project
# user_rel_path /usr/local/lib
# # output: /usr/local/lib
#
user_rel_path() {
  # NOTE(review): ${1#-} strips a single leading "-" from the argument —
  # presumably a guard against option-like input; confirm the intent.
  local abs_path=${1#-}
  if [[ -z $abs_path ]]; then return; fi
  if [[ -n $HOME ]]; then
    # Strip the $HOME prefix; if anything was removed, re-prefix the
    # remainder with a literal "~".
    local rel_path=${abs_path#$HOME}
    if [[ $rel_path != "$abs_path" ]]; then
      abs_path=~$rel_path
    fi
  fi
  echo "$abs_path"
}
# Usage: find_up <filename>
#
# Outputs the path of <filename> when searched from the current directory up to
# /. Returns 1 if the file has not been found.
#
# Example:
#
# cd /usr/local/my
# mkdir -p project/foo
# touch bar
# cd project/foo
# find_up bar
# # output: /usr/local/my/bar
#
find_up() {
  (
    # Run in a subshell so the upward cd walk never affects the caller's CWD.
    while :; do
      if [[ -f $1 ]]; then
        echo "$PWD/$1"
        return 0
      fi
      # Stop at the filesystem root ("//" is a distinct root on some systems).
      if [[ $PWD == / || $PWD == // ]]; then
        return 1
      fi
      cd ..
    done
  )
}
# Usage: source_env <file_or_dir_path>
#
# Loads another ".envrc" either by specifying its path or filename.
#
# NOTE: the other ".envrc" is not checked by the security framework.
source_env() {
  # Expand a leading "~" manually since the value came from a variable.
  local rcpath=${1/#\~/$HOME}
  local rcfile
  if [[ -d $rcpath ]]; then
    rcpath=$rcpath/.envrc
  fi
  if [[ ! -e $rcpath ]]; then
    log_status "referenced $rcpath does not exist"
    return 1
  fi
  rcfile=$(user_rel_path "$rcpath")
  watch_file "$rcpath"
  # Save the caller's directory, then evaluate the .envrc from its own
  # directory so its relative paths resolve correctly; both pushd frames
  # are popped below.
  pushd "$(pwd 2>/dev/null)" >/dev/null
  pushd "$(dirname "$rcpath")" >/dev/null
  if [[ -f ./$(basename "$rcpath") ]]; then
    log_status "loading $rcfile"
    # shellcheck disable=SC1090
    . "./$(basename "$rcpath")"
  else
    log_status "referenced $rcfile does not exist"
  fi
  popd >/dev/null
  popd >/dev/null
}
# Usage: watch_file <filename>
#
# Adds <path> to the list of files that direnv will watch for changes - useful when the contents
# of a file influence how variables are set - especially in direnvrc
#
watch_file() {
  # Expand a leading "~", then eval the shell code emitted by `direnv watch`
  # (which presumably records the entry in $DIRENV_WATCHES — see direnv_load).
  local file=${1/#\~/$HOME}
  eval "$("$direnv" watch "$file")"
}
# Usage: source_up [<filename>]
#
# Loads another ".envrc" if found with the find_up command.
#
# NOTE: the other ".envrc" is not checked by the security framework.
source_up() {
  # Default to ".envrc" when no filename was given.
  local file=${1:-.envrc}
  local dir
  # Start the search from the parent directory so the current .envrc
  # itself is skipped.
  dir=$(cd .. && find_up "$file")
  if [[ -n $dir ]]; then
    source_env "$dir"
  fi
}
# Usage: direnv_load <command-generating-dump-output>
# e.g: direnv_load opam-env exec -- "$direnv" dump
#
# Applies the environment generated by running <argv> as a
# command. This is useful for adopting the environment of a child
# process - cause that process to run "direnv dump" and then wrap
# the results with direnv_load.
#
direnv_load() {
  local exports
  # backup and restore watches in case of nix-shell --pure
  local __watches=$DIRENV_WATCHES
  # Run the given command; its output must be a direnv dump, which
  # apply_dump converts into shell export statements.
  exports=$("$direnv" apply_dump <("$@"))
  local es=$?
  if [[ $es -ne 0 ]]; then
    return $es
  fi
  eval "$exports"
  export DIRENV_WATCHES=$__watches
}
# Usage: PATH_add <path> [<path> ...]
#
# Prepends the expanded <path> to the PATH environment variable, in order.
# It prevents a common mistake where PATH is replaced by only the new <path>,
# or where a trailing colon is left in PATH, resulting in the current directory
# being considered in the PATH. Supports adding multiple directories at once.
#
# Example:
#
# pwd
# # output: /my/project
# PATH_add bin
# echo $PATH
# # output: /my/project/bin:/usr/bin:/bin
# PATH_add bam boum
# echo $PATH
# # output: /my/project/bam:/my/project/boum:/my/project/bin:/usr/bin:/bin
#
PATH_add() {
  path_add PATH "$@"
}

# Usage: path_add <varname> <path> [<path> ...]
#
# Works like PATH_add except that it's for an arbitrary <varname>.
path_add() {
  local path
  local var_name="$1"
  # split existing paths into an array
  declare -a path_array
  IFS=: read -ra path_array <<<"${!1}"
  shift
  # prepend the passed paths in the right order
  # (iterate the positional args last-to-first; ${!i} is argument number i)
  for ((i = $#; i > 0; i--)); do
    path_array=("$(expand_path "${!i}")" "${path_array[@]}")
  done
  # join back all the paths
  path=$(
    IFS=:
    echo "${path_array[*]}"
  )
  # and finally export back the result to the original variable
  export "$var_name=$path"
}
# Usage: MANPATH_add <path>
#
# Prepends a path to the MANPATH environment variable while making sure that
# `man` can still lookup the system manual pages.
#
# If MANPATH is not empty, man will only look in MANPATH.
# So if we set MANPATH=$path, man will only look in $path.
# Instead, prepend to `man -w` (which outputs man's default paths).
#
MANPATH_add() {
  # Seed with man's own default search path (`man -w`) so system manual
  # pages remain reachable after MANPATH is set.
  local old_paths="${MANPATH:-$(man -w)}"
  local dir
  dir=$(expand_path "$1")
  export "MANPATH=$dir:$old_paths"
}
# Usage: load_prefix <prefix_path>
#
# Expands some common path variables for the given <prefix_path> prefix. This is
# useful if you installed something in the <prefix_path> using
# $(./configure --prefix=<prefix_path> && make install) and want to use it in
# the project.
#
# Variables set:
#
# CPATH
# LD_LIBRARY_PATH
# LIBRARY_PATH
# MANPATH
# PATH
# PKG_CONFIG_PATH
#
# Example:
#
# ./configure --prefix=$HOME/rubies/ruby-1.9.3
# make && make install
# # Then in the .envrc
# load_prefix ~/rubies/ruby-1.9.3
#
load_prefix() {
  local dir
  dir=$(expand_path "$1")
  # Both common man locations are prepended; MANPATH_add keeps system paths.
  MANPATH_add "$dir/man"
  MANPATH_add "$dir/share/man"
  path_add CPATH "$dir/include"
  path_add LD_LIBRARY_PATH "$dir/lib"
  path_add LIBRARY_PATH "$dir/lib"
  path_add PATH "$dir/bin"
  path_add PKG_CONFIG_PATH "$dir/lib/pkgconfig"
}
# Usage: layout <type>
#
# A semantic dispatch used to describe common project layouts.
#
layout() {
  # Dispatch `layout <type> [args...]` to the layout_<type> function.
  local name=$1
  shift
  eval "layout_$name" "$@"
}
# Usage: layout go
#
# Sets the GOPATH environment variable to the current directory.
#
layout_go() {
  # The project root becomes a GOPATH entry, and its bin/ joins the PATH.
  path_add GOPATH "$PWD"
  PATH_add bin
}
# Usage: layout node
#
# Adds "$PWD/node_modules/.bin" to the PATH environment variable.
layout_node() {
  PATH_add node_modules/.bin
}
# Usage: layout perl
#
# Setup environment variables required by perl's local::lib
# See http://search.cpan.org/dist/local-lib/lib/local/lib.pm for more details
#
layout_perl() {
  local libdir
  libdir=$(direnv_layout_dir)/perl5
  export LOCAL_LIB_DIR=$libdir
  export PERL_MB_OPT="--install_base '$libdir'"
  export PERL_MM_OPT="INSTALL_BASE=$libdir"
  path_add PERL5LIB "$libdir/lib/perl5"
  path_add PERL_LOCAL_LIB_ROOT "$libdir"
  PATH_add "$libdir/bin"
}
# Usage: layout php
#
# Adds "$PWD/vendor/bin" to the PATH environment variable
layout_php() {
  PATH_add vendor/bin
}
# Usage: layout python <python_exe>
#
# Creates and loads a virtualenv environment under
# "$direnv_layout_dir/python-$python_version".
# This forces the installation of any egg into the project's sub-folder.
#
# It's possible to specify the python executable if you want to use different
# versions of python.
#
layout_python() {
  local old_env
  # Default to whatever "python" resolves to when no executable was given.
  local python=${1:-python}
  # Drop the python-executable argument; the rest goes to virtualenv.
  [[ $# -gt 0 ]] && shift
  # Legacy location used by older direnv versions; reuse it when present.
  old_env=$(direnv_layout_dir)/virtualenv
  unset PYTHONHOME
  if [[ -d $old_env && $python == python ]]; then
    export VIRTUAL_ENV=$old_env
  else
    local python_version
    python_version=$("$python" -c "import platform as p;print(p.python_version())")
    if [[ -z $python_version ]]; then
      log_error "Could not find python's version"
      return 1
    fi
    # One virtualenv per interpreter version, created on first use.
    VIRTUAL_ENV=$(direnv_layout_dir)/python-$python_version
    export VIRTUAL_ENV
    if [[ ! -d $VIRTUAL_ENV ]]; then
      virtualenv "--python=$python" "$@" "$VIRTUAL_ENV"
    fi
  fi
  PATH_add "$VIRTUAL_ENV/bin"
}
# Usage: layout python2
#
# A shortcut for $(layout python python2)
#
layout_python2() {
  layout_python python2 "$@"
}
# Usage: layout python3
#
# A shortcut for $(layout python python3)
#
layout_python3() {
  layout_python python3 "$@"
}
# Usage: layout anaconda <environment_name> [<conda_exe>]
#
# Activates anaconda for the named environment. If the environment
# hasn't been created, it will be using the environment.yml file in
# the current directory. <conda_exe> is optional and will default to
# the one found in the system environment.
#
layout_anaconda() {
  local env_name=$1
  local env_loc
  local conda
  # Optional second argument overrides the conda executable.
  if [[ $# -gt 1 ]]; then
    conda=${2}
  else
    conda=$(command -v conda)
  fi
  PATH_add "$(dirname "$conda")"
  # Find the environment's line in `conda env list` (name at line start).
  env_loc=$("$conda" env list | grep -- "^$env_name\s")
  # NOTE(review): this glob expects $env_name at both the start and the end
  # (as the path suffix) of the listing line; confirm it still matches for
  # custom env locations before relying on it.
  if [[ ! "$env_loc" == $env_name*$env_name ]]; then
    if [[ -e environment.yml ]]; then
      log_status "creating conda environment"
      "$conda" env create
    else
      log_error "Could not find environment.yml"
      return 1
    fi
  fi
  # shellcheck disable=SC1091
  source activate "$env_name"
}
# Usage: layout pipenv
#
# Similar to layout_python, but uses Pipenv to build a
# virtualenv from the Pipfile located in the same directory.
#
layout_pipenv() {
  PIPENV_PIPFILE="${PIPENV_PIPFILE:-Pipfile}"
  if [[ ! -f "$PIPENV_PIPFILE" ]]; then
    log_error "No Pipfile found. Use \`pipenv\` to create a \`$PIPENV_PIPFILE\` first."
    # exit (not return): abort the whole .envrc evaluation.
    exit 2
  fi
  # Reuse an existing virtualenv when pipenv already knows about one;
  # `; true` keeps `set -e` from aborting when pipenv reports none.
  VIRTUAL_ENV=$(pipenv --venv 2>/dev/null ; true)
  if [[ -z $VIRTUAL_ENV || ! -d $VIRTUAL_ENV ]]; then
    pipenv install --dev
    VIRTUAL_ENV=$(pipenv --venv)
  fi
  PATH_add "$VIRTUAL_ENV/bin"
  export PIPENV_ACTIVE=1
  export VIRTUAL_ENV
}
# Usage: layout ruby
#
# Sets the GEM_HOME environment variable to "$(direnv_layout_dir)/ruby/RUBY_VERSION".
# This forces the installation of any gems into the project's sub-folder.
# If you're using bundler it will create wrapper programs that can be invoked
# directly instead of using the $(bundle exec) prefix.
#
layout_ruby() {
  BUNDLE_BIN=$(direnv_layout_dir)/bin
  # Newer RubyGems (> 2.2.0): a plain ruby/ dir suffices; older versions
  # get a per-engine/per-version directory instead.
  if ruby -e "exit Gem::VERSION > '2.2.0'" 2>/dev/null; then
    GEM_HOME=$(direnv_layout_dir)/ruby
  else
    local ruby_version
    ruby_version=$(ruby -e"puts (defined?(RUBY_ENGINE) ? RUBY_ENGINE : 'ruby') + '-' + RUBY_VERSION")
    GEM_HOME=$(direnv_layout_dir)/ruby-${ruby_version}
  fi
  export BUNDLE_BIN
  export GEM_HOME
  PATH_add "$GEM_HOME/bin"
  PATH_add "$BUNDLE_BIN"
}
# Usage: use <program_name> [<version>]
#
# A semantic command dispatch intended for loading external dependencies into
# the environment.
#
# Example:
#
# use_ruby() {
# echo "Ruby $1"
# }
# use ruby 1.9.3
# # output: Ruby 1.9.3
#
use() {
  # Dispatch `use <program> [version...]` to the use_<program> function.
  local cmd=$1
  log_status "using $*"
  shift
  "use_$cmd" "$@"
}
# Usage: use rbenv
#
# Loads rbenv which add the ruby wrappers available on the PATH.
#
use_rbenv() {
  eval "$(rbenv init -)"
}
# Usage: rvm [...]
#
# Should work just like in the shell if you have rvm installed.#
#
rvm() {
  # Replace this stub with the real rvm function from one of the known
  # install locations, then forward the original call to it.
  unset rvm
  if [[ -n ${rvm_scripts_path:-} ]]; then
    # shellcheck disable=SC1090
    source "${rvm_scripts_path}/rvm"
  elif [[ -n ${rvm_path:-} ]]; then
    # shellcheck disable=SC1090
    source "${rvm_path}/scripts/rvm"
  else
    # shellcheck disable=SC1090
    source "$HOME/.rvm/scripts/rvm"
  fi
  rvm "$@"
}
# Usage: use node
# Loads NodeJS version from a `.node-version` or `.nvmrc` file.
#
# Usage: use node <version>
# Loads specified NodeJS version.
#
# If you specify a partial NodeJS version (i.e. `4.2`), a fuzzy match
# is performed and the highest matching version installed is selected.
#
# Environment Variables:
#
# - $NODE_VERSIONS (required)
# You must specify a path to your installed NodeJS versions via the `$NODE_VERSIONS` variable.
#
# - $NODE_VERSION_PREFIX (optional) [default="node-v"]
# Overrides the default version prefix.
use_node() {
  local version=$1
  local via=""
  local node_version_prefix=${NODE_VERSION_PREFIX-node-v}
  local node_wanted
  local node_prefix
  if [[ -z $NODE_VERSIONS ]] || [[ ! -d $NODE_VERSIONS ]]; then
    log_error "You must specify a \$NODE_VERSIONS environment variable and the directory specified must exist!"
    return 1
  fi
  # Fall back to version files when no explicit version was given;
  # .nvmrc takes precedence over .node-version.
  if [[ -z $version ]] && [[ -f .nvmrc ]]; then
    version=$(<.nvmrc)
    via=".nvmrc"
  fi
  if [[ -z $version ]] && [[ -f .node-version ]]; then
    version=$(<.node-version)
    via=".node-version"
  fi
  if [[ -z $version ]]; then
    log_error "I do not know which NodeJS version to load because one has not been specified!"
    return 1
  fi
  node_wanted=${node_version_prefix}${version}
  node_prefix=$(
    # Look for matching node versions in $NODE_VERSIONS path
    # Strip possible "/" suffix from $NODE_VERSIONS, then use that to
    # Strip $NODE_VERSIONS/$NODE_VERSION_PREFIX prefix from line.
    # Sort by version: split by "." then reverse numeric sort for each piece of the version string
    # The first one is the highest
    find "$NODE_VERSIONS" -maxdepth 1 -mindepth 1 -type d -name "$node_wanted*" \
      | while IFS= read -r line; do echo "${line#${NODE_VERSIONS%/}/${node_version_prefix}}"; done \
      | sort -t . -k 1,1rn -k 2,2rn -k 3,3rn \
      | head -1
  )
  node_prefix="${NODE_VERSIONS}/${node_version_prefix}${node_prefix}"
  if [[ ! -d $node_prefix ]]; then
    log_error "Unable to find NodeJS version ($version) in ($NODE_VERSIONS)!"
    return 1
  fi
  if [[ ! -x $node_prefix/bin/node ]]; then
    log_error "Unable to load NodeJS binary (node) for version ($version) in ($NODE_VERSIONS)!"
    return 1
  fi
  # load_prefix prepends bin/, man/, lib/ etc. from the chosen prefix.
  load_prefix "$node_prefix"
  if [[ -z $via ]]; then
    log_status "Successfully loaded NodeJS $(node --version), from prefix ($node_prefix)"
  else
    log_status "Successfully loaded NodeJS $(node --version) (via $via), from prefix ($node_prefix)"
  fi
}
# Usage: use_nix [...]
#
# Load environment variables from `nix-shell`.
# If you have a `default.nix` or `shell.nix` these will be
# used by default, but you can also specify packages directly
# (e.g `use nix -p ocaml`).
#
use_nix() {
  # Run `direnv dump` inside nix-shell and adopt the resulting environment.
  direnv_load nix-shell --show-trace "$@" --run "$(join_args "$direnv" dump)"
  if [[ $# == 0 ]]; then
    # No explicit expression given: re-evaluate when the defaults change.
    watch_file default.nix
    watch_file shell.nix
  fi
}
# Usage: use_guix [...]
#
# Load environment variables from `guix environment`.
# Any arguments given will be passed to guix environment. For example,
# `use guix hello` would setup an environment with the dependencies of
# the hello package. To create an environment including hello, the
# `--ad-hoc` flag is used `use guix --ad-hoc hello`. Other options
# include `--load` which allows loading an environment from a
# file. For a full list of options, consult the documentation for the
# `guix environment` command.
use_guix() {
  # `--search-paths` prints shell exports for the requested environment.
  eval "$(guix environment "$@" --search-paths)"
}
## Load the global ~/.direnvrc if present
# The XDG location wins over the legacy ~/.direnvrc; stdout is redirected
# to stderr, presumably so the direnvrc cannot pollute captured output.
if [[ -f ${XDG_CONFIG_HOME:-$HOME/.config}/direnv/direnvrc ]]; then
  # shellcheck disable=SC1090
  source "${XDG_CONFIG_HOME:-$HOME/.config}/direnv/direnvrc" >&2
elif [[ -f $HOME/.direnvrc ]]; then
  # shellcheck disable=SC1090
  source "$HOME/.direnvrc" >&2
fi
|
<reponame>tomvodi/QTail<filename>src/gui/file_views/PlainTextEdit.cpp<gh_stars>1-10
/**
* @author <NAME> <<EMAIL>>
*
* @section LICENSE
* See LICENSE for more informations.
*
*/
#include <QPainter>
#include <QTextBlock>
#include <QScrollBar>
#include <QContextMenuEvent>
#include <QMenu>
#include <QTextDocumentFragment>
#include "PlainTextEdit.h"
#include "LineNumberArea.h"
// Reference digit glyph used to estimate the number-column width.
static const QLatin1Char MetricsBaseChar('9');
// The paddings of the line number in the line number area.
// This number represents the percentage of the width of the MetricsBaseChar
static const qreal LineAreaLeftPadding = 0.7;
static const qreal LineAreaRightPadding = 0.4;
PlainTextEdit::PlainTextEdit(QWidget *parent)
   : QPlainTextEdit(parent),
     m_lineNumberArea(new LineNumberArea(this))
{
   // Viewer widget: content is only ever set programmatically.
   setReadOnly(true);
   // Keep the line-number gutter in sync with document/viewport changes
   // and repaint the current-line highlight as the cursor moves.
   connect(this, &PlainTextEdit::blockCountChanged,
           this, &PlainTextEdit::updateLineNumberAreaWidth);
   connect(this, &PlainTextEdit::updateRequest,
           this, &PlainTextEdit::updateLineNumberArea);
   connect(this, &PlainTextEdit::cursorPositionChanged,
           this, &PlainTextEdit::highlightCurrentLine);
}
// Width (in pixels) needed by the gutter: left padding + right padding +
// one digit-width per digit of the largest line number.
quint16 PlainTextEdit::lineNumberAreaWidth() const
{
   // Digit count of the highest line number (at least one digit).
   const int digits = QString::number(qMax(1, blockCount())).length();
   QFontMetrics metrics(m_lineNumberArea->font());
   const int charWidth = metrics.width(MetricsBaseChar);
   return lineNumberAreaLeftPadding(metrics)
          + lineNumberAreaRightPadding(metrics)
          + charWidth * digits;
}
// Padding is expressed as a fraction of the base digit's width.
int PlainTextEdit::lineNumberAreaLeftPadding(const QFontMetrics &metrics) const
{
   return qRound(LineAreaLeftPadding * metrics.width(MetricsBaseChar));
}

int PlainTextEdit::lineNumberAreaRightPadding(const QFontMetrics &metrics) const
{
   return qRound(LineAreaRightPadding * metrics.width(MetricsBaseChar));
}
void PlainTextEdit::adjustViewportMarginsForLineNumberArea()
{
   // Reserve space on the left edge so text never underlaps the gutter.
   setViewportMargins(lineNumberAreaWidth(), 0, 0, 0);
}
void PlainTextEdit::lineNumberAreaPaintEvent(QPaintEvent *event)
{
   QPainter painter(m_lineNumberArea.data());
   painter.fillRect(event->rect(), Qt::lightGray);
   QTextBlock block = firstVisibleBlock();
   int blockNumber = block.blockNumber();
   int top = (int) blockBoundingGeometry(block).translated(contentOffset()).top();
   int bottom = top + (int) blockBoundingRect(block).height();
   // Thin separator line along the right edge of the gutter.
   painter.save();
   QPen pen(Qt::darkGray, 1.0);
   painter.setPen(pen);
   painter.drawLine(event->rect().width() - pen.width(), event->rect().top(),
                    event->rect().width() - pen.width(), event->rect().bottom());
   painter.restore();
   QFontMetrics numberAreaMetrics(m_lineNumberArea->font());
   int rightPadding = lineNumberAreaRightPadding(numberAreaMetrics);
   // Numbers are right-aligned, leaving the right padding free.
   int numberAreaWidth = lineNumberAreaWidth() - rightPadding;
   int numberAreaLineHeight = numberAreaMetrics.height();
   // Walk the visible blocks top-down; line numbers are 1-based.
   while (block.isValid() && top <= event->rect().bottom()) {
      if (block.isVisible() && bottom >= event->rect().top()) {
         QString number = QString::number(blockNumber + 1);
         painter.setPen(Qt::black);
         painter.drawText(0, top, numberAreaWidth, numberAreaLineHeight, Qt::AlignRight, number);
      }
      block = block.next();
      top = bottom;
      bottom = top + (int) blockBoundingRect(block).height();
      ++blockNumber;
   }
}
void PlainTextEdit::setLineNumberAreaFont(const QFont &font)
{
   m_lineNumberArea->setFont(font);
   // A new font changes the digit width, so the reserved margin must follow.
   adjustViewportMarginsForLineNumberArea();
}
void PlainTextEdit::scrollToTop()
{
   verticalScrollBar()->setValue(0);
}
void PlainTextEdit::scrollToBottom()
{
   verticalScrollBar()->setValue(verticalScrollBar()->maximum());
}
void PlainTextEdit::scrollToCursor()
{
   // Temporarily enable center-on-scroll so the cursor lands mid-viewport
   // rather than at the viewport edge.
   setCenterOnScroll(true);
   ensureCursorVisible();
   setCenterOnScroll(false);
}
void PlainTextEdit::resizeEvent(QResizeEvent *event)
{
   QPlainTextEdit::resizeEvent(event);
   // Keep the gutter glued to the left edge at full content height.
   QRect cr = contentsRect();
   m_lineNumberArea->setGeometry(QRect(cr.left(), cr.top(),
                                       lineNumberAreaWidth(), cr.height()));
}
// Shows the standard context menu, extended with highlight-rule actions
// when text is currently selected. (Refactored: the original jumped over
// the selection-dependent section with a goto; a plain if reads cleaner
// and avoids goto-over-initialization pitfalls.)
void PlainTextEdit::contextMenuEvent(QContextMenuEvent *event)
{
   QMenu *menu = createStandardContextMenu(event->pos());
   QTextCursor cursor = textCursor();
   if (cursor.hasSelection()) {
      const QString selectedText = cursor.selection().toPlainText();
      QAction *textSelectedAction = menu->addAction(tr("Create line highlight rule"));
      connect(textSelectedAction, &QAction::triggered, [this, selectedText] {
         HighlightingRule rule(selectedText);
         emit addLineHighlightRequested(rule);
      });
      textSelectedAction = menu->addAction(tr("Create word highlight rule"));
      connect(textSelectedAction, &QAction::triggered, [this, selectedText] {
         HighlightingRule rule(selectedText);
         emit addWordHighlightRequested(rule);
      });
   }
   menu->exec(event->globalPos());
   delete menu;
}
void PlainTextEdit::updateLineNumberAreaWidth(int newBlockCount)
{
   // newBlockCount is unused: the width is recomputed from blockCount().
   adjustViewportMarginsForLineNumberArea();
}
void PlainTextEdit::highlightCurrentLine()
{
   QList<QTextEdit::ExtraSelection> extraSelections;
   QTextEdit::ExtraSelection selection;
   // Light-yellow full-width band behind the line containing the cursor.
   QColor lineBackgroundColor = QColor(Qt::yellow).lighter(160);
   QColor lineForegroundColor = QColor(Qt::black);
   selection.format.setBackground(lineBackgroundColor);
   selection.format.setForeground(lineForegroundColor);
   selection.format.setProperty(QTextFormat::FullWidthSelection, true);
   selection.cursor = textCursor();
   // Clear any selection so the highlight spans exactly one line.
   selection.cursor.clearSelection();
   extraSelections.append(selection);
   setExtraSelections(extraSelections);
}
void PlainTextEdit::updateLineNumberArea(const QRect &rect, int dy)
{
   if (dy) {
      // Vertical scroll: shift the gutter pixels instead of repainting all.
      m_lineNumberArea->scroll(0, dy);
   } else {
      m_lineNumberArea->update(0, rect.y(), m_lineNumberArea->width(), rect.height());
   }
   // A full-viewport update implies the block count may have changed.
   if (rect.contains(viewport()->rect())) {
      updateLineNumberAreaWidth(0);
   }
}
|
// create a range from 0 to 9
let arr = Array.from({ length: 10 }, (_, i) => i);
console.log(arr); // => [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
|
# Print character $1 repeated $2 times (pad to width $2, then substitute).
PrintChars(){ printf "%${2}s" '' | tr ' ' "$1"; }
# Shifts the cursor toward the left edge so the text appears to fill in
# from the right, one character at a time.
# $1: the text being displayed
ToLeft(){
    # NOTE(review): the original read `tput cub $(($col - ))` — an arithmetic
    # syntax error using the undefined variable $col. Move the cursor left by
    # the terminal width minus the text length instead. (${#1} counts
    # characters, not display columns; full-width CJK glyphs occupy two
    # columns — TODO confirm the intended width handling.)
    tput cub $(( $(tput cols) - ${#1} ))
}
# Print a fixed Japanese sentence one character at a time with a short delay.
Wait(){
    local target=/tmp/work/chars.txt
    # NOTE(review): assumes /tmp/work already exists; the write fails otherwise.
    echo "日本語を一文字ずつ表示する。この度はお疲れ様でした。" > $target
    # read -N1 reads exactly one character (multibyte-aware in bash).
    while IFS= read -rN1 char;
    do
        printf "$char";
        sleep 0.05;
    done < $target
    #done < echo $target
}
Wait
# Print the text one character at a time via awk (FS='' splits per char).
Wait2(){
    local text='helloworld!!'
    #local text='日本語を一文字ずつ表示する。この度はお疲れ様でした。'
    # Fix: the original wrote `{ sleep 1; printf $i; }` inside awk, but awk
    # has no sleep builtin — that expression silently concatenated an empty
    # variable, so there was no delay at all. Use system() for the pause,
    # and a "%s" format so characters like % cannot be misinterpreted.
    echo "$text" | awk -v FS='' '{
        for (i = 1; i <= NF; i++) { printf "%s", $i; system("sleep 0.02") }
    }'
}
Wait2
# Print the text one character at a time using fold to split it.
Wait3(){
    #local text='helloworld!!'
    local text='日本語を一文字ずつ表示する。この度はお疲れ様でした。'
    #echo "$text" | awk -v FS='' '{
    #	for (i = 1; i <= NF; i++) print $i;
    #}'
    # NOTE(review): fold splits on bytes, not characters — it likely mangles
    # this multibyte UTF-8 text, and `-s1` looks like a typo for `-w1`.
    # Confirm against the fold man page before relying on this.
    echo "$text" | fold -s1
    for char in `echo "$text" | fold -s1`; do
        echo -n $char
        sleep 0.05
    done
}
Wait3
# Scroll $1 across the line: each iteration re-prints a one-character
# shorter slice, padded on both sides, then moves the cursor back up.
Wait4(){
    local cols=`tput cols`
    local count=0
    while [ $count -lt ${#1} ]; do
        # Fix: the original referenced the undefined variable $col in every
        # tput/padding call below, so all widths evaluated to empty/zero.
        # $cols (defined above) was clearly intended.
        tput cub $(($cols))
        local pre=`PrintChars ' ' $((cols))`
        local target=${1:0:$(($cols-$count))}
        local suf=`PrintChars ' ' $((cols))`
        echo "$pre$target$suf"
        tput cuu 1
        ((count++))
        sleep 0.05
    done
}
Wait4 "日本語を一文字ずつ表示する。この度はお疲れ様でした。"
|
<reponame>ivonildo-lopes/PedidoVenda<filename>src/main/java/com/algaworks/pedidovenda/validation/PedidoAlteradoEvent2.java<gh_stars>1-10
package com.algaworks.pedidovenda.validation;
import com.algaworks.pedidovenda.model.Pedido2;
/**
 * Immutable event payload carrying the {@link Pedido2} that was altered.
 */
public class PedidoAlteradoEvent2 {

	private final Pedido2 pedido;

	/**
	 * @param pedido the altered order, delivered as-is to observers
	 */
	public PedidoAlteradoEvent2(Pedido2 pedido) {
		this.pedido = pedido;
	}

	/** @return the altered order */
	public Pedido2 getPedido() {
		return pedido;
	}
}
|
#!/usr/bin/env bash
#
# Generate all protobuf bindings.
# Run from repository root.
#
# Initial script taken from etcd under the Apache 2.0 license
# File: https://github.com/coreos/etcd/blob/78a5eb79b510eb497deddd1a76f5153bc4b202d2/scripts/genproto.sh
set -e
set -u
# Guard: the relative paths below only resolve from the repository root.
if ! [[ "$0" =~ "scripts/genproto.sh" ]]; then
	echo "must be run from repository root"
	exit 255
fi
# Pin the protoc version so the generated code stays reproducible.
if ! [[ $(protoc --version) =~ "3.15.8" ]]; then
	echo "could not find protoc 3.15.8, is it installed + in PATH?"
	exit 255
fi
echo "installing plugins"
go mod download
INSTALL_PKGS="golang.org/x/tools/cmd/goimports github.com/gogo/protobuf/protoc-gen-gogofast"
for pkg in ${INSTALL_PKGS}; do
	go install "$pkg"
done
# gogoproto's module dir provides the include path for its .proto annotations.
GOGOPROTO_ROOT="$(go list -mod=readonly -f '{{ .Dir }}' -m github.com/gogo/protobuf)"
GOGOPROTO_PATH="${GOGOPROTO_ROOT}:${GOGOPROTO_ROOT}/protobuf"
DIRS="nflog/nflogpb silence/silencepb cluster/clusterpb"
echo "generating files"
for dir in ${DIRS}; do
	# Quote the directory — the original expanded ${dir} unquoted.
	pushd "${dir}"
	protoc --gogofast_out=:. -I=. \
		-I="${GOGOPROTO_PATH}" \
		*.proto
	# Strip the blank imports the gogofast generator leaves behind; `--`
	# consistently guards every glob against dash-prefixed filenames
	# (the original applied it to only one of the sed invocations).
	sed -i.bak -E 's/import _ \"gogoproto\"//g' -- *.pb.go
	sed -i.bak -E 's/import _ \"google\/protobuf\"//g' -- *.pb.go
	sed -i.bak -E 's/\t_ \"google\/protobuf\"//g' -- *.pb.go
	rm -f -- *.bak
	goimports -w *.pb.go
	popd
done
|
#!/bin/sh
# Appears to be a CMake/Xcode-generated build-phase shim: forwards the
# ZERO_CHECK rules build to make for the active $CONFIGURATION (the per-config
# makefile name is formed by appending $CONFIGURATION to ".make").
make -C /Users/lbajo/ros2_mod_ws/build/ament_cmake_export_interfaces -f /Users/lbajo/ros2_mod_ws/build/ament_cmake_export_interfaces/CMakeScripts/ZERO_CHECK_cmakeRulesBuildPhase.make$CONFIGURATION all
|
<gh_stars>1-10
package libs.trustconnector.scdp.smartcard.application.telecom.cat.sms;
import libs.trustconnector.scdp.util.*;
import libs.trustconnector.scdp.util.ByteArray;
import libs.trustconnector.scdp.util.Util;
/**
 * Builder/serializer for a secured SMS "command packet" used in
 * (U)SIM toolkit over-the-air messaging.
 *
 * NOTE(review): the SPI / KIc / KID / TAR / counter / padding layout
 * emitted by {@link #toBytes()} appears to follow the GSM 03.48 /
 * ETSI TS 102 225 secured packet structure — confirm against the spec
 * before changing any field widths or offsets.
 */
public class CommandPackage
{
    // Fallback checksum algorithm / key version applied when KID == 0
    // (see setChecksumRCDefault / setChecksumCCDefault, getChecksumAlg).
    protected int defChecksumAlg;
    protected int defChecksumKID;
    // Security Parameter Indicator. Stored byte-swapped relative to the
    // constructor argument — see the 5-arg constructor.
    protected short SPI;
    // Ciphering key indicator (KIc) and checksum key indicator (KID).
    protected byte KIc;
    protected byte KID;
    // Toolkit Application Reference — always 3 bytes.
    protected byte[] TAR;
    // Replay counter (5 bytes) and padding-octet count of the body.
    protected byte[] Count;
    protected int PadCount;
    // Redundancy check / cryptographic checksum / digital signature.
    protected byte[] RcCcDs;
    // Secured payload (the command data / APDUs).
    protected byte[] secData;
    // Encrypted counterparts of the fields above; when non-null they are
    // emitted by toBytes() in place of the plaintext versions.
    protected byte[] EncCount;
    protected int EncPadCount;
    protected byte[] EncRcCcDs;
    protected byte[] EncSecData;
    // Expected checksum length, set via the 3-arg setChecksum overload.
    protected int expChecksumLen;
    // --- Checksum type selector (low two bits of SPI) ---
    static final int CHECKSUM_TYPE_NON = 0;
    static final int CHECKSUM_TYPE_RC = 1;
    static final int CHECKSUM_TYPE_CC = 2;
    static final int CHECKSUM_TYPE_DS = 3;
    static final int CHECKSUM_TYPE_MASK = 3;
    // SPI bit 2: payload ciphering enabled.
    public static final int DATA_CIPHER = 4;
    // --- Checksum algorithm identifiers ---
    public static final int CHECKSUM_ALG_RC_CRC16 = 16;
    public static final int CHECKSUM_ALG_RC_CRC32 = 80;
    public static final int CHECKSUM_ALG_CC_DES = 1;
    public static final int CHECKSUM_ALG_CC_TDES_2KEY = 5;
    public static final int CHECKSUM_ALG_CC_TDES_3KEY = 9;
    public static final int CHECKSUM_ALG_CC_AES = 2;
    public static final int CHECKSUM_ALG_NON = 0;
    // --- Counter handling modes (SPI bits 3..4) ---
    public static final int COUNT_NON = 0;
    public static final int COUNT_CHECK_NO_CHECK = 8;
    public static final int COUNT_CHECK_HIGH = 16;
    public static final int COUNT_CHECK_ONE_HIGH = 24;
    protected static final int COUNT_TYPE_MASK = 24;
    // --- Proof-of-Receipt request modes (SPI bits 8..9) ---
    public static final int PoR_REQ_NON = 0;
    public static final int PoR_REQ_ALWAYS = 256;
    public static final int PoR_REQ_ON_ERR = 512;
    protected static final int PoR_REQ_MASK = 768;
    // --- Proof-of-Receipt checksum modes (SPI bits 10..11) ---
    static final int PoR_CHECKSUM_NON = 0;
    static final int PoR_CHECKSUM_RC = 1024;
    static final int PoR_CHECKSUM_CC = 2048;
    static final int PoR_CHECKSUM_DS = 3072;
    static final int PoR_CHECKSUM_MASK = 3072;
    // SPI bit 12: cipher the PoR response.
    protected static final int PoR_CIPHER = 4096;
    // SPI bit 13: PoR transport type.
    public static final int PoR_TYPE_DELIVER_REPORT = 0;
    public static final int PoR_TYPE_SUBMIT = 8192;
    protected static final int PoR_TYPE_MASK = 8192;
    // --- Cipher algorithm identifiers (low nibble of KIc) ---
    public static final int CIPHER_DES = 1;
    public static final int CIPHER_DES_ECB = 13;
    public static final int CIPHER_TDES_2KEY = 5;
    public static final int CIPHER_TDES_3KEY = 9;
    public static final int CIPHER_AES = 2;
    public static final int CIPHER_NULL = 240;
    public static final int CIPHER_MASK = 15;
    /** Creates an empty package with a zeroed 3-byte TAR. */
    public CommandPackage() {
        this.TAR = new byte[3];
    }
    /** Convenience constructor without command data. */
    public CommandPackage(final int spi, final int kic, final int kid, final int tar) {
        this(spi, kic, kid, tar, null);
    }
    /**
     * Full constructor.
     *
     * The 16-bit spi argument is byte-swapped before storage (low byte
     * becomes the high byte of the field); tar is packed big-endian into
     * 3 bytes. apdus, when given, is decoded via ByteArray.convert —
     * presumably a hex string; confirm against ByteArray's contract.
     */
    public CommandPackage(final int spi, final int kic, final int kid, final int tar, final String apdus) {
        final byte spi2 = (byte)(spi >> 8);
        final byte spi3 = (byte)spi;
        this.SPI = (short)((spi3 << 8 & 0xFF00) | (spi2 & 0xFF));
        this.KIc = (byte)kic;
        this.KID = (byte)kid;
        final byte[] tar_b = { (byte)(tar >> 16), (byte)(tar >> 8), (byte)tar };
        this.TAR = tar_b;
        if (apdus != null) {
            this.secData = ByteArray.convert(apdus);
        }
    }
    /** Selects the checksum algorithm with key version 0. */
    public void setChecksum(final int checksumAlg) {
        this.setChecksum(checksumAlg, 0);
    }
    /**
     * Selects RC (redundancy check) mode with KID = 0 so that the given
     * default algorithm applies implicitly (see getChecksumAlg).
     */
    public void setChecksumRCDefault(final int checksumAlg) {
        this.SPI &= 0xFFFFFFFC;
        this.SPI |= 0x1;
        this.KID = 0;
        this.defChecksumAlg = checksumAlg;
    }
    /** Same as setChecksumRCDefault but for CC (cryptographic checksum) mode. */
    public void setChecksumCCDefault(final int checksumAlg, final int keyVer) {
        this.SPI &= 0xFFFFFFFC;
        this.SPI |= 0x2;
        this.KID = 0;
        this.defChecksumAlg = checksumAlg;
        this.defChecksumKID = keyVer;
    }
    /**
     * Selects the checksum algorithm and key version, updating the SPI
     * type bits and KID. CRC16/CRC32 select RC mode; 0 clears; anything
     * else selects CC mode with the algorithm in KID's low nibble.
     */
    public void setChecksum(int checksumAlg, final int keyVer) {
        this.SPI &= 0xFFFFFFFC;
        switch (checksumAlg) {
            case 16:
            case 80: {
                this.SPI |= 0x1;
                checksumAlg >>= 4;
                this.KID = (byte)(checksumAlg & 0xF);
                break;
            }
            case 0: {
                this.KID = (byte)(keyVer << 4);
                break;
            }
            default: {
                this.SPI |= 0x2;
                this.KID = (byte)(keyVer << 4 | (checksumAlg & 0xF));
                break;
            }
        }
    }
    /**
     * Returns the effective checksum algorithm, resolving KID == 0 to
     * the stored default; 0 when no checksum mode is selected.
     */
    public int getChecksumAlg() {
        if ((this.SPI & 0x1) == 0x1) {
            if (this.KID == 0) {
                return this.defChecksumAlg;
            }
            // Reverses the >>4 packing done in setChecksum for RC mode.
            return (this.KID & 0xF) << 4 & 0xFF;
        }
        else {
            if ((this.SPI & 0x2) != 0x2) {
                return 0;
            }
            if (this.KID == 0) {
                return this.defChecksumAlg;
            }
            return this.KID & 0xF;
        }
    }
    /** Like setChecksum(int,int) but also records the expected checksum length. */
    public void setChecksum(final int checksumAlg, final int keyVer, final int expChecksumLen) {
        this.setChecksum(checksumAlg, keyVer);
        this.expChecksumLen = expChecksumLen;
    }
    /**
     * Selects payload ciphering: CIPHER_NULL (240) clears the cipher bit,
     * otherwise SPI bit 2 is set and KIc packs keyVer | algorithm nibble.
     */
    public void setCipher(final int cipherType, final int keyVer) {
        if (cipherType == 240) {
            this.SPI &= 0xFFFFFFFB;
            this.KIc = (byte)(keyVer << 4);
        }
        else {
            this.SPI |= 0x4;
            this.KIc = (byte)(keyVer << 4 | (cipherType & 0xF));
        }
    }
    /** Sets the Proof-of-Receipt request mode (SPI bits 8..9). */
    public void setPoRReq(final int PoRReqType) {
        this.SPI &= 0xFFFFFCFF;
        this.SPI |= (short)(PoRReqType & 0x300);
    }
    /** Sets the PoR checksum type bits only (no key version / KID update). */
    public void setPoRChecksum(final int checksumAlg) {
        this.SPI &= 0xFFFFF3FF;
        switch (checksumAlg) {
            case 16:
            case 80: {
                this.SPI |= 0x400;
                break;
            }
            case 0: {
                break;
            }
            default: {
                this.SPI |= 0x800;
                break;
            }
        }
    }
    /**
     * Sets the PoR checksum type AND rewrites KID with the key version /
     * algorithm, mirroring setChecksum(int,int).
     */
    public void setPoRChecksum(int checksumAlg, final int keyVer) {
        this.SPI &= 0xFFFFF3FF;
        switch (checksumAlg) {
            case 16:
            case 80: {
                this.SPI |= 0x400;
                checksumAlg >>= 4;
                this.KID = (byte)(checksumAlg & 0xF);
                break;
            }
            case 0: {
                this.KID = (byte)(keyVer << 4);
                break;
            }
            default: {
                this.SPI |= 0x800;
                this.KID = (byte)(keyVer << 4 | (checksumAlg & 0xF));
                break;
            }
        }
    }
    /** Enables or disables ciphering of the PoR response (SPI bit 12). */
    public void setPoRCipher(final boolean bPoRCipher) {
        if (bPoRCipher) {
            this.SPI |= 0x1000;
        }
        else {
            this.SPI &= 0xFFFFEFFF;
        }
    }
    /** Sets the PoR transport type (deliver-report vs submit, SPI bit 13). */
    public void setPoRType(final int PoRType) {
        this.SPI &= 0xFFFFDFFF;
        this.SPI |= (short)(PoRType & 0x2000);
    }
    public short getSPI() {
        return this.SPI;
    }
    public byte getKIc() {
        return this.KIc;
    }
    public byte getKID() {
        return this.KID;
    }
    /** Sets the TAR from a string via ByteArray.convert (presumably hex). */
    public void setTAR(final String TAR) {
        this.TAR = ByteArray.convert(TAR);
    }
    /** Sets the TAR from a defensive copy of the given bytes. */
    public void setTAR(final byte[] TAR) {
        this.TAR = TAR.clone();
    }
    /** Returns the internal TAR array (NOT a copy). */
    public byte[] getTAR() {
        return this.TAR;
    }
    /** Sets the counter from an int (packed to 5 bytes) plus its check mode. */
    public void setCount(final int value, final int type) {
        this.SPI &= 0xFFFFFFE7;
        this.SPI |= (short)(type & 0x18);
        this.Count = Util.intToBytes(value, 5);
    }
    /** Sets the counter bytes directly (no copy) plus its check mode. */
    public void setCount(final byte[] count, final int type) {
        this.SPI &= 0xFFFFFFE7;
        this.SPI |= (short)(type & 0x18);
        this.Count = count;
    }
    /**
     * Sets only the counter mode; COUNT_NON gets a zeroed 5-byte counter,
     * other modes clear the counter (to be supplied later).
     */
    public void setCount(final int type) {
        this.SPI &= 0xFFFFFFE7;
        this.SPI |= (short)(type & 0x18);
        if (type == 0) {
            this.Count = new byte[5];
        }
        else {
            this.Count = null;
        }
    }
    /** Returns the internal counter array (NOT a copy); may be null. */
    public byte[] getCount() {
        return this.Count;
    }
    /** Package-private: sets the counter from a defensive copy. */
    void setCount(final byte[] count) {
        this.Count = count.clone();
    }
    /** Sets (a copy of) the checksum bytes, or clears them when null. */
    public void setChecksum(final byte[] checksum) {
        if (checksum != null) {
            this.RcCcDs = checksum.clone();
        }
        else {
            this.RcCcDs = null;
        }
    }
    /**
     * Bit-inverts the stored checksum in place (encrypted one preferred) —
     * presumably a helper for negative/tamper tests.
     */
    public void setChecksumWrong() {
        if (this.EncRcCcDs != null) {
            ByteArray.not(this.EncRcCcDs, 0, this.EncRcCcDs, 0, this.EncRcCcDs.length);
        }
        else if (this.RcCcDs != null) {
            ByteArray.not(this.RcCcDs, 0, this.RcCcDs, 0, this.RcCcDs.length);
        }
    }
    /**
     * Returns a copy of the checksum bytes.
     * NOTE(review): throws NullPointerException when no checksum was set.
     */
    public byte[] getChecksum() {
        return this.RcCcDs.clone();
    }
    /** Sets (a copy of) the payload and invalidates all encrypted fields. */
    public void setData(final byte[] data) {
        this.secData = data.clone();
        this.EncCount = null;
        this.EncPadCount = 0;
        this.EncRcCcDs = null;
        this.EncSecData = null;
    }
    /** Returns a copy of the payload (NPE when unset — see getChecksum). */
    public byte[] getData() {
        return this.secData.clone();
    }
    public int getDataLen() {
        return this.secData.length;
    }
    /**
     * Serializes the packet: CPL (2 bytes), CHL (1), SPI (2, low byte
     * first), KIc, KID, TAR, counter, padding count, checksum, payload —
     * preferring the encrypted variant of each field when present.
     * The fixed header length 13 = SPI(2)+KIc+KID+TAR(3)+Count(5)+PadCount(1).
     */
    public byte[] toBytes() {
        final ByteArray b = new ByteArray();
        int chl = 13;
        if (this.RcCcDs != null) {
            chl += this.RcCcDs.length;
        }
        byte[] data = this.EncSecData;
        if (data == null) {
            data = this.secData;
        }
        final int cpl = 1 + chl + ((data == null) ? 0 : data.length);
        b.append(cpl, 2);
        b.append(chl, 1);
        b.append((byte)this.SPI);
        b.append((byte)(this.SPI >> 8));
        b.append(this.KIc);
        b.append(this.KID);
        b.append(this.TAR);
        if (this.EncCount != null) {
            b.append(this.EncCount);
        }
        else {
            b.append(this.Count);
        }
        if (this.EncCount != null) {
            b.append(this.EncPadCount, 1);
        }
        else {
            b.append(this.PadCount, 1);
        }
        if (this.EncRcCcDs != null) {
            b.append(this.EncRcCcDs);
        }
        else {
            b.append(this.RcCcDs);
        }
        if (this.EncSecData != null) {
            b.append(this.EncSecData);
        }
        else {
            b.append(this.secData);
        }
        return b.toBytes();
    }
    /** NOTE(review): unimplemented stub — always returns null. */
    public byte[] toCATTPFormat() {
        return null;
    }
    int getExpChecksumLen() {
        return this.expChecksumLen;
    }
    /** Package-private: records the number of padding octets in the body. */
    void setPCount(final int paddingCount) {
        this.PadCount = paddingCount;
    }
    /**
     * Builds the byte string over which the checksum is computed: the
     * serialized header (with CPL/CHL reflecting the given checksum
     * length) plus payload plus zero padding.
     */
    byte[] getChecksumData(final int checksumLen) {
        final ByteArray checksumData = new ByteArray();
        final int chl = 13 + checksumLen;
        final int cpl = 1 + chl + this.secData.length + this.PadCount;
        checksumData.append(cpl, 2);
        checksumData.append(chl, 1);
        checksumData.append((byte)this.SPI);
        checksumData.append((byte)(this.SPI >> 8));
        checksumData.append(this.KIc);
        checksumData.append(this.KID);
        checksumData.append(this.TAR);
        checksumData.append(this.Count);
        checksumData.append((byte)this.PadCount);
        checksumData.append(this.secData);
        if (this.PadCount > 0) {
            checksumData.append(new byte[this.PadCount]);
        }
        return checksumData.toBytes();
    }
    /**
     * Builds the plaintext region that is subject to encryption:
     * counter, padding count, checksum, payload.
     */
    byte[] getEncOrgData() {
        final ByteArray cipherDataBytes = new ByteArray();
        cipherDataBytes.append(this.Count);
        cipherDataBytes.append((byte)this.PadCount);
        cipherDataBytes.append(this.RcCcDs);
        cipherDataBytes.append(this.secData);
        return cipherDataBytes.toBytes();
    }
    /**
     * Splits an encrypted blob back into the encrypted counter (5 bytes),
     * padding count, optional checksum (sized by the plaintext checksum),
     * and payload — the inverse layout of getEncOrgData().
     */
    void setEncData(final byte[] encData) {
        int offset = 0;
        System.arraycopy(encData, offset, this.EncCount = new byte[5], 0, 5);
        offset += 5;
        this.EncPadCount = encData[offset];
        ++offset;
        if (this.RcCcDs != null) {
            System.arraycopy(encData, offset, this.EncRcCcDs = new byte[this.RcCcDs.length], 0, this.RcCcDs.length);
            offset += this.RcCcDs.length;
        }
        System.arraycopy(encData, offset, this.EncSecData = new byte[encData.length - offset], 0, this.EncSecData.length);
    }
    public int getDefaultChecksumAlg() {
        return this.defChecksumAlg;
    }
    public int getDefaultChecksumKID() {
        return this.defChecksumKID;
    }
    /** NOTE(review): returns an empty string — looks like a placeholder. */
    @Override
    public String toString() {
        final String res = "";
        return res;
    }
}
|
from os import system
def runPhishing(social):
    """Reset the phishing server's web root.

    Wipes every file under base/Server/www/ and drops a fresh, empty
    cat.txt placeholder there.  The ``social`` argument is currently
    unused here -- presumably the site/template selector; confirm with
    callers before removing it.
    """
    reset_cmd = 'sudo rm -Rf base/Server/www/*.* && touch base/Server/www/cat.txt'
    system(reset_cmd)
|
<reponame>TerriaJS/natmap-catalog-v8
"use strict";
const sortItemsByName = require('../helpers/sortItemsByName');

/**
 * Sorts a catalog member tree by name, depth-first: every nested
 * `members` array is sorted first, then the top-level array itself is
 * passed through sortItemsByName and the sorted result returned.
 */
function recursivelySortMembersByName(members) {
  for (const member of members) {
    if (member.members != null) {
      recursivelySortMembersByName(member.members);
    }
  }
  return sortItemsByName(members);
}

module.exports = recursivelySortMembersByName;
|
# Unit test for the md5_file helpers.
# Checks the well-known MD5 of the empty string, then of "foo", first via
# the sub::md5_file subshell variant, then via func::md5_file which
# returns through an out-parameter.
test::md5_file() {
  # Empty file => canonical MD5 of the empty string.
  touch "${TMPDIR}/md5_file"
  EXPECT_EQ 'd41d8cd98f00b204e9800998ecf8427e' \
    "$(sub::md5_file "${TMPDIR}/md5_file")"
  # 'foo' written without a trailing newline (echo -n) => MD5 of "foo".
  echo -n 'foo' >"${TMPDIR}/md5_file"
  EXPECT_EQ 'acbd18db4cc2f85cedef654fccc4a4d8' \
    "$(sub::md5_file "${TMPDIR}/md5_file")"
  # Same digest via the out-parameter variant.
  local result=''
  func::md5_file result "${TMPDIR}/md5_file"
  EXPECT_EQ 'acbd18db4cc2f85cedef654fccc4a4d8' "${result}"
}
|
/**
* Quiz Schema Utilities.
*
* Contains various utilities for use with the quiz database schema.
*
* @module quizUtils
* @file This file defines utilites for creating, manipulating, and
* processing quizzes defined by the quiz schema.
* @author syoung908
* @version 1.0.0
* @since 1.0.0
*/
/**
* processQuiz.
*
* Compares the user's answers with the answer key and returns the user's score
* and the correct answer for any incorrect answers.
*
* @since 1.0.0
*
* @param {Object} userAnswers An object containing questionId, answer
* pairs that the user submitted.
* @param {Map} answerKey A map with the questionId as the key and the
* correct answer as the value.
*
* @return {Object} The number of correct answers, total questions, and the
* correct answers for any questions the user answered
* incorrectly as an object with questionId, answerPairs.
*/
exports.processQuiz = (userAnswers, answerKey) => {
let numCorrect = 0;
let corrections = {};
let answerKeyCopy = new Map(answerKey);
//Check Answers
for (let id in userAnswers) {
if (answerKey.get(id) == userAnswers[id]) {
numCorrect++;
} else if (answerKey.has(id)){
corrections[id] = answerKey.get(id);
}
answerKeyCopy.delete(id);
}
// Add missing answers to corrections list
if (answerKey.size !== 0){
answerKeyCopy.forEach((value, key)=> {
corrections[key] = value;
})
}
// Return results as object
return({
correct: numCorrect,
total: answerKey.size,
corrections: corrections
});
}
|
<filename>view/errors.go
package view
// Redirect is an error value whose message is the target of a
// temporary redirect.
type Redirect string

// Error implements the error interface; the string itself is the message.
// (Receivers renamed from the non-idiomatic `self` to short Go-style names.)
func (r Redirect) Error() string {
	return string(r)
}

// PermanentRedirect is an error value for a permanent redirect.
type PermanentRedirect string

func (p PermanentRedirect) Error() string {
	return string(p)
}

// NotFound is an error value describing a missing resource.
type NotFound string

func (n NotFound) Error() string {
	return string(n)
}

// Forbidden is an error value describing a denied request.
type Forbidden string

func (f Forbidden) Error() string {
	return string(f)
}
def update_pin(users: dict):
    """Interactively re-key a user record from an old PIN to a new PIN.

    Prompts on stdin for the previous and the new PIN.  When the previous
    PIN exists as a key in ``users`` and differs from the new one, the
    record is moved under the new PIN key.  The (possibly updated) dict
    is returned in every case.
    """
    previous_pin_inp = int(input('Please Enter previous PIN: '))
    new_pin_inp = int(input('Please Enter new PIN: '))
    if previous_pin_inp != new_pin_inp:
        if previous_pin_inp in users:
            # Move the record: pop under the old key, reinsert under the
            # new one.  (Both values are already ints from the input()
            # parsing above; the original re-wrapped them in int() again.)
            users[new_pin_inp] = users.pop(previous_pin_inp)
            print(users)
            print('Dear user, your PIN has been updated successfully..!')
        else:
            print('Invalid PIN')
    else:
        # NOTE(review): this message reads oddly for the "old PIN equals
        # new PIN" branch -- presumably it should say the PINs must
        # differ; confirm intended wording with the caller/UI.
        print('Card is necessary')
    return users
|
def longest_consecutive_ones(s: str) -> int:
    """Return the length of the longest run of consecutive '1' characters.

    Any character other than '1' breaks the current run.  An empty string
    (or one with no '1's) yields 0.
    """
    best = 0
    run = 0
    for ch in s:
        # Extend the run on '1', otherwise start over from zero.
        run = run + 1 if ch == '1' else 0
        if run > best:
            best = run
    return best
|
#!/usr/bin/env bash
# Base16 Atelier Lakeside Light - Mate Terminal color scheme install script
# Bram de Haan (http://atelierbramdehaan.nl)

# Allow callers to override the profile identity and dconf locations.
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 Atelier Lakeside Light 256"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-atelier-lakeside-light-256"
[[ -z "$DCONFTOOL" ]] && DCONFTOOL=dconf
[[ -z "$BASE_KEY" ]] && BASE_KEY=/org/mate/terminal
PROFILE_KEY="$BASE_KEY/profiles/$PROFILE_SLUG"
# Current default profile (quotes stripped) is used as the template.
DEFAULT_SLUG=$("$DCONFTOOL" read "$BASE_KEY/global/default-profile" | tr -d \')
DEFAULT_KEY="$BASE_KEY/profiles/$DEFAULT_SLUG"
# dcopy SRC DST: clone an entire dconf subtree.
dcopy() {
local from="$1"; shift
local to="$1"; shift
"$DCONFTOOL" dump "$from/" | "$DCONFTOOL" load "$to/"
}
# dset KEY VAL: write one key under the new profile.
dset() {
local key="$1"; shift
local val="$1"; shift
"$DCONFTOOL" write "$PROFILE_KEY/$key" "$val"
}
# glist_append KEY VAL: append VAL to a dconf string-list, de-duplicating
# any existing occurrence first.
glist_append() {
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$DCONFTOOL" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "'$val'"
} | head -c-1 | tr "\n" ,
)"
"$DCONFTOOL" write "$key" "[$entries]"
}
# Seed the new profile from the current default, then overwrite colors.
if [ -n "$DEFAULT_SLUG" ]; then
dcopy "$DEFAULT_KEY" "$PROFILE_KEY"
fi
dset visible-name "'$PROFILE_NAME'"
dset palette "'#ebf8ff:#d22d72:#568c3b:#8a8a0f:#257fad:#6b6bb8:#2d8f6f:#516d7b:#7195a8:#d22d72:#568c3b:#8a8a0f:#257fad:#6b6bb8:#2d8f6f:#161b1d'"
dset background-color "'#ebf8ff'"
dset foreground-color "'#516d7b'"
dset bold-color "'#516d7b'"
dset bold-color-same-as-fg "true"
dset use-theme-colors "false"
dset use-theme-background "false"
# Register the profile so it shows up in the terminal's profile list.
glist_append "$BASE_KEY/global/profile-list" "$PROFILE_SLUG"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONFTOOL
unset BASE_KEY
unset PROFILE_KEY
unset DEFAULT_SLUG
unset DEFAULT_KEY
|
<reponame>phvictorino/scheduler-api
import { Body, Controller, Get, Post } from '@nestjs/common';
import { SchedulesService } from './schedules.service';
import { Schedule } from '../../collections/schedule';
import { CreateScheduleDTO } from './dtos/create-schedule.dto';
@Controller('schedules')
export class SchedulesController {
  // Service injected by Nest's DI container; holds all persistence logic.
  constructor(private readonly schedulesService: SchedulesService) {}

  // GET /schedules/ -- returns every stored schedule.
  @Get('/')
  getAllSchedules(): Promise<Schedule[]> {
    return this.schedulesService.findAllSchedules();
  }

  // POST /schedules -- validates the DTO body and persists a new schedule.
  @Post()
  saveSchedule(@Body() createDto: CreateScheduleDTO): Promise<Schedule> {
    return this.schedulesService.createSchedule(createDto);
  }
}
|
# Package recipe (ppkg-style DSL) for MIT Kerberos 5.
package set summary "Network authentication protocol"
package set webpage "https://web.mit.edu/kerberos"
package set src.url "https://kerberos.org/dist/krb5/1.18/krb5-1.18.3.tar.gz"
package set src.sum "e61783c292b5efd9afb45c555a80dd267ac67eebabca42185362bee6c4fbd719"
package set sourced "src"
package set bsystem "configure"
package set dep.pkg "readline openssl berkeley-db libglob"
package set ldflags "-lglob -lncurses"
# Pre-configure fixups: drop the macOS-style linker flag line and point
# the db2 KDB plugin at the BDB 1.85-compat header shipped as db_185.h.
prepare() {
sed_in_place '/search_paths_first"/d' configure && {
for item in $(grep '<db.h>' -rl plugins/kdb/db2)
do
sed_in_place 's|<db.h>|<db_185.h>|g' "$item" || return 1
done
}
}
build() {
# getifaddrs() only exists on Android API >= 24:
# int getifaddrs(struct ifaddrs** __list_ptr) __INTRODUCED_IN(24);
if [ "$TARGET_OS_VERS" -lt 24 ] ; then
ac_cv_header_ifaddrs_h=no
else
ac_cv_header_ifaddrs_h=yes
fi
# The ac_cv_* / krb5_cv_* assignments pre-seed autoconf cache entries
# that cannot be probed when cross-compiling.
configure \
--disable-static \
--enable-dns-for-realm \
--without-system-verto \
--with-readline \
--with-netlib=-lc \
--with-size-optimizations \
--with-system-db \
krb5_cv_attr_constructor_destructor='yes,yes' \
ac_cv_func_regcomp=yes \
ac_cv_printf_positional=yes \
ac_cv_lib_readline_main=yes \
ac_cv_header_ifaddrs_h="$ac_cv_header_ifaddrs_h"
}
|
#!/bin/bash
# Setup postgres 9.3 or 11,
# install wal-e,
# run backup fetch
echo -e "\n$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0)"
# Check previous failure flag
if [ -f "$encd_failed_flag" ]; then
    echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Skipping: encd_failed_flag exits"
    exit 1
fi
echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Running"
# Script Below
# App-only builds carry no postgres; nothing to do for them.
if [ "$ENCD_BUILD_TYPE" == 'app' ] || [ "$ENCD_BUILD_TYPE" == 'app-es' ]; then
    echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Skipping install for no pg build"
    exit 0
fi
# First positional arg becomes standby_mode in recovery.conf (see below).
standby_mode="$1"
### Variables
AWS_CREDS_DIR='/var/lib/postgresql/.aws'
AWS_PROFILE='default'
# Postgres config source (repo) and destination (cluster conf dir).
PG_CONF_DEST="/etc/postgresql/$ENCD_PG_VERSION/main"
PG_CONF_SRC="$ENCD_CC_DIR/configs/postgresql"
PG_DATA="/var/lib/postgresql/$ENCD_PG_VERSION/main"
# wal-e lives in its own virtualenv with an env-dir for credentials.
WALE_DIR='/opt/pg-wal-e'
WALE_VENV="$WALE_DIR/.pyenv-wal-e"
WALE_BIN="$WALE_VENV/bin"
WALE_ENV='/etc/wal-e.d/env'
WALE_REQS_SRC="$ENCD_SCRIPTS_DIR/app-pg-wale-pyreqs.txt"
WALE_REQS_DST="$WALE_DIR/app-pg-wale-pyreqs.txt"
### Functions
# copy_with_permission SRC_DIR DEST_DIR FILENAME
# Copies SRC_DIR/FILENAME to DEST_DIR/FILENAME as root, then hands
# ownership of the copy to the postgres user.
function copy_with_permission {
    # `local` keeps these out of the caller's scope (the original leaked
    # src_file/dest_file as globals; nothing else in this script reads them).
    local src_file="$1/$3"
    local dest_file="$2/$3"
    sudo -u root cp "$src_file" "$dest_file"
    sudo -u root chown postgres:postgres "$dest_file"
}
# append_with_user LINE USER DEST
# Appends LINE to the file DEST, writing as USER via sudo + tee -a.
function append_with_user {
    local line="$1"
    local user="$2"
    local dest="$3"
    # Quote the expansions so user/path values can never word-split or glob.
    echo "$line" | sudo -u "$user" tee -a "$dest"
}
### Configure
echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Setup aws keys for wal-e"
# Download postgres demo aws keys
pg_keys_dir='/home/ubuntu/pg-aws-keys'
mkdir "$pg_keys_dir"
aws s3 cp --region=us-west-2 --recursive s3://encoded-conf-prod/pg-aws-keys "$pg_keys_dir"
if [ ! -f "$pg_keys_dir/credentials" ]; then
    echo -e "\n\t$ENCD_INSTALL_TAG $(basename $0) ENCD FAILED: ubuntu home pg aws creds"
    # Build has failed
    touch "$encd_failed_flag"
    exit 1
fi
## Copy postgres aws to home
# NOTE(review): this assignment and credentials check duplicate the block
# directly above verbatim -- presumably a copy/paste leftover; confirm
# and collapse to one check.
pg_keys_dir='/home/ubuntu/pg-aws-keys'
if [ ! -f "$pg_keys_dir/credentials" ]; then
    echo -e "\n\t$ENCD_INSTALL_TAG $(basename $0) ENCD FAILED: ubuntu home pg aws creds"
    # Build has failed
    touch "$encd_failed_flag"
    exit 1
fi
sudo -u root mkdir /var/lib/postgresql/.aws
sudo -u root cp /home/ubuntu/pg-aws-keys/* ~postgres/.aws/
sudo -u root chown -R postgres:postgres /var/lib/postgresql/.aws/
# Add ssh keys to postgres user
sudo -u postgres mkdir /var/lib/postgresql/.ssh
sudo -u root cp /home/ubuntu/.ssh/authorized_keys /var/lib/postgresql/.ssh/authorized_keys
sudo -u root chown -R postgres:postgres /var/lib/postgresql/.ssh/
echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Setup postgres configuration"
## Copy pg confs from encoded repo to pg conf dir
for filename in 'custom.conf' 'demo.conf' 'master.conf' 'recovery.conf'; do
    copy_with_permission "$PG_CONF_SRC" "$PG_CONF_DEST" "$filename"
done
# The S3 prefix is stored in its own file so it can be copied into the
# wal-e env-dir below.
append_with_user "$ENCD_WALE_S3_PREFIX" 'postgres' "$PG_CONF_DEST/WALE_S3_PREFIX"
## pg conf master.conf:
# Create the archive_command variable with agrument and store as string
#
# 'In archive_command, %p is replaced by the path name of the file to archive,
#  while %f is replaced by only the file name'
#  -https://www.postgresql.org/docs/11/continuous-archiving.html
#
wale_push_cmd="archive_command = '\
$WALE_BIN/envdir $WALE_ENV \
$WALE_BIN/wal-e \
wal-push \"%p\"\
'"
if [ "$ENCD_ROLE" == 'candidate' ]; then
    # Only production needs wal-e push ability?  Move to ENCD_ROLE='candidate'?"
    append_with_user "$wale_push_cmd" 'postgres' "$PG_CONF_DEST/master.conf"
fi
## pg conf recovery.conf
# Mirror-image restore_command used when recovering from the archive.
wale_fetch_cmd="restore_command = '\
$WALE_BIN/envdir $WALE_ENV \
$WALE_BIN/wal-e \
wal-fetch \"%f\" \"%p\"\
'"
standby_mode_cmd="standby_mode = $standby_mode"
append_with_user "$wale_fetch_cmd" 'postgres' "$PG_CONF_DEST/recovery.conf"
append_with_user "$standby_mode_cmd" 'postgres' "$PG_CONF_DEST/recovery.conf"
# Set db to recovery mode
sudo -u postgres ln -s "$PG_CONF_DEST/recovery.conf" "$PG_DATA/"
## pg conf postgresql.conf
include_custom="include 'custom.conf'"
append_with_user "$include_custom" 'postgres' "$PG_CONF_DEST/postgresql.conf"
# Non-candidate (demo) machines additionally include demo.conf.
if [ ! "$ENCD_ROLE" == 'candidate' ]; then
    include_demo="include 'demo.conf'"
    append_with_user "$include_demo" 'postgres' "$PG_CONF_DEST/postgresql.conf"
fi
### Create db
echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Create encode db"
sudo -u postgres createuser encoded
sudo -u postgres createdb --owner=encoded encoded
### Wale-E
echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Install wal-e"
## Create Wal-e ENV - python3 only
# The env-dir holds one file per environment variable, consumed by envdir.
sudo -u root mkdir -p "$WALE_ENV"
sudo -u root chown postgres:postgres "$WALE_ENV"
for filename in 'AWS_ACCESS_KEY_ID' 'AWS_SECRET_ACCESS_KEY' 'AWS_REGION'; do
    copy_with_permission "$AWS_CREDS_DIR" "$WALE_ENV" "$filename"
done
copy_with_permission "$PG_CONF_DEST" "$WALE_ENV" 'WALE_S3_PREFIX'
## Install wal-e
sudo -u root mkdir -p "$WALE_DIR"
sudo -u root chown postgres:postgres "$WALE_DIR"
sudo -u root cp "$WALE_REQS_SRC" "$WALE_REQS_DST"
sudo -u root chown postgres:postgres "$WALE_REQS_DST"
sudo -H -u postgres /usr/bin/python3.6 -m venv "$WALE_VENV"
if [ ! -f "$WALE_BIN/pip" ]; then
    echo -e "\n\t$ENCD_INSTALL_TAG $(basename $0) ENCD FAILED: Wale bin does not exist"
    touch "$encd_failed_flag"
    exit 1
fi
sudo -H -u postgres "$WALE_BIN/pip" install pip==21.1.2 setuptools==57.0.0 --upgrade
sudo -H -u postgres "$WALE_BIN/pip" install wheel
sudo -H -u postgres "$WALE_BIN/pip" install -r "$WALE_REQS_DST"
sudo -u postgres git clone --branch v1.1.1 https://github.com/wal-e/wal-e.git "$WALE_DIR/wal-e"
sudo -H -u postgres "$WALE_BIN/pip" install -e "$WALE_DIR/wal-e"
### Postgres
echo -e "\n\t$APP_WRAPPER$ENCD_INSTALL_TAG $(basename $0) Do initial wal-e backup-fetch"
## Update db from wale backup
# Cluster must be stopped while backup-fetch rewrites $PG_DATA.
sudo -u postgres pg_ctlcluster 11 main stop
sudo -u postgres "$WALE_BIN/envdir" "$WALE_ENV" "$WALE_BIN/wal-e" backup-fetch "$PG_DATA" LATEST
## Restart
# ENCD_PG_OPEN=true opens postgres to all hosts with trust auth --
# presumably demo-only; confirm this is never set on production.
if [ "$ENCD_PG_OPEN" == 'true' ]; then
    append_with_user "listen_addresses='*'" 'postgres' "$PG_CONF_DEST/postgresql.conf"
    append_with_user "host all all 0.0.0.0/0 trust" 'postgres' "$PG_CONF_DEST/pg_hba.conf"
fi
sudo -u postgres pg_ctlcluster 11 main start
## Wait for psql to come up
$ENCD_SCRIPTS_DIR/app-pg-status.sh
if [ -f "$encd_failed_flag" ]; then
    echo -e "\n\t$ENCD_INSTALL_TAG $(basename $0) ENCD FAILED: App pg status"
    exit 1
fi
|
<gh_stars>0
/**
* @module enum
*/
// 基础类型空间占用
// Byte widths of primitive wire types.
export const enum ByteLength {
  INT8 = 1,
  UINT8 = 1,
  INT16 = 2,
  UINT16 = 2,
  INT32 = 4,
  UINT32 = 4,
  // NOTE(review): "INI64" looks like a typo for INT64.  The correctly
  // spelled member is added below; the old name is kept so existing
  // call sites keep compiling.
  INI64 = 8,
  INT64 = 8,
  UINT64 = 8,
  FLOAT32 = 4,
  FLOAT64 = 8
}
|
<reponame>AIJoeDev/nuxt-firebase-ssr<gh_stars>1-10
// derived from the js-cookie source files.

/**
 * Parses a raw Cookie header string into a "jar": an array of
 * single-entry objects ({ name: value }), JSON-decoding each value when
 * possible and otherwise keeping it as a string.  When `key` is given,
 * the first matching cookie's value is returned instead; if no cookie
 * matches, the whole jar is returned.
 */
export default function(rawCookies, key) {
  rawCookies = rfc6265Converter(rawCookies).split(';');
  const jar = [];
  rawCookies.forEach(c => {
    // Split on the FIRST '=' only: cookie values (e.g. base64 payloads,
    // signed tokens) may themselves contain '=' and must not be
    // truncated, as the previous split('=') did.
    const eq = c.indexOf('=');
    const name = (eq === -1 ? c : c.slice(0, eq)).trim();
    const rawValue = eq === -1 ? undefined : c.slice(eq + 1).trim();
    let content;
    try {
      content = JSON.parse(rawValue);
    } catch (e) {
      // Not JSON -- keep the raw string (or undefined for a bare token).
      content = rawValue;
    }
    const baked = {};
    baked[name] = content;
    jar.push(baked);
  });
  if (key) {
    for (const cookie of jar) {
      if (cookie[key]) return cookie[key];
    }
  }
  return jar;
}

// Percent-decodes RFC 6265 encoded octets in the raw cookie string.
const rfc6265Converter = function(value) {
  return value.replace(/(%[\dA-F]{2})+/gi, decodeURIComponent);
};
|
#!/bin/bash
# Function to execute command inside Docker container and check for expected output
execute_and_verify() {
    local container_name=$1
    local command=$2
    local expected_output=$3
    # Execute command inside the container and capture the output
    # (runs as root through a login shell; -m preserves the environment).
    output=$(docker exec "${container_name}" /bin/su - root -mc "${command}")
    # Check if the expected output is present in the captured output
    # (substring match, so partial lines also pass).
    if [[ $output == *"${expected_output}"* ]]; then
        echo "Test passed: Expected output found"
        return 0 # Success status
    else
        echo "Test failed: Expected output not found"
        return 1 # Failure status
    fi
}
# Example usage
container="your_container_name"
command="cat /etc/krb5.conf | grep 'domain-2'"
expected="domain-2"
# Call the function with the provided parameters
execute_and_verify "$container" "$command" "$expected"
|
class Container:
    """Lightweight handle that records a single container's identifier."""

    def __init__(self, container_id):
        # Identifier compared against item ids in availability lookups.
        self.container_id = container_id
class LabManager:
    """Tracks the containers and tip racks available to one instrument."""

    def __init__(self, instrument, containers):
        # Mount point is taken directly from the instrument.
        self.mount = instrument.mount
        # Wrap every raw id in a Container handle.
        self.containers = [Container(item) for item in containers]
        self.tip_racks = [Container(item) for item in instrument.tip_racks]

    def check_availability(self, item_id):
        """Return True when item_id matches any known container or tip rack."""
        known = self.containers + self.tip_racks
        return any(entry.container_id == item_id for entry in known)
|
package leetCode;//给定两个字符串 s 和 p,找到 s 中所有 p 的 异位词 的子串,返回这些子串的起始索引。不考虑答案输出的顺序。
//
// 异位词 指字母相同,但排列不同的字符串。
//
//
//
// 示例 1:
//
//
//输入: s = "cbaebabacd", p = "abc"
//输出: [0,6]
//解释:
//起始索引等于 0 的子串是 "cba", 它是 "abc" 的异位词。
//起始索引等于 6 的子串是 "bac", 它是 "abc" 的异位词。
//
//
// 示例 2:
//
//
//输入: s = "abab", p = "ab"
//输出: [0,1,2]
//解释:
//起始索引等于 0 的子串是 "ab", 它是 "ab" 的异位词。
//起始索引等于 1 的子串是 "ba", 它是 "ab" 的异位词。
//起始索引等于 2 的子串是 "ab", 它是 "ab" 的异位词。
//
//
//
//
// 提示:
//
//
// 1 <= s.length, p.length <= 3 * 104
// s 和 p 仅包含小写字母
//
// Related Topics 哈希表 字符串 滑动窗口
// 👍 579 👎 0
import com.alibaba.fastjson.JSONArray;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
//leetcode submit region begin(Prohibit modification and deletion)
/**
 * LeetCode 438: find all start indices of p's anagrams inside s.
 *
 * Slides a window of length p.length() over s, maintaining per-letter
 * counts and comparing them against the target counts for p
 * (O(26 * n) overall; inputs are lowercase a-z per the constraints).
 */
public class L10438_FindAnagrams {

    public static void main(String[] args) {
        // Demo only.  (The previous fastjson JSONArray.toJSONString call
        // pulled in a third-party dependency just to print a List of
        // Integers -- List.toString is sufficient.)
        System.out.println(findAnagrams("aabaa", "aabaa"));
    }

    /**
     * Returns every index i such that s.substring(i, i + p.length()) is
     * an anagram of p; an empty list for null/empty inputs or when p is
     * longer than s.
     */
    public static List<Integer> findAnagrams(String s, String p) {
        if (s == null || s.length() == 0 || p == null || p.length() == 0 || p.length() > s.length()) {
            return new ArrayList<>();
        }
        // Letter frequencies of p.  (p.length() avoids the needless
        // char[] allocation of p.toCharArray().length.)
        int[] targetCount = new int[26];
        for (int index = 0; index < p.length(); index++) {
            targetCount[p.charAt(index) - 'a']++;
        }
        int[] slideWindowsCount = new int[26];
        List<Integer> result = new ArrayList<>();
        for (int index = 0; index < s.length(); index++) {
            // Grow the window by the incoming character, then test.
            slideWindowsCount[s.charAt(index) - 'a']++;
            if (checkMatch(targetCount, slideWindowsCount)) {
                result.add(index - p.length() + 1);
            }
            // Once the window spans p.length() characters, evict the oldest
            // so the next iteration keeps the window at that size.
            if (index >= p.length() - 1) {
                char outCharacter = s.charAt(index - p.length() + 1);
                slideWindowsCount[outCharacter - 'a']--;
            }
        }
        return result;
    }

    /** True when the window's letter counts exactly match the target's. */
    private static boolean checkMatch(int[] targetCount, int[] slideWindowsCount) {
        for (int index = 0; index < 26; index++) {
            if (targetCount[index] != slideWindowsCount[index]) {
                return false;
            }
        }
        return true;
    }
}
|
<reponame>ultimategdbot/ultimategdbot-core
package com.github.alex1304.ultimategdbot.core;
import static reactor.function.TupleUtils.function;
import java.time.Duration;
import com.github.alex1304.ultimategdbot.api.Translator;
import com.github.alex1304.ultimategdbot.api.command.Context;
import com.github.alex1304.ultimategdbot.api.command.annotated.CommandAction;
import com.github.alex1304.ultimategdbot.api.command.annotated.CommandDescriptor;
import com.github.alex1304.ultimategdbot.api.command.annotated.CommandDoc;
import com.github.alex1304.ultimategdbot.api.util.DurationUtils;
import discord4j.core.event.domain.message.MessageCreateEvent;
import discord4j.gateway.GatewayClient;
import reactor.core.publisher.Mono;
@CommandDescriptor(
    aliases = "ping",
    shortDescription = "tr:CoreStrings/ping_desc"
)
/**
 * Bot "ping" command: replies with a translated "pong" message, then
 * edits that message in place to report the measured latencies.
 */
public final class PingCommand {

    @CommandAction
    @CommandDoc("tr:CoreStrings/ping_run")
    public static Mono<Void> run(Context ctx) {
        // reply(...).elapsed() yields (millis-until-reply, message); the
        // elapsed time is used as the API latency figure below.
        return ctx.reply(ctx.translate("CoreStrings", "pong"))
            .elapsed()
            .flatMap(function((apiLatency, message) -> message.edit(
                spec -> spec.setContent(computeLatency(ctx, ctx.event(), apiLatency)))))
            .then();
    }

    /**
     * Formats the latency report: the measured API round-trip plus the
     * gateway response time of the shard that delivered the event
     * ("unknown" when the gateway client reports none).
     */
    private static String computeLatency(Translator tr, MessageCreateEvent event, long apiLatency) {
        return tr.translate("CoreStrings", "pong") + '\n'
            + tr.translate("CoreStrings", "api_latency") + ' ' + DurationUtils.format(Duration.ofMillis(apiLatency)) + "\n"
            + tr.translate("CoreStrings", "gateway_latency") + ' ' + event.getClient()
                .getGatewayClient(event.getShardInfo().getIndex())
                .map(GatewayClient::getResponseTime)
                .map(DurationUtils::format)
                .orElse(tr.translate("CoreStrings", "unknown"));
    }
}
|
# Load the concerns this gem provides.
require "rus_bank_rails/acts_as_bank"
require "rus_bank_rails/acts_as_region"
# Namespace module for the RusBankRails gem (intentionally empty).
module RusBankRails
end
|
/*
* Copyright (c) 2006-2014 <NAME> <<EMAIL>>,
* 2006-2009 <NAME> <<EMAIL>>,
* 2015 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef __USER_H__
#define __USER_H__

#include "window.h"
#include "server.h"

/* Maximum number of simultaneously tracked tunnel users. */
#define USERS 16

/* Per-user state for one DNS-tunnel client session. */
struct tun_user {
	char id;                       /* slot identifier */
	int active;                    /* nonzero while the slot is in use */
	int authenticated;
	int authenticated_raw;
	time_t last_pkt;               /* time of last packet from this user */
	struct timeval dns_timeout;
	int seed;                      /* presumably the login challenge seed -- confirm */
	in_addr_t tun_ip;              /* tunnel IP assigned to this user */
	struct sockaddr_storage host;  /* client's current DNS source address */
	socklen_t hostlen;
	struct sockaddr_storage remoteforward_addr;
	socklen_t remoteforward_addr_len; /* 0 if no remote forwarding enabled */
	int remote_tcp_fd;
	int remote_forward_connected; /* 0 if not connected, -1 if error or 1 if OK */
	struct frag_buffer *incoming;  /* reassembly buffer for upstream data */
	struct frag_buffer *outgoing;  /* fragment queue for downstream data */
	int next_upstream_ack;
	struct encoder *encoder;       /* downstream encoding in use */
	char downenc;
	int downenc_bits;
	int down_compression;
	int fragsize;                  /* negotiated downstream fragment size */
	enum connection conn;
	int lazy;                      /* nonzero when lazy-mode polling is on */
	struct qmem_buffer qmem;
};

/* Global user table and the number of slots initialized. */
extern struct tun_user *users;
extern int created_users;

int user_sending(int user);
int all_users_waiting_to_send();
int user_active(int i);
int check_authenticated_user_and_ip(int userid, struct query *q, int check_ip);
int check_user_and_ip(int userid, struct query *q, int check_ip);
int init_users(in_addr_t, int);
const char* users_get_first_ip();
int find_user_by_ip(uint32_t);
int find_available_user();
void user_switch_codec(int userid, struct encoder *enc);
void user_set_conn_type(int userid, enum connection c);
int set_user_tcp_fds(fd_set *fds, int);

#endif
|
<filename>test/content.test.js<gh_stars>0
import puppeteer from 'puppeteer';
import path from 'path';
import assert from 'power-assert';

// E2E suite: loads the built extension from ../dist into a real Chromium
// and checks where the "Display bugspots" button is injected.
// NOTE(review): the suite checks page-injected UI, and this file is named
// content.test.js -- the label presumably should reference the content
// script; the previous label also misspelled it as "backgroud.js".
describe('background.js', function () {
  let browser;

  beforeEach(async function () {
    const pathToExtension = path.join(__dirname, '../dist');
    // headless:false is required for extension loading; slowMo eases flake.
    browser = await puppeteer.launch({
      headless: false,
      slowMo: 250,
      args: [
        `--disable-extensions-except=${pathToExtension}`,
        `--load-extension=${pathToExtension}`
      ]
    });
  });

  afterEach(async function () {
    await browser.close();
  });

  it('GitHub repository page has a bugspots button', async function () {
    const page = await browser.newPage();
    await page.goto('https://github.com/igrigorik/bugspots');
    const bugspots = await page.$x('//button[text()="Display bugspots"]');
    assert.equal(bugspots.length, 1);
  });

  it('Other repository page has no bugspots buttons', async function () {
    const page = await browser.newPage();
    await page.goto('https://www.google.co.jp/');
    const bugspots = await page.$x('//button[text()="Display bugspots"]');
    assert.equal(bugspots.length, 0);
  });
});
|
<gh_stars>10-100
/* Copyright (c) 2001-2014, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.auth;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Set;
import org.hsqldb.lib.FrameworkLogger;
/**
* Delegates authentication decisions, and optionally determination of user
* roles and schema, to a different HyperSQL catalog, which may be in the same
* JVM or remote.
*
* For now, at least, this class uses DriverManager to make the ephemeral
* database connections.
*
* @see AuthFunctionBean
* @author <NAME> (blaine dot simpson at admc dot com)
* @since 2.0.1
*/
public class HsqldbSlaveAuthBean implements AuthFunctionBean {
    private static FrameworkLogger logger =
            FrameworkLogger.getLog(HsqldbSlaveAuthBean.class);

    /** JDBC URL of the master catalog used for delegated authentication. */
    private String masterJdbcUrl, validationUser, validationPassword;
    private boolean delegateRolesSchema = true;
    /** Set by {@link #init()}; guards {@link #authenticate(String, String)}. */
    protected boolean initialized;

    /**
     * Use this method and setValidationPassword if you want access to the
     * master database to be verified upon instance initialization.
     */
    public void setValidationUser(String validationUser) {
        this.validationUser = validationUser;
    }

    /**
     * Use this method and setValidationUser if you want access to the
     * master database to be verified upon instance initialization.
     */
    public void setValidationPassword(String validationPassword) {
        this.validationPassword = validationPassword;
    }

    /**
     * Sets the JDBC URL of the master catalog (required before init()).
     */
    public void setMasterJdbcUrl(String masterJdbcUrl) {
        this.masterJdbcUrl = masterJdbcUrl;
    }

    /**
     * Defaults to true.
     *
     * Whether roles and initial schema for the new session will be determined
     * by what they are for this user in the master database.
     */
    public void setDelegateRolesSchema(boolean doDelegateRolesSchema) {
        delegateRolesSchema = doDelegateRolesSchema;
    }

    public HsqldbSlaveAuthBean() {
        // Intentionally empty
    }

    /**
     * Validates configuration and (optionally) connectivity to the master.
     *
     * @throws IllegalStateException if any required setting has not been set.
     * @throws SQLException if properties 'validationUser' and
     *         'validationPassword' have been set, but we fail to connect to
     *         the master database.
     */
    public void init() throws SQLException {
        if (masterJdbcUrl == null) {
            throw new IllegalStateException(
                    "Required property 'masterJdbcUrl' not set");
        }
        if (validationUser != null || validationPassword != null) {
            if (validationUser == null || validationPassword == null) {
                throw new IllegalStateException(
                        "If you set one property of 'validationUser' or "
                        + "'validationPassword', then you must set both.");
            }
            Connection c = null;
            SQLException problem = null;
            try {
                c = DriverManager.getConnection(
                        masterJdbcUrl, validationUser, validationPassword);
            } catch (SQLException se) {
                logger.error("Master/slave Connection validation failure", se);
                // BUG FIX: previously 'se' was only stored in 'problem' and
                // never rethrown, so a failed validation was silently
                // swallowed and 'initialized' was still set -- violating the
                // documented @throws contract.  Record it so the finally
                // block will not mask it with a close() failure, then rethrow.
                problem = se;
                throw se;
            } finally {
                if (c != null) try {
                    c.close();
                    c = null; // Encourage GC
                } catch (SQLException nestedSe) {
                    logger.error(
                            "Failed to close test master/slave Connection",
                            nestedSe);
                    // Only surface the close() failure when it is not
                    // obscuring a more important connection failure.
                    if (problem == null) {
                        throw nestedSe;
                    }
                }
            }
        }
        initialized = true;
    }

    /**
     * Attempts to open a master-catalog connection with the presented
     * credentials; success means the user is authenticated.
     *
     * @see AuthFunctionBean#authenticate(String, String)
     */
    public String[] authenticate(String userName, String password)
            throws DenyException {
        if (!initialized) {
            throw new IllegalStateException(
                    "You must invoke the 'init' method to initialize the "
                    + HsqldbSlaveAuthBean.class.getName() + " instance.");
        }
        Connection c = null;
        try {
            c = DriverManager.getConnection(masterJdbcUrl, userName, password);
            if (delegateRolesSchema) {
                // Roles plus (optionally) the initial schema are copied from
                // the master session into the returned array.
                Set<String> schemaAndRoles = AuthUtils.getEnabledRoles(c);
                String schemaOnMaster = AuthUtils.getInitialSchema(c);
                if (schemaOnMaster != null) {
                    schemaAndRoles.add(schemaOnMaster);
                }
                logger.finer("Slave delegating schema+roles: "
                        + schemaAndRoles);
                return schemaAndRoles.toArray(new String[0]);
            }
            // null means: authenticated, use locally-defined roles/schema.
            return null;
        } catch (SQLException se) {
            // Deliberately does not carry the cause: any failure to connect
            // as this user is treated as an authentication denial.
            throw new DenyException();
        } finally {
            if (c != null) try {
                c.close();
                c = null; // Encourage GC
            } catch (SQLException nestedSe) {
                logger.severe(
                        "Failed to close master/slave Connection", nestedSe);
            }
        }
    }
}
|
#!/bin/bash
#set -x
set -e

# Looks up the Ops Manager AMI for the configured AWS region on Pivotal
# Network, rewrites terraform.tfvars in place, and runs terraform.
# Requires: http (httpie), jq, yq, terraform, ./whats-my-ip.sh, and a
# Pivnet API token (PIVNET_TOKEN).

ops_man_version=2.2.6

# Optionally source credentials (PIVNET_TOKEN, AWS_* ...) from ./keysrc.
if [ -f ./keysrc ]; then
    echo "found rc file, sourcing..."
    echo
    source ./keysrc
fi

pivnet_token=${PIVNET_TOKEN:-fake}
region=${AWS_REGION:-us-west-2}
# BUG FIX: the AZ previously defaulted from AWS_REGION, so setting AWS_REGION
# silently made "az" a region name (not a valid availability zone).  Honor an
# explicit AWS_AZ, otherwise derive "<region>a".
az=${AWS_AZ:-${region}a}

echo "region : $region"
echo "az : $az"

ip=$(./whats-my-ip.sh)
replace_str="$ip/32"
echo "ip : $replace_str"

# Walk the Pivnet API: products -> ops-manager releases -> product files
# -> the "...onAWS.yml" AMI mapping file.
ops_man_releases_page=$(http https://network.pivotal.io/api/v2/products | jq -r '.products | .[] | select( .slug == "ops-manager" ) | ._links.releases.href')
# FIX: select by $ops_man_version instead of a second hard-coded "2.2.6".
ops_man_product_files_page=$(http $ops_man_releases_page | jq -r --arg v "$ops_man_version" '.releases | .[] | select( .version == $v ) | ._links.product_files.href')
ops_man_ami_ymls=$(http $ops_man_product_files_page | jq -r '.product_files | .[] | select( .aws_object_key | contains("onAWS.yml")) | ._links.download.href')
echo "yml href: $ops_man_ami_ymls"

auth="Authorization:Token $pivnet_token"
# The yaml maps region name -> AMI id.
query=".[\"$region\"]"
ops_man_ami=$(http --follow $ops_man_ami_ymls "$auth" | yq -r $query)
echo "ami: $ops_man_ami"

# in place replace things in the terraform.tfvars file
# ('#' is used as the sed delimiter where the value itself contains '/')
sed -i "" "\#^yourip#s#\".*\"#\"$replace_str\"#g" terraform.tfvars
sed -i "" "/^opsman_ami/s/\".*\"/\"$ops_man_ami\"/g" terraform.tfvars
sed -i "" "/^az/s/\".*\"/\"$az\"/g" terraform.tfvars
sed -i "" "/^access_key_id/s/\".*\"/\"$AWS_ACCESS_KEY_ID\"/g" terraform.tfvars
sed -i "" "\#^secret_access_key#s#\".*\"#\"$AWS_SECRET_ACCESS_KEY\"#g" terraform.tfvars

terraform init
terraform plan -out=plan
sleep 3
terraform apply plan
|
#!/bin/sh
# CocoaPods-style "Embed Frameworks" build phase: copies the built framework
# (and any Swift runtime dylibs it links against) into the app bundle's
# Frameworks folder, re-signing when the build settings require it.
# NOTE(review): shebang is /bin/sh but the script uses bash-only features
# ([[ ]], ${PIPESTATUS[0]}); Xcode runs build phases with bash, so it works
# there -- confirm before invoking this script standalone.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Location of the Swift standard-library dylibs for the current platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Copies one framework ($1, e.g. "Foo.framework") from the built products dir
# into the app's Frameworks folder, resolving symlinks, code-signing when
# required, and embedding the Swift runtime libraries the binary links.
install_framework()
{
  local source="${BUILT_PRODUCTS_DIR}/Pods-URLImageCache_Example/$1"
  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Resolve a symlinked build product to its real path before copying.
  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source=$(readlink "${source}")
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers/" --filter "- PrivateHeaders/" --filter "- Modules/" ${source} ${destination}"
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers/" --filter "- PrivateHeaders/" --filter "- Modules/" "${source}" "${destination}"

  # Resign the code if required by the build settings to avoid unstable apps
  if [ "${CODE_SIGNING_REQUIRED}" == "YES" ]; then
    code_sign "${destination}/$1"
  fi

  # Embed linked Swift runtime libraries
  # Binary name inside the bundle = framework name minus its extension; the
  # `&& exit ${PIPESTATUS[0]}` idiom propagates the pipeline's first status.
  local basename
  basename=$(echo $1 | sed -E s/\\..+// && exit ${PIPESTATUS[0]})
  # otool -LX lists linked libraries; keep only @rpath Swift dylibs and
  # strip everything but the dylib file name.
  local swift_runtime_libs
  swift_runtime_libs=$(xcrun otool -LX "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/$1/${basename}" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
  for lib in $swift_runtime_libs; do
    echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
    rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
    if [ "${CODE_SIGNING_REQUIRED}" == "YES" ]; then
      code_sign "${destination}/${lib}"
    fi
  done
}

# Signs a framework with the provided identity
code_sign() {
  # Use the current code_sign_identitiy
  echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
  echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements $1"
  /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
}

# Both configurations embed the same framework.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework 'URLImageCache.framework'
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework 'URLImageCache.framework'
fi
|
#!/bin/bash
# Extract, for each line of list.txt, the text between the first "(" and the
# first following "/" or ")" (e.g. "name (SERIAL/rev)" -> "SERIAL"), writing
# the result to good-serials.txt.
# FIX: the original was `cat list.txt | awk | awk | awk` -- a useless use of
# cat plus three awk processes.  Splitting on any of "(", "/", ")" in one awk
# makes $2 exactly the same field the old pipeline produced.
awk -F'[(/)]' '{ print $2 }' list.txt > good-serials.txt
|
#! /usr/bin/env bash
# CI task: build the nokogiri gem (optionally a precompiled native gem when
# BUILD_NATIVE_GEM is set) and copy the artifacts plus checksums to ../gems.

# Log OS information for the build record (file may be absent on some images).
test -e /etc/os-release && cat /etc/os-release
if [ -n "${BUILD_NATIVE_GEM:-}" ] ; then
    # normally part of rake-compiler-dock runas which we can't easily use in concourse
    . /etc/rubybashrc
    ln -s /usr/local/rake-compiler "$HOME"/.rake-compiler
    export RAKE_EXTENSION_TASK_NO_NATIVE=true
fi
cd nokogiri
set -e -x -u # after the `cd` because of rvm
OUTPUT_DIR="../gems"
# inputs from a real git resource will contain this dir, but we may
# run this task via `fly execute` and so we need to do this to avoid
# cleanup, see extconf.rb do_clean
mkdir -p .git
# Prefer vendored/cached gems; fall back to a networked install.
bundle install --local || bundle install
# generate a fake version number
bundle exec rake set-version-to-timestamp
if [ -n "${BUILD_NATIVE_GEM:-}" ] ; then
    # Cross-compile a native gem for the platform named by BUILD_NATIVE_GEM.
    bundle exec rake gem:${BUILD_NATIVE_GEM}:builder FORCE_CROSS_COMPILING=true
else
    # TODO we're only compiling so that we retrieve libxml2/libxslt
    # tarballs, we can do better a couple of different ways
    bundle exec rake clean compile
    bundle exec rake gem
fi
# Optional repo-provided sanity check on the packaged gem's contents.
if [[ -e ./scripts/test-gem-file-contents ]] ; then
    ./scripts/test-gem-file-contents pkg/nokogiri*.gem
fi
mkdir -p ${OUTPUT_DIR}
cp -v pkg/nokogiri*.gem ${OUTPUT_DIR}
# Record checksums of everything we produced.
sha256sum ${OUTPUT_DIR}/*
|
#!/usr/bin/env bash
# Download a file to DEST and verify it against an expected sha256 checksum.
set -e

# Script name used in usage output.
readonly PROGNAME=$(basename $0)
# NOTE(review): "$@" collapses into one string here; arguments containing
# whitespace will be re-split when cmdline later expands ${ARGS} unquoted.
readonly ARGS="$@"
# Print usage/help for this script.
# FIX: the usage line and example previously showed a flag-less invocation
# ("$PROGNAME DEST" / positional url+sha), which the script rejects because
# --url and --sha256 are mandatory options.
function usage {
    cat <<- EOF
	usage: $PROGNAME -u URL -c SHA256 DEST
	Downloads the file at the given url checking it against the given sha256.
	If checksum matches return 0 otherwise delete the downloaded file and return non-zero.
	Download is placed in the directory DEST.
	OPTIONS:
	   -u --url        The url of the file to download.
	   -c --sha256     The sha256 checksum to use to validate the download.
	   -h --help       Show this help.
	   -x --debug      Debug this script.
	Examples:
	   $PROGNAME -u https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-amd64.tar.gz -c 7f3aba1d803543dd1df3944d014f055112cf8dadf0a583c76dd5f46578ebe3c2 /opt/downloads
	EOF
}
# Parse command-line options into the readonly globals URL, CHECKSUM, DEST
# (and DEBUG).  Long options are first rewritten to their short equivalents
# so a single getopts loop can handle both forms.
# NOTE(review): the rewritten string is re-parsed via `eval set -- $args`,
# so arguments containing quotes/whitespace may not survive intact.
function cmdline {
    local arg=
    for arg
    do
        local delim=""
        case "$arg" in
            # Translate --gnu-long-options to -g (short options)
            --url) args="${args}-u ";;
            --sha256) args="${args}-c ";;
            --help) args="${args}-h ";;
            --debug) args="${args}-x ";;
            # Pass through anything else
            *) [[ "${arg:0:1}" == "-" ]] || delim="\""
                args="${args}${delim}${arg}${delim} ";;
        esac
    done
    # Reset the positional parameters to the short options
    eval set -- $args
    while getopts "u:c:hx" OPTION
    do
        case $OPTION in
        u)
            readonly URL=${OPTARG}
            ;;
        c)
            readonly CHECKSUM=${OPTARG}
            ;;
        h)
            usage
            exit 0
            ;;
        x)
            # Enable shell tracing for the rest of the run.
            readonly DEBUG='-x'
            set -x
            ;;
        esac
    done
    # Both of these options are mandatory.
    if [[ -z $URL || -z $CHECKSUM ]]; then
        echo "Missing one or more required options: --url --sha256"
        exit 1
    fi
    # The only parameters is the destination directory.
    shift $((OPTIND-1))
    if [ "$#" -ne 1 ]; then
        echo "Illegal number of parameters"
        usage
        return 1
    fi
    readonly DEST="${1}"
    return 0
}
# Compare the sha256 of the given file (exit status) against the expected
# global $CHECKSUM set by cmdline.
# FIX: the original piped the hash into `xargs test "${CHECKSUM}" ==`, which
# relies on the non-POSIX `==` operator of the external `test` binary and
# produces a malformed test expression when the pipeline output is empty.
# A direct [ "$a" = "$b" ] comparison is portable and robust.
function validate {
    local file=${1}
    local actual
    actual=$(sha256sum "${file}" | cut -f1 -d' ')
    echo "sha256sum ${file}: ${actual}"
    [ "${actual}" = "${CHECKSUM}" ]
}
# Orchestrates the task: parse options, drop any stale local copy that fails
# the checksum, (re)download with wget, and validate the final file.
function main {
    cmdline ${ARGS}
    local file="${DEST}/$(basename ${URL})"
    # Remove the downloaded file if it exist and does not match the checksum so that it can be downloaded again.
    if [ -f "${file}" ] && ! validate "${file}"; then
        rm "${file}"
    fi
    # -N: only fetch when the remote copy is newer than the local one.
    wget -N -P "${DEST}" "${URL}"
    # Return non-zero if the checksum doesn't match the downloaded file.
    validate "${file}"
}
main
|
def LCS(x, y):
    """Return the length of the longest common subsequence of x and y.

    Classic bottom-up dynamic programming: L[i][j] holds the LCS length of
    x[:i] and y[:j], so the answer is L[m][n].

    Args:
        x: First sequence (string or any indexable sequence).
        y: Second sequence.

    Returns:
        int: Length of the longest common subsequence.
    """
    m, n = len(x), len(y)
    # Initialize with 0 rather than None: row 0 / column 0 represent empty
    # prefixes whose LCS with anything is 0.  This also removes the original
    # redundant `if i == 0 or j == 0` branch executed inside the double loop.
    L = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if x[i - 1] == y[j - 1]:
                L[i][j] = L[i - 1][j - 1] + 1
            else:
                L[i][j] = max(L[i - 1][j], L[i][j - 1])
    return L[m][n]
|
import numpy as np
def calculate_scores(state, next_state, action):
    """Score a batch of transitions (higher score = worse transition).

    A fixed penalty is applied when the front-foot feature (state column 7)
    is at or above the threshold; forward progress (change in state column 17
    per 0.01 timestep) and a quadratic control cost are then subtracted.

    NOTE(review): assumes `state`/`next_state` are 2-D arrays with at least
    18 columns and `action` is 2-D (batch, act_dim) with the same batch size
    -- confirm with callers.
    """
    threshold = 0
    penalty = 10

    # Start directly from the heading penalty instead of masking into zeros.
    front_foot = state[:, 7]
    scores = np.where(front_foot >= threshold, float(penalty), 0.0)

    # Forward progress over the 0.01 timestep.
    progress = (next_state[:, 17] - state[:, 17]) / 0.01
    # Quadratic control cost on the action magnitude.
    control_cost = 0.1 * (np.sum(action**2, axis=1))

    return scores - (progress + control_cost)
|
<filename>priest-satellite-api/src/main/java/com/kinstalk/satellite/service/api/LogApiService.java
package com.kinstalk.satellite.service.api;
import com.kinstalk.satellite.domain.LogApi;
import com.kinstalk.satellite.domain.ReportDayApi;
import com.kinstalk.satellite.domain.ReportLastApi;
import com.kinstalk.satellite.domain.ReportLastHoursApi;
import java.util.List;
/**
 * Service for persisting and querying API call-log records.
 *
 * User: liuling
 * Date: 16/4/12
 * Time: 5:54 PM
 */
public interface LogApiService {
    /**
     * Saves a single call-log record.
     *
     * @param logApiDTO the log record to persist
     * @return true if the record was saved successfully
     */
    boolean save(LogApi logApiDTO);
    /**
     * Inserts call-log records in batch.
     *
     * @param apis the records to insert
     * @return true if the batch insert succeeded
     */
    boolean batchInsertLogApi(List<LogApi> apis);
    /**
     * Queries the status of a batch of APIs by time.
     *
     * @param createTime the creation time to query by
     * @return the list of API status reports
     */
    List<ReportLastApi> batchFindByTime(Long createTime);
    /**
     * Queries detailed API status for the previous 24 hours.
     *
     * @param createTime the query time
     * @param updateTime the time the data was stored
     * @return the list of detailed API reports
     */
    List<ReportLastHoursApi> batchFindLastHoursByTime(Long createTime, Long updateTime);
    /**
     * Queries API status within a time range.
     *
     * @param startTime range start time
     * @param endTime range end time
     * @param updateTime the time the data was stored
     * @return the list of per-day API reports
     */
    List<ReportDayApi> batchFindDaysByTime(Long startTime, Long endTime, Long updateTime);
}
|
<filename>src/main/java/net/jamsimulator/jams/mips/simulation/file/event/SimulationFileOpenEvent.java<gh_stars>1-10
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.mips.simulation.file.event;
import net.jamsimulator.jams.event.Cancellable;
import net.jamsimulator.jams.event.Event;
import net.jamsimulator.jams.mips.simulation.file.SimulationFile;
import net.jamsimulator.jams.mips.simulation.file.SimulationFiles;
import net.jamsimulator.jams.utils.Validate;
import java.io.File;
/**
* This event is called when a simulation file is open.
*/
/**
 * Fired when a {@link SimulationFiles} collection opens a {@link File}.
 * <p>
 * The base class cannot be instantiated directly: listeners receive the
 * cancellable {@link Before} variant prior to the open and the {@link After}
 * variant once the {@link SimulationFile} exists.
 */
public class SimulationFileOpenEvent extends Event {

    protected final SimulationFiles files;
    protected File file;

    /**
     * Creates the event.
     *
     * @param files the {@link SimulationFiles} opening the given {@link File}.
     * @param file  the {@link File} being opened.
     */
    private SimulationFileOpenEvent(SimulationFiles files, File file) {
        Validate.notNull(files, "Files cannot be null!");
        Validate.notNull(file, "File cannot be null!");
        this.files = files;
        this.file = file;
    }

    /**
     * The {@link SimulationFiles} collection performing the open.
     *
     * @return the {@link SimulationFiles}.
     */
    public SimulationFiles getFiles() {
        return files;
    }

    /**
     * The {@link File} being opened.
     *
     * @return the {@link File}.
     */
    public File getFile() {
        return file;
    }

    /**
     * Cancellable variant fired before the file is opened. Listeners may
     * substitute a different {@link File} or cancel the operation.
     */
    public static class Before extends SimulationFileOpenEvent implements Cancellable {

        private boolean cancelled = false;

        /**
         * Creates the event.
         *
         * @param files the {@link SimulationFiles} opening the given {@link File}.
         * @param file  the {@link File} to open.
         */
        public Before(SimulationFiles files, File file) {
            super(files, file);
        }

        /**
         * Replaces the {@link File} that will be opened.
         *
         * @param file the new {@link File}; must not be null.
         */
        public void setFile(File file) {
            Validate.notNull(file, "File cannot be null!");
            this.file = file;
        }

        @Override
        public boolean isCancelled() {
            return cancelled;
        }

        @Override
        public void setCancelled(boolean cancelled) {
            this.cancelled = cancelled;
        }
    }

    /**
     * Variant fired after the file has been opened; carries the resulting
     * {@link SimulationFile}.
     */
    public static class After extends SimulationFileOpenEvent {

        private final SimulationFile simulationFile;

        /**
         * Creates the event.
         *
         * @param files          the {@link SimulationFiles} opening the given {@link File}.
         * @param file           the open {@link File}.
         * @param simulationFile the {@link SimulationFile} representing the open {@link File}.
         */
        public After(SimulationFiles files, File file, SimulationFile simulationFile) {
            super(files, file);
            Validate.notNull(simulationFile, "Simulation file cannot be null!");
            this.simulationFile = simulationFile;
        }

        /**
         * The {@link SimulationFile} wrapping the open {@link File}.
         *
         * @return the {@link SimulationFile}.
         */
        public SimulationFile getSimulationFile() {
            return simulationFile;
        }
    }
}
|
<filename>console/src/boost_1_78_0/libs/preprocessor/test/seq.cxx
# /* **************************************************************************
# * *
# * (C) Copyright <NAME> 2002.
# * Distributed under the Boost Software License, Version 1.0. (See
# * accompanying file LICENSE_1_0.txt or copy at
# * http://www.boost.org/LICENSE_1_0.txt)
# * *
# ************************************************************************** */
#
# /* Revised by <NAME> (2011,2020) */
#
# /* See http://www.boost.org for most recent version. */
#
# include <boost/preprocessor/config/limits.hpp>
# include <boost/preprocessor/arithmetic/add.hpp>
# include <boost/preprocessor/arithmetic/dec.hpp>
# include <boost/preprocessor/arithmetic/div.hpp>
# include <boost/preprocessor/arithmetic/inc.hpp>
# include <boost/preprocessor/arithmetic/mod.hpp>
# include <boost/preprocessor/arithmetic/sub.hpp>
# include <boost/preprocessor/cat.hpp>
# include <boost/preprocessor/comparison/equal.hpp>
# include <boost/preprocessor/comparison/less.hpp>
# include <boost/preprocessor/control/iif.hpp>
# include <boost/preprocessor/control/expr_iif.hpp>
# include <boost/preprocessor/facilities/is_empty.hpp>
# include <boost/preprocessor/logical/bitor.hpp>
# include <boost/preprocessor/logical/not.hpp>
# include <boost/preprocessor/seq.hpp>
# include <boost/preprocessor/array/elem.hpp>
# include <boost/preprocessor/array/size.hpp>
# include <boost/preprocessor/tuple/elem.hpp>
# include <boost/preprocessor/tuple/size.hpp>
# include <boost/preprocessor/list/at.hpp>
# include <boost/preprocessor/list/size.hpp>
# include <boost/preprocessor/variadic/elem.hpp>
# include <boost/preprocessor/variadic/size.hpp>
# include <boost/preprocessor/variadic/has_opt.hpp>
# include <libs/preprocessor/test/test.h>
# define SEQ_NONE ()
# define SEQ (4)(1)(5)(2)
# define SEQ_100 \
(1)(2)(3)(4)(5)(6)(7)(8)(9) \
(10)(11)(12)(13)(14)(15)(16)(17)(18)(19) \
(20)(21)(22)(23)(24)(25)(26)(27)(28)(29) \
(30)(31)(32)(33)(34)(35)(36)(37)(38)(39) \
(40)(41)(42)(43)(44)(45)(46)(47)(48)(49) \
(50)(51)(52)(53)(54)(55)(56)(57)(58)(59) \
(60)(61)(62)(63)(64)(65)(66)(67)(68)(69) \
(70)(71)(72)(73)(74)(75)(76)(77)(78)(79) \
(80)(81)(82)(83)(84)(85)(86)(87)(88)(89) \
(90)(91)(92)(93)(94)(95)(96)(97)(98)(99) \
(100)
# define SEQ_255 \
(1)(2)(3)(4)(5)(6)(7)(8)(9) \
(10)(11)(12)(13)(14)(15)(16)(17)(18)(19) \
(20)(21)(22)(23)(24)(25)(26)(27)(28)(29) \
(30)(31)(32)(33)(34)(35)(36)(37)(38)(39) \
(40)(41)(42)(43)(44)(45)(46)(47)(48)(49) \
(50)(51)(52)(53)(54)(55)(56)(57)(58)(59) \
(60)(61)(62)(63)(64)(65)(66)(67)(68)(69) \
(70)(71)(72)(73)(74)(75)(76)(77)(78)(79) \
(80)(81)(82)(83)(84)(85)(86)(87)(88)(89) \
(90)(91)(92)(93)(94)(95)(96)(97)(98)(99) \
(100)(101)(102)(103)(104)(105)(106)(107)(108)(109) \
(110)(111)(112)(113)(114)(115)(116)(117)(118)(119) \
(120)(121)(122)(123)(124)(125)(126)(127)(128)(129) \
(130)(131)(132)(133)(134)(135)(136)(137)(138)(139) \
(140)(141)(142)(143)(144)(145)(146)(147)(148)(149) \
(150)(151)(152)(153)(154)(155)(156)(157)(158)(159) \
(160)(161)(162)(163)(164)(165)(166)(167)(168)(169) \
(170)(171)(172)(173)(174)(175)(176)(177)(178)(179) \
(180)(181)(182)(183)(184)(185)(186)(187)(188)(189) \
(190)(191)(192)(193)(194)(195)(196)(197)(198)(199) \
(200)(201)(202)(203)(204)(205)(206)(207)(208)(209) \
(210)(211)(212)(213)(214)(215)(216)(217)(218)(219) \
(220)(221)(222)(223)(224)(225)(226)(227)(228)(229) \
(230)(231)(232)(233)(234)(235)(236)(237)(238)(239) \
(240)(241)(242)(243)(244)(245)(246)(247)(248)(249) \
(250)(251)(252)(253)(254)(255)
# define SEQ_256 SEQ_255(256)
# if BOOST_PP_LIMIT_SEQ == 512
# define SEQ_511 \
(1)(2)(3)(4)(5)(6)(7)(8)(9) \
(10)(11)(12)(13)(14)(15)(16)(17)(18)(19) \
(20)(21)(22)(23)(24)(25)(26)(27)(28)(29) \
(30)(31)(32)(33)(34)(35)(36)(37)(38)(39) \
(40)(41)(42)(43)(44)(45)(46)(47)(48)(49) \
(50)(51)(52)(53)(54)(55)(56)(57)(58)(59) \
(60)(61)(62)(63)(64)(65)(66)(67)(68)(69) \
(70)(71)(72)(73)(74)(75)(76)(77)(78)(79) \
(80)(81)(82)(83)(84)(85)(86)(87)(88)(89) \
(90)(91)(92)(93)(94)(95)(96)(97)(98)(99) \
(100)(101)(102)(103)(104)(105)(106)(107)(108)(109) \
(110)(111)(112)(113)(114)(115)(116)(117)(118)(119) \
(120)(121)(122)(123)(124)(125)(126)(127)(128)(129) \
(130)(131)(132)(133)(134)(135)(136)(137)(138)(139) \
(140)(141)(142)(143)(144)(145)(146)(147)(148)(149) \
(150)(151)(152)(153)(154)(155)(156)(157)(158)(159) \
(160)(161)(162)(163)(164)(165)(166)(167)(168)(169) \
(170)(171)(172)(173)(174)(175)(176)(177)(178)(179) \
(180)(181)(182)(183)(184)(185)(186)(187)(188)(189) \
(190)(191)(192)(193)(194)(195)(196)(197)(198)(199) \
(200)(201)(202)(203)(204)(205)(206)(207)(208)(209) \
(210)(211)(212)(213)(214)(215)(216)(217)(218)(219) \
(220)(221)(222)(223)(224)(225)(226)(227)(228)(229) \
(230)(231)(232)(233)(234)(235)(236)(237)(238)(239) \
(240)(241)(242)(243)(244)(245)(246)(247)(248)(249) \
(250)(251)(252)(253)(254)(255)(256)(257)(258)(259) \
(260)(261)(262)(263)(264)(265)(266)(267)(268)(269) \
(270)(271)(272)(273)(274)(275)(276)(277)(278)(279) \
(280)(281)(282)(283)(284)(285)(286)(287)(288)(289) \
(290)(291)(292)(293)(294)(295)(296)(297)(298)(299) \
(300)(301)(302)(303)(304)(305)(306)(307)(308)(309) \
(310)(311)(312)(313)(314)(315)(316)(317)(318)(319) \
(320)(321)(322)(323)(324)(325)(326)(327)(328)(329) \
(330)(331)(332)(333)(334)(335)(336)(337)(338)(339) \
(340)(341)(342)(343)(344)(345)(346)(347)(348)(349) \
(350)(351)(352)(353)(354)(355)(356)(357)(358)(359) \
(360)(361)(362)(363)(364)(365)(366)(367)(368)(369) \
(370)(371)(372)(373)(374)(375)(376)(377)(378)(379) \
(380)(381)(382)(383)(384)(385)(386)(387)(388)(389) \
(390)(391)(392)(393)(394)(395)(396)(397)(398)(399) \
(400)(401)(402)(403)(404)(405)(406)(407)(408)(409) \
(410)(411)(412)(413)(414)(415)(416)(417)(418)(419) \
(420)(421)(422)(423)(424)(425)(426)(427)(428)(429) \
(430)(431)(432)(433)(434)(435)(436)(437)(438)(439) \
(440)(441)(442)(443)(444)(445)(446)(447)(448)(449) \
(450)(451)(452)(453)(454)(455)(456)(457)(458)(459) \
(460)(461)(462)(463)(464)(465)(466)(467)(468)(469) \
(470)(471)(472)(473)(474)(475)(476)(477)(478)(479) \
(480)(481)(482)(483)(484)(485)(486)(487)(488)(489) \
(490)(491)(492)(493)(494)(495)(496)(497)(498)(499) \
(500)(501)(502)(503)(504)(505)(506)(507)(508)(509) \
(510)(511)
# define SEQ_512 SEQ_511(512)
# endif
# if BOOST_PP_LIMIT_SEQ == 1024
# define SEQ_1023 \
(1)(2)(3)(4)(5)(6)(7)(8)(9) \
(10)(11)(12)(13)(14)(15)(16)(17)(18)(19) \
(20)(21)(22)(23)(24)(25)(26)(27)(28)(29) \
(30)(31)(32)(33)(34)(35)(36)(37)(38)(39) \
(40)(41)(42)(43)(44)(45)(46)(47)(48)(49) \
(50)(51)(52)(53)(54)(55)(56)(57)(58)(59) \
(60)(61)(62)(63)(64)(65)(66)(67)(68)(69) \
(70)(71)(72)(73)(74)(75)(76)(77)(78)(79) \
(80)(81)(82)(83)(84)(85)(86)(87)(88)(89) \
(90)(91)(92)(93)(94)(95)(96)(97)(98)(99) \
(100)(101)(102)(103)(104)(105)(106)(107)(108)(109) \
(110)(111)(112)(113)(114)(115)(116)(117)(118)(119) \
(120)(121)(122)(123)(124)(125)(126)(127)(128)(129) \
(130)(131)(132)(133)(134)(135)(136)(137)(138)(139) \
(140)(141)(142)(143)(144)(145)(146)(147)(148)(149) \
(150)(151)(152)(153)(154)(155)(156)(157)(158)(159) \
(160)(161)(162)(163)(164)(165)(166)(167)(168)(169) \
(170)(171)(172)(173)(174)(175)(176)(177)(178)(179) \
(180)(181)(182)(183)(184)(185)(186)(187)(188)(189) \
(190)(191)(192)(193)(194)(195)(196)(197)(198)(199) \
(200)(201)(202)(203)(204)(205)(206)(207)(208)(209) \
(210)(211)(212)(213)(214)(215)(216)(217)(218)(219) \
(220)(221)(222)(223)(224)(225)(226)(227)(228)(229) \
(230)(231)(232)(233)(234)(235)(236)(237)(238)(239) \
(240)(241)(242)(243)(244)(245)(246)(247)(248)(249) \
(250)(251)(252)(253)(254)(255)(256)(257)(258)(259) \
(260)(261)(262)(263)(264)(265)(266)(267)(268)(269) \
(270)(271)(272)(273)(274)(275)(276)(277)(278)(279) \
(280)(281)(282)(283)(284)(285)(286)(287)(288)(289) \
(290)(291)(292)(293)(294)(295)(296)(297)(298)(299) \
(300)(301)(302)(303)(304)(305)(306)(307)(308)(309) \
(310)(311)(312)(313)(314)(315)(316)(317)(318)(319) \
(320)(321)(322)(323)(324)(325)(326)(327)(328)(329) \
(330)(331)(332)(333)(334)(335)(336)(337)(338)(339) \
(340)(341)(342)(343)(344)(345)(346)(347)(348)(349) \
(350)(351)(352)(353)(354)(355)(356)(357)(358)(359) \
(360)(361)(362)(363)(364)(365)(366)(367)(368)(369) \
(370)(371)(372)(373)(374)(375)(376)(377)(378)(379) \
(380)(381)(382)(383)(384)(385)(386)(387)(388)(389) \
(390)(391)(392)(393)(394)(395)(396)(397)(398)(399) \
(400)(401)(402)(403)(404)(405)(406)(407)(408)(409) \
(410)(411)(412)(413)(414)(415)(416)(417)(418)(419) \
(420)(421)(422)(423)(424)(425)(426)(427)(428)(429) \
(430)(431)(432)(433)(434)(435)(436)(437)(438)(439) \
(440)(441)(442)(443)(444)(445)(446)(447)(448)(449) \
(450)(451)(452)(453)(454)(455)(456)(457)(458)(459) \
(460)(461)(462)(463)(464)(465)(466)(467)(468)(469) \
(470)(471)(472)(473)(474)(475)(476)(477)(478)(479) \
(480)(481)(482)(483)(484)(485)(486)(487)(488)(489) \
(490)(491)(492)(493)(494)(495)(496)(497)(498)(499) \
(500)(501)(502)(503)(504)(505)(506)(507)(508)(509) \
(510)(511)(512) \
(513)(514)(515)(516)(517)(518)(519)(520)(521) \
(522)(523)(524)(525)(526)(527)(528)(529)(530)(531) \
(532)(533)(534)(535)(536)(537)(538)(539)(540)(541) \
(542)(543)(544)(545)(546)(547)(548)(549)(550)(551) \
(552)(553)(554)(555)(556)(557)(558)(559)(560)(561) \
(562)(563)(564)(565)(566)(567)(568)(569)(570)(571) \
(572)(573)(574)(575)(576)(577)(578)(579)(580)(581) \
(582)(583)(584)(585)(586)(587)(588)(589)(590)(591) \
(592)(593)(594)(595)(596)(597)(598)(599)(600)(601) \
(602)(603)(604)(605)(606)(607)(608)(609)(610)(611) \
(612)(613)(614)(615)(616)(617)(618)(619)(620)(621) \
(622)(623)(624)(625)(626)(627)(628)(629)(630)(631) \
(632)(633)(634)(635)(636)(637)(638)(639)(640)(641) \
(642)(643)(644)(645)(646)(647)(648)(649)(650)(651) \
(652)(653)(654)(655)(656)(657)(658)(659)(660)(661) \
(662)(663)(664)(665)(666)(667)(668)(669)(670)(671) \
(672)(673)(674)(675)(676)(677)(678)(679)(680)(681) \
(682)(683)(684)(685)(686)(687)(688)(689)(690)(691) \
(692)(693)(694)(695)(696)(697)(698)(699)(700)(701) \
(702)(703)(704)(705)(706)(707)(708)(709)(710)(711) \
(712)(713)(714)(715)(716)(717)(718)(719)(720)(721) \
(722)(723)(724)(725)(726)(727)(728)(729)(730)(731) \
(732)(733)(734)(735)(736)(737)(738)(739)(740)(741) \
(742)(743)(744)(745)(746)(747)(748)(749)(750)(751) \
(752)(753)(754)(755)(756)(757)(758)(759)(760)(761) \
(762)(763)(764)(765)(766)(767)(768)(769)(770)(771) \
(772)(773)(774)(775)(776)(777)(778)(779)(780)(781) \
(782)(783)(784)(785)(786)(787)(788)(789)(790)(791) \
(792)(793)(794)(795)(796)(797)(798)(799)(800)(801) \
(802)(803)(804)(805)(806)(807)(808)(809)(810)(811) \
(812)(813)(814)(815)(816)(817)(818)(819)(820)(821) \
(822)(823)(824)(825)(826)(827)(828)(829)(830)(831) \
(832)(833)(834)(835)(836)(837)(838)(839)(840)(841) \
(842)(843)(844)(845)(846)(847)(848)(849)(850)(851) \
(852)(853)(854)(855)(856)(857)(858)(859)(860)(861) \
(862)(863)(864)(865)(866)(867)(868)(869)(870)(871) \
(872)(873)(874)(875)(876)(877)(878)(879)(880)(881) \
(882)(883)(884)(885)(886)(887)(888)(889)(890)(891) \
(892)(893)(894)(895)(896)(897)(898)(899)(900)(901) \
(902)(903)(904)(905)(906)(907)(908)(909)(910)(911) \
(912)(913)(914)(915)(916)(917)(918)(919)(920)(921) \
(922)(923)(924)(925)(926)(927)(928)(929)(930)(931) \
(932)(933)(934)(935)(936)(937)(938)(939)(940)(941) \
(942)(943)(944)(945)(946)(947)(948)(949)(950)(951) \
(952)(953)(954)(955)(956)(957)(958)(959)(960)(961) \
(962)(963)(964)(965)(966)(967)(968)(969)(970)(971) \
(972)(973)(974)(975)(976)(977)(978)(979)(980)(981) \
(982)(983)(984)(985)(986)(987)(988)(989)(990)(991) \
(992)(993)(994)(995)(996)(997)(998)(999)(1000)(1001) \
(1002)(1003)(1004)(1005)(1006)(1007)(1008)(1009)(1010)(1011) \
(1012)(1013)(1014)(1015)(1016)(1017)(1018)(1019)(1020)(1021) \
(1022)(1023)
# define SEQ_1024 SEQ_1023(1024)
# endif
/* Exercises the BOOST_PP_SEQ_* operations against the SEQ / SEQ_N fixtures
   defined earlier in this file.  BEGIN/END bracket compile-time equality
   checks that the test harness evaluates after preprocessing. */
# define SEQVAR (4,5,8,3,61)(1,0)(5,22,43)(2)(17,45,33)
/* Binary operations used by the fold/transform tests below; the first
   parameter `s` is the fold step provided by the library. */
# define REVERSAL(s, x, y) BOOST_PP_SUB(y, x)
# define SUB_S(s, x, y) BOOST_PP_SUB(x, y)
# define ADD_S(s, x, y) BOOST_PP_ADD(x, y)
# define CAT_S(s, x, y) BOOST_PP_CAT(x, BOOST_PP_IS_EMPTY(y))
/* BOOST_PP_SEQ_HEAD -- including the degenerate SEQ_NONE (one empty element)
   and the maximum-size sequences gated on BOOST_PP_LIMIT_SEQ. */
BEGIN BOOST_PP_IS_EMPTY(BOOST_PP_SEQ_HEAD(SEQ_NONE)) == 1 END
BEGIN BOOST_PP_SEQ_HEAD(SEQ) == 4 END
BEGIN BOOST_PP_SEQ_HEAD(SEQ_255) == 1 END
BEGIN BOOST_PP_SEQ_HEAD(SEQ_256) == 1 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_HEAD(SEQ_511) == 1 END
BEGIN BOOST_PP_SEQ_HEAD(SEQ_512) == 1 END
# endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_HEAD(SEQ_1023) == 1 END
BEGIN BOOST_PP_SEQ_HEAD(SEQ_1024) == 1 END
# endif
/* BOOST_PP_SEQ_FOLD_LEFT / BOOST_PP_SEQ_FOLD_RIGHT */
BEGIN BOOST_PP_SEQ_FOLD_LEFT(CAT_S, 1, SEQ_NONE) == 11 END
BEGIN BOOST_PP_SEQ_FOLD_LEFT(SUB_S, 22, SEQ) == 10 END
BEGIN BOOST_PP_SEQ_FOLD_RIGHT(CAT_S, 2, SEQ_NONE) == 21 END
BEGIN BOOST_PP_SEQ_FOLD_RIGHT(ADD_S, 0, SEQ) == 12 END
BEGIN BOOST_PP_SEQ_FOLD_RIGHT(REVERSAL, 0, SEQ) == 4 END
/* BOOST_PP_SEQ_REVERSE (result checked via SEQ_CAT / SEQ_HEAD) */
BEGIN BOOST_PP_IS_EMPTY(BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REVERSE(SEQ_NONE))) == 1 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REVERSE(SEQ)) == 2514 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_HEAD(BOOST_PP_SEQ_REVERSE(SEQ_512)) == 512 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_HEAD(BOOST_PP_SEQ_REVERSE(SEQ_1024)) == 1024 END
#endif
/* BOOST_PP_SEQ_REST_N */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REST_N(2, SEQ)) == 52 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REST_N(99, SEQ_100)) == 100 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REST_N(255, SEQ_256)) == 256 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_HEAD(BOOST_PP_SEQ_REST_N(509, SEQ_511)) == 510 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_HEAD(BOOST_PP_SEQ_REST_N(1020, SEQ_1023)) == 1021 END
#endif
/* BOOST_PP_SEQ_FIRST_N */
BEGIN BOOST_PP_IS_EMPTY(BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_FIRST_N(1, SEQ_NONE))) == 1 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_FIRST_N(2, SEQ)) == 41 END
BEGIN BOOST_PP_SEQ_ELEM(50,BOOST_PP_SEQ_FIRST_N(100, SEQ_100)) == 51 END
BEGIN BOOST_PP_SEQ_ELEM(100,BOOST_PP_SEQ_FIRST_N(255, SEQ_255)) == 101 END
BEGIN BOOST_PP_SEQ_ELEM(200,BOOST_PP_SEQ_FIRST_N(256, SEQ_256)) == 201 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_ELEM(7,BOOST_PP_SEQ_FIRST_N(15, SEQ_512)) == 8 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_ELEM(14,BOOST_PP_SEQ_FIRST_N(25, SEQ_1024)) == 15 END
#endif
/* BOOST_PP_SEQ_ELEM / BOOST_PP_SEQ_SIZE (note: SEQ_NONE has size 1 --
   a single empty element, not an empty sequence). */
BEGIN BOOST_PP_IS_EMPTY(BOOST_PP_SEQ_ELEM(0, SEQ_NONE)) == 1 END
BEGIN BOOST_PP_SEQ_SIZE(SEQ_NONE) == 1 END
BEGIN BOOST_PP_SEQ_ELEM(2, SEQ) == 5 END
BEGIN BOOST_PP_SEQ_ELEM(20, SEQ_255) == 21 END
BEGIN BOOST_PP_SEQ_ELEM(254, SEQ_255) == 255 END
BEGIN BOOST_PP_SEQ_ELEM(220, SEQ_256) == 221 END
BEGIN BOOST_PP_SEQ_ELEM(255, SEQ_256) == 256 END
BEGIN BOOST_PP_SEQ_SIZE(SEQ) == 4 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(SEQ_511) == 511 END
BEGIN BOOST_PP_SEQ_ELEM(476, SEQ_512) == 477 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(SEQ_1023) == 1023 END
BEGIN BOOST_PP_SEQ_ELEM(934, SEQ_1024) == 935 END
#endif
/* BOOST_PP_SEQ_TRANSFORM */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_TRANSFORM(CAT_S, 13, SEQ_NONE)) == 131 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_TRANSFORM(ADD_S, 2, SEQ)) == 6374 END
# if BOOST_PP_LIMIT_SEQ == 512
#define STRANS_512(s,data,elem) BOOST_PP_ADD(elem,data)
BEGIN BOOST_PP_SEQ_ELEM(383, BOOST_PP_SEQ_TRANSFORM(STRANS_512,2,SEQ_512)) == 386 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
#define STRANS_1024(s,data,elem) BOOST_PP_ADD(elem,data)
BEGIN BOOST_PP_SEQ_ELEM(728, BOOST_PP_SEQ_TRANSFORM(STRANS_1024,1,SEQ_1024)) == 730 END
#endif
/* BOOST_PP_SEQ_TAIL */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_TAIL(SEQ) SEQ) == 1524152 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_ELEM(459, BOOST_PP_SEQ_TAIL(SEQ_511)) == 461 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_ELEM(624, BOOST_PP_SEQ_TAIL(SEQ_1023)) == 626 END
#endif
/* BOOST_PP_SEQ_FOR_EACH / BOOST_PP_SEQ_FOR_EACH_I -- the macros expand to
   arithmetic terms that are summed by the surrounding check. */
# define F1(r, state, x) + x + state
# define FI2(r, state, i, x) BOOST_PP_IIF(BOOST_PP_EQUAL(i,2),+ x + x + state,+ x + state)
BEGIN BOOST_PP_SEQ_FOR_EACH(F1, 1, SEQ) == 16 END
BEGIN BOOST_PP_SEQ_FOR_EACH_I(FI2, 1, SEQ) == 21 END
# if BOOST_PP_LIMIT_SEQ == 512 && BOOST_PP_LIMIT_FOR == 512
# define FE5I2(r, state, i, x) BOOST_PP_EXPR_IIF(BOOST_PP_LESS(i,5),+ x - state)
BEGIN BOOST_PP_SEQ_FOR_EACH_I(FE5I2, 1, SEQ_512) == 10 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024 && BOOST_PP_LIMIT_FOR == 1024
# define FE1024(r, state, i, x) BOOST_PP_EXPR_IIF(BOOST_PP_LESS(i,6),+ x - state)
BEGIN BOOST_PP_SEQ_FOR_EACH_I(FE1024, 2, SEQ_1024) == 9 END
#endif
/* BOOST_PP_SEQ_TO_TUPLE / BOOST_PP_SEQ_TO_ARRAY */
BEGIN BOOST_PP_TUPLE_ELEM(4, 3, BOOST_PP_SEQ_TO_TUPLE(SEQ)) == 2 END
BEGIN BOOST_PP_IS_EMPTY(BOOST_PP_TUPLE_ELEM(1, 0, BOOST_PP_SEQ_TO_TUPLE(SEQ_NONE))) == 1 END
BEGIN BOOST_PP_TUPLE_SIZE(BOOST_PP_SEQ_TO_TUPLE(SEQ_NONE)) == 1 END
BEGIN BOOST_PP_ARRAY_ELEM(3, BOOST_PP_SEQ_TO_ARRAY(SEQ)) == 2 END
BEGIN BOOST_PP_IS_EMPTY(BOOST_PP_ARRAY_ELEM(0, BOOST_PP_SEQ_TO_ARRAY(SEQ_NONE))) == 1 END
BEGIN BOOST_PP_ARRAY_SIZE(BOOST_PP_SEQ_TO_ARRAY(SEQ_NONE)) == 1 END
/* BOOST_PP_SEQ_FILTER */
# define LESS_S(s, x, y) BOOST_PP_LESS(x, y)
# define FILTER_MOD_S(s, data, elem) BOOST_PP_NOT(BOOST_PP_MOD(elem,data))
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_FILTER(LESS_S, 3, SEQ)) == 45 END
BEGIN BOOST_PP_SEQ_ELEM(4,BOOST_PP_SEQ_FILTER(FILTER_MOD_S, 20, SEQ_100)) == 100 END
BEGIN BOOST_PP_SEQ_ELEM(2,BOOST_PP_SEQ_FILTER(FILTER_MOD_S, 30, SEQ_100)) == 90 END
# if BOOST_PP_LIMIT_SEQ == 512
# define FILTER_EQ_512_S(s, data, elem) \
    BOOST_PP_BITOR \
        ( \
        BOOST_PP_EQUAL(BOOST_PP_DEC(data),elem), \
        BOOST_PP_EQUAL(BOOST_PP_INC(data),elem) \
        ) \
/* */
BEGIN BOOST_PP_SEQ_ELEM(1,BOOST_PP_SEQ_FILTER(FILTER_EQ_512_S, 20, SEQ_512)) == 21 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
# define FILTER_EQ_1024_S(s, data, elem) \
    BOOST_PP_BITOR \
        ( \
        BOOST_PP_EQUAL(BOOST_PP_DEC(data),elem), \
        BOOST_PP_EQUAL(BOOST_PP_INC(data),elem) \
        ) \
/* */
BEGIN BOOST_PP_SEQ_ELEM(0,BOOST_PP_SEQ_FILTER(FILTER_EQ_1024_S, 100, SEQ_1024)) == 99 END
#endif
/* BOOST_PP_SEQ_INSERT */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_INSERT(SEQ_NONE, 0, 7)) == 7 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_INSERT(SEQ, 0, 3)) == 34152 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_INSERT(SEQ, 2, 3)) == 41352 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_INSERT(SEQ, 4, 3)) == 41523 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_INSERT(SEQ_255, 0, 100)) == 256 END
BEGIN BOOST_PP_SEQ_ELEM(255,BOOST_PP_SEQ_INSERT(SEQ_255, 0, 100)) == 255 END
BEGIN BOOST_PP_SEQ_ELEM(0,BOOST_PP_SEQ_INSERT(SEQ_255, 0, 113)) == 113 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_ELEM(511,BOOST_PP_SEQ_INSERT(SEQ_511, 0, 0)) == 511 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_INSERT(SEQ_511, 510, 431)) == 512 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_ELEM(1023,BOOST_PP_SEQ_INSERT(SEQ_1023, 0, 0)) == 1023 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_INSERT(SEQ_1023, 742, 29)) == 1024 END
#endif
/* BOOST_PP_SEQ_POP_BACK */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_POP_BACK(SEQ)) == 415 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_POP_BACK(SEQ_256)) == 255 END
BEGIN BOOST_PP_SEQ_ELEM(254,BOOST_PP_SEQ_POP_BACK(SEQ_256)) == 255 END
BEGIN BOOST_PP_SEQ_ELEM(100,BOOST_PP_SEQ_POP_BACK(SEQ_256)) == 101 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_POP_BACK(SEQ_512)) == 511 END
BEGIN BOOST_PP_SEQ_ELEM(437,BOOST_PP_SEQ_POP_BACK(SEQ_511)) == 438 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_POP_BACK(SEQ_1024)) == 1023 END
BEGIN BOOST_PP_SEQ_ELEM(632,BOOST_PP_SEQ_POP_BACK(SEQ_1023)) == 633 END
#endif
/* BOOST_PP_SEQ_POP_FRONT */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_POP_FRONT(SEQ)) == 152 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_POP_FRONT(SEQ_256)) == 255 END
BEGIN BOOST_PP_SEQ_ELEM(0,BOOST_PP_SEQ_POP_FRONT(SEQ_256)) == 2 END
BEGIN BOOST_PP_SEQ_ELEM(254,BOOST_PP_SEQ_POP_FRONT(SEQ_256)) == 256 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_POP_FRONT(SEQ_512)) == 511 END
BEGIN BOOST_PP_SEQ_ELEM(347,BOOST_PP_SEQ_POP_FRONT(SEQ_512)) == 349 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_POP_FRONT(SEQ_1024)) == 1023 END
BEGIN BOOST_PP_SEQ_ELEM(875,BOOST_PP_SEQ_POP_FRONT(SEQ_1024)) == 877 END
#endif
/* BOOST_PP_SEQ_PUSH_FRONT */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_PUSH_FRONT(SEQ_NONE, 145)) == 145 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_PUSH_FRONT(SEQ, 3)) == 34152 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_PUSH_FRONT(SEQ_255, 57)) == 256 END
BEGIN BOOST_PP_SEQ_ELEM(0,BOOST_PP_SEQ_PUSH_FRONT(SEQ_255, 222)) == 222 END
BEGIN BOOST_PP_SEQ_ELEM(255,BOOST_PP_SEQ_PUSH_FRONT(SEQ_255, 111)) == 255 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_PUSH_FRONT(SEQ_511, 0)) == 512 END
BEGIN BOOST_PP_SEQ_ELEM(391,BOOST_PP_SEQ_PUSH_FRONT(SEQ_511, 435)) == 391 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_PUSH_FRONT(SEQ_1023, 0)) == 1024 END
BEGIN BOOST_PP_SEQ_ELEM(961,BOOST_PP_SEQ_PUSH_FRONT(SEQ_1023, 435)) == 961 END
#endif
/* BOOST_PP_SEQ_PUSH_BACK */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_PUSH_BACK(SEQ_NONE, 79)) == 79 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_PUSH_BACK(SEQ, 3)) == 41523 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_PUSH_BACK(SEQ_255, 199)) == 256 END
BEGIN BOOST_PP_SEQ_ELEM(254,BOOST_PP_SEQ_PUSH_BACK(SEQ_255, 99)) == 255 END
BEGIN BOOST_PP_SEQ_ELEM(255,BOOST_PP_SEQ_PUSH_BACK(SEQ_255, 99)) == 99 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_PUSH_BACK(SEQ_511, 275)) == 512 END
BEGIN BOOST_PP_SEQ_ELEM(473,BOOST_PP_SEQ_PUSH_BACK(SEQ_511, 78)) == 474 END
BEGIN BOOST_PP_SEQ_ELEM(511,BOOST_PP_SEQ_PUSH_BACK(SEQ_511, 78)) == 78 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_PUSH_BACK(SEQ_1023, 275)) == 1024 END
BEGIN BOOST_PP_SEQ_ELEM(846,BOOST_PP_SEQ_PUSH_BACK(SEQ_1023, 720)) == 847 END
BEGIN BOOST_PP_SEQ_ELEM(1023,BOOST_PP_SEQ_PUSH_BACK(SEQ_1023, 311)) == 311 END
#endif
/* BOOST_PP_SEQ_REMOVE */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REMOVE(SEQ, 0)) == 152 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REMOVE(SEQ, 2)) == 412 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REMOVE(SEQ, 3)) == 415 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_REMOVE(SEQ_255, 254)) == 254 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_REMOVE(SEQ_256, 255)) == 255 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_REMOVE(SEQ_511, 429)) == 510 END
BEGIN BOOST_PP_SEQ_ELEM(462,BOOST_PP_SEQ_REMOVE(SEQ_512, 374)) == 464 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_REMOVE(SEQ_1023, 989)) == 1022 END
BEGIN BOOST_PP_SEQ_ELEM(731,BOOST_PP_SEQ_REMOVE(SEQ_1024, 555)) == 733 END
#endif
/* BOOST_PP_SEQ_REPLACE */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REPLACE(SEQ_NONE, 0, 22)) == 22 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REPLACE(SEQ, 0, 3)) == 3152 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REPLACE(SEQ, 1, 3)) == 4352 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_REPLACE(SEQ, 3, 3)) == 4153 END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_REPLACE(SEQ_256, 255, 22)) == 256 END
BEGIN BOOST_PP_SEQ_ELEM(233,BOOST_PP_SEQ_REPLACE(SEQ_256, 255, 22)) == 234 END
BEGIN BOOST_PP_SEQ_ELEM(255,BOOST_PP_SEQ_REPLACE(SEQ_256, 255, 22)) == 22 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_REPLACE(SEQ_511, 509, 350)) == 511 END
BEGIN BOOST_PP_SEQ_ELEM(482,BOOST_PP_SEQ_REPLACE(SEQ_512, 436, 33)) == 483 END
BEGIN BOOST_PP_SEQ_ELEM(436,BOOST_PP_SEQ_REPLACE(SEQ_512, 436, 33)) == 33 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_REPLACE(SEQ_1023, 846, 25)) == 1023 END
BEGIN BOOST_PP_SEQ_ELEM(984,BOOST_PP_SEQ_REPLACE(SEQ_1024, 841, 670)) == 985 END
BEGIN BOOST_PP_SEQ_ELEM(841,BOOST_PP_SEQ_REPLACE(SEQ_1024, 841, 670)) == 670 END
#endif
/* BOOST_PP_SEQ_SUBSEQ */
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_SUBSEQ(SEQ, 0, 4)) == 4152 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_SUBSEQ(SEQ, 0, 2)) == 41 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_SUBSEQ(SEQ, 1, 2)) == 15 END
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_SUBSEQ(SEQ, 2, 2)) == 52 END
# if BOOST_PP_LIMIT_SEQ == 512
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_SUBSEQ(SEQ_511, 372, 5)) == 5 END
BEGIN BOOST_PP_SEQ_ELEM(14,BOOST_PP_SEQ_SUBSEQ(SEQ_512, 293, 17)) == 308 END
#endif
# if BOOST_PP_LIMIT_SEQ == 1024
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_SUBSEQ(SEQ_1023, 846, 5)) == 5 END
BEGIN BOOST_PP_SEQ_ELEM(16,BOOST_PP_SEQ_SUBSEQ(SEQ_1024, 843, 19)) == 860 END
#endif
/* BOOST_PP_SEQ_FOR_EACH_PRODUCT */
# define F2(r, x) + BOOST_PP_SEQ_ELEM(0, x) + 2 - BOOST_PP_SEQ_ELEM(1, x)
#define ADD_NIL(x) x(nil)
BEGIN BOOST_PP_SEQ_FOR_EACH_PRODUCT(F2, ((1)(0)) ((2)(3))) == 0 END
/* Folding a sequence of sequences back into one flat sequence; the (~)
   seed is discarded with SEQ_TAIL before checking. */
# define L1 (0)(x)
# define L2 (a)(1)(b)(2)
# define L3 (c)(3)(d)
# define LL (L1)(L2)(L3)
#define SEQ_APPEND(s, state, elem) state elem
BEGIN BOOST_PP_SEQ_CAT(BOOST_PP_SEQ_TAIL(BOOST_PP_SEQ_FOLD_LEFT(SEQ_APPEND, (~), LL))) == 0x0a1b2c3d END
BEGIN BOOST_PP_SEQ_SIZE(BOOST_PP_SEQ_TAIL(BOOST_PP_SEQ_FOLD_LEFT(SEQ_APPEND, (~), LL))) == 9 END
/* BOOST_PP_SEQ_TO_LIST */
BEGIN BOOST_PP_LIST_AT(BOOST_PP_SEQ_TO_LIST(SEQ), 2) == 5 END
BEGIN BOOST_PP_IS_EMPTY(BOOST_PP_LIST_AT(BOOST_PP_SEQ_TO_LIST(SEQ_NONE),0)) == 1 END
BEGIN BOOST_PP_LIST_SIZE(BOOST_PP_SEQ_TO_LIST(SEQ_NONE)) == 1 END
/* BOOST_PP_SEQ_ENUM -- with __VA_OPT__ support an empty element enumerates
   to zero arguments instead of one empty argument. */
# if BOOST_PP_VARIADIC_HAS_OPT()
BEGIN BOOST_PP_VARIADIC_SIZE(BOOST_PP_SEQ_ENUM(SEQ_NONE)) == 0 END
# else
BEGIN BOOST_PP_VARIADIC_SIZE(BOOST_PP_SEQ_ENUM(SEQ_NONE)) == 1 END
# endif
BEGIN BOOST_PP_VARIADIC_ELEM(0,BOOST_PP_SEQ_ENUM(SEQ)) == 4 END
BEGIN BOOST_PP_TUPLE_ELEM(2,BOOST_PP_SEQ_ELEM(0,BOOST_PP_VARIADIC_SEQ_TO_SEQ(SEQVAR))) == 8 END
|
#!/bin/bash
# Rofi-based power menu: shows one glyph per action and dispatches the
# user's choice to the matching system command.

rofi_command="rofi -theme powermenu.rasi"

# Menu entries (icon glyphs rendered by the configured font).
power_off=""
reboot=""
lock=""
suspend="鈴"
log_out=""

# One entry per line; -selected-row 2 pre-highlights the lock entry.
options="$power_off\n$reboot\n$lock\n$suspend\n$log_out"

chosen="$(echo -e "$options" | $rofi_command -dmenu -selected-row 2)"

# User dismissed the menu (Escape) -> rofi prints nothing; do nothing.
[ -z "$chosen" ] && exit 0

# Quote both the word and the patterns so glyphs are matched literally
# instead of being interpreted as glob patterns.
case "$chosen" in
    "$power_off")
        # Destructive actions go through a confirmation prompt first.
        promptmenu.sh --yes-command "systemctl poweroff" --query "Shutdown?"
        ;;
    "$reboot")
        promptmenu.sh --yes-command "systemctl reboot" --query "Reboot?"
        ;;
    "$lock")
        screenlock.sh
        ;;
    "$suspend")
        systemctl suspend
        ;;
    "$log_out")
        # Quit the bspwm window manager, ending the session.
        bspc quit
        ;;
esac
|
#!/bin/sh
# Package the macOS ninfs.app bundle into a compressed, versioned .dmg.
# Requires a prior build in dist/ninfs.app and an importable ninfs package.

# Fail fast on any error or unset variable -- set BEFORE any command runs,
# so a failed version lookup or copy aborts the build instead of producing
# a broken image. (The original enabled this only after the first commands.)
set -e -u

NV=$(python3 -c 'import ninfs.__init__ as i; print(i.__version__)')
DMGDIR="build/dmg/ninfs-$NV"

# Start from a clean staging directory.
rm -rf "$DMGDIR"
mkdir -p "$DMGDIR"

# -c uses APFS clonefile for a fast copy-on-write copy of the app bundle.
cp -rpc dist/ninfs.app "$DMGDIR/ninfs.app"

# Standard drag-to-install convenience link plus the getting-started guide.
ln -s /Applications "$DMGDIR/Applications"
cp resources/MacGettingStarted.pdf "$DMGDIR/Getting Started.pdf"

# UDZO = zlib-compressed read-only image; -ov overwrites a previous build.
hdiutil create -format UDZO -srcfolder "$DMGDIR" -fs HFS+ "dist/ninfs-$NV-macos.dmg" -ov
|
<reponame>kali-linux-sh/4METHOD
# Download a web page and save its HTML to a local file.
import urllib.request  # `import urllib` alone was redundant

print("================================")
print("------DobleHtMl-----------------")
print("================================ \n")

url = input("Ingrese algun url: ")
nombre = input("Ruta o nombre: ")

# Close the HTTP response as soon as the body has been read.
with urllib.request.urlopen(url) as html_handler:
    # NOTE(review): assumes the page is UTF-8 encoded -- the original made
    # the same assumption; a non-UTF-8 page will raise UnicodeDecodeError.
    html = str(html_handler.read(), 'utf-8')

# Write with an explicit UTF-8 encoding: the content was decoded as UTF-8,
# so relying on the platform default (e.g. cp1252 on Windows) could raise
# UnicodeEncodeError for characters outside that charset.
with open(nombre, "w", encoding="utf-8") as file_handler:
    print("Copiando pagina web en tu escritorio...")
    file_handler.write(html)

print("Pagina web copiada con exito!")
|
<filename>tests/tests.runtime.js
"use strict";
QUnit.test( "Runtime: API", function test(assert){
	// Every runtime tag must be exported as a callable function.
	var tags = [
		[ any, "any" ], [ nul, "nul" ], [ undef, "undef" ], [ string, "string" ],
		[ bool, "bool" ], [ number, "number" ], [ finite, "finite" ], [ int, "int" ],
		[ bint, "bint" ], [ symb, "symb" ], [ array, "array" ], [ object, "object" ],
		[ func, "func" ], [ regex, "regex" ]
	];

	assert.expect( tags.length );
	tags.forEach( function check(tag){
		assert.ok( _isFunction( tag[0] ), tag[1] + "(..)" );
	} );
} );
QUnit.test( "Runtime: any(..)", function test(assert){
	// `any` passes literals and interpolated values through unchanged;
	// the empty tag yields undefined.
	var cases = [
		[ any`hello world`, "hello world", "literal" ],
		[ any`${42}`, 42, "value" ],
		[ any` hello ${42} \n${true}! `, " hello 42 \ntrue! ", "multiple strings/values" ],
		[ any``, undefined, "empty default" ]
	];

	assert.expect( cases.length );
	cases.forEach( function check(c){
		assert.strictEqual( c[0], c[1], c[2] );
	} );
} );
QUnit.test( "Runtime: undef(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = undef`undefined`;
	var value = undef`${undefined}`;
	var wsLiteral = undef` \n undefined \t `;
	var wsValue = undef` \n ${undefined} \t `;
	var empty = undef``;
	var invalid1 = run(function(){ return undef` x ${undefined} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return undef` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return undef`${undefined} ${undefined}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return undef`not undefined`; },/is not type: 'undefined'/i,"failed 1");
	var failed2 = run(function(){ return undef`${null}`; },/is not type: 'undefined'/i,"failed 2");

	assert.expect( 10 );
	assert.strictEqual( literal, undefined, "literal" );
	assert.strictEqual( value, undefined, "value" );
	assert.strictEqual( wsLiteral, undefined, "extra whitespace: literal" );
	assert.strictEqual( wsValue, undefined, "extra whitespace: value" );
	assert.strictEqual( empty, undefined, "empty default" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: null value" );
} );
QUnit.test( "Runtime: nul(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = nul`null`;
	var value = nul`${null}`;
	var wsLiteral = nul` \n null \t `;
	var wsValue = nul` \n ${null} \t `;
	var empty = nul``;
	var invalid1 = run(function(){ return nul` x ${null} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return nul` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return nul`${null} ${null}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return nul`not null`; },/is not type: 'null'/i,"failed 1");
	var failed2 = run(function(){ return nul`${undefined}`; },/is not type: 'null'/i,"failed 2");
	var failed3 = run(function(){ return nul`${Object.create(null)}`; },/is not type: 'null'/i,"failed 3");

	assert.expect( 11 );
	assert.strictEqual( literal, null, "literal" );
	assert.strictEqual( value, null, "value" );
	assert.strictEqual( wsLiteral, null, "extra whitespace: literal" );
	assert.strictEqual( wsValue, null, "extra whitespace: value" );
	assert.strictEqual( empty, null, "empty default" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: undefined value" );
	assert.strictEqual( failed3, "failed 3", "failed: non-string-coercible" );
} );
QUnit.test( "Runtime: string(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = string`hello`;
	var value = string`${"hello"}`;
	var wsLiteral = string` \n hello \t `;
	var wsValue = string` \n ${"hello"} \t `;
	var empty = string``;
	var failed1 = run(function(){ return string`${42}`; },/is not type: 'string'/i,"failed 1");
	var failed2 = run(function(){ return string`x ${42} y`; },/is not type: 'string'/i,"failed 2");
	var failed3 = run(function(){ return string`${"foo"} ${"bar"} ${42}`; },/is not type: 'string'/i,"failed 3");

	assert.expect( 8 );
	assert.strictEqual( literal, "hello", "literal" );
	assert.strictEqual( value, "hello", "value" );
	assert.strictEqual( wsLiteral, " \n hello \t ", "extra whitespace: literal" );
	assert.strictEqual( wsValue, " \n hello \t ", "extra whitespace: value" );
	assert.strictEqual( empty, "", "empty string" );
	assert.strictEqual( failed1, "failed 1", "failed: number value" );
	assert.strictEqual( failed2, "failed 2", "failed: literals + number" );
	assert.strictEqual( failed3, "failed 3", "failed: multiple values" );
} );
QUnit.test( "Runtime: bool(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = bool`false`;
	var value = bool`${false}`;
	var wsLiteral = bool` \n true \t `;
	var wsValue = bool` \n ${true} \t `;
	var invalid1 = run(function(){ return bool` x ${true} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return bool` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return bool`${false} ${true}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return bool`not false`; },/is not type: 'boolean'/i,"failed 1");
	var failed2 = run(function(){ return bool`${1}`; },/is not type: 'boolean'/i,"failed 2");
	// Unlike string/null/undefined, boolean has no sensible empty default.
	var failed3 = run(function(){ return bool``; },/no default for type: boolean/i,"failed 3");

	assert.expect( 10 );
	assert.strictEqual( literal, false, "literal" );
	assert.strictEqual( value, false, "value" );
	assert.strictEqual( wsLiteral, true, "extra whitespace: literal" );
	assert.strictEqual( wsValue, true, "extra whitespace: value" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: number value" );
	assert.strictEqual( failed3, "failed 3", "failed: no empty default" );
} );
QUnit.test( "Runtime: number(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = number`42`;
	var value = number`${42}`;
	var wsLiteral = number` \n NaN \t `;
	var wsValue = number` \n ${NaN} \t `;
	var invalid1 = run(function(){ return number` x ${42} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return number` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return number`${42} ${42}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return number`forty-two`; },/is not type: 'number'/i,"failed 1");
	var failed2 = run(function(){ return number``; },/no default for type: number/i,"failed 2");
	var failed3 = run(function(){ return number`${"hello"}`; },/is not type: 'number'/i,"failed 3");

	assert.expect( 10 );
	assert.strictEqual( literal, 42, "literal" );
	assert.strictEqual( value, 42, "value" );
	// NaN !== NaN, so the NaN results are compared with Object.is(..).
	assert.ok( Object.is( wsLiteral, NaN ), "extra whitespace: literal (NaN)" );
	assert.ok( Object.is( wsValue, NaN ), "extra whitespace: value (NaN)" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: no empty default" );
	assert.strictEqual( failed3, "failed 3", "failed: string value" );
} );
QUnit.test( "Runtime: finite(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = finite`1E308`;
	var value = finite`${1E308}`;
	var wsLiteral = finite` \n 42 \t `;
	var wsValue = finite` \n ${42} \t `;
	var invalid1 = run(function(){ return finite` x ${42} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return finite` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return finite`${42} ${42}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return finite`infinitely`; },/is not type: 'finite number'/i,"failed 1");
	var failed2 = run(function(){ return finite``; },/no default for type: 'finite number'/i,"failed 2");
	// Infinity is a number but not a *finite* number.
	var failed3 = run(function(){ return finite`${Infinity}`; },/is not type: 'finite number'/i,"failed 3");
	var failed4 = run(function(){ return finite`${"hello"}`; },/is not type: 'finite number'/i,"failed 4");

	assert.expect( 11 );
	assert.strictEqual( literal, 1E308, "literal" );
	assert.strictEqual( value, 1E308, "value" );
	assert.strictEqual( wsLiteral, 42, "extra whitespace: literal" );
	assert.strictEqual( wsValue, 42, "extra whitespace: value" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: no empty default" );
	assert.strictEqual( failed3, "failed 3", "failed: Infinity" );
	assert.strictEqual( failed4, "failed 4", "failed: string value" );
} );
QUnit.test( "Runtime: int(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = int`42`;
	var value = int`${42}`;
	var wsLiteral = int` \n 42 \t `;
	var wsValue = int` \n ${42} \t `;
	var invalid1 = run(function(){ return int` x ${42} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return int` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return int`${42} ${42}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return int`PI`; },/is not type: 'integer'/i,"failed 1");
	var failed2 = run(function(){ return int``; },/no default for type: integer/i,"failed 2");
	var failed3 = run(function(){ return int`${3.14}`; },/is not type: 'integer'/i,"failed 3");
	var failed4 = run(function(){ return int`${"hello"}`; },/is not type: 'integer'/i,"failed 4");
	// Negative zero is rejected both as a literal and as a value.
	var failed5 = run(function(){ return int`-0`; },/is not type: 'integer'/i,"failed 5");
	var failed6 = run(function(){ return int`${-0}`; },/is not type: 'integer'/i,"failed 6");

	assert.expect( 13 );
	assert.strictEqual( literal, 42, "literal" );
	assert.strictEqual( value, 42, "value" );
	assert.strictEqual( wsLiteral, 42, "extra whitespace: literal" );
	assert.strictEqual( wsValue, 42, "extra whitespace: value" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: no empty default" );
	assert.strictEqual( failed3, "failed 3", "failed: floating point" );
	assert.strictEqual( failed4, "failed 4", "failed: string value" );
	assert.strictEqual( failed5, "failed 5", "failed: -0 literal" );
	assert.strictEqual( failed6, "failed 6", "failed: -0 value" );
} );
QUnit.test( "Runtime: bint(..)", function test(assert){
	// Skip entirely on engines without BigInt support.
	if (typeof BigInt == "undefined") {
		assert.expect(0);
		return;
	}

	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	var literal = bint`42n`;
	var value = bint`${BigInt("42")}`;
	var wsLiteral = bint` \n 42n \t `;
	var wsValue = bint` \n ${BigInt("42")} \t `;
	var invalid1 = run(function(){ return bint` x ${BigInt("42")} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return bint` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return bint`${BigInt("42")} ${BigInt("42")}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return bint`42big`; },/is not type: 'bigint'/i,"failed 1");
	var failed2 = run(function(){ return bint``; },/no default for type: bigint/i,"failed 2");
	var failed3 = run(function(){ return bint`${42}`; },/is not type: 'bigint'/i,"failed 3");
	var failed4 = run(function(){ return bint`${"hello"}`; },/is not type: 'bigint'/i,"failed 4");
	var failed5 = run(function(){ return bint`-0`; },/is not type: 'bigint'/i,"failed 5");
	var failed6 = run(function(){ return bint`${-0}`; },/is not type: 'bigint'/i,"failed 6");

	assert.expect( 13 );
	assert.strictEqual( literal, BigInt("42"), "literal" );
	assert.strictEqual( value, BigInt("42"), "value" );
	assert.strictEqual( wsLiteral, BigInt("42"), "extra whitespace: literal" );
	assert.strictEqual( wsValue, BigInt("42"), "extra whitespace: value" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: no empty default" );
	assert.strictEqual( failed3, "failed 3", "failed: floating point" );
	assert.strictEqual( failed4, "failed 4", "failed: string value" );
	assert.strictEqual( failed5, "failed 5", "failed: -0 literal" );
	assert.strictEqual( failed6, "failed 6", "failed: -0 value" );
} );
QUnit.test( "Runtime: symb(..)", function test(assert){
	// Evaluate `fn`; if it throws and the error's string form matches `re`,
	// return `label`, otherwise return the stringified error.
	function run(fn,re,label) {
		try { return fn(); }
		catch (e) { return (re.test(e) ? label : e.toString()); }
	}

	// Symbols are unique, so results are compared via their String(..) form.
	var literal = String( symb`Symbol('abc')` );
	var value = String( symb`${Symbol("abc")}` );
	var wsLiteral = String( symb` \n Symbol("abc") \t ` );
	var wsValue = String( symb` \n ${Symbol("abc")} \t ` );
	var empty = String( symb`` );
	var invalid1 = run(function(){ return symb` x ${Symbol("abc")} y `; },/invalid/i,"invalid 1");
	var invalid2 = run(function(){ return symb` x ${Object.create(null)} y `; },/invalid/i,"invalid 2");
	var invalid3 = run(function(){ return symb`${"abc"} ${"abc"}`; },/invalid/i,"invalid 3");
	var failed1 = run(function(){ return symb`symbol-abc`; },/is not type: 'symbol'/i,"failed 1");
	var failed2 = run(function(){ return symb`${1}`; },/is not type: 'symbol'/i,"failed 2");

	assert.expect( 10 );
	assert.strictEqual( literal, "Symbol(abc)", "literal" );
	assert.strictEqual( value, "Symbol(abc)", "value" );
	assert.strictEqual( wsLiteral, "Symbol(abc)", "extra whitespace: literal" );
	assert.strictEqual( wsValue, "Symbol(abc)", "extra whitespace: value" );
	assert.strictEqual( empty, "Symbol()", "empty default" );
	assert.strictEqual( invalid1, "invalid 1", "invalid: literals" );
	assert.strictEqual( invalid2, "invalid 2", "invalid: non-string-coercible" );
	assert.strictEqual( invalid3, "invalid 3", "invalid: multiple values" );
	assert.strictEqual( failed1, "failed 1", "failed: literal" );
	assert.strictEqual( failed2, "failed 2", "failed: number value" );
} );
// Exercises the `array` tagged-template runtime validator: array literals,
// interpolated arrays, whitespace trimming, empty default, invalid tag
// usage, wrong-type values, and a malformed array literal.
QUnit.test( "Runtime: array(..)", function test(assert){
	var rExpected = [1,2,3,];
	var pExpected = [1,2,3,];
	var qExpected = [1,2,3,];
	var tExpected = [1,2,3,];
	var sExpected = [];
	var uExpected = "invalid 1";
	var vExpected = "invalid 2";
	var wExpected = "invalid 3";
	var xExpected = "failed 1";
	var yExpected = "failed 2";
	var zExpected = "failed 3";
	// Happy paths: literal text, interpolated value, padded variants, empty.
	var rActual = array`[1,2,3]`;
	var pActual = array`${[1,2,3,]}`;
	var qActual = array` \n [1,2,3] \t `;
	var tActual = array` \n ${[1,2,3,]} \t `;
	var sActual = array``;
	// Invalid usage: extra literals around a value, non-coercible value,
	// or more than one interpolated value.
	var uActual;
	try {
		uActual = array` x ${[1,2,3,]} y `;
	}
	catch (e) {
		uActual = (/invalid/i.test(e) ? "invalid 1" : e.toString());
	}
	var vActual;
	try {
		vActual = array` x ${Object.create(null)} y `;
	}
	catch (e) {
		vActual = (/invalid/i.test(e) ? "invalid 2" : e.toString());
	}
	var wActual;
	try {
		wActual = array`${[1,2,3,]} ${[1,2,3,]}`;
	}
	catch (e) {
		wActual = (/invalid/i.test(e) ? "invalid 3" : e.toString());
	}
	// Type failures: number, stringified array, and malformed literal.
	var xActual;
	try {
		xActual = array`${1}`;
	}
	catch (e) {
		xActual = (/is not type: 'array'/i.test(e) ? "failed 1" : e.toString());
	}
	var yActual;
	try {
		yActual = array`${"[1,2,3]"}`;
	}
	catch (e) {
		yActual = (/is not type: 'array'/i.test(e) ? "failed 2" : e.toString());
	}
	var zActual;
	try {
		zActual = array`[1,2,3,[]`;
	}
	catch (e) {
		zActual = (/is not type: 'array'/i.test(e) ? "failed 3" : e.toString());
	}
	assert.expect( 11 );
	assert.deepEqual( rActual, rExpected, "literal" );
	assert.deepEqual( pActual, pExpected, "value" );
	assert.deepEqual( qActual, qExpected, "extra whitespace: literal" );
	assert.deepEqual( tActual, tExpected, "extra whitespace: value" );
	assert.deepEqual( sActual, sExpected, "empty default" );
	assert.strictEqual( uActual, uExpected, "invalid: literals" );
	assert.strictEqual( vActual, vExpected, "invalid: non-string-coercible" );
	assert.strictEqual( wActual, wExpected, "invalid: multiple values" );
	assert.strictEqual( xActual, xExpected, "failed: number value" );
	assert.strictEqual( yActual, yExpected, "failed: string '[1,2,3]'" );
	assert.strictEqual( zActual, zExpected, "failed: malformed array in literal" );
} );
// Exercises `array({ parseShapeOnly: true, ... })`: parsing shape strings
// like "int[]", "<int,string>[]" (tuples), nesting, optional parentheses,
// and embedded whitespace, into {type, contains, description} trees.
QUnit.test( "Runtime: array(..), parse shapes only", function test(assert){
	// Expected shape descriptors; `contains` is a type name, a nested
	// descriptor, or (for tuples) an array of element descriptors.
	var rExpected = {
		type: "array",
		contains: "int",
		description: "int[]",
	};
	var pExpected = {
		type: "array",
		contains: "string",
		description: "string[]",
	};
	var qExpected = {
		type: "array",
		contains: {
			type: "array",
			contains: "string",
			description: "string[]",
		},
		description: "string[][]",
	};
	var tExpected = {
		type: "array",
		contains: [ "int", "string", ],
		description: "<int,string>",
	};
	var sExpected = {
		type: "array",
		contains: {
			type: "array",
			contains: [ "int", "string", ],
			description: "<int,string>",
		},
		description: "<int,string>[]",
	};
	var uExpected = {
		type: "array", contains:
		[
			{
				type: "array",
				contains: "int",
				description: "int[]",
			},
			"string",
		],
		description: "<int[],string>",
	};
	var vExpected = {
		type: "array",
		contains: {
			type: "array",
			contains: [
				{
					type: "array",
					contains: {
						type: "array",
						contains: "int",
						description: "int[]",
					},
					description: "int[][]",
				},
				{
					type: "array",
					contains: "string",
					description: "string[]",
				},
			],
			description: "<int[][],string[]>",
		},
		description: "<int[][],string[]>[]",
	};
	var wExpected = {
		type: "array",
		contains: {
			type: "array",
			contains: [
				{
					type: "array",
					contains: [ "int", "string", ],
					description: "<int,string>",
				},
				{
					type: "array",
					contains: "int",
					description: "int[]",
				},
				{
					type: "array",
					contains: [
						{
							type: "array",
							contains: {
								type: "array",
								contains: [
									"int",
									{
										type: "array",
										contains: "string",
										description: "string[]",
									},
								],
								description: "<int,string[]>",
							},
							description: "<int,string[]>[]",
						},
						"string",
					],
					description: "<<int,string[]>[],string>",
				},
			],
			description: "<<int,string>,int[],<<int,string[]>[],string>>",
		},
		description: "<<int,string>,int[],<<int,string[]>[],string>>[]",
	};
	// Whitespace (including a literal newline inside the shape string) is
	// expected to be tolerated by the parser.
	var rActual = array({ parseShapeOnly: true, v: ["int[ ]",], });
	var pActual = array({ parseShapeOnly: true, v: [`string[
	]`,], });
	var qActual = array({ parseShapeOnly: true, v: ["string[][]",], });
	var tActual = array({ parseShapeOnly: true, v: ["<int,string>",], });
	var sActual = array({ parseShapeOnly: true, v: ["<int,string>[]",], });
	var uActual = array({ parseShapeOnly: true, v: ["<int[],string>",], });
	var vActual = array({ parseShapeOnly: true, v: ["<int[][],string[]>[]",], });
	var wActual = array({ parseShapeOnly: true, v: ["<(<int,(string)>),int[],<(<int,string[]>)[],string>>[]",], });
	assert.expect( 8 );
	assert.deepEqual( rActual, rExpected, "parse: `int[]`" );
	assert.deepEqual( pActual, pExpected, "parse: `string[]`" );
	assert.deepEqual( qActual, qExpected, "parse: `string[][]`" );
	assert.deepEqual( tActual, tExpected, "parse: `<int,string>`" );
	assert.deepEqual( sActual, sExpected, "parse: `<int,string>[]`" );
	assert.deepEqual( uActual, uExpected, "parse: `<int[],string>`" );
	assert.deepEqual( vActual, vExpected, "parse: `<int[][],string[]>[]`" );
	assert.deepEqual( wActual, wExpected, "parse: `<(<int,(string)>),int[],<(<int,string[]>)[],string>>[]`" );
} );
// Negative tests for the shape parser: each malformed shape string must
// throw an error whose message matches the expected pattern.
QUnit.test( "Runtime: array(..), shape parse failure", function test(assert){
	var rExpected = "invalid 1";
	var pExpected = "invalid 2";
	var qExpected = "invalid 3";
	var tExpected = "invalid 4";
	var sExpected = "invalid 5";
	var uExpected = "invalid 6";
	var vExpected = "invalid 7";
	var wExpected = "invalid 8";
	var xExpected = "invalid 9";
	var yExpected = "invalid 10";
	// Bare scalar type (no []/<> array marker).
	var rActual;
	try {
		rActual = array({ parseShapeOnly: true, v: ["int",], });
	}
	catch (e) {
		rActual = (/not an array/i.test(e) ? "invalid 1" : e.toString());
	}
	// Adjacent groups, unterminated paren, nested group misuse.
	var pActual;
	try {
		pActual = array({ parseShapeOnly: true, v: ["(int) string",], });
	}
	catch (e) {
		pActual = (/not allowed/i.test(e) ? "invalid 2" : e.toString());
	}
	var qActual;
	try {
		qActual = array({ parseShapeOnly: true, v: ["(int[]",], });
	}
	catch (e) {
		qActual = (/unterminated/i.test(e) ? "invalid 3" : e.toString());
	}
	var tActual;
	try {
		tActual = array({ parseShapeOnly: true, v: ["( <<int>)",], });
	}
	catch (e) {
		tActual = (/not allowed/i.test(e) ? "invalid 4" : e.toString());
	}
	// Value literal where a shape was expected.
	var sActual;
	try {
		sActual = array({ parseShapeOnly: true, v: ["[1,2,3]",], });
	}
	catch (e) {
		sActual = (/shape missing/i.test(e) ? "invalid 5" : e.toString());
	}
	// Tuple comma misuse: leading, trailing, doubled, and empty tuple.
	var uActual;
	try {
		uActual = array({ parseShapeOnly: true, v: ["<,int>",], });
	}
	catch (e) {
		uActual = (/not allowed/i.test(e) ? "invalid 6" : e.toString());
	}
	var vActual;
	try {
		vActual = array({ parseShapeOnly: true, v: ["<int,>",], });
	}
	catch (e) {
		vActual = (/not allowed/i.test(e) ? "invalid 7" : e.toString());
	}
	var wActual;
	try {
		wActual = array({ parseShapeOnly: true, v: ["<int,,string>",], });
	}
	catch (e) {
		wActual = (/not allowed/i.test(e) ? "invalid 8" : e.toString());
	}
	var xActual;
	try {
		xActual = array({ parseShapeOnly: true, v: ["<>",], });
	}
	catch (e) {
		xActual = (/not allowed/i.test(e) ? "invalid 9" : e.toString());
	}
	// Non-empty [..] brackets are not a valid array marker.
	var yActual;
	try {
		yActual = array({ parseShapeOnly: true, v: ["int[string]",], });
	}
	catch (e) {
		yActual = (/not an array/i.test(e) ? "invalid 10" : e.toString());
	}
	assert.expect( 10 );
	assert.deepEqual( rActual, rExpected, "parse: `int`" );
	assert.deepEqual( pActual, pExpected, "parse: `(int) string`" );
	assert.deepEqual( qActual, qExpected, "parse: `(int[]`" );
	assert.deepEqual( tActual, tExpected, "parse: `( <<int>)`" );
	assert.deepEqual( sActual, sExpected, "parse: `[1,2,3]`" );
	assert.deepEqual( uActual, uExpected, "parse: `<,int>`" );
	assert.deepEqual( vActual, vExpected, "parse: `<int,>`" );
	assert.deepEqual( wActual, wExpected, "parse: `<int,,string>`" );
	assert.deepEqual( xActual, xExpected, "parse: `<>`" );
	assert.deepEqual( yActual, yExpected, "parse: `int[string]`" );
} );
// Exercises curried shape validation: array`shape` returns a tag that then
// validates a value against that shape. "int[+]" requires >= 1 element.
QUnit.test( "Runtime: array(..), shape: int[]", function test(assert){
	var rExpected = [1,2,3,];
	var pExpected = [1,2,3,];
	var qExpected = [1,2,3,];
	var tExpected = [1,2,3,];
	var sExpected = [];
	var uExpected = "shape-mismatch 1";
	var vExpected = "shape-mismatch 2";
	var wExpected = "shape-mismatch 3";
	// Conforming values via literal and interpolated forms.
	var rActual = array`int[]``[1,2,3]`;
	var pActual = array`int[+]``${[1,2,3,]}`;
	var qActual = array`int[]`` \n [1,2,3] \t `;
	var tActual = array`int[]`` \n ${[1,2,3,]} \t `;
	var sActual = array`int[]```;
	// Mismatches: a non-int element, and an empty array against "int[+]".
	var uActual;
	try {
		uActual = array`int[]``[1,"hello"]`;
	}
	catch (e) {
		uActual = (/not of type/i.test(e) ? "shape-mismatch 1" : e.toString());
	}
	var vActual;
	try {
		vActual = array`int[]``${[1,"hello",]}`;
	}
	catch (e) {
		vActual = (/not of type/i.test(e) ? "shape-mismatch 2" : e.toString());
	}
	var wActual;
	try {
		wActual = array`int[+]``${[]}`;
	}
	catch (e) {
		wActual = (/element\(s\) of type/i.test(e) ? "shape-mismatch 3" : e.toString());
	}
	assert.expect( 8 );
	assert.deepEqual( rActual, rExpected, "literal" );
	assert.deepEqual( pActual, pExpected, "value" );
	assert.deepEqual( qActual, qExpected, "extra whitespace: literal" );
	assert.deepEqual( tActual, tExpected, "extra whitespace: value" );
	assert.deepEqual( sActual, sExpected, "empty default" );
	assert.strictEqual( uActual, uExpected, "shape-mismatch: mixed array literal" );
	assert.strictEqual( vActual, vExpected, "shape-mismatch: mixed array expression" );
	assert.strictEqual( wActual, wExpected, "shape-mismatch: empty array" );
} );
// Deeply nested tuple/array shapes, plus custom type names resolved via
// globals (`myint`): valid deep values, tuple arity errors, wrong element
// kinds, and alias lookup success/failure.
QUnit.test( "Runtime: array(..), shape: <int[][],string>[]", function test(assert){
	var val = [[[[1,2,],[3,4,],],["hello",],],[[[5,6,],[7,8,],],["world",],],];
	var rExpected = val;
	var pExpected = val;
	var qExpected = val;
	var tExpected = val;
	var sExpected = "failed 1";
	var uExpected = "failed 2";
	var vExpected = "failed 3";
	var wExpected = "failed 4";
	var xExpected = "failed 5";
	var yExpected = [42,];
	// Conforming values through the nested-tuple shape.
	var rActual = array`<int[][],<string>>[]``[[[[1,2,],[3,4,],],["hello",],],[[[5,6,],[7,8,],],["world",],],]`;
	var pActual = array`<int[+][],<string>>[+]``${val}`;
	var qActual = array`<int[][],<string>>[]`` \n [[[[1,2,],[3,4,],],["hello",],],[[[5,6,],[7,8,],],["world",],],] \t `;
	var tActual = array`<int[][],<string>>[]`` \n ${val} \t `;
	// Tuple arity: missing element, extra element, scalar where array due.
	var sActual;
	try {
		sActual = array`<int[][],<string>>[]``${[[[[1,2,],[3,4,],],],]}`;
	}
	catch (e) {
		sActual = (/missing expected element/i.test(e) ? "failed 1" : e.toString());
	}
	var uActual;
	try {
		uActual = array`<int[][],<string>>[]``${[[[[1,2,],[3,4,],],["hello","world",],],]}`;
	}
	catch (e) {
		uActual = (/beyond the tuple/i.test(e) ? "failed 2" : e.toString());
	}
	var vActual;
	try {
		vActual = array`<int[][],<string>>[]``${[[[[1,2,],3,],["hello","world",],],]}`;
	}
	catch (e) {
		vActual = (/not an array/i.test(e) ? "failed 3" : e.toString());
	}
	// Type-name aliasing via globals: `myint` bound to a non-validator,
	// to `int` with a non-conforming value, then to `int` with a match.
	var wActual;
	try {
		global.myint = "nothing";
		wActual = array`<myint>``${[42,]}`;
	}
	catch (e) {
		wActual = (/not of type/i.test(e) ? "failed 4" : e.toString());
	}
	finally {
		delete global.myint;
	}
	var xActual;
	try {
		global.myint = int;
		xActual = array`<myint>``${["hello",]}`;
	}
	catch (e) {
		xActual = (/not of type/i.test(e) ? "failed 5" : e.toString());
	}
	finally {
		delete global.myint;
	}
	var yActual;
	try {
		global.myint = int;
		yActual = array`<myint>``${[42,]}`;
	}
	catch (e) {
		yActual = (/not of type/i.test(e) ? "failed 6" : e.toString());
	}
	finally {
		delete global.myint;
	}
	assert.expect( 10 );
	assert.deepEqual( rActual, rExpected, "literal" );
	assert.deepEqual( pActual, pExpected, "value" );
	assert.deepEqual( qActual, qExpected, "extra whitespace: literal" );
	assert.deepEqual( tActual, tExpected, "extra whitespace: value" );
	assert.deepEqual( sActual, sExpected, "failed: missing tuple element" );
	assert.deepEqual( uActual, uExpected, "failed: extra tuple element" );
	assert.deepEqual( vActual, vExpected, "failed: number instead of array" );
	assert.deepEqual( wActual, wExpected, "failed: no 'myint' type" );
	assert.deepEqual( xActual, xExpected, "failed: 'myint' (as 'int' alias)" );
	assert.deepEqual( yActual, yExpected, "'myint' (as 'int' alias)" );
} );
// Exercises the `object` tagged-template runtime validator: object literals,
// interpolated objects, whitespace trimming, empty default, invalid tag
// usage, and non-object type failures.
QUnit.test( "Runtime: object(..)", function test(assert){
	var rExpected = {a:1,b:2,c:3,};
	var pExpected = {a:1,b:2,c:3,};
	var qExpected = {a:1,b:2,c:3,};
	var tExpected = {a:1,b:2,c:3,};
	var sExpected = {};
	var uExpected = "invalid 1";
	var vExpected = "invalid 2";
	var wExpected = "invalid 3";
	var xExpected = "failed 1";
	var yExpected = "failed 2";
	// Happy paths.
	var rActual = object`{a:1,b:2,c:3,}`;
	var pActual = object`${{a:1,b:2,c:3,}}`;
	var qActual = object` \n {a:1,b:2,c:3,} \t `;
	var tActual = object` \n ${{a:1,b:2,c:3,}} \t `;
	var sActual = object``;
	// Invalid usage: extra literals, non-coercible value, multiple values.
	var uActual;
	try {
		uActual = object` x ${{a:1,b:2,c:3,}} y `;
	}
	catch (e) {
		uActual = (/invalid/i.test(e) ? "invalid 1" : e.toString());
	}
	var vActual;
	try {
		vActual = object` x ${Object.create(null)} y `;
	}
	catch (e) {
		vActual = (/invalid/i.test(e) ? "invalid 2" : e.toString());
	}
	var wActual;
	try {
		wActual = object`${{}} ${{}}`;
	}
	catch (e) {
		wActual = (/invalid/i.test(e) ? "invalid 3" : e.toString());
	}
	// Type failures: bare word literal and a number value.
	var xActual;
	try {
		xActual = object`objs`;
	}
	catch (e) {
		xActual = (/is not type: 'object'/i.test(e) ? "failed 1" : e.toString());
	}
	var yActual;
	try {
		yActual = object`${1}`;
	}
	catch (e) {
		yActual = (/is not type: 'object'/i.test(e) ? "failed 2" : e.toString());
	}
	assert.expect( 10 );
	assert.deepEqual( rActual, rExpected, "literal" );
	assert.deepEqual( pActual, pExpected, "value" );
	assert.deepEqual( qActual, qExpected, "extra whitespace: literal" );
	assert.deepEqual( tActual, tExpected, "extra whitespace: value" );
	assert.deepEqual( sActual, sExpected, "empty default" );
	assert.strictEqual( uActual, uExpected, "invalid: literals" );
	assert.strictEqual( vActual, vExpected, "invalid: non-string-coercible" );
	assert.strictEqual( wActual, wExpected, "invalid: multiple values" );
	assert.strictEqual( xActual, xExpected, "failed: literal" );
	assert.strictEqual( yActual, yExpected, "failed: number value" );
} );
// Exercises the `func` tagged-template runtime validator: function source
// literals, interpolated function values, whitespace trimming, the empty
// default (an arrow returning undefined), invalid usage, and type failures.
QUnit.test( "Runtime: func(..)", function test(assert){
	function foo() { var x = 1; }
	// Results are compared via their source text (toString()).
	var rExpected = foo.toString();
	var pExpected = foo.toString();
	var qExpected = foo.toString();
	var tExpected = foo.toString();
	var sExpected = (()=>undefined).toString();
	var uExpected = "invalid 1";
	var vExpected = "invalid 2";
	var wExpected = "invalid 3";
	var xExpected = "failed 1";
	var yExpected = "failed 2";
	// Happy paths.
	var rActual = String( func`function foo() { var x = 1; }` );
	var pActual = String( func`${foo}` );
	var qActual = String( func` \n function foo() { var x = 1; } \t ` );
	var tActual = String( func` \n ${foo} \t ` );
	var sActual = String( func`` );
	// Invalid usage: extra literals, non-coercible value, multiple values.
	var uActual;
	try {
		uActual = func` x ${foo} y `;
	}
	catch (e) {
		uActual = (/invalid/i.test(e) ? "invalid 1" : e.toString());
	}
	var vActual;
	try {
		vActual = func` x ${Object.create(null)} y `;
	}
	catch (e) {
		vActual = (/invalid/i.test(e) ? "invalid 2" : e.toString());
	}
	var wActual;
	try {
		wActual = func`${foo} ${foo}`;
	}
	catch (e) {
		wActual = (/invalid/i.test(e) ? "invalid 3" : e.toString());
	}
	// Type failures: bare word literal and a number value.
	var xActual;
	try {
		xActual = func`funfunfun`;
	}
	catch (e) {
		xActual = (/is not type: 'function'/i.test(e) ? "failed 1" : e.toString());
	}
	var yActual;
	try {
		yActual = func`${1}`;
	}
	catch (e) {
		yActual = (/is not type: 'function'/i.test(e) ? "failed 2" : e.toString());
	}
	assert.expect( 10 );
	assert.strictEqual( rActual, rExpected, "literal" );
	assert.strictEqual( pActual, pExpected, "value" );
	assert.strictEqual( qActual, qExpected, "extra whitespace: literal" );
	assert.strictEqual( tActual, tExpected, "extra whitespace: value" );
	assert.strictEqual( sActual, sExpected, "empty default" );
	assert.strictEqual( uActual, uExpected, "invalid: literals" );
	assert.strictEqual( vActual, vExpected, "invalid: non-string-coercible" );
	assert.strictEqual( wActual, wExpected, "invalid: multiple values" );
	assert.strictEqual( xActual, xExpected, "failed: literal" );
	assert.strictEqual( yActual, yExpected, "failed: number value" );
} );
// Exercises the `regex` tagged-template runtime validator: regex literals,
// interpolated RegExp values, whitespace trimming, interpolation INSIDE the
// pattern, the empty default /(?:)/, and non-regex type failures.
QUnit.test( "Runtime: regex(..)", function test(assert){
	var rExpected = "/foo+bar?/gs";
	var pExpected = "/foo+bar?/gs";
	var qExpected = "/foo+bar?/gs";
	var tExpected = "/foo+bar?/gs";
	var sExpected = "/foo+42bar?10/gs";
	var uExpected = "/(?:)/";
	var vExpected = "failed 1";
	var wExpected = "failed 2";
	var xExpected = "failed 3";
	// Happy paths; note `regex` (unlike the other tags) allows multiple
	// interpolations when they are spliced into one pattern source.
	var rActual = String( regex`/foo+bar?/gs` );
	var pActual = String( regex`${/foo+bar?/gs}` );
	var qActual = String( regex` \n /foo+bar?/gs \t ` );
	var tActual = String( regex` \n ${/foo+bar?/gs} \t ` );
	var sActual = String( regex`/foo+${42}bar?${10}/gs` );
	var uActual = String( regex`` );
	// Type failures: a bare number, literals mixed with a regex value,
	// and multiple values that don't form a single regex.
	var vActual;
	try {
		vActual = regex`${42}`;
	}
	catch (e) {
		vActual = (/is not type: 'regular expression'/i.test(e) ? "failed 1" : e.toString());
	}
	var wActual;
	try {
		wActual = regex`x ${/foo/} y`;
	}
	catch (e) {
		wActual = (/is not type: 'regular expression'/i.test(e) ? "failed 2" : e.toString());
	}
	var xActual;
	try {
		xActual = regex`${/foo/} ${/bar/} ${42}`;
	}
	catch (e) {
		xActual = (/is not type: 'regular expression'/i.test(e) ? "failed 3" : e.toString());
	}
	assert.expect( 9 );
	assert.strictEqual( rActual, rExpected, "literal" );
	assert.strictEqual( pActual, pExpected, "value" );
	assert.strictEqual( qActual, qExpected, "extra whitespace: literal" );
	assert.strictEqual( tActual, tExpected, "extra whitespace: value" );
	assert.strictEqual( sActual, sExpected, "multiple values" );
	assert.strictEqual( uActual, uExpected, "empty default" );
	assert.strictEqual( vActual, vExpected, "failed: number value" );
	assert.strictEqual( wActual, wExpected, "failed: literals + regex" );
	assert.strictEqual( xActual, xExpected, "failed: multiple values" );
} );
// Predicate helper: true only when `v` is callable (typeof "function").
function _isFunction(v) {
	// Use strict equality; typeof always yields a string, so `==` added
	// nothing but an avoidable lint warning.
	return typeof v === "function";
}
|
#include <stdio.h>
#include <cstdint>
#include <iostream>
#include <array>
#include <filesystem>
#include <string>
#include <optional>
#include <glm/glm.hpp>
#include "SOIS/ImGuiSample.hpp"
#include "SOIS/ApplicationContext.hpp"
#include "SOIS/Renderer.hpp"
#include "SDL.h"
#include "imgui_stdlib.h"
#include "imgui_internal.h"
#include "nfd.h"
// Uniformly distributed float in [0, 1], inclusive at both ends.
float Rand()
{
  return static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
}
// Convert a glm 2D vector into the equivalent ImGui vector type.
ImVec2 ToImgui(glm::vec2 vec)
{
  return ImVec2{ vec.x, vec.y };
}
class FancyPoint
{
public:
glm::vec2 mPos;
glm::vec2 mVelocity;
glm::vec3 mColor;
float mRadius = 2.f;
FancyPoint(glm::vec2 pos, float r = 2.f, glm::vec3 c = glm::vec3(1.f, 1.f, 1.f), glm::vec2 velocity = glm::vec2(1, 1))
{
mPos = pos;
mRadius = r;
mColor = c;
mVelocity = velocity;
}
bool IsOutCanvas()
{
ImGuiIO& io = ImGui::GetIO();
glm::vec2 p = mPos;
float w = io.DisplaySize.x;
float h = io.DisplaySize.y;
return (p.x > w || p.y > h || p.x < 0 || p.y < 0);
}
void update()
{
mPos.x += mVelocity.x;
mPos.y += mVelocity.y;
//mVelocity.x *= (Rand() > .01) ? 1 : -1;
//mVelocity.y *= (Rand() > .01) ? 1 : -1;
if (IsOutCanvas())
{
ImGuiIO& io = ImGui::GetIO();
mPos = glm::vec2(Rand() * io.DisplaySize.x, Rand() * io.DisplaySize.y);
mVelocity.x *= Rand() > .5 ? 1 : -1;
mVelocity.y *= Rand() > .5 ? 1 : -1;
}
}
void draw() const
{
ImDrawList* draw_list = ImGui::GetWindowDrawList();
draw_list->AddCircleFilled(ImVec2(mPos.x, mPos.y), mRadius, ImColor{mColor.r, mColor.g, mColor.b});
}
};
// Maximum pixel distance at which two points are connected by a faded line.
constexpr float cMinDistance = 73.f;
std::vector<FancyPoint> InitPoints()
{
std::vector<FancyPoint> points;
ImGuiIO& io = ImGui::GetIO();
float canvasWidth = io.DisplaySize.x;
float canvasHeight = io.DisplaySize.y;
for (size_t i = 0; i < 100; i++)
{
points.emplace_back(glm::vec2(Rand() * canvasWidth, Rand() * canvasHeight), 3.4f, glm::vec3(1,0,0), glm::vec2(Rand()>.5?Rand():-Rand(), Rand()>.5?Rand():-Rand()));
}
return std::move(points);
}
void DrawPoints(std::vector<FancyPoint>& points)
{
for (auto const& point1 : points)
{
for (auto const& point2 : points)
{
if ((&point1 != &point2))
{
auto distance = glm::distance(point1.mPos, point2.mPos);
if (distance <= cMinDistance)
{
ImDrawList* draw_list = ImGui::GetWindowDrawList();
draw_list->AddLine(ToImgui(point1.mPos), ToImgui(point2.mPos), ImColor(1.f, 1.f, 1.f, 1 - (distance / cMinDistance)));
}
}
}
}
// Draw the points separately to make them draw on top
for (auto const& point1 : points)
{
point1.draw();
}
}
// Advance every point by one frame and keep each one at least 100px away
// from the mouse cursor (points inside that radius are pushed to its edge).
void UpdatePoints(std::vector<FancyPoint>& points)
{
  ImGuiIO& io = ImGui::GetIO();
  glm::vec2 const mouse{ io.MousePos.x, io.MousePos.y };
  for (auto& point : points)
  {
    point.update();
    if (glm::distance(mouse, point.mPos) < 100.f)
    {
      auto const direction = glm::normalize(point.mPos - mouse);
      point.mPos = mouse + (direction * 100.f);
    }
  }
}
// Build the path to imgui.ini inside the SDL per-user preferences directory.
std::string GetImGuiIniPath()
{
  auto sdlIniPath = SDL_GetPrefPath("PlaymerTools", "PadInput");
  // SDL_GetPrefPath returns NULL when the directory cannot be created;
  // constructing std::filesystem::path from a null char* is undefined
  // behavior, so fall back to the working directory in that case.
  std::filesystem::path path{ sdlIniPath ? sdlIniPath : "" };
  SDL_free(sdlIniPath); // SDL_free(NULL) is a safe no-op
  path /= "imgui.ini";
  return path.u8string();
}
std::string PickImageFile()
{
std::string out;
nfdchar_t *outPath = nullptr;
nfdresult_t result = NFD_OpenDialog("png,jpg", NULL, &outPath);
if ( result == NFD_OKAY ) {
out = outPath;
free(outPath);
}
return out;
}
// Whether the settings window is visible; toggled by pressing ESC.
static bool gShowMainWindow = true;
// Last chosen/typed file paths for the card background and chip images.
static std::string gBingoCard;
static std::string gBingoChip;
// Loaded textures; null until the corresponding image file is picked.
std::unique_ptr<SOIS::Texture> gBingoCardTexture;
std::unique_ptr<SOIS::Texture> gBingoChipTexture;
// Draws a file-picker button and an editable text field sharing one path.
// Returns true when the path changed this frame (via dialog or typing).
bool FileUpdate(char const* aButtonLabel, char const* aLabel, std::string& file)
{
  bool changed = false;
  if (ImGui::Button(aButtonLabel))
  {
    file = PickImageFile();
    changed = changed || !file.empty();
  }
  ImGui::SameLine();
  // Call InputText unconditionally: the previous `changed || InputText(...)`
  // short-circuited it away on the frame a file was picked, so the text
  // field was not submitted/rendered at all on that frame.
  bool const edited = ImGui::InputText(aLabel, &file);
  return changed || edited;
}
// Result of fitting an image into a window: the scaled size and the
// top-left offset that centers it.
struct ImageDisplay
{
  ImVec2 Dimensions; // scaled image size (aspect ratio preserved)
  ImVec2 Position;   // offset centering the image within the window
};
// https://codereview.stackexchange.com/a/70916
// Uniformly scale an image to fit inside a window (letterboxing) and
// compute the offset that centers it.
ImageDisplay StretchToFit(ImVec2 aImageResolution, ImVec2 aWindowResolution)
{
  float scaleHeight = aWindowResolution.y / aImageResolution.y;
  float scaleWidth = aWindowResolution.x / aImageResolution.x;
  // The smaller scale guarantees the whole image fits on both axes.
  float scale = std::min(scaleHeight, scaleWidth);
  auto dimensions = ImVec2(scale * aImageResolution.x, scale * aImageResolution.y);
  // (Removed a dead `ImVec2(0,0)` initialization that was immediately
  // overwritten.)
  auto position = ImVec2((aWindowResolution.x - dimensions.x)/2, (aWindowResolution.y - dimensions.y)/2);
  return ImageDisplay{ dimensions, position };
}
// Fit an image of the given resolution into the full display area.
ImageDisplay GetRenderDimensions(ImVec2 aImageResolution)
{
  return StretchToFit(aImageResolution, ImGui::GetIO().DisplaySize);
}
// Draw the bingo card texture, letterboxed to the display and nudged by
// 10px. Caller must ensure gBingoCardTexture is non-null.
void RenderBingoCard()
{
  // (Removed an unused `io` local.)
  auto dimensions = GetRenderDimensions(ImVec2( gBingoCardTexture->Width, gBingoCardTexture->Height));
  ImGui::SetCursorPos(ImVec2{ dimensions.Position.x + 10, dimensions.Position.y + 10 });
  ImGui::Image((void*)gBingoCardTexture->GetTextureId(), dimensions.Dimensions/*, uv1, uv2*/);
}
// Screen positions of placed chips; left-click adds, right-click removes.
std::vector<ImVec2> gChipPositions;// {ImVec2{ 10.0f, 10.0f }};
// Draw every placed chip as a draggable image button. Active chips follow
// the mouse, right-clicked chips are removed, and a left-click on empty
// space places a new chip centered under the cursor.
void RenderBingoChips()
{
  std::optional<size_t> toBeDeleted; // defer erase until after iteration
  auto& io = ImGui::GetIO();
  float scale = 100; // chip side length in pixels
  bool isAnythingClicked = false;
  size_t i = 0;
  for (auto& chipPosition : gChipPositions)
  {
    ImGui::SetCursorPos(chipPosition);
    // NOTE(review): this IsItemClicked() result is discarded — looks like a
    // leftover; confirm it can be removed.
    ImGui::IsItemClicked();
    // Unique ID per chip so identical image buttons don't collide.
    ImGui::PushID((size_t)gBingoChipTexture->GetTextureId() + i);
    if (ImGui::ImageButton(gBingoChipTexture->GetTextureId(), ImVec2{ scale * 1, scale * 1 }, ImVec2{ 0,0 }, ImVec2{ 1,1 }, 0)
      || ImGui::IsItemActive())
    {
      ImGui::PopID();
      // Drag: follow the mouse by this frame's delta.
      chipPosition = ImVec2{ chipPosition.x + io.MouseDelta.x, chipPosition.y + io.MouseDelta.y};
      isAnythingClicked = true;
    }
    else if (ImGui::IsItemClicked(ImGuiMouseButton_Right))
    {
      // Right-click: mark for removal (erasing here would invalidate the
      // range-for iterator).
      toBeDeleted = i;
      isAnythingClicked = true;
      ImGui::PopID();
    }
    else
      ImGui::PopID();
    ++i;
  }
  if (toBeDeleted.has_value())
  {
    gChipPositions.erase(gChipPositions.begin() + *toBeDeleted);
  }
  // Left-click on empty space: place a new chip centered under the cursor.
  if (!isAnythingClicked && ImGui::IsMouseClicked(ImGuiMouseButton_Left) && ImGui::IsWindowFocused())
  {
    ImVec2 mousePosition = ImGui::GetMousePos();
    float toSubtract = scale * .5f;
    ImVec2 position{mousePosition.x - toSubtract, mousePosition.y - toSubtract };
    gChipPositions.push_back(position);
  }
}
// When true, the animated background point/line effect is skipped.
bool gDisableFancyPoints = false;
// Per-frame UI: renders the bingo card, handles the ESC show/hide toggle,
// draws the settings window, and renders the chips.
void MainWindow(SOIS::ApplicationContext& aContext)
{
  auto& io = ImGui::GetIO();
  if (gBingoCardTexture)
    RenderBingoCard();
  // Toggle only on the frame ESC is first pressed (duration == 0).
  if (io.KeysDown[SDL_SCANCODE_ESCAPE] && (io.KeysDownDuration[SDL_SCANCODE_ESCAPE] == 0.f))
    gShowMainWindow = !gShowMainWindow;
  if (false == gShowMainWindow)
    return;
  if (ImGui::Begin("Settings Window"))
  {
    if (FileUpdate("Open Bingo Card", "Bingo Card", gBingoCard))
      gBingoCardTexture = aContext.GetRenderer()->LoadTextureFromFile(gBingoCard);
    if (FileUpdate("Open Bingo Chip", "Bingo Chip", gBingoChip))
      gBingoChipTexture = aContext.GetRenderer()->LoadTextureFromFile(gBingoChip);
    if (ImGui::Button("Clear Chips"))
      gChipPositions.clear();
    if (ImGui::Button("Toggle Fancy Points"))
      gDisableFancyPoints = !gDisableFancyPoints;
  }
  // Per the Dear ImGui API, End() must be called whether or not Begin()
  // returned true; the previous code only called it inside the if-branch,
  // leaving the window stack unbalanced when the window was collapsed.
  ImGui::End();
  if (gBingoChipTexture)
    RenderBingoChips();
}
int main(int, char**)
{
  // Allow joystick events while the window is unfocused.
  SDL_SetHint(SDL_HINT_JOYSTICK_ALLOW_BACKGROUND_EVENTS, "1");
  SOIS::ApplicationInitialization();
  // Keep the ini path string alive for the lifetime of the context, since
  // the config stores a raw c_str() pointer.
  auto iniPath = GetImGuiIniPath();
  SOIS::ApplicationContextConfig config;
  config.aBlocking = false;
  config.aIniFile = iniPath.c_str();
  config.aWindowName = "SOIS Template";
  SOIS::ApplicationContext context{config};
  //SOIS::ImGuiSample sample;
  std::vector<FancyPoint> points;
  // Frame loop: one full-screen, undecorated "Canvas" window hosts the
  // background effect and the bingo UI.
  while (context.Update())
  {
    ImGui::Begin(
      "Canvas",
      nullptr,
      ImGuiWindowFlags_NoBackground |
      ImGuiWindowFlags_NoBringToFrontOnFocus |
      ImGuiWindowFlags_NoCollapse |
      ImGuiWindowFlags_NoDecoration |
      ImGuiWindowFlags_NoDocking |
      ImGuiWindowFlags_NoMove);
    {
      // Points are seeded on the first frame, once io.DisplaySize is valid.
      static bool firstRun = true;
      if (firstRun)
      {
        points = InitPoints();
        firstRun = false;
      }
      ImGuiIO& io = ImGui::GetIO();
      ImGui::SetWindowSize(ImVec2(io.DisplaySize.x, io.DisplaySize.y));
      ImGui::SetWindowPos(ImVec2(0, 0));
      if (!gDisableFancyPoints)
        DrawPoints(points);
      MainWindow(context);
    }
    ImGui::End();
    // Simulation advances after drawing, outside the Canvas window.
    UpdatePoints(points);
    //sample.Update();
  }
  return 0;
}
|
// Parse process.argv (from index 2) into an options object:
//   -f / --flag        -> { flag: true }
//   --key=value, -k=v  -> { key: "value" }   (previously mis-parsed as a
//                         boolean flag literally named "key=value")
//   key=value          -> { key: "value" }
//   anything else      -> pushed onto the `args` array
export function cli_arguments () {
	var argv = process.argv,
		imax = argv.length,
		i = 2,
		args = {
			args: []
		},
		x;
	for (; i < imax; i++){
		x = argv[i];
		var isFlag = x[0] === '-';
		// Strip any leading dashes before looking for `=`.
		var key = isFlag ? x.replace(/^[\-]+/, '') : x;
		var eq = key.indexOf('=');
		if (eq !== -1) {
			args[key.substring(0, eq)] = key.substring(eq + 1);
			continue;
		}
		if (isFlag) {
			args[key] = true;
			continue;
		}
		args.args.push(x);
	}
	return args;
}
|
// https://msdn.microsoft.com/en-us/library/windows/desktop/dd317756(v=vs.85).aspx
// Windows Script Host (JScript) snippet: reads the system code pages and the
// current user's locale formatting settings from the registry, then echoes
// them space-separated on one line.
var WshShell = new ActiveXObject("WScript.Shell");
// System OEM and ANSI code pages (numeric strings, e.g. "437", "1252").
var OEMCP = WshShell.RegRead("HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\Nls\\CodePage\\OEMCP");
var ACP = WshShell.RegRead("HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\Nls\\CodePage\\ACP");
// Per-user locale settings: decimal separator and short date format.
var sDecimal = WshShell.RegRead("HKEY_CURRENT_USER\\Control Panel\\International\\sDecimal");
var sShortDate = WshShell.RegRead("HKEY_CURRENT_USER\\Control Panel\\International\\sShortDate");
WScript.Echo(OEMCP, ACP, sDecimal, sShortDate);
|
<gh_stars>0
package net.blay09.mods.cookingforblockheads.client.model;
// Designed by Blay09 with Cubik Studio - https://cubik.studio
import net.minecraft.client.model.ModelBase;
import net.minecraft.client.model.ModelRenderer;
/**
 * Cabinet block model (designed by Blay09 with Cubik Studio). The geometry is
 * split across three render passes — {@link #renderColored()} for the body and
 * door, {@link #renderUncolored()} for the handle, and {@link #renderInterior()}
 * for the inner shelf — and both the door and its handle have mirrored
 * variants selected by {@link #setFlipped(boolean)}.
 */
public class ModelCabinet extends ModelBase {
    // When true, the door hinge is mirrored and the *Flipped parts are used.
    protected boolean isFlipped;
    public ModelRenderer BottomWall;
    public ModelRenderer RightWall;
    public ModelRenderer LeftWall;
    public ModelRenderer BackWall;
    public ModelRenderer BottomStick;
    public ModelRenderer InnerBoard;
    public ModelRenderer LeftStick;
    public ModelRenderer RightStick;
    public ModelRenderer TopWall;
    public ModelRenderer Door;
    public ModelRenderer DoorFlipped;
    public ModelRenderer DoorHandle;
    public ModelRenderer DoorHandleFlipped;

    /**
     * Builds every box of the model. Each ModelRenderer is created with its
     * (u, v) offset into the 128x128 texture atlas, positioned via
     * setRotationPoint, and given its box dimensions via addBox.
     */
    public ModelCabinet()
    {
        this.textureWidth = 128;
        this.textureHeight = 128;
        this.Door = new ModelRenderer(this, 0, 47);
        this.Door.setRotationPoint(-5.5F, 23.0F, -6.0F);
        this.Door.addBox(0.0F, -11.0F, 0.0F, 11, 11, 1, 0.0F);
        this.LeftWall = new ModelRenderer(this, 0, 59);
        this.LeftWall.setRotationPoint(7.0F, 24.0F, -5.0F);
        this.LeftWall.addBox(0.0F, -14.0F, 0.0F, 1, 14, 13, 0.0F);
        this.TopWall = new ModelRenderer(this, 0, 113);
        this.TopWall.setRotationPoint(-7.0F, 11.5F, -5.0F);
        this.TopWall.addBox(0.0F, -1.5F, 0.0F, 14, 2, 13, 0.0F);
        // Mirrored door: same texture, box extends in -X from the other hinge.
        this.DoorFlipped = new ModelRenderer(this, 0, 47);
        this.DoorFlipped.setRotationPoint(5.5F, 23.0F, -6.0F);
        this.DoorFlipped.addBox(-11.0F, -11.0F, 0.0F, 11, 11, 1, 0.0F);
        this.BackWall = new ModelRenderer(this, 28, 87);
        this.BackWall.setRotationPoint(-7.0F, 24.0F, 6.5F);
        this.BackWall.addBox(0.0F, -13.0F, 0.0F, 14, 13, 1, 0.0F);
        this.InnerBoard = new ModelRenderer(this, 54, 115);
        this.InnerBoard.setRotationPoint(-7.0F, 18.0F, -4.9F);
        this.InnerBoard.addBox(0.0F, -1.0F, 0.0F, 14, 1, 12, 0.0F);
        this.LeftStick = new ModelRenderer(this, 112, 116);
        this.LeftStick.setRotationPoint(-7.0F, 23.0F, -5.0F);
        this.LeftStick.addBox(0.0F, -11.0F, 0.0F, 2, 11, 1, 0.0F);
        this.DoorHandle = new ModelRenderer(this, 118, 124);
        this.DoorHandle.setRotationPoint(-5.5F, 19.0F, -6.5F);
        this.DoorHandle.addBox(8.5F, -3.0F, 0.0F, 1, 3, 1, 0.0F);
        this.RightStick = new ModelRenderer(this, 106, 116);
        this.RightStick.setRotationPoint(5.5F, 23.0F, -5.0F);
        this.RightStick.addBox(0.0F, -11.0F, 0.0F, 2, 11, 1, 0.0F);
        // Mirrored handle, offset to the opposite side of the flipped door.
        this.DoorHandleFlipped = new ModelRenderer(this, 118, 124);
        this.DoorHandleFlipped.setRotationPoint(5.5F, 19.0F, -6.5F);
        this.DoorHandleFlipped.addBox(-9.5F, -3.0F, 0.0F, 1, 3, 1, 0.0F);
        this.RightWall = new ModelRenderer(this, 0, 86);
        this.RightWall.setRotationPoint(-8.0F, 24.0F, -5.0F);
        this.RightWall.addBox(0.0F, -14.0F, 0.0F, 1, 14, 13, 0.0F);
        this.BottomWall = new ModelRenderer(this, 28, 101);
        this.BottomWall.setRotationPoint(-7.0F, 23.5F, -4.0F);
        this.BottomWall.addBox(0.0F, -0.5F, 0.0F, 14, 1, 11, 0.0F);
        this.BottomStick = new ModelRenderer(this, 78, 109);
        this.BottomStick.setRotationPoint(-7.0F, 23.5F, -5.0F);
        this.BottomStick.addBox(0.0F, -1.5F, 0.0F, 14, 2, 2, 0.0F);
    }

    /**
     * Renders the door handle in its own pass, picking the flipped variant
     * when needed. 0.0625 (1/16) is the per-unit render scale used throughout.
     */
    public void renderUncolored() {
        float f5 = 0.0625f;
        if(isFlipped) {
            this.DoorHandleFlipped.render(f5);
        } else {
            this.DoorHandle.render(f5);
        }
    }

    /** Renders the cabinet body and the (possibly flipped) door. */
    public void renderColored() {
        float f5 = 0.0625f;
        if(isFlipped) {
            this.DoorFlipped.render(f5);
        } else {
            this.Door.render(f5);
        }
        this.BottomWall.render(f5);
        this.TopWall.render(f5);
        this.RightWall.render(f5);
        this.LeftWall.render(f5);
        this.BackWall.render(f5);
        this.BottomStick.render(f5);
        this.LeftStick.render(f5);
        this.RightStick.render(f5);
    }

    /** Renders the inner shelf board only. */
    public void renderInterior() {
        float f5 = 0.0625f;
        this.InnerBoard.render(f5);
    }

    /** Selects whether the mirrored door/handle variants are rendered. */
    public void setFlipped(boolean doorFlipped) {
        this.isFlipped = doorFlipped;
    }
}
|
<reponame>uconomy/tezos-builder-suite<filename>backend/src/graphql/context.ts<gh_stars>1-10
import { Contract } from "../domain/contract";
import { Endpoint } from "../domain/endpoint";

/**
 * Context object made available to every Apollo GraphQL resolver.
 */
export type ApolloContext = {
// Contracts the server is working with in this session.
contracts: Contract[];
// The Tezos node endpoint used for deployments.
endpoint: Endpoint;
// Optional callback fired after a contract deployment, with the contract
// and the address it was originated at.
onDeployCompleted?: (contract: Contract, address: string) => void;
};
|
#!/bin/zsh
#
# Train the sa-sa-cm model (self-attention encoder, selfattention-cm
# decoder) on each of the five StackOverflow data splits, logging runs to
# a remote MLflow tracking server.
#
# FIX: the original script ended each invocation with a stray lone "\"
# continuation line, which made every subsequent "python scripts/train.py"
# line an *argument* of the first command instead of a separate run. The
# loop below runs each split as its own command.

MLFLOW_HOST=192.168.4.94
MLFLOW_PORT=1235
REMOTE_SERVER_URI="http://${MLFLOW_HOST}:${MLFLOW_PORT}"

for split in 1 2 3 4 5; do
    python scripts/train.py \
        --remote-server-uri $REMOTE_SERVER_URI \
        --experiment-name so-split-${split} \
        --run-name sa-sa-cm \
        --batch-size 32 \
        --data-dir ~/neural-tpps/data \
        --load-from-dir ~/neural-tpps/data/baseline/so/split_${split} \
        --plots-dir ~/neural-tpps/plots/so_${split}/sa_sa_cm \
        --time-scale 1e-5 \
        --save-model-freq 100 \
        --lr-rate-init 1e-2 \
        --lr-poisson-rate-init 1e-2 \
        --lr-scheduler-warmup 10 \
        --train-epochs 1001 \
        --patience 100 \
        --encoder selfattention \
        --encoder-encoding learnable_with_labels \
        --encoder-time-encoding absolute \
        --encoder-emb-dim 32 \
        --encoder-attn-activation softmax \
        --encoder-units-rnn 32 \
        --encoder-activation-final-mlp relu \
        --decoder selfattention-cm \
        --decoder-encoding learnable \
        --decoder-time-encoding absolute \
        --decoder-embedding-constraint nonneg \
        --decoder-emb-dim 32 \
        --decoder-attn-activation sigmoid \
        --decoder-units-rnn 32 \
        --decoder-constraint-rnn nonneg \
        --decoder-constraint-mlp nonneg \
        --decoder-activation-rnn gumbel \
        --decoder-activation-mlp gumbel \
        --decoder-activation-final-mlp softplus \
        --include-poisson True
done
|
#!/bin/bash
# Generates a serving certificate for the Kyverno webhook service and
# (re)deploys the controller either locally or inside the cluster.
#
# Usage:
#   deploy.sh --service=<name> --namespace=<ns> --serverIp=<ip>
# When --namespace is omitted the controller is expected to run locally.

# Parse --key=value style arguments.
for i in "$@"
do
case $i in
    --service=*)
    service_name="${i#*=}"
    shift
    ;;
    --namespace=*)
    namespace="${i#*=}"
    shift
    ;;
    --serverIp=*)
    serverIp="${i#*=}"
    shift
    ;;
esac
done

hub_user_name="nirmata"
project_name="kyverno"

# Default the service name when not supplied.
if [ -z "${service_name}" ]; then
  service_name="${project_name}-svc"
fi

echo "Generating certificate for the service ${service_name}..."
certsGenerator="./scripts/generate-server-cert.sh"
chmod +x "${certsGenerator}"

if [ -z "${namespace}" ]; then # controller should be launched locally
  ${certsGenerator} "--service=${service_name}" "--serverIp=${serverIp}" || exit 2
  kubectl delete -f config/install.yaml
  kubectl create -f config/install.yaml || exit 3
  echo -e "\n### You can build and run kyverno project locally.\n### To check its work, run it with parameters -cert, -key and -kubeconfig parameters (see paths of -cert and -key in the log above)."
else # controller should be launched within a cluster
  ${certsGenerator} "--service=${service_name}" "--namespace=${namespace}" "--serverIp=${serverIp}" || exit 2
  secret_name="${project_name}-secret"
  echo "Generating secret ${secret_name}..."
  kubectl delete secret "${secret_name}" 2>/dev/null
  # Quote expansions so empty or space-containing values cannot word-split.
  kubectl create secret generic "${secret_name}" --namespace "${namespace}" --from-file=./certs || exit 3
  echo "Creating the service ${service_name}..."
  kubectl delete -f crd/service.yaml
  kubectl create -f crd/service.yaml || exit 4
  echo "Creating deployment..."
  kubectl delete -f crd/deployment.yaml
  kubectl create -f crd/deployment.yaml || exit 5
  kubectl delete -f crd/crd.yaml
  kubectl create -f crd/crd.yaml || exit 3
  echo -e "\n### Controller is running in cluster.\n### You can use compile-image.sh to rebuild its image and then the current script to redeploy the controller.\n### Check its work by 'kubectl logs <controller_pod> command'"
fi
|
#!/bin/bash
# Submit one needles3 Slurm job per run number (1 through 5).
for number in 1 2 3 4 5
do
    sbatch scripts/main_signif_needles3.sh ${number}
done
|
// Regular expression pattern to validate given email address.
// Anchored match: local part of letters/digits/_.+-, then "@", a domain
// label of letters/digits/-, a literal dot, then the rest of the domain.
// NOTE(review): the final character class includes "." so it permits
// consecutive or trailing dots (e.g. "a@b.c..d"); confirm whether
// stricter domain validation is required.
let validationPattern = /^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$/;
|
// Doxygen-generated navigation index for DeserializePooling2d.cpp:
// each entry pairs a BOOST_FIXTURE_TEST_CASE with its anchor in the
// generated HTML page. Auto-generated — do not edit by hand.
var _deserialize_pooling2d_8cpp =
[
[ "BOOST_FIXTURE_TEST_CASE", "_deserialize_pooling2d_8cpp.xhtml#a871d76879d1522921464a09b07e9f0b4", null ],
[ "BOOST_FIXTURE_TEST_CASE", "_deserialize_pooling2d_8cpp.xhtml#a5053bdb79d4e6b3d9ea00d5aa8f7ff65", null ],
[ "BOOST_FIXTURE_TEST_CASE", "_deserialize_pooling2d_8cpp.xhtml#a6cec6b76dad19aae23a18d72505cbee2", null ],
[ "BOOST_FIXTURE_TEST_CASE", "_deserialize_pooling2d_8cpp.xhtml#a77b173bb59acc3bacef4e39e1693858a", null ]
];
|
String[] animals = { "cat", "dog", "bird", "horse", "cow" };
// Keep only names strictly longer than 5 characters.
List<String> filteredAnimals = Arrays.stream(animals)
.filter(a -> a.length() > 5)
.collect(Collectors.toList());
System.out.println(filteredAnimals); // [] — no name here is longer than 5 characters
|
/**
 * @description Upper-cases the first letter of a string.
 * @param {String} s
 * @returns {String} the input with its first character capitalized, or
 *                   '' when the input is not a string.
 * @examples capitalize('hello world!') → 'Hello world!'
 */
function capitalize (s) {
  // Non-string inputs (numbers, null, undefined, ...) yield ''.
  if (typeof s !== 'string') return ''
  if (s === '') return ''
  return s[0].toUpperCase() + s.substring(1)
}
module.exports = capitalize
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.servlets;
import static java.lang.String.format ;
import java.util.List ;
import org.apache.jena.atlas.lib.InternalErrorException ;
import org.apache.jena.fuseki.migrate.GraphLoadUtils ;
import org.apache.jena.query.Dataset ;
import org.apache.jena.query.DatasetFactory ;
import org.apache.jena.query.Query ;
import org.apache.jena.rdf.model.Model ;
import org.apache.jena.rdf.model.ModelFactory ;
import org.apache.jena.riot.RiotException ;
import org.apache.jena.sparql.core.DatasetDescription ;
/**
 * SPARQL query servlet that accepts the dataset description from the
 * protocol request or the query itself (no pre-configured dataset).
 * Graphs named in the description are fetched and loaded into an
 * in-memory general dataset, capped at {@code MaxTriples} per graph.
 */
public class SPARQL_QueryGeneral extends SPARQL_Query
{
// Upper bound on triples loaded per graph URL, to bound memory use.
final static int MaxTriples = 100*1000 ;
public SPARQL_QueryGeneral() { super() ; }
// No request-level validation: any request shape is accepted here.
@Override
protected void validateRequest(HttpAction action) {}
// No query-level validation: any syntactically valid query is accepted.
@Override
protected void validateQuery(HttpAction action, Query query) {}
// There is no fixed dataset for this endpoint.
@Override
protected String mapRequestToDataset(String uri)
{ return null ; }
/**
 * Chooses the dataset: protocol parameters take precedence over the
 * query's FROM / FROM NAMED clauses; a missing description is a 400.
 */
@Override
protected Dataset decideDataset(HttpAction action, Query query, String queryStringLog)
{
DatasetDescription datasetDesc = getDatasetDescription(action) ;
if ( datasetDesc == null )
datasetDesc = getDatasetDescription(query) ;
if ( datasetDesc == null )
errorBadRequest("No dataset description in protocol request or in the query string") ;
return datasetFromDescription(action, datasetDesc) ;
}
/**
* Construct a Dataset based on a dataset description.
* Returns null when the description is null or names no graphs; a
* graph that fails to load is reported as a 400 via errorBadRequest.
*/
protected static Dataset datasetFromDescription(HttpAction action, DatasetDescription datasetDesc)
{
try {
if ( datasetDesc == null )
return null ;
if ( datasetDesc.isEmpty() )
return null ;
List<String> graphURLs = datasetDesc.getDefaultGraphURIs() ;
List<String> namedGraphs = datasetDesc.getNamedGraphURIs() ;
if ( graphURLs.size() == 0 && namedGraphs.size() == 0 )
return null ;
Dataset dataset = DatasetFactory.createGeneral() ;
// ---- Default graph
// All default-graph URLs are merged into a single model.
{
Model model = ModelFactory.createDefaultModel() ;
for ( String uri : graphURLs )
{
if ( uri == null || uri.equals("") )
throw new InternalErrorException("Default graph URI is null or the empty string") ;
try {
//TODO Clearup - RIOT integration.
GraphLoadUtils.loadModel(model, uri, MaxTriples) ;
log.info(format("[%d] Load (default graph) %s", action.id, uri)) ;
} catch (RiotException ex) {
log.info(format("[%d] Parsing error loading %s: %s", action.id, uri, ex.getMessage())) ;
errorBadRequest("Failed to load URL (parse error) "+uri+" : "+ex.getMessage()) ;
} catch (Exception ex)
{
log.info(format("[%d] Failed to load (default) %s: %s", action.id, uri, ex.getMessage())) ;
errorBadRequest("Failed to load URL "+uri) ;
}
}
dataset.setDefaultModel(model) ;
}
// ---- Named graphs
// Each named-graph URL is loaded into its own model.
if ( namedGraphs != null )
{
for ( String uri : namedGraphs )
{
if ( uri == null || uri.equals("") )
throw new InternalErrorException("Named graph URI is null or the empty string") ;
try {
Model model = ModelFactory.createDefaultModel() ;
GraphLoadUtils.loadModel(model, uri, MaxTriples) ;
log.info(format("[%d] Load (named graph) %s", action.id, uri)) ;
dataset.addNamedModel(uri, model) ;
} catch (RiotException ex) {
log.info(format("[%d] Parsing error loading %s: %s", action.id, uri, ex.getMessage())) ;
errorBadRequest("Failed to load URL (parse error) "+uri+" : "+ex.getMessage()) ;
} catch (Exception ex)
{
log.info(format("[%d] Failed to load (named graph) %s: %s", action.id, uri, ex.getMessage())) ;
errorBadRequest("Failed to load URL "+uri) ;
}
}
}
return dataset ;
}
// Action errors already carry an HTTP status — rethrow untouched.
catch (ActionErrorException ex) { throw ex ; }
catch (Exception ex)
{
// NOTE(review): the message is concatenated into the format string and
// the trailing "ex" argument has no matching %-specifier (extra
// java.util.Formatter arguments are silently ignored), so the stack
// trace is never logged — confirm whether that was intended.
log.info(format("[%d] SPARQL parameter error: "+ex.getMessage(),action.id, ex)) ;
errorBadRequest("Parameter error: "+ex.getMessage());
return null ;
}
}
}
|
# Validates that each requested visit slot is available for a prisoner by
# querying the NOMIS prisoner-availability API. Availability errors are
# recorded per slot (keyed by the slot's string form) rather than on a
# single attribute.
class PrisonerAvailabilityValidation
include MemoryModel
# NOMIS reasons that make a date unavailable for the prisoner.
PRISONER_ERRORS = [
Nomis::PrisonerDateAvailability::OUT_OF_VO,
Nomis::PrisonerDateAvailability::EXTERNAL_MOVEMENT,
Nomis::PrisonerDateAvailability::BOOKED_VISIT
].freeze
attribute :prisoner, :nomis_prisoner
attribute :requested_slots, :concrete_slot_list
validate :slots_availability
# Availability error messages recorded for one slot (empty when valid).
def slot_errors(slot)
errors[slot.to_s]
end
# True when availability could not be determined: the API is disabled,
# returned nothing, or raised. Callers should treat slots as unknown
# rather than unavailable in that case.
def unknown_result?
return false if valid_requested_slots.none?
!Nomis::Api.enabled? || prisoner_availability.nil? || api_error
end
private
attr_reader :api_error
# Validation callback: records the API's error messages against each
# valid requested slot.
def slots_availability
valid_requested_slots.each do |requested_slot|
error_messages_for_slot(requested_slot).each do |message|
errors[requested_slot.to_s] << message
end
end
end
def error_messages_for_slot(slot)
# No messages when the result is unknown or the slot is out of range.
return [] if unknown_result? || !valid_slot?(slot)
prisoner_availability.error_messages_for_slot(slot)
end
def prisoner_availability
# Skip the API call entirely for an invalid prisoner record.
return nil unless prisoner.valid?
@prisoner_availability ||= load_prisoner_availability
end
def load_prisoner_availability
# Don't retry within this instance once the API has already failed.
return nil if @api_error
Nomis::Api.instance.prisoner_visiting_detailed_availability(
offender_id: prisoner.nomis_offender_id,
slots: valid_requested_slots
)
rescue Nomis::APIError => e
@api_error = true
Rails.logger.warn "Error calling the NOMIS API: #{e.inspect}"
nil
end
def valid_requested_slots
@valid_requested_slots ||= requested_slots.select { |slot| valid_slot?(slot) }
end
# A slot is checkable only within the bookable window:
# tomorrow through 60 days from now (inclusive).
def valid_slot?(slot)
slot.to_date.between?(Date.tomorrow, 60.days.from_now.to_date)
end
end
|
/**
 * Exercise 9.4: demonstrates a static call counter. Running main prints
 * the integers 0 through 9, one per line.
 */
public class ProgEx9_4 {
    /** Number of completed calls to {@link #count()}. */
    static int nCalls;

    public static void main(String args[]) {
        nCalls = 0;
        // count() returns the pre-increment value, so this prints 0..9.
        for (int i = 0; i < 10; i++) {
            System.out.println(count());
        }
    }

    /** Returns the current call count, then increments it. */
    static int count() {
        int current = nCalls;
        nCalls = current + 1;
        return current;
    }
}
|
<reponame>smagill/opensphere-desktop
package com.bitsys.common.http.ssl;
import java.security.PublicKey;
import java.security.cert.CertificateException;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.net.ssl.X509TrustManager;
import javax.security.auth.x500.X500Principal;
import org.apache.commons.lang3.time.DateFormatUtils;
import com.bitsys.common.http.ssl.CertificateVerificationIssue.IssueType;
/**
 * This class is an X.509 trust manager that provides greater control in the
 * certificate verification process. A {@link CertificateVerifier} can be
 * provided to override the default, strict certificate verification.
 */
public class InteractiveX509TrustManager implements X509TrustManager
{
    /** The X.509 trust manager. */
    private final X509TrustManager trustManager;

    /** The certificate verifier. */
    private final CertificateVerifier verifier;

    /**
     * Constructs an {@linkplain InteractiveX509TrustManager} with the given
     * trust manager. The certificate verifier defaults to
     * {@link StrictCertificateVerifier}.
     *
     * @param trustManager the underlying trust manager.
     */
    public InteractiveX509TrustManager(final X509TrustManager trustManager)
    {
        this(trustManager, new StrictCertificateVerifier());
    }

    /**
     * Constructs an {@linkplain InteractiveX509TrustManager} with the given
     * trust manager and certificate verifier.
     *
     * @param trustManager the underlying trust manager.
     * @param verifier the certificate verifier.
     * @throws IllegalArgumentException if either argument is {@code null}.
     */
    public InteractiveX509TrustManager(final X509TrustManager trustManager, final CertificateVerifier verifier)
    {
        if (trustManager == null)
        {
            throw new IllegalArgumentException("The trust manager is null");
        }
        if (verifier == null)
        {
            throw new IllegalArgumentException("The certificate verifier is null");
        }
        this.trustManager = trustManager;
        this.verifier = verifier;
    }

    /**
     * Returns the X.509 trust manager.
     *
     * @return the X.509 trust manager.
     */
    public X509TrustManager getTrustManager()
    {
        return trustManager;
    }

    /**
     * Returns the certificate verifier.
     *
     * @return the certificate verifier.
     */
    public CertificateVerifier getVerifier()
    {
        return verifier;
    }

    /**
     * @see javax.net.ssl.X509TrustManager#checkClientTrusted(java.security.cert.X509Certificate[],
     *      java.lang.String)
     */
    @Override
    public void checkClientTrusted(final X509Certificate[] chain, final String authType) throws CertificateException
    {
        // Client certificates are delegated to the default trust manager
        // unchanged; only server verification is interactive.
        trustManager.checkClientTrusted(chain, authType);
    }

    /**
     * @see javax.net.ssl.X509TrustManager#checkServerTrusted(java.security.cert.X509Certificate[],
     *      java.lang.String)
     */
    @Override
    public void checkServerTrusted(final X509Certificate[] chain, final String authType) throws CertificateException
    {
        if (chain == null)
        {
            throw new CertificateException("The certificate chain is null");
        }
        if (chain.length == 0)
        {
            throw new CertificateException("The certificate chain is empty");
        }
        if (authType == null)
        {
            throw new IllegalArgumentException("The authentication type is null");
        }
        if (authType.length() == 0)
        {
            throw new IllegalArgumentException("The authentication type is empty");
        }

        // First see if the default trust manager has an issue.
        CertificateException defaultException = null;
        try
        {
            trustManager.checkServerTrusted(chain, authType);
        }
        catch (final CertificateException e)
        {
            defaultException = e;
        }

        // If the default trust manager found an issue, try to find it and any
        // other issues. Ask the verifier for guidance.
        if (defaultException != null)
        {
            final Collection<CertificateVerificationIssue> issues = new ArrayList<>();
            final X509Certificate serverCertificate = chain[0];
            final CertificateVerificationIssue issue = checkValidity(serverCertificate, chain);
            if (issue != null)
            {
                issues.add(issue);
            }
            issues.addAll(verifyChain(chain));

            // If the verifier doesn't allow the certificate, throw an
            // exception.
            if (!verifier.allowCertificate(chain, authType, issues, defaultException))
            {
                throw new CertificateException("The exception was sustained: " + defaultException.getLocalizedMessage(),
                        defaultException);
            }
        }
    }

    /**
     * Determines if the given certificate is valid. The certificate is valid if
     * the current date/time fall between the certificate's
     * <code>notBefore</code> and <code>notAfter</code> dates.
     *
     * @param certificate the certificate to check.
     * @param chain the full certificate chain. This parameter is for
     *            informational purposes.
     * @return the certificate verification issue or <code>null</code>.
     * @throws CertificateNotYetValidException if the certificate is not yet
     *             valid and verifier rejects the certificate.
     * @throws CertificateExpiredException if the certificate has expired and
     *             the verifier rejects the certificate.
     */
    protected CertificateVerificationIssue checkValidity(final X509Certificate certificate, final X509Certificate[] chain)
        throws CertificateExpiredException, CertificateNotYetValidException
    {
        CertificateVerificationIssue error = null;
        final Date now = new Date();
        if (now.before(certificate.getNotBefore()))
        {
            // Fixed: report the notBefore date here. The original message
            // formatted getNotAfter(), showing the wrong date for a
            // not-yet-valid certificate.
            final String message = "The certificate '" + certificate.getSubjectDN() + "' will not be valid until "
                    + DateFormatUtils.ISO_8601_EXTENDED_DATETIME_FORMAT.format(certificate.getNotBefore());
            error = new CertificateVerificationIssue(IssueType.CERTIFICATE_NOT_YET_VALID, certificate, message);
        }
        else if (now.after(certificate.getNotAfter()))
        {
            final String message = "The certificate '" + certificate.getSubjectDN() + "' expired on "
                    + DateFormatUtils.ISO_8601_EXTENDED_DATETIME_FORMAT.format(certificate.getNotAfter());
            error = new CertificateVerificationIssue(IssueType.CERTIFICATE_EXPIRED, certificate, message);
        }
        return error;
    }

    /**
     * Analyzes the certificate chain for issues. Checks for a well-formed chain
     * as well as certificate trust. Any issues found are reported in the
     * returned collection.
     *
     * @param chain the certificate chain to verify.
     * @return the collection of issues. The result may be zero-length but never
     *         <code>null</code>.
     */
    protected Collection<? extends CertificateVerificationIssue> verifyChain(final X509Certificate[] chain)
    {
        final Collection<CertificateVerificationIssue> errors = new ArrayList<>();
        final List<X509Certificate> trustedCertificates = Arrays.asList(getAcceptedIssuers());
        final Map<X500Principal, Set<PublicKey>> trustedSubjects = createTrustedSubjects(trustedCertificates);
        CertificateVerificationIssue issue = null;
        boolean trusted = false;

        // Verify each link in the chain.
        for (int ii = 0; ii < chain.length; ii++)
        {
            final X509Certificate certificate = chain[ii];
            final X500Principal subject = certificate.getSubjectX500Principal();
            final Set<PublicKey> publicKeys = trustedSubjects.get(subject);
            if (ii > 0)
            {
                final X509Certificate lastCertificate = chain[ii - 1];
                issue = verifyLink(lastCertificate, certificate);
                if (issue != null)
                {
                    break;
                }
            }

            // Verify that the last certificate in the chain is self-signed.
            if (ii == chain.length - 1 && !certificate.getIssuerX500Principal().equals(certificate.getSubjectX500Principal()))
            {
                errors.add(new CertificateVerificationIssue(IssueType.INCOMPLETE_CERTIFICATE_CHAIN, certificate));
            }

            // If the current certificate is in the trusted certificates or the
            // certificate's public key is in the set of trusted public keys,
            // the certificate chain is trusted.
            if (trustedCertificates.contains(certificate)
                    || publicKeys != null && publicKeys.contains(certificate.getPublicKey()))
            {
                trusted = true;
                break;
            }
        }

        // Report any issues found.
        if (issue != null)
        {
            errors.add(issue);
        }
        else if (!trusted)
        {
            errors.add(new CertificateVerificationIssue(IssueType.CERTIFICATE_NOT_TRUSTED, chain[0]));
        }
        return errors;
    }

    /**
     * Verifies the link between the two certificates. This method will first
     * verify that the previous certificate's issue principal matches the
     * current certificate's principal. If they match, this method
     * {@link X509Certificate#verify(PublicKey) verifies} that the current
     * certificate issues the previous certificate.
     *
     * @param previousCertificate the previous certificate in the chain.
     * @param certificate the current certificate in the chain.
     * @return the verification issue or <code>null</code> if no issue was
     *         found.
     */
    protected CertificateVerificationIssue verifyLink(final X509Certificate previousCertificate,
            final X509Certificate certificate)
    {
        CertificateVerificationIssue issue = null;

        // If the next certificate in the chain doesn't have the same
        // subject as the previous certificate's issuer, the chain is
        // invalid.
        final X500Principal subject = certificate.getSubjectX500Principal();
        final X500Principal issuerSubject = previousCertificate.getIssuerX500Principal();
        if (!subject.equals(issuerSubject))
        {
            issue = new CertificateVerificationIssue(IssueType.MALFORMED_CERTIFICATE_CHAIN, certificate,
                    "The certificate '" + previousCertificate.getSubjectX500Principal() + "' was not issued by '"
                            + certificate.getSubjectX500Principal() + "'");
        }
        else
        {
            try
            {
                previousCertificate.verify(certificate.getPublicKey());
            }
            catch (final Exception e)
            {
                issue = new CertificateVerificationIssue(IssueType.MALFORMED_CERTIFICATE_CHAIN, certificate,
                        "Failed to verify that '" + previousCertificate.getSubjectX500Principal() + "' was issued by '"
                                + certificate.getSubjectX500Principal() + "'");
            }
        }
        return issue;
    }

    /**
     * Returns the mapping of trusted certificate's subject to set of trusted
     * certificate's public keys from the given list of trusted certificates.
     *
     * @param trustedCertificates the list of trusted certificates.
     * @return the mapping of subjects to sets of public keys.
     */
    protected Map<X500Principal, Set<PublicKey>> createTrustedSubjects(final List<X509Certificate> trustedCertificates)
    {
        final Map<X500Principal, Set<PublicKey>> trustedSubjects = new HashMap<>();
        for (final X509Certificate cert : trustedCertificates)
        {
            final X500Principal subject = cert.getSubjectX500Principal();
            Set<PublicKey> publicKeys = trustedSubjects.get(subject);
            if (publicKeys == null)
            {
                publicKeys = new HashSet<>();
                trustedSubjects.put(subject, publicKeys);
            }
            publicKeys.add(cert.getPublicKey());
        }
        return trustedSubjects;
    }

    @Override
    public X509Certificate[] getAcceptedIssuers()
    {
        // Simplified: delegate directly instead of going through a
        // temporary local variable.
        return trustManager.getAcceptedIssuers();
    }
}
|
#!/bin/bash -xe
# Installs OpenVPN and writes a static-key server configuration plus a
# startup script and a route monitor under /data/openvpn.
#
# Arguments:
#   $1 - pre-shared static key body (the lines between the BEGIN/END markers)
#   $2 - comma-separated list of peer VPN subnets to route via the tunnel
PSK=$1
PeerVPNSubnets=$2
yum -y install openvpn
#/usr/sbin/openvpn
mkdir -p /data/openvpn/{etc,sbin}
## Server configuration: static-key point-to-point tunnel on TCP/1200.
echo "daemon
;mode server
cd /data/openvpn/etc/
dev tun
proto tcp-server
;proto udp
port 1200
secret static.key
ifconfig 10.8.0.1 10.8.0.2
comp-lzo
keepalive 10 60
ping-timer-rem
persist-tun
persist-key
persist-key
persist-tun
status openvpn-status.log
;verb 5
cipher AES-128-CBC
" > /data/openvpn/etc/server-static.conf
echo "-----BEGIN OpenVPN Static key V1-----
$PSK
-----END OpenVPN Static key V1----- " > /data/openvpn/etc/static.key
## Startup script: launches OpenVPN, enables forwarding, starts monitor.
## FIX: the generated script previously began with "#/bin/bash" (missing
## "!"), so it had no valid shebang line.
F=/data/openvpn/sbin/startup.sh
echo "#!/bin/bash
killall openvpn
/usr/sbin/openvpn --config /data/openvpn/etc/server-static.conf
echo 1 > /proc/sys/net/ipv4/ip_forward
## IPtables
#/sbin/iptables -t nat -A POSTROUTING -o tun+ -j MASQUERADE
#/sbin/iptables -t nat -A POSTROUTING -o eth+ -j MASQUERADE
## VPN Monitor
nohup /data/openvpn/sbin/vpn_monitor.sh &
exit 0" > $F
chmod +x $F
## Monitor: re-adds routes to the peer subnets via the tunnel gateway if
## they disappear from the main routing table.
## FIX: "ip route fulsh cache" was a typo; the route cache was never
## flushed after the routes were re-added.
F=/data/openvpn/sbin/vpn_monitor.sh
echo "#!/bin/bash
LOG=/var/log/vpn_monitor.log
#
VPN_SERVER_SUBNETS=\"${PeerVPNSubnets//,/ }\"
VPN_GW=10.8.0.2
function do_log()
{
TIME=\`date +\"%Y-%m-%d %T\"\`
echo \"\$TIME \$1\" >> \$LOG
}
function refresh_to_vpn_route()
{
for N in \${VPN_SERVER_SUBNETS}; do
/sbin/ip route add \$N via \${VPN_GW} table main
done
/sbin/ip route flush cache
}
while [ 1 ]; do
CHG=0
if [ \`/sbin/ip route list table main | grep -c \"via \${VPN_GW}\"\` -eq 0 ]; then
refresh_to_vpn_route
do_log \"refresh_to_vpn_route\"
CHG=1
fi
sleep 60
done
exit 0
" > $F
chmod +x $F
## Start OpenVPN at boot.
echo "/data/openvpn/sbin/startup.sh" >> /etc/rc.local
## First run
#/data/openvpn/sbin/startup.sh
exit 0
|
#!/usr/bin/env bash
#
# script to start influxdb and compile influxdb-java with all tests.
#
set -e
# Versions are overridable via INFLUXDB_VERSION / MAVEN_JAVA_VERSION.
DEFAULT_INFLUXDB_VERSION="1.8"
DEFAULT_MAVEN_JAVA_VERSION="3-openjdk-16-slim"
INFLUXDB_VERSION="${INFLUXDB_VERSION:-$DEFAULT_INFLUXDB_VERSION}"
MAVEN_JAVA_VERSION="${MAVEN_JAVA_VERSION:-$DEFAULT_MAVEN_JAVA_VERSION}"
echo "Run tests with maven:${MAVEN_JAVA_VERSION} on influxdb-${INFLUXDB_VERSION}"
# Replace any leftover influxdb container; "|| true" tolerates a clean state.
docker kill influxdb || true
docker rm influxdb || true
docker pull influxdb:${INFLUXDB_VERSION}-alpine || true
# InfluxDB: 8086 = HTTP API, 8089/udp = UDP line-protocol listener.
docker run \
--detach \
--name influxdb \
--publish 8086:8086 \
--publish 8089:8089/udp \
--volume ${PWD}/influxdb.conf:/etc/influxdb/influxdb.conf \
influxdb:${INFLUXDB_VERSION}-alpine
echo "Starting Nginx"
docker kill nginx || true
docker rm nginx || true
# Nginx fronts InfluxDB so the proxy code paths can be tested.
docker run \
--detach \
--name nginx \
--publish 8080:8080 \
--publish 8080:8080/udp \
--volume ${PWD}/src/test/nginx/nginx.conf:/etc/nginx/nginx.conf:ro \
--link influxdb:influxdb \
nginx:stable-alpine nginx '-g' 'daemon off;'
echo "Running tests"
PROXY_API_URL=http://nginx:8080/influx-api/
PROXY_UDP_PORT=8080
# Build inside a maven container linked to both services; the host's
# working tree and a local .m2 cache are mounted in.
docker run --rm \
--volume ${PWD}:/usr/src/mymaven \
--volume ${PWD}/.m2:/root/.m2 \
--workdir /usr/src/mymaven \
--link=influxdb \
--link=nginx \
--env INFLUXDB_VERSION=${INFLUXDB_VERSION} \
--env INFLUXDB_IP=influxdb \
--env PROXY_API_URL=${PROXY_API_URL} \
--env PROXY_UDP_PORT=${PROXY_UDP_PORT} \
maven:${MAVEN_JAVA_VERSION} mvn clean install
# Clean up the service containers.
docker kill influxdb || true
docker kill nginx || true
|
/**
 * Logs a developer-facing error explaining that the `icon` property is
 * mandatory for the FontAwesome icon components.
 */
export const faWarnIfIconSpecMissing = () => {
  const message =
    'FontAwesome: Property `icon` is required for `fa-icon`/`fa-duotone-icon` components. ' +
    'This warning will become a hard error in 0.6.0.';
  console.error(message);
};
|
#!/bin/bash
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Drives the openstack-helm-infra dev-deploy targets to prepare the host
# and bring up a Kubernetes cluster, then returns to the original dir.
set -xe
CURRENT_DIR="$(pwd)"
# Location of the openstack-helm-infra checkout; override via OSH_INFRA_PATH.
: ${OSH_INFRA_PATH:="../openstack-helm-infra"}
cd ${OSH_INFRA_PATH}
make dev-deploy setup-host
make dev-deploy k8s
cd ${CURRENT_DIR}
|
#!/bin/bash
# Copyright 2017-2020 Authors of Cilium
# SPDX-License-Identifier: Apache-2.0

# Prints the registry digest of the single image reference given as $1.
# When not already inside the image-maker container, the script re-executes
# itself inside that container (repo mounted at /src).
set -o errexit
set -o pipefail
set -o nounset
# Pinned image-maker container providing the `crane` tool.
MAKER_IMAGE="${MAKER_IMAGE:-docker.io/cilium/image-maker:3e2ea4f151593908c362307a1de22e68610d955c}"
if [ "$#" -ne 1 ] ; then
echo "$0 supports exactly 1 argument"
exit 1
fi
root_dir="$(git rev-parse --show-toplevel)"
# MAKER_CONTAINER is set inside the container; when unset, re-exec there.
if [ -z "${MAKER_CONTAINER+x}" ] ; then
exec docker run --env DOCKER_HUB_PUBLIC_ACCESS_ONLY=true --env QUAY_PUBLIC_ACCESS_ONLY=true --rm --volume "${root_dir}:/src" --workdir /src "${MAKER_IMAGE}" "/src/scripts/$(basename "${0}")" "${1}"
fi
# stderr suppressed so only the digest reaches stdout.
crane digest "${1}" 2> /dev/null
|
<reponame>kancerberus/gestorServicios<gh_stars>1-10
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bd;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import modelo.Empresa;
import modelo.SubEmpresa;
/**
*
* @author JuanVilla
*/
/**
 * Data-access object for companies (empresas) and their sub-companies.
 * Uses the inherited GestorBD connection helpers for writes and a
 * Consulta helper for reads.
 */
public class EmpresaDAO extends GestorBD {
// JDBC connection used by the read methods (via Consulta).
private Connection conexion;
public EmpresaDAO(Connection conexion) {
this.conexion = conexion;
}
// Deliberately unusable no-arg constructor: a connection is required.
public EmpresaDAO() {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
/**
 * Inserts a company row and returns the update count as a string.
 * Warnings are pushed to the JSF context on SQL failure.
 *
 * NOTE(review): the INSERT is built by string concatenation of
 * user-supplied fields, which is vulnerable to SQL injection; this
 * should be migrated to a parameterized statement if the bd helper
 * supports one.
 */
public String guardarEmpresa(Empresa empresa){
FacesContext contextoJSF = FacesContext.getCurrentInstance();
int actualizado=0;
try {
bd.conectar(getUsuario(), getClave(), getServidor(), getPuerto(), getBasedatos());
sql = "insert into empresas" +
"(nom_empresa, nit_empresa, telefono_empresa, direccion_empresa, correo_empresa)" +
"values ('"+empresa.getNom_empresa()+"','"+empresa.getNit_empresa()+"','"+empresa.getTelefono_empresa()+"'," +
"'"+empresa.getDireccion_empresa()+"','"+empresa.getCorreo_empresa()+"')";
actualizado = bd.actualizar(sql);
} catch (SQLException E) {
contextoJSF.addMessage(null, new FacesMessage(FacesMessage.SEVERITY_WARN, "", E.getMessage()));
} finally {
bd.desconectar();
}
return Integer.toString(actualizado);
}
/**
 * Lists all companies (NIT and name only).
 *
 * @return the companies found (possibly empty).
 * @throws SQLException if the query fails.
 */
public ArrayList<Empresa> listarEmpresas() throws SQLException {
Empresa empresa;
ArrayList<Empresa> listaEmpresas = new ArrayList<>();
ResultSet rs;
Consulta consulta = null;
try {
consulta = new Consulta(getConexion());
String sql
= " SELECT nit_empresa,nom_empresa "
+ " FROM empresas ";
rs = consulta.ejecutar(sql);
while (rs.next()) {
empresa = new Empresa ();
empresa.setNit_empresa(rs.getString("nit_empresa"));
empresa.setNom_empresa(rs.getString("nom_empresa"));
listaEmpresas.add(empresa);
}
return listaEmpresas;
} catch (SQLException ex) {
throw ex;
} finally {
consulta.desconectar();
}
}
/**
 * Lists the sub-companies belonging to the given company.
 *
 * NOTE(review): the WHERE clause concatenates the company NIT into the
 * SQL string — same injection concern as guardarEmpresa.
 *
 * @param empresa the parent company whose NIT is used as the filter.
 * @return the sub-companies found (possibly empty).
 * @throws SQLException if the query fails.
 */
public ArrayList<SubEmpresa> listarSubempresas(Empresa empresa) throws SQLException {
SubEmpresa subempresa;
ArrayList<SubEmpresa> listaSubempresas = new ArrayList<>();
ResultSet rs;
Consulta consulta = null;
try {
consulta = new Consulta(getConexion());
String sql
= " SELECT nit_subempresa, nom_subempresa "
+ " FROM subempresa"
+ " WHERE nit_empresa = '"+empresa.getNit_empresa()+"'" ;
rs = consulta.ejecutar(sql);
while (rs.next()) {
subempresa = new SubEmpresa();
subempresa.setNit_subempresa(rs.getString("nit_subempresa"));
subempresa.setNom_empresa(rs.getString("nom_subempresa"));
listaSubempresas.add(subempresa);
}
return listaSubempresas;
} catch (SQLException ex) {
throw ex;
} finally {
consulta.desconectar();
}
}
// Getters and setters.
/**
* @return the JDBC connection used by this DAO.
*/
public Connection getConexion() {
return conexion;
}
/**
* @param conexion the JDBC connection to use.
*/
public void setConexion(Connection conexion) {
this.conexion = conexion;
}
}
|
# Initialize nvm (Node Version Manager) for this shell session.
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
[ -s "$NVM_DIR/zsh_completion" ] && \. "$NVM_DIR/zsh_completion" # This loads nvm zsh_completion
|
<reponame>amypritc/MemoryGroup
package com.youngki.memory_project;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.GridLayout;
import com.google.gson.Gson;
import java.util.HashMap;
public class ViewMapActivity extends AppCompatActivity {

    private static final String ALPHABETS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";

    // Letter -> color index (0-9) mapping restored from SharedPreferences.
    HashMap<String, Integer> memMap;
    // Primitive boolean: the boxed Boolean gained nothing and risked an NPE on unboxing.
    boolean hasGenerated = false;
    GridLayout dynamicGridLayout;
    private int[] colors = new int[10];

    /**
     * Convert dp to pixels for the current display density.
     */
    public static int dpToPx(float dp, Resources resources) {
        float px =
                TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp, resources.getDisplayMetrics());
        return (int) px;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_view_map);
        dynamicGridLayout = (GridLayout) findViewById(R.id.dynamicGridLayout);
        // One color per digit 0-9; button background encodes the mapped number.
        colors[0] = ContextCompat.getColor(this, R.color.red);
        colors[1] = ContextCompat.getColor(this, R.color.orange);
        colors[2] = ContextCompat.getColor(this, R.color.yellow);
        colors[3] = ContextCompat.getColor(this, R.color.green);
        colors[4] = ContextCompat.getColor(this, R.color.blue);
        colors[5] = ContextCompat.getColor(this, R.color.purple);
        colors[6] = ContextCompat.getColor(this, R.color.pink);
        colors[7] = ContextCompat.getColor(this, R.color.teal);
        colors[8] = ContextCompat.getColor(this, R.color.tan);
        colors[9] = ContextCompat.getColor(this, R.color.gray);
        SharedPreferences myPref = getSharedPreferences("MyPref", MODE_PRIVATE);
        String map = myPref.getString("memMap", "");
        // Guard against a missing preference: Gson.fromJson("") returns null, which
        // previously caused a NullPointerException on getMap().
        MapWrapper wrapper = new Gson().fromJson(map, MapWrapper.class);
        memMap = wrapper != null ? wrapper.getMap() : new HashMap<String, Integer>();
        onGenerateClicked(null);
    }

    /**
     * Populates the grid with one colored button per mapped letter.
     * Safe to call with a null view; onCreate invokes it directly.
     */
    public void onGenerateClicked(View v) {
        for (int i = 0; i < ALPHABETS.length(); i++) {
            String curLetter = ALPHABETS.substring(i, i + 1);
            if (memMap.containsKey(curLetter)) {
                int curNumber = memMap.get(curLetter);
                Button button = new Button(this);
                ViewGroup.MarginLayoutParams marginLayoutParams =
                        new ViewGroup.MarginLayoutParams(dpToPx(48, getResources()), dpToPx(48, getResources()));
                marginLayoutParams.rightMargin = dpToPx(5, getResources());
                marginLayoutParams.bottomMargin = dpToPx(5, getResources());
                button.setLayoutParams(new GridLayout.LayoutParams(marginLayoutParams));
                button.setBackgroundColor(colors[curNumber]);
                dynamicGridLayout.addView(button);
                button.setGravity(Gravity.CENTER);
                button.setTextSize(TypedValue.COMPLEX_UNIT_SP, 14);
                button.setText(curLetter + ":" + curNumber);
            }
        }
        hasGenerated = true;
    }

    @Override
    public void onBackPressed() {
        // NOTE(review): back deliberately routes to the help screen instead of
        // finishing this activity; super.onBackPressed() is intentionally not
        // called — confirm this is the desired navigation.
        Intent windowOpener = new Intent(this, showHelp.class);
        startActivity(windowOpener);
    }
}
|
<reponame>blainehansen/simple-query
import 'mocha'
import { expect } from 'chai'
import '@ts-std/extensions/dist/array'

import * as sql from './sql'
import { boil_string } from '../utils.spec'
import {
	Arg, BooleanOperator,
	Query, QueryColumn, QueryRawColumn, QueryBlock, SimpleTable, TableChain, ForeignKeyChain, KeyReference,
	Insert, InsertBlock,
	ColumnName, WhereDirective,
} from '../ast'
import { declare_inspection_results } from '../inspect'
import { _raw_declare_dumb_table_schema, _reset_registered_tables } from '../inspect.spec'
// A QueryColumn renders as a json key/value pair: 'display_name', "table"."column".
describe('query columns render correctly', () => {
	it('with same name', () => {
		const column = new QueryColumn('column_name', 'column_name')
		expect(sql.query_column(column, 'some_table')).equal(`'column_name', "some_table"."column_name"`)
	})

	it('with different names', () => {
		const column = new QueryColumn('column_name', 'diff_name')
		expect(sql.query_column(column, 'some_table')).equal(`'diff_name', "some_table"."column_name"`)
	})
})
// Substitution of named $arg placeholders inside raw sql strings with their
// positional $N forms.
describe('raw sql statements', () => {
	// Four fake args; 'onetwo' checks that longer names are not shadowed by
	// the 'one'/'two' prefixes during replacement.
	const args = [
		new Arg(1, 'one', 'not checked', false, undefined),
		new Arg(2, 'two', 'not checked', false, undefined),
		new Arg(3, 'three', 'not checked', false, undefined),
		new Arg(4, 'onetwo', 'not checked', false, undefined),
	]

	it('can do simple things', () => {
		let col
		// each named arg rewritten to its positional index
		col = new QueryRawColumn('thing', `$one / ((coalesce(some_json, $two) -> 'stuff') :: int * $three)`)
		expect(sql.query_raw_column(col, args)).equal(`'thing', $1 / ((coalesce(some_json, $2) -> 'stuff') :: int * $3)`)

		// the same arg may appear multiple times
		col = new QueryRawColumn('thing', `$one / ((coalesce(some_json, $one) -> 'stuff') :: int * $one)`)
		expect(sql.query_raw_column(col, args)).equal(`'thing', $1 / ((coalesce(some_json, $1) -> 'stuff') :: int * $1)`)

		// longest match wins: $onetwo resolves to arg 4, not $one followed by "two"
		col = new QueryRawColumn('thing', `$one / $onetwo`)
		expect(sql.query_raw_column(col, args)).equal(`'thing', $1 / $4`)

		// unknown names are left untouched
		col = new QueryRawColumn('thing', `$one / $onefive`)
		expect(sql.query_raw_column(col, args)).equal(`'thing', $1 / $onefive`)

		// TODO trying to figure out what's a reasonable amount of dollar escaped compatibility
		// sql = new QueryRawColumn('thing', `$one || $one$one dollar escaped text$one$`)
		// expect(sql.renderSql(args)).equal(`$1 || $one$one dollar escaped text$one$`)
	})
})
// Resolution of ForeignKeyChain specs into concrete join conditions, against a
// throwaway schema of tables a-f declared in before() and torn down in after().
describe('foreign key chains', () => {
	// Each tuple is [referenced_table, pointing_table, fk_column, unique?].
	before(() => _raw_declare_dumb_table_schema(
		['a', 'b', 'c', 'd', 'e', 'f'],
		[
			['a', 'b', 'a_id', false],
			['b', 'c', 'b_id', false],
			['c', 'd', 'c_id', false],
			['b', 'd', 'right_b_id', false],
			['b', 'd', 'left_b_id', false],
			['a', 'd', 'a_id', false],
			['d', 'e', 'd_id', false],
			['d', 'f', 'd_id', false],
		],
	))
	after(() => _reset_registered_tables())

	it('can handle unambiguous chains', () => {
		let chain, join_conditions
		// starting from b
		// ~~b_id~~c_id~~d
		chain = new ForeignKeyChain([new KeyReference(['b_id']), new KeyReference(['c_id'])], 'd')
		join_conditions = sql.make_join_conditions(chain, 'b', 'b', 'd')
		expect(join_conditions).equal([[ '"b"."id" = "c"."b_id"', 'c', 'c' ], [ '"c"."id" = "d"."c_id"', 'd', 'd' ]])

		// starting from b
		// ~~right_b_id~~d
		chain = new ForeignKeyChain([new KeyReference(['right_b_id'])], 'd')
		join_conditions = sql.make_join_conditions(chain, 'b', 'b', 'd')
		expect(join_conditions).equal([[ '"b"."id" = "d"."right_b_id"', 'd', 'd' ]])

		// starting from b
		// ~~left_b_id~~d
		chain = new ForeignKeyChain([new KeyReference(['left_b_id'])], 'd')
		join_conditions = sql.make_join_conditions(chain, 'b', 'b', 'd')
		expect(join_conditions).equal([[ '"b"."id" = "d"."left_b_id"', 'd', 'd' ]])
	})

	it('can handle qualified', () => {
		let chain, join_conditions
		// a KeyReference may name the table holding the column to disambiguate
		// starting from a
		// ~~b.a_id~~b
		chain = new ForeignKeyChain([new KeyReference(['a_id'], 'b')], 'b')
		join_conditions = sql.make_join_conditions(chain, 'a', 'a', 'b')
		expect(join_conditions).equal([[ '"a"."id" = "b"."a_id"', 'b', 'b' ]])

		// starting from a
		// ~~d.a_id~~e.d_id~~e
		chain = new ForeignKeyChain([new KeyReference(['a_id'], 'd'), new KeyReference(['d_id'], 'e')], 'e')
		join_conditions = sql.make_join_conditions(chain, 'a', 'a', 'e')
		expect(join_conditions).equal([[ '"a"."id" = "d"."a_id"', 'd', 'd' ], [ '"d"."id" = "e"."d_id"', 'e', 'e' ]])

		// starting from a
		// ~~d.a_id~~f.d_id~~f
		chain = new ForeignKeyChain([new KeyReference(['a_id'], 'd'), new KeyReference(['d_id'], 'f')], 'f')
		join_conditions = sql.make_join_conditions(chain, 'a', 'a', 'f')
		expect(join_conditions).equal([[ '"a"."id" = "d"."a_id"', 'd', 'd' ], [ '"d"."id" = "f"."d_id"', 'f', 'f' ]])
	})

	it('fails if given an incorrect destination', () => {
		// the chain ends at b but the caller claims the destination is c
		const chain = new ForeignKeyChain([new KeyReference(['a_id'], 'b')], 'c')
		expect(() => sql.make_join_conditions(chain, 'a', 'a', 'c')).throw("you've given an incorrect destination_table_name: ")
	})
})
// Rendering of a query whose where-directive references a declared Arg,
// in both the is_many (array) and single (object) forms.
describe('query with arguments', () => {
	before(() => _raw_declare_dumb_table_schema(['root'], []))
	after(() => _reset_registered_tables())

	const arg = new Arg(1, 'id', 'int', false, undefined)
	const q = new Query(
		'thing', [arg],
		new QueryBlock(
			'root_display', 'root', new SimpleTable('root'), true,
			[
				new QueryColumn('root_column', 'root_column'),
			],
			// where: "root_display"."id" = $1
			[new WhereDirective(
				new ColumnName('id'),
				arg,
				BooleanOperator.Eq,
			)],
			[],
			undefined, undefined, true,
		)
	)

	it('many', () => {
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select array_to_json(array(
				select jsonb_build_object(
					'root_column', "root_display"."root_column"
				) as "root_display"
				from
					"root" as "root_display"
				where "root_display"."id" = $1
			)) :: text as __value
		`))
	})

	it('single', () => {
		// mutate the shared block to exercise the single-row rendering path;
		// runs after 'many', which relied on is_many being true
		(q.block as any).is_many = false
		// TODO this likely will fail to render once we are actually intelligently checking for manyness
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select jsonb_build_object(
				'root_column', "root_display"."root_column"
			) :: text as __value
			from
				"root" as "root_display"
			where "root_display"."id" = $1
		`))
	})
})
// Flat queries over a single table: no joins, with and without declared args.
describe('single layer query', () => {
	before(() => _raw_declare_dumb_table_schema(['root'], []))
	after(() => _reset_registered_tables())

	it('compiles correctly with no args', () => {
		const q = new Query(
			'thing', [],
			new QueryBlock(
				'root', 'root', new SimpleTable('root'), true,
				[
					new QueryColumn('root_column', 'root_column'),
				],
				[], [],
				undefined, undefined, true,
			)
		)
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select array_to_json(array(
				select jsonb_build_object(
					'root_column', "root"."root_column"
				) as "root"
				from
					"root" as "root"
			)) :: text as __value
		`))
	})

	it('compiles correctly with default and no default args', () => {
		// declared args are unreferenced by the block, so they should not
		// affect the rendered sql; renamed columns use the display name as key
		const q = new Query(
			'thing', [new Arg(1, 'id', 'int', false, undefined), new Arg(2, 'amount', 'int', false, 2000)],
			new QueryBlock(
				'root', 'root', new SimpleTable('root'), true,
				[
					new QueryColumn('root_column', 'root_column'),
					new QueryColumn('other_column', 'diff_other_column'),
					new QueryColumn('diff_column', 'diff_column'),
				],
				[], [], undefined, undefined, true
			)
		)
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select array_to_json(array(
				select jsonb_build_object(
					'root_column', "root"."root_column",
					'diff_other_column', "root"."other_column",
					'diff_column', "root"."diff_column"
				) as "root"
				from
					"root" as "root"
			)) :: text as __value
		`))
	})
})
// Nested query blocks: each nested block becomes a lateral join; single
// (is_many = false) children render as a plain object, many children as
// array_to_json. Each test declares its own schema inline and resets it
// at the end (no shared before/after here).
describe('complex queries', () => {
	// single parent object embedded in a many child list
	it('a with child b', () => {
		_raw_declare_dumb_table_schema(['a', 'b'], [['a', 'b', 'a_id', false]])
		const q = new Query(
			'thing', [],
			new QueryBlock(
				'b', 'b', new SimpleTable('b'), true,
				[
					new QueryColumn('b_column', 'b_column'),
					new QueryBlock(
						'a', 'a', new SimpleTable('a'), false,
						[
							new QueryColumn('a_column', 'a_column')
						],
						[], [], undefined, undefined, true,
					)
				],
				[], [], undefined, undefined, true,
			)
		)
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select array_to_json(array(
				select jsonb_build_object(
					'b_column', "b"."b_column",
					'a', "a"."a"
				) as "b"
				from
					"b" as "b"
					left join lateral (
						select jsonb_build_object(
							'a_column', "a"."a_column"
						) as "a"
						from "a" as "a"
						where "b"."a_id" = "a"."id"
					) as "a" on true
			)) :: text as __value
		`))
		_reset_registered_tables()
	})

	// mixes a single sibling ('right') with nested many children (b containing c)
	it('root with single right and children b and c', () => {
		_raw_declare_dumb_table_schema(['root', 'right', 'b', 'c'], [
			['right', 'root', 'right_id', false],
			['root', 'b', 'root_id', false],
			['b', 'c', 'b_id', false],
		])
		const q = new Query(
			'thing', [],
			new QueryBlock(
				'root', 'root', new SimpleTable('root'), true,
				[
					new QueryColumn('root_column', 'root_column'),
					new QueryBlock(
						'right', 'right', new SimpleTable('right'), false,
						[
							new QueryColumn('right_column', 'right_column')
						],
						[], [], undefined, undefined, true,
					),
					new QueryBlock(
						'b', 'b', new SimpleTable('b'), true,
						[
							new QueryColumn('b_column', 'b_column'),
							new QueryBlock(
								'c', 'c', new SimpleTable('c'), true,
								[
									new QueryColumn('c_column', 'c_column')
								],
								[], [], undefined, undefined, true,
							),
						],
						[], [], undefined, undefined, true,
					),
				],
				[], [], undefined, undefined, true,
			)
		)
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select array_to_json(array(
				select jsonb_build_object(
					'root_column', "root"."root_column",
					'right', "right"."right",
					'b', "b"."b"
				) as "root"
				from
					"root" as "root"
					left join lateral (
						select jsonb_build_object(
							'right_column', "right"."right_column"
						) as "right"
						from
							"right" as "right"
						where "root"."right_id" = "right"."id"
					) as "right" on true
					left join lateral (select array_to_json(array(
						select jsonb_build_object(
							'b_column', "b"."b_column",
							'c', "c"."c"
						) as "b"
						from
							"b" as "b"
							left join lateral (select array_to_json(array(
								select jsonb_build_object(
									'c_column', "c"."c_column"
								) as "c"
								from
									"c" as "c"
								where "b"."id" = "c"."b_id"
							)) as "c") as "c" on true
						where "root"."id" = "b"."root_id"
					)) as "b") as "b" on true
			)) :: text as __value
		`))
		_reset_registered_tables()
	})

	// a TableChain routes through an intermediate (join) table
	it('a through mid to b', () => {
		_raw_declare_dumb_table_schema(['a', 'mid', 'b'], [
			['a', 'mid', 'a_id', false],
			['b', 'mid', 'b_id', false],
		])
		const q = new Query(
			'thing', [],
			new QueryBlock(
				'a', 'a', new SimpleTable('a'), true,
				[
					new QueryColumn('a_column', 'a_column'),
					new QueryBlock(
						'b', 'b', new TableChain(['mid', 'b']), true,
						[
							new QueryColumn('b_column', 'b_column')
						],
						[], [], undefined, undefined, true,
					)
				],
				[], [], undefined, undefined, true,
			)
		)
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select array_to_json(array(
				select jsonb_build_object(
					'a_column', "a"."a_column",
					'b', "b"."b"
				) as "a"
				from
					"a" as "a"
					left join lateral (select array_to_json(array(
						select jsonb_build_object(
							'b_column', "b"."b_column"
						) as "b"
						from
							"mid" as "mid"
							left join "b" as "b"
								on "mid"."b_id" = "b"."id"
						where "a"."id" = "mid"."a_id"
					)) as "b") as "b" on true
			)) :: text as __value
		`))
		_reset_registered_tables()
	})

	// three levels of nesting plus a sibling branch at the top level
	it('first/second/third/other levels', () => {
		_raw_declare_dumb_table_schema(['first_level', 'second_level', 'third_level', 'other_level'], [
			['first_level', 'second_level', 'first_level_id', false],
			['second_level', 'third_level', 'second_level_id', false],
			['first_level', 'other_level', 'first_level_id', false],
		])
		const q = new Query(
			'thing', [],
			new QueryBlock(
				'first_level', 'first_level', new SimpleTable('first_level'), true,
				[
					new QueryColumn('first_column', 'first_column'),
					new QueryBlock(
						'second_level', 'second_level', new SimpleTable('second_level'), true,
						[
							new QueryColumn('second_column', 'second_column'),
							new QueryBlock(
								'third_level', 'third_level', new SimpleTable('third_level'), true,
								[
									new QueryColumn('third_column', 'third_column'),
								],
								[], [], undefined, undefined, true,
							),
						],
						[], [], undefined, undefined, true,
					),
					new QueryBlock(
						'other_level', 'other_level', new SimpleTable('other_level'), true,
						[
							new QueryColumn('other_column', 'other_column'),
						],
						[], [], undefined, undefined, true,
					),
				],
				[], [], undefined, undefined, true,
			),
		)
		const rendered = boil_string(sql.Query(q))
		expect(rendered).equal(boil_string(`
			select array_to_json(array(
				select jsonb_build_object(
					'first_column', "first_level"."first_column",
					'second_level', "second_level"."second_level",
					'other_level', "other_level"."other_level"
				) as "first_level"
				from
					"first_level" as "first_level"
					left join lateral (select array_to_json(array(
						select jsonb_build_object(
							'second_column', "second_level"."second_column",
							'third_level', "third_level"."third_level"
						) as "second_level"
						from
							"second_level" as "second_level"
							left join lateral (select array_to_json(array(
								select jsonb_build_object(
									'third_column', "third_level"."third_column"
								) as "third_level"
								from
									"third_level" as "third_level"
								where "second_level"."id" = "third_level"."second_level_id"
							)) as "third_level") as "third_level" on true
						where "first_level"."id" = "second_level"."first_level_id"
					)) as "second_level") as "second_level" on true
					left join lateral (select array_to_json(array(
						select jsonb_build_object(
							'other_column', "other_level"."other_column"
						) as "other_level"
						from
							"other_level" as "other_level"
						where "first_level"."id" = "other_level"."first_level_id"
					)) as "other_level") as "other_level" on true
			)) :: text as __value
		`))
		_reset_registered_tables()
	})
})
import { blog_schema } from '../../schemas/blog.ts'
describe('simple inserts', () => {
declare_inspection_results()
it('single, no nested inserts', () => {
const i = new Insert('thing', new InsertBlock('organization', false, []))
const rendered = boil_string(sql.Insert(i))
expect(i).equal(boil_string(`
with
_organization_rows as (
select
_value,
(_value->>'id') :: int as id,
(_value->>'name') :: text as "name"
from $1 as _value
),
_insert_organization as (
insert into organization (id, "name")
select id, "name" from _organization_rows
)
select null
`))
})
it('multiple, no nested inserts', () => {
const i = new Insert('thing', new InsertBlock('organization', true, []))
const rendered = boil_string(sql.Insert(i))
expect(i).equal(boil_string(`
with
_organization_rows as (
select
_value,
(_value->>'id') :: int as id,
(_value->>'name') :: text as "name"
from jsonb_array_elements($1) as _(_value)
),
_insert_organization as (
insert into organization (id, "name")
select id, "name" from _organization_rows
)
select null
`))
})
_reset_registered_tables()
})
|
<reponame>rafax/sourcegraph<filename>internal/database/query/query.go
// Package query provides an expression tree structure which can be converted
// into WHERE queries. It is used by DB APIs to expose a more powerful query
// interface.
package query
import (
"fmt"
"github.com/keegancsmith/sqlf"
)
// Q is a query item. It is converted into a *sqlf.Query by Eval.
//
// A Q may be one of the combinators below (*and, *or, *not), a bool literal,
// or any other value, which Eval treats as an atom.
type Q interface{}

// And returns a Q which when evaluated will join the children by "AND".
func And(children ...Q) Q {
	return &and{Children: children}
}

// Or returns a Q which when evaluated will join the children by "OR".
func Or(children ...Q) Q {
	return &or{Children: children}
}

// Not returns a Q which when evaluated will wrap child with "NOT".
func Not(child Q) Q {
	return &not{Child: child}
}

// and joins its children with "AND"; empty evaluates to TRUE.
type and struct {
	Children []Q
}

// or joins its children with "OR"; empty evaluates to FALSE.
type or struct {
	Children []Q
}

// not negates its single child.
type not struct {
	Child Q
}
// Eval runs all atoms of q through atomToQueryFn, returning the final query
// to run. If any call of atomToQueryFn returns an error, that error is
// returned by Eval.
//
// Eval handles And, Or, Not and booleans. Every other Q is passed to
// atomToQueryFn.
//
// For example in the expression
//
//	And("atom1", Or(true, "atom2", &atom3{})
//
// atomToQueryFn is responsible for converting "atom1", "atom2" and &atom3{}
// into sqlf.Query patterns. Eval will return the expression:
//
//	(query1 AND (query2 OR query3))
//
// Where queryN is the respective output of atomToQueryFn.
//
// Typically we expect atomToQueryFn to return a SQL condition like "name LIKE
// $q". It should also handle unexpected values/types being passed in via
// returning an error. See ExampleEval for a real example of a atomToQueryFn.
func Eval(q Q, atomToQueryFn func(q Q) (*sqlf.Query, error)) (*sqlf.Query, error) {
	// evalAll recursively evaluates each child, stopping at the first error.
	evalAll := func(children []Q) ([]*sqlf.Query, error) {
		out := make([]*sqlf.Query, 0, len(children))
		for _, child := range children {
			query, err := Eval(child, atomToQueryFn)
			if err != nil {
				return nil, err
			}
			out = append(out, query)
		}
		return out, nil
	}

	switch node := q.(type) {
	case *and:
		queries, err := evalAll(node.Children)
		if err != nil {
			return nil, err
		}
		// An empty conjunction is the identity for AND.
		if len(queries) == 0 {
			return sqlf.Sprintf("TRUE"), nil
		}
		return sqlf.Sprintf("(%s)", sqlf.Join(queries, "AND")), nil
	case *or:
		queries, err := evalAll(node.Children)
		if err != nil {
			return nil, err
		}
		// An empty disjunction is the identity for OR.
		if len(queries) == 0 {
			return sqlf.Sprintf("FALSE"), nil
		}
		return sqlf.Sprintf("(%s)", sqlf.Join(queries, "OR")), nil
	case *not:
		inner, err := Eval(node.Child, atomToQueryFn)
		if err != nil {
			return nil, err
		}
		return sqlf.Sprintf("NOT(%s)", inner), nil
	case bool:
		if node {
			return sqlf.Sprintf("TRUE"), nil
		}
		return sqlf.Sprintf("FALSE"), nil
	default:
		// Anything else is an atom; delegate its translation to the caller.
		return atomToQueryFn(q)
	}
}
// Print returns a string representing Q.
//
// It is intended for debugging/logging: atoms are rendered via %s and any
// error from Eval is deliberately discarded (best-effort output).
func Print(q Q) string {
	rq, _ := Eval(q, func(q Q) (*sqlf.Query, error) {
		return sqlf.Sprintf("%s", q), nil
	})
	return fmt.Sprintf(rq.Query(printfBindVar{}), rq.Args()...)
}

// printfBindVar renders bind variables as %#v so Print can substitute the
// actual argument values via fmt.Sprintf.
type printfBindVar struct{}

func (printfBindVar) BindVar(i int) string {
	return "%#v"
}
|
# Evaluate the 512+0+512-shuffled LM checkpoint on WikiText-103 validation.
# Per the flag names: inputs are augmented by keeping only nouns/verbs/
# adjectives/adverbs in the first half-quarter, and loss is scored on the
# last quarter of each sequence (eval_function last_quarter_eval).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-shuffled/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-shuffled/512+0+512-N-VB-ADJ-ADV-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_adjectives_and_adverbs_first_half_quarter --eval_function last_quarter_eval
|
<gh_stars>0
// AMD bootstrap: registers the upload state with the state router once webix
// is ready, installs a route-not-found fallback, and evaluates the current route.
define([
	'webix', './state-router', './upload'
], function(webix, stateRouter, upload) {
	// initialState: state evaluated for the current route on startup.
	// defaultState: fallback when a route is not found (defaults to initialState).
	return function(initialState, defaultState) {
		defaultState = defaultState || initialState;
		webix.ready(function() {
			stateRouter.addState(upload);
			stateRouter.on('routeNotFound', function(route, parameters) {
				console.log('route not found', route, defaultState);
				// go to the fallback state, preserving the requested route as a param
				stateRouter.go(defaultState, { route: route });
			});
			stateRouter.evaluateCurrentRoute(initialState);
		});
	};
});
|
#!/usr/bin/env python
""" nav_square.py - Version 1.1 2013-12-20
A basic demo of the using odometry data to move the robot
along a square trajectory.
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2012 Patrick Goebel. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.5
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import rospy
from geometry_msgs.msg import Twist, Point, Quaternion
import tf
from frobo_nav.transform_utils import quat_to_angle, normalize_angle
from math import degrees, radians, copysign, sqrt, pow, pi
class NavSquare():
    """Odometry-based straight-line drive demo for a ROS robot.

    NOTE(review): despite the name (inherited from the Pi Robot square-
    trajectory demo) this version drives only ONE side: it moves forward
    ~goal_distance meters and stops. goal_angle, angular_speed and
    angular_tolerance are read from the parameter server but never used.
    """

    def __init__(self):
        # Give the node a name
        rospy.init_node('nav_square', anonymous=False)

        # Set rospy to execute a shutdown function when terminating the script
        rospy.on_shutdown(self.shutdown)

        # How fast will we check the odometry values?
        rate = 20

        # Set the equivalent ROS rate variable
        r = rospy.Rate(rate)

        # Set the parameters for the target square
        goal_distance = rospy.get_param("~goal_distance", 3.0)      # meters
        goal_angle = rospy.get_param("~goal_angle", radians(90))    # degrees converted to radians
        linear_speed = rospy.get_param("~linear_speed", 0.1)        # meters per second
        angular_speed = rospy.get_param("~angular_speed", 0.6)      # radians per second
        angular_tolerance = rospy.get_param("~angular_tolerance", radians(0))  # degrees to radians

        # Publisher to control the robot's speed
        # NOTE(review): no queue_size argument — recent rospy versions warn and
        # fall back to synchronous publishing; consider queue_size=5.
        self.cmd_vel = rospy.Publisher('/cmd_vel', Twist)

        # The base frame is base_footprint for the TurtleBot but base_link for Pi Robot
        self.base_frame = rospy.get_param('~base_frame', '/base_link')

        # The odom frame is usually just /odom
        self.odom_frame = rospy.get_param('~odom_frame', '/odom')

        # Initialize the tf listener
        self.tf_listener = tf.TransformListener()

        # Give tf some time to fill its buffer
        rospy.sleep(2)

        # Set the odom frame (overrides the ~odom_frame parameter read above)
        self.odom_frame = '/odom'

        # Find out if the robot uses /base_link or /base_footprint by probing
        # which transform tf can actually resolve.
        try:
            self.tf_listener.waitForTransform(self.odom_frame, '/base_footprint', rospy.Time(), rospy.Duration(1.0))
            self.base_frame = '/base_footprint'
        except (tf.Exception, tf.ConnectivityException, tf.LookupException):
            try:
                self.tf_listener.waitForTransform(self.odom_frame, '/base_link', rospy.Time(), rospy.Duration(1.0))
                self.base_frame = '/base_link'
            except (tf.Exception, tf.ConnectivityException, tf.LookupException):
                rospy.loginfo("Cannot find transform between /odom and /base_link or /base_footprint")
                rospy.signal_shutdown("tf Exception")

        # Initialize the position variable as a Point type
        position = Point()

        # Initialize the movement command
        move_cmd = Twist()

        # Set the movement command to forward motion
        move_cmd.linear.x = linear_speed

        # Get the starting position values
        (position, rotation) = self.get_odom()
        initPosition = position
        rospy.loginfo("Initial position at " + str(position) + " and rotation " + str(degrees(rotation)) + " degrees")
        x_start = position.x
        y_start = position.y

        # Keep track of the distance traveled
        distance = 0

        # Enter the loop to move along a side
        while distance < goal_distance and not rospy.is_shutdown():
            # Publish the Twist message and sleep 1 cycle
            self.cmd_vel.publish(move_cmd)
            r.sleep()

            # Get the current position
            (position, rotation) = self.get_odom()

            # Compute the Euclidean distance from the start
            distance = sqrt(pow((position.x - x_start), 2) +
                            pow((position.y - y_start), 2))

        # Stop the robot when we are done
        self.cmd_vel.publish(Twist())

        #print result
        (position, rotation) = self.get_odom()
        rospy.loginfo("Final position at " + str(position) + " and rotation " + str(degrees(rotation)) + " degrees")
        rospy.loginfo("Difference (position.x - initPosition.x) " + str(position.x - initPosition.x) + "m")

    def get_odom(self):
        """Return (Point, yaw) of base_frame in odom_frame.

        NOTE(review): on a tf exception this implicitly returns None, which the
        callers then unpack — that would raise a TypeError. Consider re-raising
        or returning the last known pose instead.
        """
        # Get the current transform between the odom and base frames
        try:
            (trans, rot) = self.tf_listener.lookupTransform(self.odom_frame, self.base_frame, rospy.Time(0))
        except (tf.Exception, tf.ConnectivityException, tf.LookupException):
            rospy.loginfo("TF Exception")
            return

        return (Point(*trans), quat_to_angle(Quaternion(*rot)))

    def shutdown(self):
        """Stop the robot before the node exits."""
        # Always stop the robot when shutting down the node
        rospy.loginfo("Stopping the robot...")
        self.cmd_vel.publish(Twist())
        rospy.sleep(1)
# Entry point: run the demo; a ROS-initiated interrupt is expected, not an error.
if __name__ == '__main__':
    try:
        NavSquare()
    except rospy.ROSInterruptException:
        rospy.loginfo("Navigation terminated.")
|
<gh_stars>0
// file : xsde/cxx/parser/validating/gday.hxx
// author : <NAME> <<EMAIL>>
// copyright : Copyright (c) 2005-2011 Code Synthesis Tools CC
// license : GNU GPL v2 + exceptions; see accompanying LICENSE file
#ifndef XSDE_CXX_PARSER_VALIDATING_GDAY_HXX
#define XSDE_CXX_PARSER_VALIDATING_GDAY_HXX
#include <xsde/cxx/string.hxx>
#include <xsde/cxx/parser/validating/xml-schema-pskel.hxx>
namespace xsde
{
  namespace cxx
  {
    namespace parser
    {
      namespace validating
      {
        // Validating parser implementation for the XML Schema gDay type.
        // Inheritance is virtual under the mixin reuse style so the pskel
        // base is shared across the mixin hierarchy.
#ifdef XSDE_REUSE_STYLE_MIXIN
        struct gday_pimpl: virtual gday_pskel
#else
        struct gday_pimpl: gday_pskel
#endif
        {
          // Resets parser state before a new value is parsed.
          virtual void
          _pre ();

          // Receives character data of the element's literal value.
          virtual void
          _characters (const ro_string&);

          // Finalizes/validates the accumulated literal.
          virtual void
          _post ();

          // Returns the parsed value; meaningful after _post ().
          virtual gday
          post_gday ();

        protected:
          string str_;          // accumulated lexical representation
          unsigned short day_;  // parsed day component
          bool z_;              // whether a timezone was present
          short zh_, zm_;       // timezone hour/minute offsets
        };
      }
    }
  }
}
#endif // XSDE_CXX_PARSER_VALIDATING_GDAY_HXX
|
#!/usr/bin/env bash
# Tag the locally built example image and push it to Docker Hub, both with the
# commit SHA and as :latest. Requires DOCKER_USERNAME, DOCKER_PASSWORD and SHA
# in the environment.
#
# -e: abort on any command failure; -o pipefail: a failing command anywhere in
# a pipeline fails the pipeline. (The previous `set -o errexit` duplicated -e.)
set -eo pipefail

# Password via stdin so it never appears in the process list.
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin

docker tag opentracing-spring-haystack-example "expediadotcom/opentracing-spring-haystack-example:${SHA}"
docker push "expediadotcom/opentracing-spring-haystack-example:${SHA}"
docker tag opentracing-spring-haystack-example expediadotcom/opentracing-spring-haystack-example:latest
docker push expediadotcom/opentracing-spring-haystack-example:latest
|
import matplotlib
# Select a non-interactive backend before pyplot is (transitively) imported,
# so the script also works on headless machines.
matplotlib.use('Agg')
import torch
from torch import nn
from model_helper import load_checkpoint_reconstruct, predict_im, calc_val_metrics
from image_helper import process_image
import argparse
from data_helper import load_labels, make_dataloader
import numpy as np

# Command-line interface: evaluate a saved checkpoint on train/valid/test data.
parser = argparse.ArgumentParser()
parser.add_argument('data_dir', action='store',
                    help='directory containing the data. must contain test, valid and train subfolders. these must contain subfolders for each category.')
parser.add_argument('checkpoint', action='store',
                    help='checkpoint of trained model')
parser.add_argument('--top_k', action='store', type=int,
                    dest='top_k', default=5,
                    help='output of top k classes')
parser.add_argument('--nr_probes', action='store', type=int,
                    dest='nr_probes', default=5,
                    help='do classification for this many images for a random sample from each dataset (train, test, valid)')
# --gpu / --cpu are mutually overriding switches; --gpu wins when both are given
# (see the device-selection logic below).
parser.add_argument('--gpu', action='store_true',
                    default=False,
                    dest='set_gpu',
                    help='switch to set gpu mode explicitely. default is autodetect')
parser.add_argument('--cpu', action='store_true',
                    default=False,
                    dest='set_cpu',
                    help='switch to set cpu mode explicitely. default is autodetect')
parser.add_argument('--printmodel', action='store_true',
                    default=False,
                    dest='printmodel',
                    help='for debugging: print model architecture to console')
parser.add_argument('--performance', nargs='+', action='store',
                    default=[],
                    dest='performance',
                    help='calculate overall performance (accuracy) for data sets. can have multiple options: train test valid. Caution: may take a long time for train set, and even longer in cpu mode')
args = parser.parse_args()
# Unpack CLI arguments into locals for readability.
data_dir = args.data_dir
nr_probes = args.nr_probes
checkpoint = args.checkpoint
top_k = args.top_k
printmodel = args.printmodel
set_cpu = args.set_cpu
set_gpu = args.set_gpu
acc_dsets = args.performance

# Resolve the torch device: explicit flags win (--gpu before --cpu),
# otherwise autodetect CUDA availability.
if set_gpu:
    device = torch.device('cuda:0')
    print("Device manually set to cuda")
elif set_cpu:
    device = torch.device('cpu')
    print("Device manually set to cpu")
else:  # autodetect
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    print(f"device autodetected as {device.type}")

# Rebuild the model from the checkpoint and move it to the chosen device.
fl_model, log = load_checkpoint_reconstruct(checkpoint, device)
fl_model.to(device)
if printmodel:
    print(fl_model)

dataloader, class_to_idx = make_dataloader(data_dir)
dsets = ['train', 'valid', 'test']
criterion = nn.NLLLoss()
fl_model.eval()  # inference mode: disable dropout/batch-norm updates
dataiter = {x: iter(dataloader[x]) for x in dsets}
np.set_printoptions(precision=3)

for dset in dsets:
    # One batch per dataset is enough to sample nr_probes images from.
    images, labels = next(dataiter[dset])
    if dset in acc_dsets:
        print(f"calculating overall performance on {dset} set...")
        val_time, test_loss, test_accuracy = calc_val_metrics(device, fl_model, dataloader[dset], criterion)
        print(f"accuracy:{test_accuracy:.3f}")
    print(f"checking true label against prediction for {dset} set")
    for i in range(nr_probes):
        t_prob, t_class = predict_im(images[i], fl_model, device, top_k)
        # (a stray no-op `t_class` expression statement was removed here)
        print(f"true label:{labels[i]:03d} | prediction:{t_class[0]:03d} | {t_class} | {t_prob}")
|
<reponame>Th3Shadowbroker/AtMessage
package com.th3shadowbroker.AtMessage.Etc;
import org.bukkit.entity.Player;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
public class AtMessageSendEvent extends Event {

    // Bukkit requires exactly ONE shared HandlerList per event class;
    // returning a fresh instance from getHandlers()/getHandlerList() (as the
    // previous version did) breaks listener registration and unregistration.
    private static final HandlerList HANDLERS = new HandlerList();

    //Sender as player
    private final Player sender;

    //Target as Player
    private final Player target;

    //Message as String
    private final String message;

    /**
     * Fired when a player sends an @-message to another player.
     *
     * @param sender  the player sending the message
     * @param target  the player the message is addressed to
     * @param message the raw message text
     */
    public AtMessageSendEvent( Player sender, Player target, String message )
    {
        this.sender = sender;
        this.target = target;
        this.message = message;
    }

    //== Sender information functions ==//

    //Get sender as player
    public Player getSender()
    {
        return sender;
    }

    //Get sender's name
    public String getSenderName()
    {
        return sender.getName();
    }

    //== Target information functions ==//

    //Get target as player
    public Player getTarget()
    {
        return target;
    }

    //Get target's name
    public String getTargetName()
    {
        return target.getName();
    }

    //== Message information functions ==//

    //Get message as string
    public String getMessage()
    {
        return message;
    }

    //From here: Required stuff only
    @Override
    public HandlerList getHandlers() {
        return HANDLERS;
    }

    public static HandlerList getHandlerList() {
        return HANDLERS;
    }
}
|
def search_string(database, string):
    """Return True if `string` is found in `database`, else False.

    Uses the `in` operator, so this performs substring search when
    `database` is a str and membership testing for any other container.

    :param database: str or container to search in
    :param string: value to look for
    :return: bool
    """
    # `in` already yields a bool; no need for an explicit if/else.
    return string in database
|
package com.linwei.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @Author: WS
 * @Time: 2020/4/27
 * @Description: Annotation marking fields whose values are passed between
 * screens (activities) via Intent extras.
 */
@Retention(RetentionPolicy.CLASS)
@Target(ElementType.FIELD)
public @interface IntentField {
    /**
     * Identifier of the target component, e.g. {@code MainActivity}.
     * An empty string is the default; presumably the annotation processor
     * then derives the key/target itself — TODO confirm against the processor.
     *
     * @return the target identifier, or "" when unset
     */
    String value() default "";
}
|
/**
* This code was generated by
* \ / _ _ _| _ _
* | (_)\/(_)(_|\/| |(/_ v1.0.0
* / /
*/
package com.twilio.twiml.voice;
import com.twilio.converter.Promoter;
import com.twilio.twiml.TwiML;
import java.net.URI;
/**
* TwiML wrapper for {@code <Sip>}
*/
/**
 * TwiML wrapper for the {@code <Sip>} noun used inside {@code <Refer>}.
 */
public class ReferSip extends TwiML {

    /** The SIP URI this element renders as its body. */
    private final URI sipUrl;

    /** No-arg constructor used only by XML serialization/deserialization. */
    private ReferSip() {
        this(new Builder((URI) null));
    }

    /** Builds a {@code <Sip>} element from the supplied builder state. */
    private ReferSip(Builder b) {
        super("Sip", b);
        this.sipUrl = b.sipUrl;
    }

    /**
     * Returns the body of the TwiML element.
     *
     * @return the SIP URL rendered as a string, or null when absent
     */
    protected String getElementBody() {
        URI url = this.getSipUrl();
        if (url == null) {
            return null;
        }
        return url.toString();
    }

    /**
     * Returns the configured SIP URL.
     *
     * @return the SIP URL
     */
    public URI getSipUrl() {
        return sipUrl;
    }

    /** Builder for {@code <Sip>} elements. */
    public static class Builder extends TwiML.Builder<Builder> {

        private URI sipUrl;

        /** Creates a builder from an already-parsed {@link URI}. */
        public Builder(URI sipUrl) {
            this.sipUrl = sipUrl;
        }

        /** Creates a builder from a string, promoting it to a {@link URI}. */
        public Builder(String sipUrl) {
            this.sipUrl = Promoter.uriFromString(sipUrl);
        }

        /** @return the constructed {@code ReferSip} element */
        public ReferSip build() {
            return new ReferSip(this);
        }
    }
}
|
#! /bin/sh
# Look up this node's external IPv4 via the EVE metadata service, then run a
# remote check for Ubuntu in /etc/issue over SSH. $1 is the SSH port.
EVE_IP=$(curl -s "http://169.254.169.254/eve/v1/network.json" | jq -r '."external-ipv4"')
# Shared SSH options; left unquoted below on purpose so they word-split back
# into individual flags (keeps the echoed command identical to before).
SSH_OPTS="-o ConnectTimeout=10 -o StrictHostKeyChecking=no -i /root/.ssh/id_rsa"
# Print the exact command for the log, then execute it.
echo ssh $SSH_OPTS root@"$EVE_IP" -p "$1" grep Ubuntu /etc/issue
ssh $SSH_OPTS root@"$EVE_IP" -p "$1" grep Ubuntu /etc/issue
|
<filename>func-rxjava2/src/main/java/cyclops/rxjava2/container/higherkinded/Rx2Witness.java<gh_stars>0
package cyclops.rxjava2.container.higherkinded;
import com.oath.cyclops.anym.extensability.MonadAdapter;
import cyclops.monads.AnyM;
import cyclops.monads.WitnessType;
import cyclops.rxjava2.adapter.FlowableAdapter;
import cyclops.rxjava2.adapter.impl.FlowableReactiveSeqImpl;
import cyclops.rxjava2.adapter.MaybeAdapter;
import cyclops.rxjava2.adapter.ObservableAdapter;
import cyclops.rxjava2.adapter.ObservableReactiveSeqImpl;
import cyclops.rxjava2.adapter.SingleAdapter;
import io.reactivex.Flowable;
import io.reactivex.Maybe;
import io.reactivex.Observable;
import io.reactivex.Single;
@Deprecated
public interface Rx2Witness {
    // Witness/higher-kinded-type plumbing for cyclops AnyM over RxJava 2
    // containers. Each nested lowercase enum is a type-level tag ("witness")
    // naming one RxJava 2 type, and supplies the MonadAdapter that AnyM uses
    // to operate on values of that type. Marked @Deprecated upstream.

    /**
     * Unwraps an {@code AnyM} tagged with the {@code maybe} witness back to a
     * raw RxJava 2 {@link Maybe}.
     */
    public static <T> Maybe<T> maybe(AnyM<maybe, ? extends T> anyM) {
        return anyM.unwrap();
    }

    /**
     * Unwraps an {@code AnyM} tagged with the {@code flowable} witness back to
     * a raw RxJava 2 {@link Flowable}. The wrapped value is expected to be a
     * {@code FlowableReactiveSeqImpl} holding the original Flowable.
     */
    public static <T> Flowable<T> flowable(AnyM<flowable, ? extends T> anyM) {
        FlowableReactiveSeqImpl<T> obs = anyM.unwrap();
        return obs.getFlowable();
    }

    /**
     * Unwraps an {@code AnyM} tagged with the {@code single} witness back to a
     * raw RxJava 2 {@link Single}.
     */
    public static <T> Single<T> single(AnyM<single, ? extends T> anyM) {
        return anyM.unwrap();
    }

    /**
     * Unwraps an {@code AnyM} tagged with the {@code observable} witness back
     * to a raw RxJava 2 {@link Observable}, via its ReactiveSeq wrapper.
     */
    public static <T> Observable<T> observable(AnyM<observable, ? extends T> anyM) {
        ObservableReactiveSeqImpl<T> obs = anyM.unwrap();
        return obs.getObservable();
    }

    /** Witness tag for {@link Maybe}; provides its {@link MaybeAdapter}. */
    public static enum maybe implements MaybeWitness<maybe> {
        INSTANCE;
        @Override
        public MonadAdapter<maybe> adapter() {
            return new MaybeAdapter();
        }
    }

    /** Witness tag for {@link Flowable}; provides its {@link FlowableAdapter}. */
    public static enum flowable implements FlowableWitness<flowable> {
        INSTANCE;
        @Override
        public MonadAdapter<flowable> adapter() {
            return new FlowableAdapter();
        }
    }

    /** Witness tag for {@link Single}; provides its {@link SingleAdapter}. */
    public static enum single implements SingleWitness<single> {
        INSTANCE;
        @Override
        public MonadAdapter<single> adapter() {
            return new SingleAdapter();
        }
    }

    /** Witness tag for {@link Observable}; provides its {@link ObservableAdapter}. */
    public static enum observable implements ObservableWitness<observable> {
        INSTANCE;
        @Override
        public MonadAdapter<observable> adapter() {
            return new ObservableAdapter();
        }
    }

    // Marker WitnessType interfaces tying each witness enum to its container.
    static interface MaybeWitness<W extends MaybeWitness<W>> extends WitnessType<W> {
    }
    static interface FlowableWitness<W extends FlowableWitness<W>> extends WitnessType<W> {
    }
    static interface SingleWitness<W extends SingleWitness<W>> extends WitnessType<W> {
    }
    static interface ObservableWitness<W extends ObservableWitness<W>> extends WitnessType<W> {
    }
}
|
<gh_stars>0
package ch15.ex6;
/**
 * Placeholder for chapter 15, exercise 6 — no implementation yet.
 */
public class ex156
{
    // TODO: implement the exercise solution.
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.