text
stringlengths 1
1.05M
|
|---|
<reponame>yicone/steedos-platform
// Bootstrap: reload the current project's environment from its .env files,
// discarding any values inherited from the parent process.
const path = require("path");
const dotenv = require("dotenv-flow");

// Drop inherited values so dotenv-flow can repopulate them from disk.
for (const key of ["MONGO_URL", "MONGO_OPLOG_URL", "ROOT_URL", "PORT"]) {
  delete process.env[key];
}

const projectPath = process.cwd();
dotenv.config({path: projectPath, debug: true, purge_dotenv: true});
console.log(`MONGO_URL: ${process.env.MONGO_URL}`);

module.exports = {};
|
/*
* Copyright (c) 2007-2013 Concurrent, Inc. All Rights Reserved.
*
* Project and contact information: http://www.cascading.org/
*
* This file is part of the Cascading project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cascading.operation.filter;
import java.beans.ConstructorProperties;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import cascading.flow.FlowProcess;
import cascading.operation.BaseOperation;
import cascading.operation.Filter;
import cascading.operation.OperationCall;
import cascading.tuple.Fields;
import cascading.tuple.TupleEntry;
/**
* Class Logic is the base class for logical {@link Filter} operations.
*
* @see And
* @see Or
* @see Xor
*/
public abstract class Logic extends BaseOperation<Logic.Context> implements Filter<Logic.Context>
{
    /** Field fields — one argument selector per wrapped filter. */
    protected final Fields[] argumentSelectors;
    /** Field filters — the child filters this logical operation combines. */
    protected final Filter[] filters;

    /** Varargs-to-array helper so a constructor can forward its varargs. */
    private static Filter[] filters( Filter... filters )
    {
        return filters;
    }

    /** Per-operation state: a reusable TupleEntry per selector and one
     * child-filter context slot per wrapped filter. */
    public class Context
    {
        TupleEntry[] argumentEntries;
        Object[] contexts;
    }

    /** Builds a Logic over the given filters, each seeing ALL arguments. */
    @ConstructorProperties({"filters"})
    protected Logic( Filter... filters )
    {
        this.filters = filters;
        if( filters == null )
            throw new IllegalArgumentException( "given filters array must not be null" );
        // Every child filter sees all arguments by default.
        this.argumentSelectors = new Fields[ filters.length ];
        Arrays.fill( this.argumentSelectors, Fields.ALL );
        verify();
        this.numArgs = getFieldsSize();
    }

    /** Convenience two-operand constructor (lhs/rhs selector + filter pairs). */
    @ConstructorProperties({"lhsArgumentsSelector", "lhsFilter", "rhsArgumentSelector", "rhsFilter"})
    protected Logic( Fields lhsArgumentSelector, Filter lhsFilter, Fields rhsArgumentSelector, Filter rhsFilter )
    {
        this( Fields.fields( lhsArgumentSelector, rhsArgumentSelector ), filters( lhsFilter, rhsFilter ) );
    }

    /** General form: pairs each argumentSelectors[i] with filters[i]. */
    @ConstructorProperties({"argumentSelectors", "filters"})
    protected Logic( Fields[] argumentSelectors, Filter[] filters )
    {
        this.argumentSelectors = argumentSelectors;
        this.filters = filters;
        verify();
        this.numArgs = getFieldsSize();
    }

    /** Validates that both arrays are non-null, contain no null entries,
     * and that each selector is either ALL or a fully 'defined' selector. */
    protected void verify()
    {
        if( argumentSelectors == null )
            throw new IllegalArgumentException( "given argumentSelectors array must not be null" );
        if( filters == null )
            throw new IllegalArgumentException( "given filters array must not be null" );
        for( Fields field : argumentSelectors )
        {
            if( field == null )
                throw new IllegalArgumentException( "given argumentSelectors must not be null" );
            if( !field.isAll() && !field.isDefined() )
                throw new IllegalArgumentException( "given argumentSelectors must be ALL or 'defined' selectors, got: " + field.print() );
        }
        for( Filter filter : filters )
        {
            if( filter == null )
                throw new IllegalArgumentException( "given filters must not be null" );
        }
    }

    /** Prepares each child filter in turn, capturing the context each one
     * installs on the shared OperationCall into our own Context slots. */
    @Override
    public void prepare( FlowProcess flowProcess, OperationCall operationCall )
    {
        Context context = new Context();
        context.argumentEntries = getArgumentEntries();
        context.contexts = new Object[ filters.length ];
        for( int i = 0; i < filters.length; i++ )
        {
            Filter filter = filters[ i ];
            filter.prepare( flowProcess, operationCall );
            // Steal the child's context, then clear the call for the next child.
            context.contexts[ i ] = operationCall.getContext();
            operationCall.setContext( null );
        }
        operationCall.setContext( context );
    }

    /** Restores each child's saved context before delegating cleanup to it. */
    @Override
    public void cleanup( FlowProcess flowProcess, OperationCall operationCall )
    {
        Context context = (Context) operationCall.getContext();
        Object[] contexts = context.contexts;
        for( int i = 0; i < filters.length; i++ )
        {
            Filter filter = filters[ i ];
            operationCall.setContext( contexts[ i ] );
            filter.cleanup( flowProcess, operationCall );
        }
        operationCall.setContext( null );
    }

    /** Counts the distinct argument positions across all selectors, or ANY
     * when a substitution selector (ALL, per verify()) is present. */
    protected int getFieldsSize()
    {
        Set<Comparable> pos = new HashSet<Comparable>();
        for( Fields field : argumentSelectors )
        {
            if( field.isSubstitution() ) // will be tested to be ALL in verify
                return ANY;
            for( int i = 0; i < field.size(); i++ )
                pos.add( field.get( i ) );
        }
        return pos.size();
    }

    /** Builds one reusable TupleEntry per argument selector. */
    private final TupleEntry[] getArgumentEntries()
    {
        TupleEntry[] argumentEntries = new TupleEntry[ argumentSelectors.length ];
        for( int i = 0; i < argumentSelectors.length; i++ )
        {
            Fields argumentSelector = argumentSelectors[ i ];
            argumentEntries[ i ] = new TupleEntry( Fields.asDeclaration( argumentSelector ) );
        }
        return argumentEntries;
    }

    @Override
    public boolean equals( Object object )
    {
        if( this == object )
            return true;
        if( !( object instanceof Logic ) )
            return false;
        if( !super.equals( object ) )
            return false;
        Logic logic = (Logic) object;
        if( !Arrays.equals( argumentSelectors, logic.argumentSelectors ) )
            return false;
        if( !Arrays.equals( filters, logic.filters ) )
            return false;
        return true;
    }

    @Override
    public int hashCode()
    {
        int result = super.hashCode();
        result = 31 * result + ( argumentSelectors != null ? Arrays.hashCode( argumentSelectors ) : 0 );
        result = 31 * result + ( filters != null ? Arrays.hashCode( filters ) : 0 );
        return result;
    }
}
|
<reponame>JasonLiu798/javautil
package com.atjl.util.collection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Random;
/**
* Created by JasonLiu798 on 16/6/3.
*/
public class SearchUtil {
private SearchUtil(){
throw new UnsupportedOperationException();
}
/**
* check number is exist in array
*
* @param array number array
* @param number target
* @return result
*/
public static boolean numberInArray(int[] array, int number) {
int start = 0, end, middle, count = 0;
int N = array.length;
end = N;
middle = (start + end) / 2;
while (number != array[middle]) {
if (number > array[middle])
start = middle;
else if (number < array[middle])
end = middle;
middle = (start + end) / 2;
count++;
if (count > N / 2)
break;
}
if (count > N / 2)
return false;
else
return true;
}
public static <T> int binarySearchRaw(ArrayList<T> srcArray, T tgt, Comparator<T> comparator){
return Collections.binarySearch(srcArray,tgt,comparator);
}
/**
* find the tgt element index
* @param srcArray
* @param tgt
* @param comparator
* @param <T>
* @return
*/
public static <T> int binarySearch(ArrayList<T> srcArray, T tgt, Comparator<T> comparator) {
return binarySearchRaw(srcArray,tgt,comparator,false);
}
/**
*
* @param srcArray
* @param tgt
* @param comparator
* @param <T>
* @return
*/
public static <T> int binarySearchExpect(ArrayList<T> srcArray, T tgt, Comparator<T> comparator) {
return binarySearchRaw(srcArray,tgt,comparator,true);
}
public static <T> int binarySearchRaw(ArrayList<T> srcArray, T tgt, Comparator<T> comparator,boolean findExpect) {
int low = 0;
int high = srcArray.size() - 1;
while (low <= high) {
//中间位置计算,low+ 最高位置减去最低位置,右移一位,相当于除2.也可以用(high+low)/2
int middle = low + ((high - low) >> 1);
//与最中间的数字进行判断,是否相等,相等的话就返回对应的数组下标.
int cmpRes = comparator.compare(tgt, srcArray.get(middle));
if (cmpRes == 0) {
return middle;
//如果小于的话则移动最高层的"指针"
} else if (cmpRes < 0) {
high = middle - 1;
//移动最低的"指针"
} else {
low = middle + 1;
}
}
if (findExpect) {
if(low==high)
return low;
else{
return Math.max(low, high);
}
}
return -1;//not find
}
public static <T> int binarySearchRecu(ArrayList<T> srcArray,int low, int high, T tgt, Comparator<T> comparator) {
if (low <= high) {
int mid = (low + high) / 2;
int cmpRes = comparator.compare(tgt, srcArray.get(mid));
if (cmpRes == 0)
return mid;
else if (cmpRes < 0)
//移动low和high
return binarySearchRecu(srcArray, low, mid - 1, tgt,comparator);
else
return binarySearchRecu(srcArray, mid + 1, high, tgt,comparator);
} else
return -1;
}
public static <T> int partition(T[] a, Comparator<? super T> c, int p, int r) {
T t = a[r - 1];
int i = p - 1;
for (int j = p; j < r - 1; j++) {
if (c.compare(a[j], t) <= 0) {
i++;
T tmp = a[i];
a[i] = a[j];
a[j] = tmp;
}
}
T tmp = a[i + 1];
a[i + 1] = a[r - 1];
a[r - 1] = tmp;
return i + 1;
}
public static <T> int randomizedPartition(T[] a, Comparator<? super T> c,
int p, int r) {
int i = new Random().nextInt(r - p) + p;
T tmp = a[i];
a[i] = a[r - 1];
a[r - 1] = tmp;
return partition(a, c, p, r);
}
public static <T> T randomizedSelect(T[] t,
Comparator<? super T> comparator, int p, int r, int i) {
if (p == r)
return t[p];
int q = randomizedPartition(t, comparator, p, r);
int k = q - p + 1;
if (i <= k)
return randomizedSelect(t, comparator, p, q, i);
else
return randomizedSelect(t, comparator, q + 1, r, i - k);
}
public static <T> T randomizedSelect(T[] t,
Comparator<T> comparator, int i) {
return randomizedSelect(t, comparator, 0, t.length, i);
}
}
|
# -*- coding: utf-8 -*-
"""
Kay miscellanea.
:Copyright: (c) 2009 Accense Technology, Inc. All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os, sys
def get_appid():
    """Return the App Engine application id, or None when it cannot be found.

    Inside a running appserver the id comes from the APPLICATION_ID
    environment variable; otherwise it is loaded from the project's app
    configuration via dev_appserver.LoadAppConfig.
    """
    from google.appengine.api import apiproxy_stub_map
    # A registered datastore stub means we are running inside an appserver.
    have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
    if have_appserver:
        appid = os.environ.get('APPLICATION_ID')
    else:
        try:
            from google.appengine.tools import dev_appserver
            from kay import PROJECT_DIR
            appconfig, unused = dev_appserver.LoadAppConfig(PROJECT_DIR, {})
            appid = appconfig.application
        except ImportError:
            # SDK tooling unavailable outside the dev environment.
            appid = None
    return appid
def get_datastore_paths():
    """Returns a tuple with the path to the datastore and history file.

    The datastore is stored in the same location as dev_appserver uses by
    default, but the name is altered to be unique to this project so multiple
    Django projects can be developed on the same machine in parallel.

    Returns:
      (datastore_path, history_path)
    """
    from google.appengine.tools import dev_appserver_main
    defaults = dev_appserver_main.DEFAULT_ARGS
    # Swap the generic "dev_appserver" marker for a project-unique one.
    datastore_path = defaults['datastore_path'].replace(
        "dev_appserver", "kay_%s" % get_appid())
    history_path = defaults['history_path'].replace(
        "dev_appserver", "kay_%s" % get_appid())
    return datastore_path, history_path
class NullMemcache(object):
    """A no-op stand-in for a memcache client.

    Every lookup misses and every store is silently discarded; both
    operations return None.
    """

    def get(self, name):
        # Always a cache miss.
        return None

    def set(self, name, value, ttl):
        # Discard the value; nothing to report.
        return None
|
<reponame>davepacheco/node-manta
/*
* Copyright 2016 Joyent, Inc.
*/
/*
* A common logger setup for test files.
*/
var assert = require('assert-plus');
var bunyan = require('bunyan');
var path = require('path');
var restifyClients = require('restify-clients');
function createLogger() {
return (bunyan.createLogger({
name: path.basename(process.argv[1]),
serializers: restifyClients.bunyan.serializers,
src: true,
streams: [
{
level: (process.env.TEST_LOG_LEVEL || 'info'),
stream: process.stderr
}
]
}));
}
module.exports = {
createLogger: createLogger
};
|
import { should } from 'chai';
import { HierarchyInstance } from '../../types/truffle-contracts';
// tslint:disable:no-var-requires
const { expectEvent, expectRevert } = require('@openzeppelin/test-helpers');
const Hierarchy = artifacts.require('Hierarchy') as Truffle.Contract<HierarchyInstance>;
should();

/** @test {Hierarchy} contract */
contract('Hierarchy', (accounts) => {
    let hierarchy: HierarchyInstance;
    // bytes32('') is used as the root role id; 'ADDED' is a secondary role.
    const ROOT_ROLE = stringToBytes32('');
    const ADDED_ROLE = stringToBytes32('ADDED');
    // accounts[0] deploys and holds the root role; accounts[1] is unprivileged.
    const root = accounts[0];
    const user1 = accounts[1];

    beforeEach(async () => {
        // Fresh deployment per test; `root` is seeded as the root member.
        hierarchy = await Hierarchy.new(root);
    });

    /**
     * @test {Hierarchy#hasRole}
     */
    it('hasRole returns true for members of a role', async () => {
        assert.isTrue(await hierarchy.hasRole(ROOT_ROLE, root));
        assert.isFalse(await hierarchy.hasRole(ROOT_ROLE, user1));
    });

    /**
     * @test {Hierarchy#addRole}
     */
    it('addRole throws if not called by a member of the admin role.', async () => {
        await expectRevert(
            hierarchy.addRole(ADDED_ROLE, ROOT_ROLE, { from: user1 }),
            'Restricted to members.',
        );
    });

    /**
     * @test {Hierarchy#grantRole}
     */
    it('grantRole throws if not called by a member of the admin role.', async () => {
        await expectRevert(
            hierarchy.grantRole(ROOT_ROLE, user1, { from: user1 }),
            'AccessControl: sender must be an admin to grant',
        );
    });

    /**
     * @test {Hierarchy#revokeRole}
     */
    it('revokeRole throws if not called by a member of the admin role.', async () => {
        await expectRevert(
            hierarchy.revokeRole(ROOT_ROLE, user1, { from: user1 }),
            'AccessControl: sender must be an admin to revoke',
        );
    });

    /**
     * @test {Hierarchy#grantRole}
     */
    it('grantRole adds an account to a role.', async () => {
        await hierarchy.grantRole(ROOT_ROLE, user1, { from: root });
        assert.isTrue(await hierarchy.hasRole(ROOT_ROLE, user1));
    });

    /**
     * @test {Hierarchy#grantRole}
     */
    it('adds an admin role.', async () => {
        // The transaction receipt is checked for the AdminRoleSet event.
        expectEvent(
            await hierarchy.addRole(ADDED_ROLE, ROOT_ROLE, { from: root }),
            'AdminRoleSet',
            {
                roleId: ADDED_ROLE,
                adminRoleId: ROOT_ROLE,
            },
        );
        // assert.equal(await hierarchy.getAdminRole(ADDED_ROLE), ROOT_ROLE);
    });

    describe('with existing users and roles', () => {
        beforeEach(async () => {
            await hierarchy.addRole(ADDED_ROLE, ROOT_ROLE, { from: root });
            await hierarchy.grantRole(ADDED_ROLE, user1, { from: root });
        });

        /**
         * @test {Community#revokeRole}
         */
        it('revokeRole removes a member from a role.', async () => {
            await hierarchy.revokeRole(ADDED_ROLE, user1, { from: root });
            assert.isFalse(await hierarchy.hasRole(ADDED_ROLE, user1));
        });
    });
});
/**
 * Converts an ASCII string to a 0x-prefixed bytes32 hex literal,
 * right-padded with zeroes to 66 characters ('0x' + 64 hex digits).
 */
function stringToBytes32(text: string) {
    let result = web3.utils.fromAscii(text);
    while (result.length < 66) result += '0'; // 0x + 64 digits
    // Fix: terminate the return statement explicitly instead of relying on ASI.
    return result;
}
/** Decodes a bytes32 hex literal back to ASCII, stripping NUL padding. */
function bytes32ToString(text: string) {
    return web3.utils.toAscii(text).replace(/\0/g, '');
}
|
import random

def shuffleArray(arr):
    # NOTE: random.shuffle works in place, so the caller's list is mutated;
    # the same list object is returned for convenience.
    random.shuffle(arr)
    return arr

# Demo: shuffle a small list and print the (mutated) result.
array = [1, 2, 3, 4, 5, 6]
shuffledArray = shuffleArray(array)
print(shuffledArray)
|
#!/bin/bash
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

# Directory layout consumed by the training scripts: input data is expected
# three levels up in ./data; outputs go to ./output (presumably "PT" refers
# to the PyTorch tooling these variables configure — confirm with consumers).
export PT_DATA_DIR=../../../data
export PT_OUTPUT_DIR=output
|
<gh_stars>0
package controllers
import base.Logging
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json._
import play.api.mvc._
import play.modules.reactivemongo.MongoController
import play.modules.reactivemongo.json.collection.JSONCollection
import reactivemongo.api.Cursor
import scala.concurrent.Future
/**
 * The Users controllers encapsulates the Rest endpoints and the interaction with the MongoDB, via ReactiveMongo
 * play plugin. This provides a non-blocking driver for mongoDB as well as some useful additions for handling JSon.
 * @see https://github.com/ReactiveMongo/Play-ReactiveMongo
 */
trait Users extends Controller with MongoController with Logging {
  /*
   * Get a JSONCollection (a Collection implementation that is designed to work
   * with JsObject, Reads and Writes.)
   * Note that the `collection` is not a `val`, but a `def`. We do _not_ store
   * the collection reference to avoid potential problems in development with
   * Play hot-reloading.
   */
  def collection: JSONCollection = db.collection[JSONCollection]("users")

  // ------------------------------------------ //
  // Using case classes + Json Writes and Reads //
  // ------------------------------------------ //
  import models.JsonFormats._
  import models._

  /** POST endpoint: validates the JSON body as a User and inserts it. */
  def createUser = Action.async(parse.json) {
    request =>
      /*
       * request.body is a JsValue.
       * There is an implicit Writes that turns this JsValue as a JsObject,
       * so you can call insert() with this JsValue.
       * (insert() takes a JsObject as parameter, or anything that can be
       * turned into a JsObject using a Writes.)
       */
      request.body.validate[User].map {
        user =>
          // `user` is an instance of the case class `models.User`
          collection.insert(user).map {
            lastError =>
              logger.debug(s"Successfully inserted with LastError: $lastError")
              Created(s"User Created")
          }
      }.getOrElse(Future.successful(BadRequest("invalid json")))
  }

  /** PUT-style endpoint: replaces the user matched by first and last name
    * with the validated User in the request body. */
  def updateUser(firstName: String, lastName: String) = Action.async(parse.json) {
    request =>
      request.body.validate[User].map {
        user =>
          // find our user by first name and last name
          val nameSelector = Json.obj("firstName" -> firstName, "lastName" -> lastName)
          collection.update(nameSelector, user).map {
            lastError =>
              logger.debug(s"Successfully updated with LastError: $lastError")
              Created(s"User Updated")
          }
      }.getOrElse(Future.successful(BadRequest("invalid json")))
  }

  /** GET endpoint: returns all active users, newest first, as a JSON array. */
  def findUsers = Action.async {
    // let's do our query
    val cursor: Cursor[User] = collection.
      // find all
      find(Json.obj("active" -> true)).
      // sort them by creation date
      sort(Json.obj("created" -> -1)).
      // perform the query and get a cursor of JsObject
      cursor[User]

    // gather all the JsObjects in a list
    val futureUsersList: Future[List[User]] = cursor.collect[List]()

    // transform the list into a JsArray
    val futurePersonsJsonArray: Future[JsArray] = futureUsersList.map { users =>
      Json.arr(users)
    }
    // everything's ok! Let's reply with the array
    futurePersonsJsonArray.map {
      users =>
        Ok(users(0))
    }
  }
}

/** Concrete controller instance wired up by the Play router. */
object Users extends Users
|
<filename>src/main/java/com/dashradar/privatesendanalysis/GraphLoader.java
package com.dashradar.privatesendanalysis;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.Optional;
import java.util.Scanner;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.jgrapht.Graphs;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.DepthFirstIterator;
/**
 * Loads PrivateSend transaction graphs either from a remote evaluator
 * service (HTTP POST with basic auth built from neo4j credentials) or from
 * a local CSV file, and can split a loaded graph into per-mixing segments.
 */
public class GraphLoader {

    /** Shared pooled connection manager for all evaluator requests. */
    private static PoolingHttpClientConnectionManager poolingConnManager = new PoolingHttpClientConnectionManager();

    /**
     * POSTs {txid, maxRounds} to the evaluator and returns the raw response.
     * The caller is responsible for closing the response.
     */
    private static CloseableHttpResponse fromEvaluator(String txid, int maxrounds, String neo4jusername, String neo4jpassword, String evaluatorURL) throws IOException {
        poolingConnManager.setMaxTotal(50);
        poolingConnManager.setDefaultMaxPerRoute(16);
        // Basic-auth header from the neo4j credentials.
        String encoding = Base64.getEncoder().encodeToString((neo4jusername + ":" + neo4jpassword).getBytes());
        CloseableHttpClient client
                = HttpClients.custom().setConnectionManager(poolingConnManager)
                .build();
        HttpPost httppost = new HttpPost(evaluatorURL);
        httppost.setHeader("Authorization", "Basic " + encoding);
        httppost.setEntity(new StringEntity("{\"txid\":\"" + txid + "\", \"maxRounds\":\"" + maxrounds + "\"}"));
        return client.execute(httppost);
    }

    /** Streams the evaluator's CSV response for txid into psgraphs/&lt;txid&gt;.csv. */
    public static void evaluatorToFile(String txid, int maxrounds, String neo4jusername, String neo4jpassword, String evaluatorURL) throws FileNotFoundException, IOException {
        try (CloseableHttpResponse response = fromEvaluator(txid, maxrounds, neo4jusername, neo4jpassword, evaluatorURL)) {
            HttpEntity entity = response.getEntity();
            String fileName = txid + ".csv";
            try (
                    OutputStream os = new FileOutputStream("psgraphs/" + fileName);
                    InputStream is = entity.getContent()
            ) {
                IOUtils.copy(is, os);
            }
        }
    }

    /**
     * Splits graph into subgraphs, one per first-round transaction reachable
     * from the root node; segments found to share a vertex are merged into
     * the previously built overlapping segment.
     */
    public static List<PsGraph> segmentGraph(PsGraph graph) {
        List<PsGraph> result = new ArrayList<>();
        String root = graph.rootNode().get();
        Set<DefaultEdge> inputs = graph.outgoingEdgesOf(root);
        Set<String> firstRoundTxIds = inputs.stream().map(input -> graph.getEdgeTarget(input)).collect(Collectors.toSet());
        firstRoundTxIdLoop:
        for (String firstRoundTxId : firstRoundTxIds) {
            PsGraph segment = new PsGraph();
            DepthFirstIterator<String, DefaultEdge> iter = new DepthFirstIterator<>(graph, firstRoundTxId);
            while (iter.hasNext()) {
                String next = iter.next();
                // Copy every outgoing edge of the visited vertex into the segment.
                for (DefaultEdge edge : iter.getGraph().outgoingEdgesOf(next)) {
                    String source = iter.getGraph().getEdgeSource(edge);
                    String target = iter.getGraph().getEdgeTarget(edge);
                    segment.addVertex(source);
                    segment.addVertex(target);
                    segment.addEdge(source, target, edge);
                }
                if (graph.outDegreeOf(next) != 0) {
                    // If another segment already contains this vertex, the two
                    // overlap: merge and continue with the next first-round tx.
                    Optional<PsGraph> overlappingGraph = result.stream().filter(g -> g.vertexSet().contains(next)).findAny();
                    if (overlappingGraph.isPresent()) {
                        System.out.println("overlapping found at " + next + ", indegree:" + graph.inDegreeOf(next));
                        Graphs.addGraph(overlappingGraph.get(), segment);
                        continue firstRoundTxIdLoop;
                    } else {
                        System.out.println("no overlap");
                    }
                }
            }
            result.add(segment);
        }
        return result;
    }

    /**
     * Loads a PsGraph for txid from the evaluator (maxRounds fixed at 8).
     *
     * @throws RuntimeException on any non-200 response
     */
    public static PsGraph loadFromEvaluator(String txid, String neo4jusername, String neo4jpassword, String evaluatorURL) throws FileNotFoundException, UnsupportedEncodingException, IOException {
        try (CloseableHttpResponse response = fromEvaluator(txid, 8, neo4jusername, neo4jpassword, evaluatorURL)) {
            if (response.getStatusLine().getStatusCode() != 200) {
                throw new RuntimeException("Wrong status code (" + response.getStatusLine() + ")" + " txid=" + txid);
            }
            HttpEntity entity = response.getEntity();
            PsGraph result = new PsGraph();
            try (Scanner scanner = new Scanner(entity.getContent())) {
                addEdges(scanner, result);
            }
            EntityUtils.consume(entity);
            return result;
        }
    }

    /** Reads a PsGraph from a local CSV file previously written by evaluatorToFile. */
    public static PsGraph loadFromFile(String filename) throws FileNotFoundException {
        PsGraph result = new PsGraph();
        File f = new File(filename);
        // Fix: the original never closed this Scanner, leaking the file handle.
        try (Scanner scanner = new Scanner(f)) {
            addEdges(scanner, result);
        }
        return result;
    }

    /**
     * Parses CSV lines of the form "from","to","connections" and adds the
     * vertices plus one edge per connection to graph. Shared by the
     * evaluator and file loaders (previously duplicated inline).
     */
    private static void addEdges(Scanner scanner, PsGraph graph) {
        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();
            String[] fields = line.split(",");
            String fromTxid = fields[0].replace("\"", "");
            String toTxid = fields[1].replace("\"", "");
            String connectionsStr = fields[2].replace("\"", "");
            int connections = Integer.parseInt(connectionsStr);
            graph.addVertex(fromTxid);
            graph.addVertex(toTxid);
            for (int i = 0; i < connections; i++) {
                graph.addEdge(fromTxid, toTxid);
            }
        }
    }
}
|
<filename>src/vuexStore/commonModule/mutations.ts
import * as types from '@/vuexStore/commonModule/mutations_types';

// Vuex mutations for the common module: each handler overwrites one slice
// of shared state with the supplied payload.
export default {
    // Stores the auth token.
    [types.SETTOKEN](state: any, d: any) {
        state.token = d;
    },
    // Stores the logged-in user's profile data.
    [types.SETUSERINFO](state: any, d: any) {
        state.userInfo = d;
    },
    // Stores the WeChat account data.
    [types.SETWXINFO](state: any, d: any) {
        state.wxInfo = d;
    },
};
|
<filename>src/main/java/org/n3r/eql/trans/spring/EqlTransactionInterceptor.java<gh_stars>1-10
package org.n3r.eql.trans.spring;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
/**
 * AOP-alliance interceptor that wraps a method invocation in an Eql
 * transaction: start before the call, commit on normal return, roll back on
 * any throwable, and always end the transaction afterwards.
 */
public class EqlTransactionInterceptor implements MethodInterceptor {
    @Override
    public Object invoke(MethodInvocation invocation) throws Throwable {
        EqlTransactionManager.start();
        try {
            Object retValue = invocation.proceed();
            EqlTransactionManager.commit();
            return retValue;
        } catch (Throwable throwable) {
            // Roll back, then rethrow the original throwable unchanged.
            EqlTransactionManager.rollback();
            throw throwable;
        } finally {
            // Runs on both paths, releasing the transaction context.
            EqlTransactionManager.end();
        }
    }
}
|
package astrionic.adventofcode2020.framework
/** Central path and naming configuration for the Advent of Code 2020 framework. */
private[adventofcode2020] object AdventConfig {
  /**
   * Path to the resource directory which contains files like input, solutions and templates.
   */
  val resourceDirectory = "./src/main/resources"

  /**
   * Path to the directory containing the input files.
   */
  val inputDirectory = s"$resourceDirectory/input/"

  /**
   * Path to the directory to which the solutions will be written as text files.
   */
  val solutionDirectory = s"$resourceDirectory/solutions/"

  /**
   * Path to the template that is used to generate boilerplate code for each day
   */
  val solutionTemplatePath = s"$resourceDirectory/solution_code_template.txt"

  /**
   * Placeholder for the day number used in the template. Each occurrence in the template will be replaced by the number
   * of the current day.
   */
  val templateDayPlaceholder = "__DAY_NUMBER__"

  /**
   * Path to the directory in which the directories containing the solution Scala files are located.
   */
  val solutionDirPath = "./src/main/scala/astrionic/adventofcode2020/solutions/"

  /**
   * Prefix for the directory (and package) that contains the code for a solution. The day number (with leading zeroes)
   * is appended to this.
   */
  val solutionDirNamePrefix = "day"

  /**
   * Suffix used to identify files that are related to part 1.
   */
  val partOneSuffix = "_1"

  /**
   * Suffix used to identify files that are related to part 2.
   */
  val partTwoSuffix = "_2"
}
|
# Install GitHub CLI zsh tab-completion into the system-wide site-functions
# directory (requires write access to /usr/local/share/zsh/site-functions).
gh completion -s zsh > /usr/local/share/zsh/site-functions/_gh
|
#!/bin/bash
# Clean/rename the APC1 gerber files via the shared CleanUpGerbers helper.
# Flag semantics (-o -c -z -n) are defined by renamegerbers.sh itself.
../../../CleanUpGerbers/renamegerbers.sh -o -c -z -n APC1
|
#!/bin/sh
# (Re)creates database $1 with PostGIS extensions, feeding the SQL below to
# psql as the postgres superuser.
# CREATE ROLE vagrant SUPERUSER LOGIN PASSWORD $2;
cat << EOF | sudo -u postgres psql
-- uncomment to reset your DB
-- IF EXISTS keeps the first run from erroring when the DB is absent.
DROP DATABASE IF EXISTS $1;
CREATE DATABASE $1;
\connect $1
CREATE EXTENSION postgis;
CREATE EXTENSION postgis_topology;
EOF
|
#!/usr/bin/env bash
# Launches the full local meshblu stack inside a tmux session: one window
# per service group, panes created by splits, commands injected with
# send-keys. Window/pane numbers are position-dependent — keep the order.
cd $(dirname $0)
SESSION='meshblu'
# _defaults.sh is expected to create the session and define RUN_SERVICE.
. ./_defaults.sh
tmux new-window -t $SESSION:1 -n proxy
tmux new-window -t $SESSION:2 -n frontends
tmux new-window -t $SESSION:3 -n core
tmux new-window -t $SESSION:4 -n firehose
tmux new-window -t $SESSION:5 -n obscure-frontends
tmux new-window -t $SESSION:6 -n core-workers
# Splits: three panes in windows 2 and 3, two panes in window 5.
tmux split-window -t $SESSION:2 -d -p 33
tmux split-window -t $SESSION:2 -d -p 50
tmux split-window -t $SESSION:3 -d -p 33
tmux split-window -t $SESSION:3 -d -p 50
tmux split-window -t $SESSION:5 -d -p 50
tmux send-keys -t $SESSION:1.0 "cd ~/Projects/Octoblu/meshblu-haproxy" C-m
tmux send-keys -t $SESSION:1.0 "eval \$SERVICES/run-service-docker.sh meshblu-haproxy" C-m
tmux send-keys -t $SESSION:2.0 "cd ~/Projects/Octoblu/meshblu-server-http" C-m
tmux send-keys -t $SESSION:2.0 "eval \$SERVICES/$RUN_SERVICE meshblu-server-http" C-m
tmux send-keys -t $SESSION:2.1 "cd ~/Projects/Octoblu/meshblu-server-socket.io-v1" C-m
tmux send-keys -t $SESSION:2.1 "eval \$SERVICES/$RUN_SERVICE meshblu-server-socket.io-v1" C-m
tmux send-keys -t $SESSION:2.2 "cd ~/Projects/Octoblu/meshblu-server-websocket" C-m
tmux send-keys -t $SESSION:2.2 "eval \$SERVICES/$RUN_SERVICE meshblu-server-websocket" C-m
# Three dispatcher instances with distinct worker names.
tmux send-keys -t $SESSION:3.0 "cd ~/Projects/Octoblu/meshblu-core-dispatcher" C-m
tmux send-keys -t $SESSION:3.0 "eval \$SERVICES/$RUN_SERVICE meshblu-core-dispatcher whiskey" C-m
tmux send-keys -t $SESSION:3.1 "cd ~/Projects/Octoblu/meshblu-core-dispatcher" C-m
tmux send-keys -t $SESSION:3.1 "eval \$SERVICES/$RUN_SERVICE meshblu-core-dispatcher tango" C-m
tmux send-keys -t $SESSION:3.2 "cd ~/Projects/Octoblu/meshblu-core-dispatcher" C-m
tmux send-keys -t $SESSION:3.2 "eval \$SERVICES/$RUN_SERVICE meshblu-core-dispatcher foxtrot" C-m
tmux send-keys -t $SESSION:4.0 "cd ~/Projects/Octoblu/meshblu-core-firehose-socket.io" C-m
tmux send-keys -t $SESSION:4.0 "eval \$SERVICES/$RUN_SERVICE meshblu-core-firehose-socket.io" C-m
tmux send-keys -t $SESSION:5.0 "cd ~/Projects/Octoblu/meshblu-core-protocol-adapter-xmpp" C-m
tmux send-keys -t $SESSION:5.0 "eval \$SERVICES/$RUN_SERVICE meshblu-core-protocol-adapter-xmpp" C-m
tmux send-keys -t $SESSION:5.1 "cd ~/Projects/Octoblu/meshblu-core-protocol-adapter-http-streaming" C-m
tmux send-keys -t $SESSION:5.1 "eval \$SERVICES/$RUN_SERVICE meshblu-core-protocol-adapter-http-streaming" C-m
tmux send-keys -t $SESSION:6.0 "cd ~/Projects/Octoblu/meshblu-core-worker-webhook" C-m
tmux send-keys -t $SESSION:6.0 "eval \$SERVICES/$RUN_SERVICE meshblu-core-worker-webhook" C-m
tmux select-window -t $SESSION:1
tmux attach-session -t $SESSION
|
#!/bin/bash
# ----------
# SPDX-License-Identifier: Apache-2.0
# ----------
#
# Builds the local MSP layout for the trading org: writes the NodeOUs
# config.yaml, installs the org admin cert plus config.yaml into every
# identity's MSP, and (for TLS-enabled identities) mirrors the TLS CA certs
# and normalizes the generated *_sk TLS key to key.pem.
echo "NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/rca-trading-blockchain-biz-6005.pem
    OrganizationalUnitIdentifier: client
  MemberOUIdentifier:
    Certificate: cacerts/rca-trading-blockchain-biz-6005.pem
    OrganizationalUnitIdentifier: member
  PeerOUIdentifier:
    Certificate: cacerts/rca-trading-blockchain-biz-6005.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/rca-trading-blockchain-biz-6005.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/rca-trading-blockchain-biz-6005.pem
    OrganizationalUnitIdentifier: orderer" > ./users/config.yaml
export ORG_NAME=trading
# The admin identity only needs its own cert mirrored into admincerts.
echo "admincerts > admin"
mkdir -p ./users/admin/msp/admincerts
cp ./users/admin/msp/signcerts/cert.pem ./users/admin/msp/admincerts/${ORG_NAME}-admin-cert.pem
cp ./users/config.yaml ./users/admin/msp/
# install_msp <dir> <with-key|no-key>
# Installs the admin cert, TLS CA certs and config.yaml into <dir>/msp;
# with-key additionally copies the *_sk TLS private key to key.pem
# (orderers need the normalized name, user identities do not).
install_msp() {
  local place=$1
  local key_mode=$2
  echo "admincerts > ${place}"
  mkdir -p ${place}/msp/admincerts
  cp ./users/admin/msp/signcerts/cert.pem ${place}/msp/admincerts/${ORG_NAME}-admin-cert.pem
  mkdir -p ${place}/msp/tlscacerts
  cp ${place}/tls-msp/tlscacerts/*.pem ${place}/msp/tlscacerts/
  if [ "${key_mode}" = "with-key" ]; then
    cp ${place}/tls-msp/keystore/*_sk ${place}/tls-msp/keystore/key.pem
  fi
  cp ./users/config.yaml ${place}/msp/
}
# - - -
install_msp ./users/user1 no-key
# ==============================================
install_msp ./users/orderer0 with-key
install_msp ./users/orderer1 with-key
install_msp ./users/orderer2 with-key
# - - -
echo "Remove config.yaml base"
rm ./users/config.yaml
#
echo " "
echo "END"
|
/**
 * Returns the elements of arr1 that also appear in arr2, preserving arr1's
 * order (duplicates in arr1 are kept, as in the original implementation).
 * Uses a Set for O(n + m) work instead of the original O(n * m)
 * `includes`-in-a-loop scan; Set.has and Array.includes share SameValueZero
 * semantics, so results (including NaN handling) are identical.
 * @param {Array} arr1 - values to filter
 * @param {Array} arr2 - values to keep
 * @returns {Array} values of arr1 present in arr2
 */
function intersection(arr1, arr2) {
  const lookup = new Set(arr2);
  const intersection = [];
  for (const value of arr1) {
    if (lookup.has(value)) {
      intersection.push(value);
    }
  }
  return intersection;
}
console.log(intersection([2, 3, 5, 6, 8], [3, 4, 5, 10]));
// Output: [3, 5]
|
const gulp = require('gulp');
const concat = require('gulp-concat');

// Concatenates the build output's JS bundles into a single vdr.js and fans
// it out to the dist package plus the mapped dev (a:) and uat (b:) drives.
gulp.task('build-script', function () {
    return gulp
        .src('./build/static/js/*.js')
        .pipe(concat('vdr.js'))
        .pipe(
            gulp.dest('./dist/VirtualDocumentRoomModules/Files/SiteAssets/js')
        )
        .pipe(gulp.dest('a:/VDRoom/SiteAssets/js')) //dev
        .pipe(gulp.dest('b:/VDRoom/SiteAssets/js')); //uat
});

// Same fan-out for the CSS bundles, concatenated into vdr.css.
gulp.task('build-css', function () {
    return gulp
        .src('./build/static/css/*.css')
        .pipe(concat('vdr.css'))
        .pipe(
            gulp.dest('./dist/VirtualDocumentRoomModules/Files/SiteAssets/css')
        )
        .pipe(gulp.dest('a:/VDRoom/SiteAssets/css')) //dev
        .pipe(gulp.dest('b:/VDRoom/SiteAssets/css')); //uat
});

// Default target runs both builds concurrently.
gulp.task('default', gulp.parallel(['build-script', 'build-css']));
|
#!/usr/bin/env bash
# CI build/test script: -e aborts on first failure, -x echoes commands.
set -ex
# Incorporate TARGET env var to the build and test process
cargo build --target "$TARGET" --verbose
# We cannot run arm executables on linux
if [[ $TARGET != arm-unknown-linux-gnueabihf ]]; then
    cargo test --target "$TARGET" --verbose
fi
|
<reponame>deokgoo/project-2021_mandala_plan<gh_stars>0
import { useStore } from 'react-redux';
import { useEffect, useState } from 'react';
import {
dreamType,
mandalaDreamType,
mandalaStatusType,
mandalaUnitType,
stateType as mandalaStoreState
} from '../reducer/type';
// Hook that snapshots one dream slice from the redux store: the core dream
// when `isCore` is set, otherwise the side dream selected by `dreamNum`
// (looked up dynamically as `dream${dreamNum}` on the reducer state).
const useMandalaDreamSelector = ({isCore, dreamNum}: mandalaStatusType) => {
const store = useStore();
// Snapshot of the store data; stays undefined until the effect runs.
const [initialData, setInitialData] = useState<mandalaDreamType>();
useEffect(() => {
// NOTE(review): declared to return { core; side } but both branches return
// a single slice that is then stored as mandalaDreamType — the annotation
// looks wrong rather than the code; confirm against the reducer types.
const getCurrentData = (): { core: mandalaUnitType; side: dreamType } => {
const { mandalaReducer }: {mandalaReducer: mandalaStoreState} = store.getState();
if(isCore) {
return mandalaReducer.dreamCore;
} else {
// Dynamic key access (dream1, dream2, ...) defeats static typing.
// @ts-ignore
return mandalaReducer[`dream${dreamNum}`];
}
}
setInitialData(getCurrentData());
}, [isCore, dreamNum, store]);
return {
initialData,
}
}
export default useMandalaDreamSelector;
|
#!/bin/bash
# Build, zip, and copy the custom lambda bundle.
# Abort on the first failing command so a broken build is never zipped
# or copied (the original kept going after any failure).
set -euo pipefail

# Resolve the directory containing this script so it can be run from anywhere.
SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
# Quote the path: a space anywhere in the checkout path would otherwise
# word-split the cd argument and run the build in the wrong directory.
cd "$SCRIPT_DIR/../lambda/custom"
yarn build
yarn zip
yarn copy-build
|
#!/bin/env bash
# Builds an x86_64-elf cross toolchain (binutils + GCC) under ./toolchain/cross,
# installing distro build dependencies first.
set -e
export BINUTILSVERSION="2.33.1"
export GCCVERSION="10.1.0"
dirname=$(pwd)
result="${dirname%"${dirname##*[!/]}"}" # extglob-free multi-trailing-/ trim
result="${result##*/}" # remove everything before the last /
echo "Current directory: $(pwd)"
# Quoted comparison: an empty or space-containing basename would otherwise
# break the test (the original left $result unquoted).
if [ "$result" != "toolchain" ]; then
echo "Entering directory \`toolchain\` "
cd toolchain
fi
export PREFIX="$(pwd)/cross"
export TARGET=x86_64-elf
export PATH="$PREFIX/bin:$PATH"
mkdir -p cross
rm -r -f *.xz
echo "PREFIX: $PREFIX, TARGET: $TARGET"
# Clear any stale build trees from a previous run.
if test -r "build-gcc"; then
rm -r build-gcc
fi
if test -r "build-binutils"; then
rm -r build-binutils
fi
# Let's get the OS:
if [ -f /etc/os-release ]; then
# freedesktop.org and systemd
. /etc/os-release
OS=$NAME
VER=$VERSION_ID
elif type lsb_release >/dev/null 2>&1; then
# linuxbase.org
OS=$(lsb_release -si)
VER=$(lsb_release -sr)
elif [ -f /etc/lsb-release ]; then
# For some versions of Debian/Ubuntu without lsb_release command
. /etc/lsb-release
OS=$DISTRIB_ID
VER=$DISTRIB_RELEASE
elif [ -f /etc/debian_version ]; then
# Older Debian/Ubuntu/etc.
OS=Debian
VER=$(cat /etc/debian_version)
else
# Fall back to uname, e.g. "Linux <version>", also works for BSD, etc.
OS=$(uname -s)
VER=$(uname -r)
fi
echo "$OS" "$VER"
if [ "$OS" == "Ubuntu" ] ||[ "$OS" == "Debian" ]; then
echo -e "\e[32mInstalling dependencies...\e[0m"
apt-get install make build-essential bison flex libgmp3-dev libmpc-dev libmpfr-dev texinfo wget gcc binutils
elif [ "$OS" == "Arch Linux" ]; then
echo -e "\e[32mInstalling dependencies...\e[0m"
pacman -Sy base-devel gmp libmpc mpfr wget
else
# Bug fixed: original used `echo "\e[332m..."` — no -e flag (so the escape
# printed literally) and an invalid SGR code 332; 31 is red.
echo -e "\e[31mUnknown Distro\e[0m"
exit 1
fi
echo -e "\e[32mDownloading Binutils\e[0m"
wget https://ftp.gnu.org/gnu/binutils/binutils-$BINUTILSVERSION.tar.xz
echo -e "\e[32mDownloading GCC\e[0m"
wget ftp://ftp.gnu.org/gnu/gcc/gcc-$GCCVERSION/gcc-$GCCVERSION.tar.xz
echo -e "\e[32mUnpacking...\e[0m"
tar -xvf binutils-$BINUTILSVERSION.tar.xz
tar -xvf gcc-$GCCVERSION.tar.xz
mkdir build-binutils
mkdir build-gcc
cd build-binutils
echo -e "\e[32mBuilding binutils\e[0m"
../binutils-$BINUTILSVERSION/configure --target=$TARGET --prefix="$PREFIX" --with-sysroot --disable-nls --disable-werror
sudo make all -j
sudo make install -j
echo -e "\e[32mBuilding GCC\e[0m"
cd ..
cd build-gcc
# Build libgcc variants without the red zone (required for kernel code).
echo "MULTILIB_OPTIONS += mno-red-zone" > ../gcc-$GCCVERSION/gcc/config/i386/t-x86_64-elf
echo "MULTILIB_DIRNAMES += no-red-zone" >> ../gcc-$GCCVERSION/gcc/config/i386/t-x86_64-elf
../gcc-$GCCVERSION/configure --prefix="$PREFIX" --target="$TARGET" --with-sysroot --disable-nls --enable-languages=c --with-newlib
sudo make -j all-gcc
sudo make -j all-target-libgcc
sudo make -j install-gcc
sudo make -j install-target-libgcc
echo "Done"
|
<filename>app/models/comfy/cms/site.rb
# CMS site model: scopes all content (layouts, pages, snippets, files,
# categories) under a hostname plus an optional path prefix, with optional
# structure mirroring between sites.
class Comfy::Cms::Site < ActiveRecord::Base
self.table_name = 'comfy_cms_sites'
# -- Relationships --------------------------------------------------------
with_options :dependent => :destroy do |site|
site.has_many :layouts
site.has_many :pages
site.has_many :snippets
site.has_many :files
site.has_many :categories
end
# -- Callbacks ------------------------------------------------------------
before_validation :assign_identifier,
:assign_hostname,
:assign_label
before_save :clean_path
after_save :sync_mirrors
# -- Validations ----------------------------------------------------------
validates :identifier,
:presence => true,
:uniqueness => true,
:format => { :with => /\A\w[a-z0-9_-]*\z/i }
validates :label,
:presence => true
validates :hostname,
:presence => true,
:uniqueness => { :scope => :path },
:format => { :with => /\A[\w\.\-]+(?:\:\d+)?\z/ }
# -- Scopes ---------------------------------------------------------------
scope :mirrored, -> { where(:is_mirrored => true) }
# -- Class Methods --------------------------------------------------------
# returning the Comfy::Cms::Site instance based on host and path
def self.find_site(host, path = nil)
# Fast path: with a single site there is nothing to disambiguate.
return Comfy::Cms::Site.first if Comfy::Cms::Site.count == 1
cms_site = nil
Comfy::Cms::Site.where(:hostname => real_host_from_aliases(host)).each do |site|
if site.path.blank?
# A path-less site is only a fallback; keep looking for a path match.
cms_site = site
elsif "#{path.to_s.split('?')[0]}/".match /^\/#{Regexp.escape(site.path.to_s)}\//
# A path-prefix match wins over the fallback.
cms_site = site
break
end
end
return cms_site
end
# -- Instance Methods -----------------------------------------------------
# Protocol-relative public URL of the site (duplicate slashes collapsed).
def url
public_cms_path = ComfortableMexicanSofa.config.public_cms_path || '/'
'//' + [self.hostname, public_cms_path, self.path].join('/').squeeze('/')
end
# When removing entire site, let's not destroy content from other sites
# Since before_destroy doesn't really work, this does the trick
def destroy
self.update_attributes(:is_mirrored => false) if self.is_mirrored?
super
end
protected
# Maps an aliased hostname back to its canonical host using
# ComfortableMexicanSofa.config.hostname_aliases; returns the input host
# unchanged when no alias matches.
def self.real_host_from_aliases(host)
if aliases = ComfortableMexicanSofa.config.hostname_aliases
aliases.each do |alias_host, aliases|
return alias_host if aliases.include?(host)
end
end
host
end
# Default the identifier from a slugified hostname when blank.
def assign_identifier
self.identifier = self.identifier.blank?? self.hostname.try(:slugify) : self.identifier
end
# Default the hostname from the identifier when not set.
def assign_hostname
self.hostname ||= self.identifier
end
# Default the label from a titleized identifier when blank.
def assign_label
self.label = self.label.blank?? self.identifier.try(:titleize) : self.label
end
# Normalize path: collapse duplicate slashes and drop a trailing slash.
def clean_path
self.path ||= ''
self.path.squeeze!('/')
self.path.gsub!(/\/$/, '')
end
# When site is marked as a mirror we need to sync its structure
# with other mirrors.
def sync_mirrors
return unless is_mirrored_changed? && is_mirrored?
[self, Comfy::Cms::Site.mirrored.where("id != #{id}").first].compact.each do |site|
(site.layouts(:reload).roots + site.layouts.roots.map(&:descendants)).flatten.map(&:sync_mirror)
(site.pages(:reload).roots + site.pages.roots.map(&:descendants)).flatten.map(&:sync_mirror)
site.snippets(:reload).map(&:sync_mirror)
end
end
end
|
/* **** Notes
Open.
//*/
# define CAR
# include <io.h>
# include <conio.h>
# include <stdio.h>
# include <stdlib.h>
# include <time.h>
# include <fcntl.h>
# include <sys/types.h>
# include <sys/stat.h>
# include <errno.h>
# include "../../../incl/config.h"
/* Dispatches a file-open call to either open() (arg == 0) or _open()
 * (any non-zero arg, clamped to 1 below) and returns the opener's result.
 * Returns 0 immediately when path is NULL.
 * NOTE(review): 0 doubles as the NULL-path "error" value, yet it is also a
 * valid fd / success value for the wrapped openers — confirm callers treat
 * the NULL-path case specially. */
signed(__cdecl openf(signed short(arg),signed char(*path),signed(mode),signed(permission))) {
auto signed(__cdecl*f)(signed char(*path),signed(mode),signed(permission));
/* Dispatch table: slot 0 = open, slot 1 = _open; trailing 0 is a sentinel. */
auto signed(__cdecl*(fn[]))(signed char(*path),signed(mode),signed(permission)) = {
open,
_open,
0x00,
};
if(!path) return(0x00);
/* Clamp any non-zero selector to 1 so it stays inside the table. */
if(arg) arg = (0x01);
f = (*(arg+(fn)));
return(f(path,mode,permission));
}
|
# Category model backed by the legacy `cat_id` primary-key column.
class Categ < ActiveRecord::Base
# Well-known category identifiers.
CAT_1 = 0
CAT_2 = 1

self.primary_key = :cat_id

has_many :subcategs, :foreign_key => :fk_subcat_id

# Explicit reader for the primary-key attribute
# (required for Rails 3.2 compatibility).
def cat_id
self[:cat_id]
end
end
|
#!/bin/bash
# __BEGIN_LICENSE__
# Copyright (c) 2009-2012, United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The NGT platform is licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# __END_LICENSE__
# Positional arguments: orthoproject binary, input tile, camera cube file,
# bundle-adjust prefix, meters-per-pixel, and output directory.
ORTHOPROJECT=$1
tile=$2
cub=$3
adj=$4
mpp=$5
outDir=$6
# Strip the directory and the extension from the tile path to get a bare
# prefix. Bug fixed: the original used `perl -pi` on a pipe — the -i
# (in-place edit) flag is meaningless on STDIN and makes perl warn; plain
# -p filters are correct here. Variables are quoted against spaces.
tilePrefix=$(echo "$tile" | perl -pe "s#^.*\/##g" | perl -pe "s#\..*?\$##g")
outputImg="$outDir/$tilePrefix.tif"
$ORTHOPROJECT --mark-no-processed-data --mpp "$mpp" "$tile" "$cub" "$adj" "$outputImg"
|
/**
* @class RR
*
* @brief Round Robin algorithm simulation class
*
* This Class is responsible for receiving a std::vector of Processes
* and run the RR algorithm over the input. The user can request the
* average response time, the average return time and the average wait
* time using getters or the toString method.
*
* @note See Process Class implementation to create the std::vector.
*
* @author <NAME> <<EMAIL>>
*/
#ifndef RR_h_
#define RR_h_
#include <vector>
#include <string>
#include <sstream>
#include <iomanip>
#include <algorithm>
#include "Process.h"
#include "Utils.h"
class RR {
std::vector<Process> _queue; ///< The Process queue that will be executed under the algorithm.
std::vector<Process> _waiting; ///< The Sorted Process queue that will be executed under the algorithm.
static const unsigned int quantum = 2; ///< The quantum used for every process execution
double _avgResponse; ///< Average response time.
double _avgReturn; ///< Average return time.
double _avgWait; ///< Average waiting time.
/**
* @brief Remove an amount of elements from the std::vector.
* @details Removes the elements based on the distance parameter, which means
* that it will remove from the first to first + distance elements.
*
* @param vector The std::vector that the element will be taken from.
* @param int The distance from the beginning to remove.
*/
void removeFromVector(std::vector<Process>& vector, unsigned int distance);
/**
* @brief Calculate the average time.
* @details Calculate the average time and populate the
* double variables so the user can access the resulting
* data.
*/
void calculateAverageTime();
public:
/**
* @brief Class constructor.
* @details Initialize the class attributes.
*
* @param processes Process queue to be executed under the algorithm.
*/
RR(std::vector<Process> processes);
/**
* @brief Class destructor.
* @details Does base C++ cleanup.
*/
~RR();
/**
* @brief Called to run the algorithm.
* @details Calls the calculateAverageTime member function which is responsible
* for executing the algorithm.
*/
void init();
/**
* @brief Returns the average response time.
*
* @return The average response time.
*/
double getAverageResponse() const;
/**
* @brief Returns the average return time.
*
* @return The average return time.
*/
double getAverageReturn() const;
/**
* @brief Returns the average waiting time.
*
* @return The average waiting time.
*/
double getAverageWait() const;
/**
* @brief Return a formatted string with the algorithm average time.
* @details The string returned will be as follow: RR X.X X.X X.X
* where the first result is the average return time, the second is the
* average response time and the third is the average waiting time.
*
* @return A formatted string.
*/
std::string toString() const;
};
#endif // RR_h_
|
package testclasses;
import array.Vector;
import array.vector.VectorImplementation;
import heap.Heap;
import heap.implementation.ArrayHeap;
/**
 * Manual smoke test for the array-backed max-heap and the
 * heapify-from-vector constructor. Expected state after each operation is
 * noted inline so the printed output can be checked by eye.
 */
public class HeapTest {
    public static void main(String[] args) {
        try {
            Heap<Integer> maxHeap = new ArrayHeap<>();
            System.out.println("IsEmpty: " + maxHeap.isEmpty());

            // Insert a handful of values out of order.
            maxHeap.insert(5);
            maxHeap.insert(10);
            maxHeap.insert(3);
            maxHeap.insert(2);
            maxHeap.insert(20);
            maxHeap.insert(4);

            // expected: 20,10,4,2,5,3
            System.out.println(maxHeap.toString());
            System.out.println("Size: " + maxHeap.getSize());
            System.out.println("IsEmpty: " + maxHeap.isEmpty());
            System.out.println("GetMax: " + maxHeap.getMax());
            System.out.println(maxHeap.toString());
            System.out.println("ExtractMax: " + maxHeap.extractMax());
            System.out.println(maxHeap.toString());
            System.out.println("Size: " + maxHeap.getSize());
            System.out.println("Remove index 1 value: " + maxHeap.remove(1, Integer.MAX_VALUE));
            System.out.println(maxHeap.toString());
            System.out.println("Size: " + maxHeap.getSize());
            System.out.println("Heapify: ");
            maxHeap.heapify();
            System.out.println(maxHeap.toString());
            System.out.println("------------");

            // Second scenario: build a heap directly from a vector of values.
            Vector<Integer> values = new VectorImplementation<>();
            values.push(3);
            values.push(8);
            values.push(1);
            values.push(10);
            values.push(6);
            values.push(2);
            values.push(5);
            System.out.println("Vector: " + values.toString());
            Heap<Integer> heapFromVector = new ArrayHeap<>(values);
            System.out.println("After heapify: " + heapFromVector.toString());
            System.out.println("Size: " + heapFromVector.getSize());
        } catch (Exception e) {
            System.out.println(e);
        }
    }
}
|
#!/bin/bash
# Prepare the MySQL runtime directories and make sure /etc/mysql/my.cnf
# contains the standard section headers.
# Create necessary directories
mkdir -p /var/lib/mysql
mkdir -p /var/run/mysqld
mkdir -p /var/log/mysql
# Append the section headers only when [mysqld] is not present yet:
# the original unconditional append duplicated the sections on every run.
if ! grep -q '^\[mysqld\]' /etc/mysql/my.cnf 2>/dev/null; then
cat <<EOT >> /etc/mysql/my.cnf
[client]
[mysql]
[mysqld]
EOT
fi
|
# Hyper-parameter sweep for the word-level LSTM trainer: iterates over
# learning rate, LSTM stack depth, hidden size, (bi)directionality,
# end-mask usage, and dataset version, appending all output to a shared log.
for lr in 1e-4 ; do
for num_lstms in 1; do
for hidden_size in 128; do
# An empty string means "flag absent", so each boolean flag is swept on/off.
for bidirectional in --bidirectional ""; do
for end_mask in --end_mask ""; do
for dataset_ver in 0 25 50 75; do
echo lr = $lr
echo num_lstms = $num_lstms
echo hidden_size = $hidden_size
echo dataset_ver = $dataset_ver
echo bidirectional = $bidirectional
# Embedding size is tied to the hidden size; tee -a appends to the log so
# successive grid points accumulate in one file.
python -u scripts/train_evaluate_wlstm.py --dataset_ver $dataset_ver --num_epochs 200 --save_epochs 50 --compute_baseline --batch_size 64 \
--embedding_size $hidden_size --hidden_size $hidden_size \
--num_layers 1 --num_lstms $num_lstms --lr $lr $bidirectional $end_mask | tee -a logs/train_wlstm_complete.txt
done
done
done
done
done
done
|
#!/bin/bash
# Publishes a dev build of the knative-camel-operator OLM manifest to Quay:
# bumps the manifest to a timestamped version, rewrites the CSV and package
# YAML in place, then pushes the bundle with operator-courier.
# Requires QUAY_USERNAME / QUAY_PASSWORD in the environment.
set -o pipefail
set -eu
location=$(dirname $0)
PACKAGE=knative-camel-operator-dev
BASE_VERSION=0.15.0
# Renames the single versioned manifest directory to $1 and updates the
# version / replaces / createdAt / name fields in the CSV and package YAML.
update_manifest() {
local version=$1
local manifest_dir=$location/deploy/olm-catalog/knative-camel-operator-dev
echo "Renaming manifest dir"
old_version=""
# The catalog is expected to hold exactly one version directory; remember
# the previous version so the CSV "replaces" field can chain to it.
for dir in $manifest_dir/*/; do
dir=${dir%*/}
current=$(basename $dir)
if [ "$current" != "$version" ]; then
old_version=$current
mv $dir $manifest_dir/$version
fi
done
echo "Old version was $old_version"
echo "Updating Manifest"
sed -i "s/^  version\: .*$/  version: $version/" $location/deploy/olm-catalog/knative-camel-operator-dev/$version/knative-camel-operator.clusterserviceversion.yaml
sed -i "s/^  replaces\: .*$/  replaces: knative-camel-operator.v$old_version/" $location/deploy/olm-catalog/knative-camel-operator-dev/$version/knative-camel-operator.clusterserviceversion.yaml
sed -i "s/^    createdAt\: .*$/    createdAt: \"$(date --iso-8601=seconds)\"/" $location/deploy/olm-catalog/knative-camel-operator-dev/$version/knative-camel-operator.clusterserviceversion.yaml
sed -i "s/^\(.*\)name\: knative-camel-operator\.v.*$/\1name: knative-camel-operator.v$version/" $location/deploy/olm-catalog/knative-camel-operator-dev/$version/knative-camel-operator.clusterserviceversion.yaml
sed -i "s/^\(.*\)currentCSV\: knative-camel-operator\.v.*$/\1currentCSV: knative-camel-operator.v$version/" $location/deploy/olm-catalog/knative-camel-operator-dev/knative-camel-operator.package.yaml
}
# Dev versions are the base version plus a second-resolution timestamp.
new_version=$BASE_VERSION-$(date '+%Y%m%d%H%M%S')
update_manifest $new_version
# Exchange Quay credentials for an API token, then push the manifest.
export AUTH_TOKEN=$(curl -sH "Content-Type: application/json" -XPOST https://quay.io/cnr/api/v1/users/login -d '{"user": {"username": "'"${QUAY_USERNAME}"'", "password": "'"${QUAY_PASSWORD}"'"}}' | jq -r '.token')
operator-courier --verbose push deploy/olm-catalog/${PACKAGE}/ ${QUAY_USERNAME} ${PACKAGE} ${new_version} "$AUTH_TOKEN"
|
#!/bin/bash
# Fix ownership of the Elasticsearch data directory, skipping the chown
# when ownership is already correct (avoids touching a potentially large
# directory tree on every run).
current_owner=$(stat -c %U:%G /var/lib/elasticsearch/data)
if [[ "$current_owner" != "elasticsearch:elasticsearch" ]]; then
sudo chown elasticsearch: /var/lib/elasticsearch/data
fi
|
/**
 * Logs and returns the bet decision string for the current count.
 * With count fixed at 0 the result is "0 Hold".
 *
 * Bug fixed: the conditional operator binds looser than `+`, so the
 * original `count + ' ' + (count > 0) ? 'Bet' : 'Hold'` used the whole
 * concatenated (always-truthy) string as the ternary condition and
 * returned 'Bet' unconditionally. Parentheses restore the intent.
 *
 * @returns {string} `"<count> Bet"` when count > 0, otherwise `"<count> Hold"`.
 */
function check_brackets() {
  const count = 0;
  const res = `${count} ${count > 0 ? 'Bet' : 'Hold'}`;
  console.log(res);
  return res;
}
check_brackets();
|
package heist
import com.google.api.client.googleapis.auth.oauth2.{GoogleAuthorizationCodeFlow, GoogleCredential}
import com.google.api.client.auth.oauth2.{TokenResponse, Credential}
import com.google.api.client.http.HttpTransport
import com.google.api.client.http.javanet.NetHttpTransport
import com.google.api.client.json.JsonFactory
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.services.drive.{ Drive, DriveScopes }
import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets
import scala.collection.JavaConversions._
import java.nio.file.{ FileSystems, Files, Path }
import java.nio.charset.StandardCharsets.UTF_8
import java.io.{ IOException, FileReader }
import cats._, cats.data.Xor, cats.std.all._
/** Google OAuth2 helper: obtains Drive/Sheets credentials, caching the
  * access and refresh tokens on disk under `private/`. */
object Auth {
// Scopes requested for the credential: full Drive plus the legacy
// spreadsheets feed (which is not part of DriveScopes).
val scopes = List(DriveScopes.DRIVE, DocScopes.SHEETS)
object DocScopes {
val SHEETS = "https://spreadsheets.google.com/feeds"
}
/** Returns a credential, reusing the tokens saved on disk when readable;
  * otherwise runs the interactive console flow and persists the result. */
def simple: Credential = {
import Disk._
token.fold(_ => (requestTokens _ andThen saveTokens)(secrets), tokensAsCredentials(secrets))
}
/** Interactive flow: prints an authorization URL, reads the code from
  * stdin, and exchanges it for an offline (refreshable) credential. */
def requestTokens(secrets: GoogleClientSecrets) = {
val flow = new GoogleAuthorizationCodeFlow.Builder(httpTransport, jsonFactory, secrets, scopes)
.setAccessType("offline")
.setApprovalPrompt("force")
.build()
val url = flow.newAuthorizationUrl().setRedirectUri(secrets.redirectUri).build()
print(s"\nVisit this URL and enter the code you are given:\n\t$url\n\nEnter Code: ")
val code = io.StdIn.readLine
val tokenResponse = flow.newTokenRequest(code).setRedirectUri(secrets.redirectUri).execute()
val credential = new GoogleCredential.Builder()
.setTransport(httpTransport)
.setJsonFactory(jsonFactory)
.setClientSecrets(secrets)
.build()
.setFromTokenResponse(tokenResponse)
credential
}
/** Wraps an already-obtained token response in a GoogleCredential. */
def tokensAsCredentials(secrets: GoogleClientSecrets)(tokenResponse: TokenResponse) =
new GoogleCredential.Builder()
.setTransport(httpTransport)
.setJsonFactory(jsonFactory)
.setClientSecrets(secrets)
.build()
.setFromTokenResponse(tokenResponse)
// Convenience accessor for the first registered redirect URI.
implicit class SecretsOps(secrets: GoogleClientSecrets) {
def redirectUri = secrets.getDetails().getRedirectUris().head
}
/** On-disk persistence for the client secrets and cached tokens. */
private object Disk {
val fs = FileSystems.getDefault
val clientJsonFile = fs.getPath("private/client_secret.json")
val accessFile = fs.getPath("private/access")
val refreshFile = fs.getPath("private/refresh")
lazy val secrets = GoogleClientSecrets.load(jsonFactory, new FileReader(clientJsonFile.toFile))
// Reads the saved access+refresh tokens; Left on any IO failure.
def token: Throwable Xor TokenResponse =
for {
access <- read(accessFile)
refresh <- read(refreshFile)
} yield new TokenResponse().setAccessToken(access).setRefreshToken(refresh)
private def read(path: Path): Throwable Xor String = Xor.fromTryCatch[IOException] {
Files.readAllLines(path, UTF_8).mkString
}
// Persists both tokens and returns the credential unchanged.
def saveTokens(c: Credential): Credential = {
val aPath = Files.write(accessFile, Seq(c.getAccessToken), UTF_8)
val rPath = Files.write(refreshFile, Seq(c.getRefreshToken), UTF_8)
c
}
}
lazy val httpTransport = new NetHttpTransport()
lazy val jsonFactory = new JacksonFactory()
}
|
//Create an index.html file and include the necessary files
<html>
<head>
<title>Current Weather Information</title>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500" />
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons" />
<link rel="manifest" href="/manifest.webmanifest">
</head>
<body>
<div id="weather-information">
<!--Weather Information will be added here-->
</div>
//Create a script.js file with the necessary functions
// Fetch current conditions and render them into #weather-information.
fetch("api_endpoint_url") //Call the API
.then(result => result.json())
.then(data => {
//get the current temperature, wind speed, and humidity
let temp = data.main.temp;
let windSpeed = data.wind.speed;
// Bug fixed: the variable was declared as `humiditiy` while the template
// below interpolates `humidity`, which threw a ReferenceError at runtime.
let humidity = data.main.humidity;
//append them to the div element with weather-information id created in index.html
document.getElementById("weather-information").innerHTML =
`<p>Temperature: ${temp}</p>
<p>Wind Speed: ${windSpeed}</p>
<p>Humidity: ${humidity}</p>`;
});
//Create a manifest file
{
"name": "Current Weather Information",
"short_name": "Weather Info",
"start_url": "/index.html",
"background_color": "#3F51B5",
"theme_color": "#3F51B5",
"display": "standalone",
"icon": "logo.svg"
}
|
// On extension install/update, open the workbench-enhancer page in a new tab.
chrome.runtime.onInstalled.addListener(function (object) {
  chrome.tabs.create({ url: "http://jrod.in/workbench-enhancer" }, function (tab) {
    // Bug fixed: the log hard-coded "http://yoursite.com/", which did not
    // match the URL actually opened above.
    console.log("New tab launched with http://jrod.in/workbench-enhancer");
  });
});
|
<filename>src/store/note/actions.ts<gh_stars>0
import { ActionTree } from "vuex"
import { StateInterface } from "../index"
import { NoteStateInterface } from "./state"
import { api } from "boot/axios"
import { Notify } from "quasar"
// Vuex actions for the note module.
const actions: ActionTree<NoteStateInterface, StateInterface> = {
// Fetches the current user's notes and commits them via the "setNotes"
// mutation. Any failure is reported with a generic error toast; the error
// itself is intentionally not rethrown.
getNotes({ commit }) {
// JWT from localStorage; falls back to "" (the request will then simply
// fail authorization and hit the catch below).
const jwt: string = localStorage.getItem("jwt") || ""
void api
.get("/notes", { headers: { Authorization: "Bearer " + jwt } })
.then((response) => {
commit("setNotes", response.data)
})
.catch(() => {
Notify.create({
color: "negative",
position: "top",
message: "Something went wrong",
icon: "report_problem",
})
})
},
}
export default actions
|
// Generated by script, don't edit it please.
// Wraps the legacy Soundcloud SVG glyph as an accessible rsuite icon component.
import createSvgIcon from '../../createSvgIcon';
import SoundcloudSvg from '@rsuite/icon-font/lib/legacy/Soundcloud';
const Soundcloud = createSvgIcon({
as: SoundcloudSvg,
ariaLabel: 'soundcloud',
category: 'legacy',
displayName: 'Soundcloud'
});
export default Soundcloud;
|
<filename>sitewhere-java-model/src/main/java/com/sitewhere/rest/model/microservice/MicroserviceSummary.java
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.rest.model.microservice;
import com.sitewhere.spi.microservice.IMicroserviceSummary;
/**
* Provides a subset of information stored in the SiteWhere microservice CRD.
*/
public class MicroserviceSummary implements IMicroserviceSummary {
/** Id */
private String id;
/** Name */
private String name;
/** Description */
private String description;
/** Functional area */
private String functionalArea;
/** Icon displayed in UI */
private String icon;
/** Multitenant flag */
private boolean multitenant;
/** Tag for Docker image */
private String dockerImageTag;
/** Debug enabled flag */
private boolean debugEnabled;
/** Debug JDWP port */
private int debugJdwpPort;
/** Debug JMX port */
private int debugJmxPort;
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getId()
*/
@Override
public String getId() {
return id;
}
/** @param id the microservice id */
public void setId(String id) {
this.id = id;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getName()
*/
@Override
public String getName() {
return name;
}
/** @param name the human-readable name */
public void setName(String name) {
this.name = name;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getDescription()
*/
@Override
public String getDescription() {
return description;
}
/** @param description the microservice description */
public void setDescription(String description) {
this.description = description;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getFunctionalArea()
*/
@Override
public String getFunctionalArea() {
return functionalArea;
}
/** @param functionalArea the functional area identifier */
public void setFunctionalArea(String functionalArea) {
this.functionalArea = functionalArea;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getIcon()
*/
@Override
public String getIcon() {
return icon;
}
/** @param icon the icon displayed in the UI */
public void setIcon(String icon) {
this.icon = icon;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#isMultitenant()
*/
@Override
public boolean isMultitenant() {
return multitenant;
}
/** @param multitenant whether the microservice is multitenant */
public void setMultitenant(boolean multitenant) {
this.multitenant = multitenant;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getDockerImageTag()
*/
@Override
public String getDockerImageTag() {
return dockerImageTag;
}
/** @param dockerImageTag the Docker image tag */
public void setDockerImageTag(String dockerImageTag) {
this.dockerImageTag = dockerImageTag;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#isDebugEnabled()
*/
@Override
public boolean isDebugEnabled() {
return debugEnabled;
}
/** @param debugEnabled whether remote debugging is enabled */
public void setDebugEnabled(boolean debugEnabled) {
this.debugEnabled = debugEnabled;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getDebugJdwpPort()
*/
@Override
public int getDebugJdwpPort() {
return debugJdwpPort;
}
/** @param debugJdwpPort the JDWP debug port */
public void setDebugJdwpPort(int debugJdwpPort) {
this.debugJdwpPort = debugJdwpPort;
}
/*
* @see com.sitewhere.spi.k8s.IMicroserviceSummary#getDebugJmxPort()
*/
@Override
public int getDebugJmxPort() {
return debugJmxPort;
}
/** @param debugJmxPort the JMX debug port */
public void setDebugJmxPort(int debugJmxPort) {
this.debugJmxPort = debugJmxPort;
}
}
|
<gh_stars>0
#ifndef __TFTP_EEPROM_H__
#define __TFTP_EEPROM_H__
/* Entry point for the DUT TFTP/EEPROM routine (implemented elsewhere). */
void dut_tftp_eeprom(void);
#endif // __TFTP_EEPROM_H__
|
<gh_stars>1-10
package myimagej;
import java.io.File;
import java.io.FilenameFilter;
public class MyFileNameFilter implements FilenameFilter {
private String[] condition;
public MyFileNameFilter()
{
this.condition=new
String[]{".jpg",".JPG",".BMP",".bmp",".png",".PNG",".TIF","
.tif"};
}
public boolean accept(File dir, String name) {
for(String s:condition)
{
if(name.endsWith(s))return true;
}
return false;
}
|
import java.util.HashMap;
import java.util.Map;
import java.util.Arrays;
public class FreqSort {
// Function to sort an array arr[] of size n
// in decreasing order of frequency
public static void sortByFreq(int[] arr)
{
// get the frequency of elements in arr
Map<Integer, Integer> freq = new HashMap<>();
for (int i = 0; i < arr.length; i++) {
// increment the frequency
freq.put(arr[i], freq.getOrDefault(arr[i], 0) + 1);
}
// traverse the array and get the output
Arrays.sort(arr, (a, b) ->
(freq.get(b).compareTo(freq.get(a))));
}
// Driver code
public static void main(String[] args)
{
int arr[] = { 1, 2, 3, 4, 5, 4, 3, 2, 1, 6 };
sortByFreq(arr);
System.out.println(Arrays.toString(arr));
}
}
// Output:
// [1, 1, 2, 2, 3, 3, 4, 4, 5, 6]
|
#!/bin/bash
# Set your own EuRoC_PATH path to run ice-ba. Use './bin/ice_ba --help' to get the explanation for all of the flags. Flags [imgs_folder] and [iba_param_path] are necessary.
# Add flag '--save_feature' to save feature message and calibration file for back-end only mode
EuRoC_PATH=/home/nikolausmitchell/nodein/ndata/vio_test_data/Euroc/MH_02_easy
# Results (trajectory + saved features) go into a `result` dir inside the dataset.
mkdir $EuRoC_PATH/result
# Build the full command as a string first so it can be echoed for reproducibility.
cmd="../bin/ice_ba --imgs_folder $EuRoC_PATH --start_idx 0 --end_idx -1 --iba_param_path ../config/config_of_stereo.txt --gba_camera_save_path $EuRoC_PATH/result/MH_02_easy.txt --stereo --save_feature"
echo $cmd
eval $cmd
|
// Demo of ES6 declaration semantics: var vs let vs const.
var nameVar = 'John';
// `var` silently allows redeclaration in the same scope.
var nameVar = 'Mike';
console.log('nameVar', nameVar);
let nameLet = 'Jen';
// error cannot redefine
//let nameLet = 'Mike';
// Reassignment (without redeclaration) is fine for `let`.
nameLet = 'Julie'
console.log('nameLet', nameLet)
const nameConst = 'Frank';
console.log('nameConst', nameConst)
// `var` is function-scoped: petName does not leak outside getPetName.
function getPetName() {
var petName = 'Hal';
return petName;
}
|
'use strict';
module.exports = (sequelize, DataTypes) => {
const Role = sequelize.define('Role', {
name: DataTypes.STRING,
description: DataTypes.STRING
});
Role.associate = models => {
models.Role.belongsToMany(models.Privilege, {
through: 'RolePrivilege'
});
models.Role.belongsToMany(models.User, {
through: 'UserRole'
});
};
return Role;
};
|
package codecheck.github.models
import org.json4s.JValue
/** Thin JSON wrapper exposing the `id` and `number` fields of a
  * review-request payload. */
case class ReviewRequest(value: JValue) extends AbstractJson(value) {
def id = get("id").toLong
def number = get("number").toLong
}
|
# Installs KubeVirt + CDI (and the common templates) on an OpenShift cluster
# for integration tests, then makes sure the virtctl CLI is on PATH.
# KubeVirt version is scraped from the GitHub releases page; CDI is pinned.
KUBEVIRT_VERSION=$(curl -s https://github.com/kubevirt/kubevirt/releases/latest | grep -o "v[0-9]\.[0-9]*\.[0-9]*")
#CDI_VERSION=$(curl -s https://github.com/kubevirt/containerized-data-importer/releases/latest | grep -o "v[0-9]\.[0-9]*\.[0-9]*")
CDI_VERSION="v1.29.0"
VIRTCTL_DOWNLOAD_URL="https://github.com/kubevirt/kubevirt/releases/download/${KUBEVIRT_VERSION}/virtctl-${KUBEVIRT_VERSION}"
# Release assets have used both -x86_64 and -amd64 suffixes; try both later.
VIRTCTL_X86_64="${VIRTCTL_DOWNLOAD_URL}-linux-x86_64"
VIRTCTL_AMD64="${VIRTCTL_DOWNLOAD_URL}-linux-amd64"
# Create openshift-cnv namespace for Integration Tests
oc create namespace openshift-cnv
oc create namespace kubevirt-os-images
# Deploy Kubevirt, Storage, CDI Pods
oc create -f https://github.com/kubevirt/kubevirt/releases/download/$KUBEVIRT_VERSION/kubevirt-operator.yaml
oc create -f https://github.com/kubevirt/kubevirt/releases/download/$KUBEVIRT_VERSION/kubevirt-cr.yaml
# File was deleted upstream, using last known file in github, until a more permanent fix:
# https://github.com/openshift/console/pull/8608 - will fix this issue.
# oc create -f https://raw.githubusercontent.com/kubevirt/kubevirt.github.io/master/labs/manifests/storage-setup.yml
oc create -f https://raw.githubusercontent.com/kubevirt/kubevirt.github.io/f6530b3fe71e8821208cad8fcac165c54a42bd54/labs/manifests/storage-setup.yml
oc create -f https://github.com/kubevirt/containerized-data-importer/releases/download/$CDI_VERSION/cdi-operator.yaml
oc create -f https://github.com/kubevirt/containerized-data-importer/releases/download/$CDI_VERSION/cdi-cr.yaml
# Deploy Common Templates
oc project openshift
oc create -f https://github.com/kubevirt/common-templates/releases/download/v0.13.1/common-templates-v0.13.1.yaml
oc project default
# Wait for kubevirt to be available
oc wait -n kubevirt kv kubevirt --for condition=Available --timeout 15m
oc patch storageclass hostpath -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"false"}}}'
# Create storage-class permissions
oc create -f - <<EOF
apiVersion: v1
kind: ConfigMap
metadata:
name: kubevirt-storage-class-defaults
namespace: openshift-cnv
data:
accessMode: ReadWriteOnce
volumeMode: Filesystem
EOF
# Enable live-migration feature-gate
oc create -f - <<EOF
apiVersion: v1
kind: ConfigMap
metadata:
name: kubevirt-config
namespace: kubevirt
labels:
kubevirt.io: ""
data:
feature-gates: "DataVolumes,SRIOV,LiveMigration,CPUManager,CPUNodeDiscovery,Sidecar,Snapshot"
EOF
# TODO remove this once the test image is built from Dockerfile
if ! type virtctl; then
# Install virtctl binary and add in to PATH
mkdir virtctl
wget ${VIRTCTL_AMD64} -O virtctl/virtctl || wget ${VIRTCTL_X86_64} -O virtctl/virtctl
[[ ! -f "virtctl/virtctl" ]] && echo "ERROR: virtctl binary is unavailable for download" && exit 1
chmod +x virtctl/virtctl
export PATH="${PATH}:$(pwd)/virtctl"
fi
|
<filename>controllers/api/index.js
// API route aggregator: mounts each resource's sub-router under its prefix.
const router = require("express").Router();
const users = require("./user-routes");
const properties = require("./property-routes");

router.use("/users", users);
router.use("/property", properties);

module.exports = router;
|
#!/bin/bash
# Copyright (c) 2010, 2014, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# Starts mysqld for the local NDB cluster via mysqld_safe, sourcing the
# environment from ../env.properties when MYSQL_HOME is not already set.
if [ "$MYSQL_HOME" = "" ] ; then
source ../env.properties
echo MYSQL_HOME=$MYSQL_HOME
PATH="$MYSQL_LIBEXEC:$MYSQL_BIN:$PATH"
fi
#set -x
cwd="$(pwd)"
mylogdir="$cwd/ndblog"
mkdir -p "$mylogdir"
user="$(whoami)"
mycnf="$cwd/../my.cnf"
myerr="$mylogdir/mysqld.log.err"
mysock="/tmp/mysql.sock"
#mysock="$mylogdir/mysql.sock"
echo
echo start mysqld...
( cd $MYSQL_HOME ; "mysqld_safe" --defaults-file="$mycnf" --user="$user" --log-error="$myerr" --socket="$mysock" & )
#
# debug:
#( cd $MYSQL_HOME ; "mysqld_safe" --defaults-file="$mycnf" --user="$user" --log-error="$myerr" -#d & )
# crashes when --debug/-# at beginning:
#( cd $MYSQL_HOME ; "$mysqld" --debug --defaults-file="$mycnf" --user="$user" --log-error="$myerr" & )
# need some extra time
# Give mysqld_safe roughly 10 seconds to come up before querying status.
for ((i=0; i<10; i++)) ; do printf "." ; sleep 1 ; done ; echo
#echo
#ps -efa | grep mysqld
./show_cluster.sh
#set +x
|
<gh_stars>1-10
package btc

const (
	// MaxTimeStamp is 4762368000 seconds after the Unix epoch (~year 2120).
	// NOTE(review): presumably an upper bound used to reject absurd
	// timestamps — confirm against callers.
	MaxTimeStamp = 4762368000
	// TIMESTAMP is the literal key/field name for timestamp values.
	TIMESTAMP = "timestamp"
)
|
# Copyright 2015 Alexey Baranov <me@kotiki.cc>. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex

# Resolve the repository root relative to this script's location.
cd `dirname $0`/../..
git_root=`pwd`
cd -

# Image name is keyed on the Dockerfile hash, so the image is rebuilt only
# when the Dockerfile itself changes.
DOCKER_IMAGE_NAME=squim_docker_`sha1sum ${git_root}/build/travis/Dockerfile | cut -f1 -d\ `
echo $DOCKER_IMAGE_NAME
docker build -t $DOCKER_IMAGE_NAME ${git_root}/build/travis

# Unique container name per run.
CONTAINER_NAME="travis_run_tests_$(uuidgen)"
echo $CONTAINER_NAME

# Assemble the bazel test invocation from the checked-in test-suite list.
opts="--verbose_failures -c dbg"
tests=`cat ${git_root}/build/testsuites | tr "\n" " "`
COMMAND="bazel test ${tests} ${opts}"
echo $COMMAND

# Run the tests inside the container; record failure instead of aborting so
# the container can still be cleaned up afterwards.
docker run \
  -e "COMMAND=$COMMAND" \
  -v "$git_root:/var/local/ci/squim" \
  -w /var/local/ci/squim/ \
  --name=$CONTAINER_NAME \
  $DOCKER_IMAGE_NAME \
  bash -l ./build/travis/setup_bazel_and_run_tests.sh || DOCKER_FAILED="true"

docker rm -f $CONTAINER_NAME || true

# Propagate the test failure after cleanup.
if [ "$DOCKER_FAILED" != "" ]
then
  exit 1
fi
|
#!/usr/bin/env bats
# Integration tests for the top-level "dotbare" dispatcher: help/version
# output, sub-command routing, generic --git mode, and blocked commands.

# --- helpers: each wraps one invocation so bats `run` can capture it ---

# Top-level help output.
help() {
  bash "${BATS_TEST_DIRNAME}"/../dotbare -h
}

# "add --all" is deliberately refused by dotbare.
addall() {
  bash "${BATS_TEST_DIRNAME}"/../dotbare add --all
}

# Routing to the fadd sub-command.
routing() {
  "${BATS_TEST_DIRNAME}"/../dotbare fadd -h
}

# Routing to the flog sub-command.
routing2() {
  "${BATS_TEST_DIRNAME}"/../dotbare flog -h
}

# Plain git pass-through; the test dir is prepended to PATH first,
# presumably to pick up a stubbed `git` fixture — verify against fixtures.
normal_git() {
  export PATH="${BATS_TEST_DIRNAME}:$PATH"
  "${BATS_TEST_DIRNAME}"/../dotbare add -h
}

# Unknown sub-command should fail.
invalid_command() {
  "${BATS_TEST_DIRNAME}"/../dotbare hello
}

version() {
  "${BATS_TEST_DIRNAME}"/../dotbare --version
}

# No argument at all: expect the command overview.
no_argument() {
  export PATH="${BATS_TEST_DIRNAME}:$PATH"
  "${BATS_TEST_DIRNAME}"/../dotbare
}

# --- --git mode: some dotbare commands must be blocked ---
generic_git_operation_block_finit() {
  "${BATS_TEST_DIRNAME}"/../dotbare --git finit
}
generic_git_operation_block_fbackup() {
  "${BATS_TEST_DIRNAME}"/../dotbare --git fbackup
}
generic_git_operation_block_fupgrade() {
  "${BATS_TEST_DIRNAME}"/../dotbare --git fupgrade
}

# --- --git mode: allowed commands route with the computed git dirs ---
generic_git_operation_init() {
  export PATH="${BATS_TEST_DIRNAME}:$PATH"
  "${BATS_TEST_DIRNAME}"/../dotbare --git init
}
generic_git_operation_fadd() {
  export PATH="${BATS_TEST_DIRNAME}:$PATH"
  "${BATS_TEST_DIRNAME}"/../dotbare --git fadd
}

@test "main generic git fadd" {
  run generic_git_operation_fadd
  [ "${status}" -eq 0 ]
  [[ "${output}" =~ "--git-dir=rev-parse --show-toplevel/.git --work-tree=rev-parse --show-toplevel" ]]
  [[ "${output}" =~ "fadd_stage_modified" ]]
}

@test "main generic git commands" {
  run generic_git_operation_init
  [ "${status}" -eq 0 ]
  [[ "${output}" =~ "--git-dir=rev-parse --show-toplevel/.git --work-tree=rev-parse --show-toplevel" ]]
  [[ "${output}" =~ "init" ]]
}

@test "main generic git flag block finit" {
  run generic_git_operation_block_finit
  [ "${status}" -eq 1 ]
  [ "${output}" = "dotbare finit is not supported when using dotbare as a generic fuzzy git tool" ]
}

@test "main generic git flag block fbackup" {
  run generic_git_operation_block_fbackup
  [ "${status}" -eq 1 ]
  [ "${output}" = "dotbare fbackup is not supported when using dotbare as a generic fuzzy git tool" ]
}

@test "main generic git flag block fupgrade" {
  run generic_git_operation_block_fupgrade
  [ "${status}" -eq 1 ]
  [ "${output}" = "dotbare fupgrade is not supported when using dotbare as a generic fuzzy git tool" ]
}

@test "main help" {
  run help
  [ "${status}" -eq 0 ]
  [ "${lines[0]}" = "Usage: dotbare [-h] [-v] [COMMANDS] [OPTIONS] ..." ]
}

@test "main version" {
  run version
  [ "${status}" -eq 0 ]
  [[ "${output}" =~ "Current dotbare version: ${DOTBARE_VERSION}" ]]
}

@test "main disable add --all" {
  run addall
  [ "${status}" -eq 1 ]
  [ "${lines[0]}" = "If you intend to stage all modified file, run dotbare add -u" ]
}

@test "main routing" {
  run routing
  [ "${status}" -eq 0 ]
  [ "${lines[0]}" = "Usage: dotbare fadd [-h] [-f] [-d] ..." ]
}

@test "main routing2" {
  run routing2
  [ "${status}" -eq 0 ]
  [ "${lines[0]}" = "Usage: dotbare flog [-h] [-r] [-R] [-e] [-c] [-y] ..." ]
}

@test "main git command" {
  run normal_git
  [ "${status}" -eq 0 ]
  [[ "${output}" =~ "add -h" ]]
}
# Fix: test label typo "invliad" -> "invalid". An unknown sub-command must
# exit non-zero.
@test "main invalid command" {
  run invalid_command
  [ "${status}" -eq 1 ]
}
# Invoking dotbare with no argument should print the command overview.
@test "main no argument" {
  run no_argument
  [[ "${output}" =~ "Available commands" ]]
}
|
import React from "react"
import Seo from "../components/seo"

// Static page for the sermon ("설교") section of the site.
const SermonPage = () => (
  <>
    <Seo title="Sermon" />
    <div>설교</div>
  </>
);

export default SermonPage;
|
<reponame>Igorocky/jfxutils
package org.igye.jfxutils.exceptions

/** Library-specific exception type; carries only a message. */
class JfxUtilsException(msg: String) extends Exception(msg)
|
def processImageURLs(categories):
    """Build small-image storage paths for a list of category dicts.

    For every category whose ``picture`` value is present and truthy,
    emit ``"<user_id>/s_<picture>"``; all other categories are skipped.
    """
    # cat.get('picture') is truthy exactly when 'picture' is a key AND
    # its value is non-empty — same predicate as the original loop.
    return [
        f"{cat['user_id']}/s_{cat['picture']}"
        for cat in categories
        if cat.get('picture')
    ]
|
#!/bin/sh
# Regression test: filesystem events on a lib file must not kill the Flow
# server until the lib contents actually change.
FLOW=$1

# Preserve the pristine lib so it can be restored at the end.
cp lib/lib.js lib/lib.js.orig

# This should not cause the Flow server to die
# NOTE(review): the path 'lib/lib/js' looks like a typo for 'lib/lib.js' —
# confirm against the test's recorded expected output before changing it.
$FLOW force-recheck --no-auto-start lib/lib/js
echo "first status, after recheck"
$FLOW status --no-auto-start 2>/dev/null

# This also should not cause the Flow server to die
touch lib/lib.js
echo "second status, after touch"
$FLOW status --no-auto-start 2>/dev/null

# This should cause the flow server to die
cp lib/lib.js.modified lib/lib.js
echo "third status, after modification"
# This should have no output, since it won't find a server. It will print stuff
# on stderr but it includes the nondeterministically-chosen tmpdir so we can't
# compare against it.
$FLOW status --no-auto-start 2>/dev/null
echo "done"

# Restore the original lib file.
mv lib/lib.js.orig lib/lib.js
|
<filename>main/plugins/org.talend.repository/src/main/java/org/talend/repository/MigrationDIMetadataItemService.java
// ============================================================================
//
// Copyright (C) 2006-2021 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.repository;
import org.talend.core.IMigrateDIMetadataItemService;
import org.talend.core.model.properties.Item;
import org.talend.migration.IMigrationTask.ExecutionResult;
import org.talend.repository.model.migration.MergeTosMetadataMigrationTask;
/**
 * DOC bZhou class global comment. Detailled comment
 *
 * Service hook that migrates DI metadata items by delegating to the
 * TOS-metadata merge migration task.
 */
public class MigrationDIMetadataItemService implements IMigrateDIMetadataItemService {

    /*
     * (non-Javadoc)
     *
     * @see org.talend.core.IMigrateDIMetadataItemService#migrateDIItems(org.talend.core.model.properties.Item)
     */
    public ExecutionResult migrateDIItems(Item item) {
        // A fresh task per call; its ExecutionResult is surfaced unchanged.
        return new MergeTosMetadataMigrationTask().execute(item);
    }
}
|
#!/bin/sh
# Do an automated release to NPM.
#
# Due to some weirdnesses with npm pathes, be sure to call this script from the
# root directory of the project (e.g. via ./scripts/release.sh).
NPM_REGISTRY="registry.npmjs.org"

# Are we logged in to NPM? Check for a *repo-scoped* .npmrc file (e.g. in the
# project directory), if not found, create based on the $NPM_TOKEN variable.
#
# In CI environments where we can mount secrets as files, it may be preferable
# to store and mount the .npmrc directly instead of using env variables.
if [ ! -f .npmrc ]; then
  if [ -z "$NPM_TOKEN" ]; then
    echo "No local .npmrc or NPM_TOKEN environment variable! Exiting..."
    exit 1
  fi
  # Write a minimal token-auth .npmrc scoped to this repository.
  echo "//${NPM_REGISTRY}/:_authToken=${NPM_TOKEN}" >> .npmrc
fi

# Publish to NPM. Will do a dry-run by default unless overridden via LIVE=1.
#
# The automatically triggered prepublishOnly npm step seems to be very unhappy
# without unsafe-perm as it tries to deescalate its own privileges and can
# no longer modify the working directory.
LIVE=${LIVE:-0}
if [ "$LIVE" -eq "1" ]; then
  # We can use a release script that detects semver pre-release SemVer version
  # strings and automatically append `--tag prerelease` to the npm publish
  # commands when they are detected. This restores proper behavior whereby end
  # users will not automatically get a prerelease version on install unless
  # explicitly requested.
  #
  # The origin of this issue stems from NPM now starting to use their own "dist
  # tags" that must be explicitly set, whereby they used to respect SemVer
  # directly. See: https://medium.com/@mbostock/prereleases-and-npm-e778fc5e2420
  npm run-script publish-semver -- --unsafe-perm=true
else
  npm run-script publish-semver -- --unsafe-perm=true --dry-run
fi
|
<gh_stars>100-1000
/* spread.h -- Spread an array into an additional dimension
Copyright (C) 2017 European Centre for Medium-Range Weather Forecasts
Author: <NAME> <<EMAIL>>
This file is part of the Adept library.
*/
#ifndef AdeptSpread_H
#define AdeptSpread_H
#include <adept/Array.h>
namespace adept {
namespace internal {
// Expression representing the spread of an array into an
// additional dimension
template <int SpreadDim, typename Type, class E>
class Spread : public Expression<Type, Spread<SpreadDim,Type,E> > {
  // The wrapped operand is evaluated once into a concrete array member.
  typedef Array<E::rank,Type,E::is_active> ArrayType;

public:
  // Static data
  static const int rank = E::rank+1;
  static const bool is_active = E::is_active;
  static const int n_active = ArrayType::n_active;
  static const int n_scratch = 0;
  static const int n_arrays = ArrayType::n_arrays;
  // Currently not vectorizable if the final dimension is the
  // spread dimension because the current design always has the
  // array index increasing
  static const bool is_vectorizable = (SpreadDim != E::rank);

protected:
  const ArrayType array;        // evaluated copy of the wrapped expression
  ExpressionSize<rank> dims;    // dimensions including the new spread axis
  Index n;                      // extent of the spread dimension

public:
  // Build the spread expression: copy the operand's dimensions, inserting
  // extent n_ at position SpreadDim.
  Spread(const Expression<Type,E>& e, Index n_)
    : array(e.cast()), n(n_) {
    for (int i = 0; i < SpreadDim; ++i) {
      dims[i] = array.dimension(i);
    }
    dims[SpreadDim] = n_;
    for (int i = SpreadDim+1; i < rank; ++i) {
      dims[i] = array.dimension(i-1);
    }
    // Communicate empty array if n == 0
    if (n_ == 0) {
      dims[0] = 0;
    }
  }

  bool get_dimensions_(ExpressionSize<rank>& dim) const {
    dim = dims;
    return true;
  }

  // e.g. "spread<1>(A,3)" for diagnostics.
  std::string expression_string_() const {
    std::stringstream s;
    s << "spread<" << SpreadDim << ">(" << array.expression_string()
      << "," << n << ")";
    return s.str();
  }

  // The operand is a private copy, so it can never alias the target.
  bool is_aliased_(const Type* mem1, const Type* mem2) const {
    return false;
  }
  bool all_arrays_contiguous_() const {
    return array.all_arrays_contiguous_();
  }
  bool is_aligned_() const {
    return array.is_aligned_();
  }
  template <int N>
  int alignment_offset_() const {
    return array.template alignment_offset_<N>();
  }

  // Do not implement value_with_len_

  // Advance only if the spread dimension is not the last
  template <int MyArrayNum, int NArrays>
  void advance_location_(ExpressionSize<NArrays>& loc) const {
    // If false this if statement should be optimized away
    if (SpreadDim < rank-1) {
      array.template advance_location_<MyArrayNum>(loc);
    }
  }
  template <int MyArrayNum, int NArrays>
  Type value_at_location_(const ExpressionSize<NArrays>& loc) const {
    return array.template value_at_location_<MyArrayNum>(loc);
  }
  // No scratch entries are used: both *_store_ and *_stored_ simply
  // re-read the underlying array value.
  template <int MyArrayNum, int MyScratchNum, int NArrays, int NScratch>
  Type value_at_location_store_(const ExpressionSize<NArrays>& loc,
                                ScratchVector<NScratch>& scratch) const {
    return array.template value_at_location_<MyArrayNum>(loc);
  }
  template <int MyArrayNum, int MyScratchNum, int NArrays, int NScratch>
  Type value_stored_(const ExpressionSize<NArrays>& loc,
                     const ScratchVector<NScratch>& scratch) const {
    return array.template value_at_location_<MyArrayNum>(loc);
  }
  template <int MyArrayNum, int NArrays>
  Packet<Type>
  packet_at_location_(const ExpressionSize<NArrays>& loc) const {
    return packet_at_location_local_<SpreadDim==rank-1,MyArrayNum>(loc);
  }

protected:
  // Specializing for the case when the final dimension is the
  // final dimension of the wrapped array
  template <bool IsDuplicate, int MyArrayNum, int NArrays>
  typename enable_if<!IsDuplicate, Packet<Type> >::type
  packet_at_location_local_(const ExpressionSize<NArrays>& loc) const {
    return array.template packet_at_location_<MyArrayNum>(loc);
  }
  // Specializing for the case when the final dimension is to be
  // "spread". The following does not work because the array
  // location is incremented for packets when we really want it to
  // always point to the start of a row. It is deactivated by
  // is_vectorizable_ (above).
  template <bool IsDuplicate, int MyArrayNum, int NArrays>
  typename enable_if<IsDuplicate, Packet<Type> >::type
  packet_at_location_local_(const ExpressionSize<NArrays>& loc) const {
    return Packet<Type>(array.template value_at_location_<MyArrayNum>(loc));
  }

public:
  // Map an index into the spread expression onto an index into the wrapped
  // array by dropping the SpreadDim coordinate.
  template <int MyArrayNum, int NArrays>
  void set_location_(const ExpressionSize<rank>& i,
                     ExpressionSize<NArrays>& index) const {
    ExpressionSize<rank-1> i_array(0);
    int j = 0;
    for ( ; j < SpreadDim; ++j) {
      i_array[j] = i[j];
    }
    for ( ; j < rank-1; ++j) {
      i_array[j] = i[j+1];
    }
    array.template set_location_<MyArrayNum>(i_array, index);
  }

  // Gradient accumulation is delegated unchanged to the wrapped array.
  template <int MyArrayNum, int MyScratchNum, int NArrays, int NScratch>
  void calc_gradient_(Stack& stack, const ExpressionSize<NArrays>& loc,
                      const ScratchVector<NScratch>& scratch) const {
    array.template calc_gradient_<MyArrayNum,MyScratchNum>(stack,loc,scratch);
  }
  template <int MyArrayNum, int MyScratchNum, int NArrays, int NScratch,
            typename MyType>
  void calc_gradient_(Stack& stack,
                      const ExpressionSize<NArrays>& loc,
                      const ScratchVector<NScratch>& scratch,
                      MyType multiplier) const {
    array.template calc_gradient_<MyArrayNum,MyScratchNum>(stack,loc,
                                                           scratch,multiplier);
  }
};
}
// Define spread function applied to an expression
// Returns a lazy expression replicating "e" n times along a new dimension
// at position SpreadDim; enabled only for 0 <= SpreadDim <= rank of e.
template <int SpreadDim, typename Type, class E>
typename internal::enable_if<(SpreadDim >= 0 && SpreadDim <= E::rank),
                             internal::Spread<SpreadDim,Type,E> >::type
spread(const Expression<Type,E>& e, Index n) {
  return internal::Spread<SpreadDim,Type,E>(e,n);
}
/*
// If "spread" is applied to a scalar, we expand it to a Vector of
// the same type
template <int SpreadDim, typename Type>
typename internal::enable_if<internal::is_not_expression<Type>::value,
Array<1,Type,false> >::type
spread(const Type& e, Index n) {
Array<1,Type,false> arr(n);
arr = e;
return arr;
}
*/
}
#endif
|
<reponame>seawindnick/javaFamily<filename>offer/src/main/java/com/java/study/algorithm/zuo/bbasic/class_03/Code_13_CopyListWithRandom.java
package com.java.study.algorithm.zuo.bbasic.class_03;
// Placeholder for exercise 13 (judging by the name, "copy a linked list
// with random pointers"); no implementation has been added yet.
public class Code_13_CopyListWithRandom{
}
|
# Clean up the old resources
rm -f design.*
rm -f design_resources.*

# Regenerate the Qt resource module and the UI module (PyQt4, Python 3).
pyrcc4 -py3 ../designer/design_resources.qrc -o design_resources.py
pyuic4 ../designer/design.ui -o design_temp.py

# Can't figure out how to force pyuic to correctly link design_resources
sed 's/design_resources_rc/design_resources/' design_temp.py > design.py
rm -f design_temp.py
|
#!/bin/bash
# This script:
# 1. packages the project
# 2. uploads the bundle to the cloud host
# 3. logs in to the cloud host and runs the reset script

# Cloud host account and IP address, e.g. ubuntu@118.24.0.153
REMOTE=weblogic@148.70.134.173
# Path to the local SSH private key (id_rsa), e.g. /home/litemall/id_rsa
ID_RSA=

# Fix: the original error branches just echoed the host string instead of
# telling the user what to configure, and used the non-portable `exit -1`.
if test -z "$REMOTE"
then
    echo "Please set REMOTE to the cloud host account, e.g. ubuntu@118.24.0.153"
    exit 1
fi

if test -z "$ID_RSA"
then
    echo "Please set ID_RSA to the path of your SSH private key (id_rsa)"
    exit 1
fi

# Resolve the project root relative to this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
cd $DIR/../..
LITEMALL_HOME=$PWD
echo "LITEMALL_HOME $LITEMALL_HOME"

# Package the project
cd $LITEMALL_HOME
./deploy/util/package.sh

# Upload to the cloud host
cd $LITEMALL_HOME
scp -i $ID_RSA -r ./deploy $REMOTE:/home/ubuntu/

# Log in remotely and run the reset script
ssh $REMOTE -i $ID_RSA << eeooff
cd /home/ubuntu
sudo ./deploy/bin/reset.sh
exit
eeooff
|
<gh_stars>1-10
import { Meteor } from 'meteor/meteor';
import { Accounts } from 'meteor/accounts-base';
import { t } from '../../../utils';
import toastr from 'toastr';

// Handle the link a user clicks in the "verify your email" message:
// consume the token, toast the outcome, and always release the link
// handler via done().
Accounts.onEmailVerificationLink(function(token, done) {
	Accounts.verifyEmail(token, function(error) {
		if (error == null) {
			toastr.success(t('Email_verified'));
			// Server-side hook for post-verification bookkeeping.
			Meteor.call('afterVerifyEmail');
		} else {
			toastr.error(error.message);
		}
		// done() runs regardless of success or failure.
		return done();
	});
});
|
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import '@testing-library/jest-dom/extend-expect';
import { render } from '@testing-library/react';
import * as fs from 'fs';
import * as path from 'path';
import Sandbox, { SandboxContructor } from '@ice/sandbox';
import renderModules, {
getModules,
parseUrlAssets,
appendCSS,
clearModules,
MicroModule,
mountModule,
unmoutModule,
removeCSS,
} from '../src/modules';
// Fixture module descriptors: one component that loads successfully and
// one whose source is deliberately not a module ("error").
const modules = [{
  name: 'selfComponent',
  url: 'http://127.0.0.1:3334/index.js',
}, {
  name: 'error',
  url: 'http://127.0.0.1:3334/error.js',
}];

// Globals stubbed for the module loader under test.
declare global {
  interface Window {
    fetch?: any;
    react?: any;
    ReactDOM?: any;
  }
}

// The loaded modules resolve React/ReactDOM from window externals.
window.react = React;
window.ReactDOM = ReactDOM;

describe('render modules', () => {
  beforeEach(() => {
    // Stub fetch: serve the prebuilt component bundle for normal URLs and
    // a non-module snippet for URLs containing "error".
    const source = fs.readFileSync(path.resolve(__dirname, './component.js'));
    window.fetch = (url) => {
      return Promise.resolve({
        text: url.indexOf('error') === -1 ? () => source.toString() : () => 'const error = 1;',
      });
    };
  });

  test('fallback render', (next) => {
    const Component = renderModules(modules, null, {});
    const { container, unmount } = render(Component);
    expect(getModules()).toEqual(modules);
    // setTimeout(…, 0) lets the async module fetch/mount settle first.
    setTimeout(() => {
      expect(container.innerHTML).toBe('<div><div><h2>404</h2></div></div>');
      unmount();
      expect(container.innerHTML).toBe('');
      next();
    }, 0);
  });

  test('render skeleton', () => {
    const { container } = render(renderModules(modules, () => {
      return (
        <div id="skeleon"></div>
      );
    }, {}));
    expect(container.innerHTML).toBe('<div id="skeleon"></div>');
  });

  test('render MicroModule with name', (next) => {
    const { container } = render(<MicroModule moduleName="selfComponent" />);
    setTimeout(() => {
      expect(container.innerHTML).toBe('<div><div><h2>404</h2></div></div>');
      next();
    }, 0);
  });

  test('render loadingComponent', (next) => {
    // The loading placeholder shows synchronously, the module afterwards.
    const { container } = render(<MicroModule moduleName="selfComponent" loadingComponent={<div>loading</div>} />);
    expect(container.innerHTML).toBe('<div>loading</div>');
    setTimeout(() => {
      expect(container.innerHTML).toBe('<div><div><h2>404</h2></div></div>');
      next();
    }, 0);
  });

  test('render MicroModule with default sandbox', (next) => {
    const { container } = render(<MicroModule moduleName="selfComponent" sandbox />);
    setTimeout(() => {
      expect(container.innerHTML).toBe('<div><div><h2>404</h2></div></div>');
      next();
    }, 0);
  });

  test('render MicroModule with custom className and style', (next) => {
    const { container } = render(<MicroModule moduleName="selfComponent" wrapperClassName="test" wrapperStyle={{ fontSize: '14px' }} sandbox />);
    setTimeout(() => {
      expect(container.innerHTML).toBe('<div class="test" style="font-size: 14px;"><div><h2>404</h2></div></div>');
      next();
    }, 0);
  });

  test('mountModule with default sandbox', (next) => {
    const moduleInfo = { name: 'defaultSandbox', url: '//localhost' };
    const div = document.createElement('div');
    mountModule(moduleInfo, div, {}, true);
    setTimeout(() => {
      expect(div.innerHTML).toBe('<div><h2>404</h2></div>');
      unmoutModule(moduleInfo, div);
      expect(div.innerHTML).toBe('');
      next();
    }, 0);
  });

  test('mountModule with custom sandbox', (next) => {
    const moduleInfo = { name: 'customSandbox', url: '//localhost' };
    const div = document.createElement('div');
    mountModule(moduleInfo, div, {}, (Sandbox as SandboxContructor));
    setTimeout(() => {
      expect(div.innerHTML).toBe('<div><h2>404</h2></div>');
      unmoutModule(moduleInfo, div);
      expect(div.innerHTML).toBe('');
      next();
    }, 0);
  });

  test('load error module', (next) => {
    // handleError must fire for the non-module source; unmounting an
    // unmounted module must throw.
    const { container } = render(<MicroModule moduleName="error" handleError={() => {
      expect(true).toBe(true);
      next();
    }} />);
    try {
      const moduleInfo = modules.find(({ name }) => name === 'error');
      unmoutModule(moduleInfo, container);
      expect(false).toBe(true);
    } catch(error) {
      expect(true).toBe(true);
    }
  });

  test('append css', () => {
    const container = document.createElement('div');
    appendCSS('css', 'http://test.css', container);
    expect(container.innerHTML).toBe('<link module="css" rel="stylesheet" href="http://test.css">');
    removeCSS('css', container);
    expect(container.innerHTML).toBe('');
  });

  test('parse url assets', () => {
    // Extension detection must ignore query strings.
    const assets = parseUrlAssets([
      '//icestark.com/index.css',
      '//icestark.com/index.css?timeSamp=1575443657834',
      '//icestark.com/index.js',
      '//icestark.com/index.js?timeSamp=1575443657834',
    ]);
    expect(assets).toStrictEqual({
      cssList: [
        '//icestark.com/index.css',
        '//icestark.com/index.css?timeSamp=1575443657834',
      ],
      jsList: [
        '//icestark.com/index.js',
        '//icestark.com/index.js?timeSamp=1575443657834',
      ],
    });
  })

  test('clear module', () => {
    clearModules();
    expect(getModules()).toStrictEqual([]);
  });
});
|
#!/bin/bash
# Run WordPress docker entrypoint.
. docker-entrypoint.sh 'apache2'

set +u

# Ensure mysql is loaded
dockerize -wait tcp://${DB_HOST}:${DB_HOST_PORT:-3306} -timeout 1m

# Config WordPress
if [ ! -f "${WP_ROOT_FOLDER}/wp-config.php" ]; then
  wp config create \
    --path="${WP_ROOT_FOLDER}" \
    --dbname="${DB_NAME}" \
    --dbuser="${DB_USER}" \
    --dbpass="${DB_PASSWORD}" \
    --dbhost="${DB_HOST}" \
    --dbprefix="${WP_TABLE_PREFIX}" \
    --skip-check \
    --quiet \
    --allow-root
fi

# Install WP if not yet installed
if ! $( wp core is-installed --allow-root ); then
  wp core install \
    --path="${WP_ROOT_FOLDER}" \
    --url="${WP_URL}" \
    --title='Test' \
    --admin_user="${ADMIN_USERNAME}" \
    --admin_password="${ADMIN_PASSWORD}" \
    --admin_email="${ADMIN_EMAIL}" \
    --allow-root
fi

# Install and activate The Events Calendar
if [ ! -f "${PLUGINS_DIR}/the-events-calendar/the-events-calendar.php" ]; then
  wp plugin install the-events-calendar --activate --allow-root
fi

# Install and activate Event Tickets
if [ ! -f "${PLUGINS_DIR}/event-tickets/event-tickets.php" ]; then
  wp plugin install event-tickets --activate --allow-root
fi

# Install and activate WPGraphQL
if [ ! -f "${PLUGINS_DIR}/wp-graphql/wp-graphql.php" ]; then
  wp plugin install \
    https://github.com/wp-graphql/wp-graphql/archive/master.zip \
    --activate --allow-root
fi

# Install and activate WPGraphQL JWT Authentication
if [ ! -f "${PLUGINS_DIR}/wp-graphql-jwt-authentication/wp-graphql-jwt-authentication.php" ]; then
  wp plugin install \
    https://github.com/wp-graphql/wp-graphql-jwt-authentication/archive/master.zip \
    --activate --allow-root
fi

# Install and activate WPGraphiQL (optional, gated by INCLUDE_WPGRAPHIQL)
if [[ ! -z "$INCLUDE_WPGRAPHIQL" ]]; then
  if [ ! -f "${PLUGINS_DIR}/wp-graphiql/wp-graphiql.php" ]; then
    wp plugin install \
      https://github.com/wp-graphql/wp-graphiql/archive/master.zip \
      --activate --allow-root
  fi
fi

# Install and activate QL Events (from the requested branch)
if [ ! -f "${PLUGINS_DIR}/ql-events/ql-events.php" ]; then
  wp plugin install \
    https://github.com/simplur/ql-events/archive/${QL_EVENTS_BRANCH:-master}.zip \
    --activate --allow-root
else
  wp plugin activate ql-events --allow-root
fi

# Set pretty permalinks.
wp rewrite structure '/%year%/%monthnum%/%postname%/' --allow-root

# Snapshot the database for the test suite's fixtures.
wp db export "${PROJECT_DIR}/tests/_data/dump.sql" --allow-root

exec "$@"
|
<reponame>uscope-platform/makefile_gen<gh_stars>0
// Generated from /home/fils/git/makefilegen_v2/grammars/sv2017.g4 by ANTLR 4.9.2
// NOTE: auto-generated translation unit — do not edit by hand; regenerate
// from the grammar instead.

#include "sv2017BaseListener.h"

using namespace mgp_sv;
|
#!/bin/bash
# Collect the basenames of every log under logs$1/ that contains a
# successful tactic call, writing the list to successes$1.log.
for f in `grep -l "Tactic call ran for .* (success)" logs$1/*.log`; do basename $f; done > successes$1.log
|
<filename>src/app/upload-files-example/upload-files-example.component.ts
import { Component, OnInit } from '@angular/core';
import { MediaObserver } from '@angular/flex-layout';
import { PipFileUploadService } from 'pip-webui2-files';
@Component({
  selector: 'app-upload-files-example',
  templateUrl: './upload-files-example.component.html',
  styleUrls: ['./upload-files-example.component.scss']
})
export class UploadFilesExampleComponent implements OnInit {
  // Blobs queued for the next upload: { file: Blob, fileName: string }.
  private files: any[] = [];
  private url = 'http://tracker.pipservices.net:8080/api/v1/blobs';
  private headers: any = {
    'x-session-id': '7501b38b1cea42f2b0d34d282b701e5c'
  };
  private blobId = 'a0573d87408c4e6ebb93ff75569a821b';

  constructor(
    public media: MediaObserver,
    public fileUploadService: PipFileUploadService
  ) { }

  ngOnInit() { }

  /**
   * Collect the files picked (or dropped) by the user, read each one as a
   * data URI, convert it to a Blob, and upload the whole batch once every
   * read has finished.
   *
   * Fix: the original fired the upload when the *last-indexed* file's
   * FileReader completed, but reads can finish out of order, so earlier
   * files could be missing from the batch. We now count completed reads
   * and upload only when all of them are done.
   */
  changeFile(e) {
    this.files = [];
    const files = e.dataTransfer ? e.dataTransfer.files : e.target.files;
    let loaded = 0;
    for (let i = 0; i < files.length; i++) {
      const file = files[i];
      if (!file) { return; }
      const reader = new FileReader();
      reader.onloadend = (result: any) => {
        this.files.push({
          file: this.dataURItoBlob(result.target.result),
          fileName: file.name
        });
        // Trigger the upload only after every read has completed,
        // regardless of completion order.
        if (++loaded === files.length) { this.uploadFiles(); }
      };
      reader.readAsDataURL(file);
    }
  }

  /** Convert a base64 data URI into a Blob carrying the URI's mime type. */
  private dataURItoBlob(dataURI) {
    // convert base64 to raw binary data held in a string
    // doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
    const byteString = atob(dataURI.split(',')[1]);
    // separate out the mime component
    const mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to an ArrayBuffer
    const ab = new ArrayBuffer(byteString.length);
    const ia = new Uint8Array(ab);
    for (let i = 0; i < byteString.length; i++) {
      ia[i] = byteString.charCodeAt(i);
    }
    return new Blob([ab], { type: mimeString });
  }

  /** POST the queued blobs to the blob service with the session headers. */
  private uploadFiles() {
    this.fileUploadService.uploadFiles(this.url /* + '/' + this.blobId*/, this.files, this.headers).subscribe(
      (result: any) => {
        console.log('res', result);
      }
    );
  }
}
|
// Application bootstrap: expose lodash/Vue globally, install router,
// toast plugin, HTTP interceptors, and axios defaults.
window._ = require('lodash')
window.Vue = require('vue').default;

import VuetifyToast from 'vuetify-toast-snackbar-ng'
import setup from './interceptors/interceptors.js'
setup()

import VueRouter from 'vue-router'
Vue.use(VueRouter)

import Loader from './core/base/Loader.vue'
Vue.component('loader', Loader)

// import VCustomTitle from './global_components/VCustomTitle.vue'
// Vue.component('v-custom-title', VCustomTitle)
// import VCustomCard from './global_components/VCustomCard.vue'
// Vue.component('v-custom-card', VCustomCard)
// import VClienteComponent from './global_components/VClienteComponent.vue'
// Vue.component('v-cliente-component', VClienteComponent)
// import VCustomMenuCalendar from './global_components/VCustomMenuCalendar.vue'
// Vue.component('v-custom-menu-calendar', VCustomMenuCalendar)

// Global axios defaults: JSON payloads, CSRF token from the page's meta
// tag, bearer token from local storage, cookies on cross-site requests.
window.axios = require('axios')
axios.defaults.headers.common['Content-Type'] = 'application/json'
axios.defaults.headers.common['X-CSRF-TOKEN'] = document.querySelector('meta[name="csrf-token"]').getAttribute('content')
axios.defaults.headers.common.Authorization = `Bearer ${localStorage.getItem('id_token')}`
axios.defaults.withCredentials = true;

// Toast defaults plus shorthand variants ($toast.error/sucs/warn).
Vue.use(VuetifyToast, {
	x: 'right',
	y: 'top',
	color: 'info',
	icon: 'mdi-info',
	timeout: 3000,
	dismissable: true,
	autoHeight: false,
	multiLine: false,
	vertical: false,
	shorts: {
		error: {
			color: 'red'
		},
		sucs: {
			color: 'green'
		},
		warn: {
			color: 'orange'
		}
	},
	property: '$toast'
})
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Run all the queries and save a log.
# First argument: Supply the folder which houses all the queries (recursive).
# Second argument: adds options to the VXQuery CLI.
#
# run_benchmark.sh ./noaa-ghcn-daily/benchmarks/local_speed_up/queries/
# run_benchmark.sh ./noaa-ghcn-daily/benchmarks/local_speed_up/queries/ "-client-net-ip-address 169.235.27.138"
# run_benchmark.sh ./noaa-ghcn-daily/benchmarks/local_speed_up/queries/ "" q03
#

# Benchmark knobs: cluster config name, repetitions, and buffer sizing.
CLUSTER="rita"
REPEAT=1
FRAME_SIZE=$((8*1024))
BUFFER_SIZE=$((32*1024*1024))
JOIN_HASH_SIZE=$((1024*1024*1024))
#JOIN_HASH_SIZE=-1

if [ -z "${1}" ]
then
    echo "Please supply a directory for query files to be found."
    exit
fi

if [ -z "${2}" ]
then
    echo "Please the number of nodes (start at 0)."
    exit
fi

# Run queries for the specified number of nodes.
echo "Starting ${2} cluster nodes"
python vxquery-server/src/main/resources/scripts/cluster_cli.py -c vxquery-server/src/main/resources/conf/${CLUSTER}/${2}nodes.xml -a start

# wait for cluster to finish setting up
sleep 5

export JAVA_OPTS="$JAVA_OPTS -server -Xmx7G -Djava.util.logging.config.file=./vxquery-benchmark/src/main/resources/noaa-ghcn-daily/scripts/testing_logging.properties"

# One log file per query, timestamped, written next to the query tree
# (queries/ mirrored into query_logs/).
for j in $(find ${1} -name '*q??.xq')
do
    # Only work with i nodes.
    if [[ "${j}" =~ "${2}nodes" ]]
    then
        # Only run for specified queries.
        if [ -z "${4}" ] || [[ "${j}" =~ "${4}" ]]
        then
            date
            echo "Running query: ${j}"
            log_file="$(basename ${j}).$(date +%Y%m%d%H%M).log"
            log_base_path=$(dirname ${j/queries/query_logs})
            mkdir -p ${log_base_path}
            time sh ./vxquery-cli/target/appassembler/bin/vxq ${j} ${3} -timing -showquery -showoet -showrp -frame-size ${FRAME_SIZE} -buffer-size ${BUFFER_SIZE} -join-hash-size ${JOIN_HASH_SIZE} -repeatexec ${REPEAT} -timing-ignore-queries 0 > ${log_base_path}/${log_file} 2>&1
            echo "\nBuffer Size: ${BUFFER_SIZE}" >> ${log_base_path}/${log_file}
            echo "\nFrame Size: ${FRAME_SIZE}" >> ${log_base_path}/${log_file}
            echo "\nJoin Hash Size: ${JOIN_HASH_SIZE}" >> ${log_base_path}/${log_file}
        fi;
    fi;
done

# Stop cluster.
python vxquery-server/src/main/resources/scripts/cluster_cli.py -c vxquery-server/src/main/resources/conf/${CLUSTER}/${2}nodes.xml -a stop
sleep 5
python vxquery-server/src/main/resources/scripts/cluster_cli.py -c vxquery-server/src/main/resources/conf/${CLUSTER}/${2}nodes.xml -a kill
# Notify by e-mail when a mail client is available on this machine.
# Fix: the original tested `which programname` — a leftover placeholder
# that never succeeds, so the notification branch was unreachable. The
# script actually sends with /bin/mail, so probe for `mail`.
if which mail >/dev/null;
then
    echo "Sending out e-mail notification."
    SUBJECT="Benchmark Cluster Tests Finished"
    EMAIL="ecarm002@ucr.edu"
    /bin/mail -s "${SUBJECT}" "${EMAIL}" <<EOM
Completed all tests in folder ${1} for a ${2} node cluster using ${HOSTNAME}.
EOM
else
    echo "No mail command to use."
fi;
|
import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
import { Document } from 'mongoose';
import { User } from '../../users/schemas/user.schema';
import { Payment, PaymentSchema } from '../schemas/payment.schema';
import { LiftBooking, LiftBookingSchema } from '../../lift/schemas/liftBooking.schema';
import * as mongoose from 'mongoose';
// Mongoose document type for the History schema below.
export type HistoryDocument = History & Document;

// A single credit or debit entry, optionally linked to the payment or
// lift booking that produced it.
@Schema()
export class History {
  // Direction of the movement; restricted to the two enum values.
  @Prop({
    type: String,
    required: true,
    enum: ['credit','debit'],
  })
  type: string;

  // Associated payment (ObjectId ref, populated on demand).
  @Prop({ type: mongoose.Schema.Types.ObjectId, ref: 'Payment' })
  payment_id: Payment;

  // Associated lift booking (ObjectId ref, populated on demand).
  @Prop({ type: mongoose.Schema.Types.ObjectId, ref: 'LiftBooking' })
  booking_id: LiftBooking;

  // Creation time, stored as a string.
  // NOTE(review): a Date type would be more conventional — confirm callers.
  @Prop()
  created_at: string;
}

export const HistorySchema = SchemaFactory.createForClass(History);
|
const express = require("express");

// In-memory fixture returned by the API's root endpoint.
const people = [
  { id: 1, name: "Alice", age: 25 },
  { id: 2, name: "Bob", age: 30 },
  { id: 3, name: "Charlie", age: 28 },
];

const app = express();

// GET / -> the whole fixture list as JSON.
app.get("/", (request, response) => {
  response.json(people);
});

app.listen(3000, () => {
  console.log("Server running on port 3000");
});
|
"""
We want... a shrubbery!
"""
# Fix: the Python 2 print statement is a syntax error under Python 3.
# The function-call form prints identically on both 2 and 3.
print("We want... a shrubbery!")
|
import React, {
Component,
PropTypes,
} from 'react';
import ReactDOM from 'react-dom';
let _MediumEditor;
// medium-editor touches `document` at require time, so only load it when a
// DOM exists (guards against server-side rendering).
if (typeof document !== 'undefined') {
  _MediumEditor = require('medium-editor');
}

/**
 * Thin React wrapper around the medium-editor library.
 * Renders `tag` as the editable root, forwards edits via `onChange`, and
 * re-sets the editor content from `text` when `flushEditorDOM` is true.
 */
export default class MediumEditor extends Component {
  static propTypes = {
    tag: PropTypes.string,
    text: PropTypes.string,
    options: PropTypes.any,
    onChange: PropTypes.func,
    flushEditorDOM: PropTypes.bool,
  };

  static defaultProps = {
    tag: 'div',
    text: '',
    onChange: () => {},
  };

  componentDidMount = () => {
    // Attach medium-editor to this component's rendered DOM node and relay
    // every editable change to the onChange prop as raw innerHTML.
    const dom = ReactDOM.findDOMNode(this);
    this.medium = new _MediumEditor(dom, this.props.options);
    this.medium.subscribe('editableInput', () => {
      this.props.onChange(dom.innerHTML);
    });
    this.medium.setContent(this.props.text);
  };

  componentDidUpdate = () => {
    // Restore the caret position saved in render() so React re-renders do
    // not lose the user's selection.
    this.medium.restoreSelection();
  };

  componentWillUnmount = () => {
    this.medium.destroy();
  };

  render() {
    const tag = this.props.tag;
    const childProps = {
      ...this.props,
    };
    if (this.medium) {
      // Save the selection before React replaces the DOM content.
      this.medium.saveSelection();
      if (this.props.flushEditorDOM) {
        this.medium.setContent(this.props.text);
      }
    }
    return React.createElement(tag, childProps);
  }
}
|
#!/bin/bash
# Sync local working copies of each dependency into node_modules, rebuild the
# bundle, upload dist/ to the server, then restore the committed dist/ files.
# Each step is chained with && so the pipeline stops on the first failure.
cp -r ~/nfra/sync-browser-mocks/src/* node_modules/sync-browser-mocks/src/ \
  && cp -r ~/nfra/post-robot/src/* node_modules/post-robot/src/ \
  && cp -r ~/nfra/zoid/src/* node_modules/zoid/src/ \
  && cp -r ~/nfra/beaver-logger/src/* node_modules/beaver-logger/src/ \
  && cp -r ~/nfra/cross-domain-utils/src/* node_modules/cross-domain-utils/src/ \
  && cp -r ~/nfra/belter/src/* node_modules/belter/src/ \
  && cp -r ~/nfra/zalgo-promise/src/* node_modules/zalgo-promise/src/ \
  && cp -r ~/paypal-braintree-web-client/src/* node_modules/paypal-braintree-web-client/src/ \
  && npm run webpack-base \
  && rsync -avz dist/* root@bluesuncorp.co.uk:/var/www/html/icv4/dist/ \
  && git checkout dist
|
/**
 * Registry of enemy actors, keyed by actor ID for O(1) lookup.
 * Actors must expose a getID() method.
 */
class EnemyTrackingSystem {
  constructor() {
    this.enemyActors = new Map();
  }

  /** Register (or replace) an actor under its own ID. */
  addEnemyActor(actor) {
    const id = actor.getID();
    this.enemyActors.set(id, actor);
  }

  /** All registered actors, in insertion order. */
  getEnemyActors() {
    return [...this.enemyActors.values()];
  }

  /** Whether an actor with the given ID is registered. */
  hasEnemyActor(actorID) {
    return this.enemyActors.has(actorID);
  }
}
// Usage example: register two hunters, then query the tracker.
const enemyTrackingSystem = new EnemyTrackingSystem();
// Adding enemy actors
const hunter1 = { getID: () => 'hunter1' };
const hunter2 = { getID: () => 'hunter2' };
enemyTrackingSystem.addEnemyActor(hunter1);
enemyTrackingSystem.addEnemyActor(hunter2);
// Retrieving enemy actors
const allEnemyActors = enemyTrackingSystem.getEnemyActors();
// Checking if a specific enemy actor is present
const isHunter2Present = enemyTrackingSystem.hasEnemyActor('hunter2');
console.log(isHunter2Present); // Output: true
|
import request from 'supertest';
import { before, after } from 'mocha';
import { auth } from './auth';
/**
 * Navigates to the STAC catalog route for the given job ID
 *
 * @param app - The express application (typically this.frontend)
 * @param jobId - The job ID
 * @param linkType - optional STAC link type to pass as a query parameter
 * @returns An awaitable object that resolves to the request response
 */
export function stacCatalog(
  app: Express.Application,
  jobId: string,
  linkType?: string,
): request.Test {
  return request(app)
    .get(`/stac/${jobId}`)
    .query(linkType ? { linkType } : {});
}
/**
 * Navigates to the STAC item route for the given job ID and item index
 *
 * @param app - The express application (typically this.frontend)
 * @param jobId - The job ID
 * @param index - The index of the stac item in the stac catalog
 * @param linkType - optional STAC link type to pass as a query parameter
 * @returns An awaitable object that resolves to the request response
 */
export function stacItem(
  app: Express.Application,
  jobId: string,
  index: number,
  linkType?: string,
): request.Test {
  return request(app)
    .get(`/stac/${jobId}/${index}`)
    .query(linkType ? { linkType } : {});
}
/**
 * Adds before/after hooks to navigate to the STAC catalog route
 *
 * @param jobId - The job ID
 * @param username - optional user to simulate logging in as
 * @param linkType - optional STAC link type forwarded to stacCatalog
 */
export function hookStacCatalog(
  jobId: string,
  username?: string,
  linkType?: string,
): void {
  before(async function () {
    // Authenticate only when a username was supplied; the response is stored
    // on the mocha context so tests can inspect this.res.
    if (username) {
      this.res = await stacCatalog(this.frontend, jobId, linkType).use(auth({ username }));
    } else {
      this.res = await stacCatalog(this.frontend, jobId, linkType);
    }
  });
  after(function () {
    delete this.res;
  });
}
/**
 * Adds before/after hooks to navigate to the STAC item route
 *
 * @param jobId - The job ID
 * @param index - The item index
 * @param username - optional user to simulate logging in as
 * @param linkType - optional STAC link type forwarded to stacItem
 */
export function hookStacItem(
  jobId: string,
  index: number,
  username?: string,
  linkType?: string,
): void {
  before(async function () {
    // Authenticate only when a username was supplied; the response is stored
    // on the mocha context so tests can inspect this.res.
    if (username) {
      this.res = await stacItem(this.frontend, jobId, index, linkType).use(auth({ username }));
    } else {
      this.res = await stacItem(this.frontend, jobId, index, linkType);
    }
  });
  after(function () {
    delete this.res;
  });
}
|
import styled from "styled-components"
export const CommentsWrapper = styled.section`
margin: auto;
max-width: 70rem;
padding: 3rem 6.4rem 3rem;
iframe[src*="ads-iframe"] {
display: none;
}
#disqus_thread {
a {
color: --highlight !important;
}
}
`
// Heading displayed above the comments thread.
export const CommentsTitle = styled.h2`
  color: var(--white);
  font-size: 2.1rem;
  font-weight: 700;
  padding-bottom: 2rem;
`
|
/// Applies every property in `configuration` to a copy of its sublayout and
/// returns the modified copy. How each property is realized depends on the
/// UI framework backing `Sublayout`.
func applyConfiguration(configuration: Configuration) -> Sublayout {
    var result: Sublayout = configuration.sublayout
    for property in configuration.properties {
        switch property {
        case .backgroundColor(let color):
            result.applyBackgroundColor(color)
        case .alignment(let alignment):
            result.applyAlignment(alignment)
        case .spacing(let value):
            result.applySpacing(value)
        }
    }
    return result
}
|
#!/bin/bash
# Downloads new recordings from a BlackVue dashcam over HTTP, logging progress
# to /tmp/blackvue.log. A PID file prevents overlapping runs.
timestamp=$(date +"%Y-%m-%d %H:%M:%S")
echo $timestamp ": Start script." >> /tmp/blackvue.log
pid_file="/tmp/blackvue.pid"
# Remove a stale PID file older than 2 days (e.g. left behind by a crash).
/opt/bin/find $pid_file -type f -mtime +2 -exec rm {} \;
if [ -f $pid_file ]; then
echo $timestamp ": PID file exists." >> /tmp/blackvue.log
exit 0
fi
touch $pid_file
cd /share/MD0_DATA/Recordings/blackvue/
# Camera address placeholder; re matches event/manual clips, re2 matches
# parking/normal/manual/event clips (used for the surrounding recordings).
IPADDRESS="BLACKVUE_IPADRESS_HERE"
re="([0-9]+_[0-9]+_[E,M])"
re2="([0-9]+_[0-9]+_[P,N,M,E])"
# Sort function
Sort()
{
# Print each argument on its own line and sort the result.
# Bug fix: `printf '%s\n' "$@"` preserves each argument intact, whereas the
# original unquoted `for item in $@` loop re-split arguments on whitespace.
printf '%s\n' "$@" | sort
}
FILENAMES=()
# These variables are for downloading before event recordings.
file_previous_1=""
file_previous_2=""
timestamp=$(date +"%Y-%m-%d %H:%M:%S")
echo $timestamp ": Running Curl." >> /tmp/blackvue.log
# Fetch the camera's file list and strip the protocol prefixes/suffixes so
# only the base clip names remain.
for file in `curl --retry 5 --retry-delay 30 -s http://$IPADDRESS/blackvue_vod.cgi | sed 's/^n://' | sed 's/F.mp4//' | sed 's/R.mp4//' | sed 's/,s:1000000//' | sed $'s/\r//'`;
do
echo $timestamp ": Filename: "$file >> /tmp/blackvue.log
FILENAMES+=($file)
done
timestamp=$(date +"%Y-%m-%d %H:%M:%S")
echo $timestamp ": Sorting filenames." >> /tmp/blackvue.log
SORTEDFILENAMES=$(Sort ${FILENAMES[@]})
# echo $timestamp ": Sorted filenames: " SORTEDFILENAMES[@] >> /tmp/blackvue.log
timestamp=$(date +"%Y-%m-%d %H:%M:%S")
echo $timestamp ": Looping files for download." >> /tmp/blackvue.log
# Walk the sorted list; when an event/manual clip is found, also fetch the
# two clips immediately before it (the "before event" recordings).
for dlfile in ${SORTEDFILENAMES[@]};
do
file_previous_2=$file_previous_1
file_previous_1=$dlfile
timestamp=$(date +"%Y-%m-%d %H:%M:%S")
echo $timestamp ": Checking: "$dlfile >> /tmp/blackvue.log
# echo $file_previous1
# echo $file_previous2
# NOTE(review): duplicate of the echo two lines above.
echo $timestamp ": Checking: "$dlfile >> /tmp/blackvue.log
if [[ $dlfile =~ $re ]]; then
echo $timestamp ": Downloading: "$dlfile >> /tmp/blackvue.log
# -c resumes partial video downloads; -nc skips metadata already fetched.
wget -c http://$IPADDRESS$dlfile\F.mp4
wget -c http://$IPADDRESS$dlfile\R.mp4
wget -nc http://$IPADDRESS$dlfile\F.thm
wget -nc http://$IPADDRESS$dlfile\R.thm
wget -nc http://$IPADDRESS$dlfile.gps
wget -nc http://$IPADDRESS$dlfile.3gf
if [[ $file_previous_2 =~ $re2 ]]; then
echo $timestamp ": Downloading: "$file_previous_2 >> /tmp/blackvue.log
wget -c http://$IPADDRESS$file_previous_2\F.mp4
wget -c http://$IPADDRESS$file_previous_2\R.mp4
wget -nc http://$IPADDRESS$file_previous_2\F.thm
wget -nc http://$IPADDRESS$file_previous_2\R.thm
wget -nc http://$IPADDRESS$file_previous_2.gps
wget -nc http://$IPADDRESS$file_previous_2.3gf
fi
if [[ $file_previous_1 =~ $re2 ]]; then
echo $timestamp ": Downloading: "$file_previous_1 >> /tmp/blackvue.log
wget -c http://$IPADDRESS$file_previous_1\F.mp4
wget -c http://$IPADDRESS$file_previous_1\R.mp4
wget -nc http://$IPADDRESS$file_previous_1\F.thm
wget -nc http://$IPADDRESS$file_previous_1\R.thm
wget -nc http://$IPADDRESS$file_previous_1.gps
wget -nc http://$IPADDRESS$file_previous_1.3gf
fi
fi
done
# Release the PID lock so the next scheduled run can proceed.
/bin/rm -f $pid_file
timestamp=$(date +"%Y-%m-%d %H:%M:%S")
echo $timestamp ": End script." >> /tmp/blackvue.log
|
/**
 * Update the teacher's information in the database.
 *
 * @param int $id The ID of the teacher to be updated.
 * @param string $name The updated name of the teacher.
 * @param string $email The updated email of the teacher.
 * @return string A success message if the update is successful, or an error message if the input data is invalid or if the update operation fails.
 */
function updateTeacher($id, $name, $email) {
    // Guard clause: reject missing or malformed input before touching the DB.
    $emailIsValid = filter_var($email, FILTER_VALIDATE_EMAIL);
    if (empty($name) || empty($email) || !$emailIsValid) {
        return "Invalid input data. Please provide a valid name and email.";
    }

    // Perform database update
    // Replace the following code with your database update logic
    $success = true; // Assume the update operation is successful

    return $success
        ? "Teacher information updated successfully."
        : "Failed to update teacher information. Please try again.";
}
|
def should_install(instance):
    """Return True when the instance provides any sources to install.

    Either generated sources or regular sources qualify; both getters are
    treated as truthy collections.
    """
    has_generated = bool(instance.get_generated_sources())
    has_regular = bool(instance.get_sources())
    return has_generated or has_regular
|
<filename>u-boot/drivers/video/sunxi/disp2/tv/de_tve_sun8iw11.h
#ifndef __DE_TVE_SUN8IW11_H__
#define __DE_TVE_SUN8IW11_H__

/* Per-channel TV-encoder register access: all macros index tve_reg_base by
 * channel `sel` and perform volatile 32-bit MMIO reads/writes. */
#define TVE_GET_REG_BASE(sel) (tve_reg_base[sel])
#define TVE_WUINT32(sel,offset,value) (*((volatile u32 *)( TVE_GET_REG_BASE(sel) + (offset) ))=(value))
#define TVE_RUINT32(sel,offset) (*((volatile u32 *)( TVE_GET_REG_BASE(sel) + (offset) )))
#define TVE_SET_BIT(sel,offset,bit) (*((volatile u32 *)( TVE_GET_REG_BASE(sel) + (offset) )) |= (bit))
#define TVE_CLR_BIT(sel,offset,bit) (*((volatile u32 *)( TVE_GET_REG_BASE(sel) + (offset) )) &= (~(bit)))
/* Read-modify-write: clear mask bits `c`, then set bits `s`. */
#define TVE_INIT_BIT(sel,offset,c,s) (*((volatile u32 *)( TVE_GET_REG_BASE(sel) + (offset) )) = \
(((*(volatile u32 *)( TVE_GET_REG_BASE(sel) + (offset) )) & (~(c))) | (s)))

/* TVE "top" (shared) register block: same access patterns, single base. */
#define TVE_TOP_GET_REG_BASE (tve_top_reg_base[0])
#define TVE_TOP_WUINT32(offset,value) (*((volatile u32 *)( TVE_TOP_GET_REG_BASE + (offset) ))=(value))
#define TVE_TOP_RUINT32(offset) (*((volatile u32 *)( TVE_TOP_GET_REG_BASE + (offset) )))
#define TVE_TOP_SET_BIT(offset,bit) (*((volatile u32 *)( TVE_TOP_GET_REG_BASE + (offset) )) |= (bit))
#define TVE_TOP_CLR_BIT(offset,bit) (*((volatile u32 *)( TVE_TOP_GET_REG_BASE + (offset) )) &= (~(bit)))
#define TVE_TOP_INIT_BIT(offset,c,s) (*((volatile u32 *)( TVE_TOP_GET_REG_BASE + (offset) )) = \
(((*(volatile u32 *)( TVE_TOP_GET_REG_BASE + (offset) )) & (~(c))) | (s)))

/*
enum tv_mode {
CVBS,
YPBPR,
VGA,
};
*/

/* Composite-video standards accepted by tve_low_set_cvbs_mode(). */
enum disp_cvbs_mode{
TV_NTSC = 0,
TV_PAL = 1,
};

/* Low-level TV-encoder driver entry points; `sel` selects the TVE channel.
 * Return s32 status codes (semantics defined in the implementation). */
s32 tve_low_set_reg_base(u32 sel,void __iomem * address);
s32 tve_low_dac_init(u32 dac_no,u32 cali,s32 offset);
s32 tve_low_dac_map(u32 sel, u32 *dac_no, u32 num);
s32 tve_low_dac_enable(u32 sel);
s32 tve_low_dac_disable(u32 sel);
s32 tve_low_open(u32 sel);
s32 tve_low_close(u32 sel);
s32 tve_low_set_tv_mode(u32 sel, u32 is_cvbs);
s32 tve_low_set_ypbpr_mode(u32 sel, enum disp_tv_mode mode);
s32 tve_low_set_vga_mode(u32 sel);
s32 tve_low_set_cvbs_mode(u32 sel, u8 mode);
s32 tve_low_get_dac_status(u32 sel);
s32 tve_low_dac_autocheck_enable(u32 sel, u8 index);
s32 tve_low_dac_autocheck_disable(u32 sel,u8 index);
s32 tve_low_enhance(u32 sel, u32 mode);
#endif
|
var peer = null,
comp = null,
player,
ship={type:"Sub",
btn:null,
col:"gray"},
config={size:0,
vcalc:0,
gameon:true,
plyrone:false,
p2ready:false,
opp:"Player 2"
};
document.getElementById("plselect").onclick = function(evt) {
//get user's name
if (evt.target.className === "plbutton") {
var val = document.getElementById("nm").value;
var nm = val == "" ? "Anonymous" : val;
player = new makeFleet(nm);
document.getElementById("p1").innerHTML = player.name;
if (evt.target.id === "plsub") {
makeConn();
} else {
//playing vs. computer
document.getElementById("loading").style.display = "none";
config.plyrone = true;
showSizeSel();
}
}
}
function makeConn(){
//playing vs. player -set up the peer connection
peer = new Peer({
debug: 2,
serialization: "json"
});
peer.conn = null;
peer.on('open', function(id) {
// Show this peer's ID
document.getElementById("welcome").style.display = "none";
document.getElementById("login").style.display="block";
document.getElementById("welc").innerHTML="Welcome, " + player.name;
document.getElementById("myid").innerHTML=peer.id;
});
// Await connections from others
peer.on('connection', connect);
peer.on('error', function(err) {
if (err.type == 'disconnected') {
console.log("disconnected")
}
if (err.type == 'network') {
console.log("server. refresh")
}
if (err.type == 'browser-incompatible') {
killSwitch();
}
})
if (!util.supports.data){
killSwitch();
}
}
document.getElementById("gotid").onclick=function(){
// the other player sent us their ID
document.getElementById("login").style.display = "none";
var theval=document.getElementById("oppid").value;
if(theval!=""){
config.plyrone = true;
dirconnect(theval);
}
}
// Abort setup when the browser cannot do WebRTC data channels.
function killSwitch() {
  var loader = document.getElementById("loader");
  loader.style.display = "none";
  alert("Your browser does not support the WebRTC protocol. \nWe suggest using Chrome or Firefox.");
}
function connect(c) {
// Handle a connection object.
peer.conn = c;
sendIt({
"type": "conn",
"name": player.name
});
if(!config.plyrone){
}
// Handle a chat connection.
if (c.label === 'chat') {
// Just to fix the connection receiver to successfully send a message to the connection requester
c.open = true;
c.on('data', function(data) {
getData(data);
});
c.on('close', function() {
alert("The connection has been closed.");
peer.conn=null;
});
}
}
function dirconnect(id) {
  // Connect to a peer by its ID (player 1 initiating). Only connects when no
  // connection exists yet; then proceeds to the grid-size dialog.
  var requestedPeer = id;
  if (!peer.conn) {
    var c = peer.connect(requestedPeer, {
      label: 'chat',
      serialization: 'json',
      metadata: {
        message: 'hi i want to chat with you!'
      }
    });
    c.on('open', function() {
      connect(c);
    });
    c.on('error', function(err) {
      postChat("sys", err)
    });
  }
  showSizeSel();
}
// Make sure things clean up properly.
window.onunload = window.onbeforeunload = function(e) {
  // Destroy the peer so the other side gets a close event.
  if (!!peer && !peer.destroyed) {
    peer.destroy();
  }
};

function showSizeSel() {
  // show the grid size selection dialog
  document.getElementById("welcome").style.display = "none";
  document.getElementById("sizesel").style.display = "block";
}
document.getElementById("runbtn").onclick = runIt;
document.getElementById("gsize").onkeypress = function(evt){
if (event.keyCode === 13) {
runIt();
}
}
function makeBoard() {
config.gameon = true;
document.getElementById("status").style.visibility = "visible";
document.getElementById("status").innerHTML="Please place your ships..."
document.getElementById("outer").style.display = "block";
document.getElementById("wrapper").style.display = "grid";
document.getElementById("bombs").style.display = "none";
document.getElementById("shipyd").style.display = "block";
document.getElementById("blanket").style.display = "none";
for (a in player.fleet) {
// make buttons for placing ships
var btn = document.createElement("input");
btn.type = "button";
btn.value = "Place " + a;
btn.className = "placer";
document.getElementById("shipbtns").appendChild(btn);
document.getElementById("shipbtns").appendChild(document.createElement("hr"))
}
document.getElementById("ships").innerHTML="";
document.getElementById("bombs").innerHTML="";
setUp("ships");
setUp("bombs");
}
// Object factory for player & comp fleets: tracks each ship's size, hit count
// and placed coordinates, plus per-player bookkeeping. Used with `new`.
function makeFleet(nm) {
  this.fleet = {
    Carrier: {
      hits: 0,
      slots: 5,
      coords: []
    },
    Battleship: {
      hits: 0,
      slots: 4,
      coords: []
    },
    Cruiser: {
      hits: 0,
      slots: 3,
      coords: []
    },
    Sub: {
      hits: 0,
      slots: 3,
      coords: []
    },
    Destroyer: {
      hits: 0,
      slots: 2,
      coords: []
    }
  }; // bug fix: was terminated with a comma (comma operator), now a statement
  this.shotstaken = [];  // coordinates already fired at (comp AI bookkeeping)
  this.placed = 0;       // ships placed so far
  this.toplace = Object.keys(this.fleet).length;
  this.sunk = 0;         // ships of this fleet that have been sunk
  this.name = nm;        // display name
}
function runIt() {
var min=10, max=50;
config.size = Number(document.getElementById("gsize").value);
if (config.size > max) {
config.size = max;
}
if (config.size < min) {
config.size = min;
}
config.vcalc = 30 / config.size; // helper calc. for getting widths right
makeBoard();
if (!peer) {
//make computer player object
comp = new makeFleet("Computer");
config.opp = comp.name;
document.getElementById("p2").innerHTML = config.opp;
comp.hits = [];
comp.dirs = ["e", "n", "w", "s"];
comp.dir = "e";
compShips();
} else {
// send config.size to player 2 for their setup
peer.conn.send({
type: "gsize",
gs: config.size
})
}
}
function compShips() {
  // make computer fleet: keep trying random placements until each ship fits.
  for (a in comp.fleet) {
    var boat = a;
    var size = comp.fleet[boat].slots;
    var tmp = trySpot(size, comp.fleet);
    while (tmp.length == 0) {
      tmp = trySpot(size, comp.fleet);
    }
    comp.fleet[boat].coords = tmp;
  }
}

function trySpot(size, fleet) {
  //randomly generate coordinates to try to place ships
  var ori = ["v", "h"][Math.round(Math.random())],
    poss = randCoord(),
    col = poss[0],
    num = Number(poss.slice(1));
  return canPlace(col, num, size, ori, fleet);
}

function randCoord() {
  // get a random coordinate from the available grid size,
  // e.g. "C7": letter = column, number = row (1-based).
  var clet = String.fromCharCode(Math.floor(Math.random() * config.size) + 65),
    cnum = Math.floor(Math.random() * config.size) + 1;
  return clet + cnum;
}
function setUp(dv) {
  // Build one (size+1) x (size+1) CSS grid inside container `dv`: the first
  // row/column hold coordinate labels, the rest are playable cells whose ids
  // encode their grid reference (e.g. "shipsC7").
  document.getElementById(dv).style.gridTemplateColumns = "repeat(" + (config.size + 1) + "," + config.vcalc + "vw)";
  document.getElementById(dv).style.gridTemplateRows = "repeat(" + (config.size + 1) + "," + config.vcalc + "vw)";
  var grid = Math.pow((config.size + 1), 2); //caclulate number of divs required to make grid
  for (var i = 0; i < grid; i++) {
    var mydiv = document.createElement("div");
    if (i == 0) {
      //top left corner - no content
      mydiv.className = "coord";
    }
    if (i > 0 && i < config.size + 1) {
      // top row - letters
      mydiv.className = "coord";
      mydiv.innerHTML = String.fromCharCode(i + 64);
    }
    if (i % (config.size + 1) == 0 && i > 0) {
      //left column - numbers
      mydiv.className = "coord";
      mydiv.innerHTML = i / (config.size + 1);
    }
    if (i > config.size && i % (config.size + 1) != 0) {
      // playing field
      var num = Math.floor(i / (config.size + 1));
      mydiv.className = "grid-item";
      if (dv === "bombs") {
        mydiv.style.cursor = "crosshair";
      }
      mydiv.id = dv + String.fromCharCode((i % (config.size + 1)) + 64) + num; //id's correspond to grid references
    }
    document.getElementById(dv).appendChild(mydiv)
  }
}
var rads = document.getElementsByName("orient"); //radio buttons to change between horizontal and vertical ship placement
for (var x = 0; x < rads.length; x++) {
rads[x].onchange = checkOr;
}
document.getElementById("shipyd").onclick = function(evt) {
//"place ship" button has been clicked - show helper div and move it with mouse movement
if (evt.target.className === "placer") {
document.getElementById("ships").style.pointerEvents="auto";
ship.btn = evt.target;
var offset = (config.vcalc / 2) * (document.documentElement.clientWidth / 100);
ship.type = ship.btn.value.replace("Place ", "");
var hlpr = document.getElementById("ship");
hlpr.style.display = "block";
hlpr.style.left = evt.pageX - offset + "px";
hlpr.style.top = evt.pageY - offset + "px";
checkOr();
document.onmousemove = function(evt) {
hlpr.style.left = evt.pageX - offset + "px";
hlpr.style.top = evt.pageY - offset + "px";
}
}
}
function checkOr() {
  // to switch helper div orientation between vertical and horizontal:
  // the long edge spans the ship's slot count, the short edge one cell.
  var hlpr = document.getElementById("ship"),
    x = config.vcalc + "vw",
    y = (player.fleet[ship.type].slots * config.vcalc) * .98 + "vw";
  if (document.getElementById("hori").checked) {
    hlpr.style.height = x;
    hlpr.style.width = y;
  } else {
    hlpr.style.height = y;
    hlpr.style.width = x;
  }
}
document.getElementById("ships").onclick = function(evt) {
// clicking on "ships" div to place ship
if (evt.target.className === "grid-item" && player.placed < player.toplace) {
var dtarg = evt.target;
var hlpr = document.getElementById("ship");
var col = dtarg.id[5]; // get coords of click location
var row = Number(dtarg.id.substr(6))
var size = player.fleet[ship.type].slots;
var ori = document.getElementById("hori").checked ? "h" : "v";
var spots = canPlace(col, row, size, ori, player.fleet); // check that ship can be placed there - returns 0 length if no
if (spots.length == 0) {
hlpr.style.backgroundColor = "red";
setTimeout(function() {
hlpr.style.backgroundColor = "gray";
}, 500)
} else {
for (var i = 0; i < size; i++) {
document.getElementById("ships" + spots[i]).style.backgroundColor = ship.col;
}
hlpr.style.display = "none"; //hide helper div
player.placed++;
ship.btn.disabled = true; //disable button corresponding to this ship
document.getElementById("ships").style.pointerEvents="none"; // disable clicks until new ship selected
player.fleet[ship.type].coords = spots; // store ship coordinates
document.onmousemove = function() {} // remove onmousemove listener
if (player.toplace == player.placed) {
// done placing ships, show right side game board
document.getElementById("bombs").style.display = "grid";
document.getElementById("shipyd").style.display = "none";
document.getElementById("bombs").style.pointerEvents = "none";
if (comp) {
document.getElementById("cheatwrap").style.visibility = "visible"; // show "cheat mode" checkbox
}
if (config.plyrone) {
if (config.p2ready || comp) {
// player 2 has already placed ships, or playing against computer, so ready to play
goPlayerOne();
} else {
document.getElementById("status").innerHTML = "Waiting for " + config.opp + " to place ships";
}
} else {
// player 2, so send msg to player 1 that ships have been placed
sendIt({
type: "p2ready",
deets: true
})
document.getElementById("status").innerHTML = config.opp + " to shoot. Please wait...";
document.getElementById("p2").className = "plyr on";
}
document.getElementById("players").style.visibility = "visible";
}
}
}
}
document.getElementById("cheat").onchange = function() {
// show comp fleet
var ischk = this.checked;
for (a in comp.fleet) {
var arr = comp.fleet[a].coords,
len = arr.length,
i = 0;
for (i; i < len; i++) {
document.getElementById("bombs" + arr[i]).style.border = ischk ? "dotted" : "1px solid";
}
}
}
function canPlace(col, row, size, ori, fleet) {
  // can a ship of this size be placed on that spot?
  // Returns the list of occupied coordinates, or [] when the ship would run
  // off the board or overlap a ship already in `fleet`.
  var spots = [],
    valid = true,
    colnum = col.charCodeAt() - 65;  // column letter -> 0-based index
  if (ori == "h") {
    if (colnum + size > config.size) {
      //too close to the side
      valid = false;
    }
    if (valid) {
      for (var i = 0; i < size; i++) {
        var grid = String.fromCharCode(colnum + i + 65) + row;
        spots.push(grid)
        if (isTaken(fleet, grid).isHit) {
          // overlaps with already-placed ships
          valid = false;
        }
      }
    }
  } else {
    if (row + size > config.size + 1) {
      //too close to the bottom
      valid = false;
    }
    if (valid) {
      for (var i = 0; i < size; i++) {
        var grid = col + (row + i);
        spots.push(grid)
        if (isTaken(fleet, grid).isHit) {
          // overlaps with already-placed ships
          valid = false;
        }
      }
    }
  }
  if (valid) {
    return spots;
  } else {
    return [];
  }
}
// Check whether any boat in fleet `flt` occupies coordinate `theid`.
// Used both for placement collision checks and for hit detection.
// Returns { isHit: boolean, bt: ship name or "" }.
function isTaken(flt, theid) {
  var boat = "",
    hit = false;
  // Bug fix: declare the loop variable — the original `for (a in flt)`
  // leaked `a` as an implicit global.
  for (var a in flt) {
    if (flt[a].coords.indexOf(theid) != -1) {
      hit = true;
      boat = a;
      break;
    }
  }
  return {
    isHit: hit,
    bt: boat
  }
}
document.getElementById("bombs").onclick = function(evt) {
if (evt.target.style.cursor === "crosshair" && config.gameon) {
evt.target.style.cursor = "no-drop";
var theid = evt.target.id.replace("bombs", "");
togglePlayer(false);
if (comp) {
// playing vs computer - check if hit, then computer shoots after waiting a bit
getHit(theid, "pl1", comp);
var wait = Math.random() * 2000;
setTimeout(compShot, wait);
} else {
// drop bomb - get coordinates of click and send them
sendIt({
type: "shot",
coords: theid
})
}
}
}
function compShot() {
var aimshot=true; //not a random guess
if (config.gameon) {
if (comp.hits.length == 0) {
var poss = randCoord();
aimshot=false;
} else {
if (!comp.dir) {
changeDir();
}
if (comp.poss[comp.dir].length == 0) {
// dead end - change direction
changeDir();
compShot();
return;
}
var poss = comp.poss[comp.dir].shift();
}
if (comp.shotstaken.indexOf(poss) == -1) {
//hasn't been tried before
comp.shotstaken.push(poss);
var shot = getHit(poss, "comp", player);
if (shot.res) {
if (comp.hits.length == 0) {
// unique hit - store as pivot
calcPoss(poss)
}
comp.hits.push(poss); // store successful hits
} else {
if(aimshot){
comp.poss[comp.dir].length = 0;
}
}
if (shot.snk) {
for (var i = 0; i < shot.slts.length; i++) {
if (comp.hits.includes(shot.slts[i])) {
// remove sunk ship's coords from hits list
comp.hits.splice(comp.hits.indexOf(shot.slts[i]), 1)
}
}
if (comp.hits.length > 0) {
// if more pivots available, calculate the likely hits for first one
calcPoss(comp.hits[0])
}
comp.dir = null;
togglePlayer(true);
return;
}
togglePlayer(true);
} else {
if(aimshot){
// spot has already been tried - dead end
comp.poss[comp.dir].length = 0;
}
compShot();
}
}
}
function changeDir() {
if (!comp.dir) {
// random direction
comp.dir = comp.dirs[Math.floor(Math.random() * comp.dirs.length)];
} else {
// pivot 180 degrees
comp.dir = comp.dirs[(comp.dirs.indexOf(comp.dir) + 2) % 4];
if (comp.poss[comp.dir].length == 0){
// pivot 90 degrees
comp.dir = comp.dirs[(comp.dirs.indexOf(comp.dir) + 1) % 4];
}
}
}
function calcPoss(anc) {
var we = [],
ns = [],
ltr = anc[0],
num = Number(anc.substring(1));
for (var i = 0; i < config.size; i++) {
ns.push(ltr + (i + 1)) // get all coordinates in the column of the pivot point
we.push(String.fromCharCode(65 + i) + num); // get all coordinates in the row of the pivot point
}
comp.poss = {
/* store coordinates in ascending order from hit point, so for H5:
E5
F5
G5
H8,H7,H6 H4,H3,H2
I5
J5
K5
*/
n: ns.splice(0, ns.indexOf(anc)).reverse(),
s: ns.splice(ns.indexOf(anc) + 1),
w: we.splice(0, we.indexOf(anc)).reverse(),
e: we.splice(we.indexOf(anc) + 1)
}
return comp.poss;
}
function sendIt(data) {
peer.conn.send(data);
}
function postChat(plr, msg) {
// for posting directly to own chat
var thediv = document.createElement("div"),
me = player.name,
cls = {
sys: "red"
}
cls[me]="black";
cls[config.opp]="blue";
thediv.style.color = cls[plr];
thediv.appendChild(document.createTextNode(plr + ": " + msg));
document.getElementById("cbox").appendChild(thediv);
}
function enterChat() {
// for sending chat to other player
var txt = document.getElementById("msgs").value,
theobj = {
sndr: player.name,
type: "chat",
msg: txt
}
postChat(player.name, txt);
if (peer.conn) {
sendIt(theobj);
}
document.getElementById("msgs").value = "";
}
document.getElementById("msgbtn").onclick = enterChat;
document.getElementById("msgs").onkeypress = function(evt){
if (event.keyCode === 13) {
enterChat();
}
}
function getHit(theid, who, targ) {
  // Resolve a shot at coordinate `theid` against fleet `targ`.
  // `who` identifies the shooter ("pl1", "pl2" or "comp") and controls which
  // grid gets the marker dot. Handles sink and game-over bookkeeping, and
  // relays results to the other player in multiplayer mode.
  var thediv = document.getElementById("ships" + theid),
    flt = targ.fleet,
    res = isTaken(flt, theid),
    hit = res.isHit,
    sunk = false,
    slots = [];
  if (hit) {
    flt[res.bt].hits++;
    if (flt[res.bt].hits === flt[res.bt].slots) {
      // ship has been sunk
      var txt = targ.name + "'s " + res.bt + " has been sunk!";
      sunk = true;
      slots = flt[res.bt].coords;
      postChat("sys", txt);
      targ.sunk++;
      if (targ.sunk == player.toplace) {
        // all ships have been sunk
        var txt = targ.name + " loses!"
        if (peer) {
          sendIt({ // send notification to other player
            type: "chat",
            sndr: "sys",
            msg: txt
          })
          sendIt({
            type: "end",
            lost: true
          })
        }
        postChat("sys", txt) // post direct to chat if playing vs comp
        endGame(targ.name == player.name); // did the player lose or was it the comp?
        config.gameon = false;
      }
    }
  }
  if (who != "pl1") {
    // other player fired
    placeDot("ships" + theid, hit);
  } else {
    // this player fired
    placeDot("bombs" + theid, hit);
  }
  if (peer) {
    sendIt({ // tell other player if hit or miss
      type: "res",
      coords: "bombs" + theid,
      hit: hit
    })
  }
  if (who == 'comp') {
    /* hack for letting comp calculate where the sunk boat was
    we could make this more 'human' by getting the first and last comp.hits,
    calculating orientation based on if the letters in the
    coords are the same and counting back from the last hit by
    the amount of slots in the ship, but really...
    */
    return {
      id: theid,
      res: hit,
      snk: sunk,
      slts: slots
    }
  }
}
function endGame(ilose) {
// show end dialog
document.getElementById("connmess").style.visibility = "hidden";
document.getElementById("status").style.visibility = "hidden";
document.getElementById("loading").style.display = "none";
document.getElementById("sizesel").style.display = "none";
document.getElementById("blanket").style.display = "block";
document.getElementById("winlose").style.display = "block";
document.getElementById("cbox").innerHTML="";
document.getElementById("shipbtns").innerHTML="";
document.getElementById("wrapper").style.display = "none";
document.getElementById("players").style.visibility = "hidden";
document.getElementById("cheatwrap").style.visibility = "hidden";
document.getElementById("cheat").checked = false;
document.getElementById("endmess").innerHTML = ilose ? "<p>" + config.opp + " wins. </p><p>Please don't be sad. It's just a game</p>" : "<p>" + player.name + " wins! Congratulations. </p><p>Please don't gloat. Be like Mike</p>";
var me = player.name;
player = new makeFleet(me);
}
document.getElementById("endbtns").onclick = function(evt) {
if (evt.target.id === "norepl") {
if(peer){
peer.conn.close();
}
location.href="https://github.com/lvidgen/battleships";
} else {
document.getElementById("winlose").style.display = "none";
document.getElementById("replay").style.display = "block";
document.getElementById("rewelc").innerHTML="Welcome back, " + player.name +"!";
}
}
document.getElementById("replselect").onclick = function(evt) {
config.p2ready=false;
document.getElementById("replay").style.display = "none";
if (evt.target.id === "replsub") {
if (comp){
makeConn();
comp=null;
} else {
if(config.plyrone){
waitConfig();
} else {
showSizeSel();
}
config.plyrone = !config.plyrone;
}
} else {
// playing vs comp.
// comp = new makeFleet("Computer");
if(!comp){
peer.conn.close();
peer=null;
}
showSizeSel();
}
}
// Drop a marker dot on the given cell: red for a hit, white for a miss.
function placeDot(idstr, hit) {
  const dot = document.createElement("div");
  dot.className = "dot";
  dot.style.backgroundColor = hit ? "red" : "white";
  document.getElementById(idstr).appendChild(dot);
}
function waitConfig(){
document.getElementById("plone").innerHTML = "Receiving game configuration from "+config.opp+"...";
document.getElementById("loading").style.display = "block";
}
function goPlayerOne() {
// setup for player 1
if (player.toplace == player.placed) {
togglePlayer(true);
}
}
function togglePlayer(isme) {
// toggle player indicator and text
var statid = document.getElementById("status"),
bdiv = document.getElementById("bombs"),
onstr = "plyr on",
offstr = "plyr";
bdiv.style.pointerEvents = isme ? "auto" : "none";
document.getElementById("p1").className = isme ? onstr : offstr;
document.getElementById("p2").className = isme ? offstr : onstr;
statid.innerHTML = isme ? player.name + " to shoot. Fire away!" : config.opp + " to shoot. Please wait...";
}
function getData(data) {
  // data received... — central dispatcher for all peer messages, keyed on
  // the message's `type` field.
  switch (data.type) {
    case "conn":
      // connection established, exchange names
      config.opp = data.name;
      document.getElementById("connmess").innerHTML = config.opp + " has connected";
      document.getElementById("connmess").style.visibility = "visible";
      document.getElementById("p2").innerHTML = config.opp;
      if (!config.plyrone) {
        // player 2 replies with its own name, then waits for the grid size
        document.getElementById("login").style.display = "none";
        peer.conn.send({
          type: "conn",
          name: player.name
        });
        waitConfig();
      }
      break;
    case "gsize":
      // player 1 has sent grid size info to player 2
      config.size = data.gs;
      config.vcalc = 30 / config.size;
      makeBoard();
      break;
    case "chat": // chat
      postChat(data.sndr, data.msg)
      break;
    case "p2ready":
      // player 2 has finished placing ships
      config.p2ready = true;
      goPlayerOne();
      break;
    case "shot":
      // other player has sent shot
      getHit(data.coords, "pl2", player);
      togglePlayer(true);
      break;
    case "res":
      // other player has sent results of this player's shot
      placeDot(data.coords, data.hit);
      break;
    case "end":
      // player 2 has lost
      endGame(false);
      config.gameon = false;
      break;
  }
}
|
/******************************************************************************
* Copyright 2011 Kitware Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "mdsSQLiteDatabase.h"
#include <stdio.h>
#include <string.h>
#include <sstream>
#include <iostream>
namespace mds
{
/** Constructor: initializes all members to an idle, disconnected state. */
SQLiteDatabase::SQLiteDatabase()
{
  m_Database = 0;           // sqlite3 connection handle (none yet)
  m_Statement = 0;          // current prepared statement (none yet)
  m_Query = "";
  m_ErrorMessage = "";
  m_InitialFetch = false;   // set by Execute(), consumed by GetNextRow()
  m_Active = false;         // true while a query is executing
  m_InitialFetchResult = SQLITE_DONE;
  m_Mutex = new QMutex();   // serializes Open()..Close() sections
}
/** Destructor.
  * Releases the mutex object. NOTE(review): the database handle and any
  * prepared statement are NOT closed here — callers are expected to call
  * Close() first; confirm against call sites. */
SQLiteDatabase::~SQLiteDatabase()
{
  delete m_Mutex;
}
/** Open a database connection.
  * Acquires the instance mutex; on success the mutex stays held until
  * Close() releases it. Returns false (with m_ErrorMessage set) on failure. */
bool SQLiteDatabase::Open(const char* dbname)
{
  this->m_Mutex->lock();
  int result = sqlite3_open(dbname, &m_Database);
  if( result )
    {
    // Capture the message before closing the handle (per the SQLite API,
    // sqlite3_errmsg must be called on a still-open connection).
    m_ErrorMessage = sqlite3_errmsg(m_Database);
    sqlite3_close(m_Database);
    // Bug fix: the mutex was previously left locked on this failure path,
    // deadlocking any subsequent Open()/Close() on the same instance.
    this->m_Mutex->unlock();
    return false;
    }
  return true;
}
/** Close the current database.
  * Finalizes any outstanding statement, closes the connection, and releases
  * the mutex acquired by Open(). Returns false when sqlite3_close() fails;
  * the mutex is released on both paths. */
bool SQLiteDatabase::Close()
{
  // Finalize any statements;
  sqlite3_finalize(m_Statement);
  m_Statement = NULL;
  int result = sqlite3_close(m_Database);
  if( result )
    {
    m_ErrorMessage = sqlite3_errmsg(m_Database);
    this->m_Mutex->unlock();
    return false;
    }
  this->m_Mutex->unlock();
  return true;
}
/** Prepare `query` for execution.
  * Any previously prepared statement is finalized first. On success the
  * compiled statement is stored in m_Statement; it is not executed yet. */
bool SQLiteDatabase::SetQuery(const char* query)
{
  if( m_Statement )
    {
    // Release the previous statement before compiling a new one.
    int finalizeStatus = sqlite3_finalize(m_Statement);
    m_Statement = NULL;
    if( finalizeStatus != SQLITE_OK )
      {
      m_ErrorMessage = "SetQuery(): Finalize returned unexpected code ";
      return false;
      }
    }
  // Receives sqlite3_prepare_v2's pzTail (unused remainder of the SQL text);
  // ignored here because a single statement is expected.
  const char *unused_statement;
  int prepareStatus = sqlite3_prepare_v2(m_Database,
                                         query,
                                         static_cast<int>(strlen(query) ),
                                         &m_Statement,
                                         &unused_statement);
  if( prepareStatus != SQLITE_OK )
    {
    m_ErrorMessage = "Cannot prepare transaction";
    return false;
    }
  m_ErrorMessage = "";
  return true;
}
/** Execute the statement prepared by SetQuery().
  * Performs the first sqlite3_step(); its result is cached in
  * m_InitialFetchResult so the first GetNextRow() call can replay it. */
bool SQLiteDatabase::Execute()
{
  if( m_Statement == NULL )
    {
    m_ErrorMessage =
      "Execute(): Query is not null but prepared statement is. There may have been an error during SetQuery().";
    return false;
    }
  else
    {
    // Re-executing the same statement: rewind it to its initial state.
    sqlite3_reset(m_Statement);
    }
  m_InitialFetch = true;
  m_Active = true;
  int result = sqlite3_step(m_Statement);
  m_InitialFetchResult = result;
  if( result == SQLITE_DONE )
    {
    m_ErrorMessage = "";
    }
  else if( result != SQLITE_ROW )
    {
    // Anything other than DONE or ROW is an error.
    m_Active = false;
    m_ErrorMessage = sqlite3_errmsg(m_Database);
    return false;
    }
  m_ErrorMessage = "";
  return true;
}
/** Convenience wrapper: prepare `query` and, if that succeeds, execute it. */
bool SQLiteDatabase::ExecuteQuery(const char* query)
{
  if( !this->SetQuery(query) )
    {
    return false;
    }
  return this->Execute();
}
/** Get the autoincremented id of the last insert.
  * Runs "SELECT last_insert_rowid()" on the current connection; returns -1
  * when no row can be fetched. Note: this replaces any prepared statement. */
int SQLiteDatabase::GetLastInsertId()
{
  this->ExecuteQuery("SELECT last_insert_rowid()");
  return this->GetNextRow() ? this->GetValueAsInt(0) : -1;
}
/** Get the name of result column `column`.
  * Returns NULL when no query is active or the index is out of range.
  * (Original header comment said "Get number of columns" — corrected.) */
const char * SQLiteDatabase::GetFieldName(unsigned int column)
{
  if( !m_Active )
    {
    return NULL;
    }
  if( column >= this->GetNumberOfFields() )
    {
    m_ErrorMessage = "GetFieldName(): Illegal field index ";
    return NULL;
    }
  return sqlite3_column_name(m_Statement, column);
}
/** Get the number of result columns (0 when no query is active). */
unsigned int SQLiteDatabase::GetNumberOfFields()
{
  if( !m_Active )
    {
    return 0;
    }
  if( m_Statement )
    {
    return sqlite3_column_count(m_Statement);
    }
  return 0;
}
/** Fetch the next row.
  * The first call after Execute() replays the cached initial step result
  * (Execute() already advanced the statement once); subsequent calls step
  * the statement again. Returns true while a row is available. */
bool SQLiteDatabase::GetNextRow()
{
  if( !m_Active )
    {
    m_ErrorMessage = "Query is not active";
    return false;
    }
  if( m_InitialFetch )
    {
    // Replay the result of the sqlite3_step() performed in Execute().
    m_InitialFetch = false;
    if( m_InitialFetchResult == SQLITE_DONE )
      {
      return false;
      }
    else
      {
      return true;
      }
    }
  else
    {
    int result = sqlite3_step(m_Statement);
    if( result == SQLITE_DONE )
      {
      return false;
      }
    else if( result == SQLITE_ROW )
      {
      return true;
      }
    else
      {
      m_ErrorMessage = sqlite3_errmsg(m_Database);
      m_Active = false;
      return false;
      }
    }
  // Unreachable: every branch above returns. Kept to satisfy the compiler.
  return true;
}
/** Return the value of result column `column` as an int.
  * Returns 0 (with m_ErrorMessage set) if the query is inactive, the index
  * is out of range, or the column is not an integer. */
int SQLiteDatabase::GetValueAsInt(unsigned int column)
{
  if( !m_Active )
    {
    m_ErrorMessage = "Query is not active";
    return 0;
    }
  if( column >= this->GetNumberOfFields() )
    {
    m_ErrorMessage = "DataValue() called with out-of-range column index ";
    return 0;
    }
  if( sqlite3_column_type(m_Statement, column) == SQLITE_INTEGER )
    {
    return sqlite3_column_int(m_Statement, column);
    }
  m_ErrorMessage = "Wrong column type";
  return 0;
}
/** Return the value of result column `column` as a 64-bit integer.
  * Returns 0 (with m_ErrorMessage set) if the query is inactive, the index
  * is out of range, or the column is not an integer. */
sqlite_int64 SQLiteDatabase::GetValueAsInt64(unsigned int column)
{
  if( !m_Active )
    {
    m_ErrorMessage = "Query is not active";
    return 0;
    }
  if( column >= this->GetNumberOfFields() )
    {
    m_ErrorMessage = "DataValue() called with out-of-range column index ";
    return 0;
    }
  if( sqlite3_column_type(m_Statement, column) == SQLITE_INTEGER )
    {
    return sqlite3_column_int64(m_Statement, column);
    }
  m_ErrorMessage = "Wrong column type";
  return 0;
}
/** Return the value of result column `column` as a float.
  * Returns 0 (with m_ErrorMessage set) if the query is inactive, the index
  * is out of range, or the column is not a floating-point value. */
float SQLiteDatabase::GetValueAsFloat(unsigned int column)
{
  if( !m_Active )
    {
    m_ErrorMessage = "Query is not active";
    return 0;
    }
  if( column >= this->GetNumberOfFields() )
    {
    m_ErrorMessage = "DataValue() called with out-of-range column index ";
    return 0;
    }
  if( sqlite3_column_type(m_Statement, column) == SQLITE_FLOAT )
    {
    return static_cast<float>(sqlite3_column_double(m_Statement, column) );
    }
  m_ErrorMessage = "Wrong column type";
  return 0;
}
/** Return the value of result column `column` as a C string.
  * SQL NULL maps to the empty string; any other non-text column yields NULL
  * with m_ErrorMessage set. The returned pointer is owned by SQLite and is
  * only valid until the statement advances or is finalized. */
const char * SQLiteDatabase::GetValueAsString(unsigned int column)
{
  if( !m_Active )
    {
    m_ErrorMessage = "Query is not active";
    return NULL;
    }
  if( column >= this->GetNumberOfFields() )
    {
    m_ErrorMessage = "DataValue() called with out-of-range column index ";
    return NULL;
    }
  int columnType = sqlite3_column_type(m_Statement, column);
  if( columnType == SQLITE_TEXT )
    {
    return reinterpret_cast<const char *>(sqlite3_column_text(m_Statement, column) );
    }
  if( columnType == SQLITE_NULL )
    {
    return "";
    }
  m_ErrorMessage = "Wrong column type";
  return NULL;
}
/** Get the most recent error message recorded by any call on this object. */
const char * SQLiteDatabase::GetErrorMessage()
{
  return m_ErrorMessage.c_str();
}
} // end namespace
|
# -----------------------------------------------------------------------------
# This file is part of the xPack distribution.
# (https://xpack.github.io)
# Copyright (c) 2020 Liviu Ionescu.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose is hereby granted, under the terms of the MIT license.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Common functions used in various tests.
#
# Requires
# - app_folder_path
# - test_folder_path
# - archive_platform (win32|linux|darwin)
# -----------------------------------------------------------------------------
function run_tests()
{
  # Strip any pre-release suffix (e.g. "10.2.1-1.1" -> "10.2.1"),
  # then keep only the major version component.
  GCC_VERSION="$(echo "${RELEASE_VERSION}" | sed -e 's|-.*||')"
  GCC_VERSION_MAJOR=$(echo ${GCC_VERSION} | sed -e 's|\([0-9][0-9]*\)\..*|\1|')

  # Call the functions defined in the build code.
  # binutils and gdb are not tested on macOS.
  case "${TARGET_PLATFORM}" in
    darwin)
      test_gcc
      ;;
    *)
      test_binutils
      test_gcc
      test_gdb
      ;;
  esac
}
# -----------------------------------------------------------------------------
# Install the minimum prerequisites (git, curl, tar, lsb-release, binutils,
# plus the basic C/C++ runtime/development libraries) into a container image,
# dispatching on the distro family encoded in the image name.
function update_image()
{
  local image_name="$1"

  # Make sure that the minimum prerequisites are met.
  if [[ ${image_name} == *ubuntu* ]] || [[ ${image_name} == *debian* ]] || [[ ${image_name} == *raspbian* ]]
  then
    # Debian family: apt-get.
    run_verbose apt-get -qq update
    run_verbose apt-get -qq install -y git-core curl tar gzip lsb-release binutils
    run_verbose apt-get -qq install -y libc6-dev libstdc++6 # TODO: get rid of them
  elif [[ ${image_name} == *centos* ]] || [[ ${image_name} == *redhat* ]] || [[ ${image_name} == *fedora* ]]
  then
    # Red Hat family: yum.
    run_verbose yum install -y -q git curl tar gzip redhat-lsb-core binutils
    run_verbose yum install -y -q glibc-devel libstdc++-devel # TODO: get rid of them
  elif [[ ${image_name} == *suse* ]]
  then
    # SUSE family: zypper.
    run_verbose zypper -q in -y git-core curl tar gzip lsb-release binutils findutils util-linux
    run_verbose zypper -q in -y glibc-devel libstdc++6 # TODO: get rid of them
  elif [[ ${image_name} == *manjaro* ]]
  then
    # run_verbose pacman-mirrors -g
    # Refresh the package databases (-y) without installing anything.
    run_verbose pacman -S -y -q --noconfirm

    # Update even if up to date (-yy) & upgrade (-u).
    # pacman -S -yy -u -q --noconfirm
    run_verbose pacman -S -q --noconfirm --noprogressbar git curl tar gzip lsb-release binutils
    run_verbose pacman -S -q --noconfirm --noprogressbar gcc-libs # TODO: get rid of them
  elif [[ ${image_name} == *archlinux* ]]
  then
    # Refresh the package databases (-y) without installing anything.
    run_verbose pacman -S -y -q --noconfirm

    # Update even if up to date (-yy) & upgrade (-u).
    # pacman -S -yy -u -q --noconfirm
    run_verbose pacman -S -q --noconfirm --noprogressbar git curl tar gzip lsb-release binutils
    run_verbose pacman -S -q --noconfirm --noprogressbar gcc-libs
  fi

  echo
  echo "The system C/C++ libraries..."
  find /usr/lib* /lib -name 'libc.*' -o -name 'libstdc++.*' -o -name 'libgcc_s.*'
}
# -----------------------------------------------------------------------------
|
import requests

# Read one channel field (temperature/humidity) as JSON - data-view request.
response = requests.get('https://api.thingspeak.com/channels/1226973/fields/1.json?api_key=RTLY3MGRNANNANTQ&results=2')
# Update - data-submission request (kept for reference):
# r = requests.get('https://api.thingspeak.com/update?api_key=X7AASPFZ94RV1CK1&field2=81')
print('Código de status: ', response.status_code)
if response.status_code == 200:
    print(response.text)
|
const errorHandling = require('../../utils/errorHandling')
const { requiredFields } = require('../../config/errors')
const formatNumberResult = require('../secondary/formatNumberResult')
/**
 * @description Compute the first `counter` numbers of the Fibonacci sequence.
 * The optional parameters keep the original recursive signature: `result`
 * receives the values (and is mutated in place), `a`/`b` are the seeds.
 * @param {number} counter - how many values to produce
 * @param {number[]} [result] - accumulator array, mutated and returned
 * @param {number} [a] - first seed value
 * @param {number} [b] - second seed value
 * @returns {number[]} the accumulator containing the sequence
 */
function fibonacci (counter, result = [], a = 0, b = 1) {
  let current = a
  let next = b
  for (let i = 0; i < counter; i++) {
    result.push(current)
    const sum = current + next
    current = next
    next = sum
  }
  return result
}
/* OUTPUTS */
module.exports = ({ res, next }, argsObject) => {
let { counter } = argsObject
// S'il n'y a pas les champs obligatoire
if (!counter) {
return errorHandling(next, requiredFields)
}
// Si ce n'est pas un nombre
counter = parseInt(counter)
if (isNaN(counter)) {
return errorHandling(next, {
message: 'Veuillez rentré un nombre valide.',
statusCode: 400
})
}
// Si le nombre dépasse LIMIT_COUNTER
const LIMIT_COUNTER = 51
if (counter >= LIMIT_COUNTER) {
return errorHandling(next, {
message: `Par souci de performance, vous ne pouvez pas exécuter cette fonction avec un compteur dépassant ${LIMIT_COUNTER -
1}.`,
statusCode: 400
})
}
const result = fibonacci(counter)
const resultFormatted = result.map(number => formatNumberResult(number))
return res.status(200).json({
result,
resultFormatted,
resultHTML: `<p>Les ${counter} premiers nombres de la suite de fibonacci :<br/> ${resultFormatted.join(
', '
)}</p>`
})
}
|
<reponame>ritaswc/wechat_app_template
// pages/shenghuo/GoodLife/LifeSearchList/LifeSearchList.js
const config = require('../../../../config')
var util = require('../../../../utils/util.js')
// Module-level request state shared by the page callbacks below.
var longt = ""       // longitude of the user's position
var lati = ""        // latitude of the user's position
var CategoryId = ""  // id of the selected category
var CtiyName = ""    // city name (original spelling kept)
var pageNo = 0;      // current page number for paginated loading
Page({
  /** Initial page data. */
  data: {
    LifeSearchList: []
  },

  /** Lifecycle: page load. Caches the query parameters and loads page 1. */
  onLoad: function (options) {
    //var url = 'LifeSearchList/LifeSearchList?CategoryId=' + CategoryId + '&longt=' + longt + '&lati=' + lati + '&localCtiyName=' + this.data.localCtiyName;
    CategoryId = options.CategoryId;
    lati = options.lati;
    longt = options.longt;
    CtiyName = options.localCtiyName;
    this.loadNewData()
  },

  /** Pull-down refresh: reset to the first page and reload. */
  loadNewData: function (e) {
    pageNo = 1;
    this.requestLifeSearchList();
  },

  /** Pull-up load: advance to the next page and append results. */
  loadNewData_NextPage: function (e) {
    pageNo += 1;
    this.requestLifeSearchList();
  },

  /**
   * Fetch one page of search results and merge them into page data.
   * Bug fix: `var that = this` was declared twice and wx.showLoading() was
   * called twice per request; the duplicates are removed.
   */
  requestLifeSearchList: function (e) {
    var that = this;
    let url = config.LifeSearchListUrl
    wx.showLoading({ title: '加载中...' })
    // Map the selected radius label to meters; "全城" (whole city) = no limit.
    // NOTE(review): quanchengSelectedName is never set in `data` above —
    // presumably set elsewhere; when undefined, distanceStr stays null.
    var distanceStr = null
    if (that.data.quanchengSelectedName === "1千米") {
      distanceStr = "1000"
    } else if (that.data.quanchengSelectedName === "3千米") {
      distanceStr = "3000"
    } else if (that.data.quanchengSelectedName === "5千米") {
      distanceStr = "5000"
    } else if (that.data.quanchengSelectedName === "10千米") {
      distanceStr = "10000"
    } else if (that.data.quanchengSelectedName === "全城") {
      distanceStr = null
    }
    var para = {
      "pageSize": 20,
      "pageNum": pageNo,
      "sortType": ("离我最近" === that.data.priceSelectedName ? 0 : 1), // 0 = by distance, 1 = by highest value
      "position": {
        "distance": distanceStr,
        "latitude": lati,
        "longitude": longt
      },
      "fcId": CategoryId,
      "city": CtiyName,
      "country": null
    }
    util.RequestManager(url, para, function (res, fail) {
      wx.hideLoading()
      var tempAr = [];
      for (var i = 0; i < res.data.dataList.length; i++) {
        var model = res.data.dataList[i];
        model["starAr"] = util.convertToStarsArray(model["star"])
        model["distance"] = util.convertToDistance(model["distance"])
        tempAr.push(model)
      }
      if (pageNo == 1) {
        // Refresh: replace the list.
        that.setData({ LifeSearchList: tempAr })
      } else {
        // Load more: append to the list.
        that.setData({ LifeSearchList: that.data.LifeSearchList.concat(tempAr) })
      }
    })
  },

  /** Lifecycle: first render complete. */
  onReady: function () {
  },
  /** Lifecycle: page shown. */
  onShow: function () {
  },
  /** Lifecycle: page hidden. */
  onHide: function () {
  },
  /** Lifecycle: page unloaded. */
  onUnload: function () {
  },
  /** User pulled down to refresh. */
  onPullDownRefresh: function () {
  },
  /** Reached the bottom of the page: load the next page. */
  onReachBottom: function () {
    this.loadNewData_NextPage();
  },
  /** User tapped the share button in the top-right corner. */
  onShareAppMessage: function () {
  }
})
|
/**
 * Minimal four-function integer calculator.
 * All operations use plain Java int arithmetic: results can overflow
 * silently, and div() throws ArithmeticException when the divisor is zero.
 */
public class Calculator {
    /** Creates a calculator instance; no state is held. */
    public Calculator() { }

    /** Returns the sum of the two operands. */
    public int add(int left, int right) {
        return left + right;
    }

    /** Returns the first operand minus the second. */
    public int sub(int left, int right) {
        return left - right;
    }

    /** Returns the product of the two operands. */
    public int mul(int left, int right) {
        return left * right;
    }

    /**
     * Returns the integer quotient (truncated toward zero).
     * @throws ArithmeticException if the divisor is zero
     */
    public int div(int left, int right) {
        return left / right;
    }
}
|
import random  # kept: file-level import may be used elsewhere
import string
import secrets


def random_password(length):
    """Return a random password of `length` characters.

    Characters are drawn from ASCII letters, digits and a fixed set of
    punctuation. Security fix: uses `secrets.choice` (CSPRNG) instead of
    `random.choice` — the `random` module's Mersenne Twister is predictable
    and must not be used for passwords or other secrets.
    """
    characters = string.ascii_letters + string.digits + '!@#$%^&*()'
    return ''.join(secrets.choice(characters) for _ in range(length))


if __name__ == '__main__':
    print(random_password(8))
|
require_relative '../classes/person'
# Unit tests for the Person class (age, name, permissions, id, rentals).
describe Person do
  context 'check person class' do
    age = 20
    name = 'John'
    parent_permission = false
    person = Person.new(age, name, parent_permission)

    it 'should be the class person' do
      expect(person.class).to eq Person
    end

    it 'should have an age' do
      expect(person.age).to eq age
    end

    it 'should have a name' do
      expect(person.name).to eq name
    end

    it 'should have a parent permission' do
      expect(person.parent_permission).to eq parent_permission
    end

    it 'should have an unique ID' do
      expect(person.id).to be_kind_of Numeric
    end

    # Typo fix: description previously read "shoudl have".
    it 'should have the rentals array' do
      expect(person.rentals).to be_kind_of Array
    end

    it 'should return a true or false ' do
      expect(person.can_use_services?).to be true
    end

    it 'should return the name capitalize' do
      expect(person.validate_name).to eq person.name.capitalize
    end
  end
end
|
package main
import (
"database/sql"
"io/ioutil"
"log"
"net/http"
"net/url"
"regexp"
_ "github.com/go-sql-driver/mysql"
)
// main fetches https://example.com, extracts the <title> and meta
// description with regexes, and stores them in the local MySQL database.
func main() {
	// Connecting to the database.
	db, err := sql.Open("mysql", "root:@/crawler")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Querying data from the website.
	resp, err := http.Get("https://example.com")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Fatalf("Error reading body: %v", err)
	}

	// Extracting meta data.
	titleRegexp := regexp.MustCompile(`<title>(.*?)</title>`)
	descRegexp := regexp.MustCompile(`<meta name="description" content="(.*?)"`)
	title := titleRegexp.FindStringSubmatch(string(body))
	desc := descRegexp.FindStringSubmatch(string(body))

	// Storing data into the database.
	if title == nil || desc == nil {
		log.Fatal("Could not find meta data")
	}
	query, err := db.Prepare("INSERT INTO meta (url, title, description) VALUES (?, ?, ?)")
	if err != nil {
		log.Fatal(err)
	}
	defer query.Close()
	// Bug fix: the variable was named `url`, shadowing the imported net/url
	// package, and the *url.URL value was passed directly to Exec — a type
	// the MySQL driver cannot bind. Bind the canonical string form instead.
	pageURL, err := url.Parse("https://example.com")
	if err != nil {
		log.Fatal(err)
	}
	res, err := query.Exec(pageURL.String(), title[1], desc[1])
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Data stored. ", res)
}
|
package cache
import "srcd.works/go-git.v4/plumbing"
const (
	// initialQueueSize is the starting capacity of the FIFO order queue.
	initialQueueSize = 20
	// MaxSize is the default cache budget.
	// NOTE(review): MiByte is not defined in this file — presumably a
	// package-level constant declared elsewhere; confirm before relying on it.
	MaxSize = 10 * MiByte
)

// ObjectFIFO is a fixed-budget object cache with first-in-first-out eviction.
type ObjectFIFO struct {
	objects    map[plumbing.Hash]plumbing.EncodedObject // cached objects keyed by hash
	order      *queue                                   // insertion order, oldest first
	maxSize    int64                                    // byte budget for the cache
	actualSize int64                                    // current total size of cached objects
}
// NewObjectFIFO returns an Object cache that keeps the newest objects that fit
// into the specific memory size
func NewObjectFIFO(size int64) *ObjectFIFO {
	fifo := new(ObjectFIFO)
	fifo.objects = make(map[plumbing.Hash]plumbing.EncodedObject)
	fifo.order = newQueue(initialQueueSize)
	fifo.maxSize = size
	return fifo
}
// Add adds a new object to the cache. If the object size is greater than the
// cache size, the object is not added. Oldest entries are evicted until the
// new object fits within the byte budget.
func (c *ObjectFIFO) Add(o plumbing.EncodedObject) {
	// if the size of the object is bigger or equal than the cache size,
	// skip it
	if o.Size() >= c.maxSize {
		return
	}

	// if the object is into the cache, do not add it again
	if _, ok := c.objects[o.Hash()]; ok {
		return
	}

	// Bug fix: previously at most ONE old object was evicted per Add, so the
	// cache could grow past maxSize. Evict oldest entries until the new
	// object fits within the budget (guarded so Pop is never called with an
	// empty cache).
	for c.actualSize+o.Size() > c.maxSize && len(c.objects) > 0 {
		h := c.order.Pop()
		old := c.objects[h]
		if old != nil {
			c.actualSize -= old.Size()
			delete(c.objects, h)
		}
	}

	c.objects[o.Hash()] = o
	c.order.Push(o.Hash())
	c.actualSize += o.Size()
}
// Get returns an object by his hash. If the object is not found in the cache, it
// returns nil
func (c *ObjectFIFO) Get(k plumbing.Hash) plumbing.EncodedObject {
	// Lookup of a missing key yields the map's zero value, i.e. nil.
	return c.objects[k]
}
// Clear the content of this object cache
func (c *ObjectFIFO) Clear() {
	// Drop all references so the GC can reclaim the cached objects.
	c.objects = make(map[plumbing.Hash]plumbing.EncodedObject)
	c.order = newQueue(initialQueueSize)
	c.actualSize = 0
}
|
#!/bin/bash
#SBATCH --account=project_2001659
#SBATCH --cores=10
#SBATCH --partition=test
#SBATCH --time=00:15:00

# SLURM job: run the "meta_analysis" Snakemake target on 10 cores.
# Resolve the shared images/assets/data folders to absolute paths.
export PERMEDCOE_IMAGES=$(readlink -f $(pwd)/../../../BuildingBlocks/Resources/images/)/
export PERMEDCOE_ASSETS=$(readlink -f $(pwd)/../../../BuildingBlocks/Resources/assets/)/
export _DATA_DIR=$(readlink -f ../../Resources/data/)
# Make the assets and data folders visible inside Singularity containers.
export SINGULARITY_BIND="$PERMEDCOE_ASSETS:$PERMEDCOE_ASSETS,$_DATA_DIR:$_DATA_DIR"
snakemake --cores 10 meta_analysis
|
<filename>src/components/CreatePost.js
import React from "react"
import "./CreatePost.css"
import {
Row,
Col,
Image,
Button,
Form,
Card,
FloatingLabel,
} from "react-bootstrap"
import * as Yup from "yup"
import { useForm } from "react-hook-form"
import ReactCommonmark from "react-commonmark"
import { yupResolver } from "@hookform/resolvers/yup"
import {
withoutTrailingSlash,
getBackEndHostWithSlash,
getAuthorIdOrRemoteLink,
isNotNullOrUndefined,
getAuthorImgOrDefault,
} from "../utils"
export default function CreatePost({ loggedInUser, author, triggerRerender }) {
const [postContentType, setPostContentType] = React.useState("text/plain")
const [postImageSrc, setPostImageSrc] = React.useState(null)
const [postMarkdownContent, setPostMarkdownContent] = React.useState("")
const [disableSubmit, setDisableSubmit] = React.useState(false)
let postHasMarkdownContentType = postContentType === "text/markdown"
let postHasImageContentType =
postContentType === "image/png;base64" ||
postContentType === "image/jpeg;base64"
// schema to validate form inputs
const validationSchema = Yup.object().shape({
content: Yup.string().required("Post content is required"),
})
// get form functions and link validation schema to form
const {
register,
handleSubmit,
reset,
setValue,
setError,
formState: { errors },
} = useForm({
resolver: yupResolver(validationSchema),
})
const submitHandler = (data) => {
console.log("data")
console.log(data)
const newData = { ...data }
if (newData.title === "") {
delete newData.title
}
newData.type = "post"
if (newData.visibility === "UNLISTED") {
newData.visibility = "FRIENDS"
newData.unlisted = true
}
if (isNotNullOrUndefined(postImageSrc)) {
newData.content = postImageSrc
}
if (
withoutTrailingSlash(loggedInUser.id) !== withoutTrailingSlash(author.id)
) {
setError("content", {
type: "server",
message: `${author.displayName} cannot post for ${loggedInUser.displayName}!`,
})
return
}
const host = getBackEndHostWithSlash()
console.log("newData")
console.log(newData)
// post the validated data to the backend registration service
fetch(`${host}service/author/${loggedInUser.uuid}/posts/`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${localStorage.getItem("token")}`,
},
body: JSON.stringify(newData),
}).then((corsResponse) => {
const apiPromise = corsResponse.json()
apiPromise
.then((apiResponse) => {
// empty out the form
reset()
setPostImageSrc(null)
setPostContentType("text/plain")
setPostMarkdownContent("")
triggerRerender()
setDisableSubmit(false)
console.log(apiResponse)
})
.catch((e) => {
// get the errors object
const errors = e.response.data
// set content errors
if (errors.content) {
setError("content", {
type: "server",
message: errors.content[0],
})
}
// set visibility errors
if (errors.visibility) {
setError("visibility", {
type: "server",
message: errors.visibility[0],
})
}
// set type errors
if (errors.type) {
setError("type", {
type: "server",
message: errors.type[0],
})
}
})
})
}
const updateImage = () => {
const file = document.getElementById("formFile").files[0]
const reader = new FileReader()
reader.onloadend = function () {
// convert image file to base64 string
setPostImageSrc(reader.result)
}
if (file) {
reader.readAsDataURL(file)
}
}
return (
<div>
<Form onSubmit={handleSubmit(submitHandler)}>
<Card
key={1}
className="Card my-5 border-0"
style={{
boxShadow: "#e0e3e8 0px 1px 1px, #e0e3e8 0px 1px 2px",
borderRadius: "7px",
}}
>
<Card.Body className="p-4">
<Row>
<Col
xs={6}
style={{
display: "flex",
alignItems: "flex-start",
}}
>
<a
href={getAuthorIdOrRemoteLink(author)}
style={{ textDecoration: "none" }}
>
<Image
className="fluid"
src={getAuthorImgOrDefault(author?.profileImage)}
roundedCircle
style={{
objectfit: "cover",
backgroundColor: "#EEE",
width: "40px",
height: "40px",
marginRight: "8px",
}}
/>
</a>
{/* title Form Field */}
<Form.Group className="mb-3" style={{ flexGrow: 1 }}>
<Form.Control
defaultValue=""
name="title"
placeholder="Post title (optional)"
as="textarea"
rows={1}
style={{
padding: "1rem 0.85rem",
resize: "none",
}}
{...register("title")}
className={`form-control ${
errors.title ? "is-invalid" : ""
}`}
/>
<Form.Text className="invalid-feedback">
{errors.title?.message}
</Form.Text>
</Form.Group>
</Col>
<Col xs={3}>
<FloatingLabel
controlId="contentType"
name="contentType"
label="Type"
>
<Form.Select
id="postType"
aria-label="Floating label select example"
{...register("contentType")}
onChange={(e) => {
setPostImageSrc(null)
setPostContentType(
document.getElementById("postType").value
)
}}
>
<option value="text/plain">text/plain</option>
<option value="text/markdown">text/markdown</option>
<option value="application/base64">
application/base64
</option>
<option value="image/png;base64">image/png;base64</option>
<option value="image/jpeg;base64">image/jpeg;base64</option>
</Form.Select>
</FloatingLabel>
</Col>
<Col xs={3}>
<FloatingLabel
controlId="visibility"
name="visibility"
label="Visibility"
>
<Form.Select
aria-label="Floating label select example"
{...register("visibility")}
>
<option value="PUBLIC">PUBLIC</option>
<option value="FRIENDS">FRIENDS</option>
<option value="UNLISTED">UNLISTED</option>
</Form.Select>
</FloatingLabel>
</Col>
</Row>
<Row>
<Col
xs={12}
style={{
display: "flex",
alignItems: "flex-start",
}}
>
{/* content Form Field */}
{!postHasImageContentType && (
<div
style={{
display: "flex",
justifyContent: "space-between",
flexGrow: 1,
marginLeft: "48px",
}}
>
<Form.Group
style={{
width: `${postHasMarkdownContentType ? "46%" : "100%"}`,
}}
>
<Form.Control
defaultValue=""
name="content"
id="content"
placeholder="Create your post"
as="textarea"
rows={5}
style={{ padding: "0.75rem 0.85rem" }}
{...register("content")}
onChange={(e) => {
setPostMarkdownContent(
document.getElementById("content").value
)
}}
className={`form-control ${
errors.content ? "is-invalid" : ""
}`}
/>
<Form.Text className="invalid-feedback">
{errors.content?.message}
</Form.Text>
</Form.Group>
{postHasMarkdownContentType && (
<div
style={{
width: "51%",
height: "146px",
border: "1px solid #ced4da",
borderRadius: "0.25rem",
padding: "0.75rem 0.85rem",
whiteSpace: "pre-line",
resize: "vertical",
overflowY: "scroll",
}}
>
<ReactCommonmark
source={postMarkdownContent}
escapeHtml={true}
/>
</div>
)}
</div>
)}
</Col>
</Row>
{postHasImageContentType && (
<Row
style={{
marginLeft: "37px",
display: "flex",
justifyContent: "center",
alignItems: "center",
}}
>
<div
style={{
maxWidth: "80%",
minHeight: "200px",
maxHeight: "500px",
position: "relative",
padding: "0px",
backgroundColor: "transparent",
display: "flex",
justifyContent: "center",
alignItems: "center",
}}
>
{postImageSrc ? (
<img
id="postImageContent"
src={postImageSrc}
objectfit="contain"
alt="preview..."
style={{
maxWidth: "100%",
minHeight: "200px",
maxHeight: "500px",
}}
/>
) : (
<div
style={{
height: "100%",
width: "100%",
position: "absolute",
display: "flex",
justifyContent: "center",
alignItems: "center",
backgroundColor: "#f0f0f0",
fontSize: "18px",
}}
>
No Image Selected
</div>
)}
</div>
</Row>
)}
<Row className="mt-3">
{/* Submit Button */}
<div
style={{
display: "flex",
alignItems: "center",
justifyContent: "flex-end",
}}
>
{postHasImageContentType && (
<Button
className="pl-5"
variant="outline-primary"
// type="file"
// id="formFile"
onClick={() => {
document.getElementById("formFile").click()
}}
style={{ padding: "0.6rem 1rem", marginRight: "10px" }}
>
Upload Image
</Button>
)}
<Button
className="pl-5"
variant="primary"
type="submit"
style={{
padding: "0.6rem 1rem",
pointerEvents: `${disableSubmit ? "none" : "auto"}`,
}}
onClick={() => {
setDisableSubmit(true)
if (postImageSrc !== null) {
setValue("content", "value")
}
}}
>
Create Post
</Button>
<input
className="form-control"
type="file"
id="formFile"
accept={
postContentType === "image/png;base64"
? "image/png"
: postContentType === "image/jpeg;base64"
? "image/jpeg"
: null
}
// {...register("contentx")}
onChange={updateImage}
style={{ display: "none" }}
></input>
</div>
</Row>
</Card.Body>
</Card>
</Form>
</div>
)
}
|
import Navbar from '../components/Navbar.vue';
// Storybook metadata: groups the stories below under the "Navbar" title.
export default {
  title: 'Navbar',
  component: Navbar,
};

// Default story: renders the navbar with no props or slot content.
export const DefaultState = () => ({
  components: {
    navbar: Navbar
  },
  template: `<navbar> </navbar>`
});
|
#!/bin/bash
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs the server for the demo page.
#
# Usage:
# $ buildtools/run_demo.sh
# CD to the root FirebaseUI directory, which should be the parent directory of
# buildtools/.
# Robustness fix: quote the inner $(dirname "$0") as well, so the script
# works when its path contains spaces.
cd "$(dirname "$(dirname "$0")")"
# Copy the built assets into the demo site and serve it locally.
cp -r dist demo/public
cd demo
firebase serve
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.