content
stringlengths 66
45k
| language
stringclasses 11
values | license
stringclasses 14
values | path
stringlengths 20
176
| annotation_id
stringlengths 36
36
| pii
stringlengths 2
19.6k
| pii_modified
stringlengths 2
19.6k
|
---|---|---|---|---|---|---|
// Doxygen-generated navigation index for the VSTGUI IListControlConfigurator
// class page: each entry is [member display name, target page#anchor, child list].
var class_v_s_t_g_u_i_1_1_i_list_control_configurator =
[
[ "~IListControlConfigurator", "class_v_s_t_g_u_i_1_1_i_list_control_configurator.html#a91759873a7f5540cb3010333fba70f88", null ],
[ "getRowDesc", "class_v_s_t_g_u_i_1_1_i_list_control_configurator.html#af4eb081cc50f2b7e403abc8a33cb0024", null ]
];
|
JavaScript
|
MIT
|
MOAMaster/AudioPlugSharp-SamplePlugins/vst3sdk/doc/vstgui/html/class_v_s_t_g_u_i_1_1_i_list_control_configurator.js
|
5915dbb0-1292-4d31-a5eb-bf050e4eac4e
|
[]
|
[]
|
#VERSION: 2.3
#AUTHORS: Vikas Yadav (https://github.com/v1k45 | http://v1k45.com)
#CONTRIBUTORS: Diego de las Heras (ngosang@hotmail.es)
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import re
from html.parser import HTMLParser
from helpers import retrieve_url
from novaprinter import prettyPrinter
class leetx(object):
    """1337x search engine plugin for qBittorrent's nova2 search framework.

    The class attributes (``url``, ``name``, ``supported_categories``) and the
    ``search``/``download_torrent`` methods form the interface expected by the
    plugin loader.
    """
    url = "https://1337x.to"
    name = "1337x"
    supported_categories = {
        'all': 'All',
        'movies': 'Movies',
        'tv': 'TV',
        'music': 'Music',
        'games': 'Games',
        'anime': 'Anime',
        'software': 'Apps'
    }

    class MyHtmlParser(HTMLParser):
        """Sub-class for parsing results out of a 1337x search page."""

        A, TABLE, TR, TD, SPAN = ('a', 'table', 'tr', 'td', 'span')

        def __init__(self, results, url):
            HTMLParser.__init__(self)
            self.results = results        # shared list: one entry appended per printed row
            self.url = url                # base url used to build absolute links
            self.current_result = {}      # fields of the torrent row being parsed
            self.current_item = None      # key under which handle_data accumulates text
            self.inside_table = False
            self.inside_row = False

        def handle_starttag(self, tag, attrs):
            # are we inside the results table body or not
            # if we are not inside the table, no need to process any further
            self.inside_table = self.inside_table or tag == self.TABLE
            if not self.inside_table:
                return

            # convert attrs tuple to dictionary
            attrs = dict(attrs)

            # for torrent name and link
            link = attrs.get('href', '')
            if tag == self.A and link.startswith('/torrent'):
                self.current_result['link'] = self.url + link
                self.current_result['desc_link'] = self.url + link
                self.current_result['engine_url'] = self.url
                self.current_item = 'name'

            # to ignore uploader name attached to the torrent size in span tag
            if tag == self.SPAN:
                self.current_item = None

            # if this is a <td> there can be seeds, leeches or size inside it.
            if tag == self.TD:
                self.inside_row = True

                # find appropriate data key using class name of td
                for item in ['seeds', 'leech', 'size']:
                    if item in attrs.get('class', ''):
                        self.current_item = item
                        break

        def handle_data(self, data):
            # if we are not inside the table, no need to process any further
            if not self.inside_table:
                return

            # do not process data if we are not inside the table body
            if self.current_item:
                # text may arrive in several chunks; accumulate it
                prev_value = self.current_result.get(self.current_item, '')
                self.current_result[self.current_item] = prev_value + data

        def handle_endtag(self, tag):
            # are we inside the results table body or not
            # if we are not inside the table, no need to process any further
            if tag == self.TABLE:
                self.inside_table = False
            if not self.inside_table:
                return

            # exiting the table data and maybe moving td or tr element
            if self.inside_row and tag == self.TD:
                self.inside_row = False
                self.current_item = None

            # exiting the tr element, which means all necessary data for a torrent has been
            # extracted, we should save it and clean the object's state.
            if self.current_result and tag == self.TR:
                if 'size' in self.current_result:
                    self.current_result['size'] = self.current_result['size'].replace(',', '')

                # skip malformed names (eg. with @)
                if 'name' in self.current_result:
                    prettyPrinter(self.current_result)
                    self.results.append('a')
                self.current_result = {}
                self.current_item = None

    def download_torrent(self, download_url):
        # since 1337x does not provide torrent links in the search results,
        # we will have to fetch the page and extract the magnet link
        torrent_page = retrieve_url(download_url)
        magnet_match = re.search(r"href\s*=\s*\"(magnet[^\"]+)\"", torrent_page)
        if magnet_match and magnet_match.groups():
            print(magnet_match.groups()[0] + " " + download_url)
        else:
            raise Exception('Error, please fill a bug report!')

    def search(self, what, cat='all'):
        cat = cat.lower()

        # decide which type of search to perform based on category
        search_page = "search" if cat == 'all' else 'category-search'
        search_url = "{url}/{search_page}/{search_query}/".format(
            url=self.url, search_page=search_page, search_query=what)

        # apply search category to url, if any.
        if cat != 'all':
            search_url += self.supported_categories[cat] + "/"

        # try to get 15 pages (20 * 15 = 300 results) and stop when we don't find results
        results_list = []
        parser = self.MyHtmlParser(results_list, self.url)
        page = 1
        while page < 16:
            # download the page
            html = retrieve_url(search_url + str(page) + '/')
            parser.feed(html)
            if len(results_list) < 1:
                break
            del results_list[:]
            page += 1
        parser.close()
|
Python
|
MIT
|
Kira9204/Wireguard-qBittorrent/container_data/.config/qBittorrent/plugins/nova3/engines/leetx.py
|
69ea163d-0800-4b52-a859-6fe76f84a1fc
|
[{"tag": "USERNAME", "value": "v1k45", "start": 56, "end": 61, "context": "ON: 2.3\n#AUTHORS: Vikas Yadav (https://github.com/v1k45 | http://v1k45.com)\n#CONTRIBUTORS: Diego de las H"}, {"tag": "NAME", "value": "Vikas Yadav", "start": 24, "end": 35, "context": "#VERSION: 2.3\n#AUTHORS: Vikas Yadav (https://github.com/v1k45 | http://v1k45.com)\n#CO"}, {"tag": "EMAIL", "value": "ngosang@hotmail.es", "start": 117, "end": 135, "context": "p://v1k45.com)\n#CONTRIBUTORS: Diego de las Heras (ngosang@hotmail.es)\n\n# Redistribution and use in source and binary f"}, {"tag": "NAME", "value": "Diego de las Heras", "start": 97, "end": 115, "context": "thub.com/v1k45 | http://v1k45.com)\n#CONTRIBUTORS: Diego de las Heras (ngosang@hotmail.es)\n\n# Redistribution and use in"}]
|
[{"tag": "USERNAME", "value": "v1k45", "start": 56, "end": 61, "context": "ON: 2.3\n#AUTHORS: Vikas Yadav (https://github.com/v1k45 | http://v1k45.com)\n#CONTRIBUTORS: Diego de las H"}, {"tag": "NAME", "value": "Vikas Yadav", "start": 24, "end": 35, "context": "#VERSION: 2.3\n#AUTHORS: Vikas Yadav (https://github.com/v1k45 | http://v1k45.com)\n#CO"}, {"tag": "EMAIL", "value": "ngosang@hotmail.es", "start": 117, "end": 135, "context": "p://v1k45.com)\n#CONTRIBUTORS: Diego de las Heras (ngosang@hotmail.es)\n\n# Redistribution and use in source and binary f"}, {"tag": "NAME", "value": "Diego de las Heras", "start": 97, "end": 115, "context": "thub.com/v1k45 | http://v1k45.com)\n#CONTRIBUTORS: Diego de las Heras (ngosang@hotmail.es)\n\n# Redistribution and use in"}]
|
//---------------------------------------------------------------------------
/*
TJS2 Script Engine
Copyright (C) 2000-2007 W.Dee <dee@kikyou.info> and contributors
See details of license at "license.txt"
*/
//---------------------------------------------------------------------------
// Conditional Compile Control
//---------------------------------------------------------------------------
#include "tjsCommHead.h"
#include "tjsCompileControl.h"
#include "tjsLex.h"
#include "tjsVariant.h"
#include "tjspp.tab.h"
#include "tjsError.h"
namespace TJS
{
//---------------------------------------------------------------------------
int ppparse(void*);
//---------------------------------------------------------------------------
// TJS_iswspace
static bool inline TJS_iswspace(tjs_char ch)
{
	// The CRT isspace is only trusted for the zero code page, so any
	// character with a non-zero high byte is never treated as whitespace.
	return (ch & 0xff00) ? false : (isspace(ch) != 0);
}
//---------------------------------------------------------------------------
static bool inline TJS_iswdigit(tjs_char ch)
{
	// isdigit is only reliable for the zero code page; anything above
	// U+00FF is never a digit here.
	return (ch & 0xff00) ? false : (isdigit(ch) != 0);
}
//---------------------------------------------------------------------------
static bool inline TJS_iswalpha(tjs_char ch)
{
	// Any character outside the zero code page is treated as alphabetic
	// (identifier-forming); otherwise defer to the CRT isalpha.
	return (ch & 0xff00) ? true : (isalpha(ch) != 0);
}
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
// tTJSPPExprParser
//---------------------------------------------------------------------------
// Construct an expression parser over "script". Takes ownership of the
// buffer: it is freed with delete[] in the destructor.
tTJSPPExprParser::tTJSPPExprParser(tTJS * tjs, const tjs_char *script)
{
// script pointed by "script" argument will be released by this class
// via delete[]
TJS = tjs;
Script = script;
}
//---------------------------------------------------------------------------
tTJSPPExprParser::~tTJSPPExprParser()
{
delete [] Script; // releases the buffer handed over in the constructor
}
//---------------------------------------------------------------------------
// Evaluate the preprocessor expression held in Script and return its value.
// ppparse() is the bison-generated parser; it pulls tokens through GetNext()
// and stores the final value into Result via this object.
tjs_int32 tTJSPPExprParser::Parse()
{
Current = Script;
Result = 0;
if(ppparse(this))
{
TJS_eTJSError(TJSPPError); // non-zero return from ppparse means a parse error
}
return Result;
}
//---------------------------------------------------------------------------
// Lexer for the preprocessor expression parser.
// Returns the next PT_* token id (0 at end of input, PT_ERROR on bad input).
// For PT_NUM the literal's value, and for PT_SYMBOL an index into the IDs
// table (see GetString), is passed back through "value".
tjs_int tTJSPPExprParser::GetNext(tjs_int32 &value)
{
// get next token
while(TJS_iswspace(*Current) && *Current) Current++;
if(!*Current) return 0;
switch(*Current)
{
case TJS_W('('):
Current++;
return PT_LPARENTHESIS;
case TJS_W(')'):
Current++;
return PT_RPARENTHESIS;
case TJS_W(','):
Current++;
return PT_COMMA;
// two-character operators are checked before their one-character prefixes
case TJS_W('='):
if(*(Current+1) == TJS_W('=')) { Current+=2; return PT_EQUALEQUAL; }
Current++;
return PT_EQUAL;
case TJS_W('!'):
if(*(Current+1) == TJS_W('=')) { Current+=2; return PT_NOTEQUAL; }
Current++;
return PT_EXCLAMATION;
case TJS_W('|'):
if(*(Current+1) == TJS_W('|')) { Current+=2; return PT_LOGICALOR; }
Current++;
return PT_VERTLINE;
case TJS_W('&'):
if(*(Current+1) == TJS_W('&')) { Current+=2; return PT_LOGICALAND; }
Current++;
return PT_AMPERSAND;
case TJS_W('^'):
Current++;
return PT_CHEVRON;
case TJS_W('+'):
Current++;
return PT_PLUS;
case TJS_W('-'):
Current++;
return PT_MINUS;
case TJS_W('*'):
Current++;
return PT_ASTERISK;
case TJS_W('/'):
Current++;
return PT_SLASH;
case TJS_W('%'):
Current++;
return PT_PERCENT;
case TJS_W('<'):
if(*(Current+1) == TJS_W('=')) { Current+=2; return PT_LTOREQUAL; }
Current++;
return PT_LT;
case TJS_W('>'):
if(*(Current+1) == TJS_W('=')) { Current+=2; return PT_GTOREQUAL; }
Current++;
return PT_GT;
case TJS_W('0'):
case TJS_W('1'):
case TJS_W('2'):
case TJS_W('3'):
case TJS_W('4'):
case TJS_W('5'):
case TJS_W('6'):
case TJS_W('7'):
case TJS_W('8'):
case TJS_W('9'):
{
// number
// TJSParseNumber advances Current past the literal and fills "val"
tTJSVariant val;
try
{
if(!TJSParseNumber(val, &Current)) return PT_ERROR;
}
catch(...)
{
return PT_ERROR;
}
value = (tjs_int32)(tTVInteger)val;
return PT_NUM;
}
}
// anything else must start an identifier: alpha or underscore
if(!TJS_iswalpha(*Current) && *Current!=TJS_W('_'))
{
return PT_ERROR;
}
const tjs_char *st = Current;
while((TJS_iswalpha(*Current) || TJS_iswdigit(*Current) ||
*Current==TJS_W('_')) && *Current)
Current++;
// record the identifier and hand back its index in IDs
ttstr str(st, Current-st);
IDs.push_back(str);
value = IDs.size() -1;
return PT_SYMBOL;
}
//---------------------------------------------------------------------------
// Return the identifier text recorded by GetNext for a PT_SYMBOL whose
// "value" was idx. The pointer stays valid as long as this parser lives.
const tjs_char * tTJSPPExprParser::GetString(tjs_int idx) const
{
return IDs[idx].c_str();
}
//---------------------------------------------------------------------------
// Lexer entry point called by the bison-generated ppparse(); "pm" is the
// tTJSPPExprParser instance that was passed to ppparse.
int pplex(YYSTYPE *yylex, void *pm)
{
tjs_int32 val;
tjs_int n;
n = ((tTJSPPExprParser*)pm)->GetNext(val);
if(n == PT_NUM) yylex->val = val; // numeric literal value
if(n == PT_SYMBOL) yylex->nv = val; // index into the parser's IDs table
return n;
}
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
} // namespace TJS
|
C++
|
MIT
|
CATION-M/X-moe/Projects/krkr2_on_VC/kirikiri2/src/core/tjs2/tjsCompileControl.cpp
|
7b637256-20da-42d8-a154-a382cba31a7f
|
[{"tag": "EMAIL", "value": "dee@kikyou.info", "start": 133, "end": 148, "context": "JS2 Script Engine\n\tCopyright (C) 2000-2007 W.Dee <dee@kikyou.info> and contributors\n\n\tSee details of license at \"li"}, {"tag": "NAME", "value": "W.Dee", "start": 126, "end": 131, "context": "-\n/*\n\tTJS2 Script Engine\n\tCopyright (C) 2000-2007 W.Dee <dee@kikyou.info> and contributors\n\n\tSee details "}]
|
[{"tag": "EMAIL", "value": "dee@kikyou.info", "start": 133, "end": 148, "context": "JS2 Script Engine\n\tCopyright (C) 2000-2007 W.Dee <dee@kikyou.info> and contributors\n\n\tSee details of license at \"li"}, {"tag": "NAME", "value": "W.Dee", "start": 126, "end": 131, "context": "-\n/*\n\tTJS2 Script Engine\n\tCopyright (C) 2000-2007 W.Dee <dee@kikyou.info> and contributors\n\n\tSee details "}]
|
# coding=utf-8
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file has been copied from
# https://github.com/mlcommons/inference/blob/r0.7/vision/medical_imaging/3d-unet/preprocess.py
import argparse
import numpy
import os
import pickle
import sys
import torch
from batchgenerators.augmentations.utils import pad_nd_image
from batchgenerators.utilities.file_and_folder_operations import subfiles
from nnunet.training.model_restore import load_model_and_checkpoint_files
from nnunet.inference.predict import preprocess_multithreaded
def preprocess_MLPerf(model, checkpoint_name, folds, fp16, list_of_lists, output_filenames, preprocessing_folder, num_threads_preprocessing):
    """Run the nnU-Net preprocessing pipeline and pickle each case.

    Loads the trainer/plans for ``folds`` from ``model``, streams every case in
    ``list_of_lists`` through ``preprocess_multithreaded``, pads each volume to
    the trainer's patch size, and writes ``<output_filename>.pkl`` files (data +
    properties dict) into ``preprocessing_folder``.

    Returns the list of output file names that were written.
    """
    assert len(list_of_lists) == len(output_filenames)
    print("loading parameters for folds", folds)
    trainer, params = load_model_and_checkpoint_files(model, folds, fp16, checkpoint_name=checkpoint_name)

    print("starting preprocessing generator")
    preprocessing = preprocess_multithreaded(trainer, list_of_lists, output_filenames, num_threads_preprocessing, None)
    print("Preprocessing images...")
    all_output_files = []

    for preprocessed in preprocessing:
        output_filename, (d, dct) = preprocessed
        all_output_files.append(output_filename)

        if isinstance(d, str):
            # The preprocessor spills large cases to disk and yields a path
            # instead of an array: reload it and clean the temp file up.
            # BUG FIX: the module is imported as `numpy` (not `numpy as np`),
            # so the previous `np.load(d)` raised NameError on this path.
            data = numpy.load(d)
            os.remove(d)
            d = data

        # Pad to the desired full volume
        d = pad_nd_image(d, trainer.patch_size, "constant", None, False, None)

        # `with` closes the file; the old explicit f.close() was redundant.
        with open(os.path.join(preprocessing_folder, output_filename + ".pkl"), "wb") as f:
            pickle.dump([d, dct], f)

    return all_output_files
def preprocess_setup(preprocessed_data_dir):
    """Preprocess the BraTS-2019 fold-1 validation set into pickle files.

    Reads the validation case ids for fold 1, locates the trained nnU-Net
    model under ``build/result``, preprocesses the corresponding raw images
    and saves both the per-case pickles and a ``preprocessed_files.pkl``
    index into ``preprocessed_data_dir``.
    """
    print("Preparing for preprocessing data...")

    # Validation set is fold 1
    fold = 1
    validation_fold_file = '../models/image_segmentation/tensorflow/3d_unet_mlperf/inference/nnUNet/folds/fold1_validation.txt'

    # Make sure the model exists
    model_dir = 'build/result/nnUNet/3d_fullres/Task043_BraTS2019/nnUNetTrainerV2__nnUNetPlansv2.mlperf.1'
    model_path = os.path.join(model_dir, "plans.pkl")
    assert os.path.isfile(model_path), "Cannot find the model file {:}!".format(model_path)
    checkpoint_name = "model_final_checkpoint"

    # Other settings
    fp16 = False
    num_threads_preprocessing = 12
    raw_data_dir = 'build/raw_data/nnUNet_raw_data/Task043_BraTS2019/imagesTr'

    # Open list containing validation images from specific fold (e.g. 1)
    validation_files = []
    with open(validation_fold_file) as f:
        for line in f:
            validation_files.append(line.rstrip())

    # Create output and preprocessed directory
    if not os.path.isdir(preprocessed_data_dir):
        os.makedirs(preprocessed_data_dir)

    # Create list of images locations (i.e. 4 images per case => 4 modalities)
    # each raw file is "<case_id>" + 12 trailing characters (e.g. "_0000.nii.gz")
    all_files = subfiles(raw_data_dir, suffix=".nii.gz", join=False, sort=True)
    list_of_lists = [[os.path.join(raw_data_dir, i) for i in all_files if i[:len(j)].startswith(j) and
                      len(i) == (len(j) + 12)] for j in validation_files]

    # Preprocess images, returns filenames list
    # This runs in multiprocess
    # (typo fix in the status message: "Acually" -> "Actually")
    print("Actually preprocessing data...")
    preprocessed_files = preprocess_MLPerf(model_dir, checkpoint_name, fold, fp16, list_of_lists,
                                           validation_files, preprocessed_data_dir, num_threads_preprocessing)

    print("Saving metadata of the preprocessed data...")
    with open(os.path.join(preprocessed_data_dir, "preprocessed_files.pkl"), "wb") as f:
        pickle.dump(preprocessed_files, f)

    print("Preprocessed data saved to {:}".format(preprocessed_data_dir))
    print("Done!")
|
Python
|
Apache-2.0
|
Alavandar08/models/models/image_segmentation/tensorflow/3d_unet_mlperf/inference/nnUNet/preprocess.py
|
e6435a6c-8223-491f-9a37-2036f22f6bce
|
[]
|
[]
|
/*
Copyright The KubeDB Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package framework
import (
api "kubedb.dev/apimachinery/apis/kubedb/v1alpha1"
cs "kubedb.dev/apimachinery/client/clientset/versioned"
"github.com/appscode/go/crypto/rand"
crd_cs "k8s.io/apiextensions-apiserver/pkg/client/clientset/clientset/typed/apiextensions/v1beta1"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
ka "k8s.io/kube-aggregator/pkg/client/clientset_generated/clientset"
"kmodules.xyz/client-go/tools/portforward"
appcat_cs "kmodules.xyz/custom-resources/client/clientset/versioned/typed/appcatalog/v1alpha1"
)
// Default registry and catalog version used by the e2e test framework.
var (
DockerRegistry = "kubedbci"
DBCatalogName = "1.5.4-v1"
)
// Framework bundles every client and setting the e2e tests need: the rest
// config, core/aggregated/CRD clientsets, the KubeDB and AppCatalog
// clientsets, an optional port-forward tunnel, and the test namespace.
type Framework struct {
restConfig *rest.Config
kubeClient kubernetes.Interface
apiExtKubeClient crd_cs.ApiextensionsV1beta1Interface
dbClient cs.Interface
kaClient ka.Interface
tunnel *portforward.Tunnel
appCatalogClient appcat_cs.AppcatalogV1alpha1Interface
namespace string
name string
StorageClass string
}
// New assembles a Framework from the supplied clients and storage class.
// The namespace gets a unique suffix so parallel e2e runs do not collide.
func New(
	restConfig *rest.Config,
	kubeClient kubernetes.Interface,
	apiExtKubeClient crd_cs.ApiextensionsV1beta1Interface,
	dbClient cs.Interface,
	kaClient ka.Interface,
	appCatalogClient appcat_cs.AppcatalogV1alpha1Interface,
	storageClass string,
) *Framework {
	f := &Framework{
		restConfig:       restConfig,
		kubeClient:       kubeClient,
		apiExtKubeClient: apiExtKubeClient,
		dbClient:         dbClient,
		kaClient:         kaClient,
		appCatalogClient: appCatalogClient,
		name:             "memcached-operator",
		namespace:        rand.WithUniqSuffix(api.ResourceSingularMemcached),
		StorageClass:     storageClass,
	}
	return f
}
// Invoke derives a per-test Invocation from the shared Framework,
// tagging it with a unique app name.
func (f *Framework) Invoke() *Invocation {
	inv := &Invocation{
		Framework: f,
		app:       rand.WithUniqSuffix("memcached-e2e"),
	}
	return inv
}
// DBClient exposes the KubeDB clientset held by this invocation.
func (fi *Invocation) DBClient() cs.Interface {
return fi.dbClient
}
// RestConfig exposes the kubernetes rest.Config held by this invocation.
func (fi *Invocation) RestConfig() *rest.Config {
return fi.restConfig
}
// Invocation wraps the shared Framework with a per-test unique app name.
type Invocation struct {
*Framework
app string
}
|
GO
|
Apache-2.0
|
pohly/memcached/test/e2e/framework/framework.go
|
8470db1f-c28f-416e-b87d-8607916acaef
|
[]
|
[]
|
@extends('layouts.main')
@section('title','Surat | ')
@section('css_script')
<style>
/*body {*/
/* background: rgb(204, 204, 204);*/
/*}*/
page {
background: white;
display: block;
margin: 0 auto;
margin-bottom: 0.5cm;
box-shadow: 0 0 0.3cm rgba(0, 0, 0, 0.5);
}
page[size="A4"] {
width: 21cm;
height: 29.7cm;
}
page[size="A4"][layout="landscape"] {
width: 29.7cm;
height: 21cm;
}
page[size="A3"] {
width: 29.7cm;
height: 42cm;
}
page[size="A3"][layout="landscape"] {
width: 42cm;
height: 29.7cm;
}
page[size="A5"] {
width: 14.8cm;
height: 21cm;
}
page[size="A5"][layout="landscape"] {
width: 21cm;
height: 14.8cm;
}
@media print {
body, page {
margin: 0;
box-shadow: 0;
}
}
</style>
@endsection
@section('main_content')
{{-- <page size="A4"></page>--}}
{{-- <page size="A4"></page>--}}
{{-- <page size="A4" layout="landscape"></page>--}}
{{-- <page size="A5" layout="landscape"></page>--}}
{{-- <page size="A3"></page>--}}
{{-- <page size="A3" layout="landscape"></page>--}}
<section class="section feather-bg-img"
style="background-image: url({{asset('lezir/images/features-bg-img-1.png')}})">
<div class="row justify-content-center">
<div class="col-lg-6">
<div class="text-center mb-5">
<h3 class="title mb-3">Awesome Features</h3>
<p class="text-muted font-size-15">Et harum quidem rerum facilis est et expedita distinctio nam
libero tempore cum soluta nobis eligendi cumque.</p>
</div>
</div>
</div>
<div class="container">
<div class="row align-items-center">
<div class="col-lg-10">
<p class="font-weight-medium text-uppercase mb-2"><i
class="mdi mdi-chart-bubble h2 text-primary mr-1 align-middle"></i> Surat Keluar</p>
<div class="card" style="box-shadow: 0 0 0.1cm rgba(0, 0, 0, 0.5); width: 21cm;
height: 29.7cm;">
<div class="card-body">
<table>
<tr>
<td width="15%">
<div>
<img src="{{asset('madiun_logo.png')}}" alt="" width="128">
</div>
</td>
<td align="center" width="85%">
<div>
<b style="font-size: 16pt"> PEMERINTAH KOTA MADIUN</b> <br>
<b>DINAS KOMUNIKASI DAN INFORMATIKA</b> <br>
<div style="font-size: 11pt; margin: 0; padding-top: -10;"> Jalan Perintis
Kemerdekaan No. 32 Kota Madiun
<br>
No Telp / Fax : (0351) 467327 / (0351) 457331 email:
kominfo.madiunkota@gmail.com
</div>
</div>
</td>
</tr>
<tr>
<td colspan="2">
<hr>
</td>
</tr>
</table>
{{-- <h3 class="font-weight-semibold line-height-1_4 mb-4">We do the work you <b>stay focused</b>--}}
{{-- on <b>your customers</b>.</h3>--}}
<!-- <h3 class="font-weight-semibold line-height-1_4 mb-4">Build <b>community</b> & <b>conversion</b> with our suite of <b>social tool</b></h3> -->
{{-- <p class="text-muted font-size-15 mb-4">Temporibus autem quibusdam et aut officiis debitis--}}
{{-- aut rerum a necessitatibus saepe eveniet ut et voluptates repudiandae sint molestiae non--}}
{{-- recusandae itaque.</p>--}}
{{-- <p class="text-muted mb-2"><i class="icon-xs mr-1" data-feather="server"></i> Donec pede--}}
{{-- justo fringilla vel nec.</p>--}}
{{-- <p class="text-muted"><i class="icon-xs mr-1" data-feather="rss"></i> Cras ultricies mi eu--}}
{{-- turpis hendrerit fringilla.</p>--}}
<div class="row align-items-center">
<div class="col-lg-12">
<div class="custom-form mb-5 mb-lg-0">
<div id="message"></div>
<form method="post" action="" name="contact-form"
id="surat">
@CSRF
<div class="row">
<div class="col-md-6">
<div class="form-group">
<label for="name">Yth.*</label>
<input name="yth" id="name" type="text" class="form-control"
placeholder="Pihak yang dituju..."required>
</div>
</div>
<div class="col-md-6">
<div class="form-group">
<label for="email">Dari*</label>
<input name="dari" id="email" type="text" class="form-control"
placeholder="Pengirim ..." required>
</div>
</div>
</div>
<div class="row">
<div class="col-md-6">
<div class="form-group">
<label for="name">No. Surat*</label>
<input name="no_surat" id="name" type="text" class="form-control"
placeholder="1xxxxx..." required>
</div>
</div>
<div class="col-md-6">
<div class="form-group">
<label for="email">Perihal*</label>
<input name="perihal" id="email" type="text" class="form-control"
placeholder="Perihal Surat..." required >
</div>
</div>
</div>
<div class="row">
<div class="col-md-6">
<div class="form-group">
<label for="name">Lampiran*</label>
<input name="lampiran" id="name" type="text" class="form-control"
placeholder="Jumlah Lampiran..." required>
</div>
</div>
<div class="col-md-6">
<div class="form-group">
<label for="email">Tujuan*</label>
<input name="tujuan" id="email" type="text" class="form-control"
placeholder="Tujuan surat..." required>
</div>
</div>
</div>
<div class="row">
<div class="col-lg-12">
<div class="form-group">
<label for="comments">Isi Surat *</label>
<textarea name="pesan" id="summernote" rows="4"
class="form-control"
placeholder="Your message..."></textarea>
</div>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
<div class="mt-5">
<button type="submit" class="btn btn-primary mr-2" onclick="submit_form('{{route('surat.save')}}')"> Simpan & Kirim <i class="mdi mdi-send"></i></button>
{{-- <a href="#" class="btn btn-soft-primary">Simpan Draft </a>--}}
</div>
</div>
{{-- <div class="col-lg-6 offset-lg-1">--}}
{{-- <div class="mt-4 mt-lg-0">--}}
{{-- <img src="images/features-img-1.png" alt="" class="img-fluid d-block mx-auto">--}}
{{-- </div>--}}
{{-- </div>--}}
</div>
</div>
</section>
@endsection
@push('script')
<script>
function submit_form(link) {
$('#surat').attr('action',link).submit();
console.log("hello");
}
</script>
@endpush
|
PHP
|
MIT
|
m1ku100/tcp-Demo/resources/views/auth/surat.blade.php
|
7cc9a01d-d51c-481a-b9ba-fc41fc97adea
|
[]
|
[]
|
"""Get example scripts, notebooks, and data files."""
import argparse
from datetime import datetime, timedelta
from glob import glob
import json
import os
import pkg_resources
from progressbar import ProgressBar
try:
# For Python 3.0 and later
from urllib.request import urlopen
except ImportError:
# Fall back to Python 2's urllib2
from urllib2 import urlopen
import shutil
import sys
# Relative paths (under the examples data root) of every example data file,
# grouped per tutorial data set. The GlobCurrent group expands to one daily
# file for all of 2002 plus 2003-01-01.
example_data_files = (
["MovingEddies_data/" + fn for fn in [
"moving_eddiesP.nc", "moving_eddiesU.nc", "moving_eddiesV.nc"]]
+ ["OFAM_example_data/" + fn for fn in [
"OFAM_simple_U.nc", "OFAM_simple_V.nc"]]
+ ["Peninsula_data/" + fn for fn in [
"peninsulaU.nc", "peninsulaV.nc", "peninsulaP.nc"]]
+ ["GlobCurrent_example_data/" + fn for fn in [
"%s000000-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc" % (
date.strftime("%Y%m%d"))
for date in ([datetime(2002, 1, 1) + timedelta(days=x)
for x in range(0, 365)] + [datetime(2003, 1, 1)])]]
+ ["DecayingMovingEddy_data/" + fn for fn in [
"decaying_moving_eddyU.nc", "decaying_moving_eddyV.nc"]]
+ ["NemoCurvilinear_data/" + fn for fn in [
"U_purely_zonal-ORCA025_grid_U.nc4", "V_purely_zonal-ORCA025_grid_V.nc4",
"mesh_mask.nc4"]]
+ ["NemoNorthSeaORCA025-N006_data/" + fn for fn in [
"ORCA025-N06_20000104d05U.nc", "ORCA025-N06_20000109d05U.nc",
"ORCA025-N06_20000104d05V.nc", "ORCA025-N06_20000109d05V.nc",
"ORCA025-N06_20000104d05W.nc", "ORCA025-N06_20000109d05W.nc",
"coordinates.nc"]])

# Base URL that download_files mirrors the data files from.
example_data_url = "http://oceanparcels.org/examples-data"
def _maybe_create_dir(path):
    """Ensure *path* exists as a directory, creating parents as needed."""
    try:
        os.makedirs(path)
    except OSError:
        # An already-existing directory is fine; anything else propagates.
        if os.path.isdir(path):
            return
        raise
def copy_data_and_examples_from_package_to(target_path):
    """Copy the bundled Parcels examples directory to *target_path*.

    Best effort: a failure (e.g. examples not shipped with the package) is
    printed and otherwise ignored, since missing data can be downloaded
    later from the Parcels website.
    """
    examples_in_package = pkg_resources.resource_filename("parcels", "examples")
    try:
        shutil.copytree(examples_in_package, target_path)
    except Exception as e:
        print(e)
def set_jupyter_kernel_to_python_version(path, python_version=2):
    """Rewrite every ``*.ipynb`` under *path* to use the given python kernel.

    All other notebook metadata is discarded in the process.
    """
    for file_name in glob(os.path.join(path, "*.ipynb")):
        with open(file_name, 'r') as f:
            notebook_data = json.load(f)

        kernel_name = "python{}".format(python_version)
        notebook_data['metadata'] = {
            "kernelspec": {
                "display_name": "Python {}".format(python_version),
                "language": "python",
                "name": kernel_name}}

        with open(file_name, 'w') as f:
            json.dump(notebook_data, f, indent=2)
def _still_to_download(file_names, target_path):
    """Drop entries already present under *target_path*; return the rest.

    NOTE: mutates *file_names* in place and returns that same list.
    """
    for name in list(file_names):
        if os.path.exists(os.path.join(target_path, name)):
            file_names.remove(name)
    return file_names
def download_files(source_url, file_names, target_path):
    """Mirror *file_names* from *source_url* into *target_path*.

    Files that already exist locally are left untouched; parent directories
    are created as needed and a progress bar is shown while downloading.
    """
    _maybe_create_dir(target_path)
    progress = ProgressBar()
    print("Downloading %s ..." % (source_url.split("/")[-1]))
    for file_name in progress(file_names):
        _maybe_create_dir(os.path.join(target_path, os.path.dirname(file_name)))
        destination = os.path.join(target_path, file_name)
        if os.path.exists(destination):
            continue
        response = urlopen(source_url + "/" + file_name)
        with open(destination, 'wb') as dst:
            dst.write(response.read())
def main(target_path=None):
    """Get example scripts, example notebooks, and example data.

    Copies the examples bundled with the parcels package into *target_path*
    (parsed from the command line when not given), normalises the notebook
    kernels to the running python version, and downloads any example data
    files that were not shipped with the package.
    """
    if target_path is None:
        # get target directory from the command line
        parser = argparse.ArgumentParser(
            description="Get Parcels example data.")
        parser.add_argument(
            "target_path",
            help="Where to put the tutorials? (This path will be created.)")
        target_path = parser.parse_args().target_path

    # refuse to clobber an existing directory
    if os.path.exists(target_path):
        print("Error: {} already exists.".format(target_path))
        return

    # copy data and examples
    copy_data_and_examples_from_package_to(target_path)

    # make sure the notebooks use the correct python version
    set_jupyter_kernel_to_python_version(
        target_path,
        python_version=sys.version_info[0])

    # try downloading remaining files
    remaining = _still_to_download(example_data_files, target_path)
    download_files(example_data_url, remaining, target_path)


if __name__ == "__main__":
    main()
|
Python
|
MIT
|
becgorton/parcels/parcels/scripts/get_examples.py
|
ff445fd6-7460-4f69-8034-d14d248e9140
|
[]
|
[]
|
/**
* Copyright (c) 2016 NumberFour AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* NumberFour AG - Initial API and implementation
*/
package org.eclipse.n4js.transpiler.im.impl;
import com.google.common.base.Objects;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.ECollections;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.eclipse.emf.ecore.xcore.lib.XcoreEListExtensions;
import org.eclipse.n4js.transpiler.im.ImPackage;
import org.eclipse.n4js.transpiler.im.VersionedParameterizedTypeRefStructural_IM;
import org.eclipse.n4js.ts.typeRefs.ParameterizedTypeRef;
import org.eclipse.n4js.ts.typeRefs.ParameterizedTypeRefStructural;
import org.eclipse.n4js.ts.typeRefs.StructuralTypeRef;
import org.eclipse.n4js.ts.typeRefs.TypeArgument;
import org.eclipse.n4js.ts.typeRefs.TypeRef;
import org.eclipse.n4js.ts.typeRefs.TypeRefsPackage;
import org.eclipse.n4js.ts.typeRefs.TypeVariableMapping;
import org.eclipse.n4js.ts.typeRefs.Versionable;
import org.eclipse.n4js.ts.typeRefs.VersionedParameterizedTypeRef;
import org.eclipse.n4js.ts.typeRefs.VersionedParameterizedTypeRefStructural;
import org.eclipse.n4js.ts.typeRefs.VersionedReference;
import org.eclipse.n4js.ts.types.TStructMember;
import org.eclipse.n4js.ts.types.TStructuralType;
import org.eclipse.n4js.ts.types.Type;
import org.eclipse.n4js.ts.types.TypeVariable;
import org.eclipse.n4js.ts.types.TypingStrategy;
import org.eclipse.xtext.xbase.lib.Functions.Function1;
import org.eclipse.xtext.xbase.lib.IterableExtensions;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Versioned Parameterized Type Ref Structural IM</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link org.eclipse.n4js.transpiler.im.impl.VersionedParameterizedTypeRefStructural_IMImpl#getRequestedVersion <em>Requested Version</em>}</li>
* <li>{@link org.eclipse.n4js.transpiler.im.impl.VersionedParameterizedTypeRefStructural_IMImpl#getAstStructuralMembers <em>Ast Structural Members</em>}</li>
* <li>{@link org.eclipse.n4js.transpiler.im.impl.VersionedParameterizedTypeRefStructural_IMImpl#getStructuralType <em>Structural Type</em>}</li>
* <li>{@link org.eclipse.n4js.transpiler.im.impl.VersionedParameterizedTypeRefStructural_IMImpl#getGenStructuralMembers <em>Gen Structural Members</em>}</li>
* <li>{@link org.eclipse.n4js.transpiler.im.impl.VersionedParameterizedTypeRefStructural_IMImpl#getPostponedSubstitutions <em>Postponed Substitutions</em>}</li>
* </ul>
*
* @generated
*/
public class VersionedParameterizedTypeRefStructural_IMImpl extends ParameterizedTypeRef_IMImpl implements VersionedParameterizedTypeRefStructural_IM {
	// NOTE: EMF-generated model implementation (see the @generated tags).
	// The reflective eGet/eSet/eUnset/eInvoke methods below dispatch on the
	// feature/operation IDs declared in ImPackage; hand edits are normally
	// overwritten when the model code is regenerated.
	/**
	 * The default value of the '{@link #getRequestedVersion() <em>Requested Version</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getRequestedVersion()
	 * @generated
	 * @ordered
	 */
	protected static final BigDecimal REQUESTED_VERSION_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getRequestedVersion() <em>Requested Version</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getRequestedVersion()
	 * @generated
	 * @ordered
	 */
	protected BigDecimal requestedVersion = REQUESTED_VERSION_EDEFAULT;
	/**
	 * The cached value of the '{@link #getAstStructuralMembers() <em>Ast Structural Members</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getAstStructuralMembers()
	 * @generated
	 * @ordered
	 */
	protected EList<TStructMember> astStructuralMembers;
	/**
	 * The cached value of the '{@link #getStructuralType() <em>Structural Type</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStructuralType()
	 * @generated
	 * @ordered
	 */
	protected TStructuralType structuralType;
	/**
	 * The cached value of the '{@link #getGenStructuralMembers() <em>Gen Structural Members</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getGenStructuralMembers()
	 * @generated
	 * @ordered
	 */
	protected EList<TStructMember> genStructuralMembers;
	/**
	 * The cached value of the '{@link #getPostponedSubstitutions() <em>Postponed Substitutions</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPostponedSubstitutions()
	 * @generated
	 * @ordered
	 */
	protected EList<TypeVariableMapping> postponedSubstitutions;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected VersionedParameterizedTypeRefStructural_IMImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return ImPackage.Literals.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public BigDecimal getRequestedVersion() {
		return requestedVersion;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRequestedVersion(BigDecimal newRequestedVersion) {
		BigDecimal oldRequestedVersion = requestedVersion;
		requestedVersion = newRequestedVersion;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__REQUESTED_VERSION, oldRequestedVersion, requestedVersion));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<TStructMember> getAstStructuralMembers() {
		// Lazily created containment list, as usual for EMF many-valued features.
		if (astStructuralMembers == null) {
			astStructuralMembers = new EObjectContainmentEList<TStructMember>(TStructMember.class, this, ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS);
		}
		return astStructuralMembers;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public TStructuralType getStructuralType() {
		// Resolves a proxy reference on demand and notifies listeners on resolution.
		if (structuralType != null && structuralType.eIsProxy()) {
			InternalEObject oldStructuralType = (InternalEObject)structuralType;
			structuralType = (TStructuralType)eResolveProxy(oldStructuralType);
			if (structuralType != oldStructuralType) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE, oldStructuralType, structuralType));
			}
		}
		return structuralType;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public TStructuralType basicGetStructuralType() {
		// Raw accessor: returns the field without proxy resolution.
		return structuralType;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStructuralType(TStructuralType newStructuralType) {
		TStructuralType oldStructuralType = structuralType;
		structuralType = newStructuralType;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE, oldStructuralType, structuralType));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<TStructMember> getGenStructuralMembers() {
		if (genStructuralMembers == null) {
			genStructuralMembers = new EObjectContainmentEList<TStructMember>(TStructMember.class, this, ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS);
		}
		return genStructuralMembers;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<TypeVariableMapping> getPostponedSubstitutions() {
		if (postponedSubstitutions == null) {
			postponedSubstitutions = new EObjectContainmentEList<TypeVariableMapping>(TypeVariableMapping.class, this, ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS);
		}
		return postponedSubstitutions;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public TypingStrategy getTypingStrategy() {
		// This type reference is structural by nature: an unset (DEFAULT)
		// strategy is reported as STRUCTURAL.
		TypingStrategy _definedTypingStrategy = this.getDefinedTypingStrategy();
		boolean _tripleEquals = (_definedTypingStrategy == TypingStrategy.DEFAULT);
		if (_tripleEquals) {
			return TypingStrategy.STRUCTURAL;
		}
		else {
			return this.getDefinedTypingStrategy();
		}
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTypingStrategy(final TypingStrategy typingStrategy) {
		// NOMINAL is rejected: a structural type reference cannot be made nominal.
		boolean _equals = Objects.equal(typingStrategy, TypingStrategy.NOMINAL);
		if (_equals) {
			throw new IllegalArgumentException("cannot set structural type reference to nominal");
		}
		this.setDefinedTypingStrategy(typingStrategy);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<TStructMember> getStructuralMembers() {
		// Precedence: members owned by the referenced structural type, else the
		// non-empty AST members, else the generated members. The result is
		// wrapped in an unmodifiable view.
		EList<TStructMember> _xifexpression = null;
		TStructuralType _structuralType = this.getStructuralType();
		boolean _tripleNotEquals = (_structuralType != null);
		if (_tripleNotEquals) {
			_xifexpression = this.getStructuralType().getOwnedMembers();
		}
		else {
			EList<TStructMember> _xifexpression_1 = null;
			boolean _isEmpty = this.getAstStructuralMembers().isEmpty();
			boolean _not = (!_isEmpty);
			if (_not) {
				_xifexpression_1 = this.getAstStructuralMembers();
			}
			else {
				_xifexpression_1 = this.getGenStructuralMembers();
			}
			_xifexpression = _xifexpression_1;
		}
		return ECollections.<TStructMember>unmodifiableEList(_xifexpression);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getTypeRefAsString() {
		// Builds a human-readable form: typing strategy + raw type name,
		// optional "<typeArgs>", optional " with { members }" and optional
		// " [[postponed substitutions]]".
		TypingStrategy _typingStrategy = this.getTypingStrategy();
		Type _declaredType = this.getDeclaredType();
		String _rawTypeAsString = null;
		if (_declaredType!=null) {
			_rawTypeAsString=_declaredType.getRawTypeAsString();
		}
		String _plus = (_typingStrategy + _rawTypeAsString);
		String _xifexpression = null;
		boolean _isEmpty = this.getTypeArgs().isEmpty();
		if (_isEmpty) {
			_xifexpression = "";
		}
		else {
			final Function1<TypeArgument, String> _function = new Function1<TypeArgument, String>() {
				public String apply(final TypeArgument it) {
					return it.getTypeRefAsString();
				}
			};
			String _join = IterableExtensions.join(XcoreEListExtensions.<TypeArgument, String>map(this.getTypeArgs(), _function), ",");
			String _plus_1 = ("<" + _join);
			_xifexpression = (_plus_1 + ">");
		}
		String _plus_2 = (_plus + _xifexpression);
		String _xifexpression_1 = null;
		boolean _isEmpty_1 = this.getStructuralMembers().isEmpty();
		if (_isEmpty_1) {
			_xifexpression_1 = "";
		}
		else {
			final Function1<TStructMember, String> _function_1 = new Function1<TStructMember, String>() {
				public String apply(final TStructMember it) {
					return it.getMemberAsString();
				}
			};
			String _join_1 = IterableExtensions.join(XcoreEListExtensions.<TStructMember, String>map(this.getStructuralMembers(), _function_1), "; ");
			String _plus_3 = (" with { " + _join_1);
			String _plus_4 = (_plus_3 + " }");
			String _xifexpression_2 = null;
			boolean _isEmpty_2 = this.getPostponedSubstitutions().isEmpty();
			if (_isEmpty_2) {
				_xifexpression_2 = "";
			}
			else {
				final Function1<TypeVariableMapping, String> _function_2 = new Function1<TypeVariableMapping, String>() {
					public String apply(final TypeVariableMapping it) {
						String _typeAsString = it.getTypeVar().getTypeAsString();
						String _plus = (_typeAsString + "->");
						String _typeRefAsString = it.getTypeArg().getTypeRefAsString();
						return (_plus + _typeRefAsString);
					}
				};
				String _join_2 = IterableExtensions.join(XcoreEListExtensions.<TypeVariableMapping, String>map(this.getPostponedSubstitutions(), _function_2), ", ");
				String _plus_5 = (" [[" + _join_2);
				_xifexpression_2 = (_plus_5 + "]]");
			}
			_xifexpression_1 = (_plus_4 + _xifexpression_2);
		}
		return (_plus_2 + _xifexpression_1);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean hasPostponedSubstitutionFor(final TypeVariable typeVar) {
		// Identity comparison (==) against the mapped type variable is intended here.
		final Function1<TypeVariableMapping, Boolean> _function = new Function1<TypeVariableMapping, Boolean>() {
			public Boolean apply(final TypeVariableMapping m) {
				TypeVariable _typeVar = null;
				if (m!=null) {
					_typeVar=m.getTypeVar();
				}
				return Boolean.valueOf((_typeVar == typeVar));
			}
		};
		return IterableExtensions.<TypeVariableMapping>exists(this.getPostponedSubstitutions(), _function);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public int getVersion() {
		// Explicitly requested version wins; otherwise fall back to the
		// inherited version computation.
		int _xifexpression = (int) 0;
		boolean _hasRequestedVersion = this.hasRequestedVersion();
		if (_hasRequestedVersion) {
			_xifexpression = this.getRequestedVersion().intValue();
		}
		else {
			_xifexpression = super.getVersion();
		}
		return _xifexpression;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean hasRequestedVersion() {
		BigDecimal _requestedVersion = this.getRequestedVersion();
		return (_requestedVersion != null);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public int getRequestedVersionOrZero() {
		int _xifexpression = (int) 0;
		boolean _hasRequestedVersion = this.hasRequestedVersion();
		if (_hasRequestedVersion) {
			_xifexpression = this.getRequestedVersion().intValue();
		}
		else {
			_xifexpression = 0;
		}
		return _xifexpression;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS:
				return ((InternalEList<?>)getAstStructuralMembers()).basicRemove(otherEnd, msgs);
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS:
				return ((InternalEList<?>)getGenStructuralMembers()).basicRemove(otherEnd, msgs);
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS:
				return ((InternalEList<?>)getPostponedSubstitutions()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__REQUESTED_VERSION:
				return getRequestedVersion();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS:
				return getAstStructuralMembers();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE:
				if (resolve) return getStructuralType();
				return basicGetStructuralType();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS:
				return getGenStructuralMembers();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS:
				return getPostponedSubstitutions();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__REQUESTED_VERSION:
				setRequestedVersion((BigDecimal)newValue);
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS:
				getAstStructuralMembers().clear();
				getAstStructuralMembers().addAll((Collection<? extends TStructMember>)newValue);
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE:
				setStructuralType((TStructuralType)newValue);
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS:
				getGenStructuralMembers().clear();
				getGenStructuralMembers().addAll((Collection<? extends TStructMember>)newValue);
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS:
				getPostponedSubstitutions().clear();
				getPostponedSubstitutions().addAll((Collection<? extends TypeVariableMapping>)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__REQUESTED_VERSION:
				setRequestedVersion(REQUESTED_VERSION_EDEFAULT);
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS:
				getAstStructuralMembers().clear();
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE:
				setStructuralType((TStructuralType)null);
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS:
				getGenStructuralMembers().clear();
				return;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS:
				getPostponedSubstitutions().clear();
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__REQUESTED_VERSION:
				return REQUESTED_VERSION_EDEFAULT == null ? requestedVersion != null : !REQUESTED_VERSION_EDEFAULT.equals(requestedVersion);
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS:
				return astStructuralMembers != null && !astStructuralMembers.isEmpty();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE:
				return structuralType != null;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS:
				return genStructuralMembers != null && !genStructuralMembers.isEmpty();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS:
				return postponedSubstitutions != null && !postponedSubstitutions.isEmpty();
		}
		return super.eIsSet(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) {
		// Maps this class's derived feature IDs back to the feature IDs of the
		// given base interface (mixin-style multiple inheritance in EMF).
		if (baseClass == VersionedReference.class) {
			switch (derivedFeatureID) {
				case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__REQUESTED_VERSION: return TypeRefsPackage.VERSIONED_REFERENCE__REQUESTED_VERSION;
				default: return -1;
			}
		}
		if (baseClass == VersionedParameterizedTypeRef.class) {
			switch (derivedFeatureID) {
				default: return -1;
			}
		}
		if (baseClass == StructuralTypeRef.class) {
			switch (derivedFeatureID) {
				case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS: return TypeRefsPackage.STRUCTURAL_TYPE_REF__AST_STRUCTURAL_MEMBERS;
				case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE: return TypeRefsPackage.STRUCTURAL_TYPE_REF__STRUCTURAL_TYPE;
				case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS: return TypeRefsPackage.STRUCTURAL_TYPE_REF__GEN_STRUCTURAL_MEMBERS;
				case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS: return TypeRefsPackage.STRUCTURAL_TYPE_REF__POSTPONED_SUBSTITUTIONS;
				default: return -1;
			}
		}
		if (baseClass == ParameterizedTypeRefStructural.class) {
			switch (derivedFeatureID) {
				default: return -1;
			}
		}
		if (baseClass == VersionedParameterizedTypeRefStructural.class) {
			switch (derivedFeatureID) {
				default: return -1;
			}
		}
		return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) {
		// Inverse of eBaseStructuralFeatureID: base-interface feature IDs to
		// this class's derived feature IDs.
		if (baseClass == VersionedReference.class) {
			switch (baseFeatureID) {
				case TypeRefsPackage.VERSIONED_REFERENCE__REQUESTED_VERSION: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__REQUESTED_VERSION;
				default: return -1;
			}
		}
		if (baseClass == VersionedParameterizedTypeRef.class) {
			switch (baseFeatureID) {
				default: return -1;
			}
		}
		if (baseClass == StructuralTypeRef.class) {
			switch (baseFeatureID) {
				case TypeRefsPackage.STRUCTURAL_TYPE_REF__AST_STRUCTURAL_MEMBERS: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__AST_STRUCTURAL_MEMBERS;
				case TypeRefsPackage.STRUCTURAL_TYPE_REF__STRUCTURAL_TYPE: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__STRUCTURAL_TYPE;
				case TypeRefsPackage.STRUCTURAL_TYPE_REF__GEN_STRUCTURAL_MEMBERS: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__GEN_STRUCTURAL_MEMBERS;
				case TypeRefsPackage.STRUCTURAL_TYPE_REF__POSTPONED_SUBSTITUTIONS: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM__POSTPONED_SUBSTITUTIONS;
				default: return -1;
			}
		}
		if (baseClass == ParameterizedTypeRefStructural.class) {
			switch (baseFeatureID) {
				default: return -1;
			}
		}
		if (baseClass == VersionedParameterizedTypeRefStructural.class) {
			switch (baseFeatureID) {
				default: return -1;
			}
		}
		return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public int eDerivedOperationID(int baseOperationID, Class<?> baseClass) {
		// Maps base-interface operation IDs to this class's operation IDs, so
		// reflective eInvoke calls reach the overriding implementations above.
		if (baseClass == TypeArgument.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.TYPE_ARGUMENT___GET_TYPE_REF_AS_STRING: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPE_REF_AS_STRING;
				default: return super.eDerivedOperationID(baseOperationID, baseClass);
			}
		}
		if (baseClass == Versionable.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.VERSIONABLE___GET_VERSION: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_VERSION;
				default: return super.eDerivedOperationID(baseOperationID, baseClass);
			}
		}
		if (baseClass == TypeRef.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.TYPE_REF___GET_TYPE_REF_AS_STRING: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPE_REF_AS_STRING;
				case TypeRefsPackage.TYPE_REF___GET_TYPING_STRATEGY: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPING_STRATEGY;
				case TypeRefsPackage.TYPE_REF___GET_STRUCTURAL_MEMBERS: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_STRUCTURAL_MEMBERS;
				case TypeRefsPackage.TYPE_REF___GET_VERSION: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_VERSION;
				default: return super.eDerivedOperationID(baseOperationID, baseClass);
			}
		}
		if (baseClass == ParameterizedTypeRef.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.PARAMETERIZED_TYPE_REF___GET_TYPING_STRATEGY: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPING_STRATEGY;
				case TypeRefsPackage.PARAMETERIZED_TYPE_REF___GET_TYPE_REF_AS_STRING: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPE_REF_AS_STRING;
				default: return super.eDerivedOperationID(baseOperationID, baseClass);
			}
		}
		if (baseClass == VersionedReference.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.VERSIONED_REFERENCE___HAS_REQUESTED_VERSION: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___HAS_REQUESTED_VERSION;
				case TypeRefsPackage.VERSIONED_REFERENCE___GET_REQUESTED_VERSION_OR_ZERO: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_REQUESTED_VERSION_OR_ZERO;
				default: return -1;
			}
		}
		if (baseClass == VersionedParameterizedTypeRef.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.VERSIONED_PARAMETERIZED_TYPE_REF___GET_VERSION: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_VERSION;
				default: return -1;
			}
		}
		if (baseClass == StructuralTypeRef.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.STRUCTURAL_TYPE_REF___GET_TYPING_STRATEGY: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPING_STRATEGY;
				case TypeRefsPackage.STRUCTURAL_TYPE_REF___SET_TYPING_STRATEGY__TYPINGSTRATEGY: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___SET_TYPING_STRATEGY__TYPINGSTRATEGY;
				case TypeRefsPackage.STRUCTURAL_TYPE_REF___GET_STRUCTURAL_MEMBERS: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_STRUCTURAL_MEMBERS;
				case TypeRefsPackage.STRUCTURAL_TYPE_REF___HAS_POSTPONED_SUBSTITUTION_FOR__TYPEVARIABLE: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___HAS_POSTPONED_SUBSTITUTION_FOR__TYPEVARIABLE;
				default: return -1;
			}
		}
		if (baseClass == ParameterizedTypeRefStructural.class) {
			switch (baseOperationID) {
				case TypeRefsPackage.PARAMETERIZED_TYPE_REF_STRUCTURAL___GET_TYPING_STRATEGY: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPING_STRATEGY;
				case TypeRefsPackage.PARAMETERIZED_TYPE_REF_STRUCTURAL___SET_TYPING_STRATEGY__TYPINGSTRATEGY: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___SET_TYPING_STRATEGY__TYPINGSTRATEGY;
				case TypeRefsPackage.PARAMETERIZED_TYPE_REF_STRUCTURAL___GET_STRUCTURAL_MEMBERS: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_STRUCTURAL_MEMBERS;
				case TypeRefsPackage.PARAMETERIZED_TYPE_REF_STRUCTURAL___GET_TYPE_REF_AS_STRING: return ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPE_REF_AS_STRING;
				default: return -1;
			}
		}
		if (baseClass == VersionedParameterizedTypeRefStructural.class) {
			switch (baseOperationID) {
				default: return -1;
			}
		}
		return super.eDerivedOperationID(baseOperationID, baseClass);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eInvoke(int operationID, EList<?> arguments) throws InvocationTargetException {
		switch (operationID) {
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPING_STRATEGY:
				return getTypingStrategy();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___SET_TYPING_STRATEGY__TYPINGSTRATEGY:
				setTypingStrategy((TypingStrategy)arguments.get(0));
				return null;
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_STRUCTURAL_MEMBERS:
				return getStructuralMembers();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_TYPE_REF_AS_STRING:
				return getTypeRefAsString();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___HAS_POSTPONED_SUBSTITUTION_FOR__TYPEVARIABLE:
				return hasPostponedSubstitutionFor((TypeVariable)arguments.get(0));
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_VERSION:
				return getVersion();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___HAS_REQUESTED_VERSION:
				return hasRequestedVersion();
			case ImPackage.VERSIONED_PARAMETERIZED_TYPE_REF_STRUCTURAL_IM___GET_REQUESTED_VERSION_OR_ZERO:
				return getRequestedVersionOrZero();
		}
		return super.eInvoke(operationID, arguments);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();
		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (requestedVersion: ");
		result.append(requestedVersion);
		result.append(')');
		return result.toString();
	}
} //VersionedParameterizedTypeRefStructural_IMImpl
|
Java
|
EPL-1.0
|
gbastkowski/n4js/plugins/org.eclipse.n4js.transpiler/emf-gen/org/eclipse/n4js/transpiler/im/impl/VersionedParameterizedTypeRefStructural_IMImpl.java
|
cdc219bd-e31d-48d9-99dd-98d6f2ebf26e
|
[]
|
[]
|
const express = require('express');
const cors = require('cors');
const bodyParser = require('body-parser');
const session = require('express-session');
const MYSQLStore = require('express-session-sequelize')(session.Store);
const next = require('next');
const compression = require('compression');
const helmet = require('helmet');
// const Sequelize = require('sequelize');
const logger = require('./logger');
const { insertTemplates } = require('./models/EmailTemplate');
const getRootUrl = require('../lib/api/getRootUrl');
// const User = require('./models/User');
const { initMigrateData } = require('./models/Group');
const { newMysqlInstance } = require('./utils/utils');
const setupGoogle = require('./google');
const fileSystem = require('./filesystem');
const api = require('./api');
require('dotenv').config();
const dev = process.env.NODE_ENV !== 'production';
// const MONGO_URL = process.env.MONGO_URL_TEST;
// const options = {
//   useNewUrlParser: true,
//   useCreateIndex: true,
//   useFindAndModify: false,
//   useUnifiedTopology: true,
// };
const port = process.env.PORT || 8000;
const ROOT_URL = getRootUrl();
// Public pages served under rewritten Next.js routes.
const URL_MAP = {
  '/login': '/public/login',
  '/contact': '/public/contact',
};
const app = next({ dev });
const handle = app.getRequestHandler();
const myDatabase = newMysqlInstance();
// const myDatabase = new Sequelize(process.env.MYSQL_DATABASE, process.env.MYSQL_USER, process.env.MYSQL_PASSWORD, {
//   host: process.env.MYSQL_SERVER,
//   dialect: 'mysql',
// });
// Nextjs's server prepared
app.prepare().then(async () => {
  // await tf.setBackend('cpu');
  const server = express();
  server.use(helmet({ contentSecurityPolicy: false }));
  server.use(compression());
  // One-time group data migration, opt-in via env flag.
  if (process.env.REQUIRE_INIT_GROUP === 'true') {
    console.log('Starting initiate Group Data');
    try {
      await initMigrateData();
      console.log('Initiate Group Data Done.');
    } catch (err) {
      console.error('Init Group error:', err);
    }
  }
  // configuring the MySQL-backed session store
  const sess = {
    name: process.env.SESSION_NAME,
    secret: process.env.SESSION_SECRET,
    store: new MYSQLStore({ db: myDatabase }),
    resave: false,
    saveUninitialized: false,
    cookie: {
      httpOnly: true,
      maxAge: 14 * 24 * 60 * 60 * 1000, // two weeks
      domain: process.env.COOKIE_DOMAIN,
    },
  };
  if (!dev) {
    server.set('trust proxy', 1); // sets req.hostname, req.ip
    // BUGFIX: was `false`, contradicting the intent stated in the original
    // comment ("sets cookie over HTTPS only"). In production the session
    // cookie must be marked Secure so it is only sent over HTTPS; with
    // 'trust proxy' enabled express-session honors X-Forwarded-Proto.
    sess.cookie.secure = true; // sets cookie over HTTPS only
  }
  server.use(session(sess));
  await insertTemplates();
  server.use(cors());
  server.use(bodyParser.urlencoded({ extended: true, parameterLimit: 100000, limit: '50mb' }));
  server.use(bodyParser.json({ limit: '50mb' }));
  // server.get('/', async (req, res) => {
  //   // await User.create({
  //   //   department: 'AI Research',
  //   //   displayName: 'Jia Wang',
  //   //   email: 'jia.wang@nhfc.com',
  //   //   googleId: process.env.GOOGLE_CLIENTID,
  //   //   avatarUrl:
  //   //     'https://lh3.googleusercontent.com/-XdUIqdMkCWA/AAAAAAAAAAI/AAAAAAAAAAA/4252rscbv5M/photo.jpg?sz=128',
  //   // });
  //   const user = await User.findOne({ department: 'AI Research' });
  //   req.user = user;
  //   app.render(req, res, '/');
  // });
  setupGoogle({ server, ROOT_URL });
  fileSystem({ server });
  api(server);
  // server.get('*', (req, res) => handle(req, res));
  // Route known public paths through URL_MAP; everything else goes to Next.js.
  server.get('*', (req, res) => {
    const url = URL_MAP[req.path];
    if (url) {
      app.render(req, res, url);
    } else {
      handle(req, res);
    }
  });
  // starting express server
  server.listen(port, (err) => {
    if (err) throw err;
    logger.info(`> Ready on ${ROOT_URL}`); // eslint-disable-line no-console
  });
});
|
JavaScript
|
MIT
|
nhfc-ai/NHFC-MGMT/server/server.js
|
38288b81-030c-4bf1-841b-3f8656cc8420
|
[{"tag": "NAME", "value": "Jia Wang", "start": 2861, "end": 2869, "context": "artment: 'AI Research',\n // // displayName: 'Jia Wang',\n // // email: 'jia.wang@nhfc.com',\n // "}, {"tag": "EMAIL", "value": "jia.wang@nhfc.com", "start": 2892, "end": 2909, "context": "// displayName: 'Jia Wang',\n // // email: 'jia.wang@nhfc.com',\n // // googleId: process.env.GOOGLE_CLIENT"}]
|
[{"tag": "NAME", "value": "Jia Wang", "start": 2861, "end": 2869, "context": "artment: 'AI Research',\n // // displayName: 'Jia Wang',\n // // email: 'jia.wang@nhfc.com',\n // "}, {"tag": "EMAIL", "value": "jia.wang@nhfc.com", "start": 2892, "end": 2909, "context": "// displayName: 'Jia Wang',\n // // email: 'jia.wang@nhfc.com',\n // // googleId: process.env.GOOGLE_CLIENT"}]
|
/**
* PcapSplitter application
* ========================
* An application that splits a pcap file into smaller pcap files by a user-defined criteria:
*
* 1) File-size - splits the pcap file to smaller pcap files, each file with a certain size defined by the user
* 2) Packet-count - splits the pcap file to smaller pcap files, each with number of packets defined by the user
* 3) Client-IP - splits the pcap file to smaller pcap files so each file contains all TCP/UDP connections
* initiated by a certain client-ip, for example: file#1 will contain connections initiated by 1.1.1.1, file#2
* will contain connections initiated by 1.2.3.4, and so on. The user can limit the number of output files, in
* this case multiple client-ips will be written to the same file. If the user doesn't set such limit - each file
* will contain one client-ip
* 4) Server-IP - splits the pcap file to smaller pcap files so each file contains all TCP/UDP connections
* to a certain server-ip, for example: file#1 will contain connections to 8.8.8.8, file#2 will contain connections
* to 10.12.13.14, and so on. The user can limit the number of output files, in this case multiple server-ips will
* be written to the same file. If the user doesn't set such limit - each file will contain one server-ip
* 5) Server-port - splits the pcap file to smaller pcap files so each file contains all TCP/UDP connections
* to a certain server port, for example: file#1 will contain all port 80 connections (HTTP), file#2 will contain
* all port 25 (SMTP) connections, and so on. The user can limit the number of output files, in this case connections
* to multiple server ports will be written to the same file. If the user doesn't set such limit - each file will
* contain connection to one server port only
* 6) IP source and IP dest - splits the pcap file to smaller pcap files so each file contains all connections made
* between two IP addresses. The user can limit the number of output files, in this case multiple pairs of IP source
* and dest will be written to the same file. If the user doesn't set such limit - all connection of one pair of
* source and dest IP will be written to each file
* 7) Connection - splits a pcap file to smaller pcap files by TCP/UDP connection meaning each connection will be written
* to a certain file. The user can limit the number of output files, in this case an equal number of connections will
* be written to the same file. If the user doesn't set such limit - each file will contain one connection
* 8) BPF filter - splits the pcap file into two files: one that contains all packets matching the input BPF filter
* and the other one with the rest of the packets
*
* Remarks:
 * - Options 3-7 support both IPv4 and IPv6
* - Number of output files isn't limited, unless the user set such limit in options 3-7
* - There is no limit on the size of the input file, the number of packets it contains or the number of connections it
* contains
* - The user can also set a BPF filter to instruct the application to handle only packets filtered by the filter. The rest
* of the packets in the input file will be ignored
* - In options 3-5 & 7 all packets which aren't UDP or TCP (hence don't belong to any connection) will be written to
* one output file, separate from the other output files (usually file#0)
* - Works only on files of the pcap (TCPDUMP) format
*
*/
#include <stdlib.h>
#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <iomanip>
#include <map>
#include <RawPacket.h>
#include <Packet.h>
#include <PcapFileDevice.h>
#include "SimpleSplitters.h"
#include "IPPortSplitters.h"
#include "ConnectionSplitters.h"
#include <getopt.h>
#include <SystemUtils.h>
#include <PcapPlusPlusVersion.h>
using namespace pcpp;
// Command-line options of this utility, in getopt_long() format:
// {long option name, argument requirement, flag pointer (unused), short option character}
static struct option PcapSplitterOptions[] =
{
	{"input-file", required_argument, 0, 'f'},
	{"output-file", required_argument, 0, 'o'},
	{"method", required_argument, 0, 'm'},
	{"param", required_argument, 0, 'p'},
	{"filter", required_argument, 0, 'i'},
	{"help", no_argument, 0, 'h'},
	{"version", no_argument, 0, 'v'},
	// all-zero sentinel entry marking the end of the array (required by getopt_long)
	{0, 0, 0, 0}
};
// Print an error message followed by the usage text, then terminate with exit code 1
#define EXIT_WITH_ERROR(reason, ...) do { \
	printf("\nError: " reason "\n\n", ## __VA_ARGS__); \
	printUsage(); \
	exit(1); \
	} while(0)

// Names of the supported split methods (the values accepted by the '-m' option)
#define SPLIT_BY_FILE_SIZE     "file-size"
#define SPLIT_BY_PACKET_COUNT  "packet-count"
#define SPLIT_BY_IP_CLIENT     "client-ip"
#define SPLIT_BY_IP_SERVER     "server-ip"
#define SPLIT_BY_SERVER_PORT   "server-port"
#define SPLIT_BY_2_TUPLE       "ip-src-dst"
#define SPLIT_BY_5_TUPLE       "connection"
#define SPLIT_BY_BPF_FILTER    "bpf-filter"
#define SPLIT_BY_ROUND_ROBIN   "round-robin"

// Platform-dependent directory separator, used when composing output file paths
#if defined(WIN32) || defined(WINx64)
#define SEPARATOR '\\'
#else
#define SEPARATOR '/'
#endif
/**
 * Print application usage text and terminate the process with exit code 0.
 * Used both for '-h' and (via EXIT_WITH_ERROR) after reporting a bad invocation.
 */
void printUsage()
{
	printf("\nUsage:\n"
			"-------\n"
			"%s [-h] [-v] [-i filter] -f pcap_file -o output_dir -m split_method [-p split_param]\n"
			"\nOptions:\n\n"
			"    -f pcap_file    : Input pcap file name\n"
			"    -o output_dir   : The directory where the output files shall be written\n"
			"    -m split_method : The method to split with. Can take one of the following params:\n"
			"                      'file-size' - split files by size in bytes\n"
			"                      'packet-count' - split files by packet count\n"
			"                      'client-ip' - split files by client IP, meaning all connections with\n"
			"                      the same client IP will be in the same file\n"
			"                      'server-ip' - split files by server IP, meaning all connections with\n"
			"                      the same server IP will be in the same file\n"
			"                      'server-port' - split files by server port, meaning all connections with\n"
			"                      the same server port will be in the same file\n"
			"                      'ip-src-dst' - split files by IP src and dst (2-tuple), meaning all connections\n"
			"                      with the same IPs will be in the same file\n"
			"                      'connection' - split files by connection (5-tuple), meaning all packets\n"
			"                      of a connection will be in the same file\n"
			"                      'bpf-filter' - split file into two files: one that contains all packets\n"
			"                      matching the given BPF filter (file #0) and one that contains\n"
			"                      the rest of the packets (file #1)\n"
			"                      'round-robin' - split the file in a round-robin manner - each packet to a\n"
			"                      different file\n"
			"    -p split-param  : The relevant parameter for the split method:\n"
			"                      'method = file-size' => split-param is the max size per file (in bytes).\n"
			"                      split-param is required for this method\n"
			"                      'method = packet-count' => split-param is the number of packet per file.\n"
			"                      split-param is required for this method\n"
			"                      'method = client-ip' => split-param is max number of files to open.\n"
			"                      If not provided the default is unlimited number of files\n"
			"                      'method = server-ip' => split-param is max number of files to open.\n"
			"                      If not provided the default is unlimited number of files\n"
			"                      'method = server-port' => split-param is max number of files to open.\n"
			"                      If not provided the default is unlimited number of files\n"
			"                      'method = ip-src-dst' => split-param is max number of files to open.\n"
			"                      If not provided the default is unlimited number of files\n"
			"                      'method = connection' => split-param is max number of files to open.\n"
			"                      If not provided the default is unlimited number of files\n"
			"                      'method = bpf-filter' => split-param is the BPF filter to match upon\n"
			"                      'method = round-robin' => split-param is number of files to round-robin packets between\n"
			"    -i filter       : Apply a BPF filter, meaning only filtered packets will be counted in the split\n"
			// fixed typo in user-facing text: "exists" -> "exits"
			"    -v              : Displays the current version and exits\n"
			"    -h              : Displays this help message and exits\n", AppName::get().c_str());
	exit(0);
}
/**
 * Print application name, version and build information, then terminate the
 * process with exit code 0 (invoked for the '-v' option).
 */
void printAppVersion()
{
	// version, build time and git details all come from the PcapPlusPlus library
	printf("%s %s\n", AppName::get().c_str(), getPcapPlusPlusVersionFull().c_str());
	printf("Built: %s\n", getBuildDateTime().c_str());
	printf("Built from: %s\n", getGitInfo().c_str());
	// never returns - the process ends here
	exit(0);
}
// Fallback definition so this helper is usable even if the SEPARATOR macro
// hasn't been defined earlier (no-op in this file, where it already is)
#ifndef SEPARATOR
#if defined(WIN32) || defined(WINx64)
#define SEPARATOR '\\'
#else
#define SEPARATOR '/'
#endif
#endif

/**
 * An auxiliary method for extracting the file name without the extension from a file path,
 * for example: for the input '/home/myuser/mypcap.pcap' -> return value will be 'mypcap'.
 * Returns an empty string for an empty path.
 */
std::string getFileNameWithoutExtension(const std::string& path)
{
	// if path is empty, return an empty string
	if (path.empty())
		return "";

	// find the last separator (platform-dependent) - where the directory part ends
	// and the file name starts. If there is no separator, the whole path is the file name.
	// This collapses the original's two duplicated branches into one.
	size_t sepPos = path.rfind(SEPARATOR);
	std::string fileName = (sepPos == std::string::npos) ? path : path.substr(sepPos + 1);

	// remove the extension (the part after the last "."), if any
	size_t dotPos = fileName.rfind('.');
	if (dotPos != std::string::npos)
		return fileName.substr(0, dotPos);

	// file name has no extension - return it as-is
	// (the original's trailing return("") after both branches was unreachable and is removed)
	return fileName;
}
/**
 * main method of this utility: parses command-line arguments, builds the
 * requested Splitter, then streams every packet of the input pcap into the
 * per-file writer the splitter assigns it to.
 */
int main(int argc, char* argv[])
{
	AppName::init(argc, argv);

	std::string inputPcapFileName = "";
	std::string outputPcapDir = "";

	std::string filter = "";

	std::string method = "";

	// raw split-method parameter; interpretation depends on the chosen method below
	char param[1000];
	memset(param, 0, 1000);

	bool paramWasSet = false;

	int optionIndex = 0;
	// getopt_long() returns an int; the original used 'char', which breaks the
	// comparison against -1 on platforms where char is unsigned
	int opt = 0;

	while((opt = getopt_long (argc, argv, "f:o:m:p:i:vh", PcapSplitterOptions, &optionIndex)) != -1)
	{
		switch (opt)
		{
			case 0:
				break;
			case 'f':
				inputPcapFileName = optarg;
				break;
			case 'o':
				outputPcapDir = optarg;
				break;
			case 'm':
				method = optarg;
				break;
			case 'p':
				// strncpy() does not null-terminate when the source fills the buffer,
				// so copy at most size-1 chars and terminate explicitly
				strncpy(param, optarg, sizeof(param) - 1);
				param[sizeof(param) - 1] = '\0';
				paramWasSet = true;
				break;
			case 'i':
				filter = optarg;
				break;
			case 'h':
				printUsage();
				break;
			case 'v':
				printAppVersion();
				break;
			default:
				printUsage();
				exit(-1);
		}
	}

	// input file, output directory and split method are all mandatory
	if (inputPcapFileName == "")
	{
		EXIT_WITH_ERROR("Input file name was not given");
	}

	if (outputPcapDir == "")
	{
		EXIT_WITH_ERROR("Output directory name was not given");
	}

	if (!pcpp::directoryExists(outputPcapDir))
	{
		EXIT_WITH_ERROR("Output directory doesn't exist");
	}

	if (method == "")
	{
		EXIT_WITH_ERROR("Split method was not given");
	}

	Splitter* splitter = NULL;

	// decide of the splitter to use, according to the user's choice.
	// size/count methods default to 0 when no param was set (rejected below by
	// isSplitterParamLegal); the per-connection methods default to unlimited files
	if (method == SPLIT_BY_FILE_SIZE)
	{
		uint64_t paramAsUint64 = (paramWasSet ? strtoull(param, NULL, 10) : 0);
		splitter = new FileSizeSplitter(paramAsUint64);
	}
	else if (method == SPLIT_BY_PACKET_COUNT)
	{
		int paramAsInt = (paramWasSet ? atoi(param) : 0);
		splitter = new PacketCountSplitter(paramAsInt);
	}
	else if (method == SPLIT_BY_IP_CLIENT)
	{
		int paramAsInt = (paramWasSet ? atoi(param) : SplitterWithMaxFiles::UNLIMITED_FILES_MAGIC_NUMBER);
		splitter = new ClientIPSplitter(paramAsInt);
	}
	else if (method == SPLIT_BY_IP_SERVER)
	{
		int paramAsInt = (paramWasSet ? atoi(param) : SplitterWithMaxFiles::UNLIMITED_FILES_MAGIC_NUMBER);
		splitter = new ServerIPSplitter(paramAsInt);
	}
	else if (method == SPLIT_BY_SERVER_PORT)
	{
		int paramAsInt = (paramWasSet ? atoi(param) : SplitterWithMaxFiles::UNLIMITED_FILES_MAGIC_NUMBER);
		splitter = new ServerPortSplitter(paramAsInt);
	}
	else if (method == SPLIT_BY_2_TUPLE)
	{
		int paramAsInt = (paramWasSet ? atoi(param) : SplitterWithMaxFiles::UNLIMITED_FILES_MAGIC_NUMBER);
		splitter = new TwoTupleSplitter(paramAsInt);
	}
	else if (method == SPLIT_BY_5_TUPLE)
	{
		int paramAsInt = (paramWasSet ? atoi(param) : SplitterWithMaxFiles::UNLIMITED_FILES_MAGIC_NUMBER);
		splitter = new FiveTupleSplitter(paramAsInt);
	}
	else if (method == SPLIT_BY_BPF_FILTER)
	{
		splitter = new BpfCriteriaSplitter(std::string(param));
	}
	else if (method == SPLIT_BY_ROUND_ROBIN)
	{
		int paramAsInt = (paramWasSet ? atoi(param) : 0);
		splitter = new RoundRobinSplitter(paramAsInt);
	}
	else
		EXIT_WITH_ERROR("Unknown method '%s'", method.c_str());

	// verify splitter param is legal, otherwise return an error
	std::string errorStr;
	if (!splitter->isSplitterParamLegal(errorStr))
	{
		EXIT_WITH_ERROR("%s", errorStr.c_str());
	}

	// prepare the output file format: /requested-path/original-file-name-[4-digit-number-starting-at-0000].pcap
	std::string outputPcapFileName = outputPcapDir + std::string(1, SEPARATOR) + getFileNameWithoutExtension(inputPcapFileName) + "-";

	// open a pcap file for reading
	PcapFileReaderDevice reader(inputPcapFileName.c_str());
	if (!reader.open())
	{
		EXIT_WITH_ERROR("Error opening input pcap file\n");
	}

	// set a BPF filter if provided - only matching packets will be read
	if (filter != "")
	{
		if (!reader.setFilter(filter))
			EXIT_WITH_ERROR("Couldn't set filter '%s'", filter.c_str());
	}

	printf("Started...\n");

	int packetCountSoFar = 0;
	int numOfFiles = 0;
	RawPacket rawPacket;

	// map of file number to its PcapFileWriterDevice; NULL value means the
	// file was opened once and later closed by the splitter
	std::map<int, PcapFileWriterDevice*> outputFiles;

	// read all packets from input file, for each packet do:
	while (reader.getNextPacket(rawPacket))
	{
		// parse the raw packet into a parsed packet
		Packet parsedPacket(&rawPacket);

		std::vector<int> filesToClose;

		// call the splitter to get the file number to write the current packet to
		int fileNum = splitter->getFileNumber(parsedPacket, filesToClose);

		// if file number is seen for the first time (meaning it's the first packet written to it)
		if (outputFiles.find(fileNum) == outputFiles.end())
		{
			// get file name from the splitter and add the .pcap extension
			std::string fileName = splitter->getFileName(parsedPacket, outputPcapFileName, fileNum) + ".pcap";

			// create a new PcapFileWriterDevice for this file
			outputFiles[fileNum] = new PcapFileWriterDevice(fileName.c_str(), reader.getLinkLayerType());

			// open the writer; stop processing on failure
			if (!outputFiles[fileNum]->open())
				break;

			numOfFiles++;
		}

		// if file number exists in the map but PcapFileWriterDevice is null it means this file was open once and
		// then closed. In this case we need to re-open the PcapFileWriterDevice in append mode
		else if (outputFiles[fileNum] == NULL)
		{
			// get file name from the splitter and add the .pcap extension
			std::string fileName = splitter->getFileName(parsedPacket, outputPcapFileName, fileNum) + ".pcap";

			// re-create the PcapFileWriterDevice
			outputFiles[fileNum] = new PcapFileWriterDevice(fileName.c_str());

			// open the writer in __append__ mode
			if (!outputFiles[fileNum]->open(true))
				break;
		}

		// write the packet to the writer
		outputFiles[fileNum]->writePacket(*parsedPacket.getRawPacket());

		// if splitter wants us to close files - go over the file numbers and close them
		for (std::vector<int>::iterator it = filesToClose.begin(); it != filesToClose.end(); it++)
		{
			// check if that file number is in the map
			if (outputFiles.find(*it) != outputFiles.end())
			{
				// close the writer
				outputFiles[*it]->close();

				// free the writer memory and put null in the map record
				delete outputFiles[*it];
				outputFiles[*it] = NULL;
			}
		}

		packetCountSoFar++;
	}

	std::cout << "Finished. Read and written " << packetCountSoFar << " packets to " << numOfFiles << " files" << std::endl;

	// close the reader file
	reader.close();

	// close and free the writer devices which are still open
	// (the original leaked these objects - they were closed but never deleted)
	for(std::map<int, PcapFileWriterDevice*>::iterator it = outputFiles.begin(); it != outputFiles.end(); ++it)
	{
		if (it->second != NULL)
		{
			it->second->close();
			delete it->second;
		}
	}

	// free the splitter (the original leaked it)
	delete splitter;

	return 0;
}
|
C++
|
Unlicense
|
AndreyBronin/PcapPlusPlus/Examples/PcapSplitter/main.cpp
|
e2947328-03c6-4104-8cea-3b3c75f4b58f
|
[]
|
[]
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
from spack import *
class ScalapackBase(CMakePackage):
    """Base class for building ScaLAPACK, shared with the AMD optimized version
    of the library in the 'amdscalapack' package.
    """

    variant(
        'build_type',
        default='Release',
        description='CMake build type',
        values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
    variant(
        'shared',
        default=True,
        description='Build the shared library version'
    )
    variant(
        'pic',
        default=False,
        description='Build position independent code'
    )

    provides('scalapack')

    depends_on('mpi')
    depends_on('lapack')
    depends_on('blas')
    depends_on('cmake', when='@2.0.0:', type='build')

    # See: https://github.com/Reference-ScaLAPACK/scalapack/issues/9
    patch("cmake_fortran_mangle.patch", when='@2.0.2:2.0')
    # See: https://github.com/Reference-ScaLAPACK/scalapack/pull/10
    patch("mpi2-compatibility.patch", when='@2.0.2:2.0')
    # See: https://github.com/Reference-ScaLAPACK/scalapack/pull/16
    patch("int_overflow.patch", when='@2.0.0:2.1.0')
    # See: https://github.com/Reference-ScaLAPACK/scalapack/pull/23
    patch("gcc10-compatibility.patch", when='@2.0.0:2.1.0')

    @property
    def libs(self):
        """Return the ScaLAPACK libraries installed by this package.

        Note that the default will be to search for
        'libnetlib-scalapack.<suffix>'.
        """
        # idiom fix: membership test is already a bool, no ternary needed
        shared = '+shared' in self.spec
        return find_libraries(
            'libscalapack', root=self.prefix, shared=shared, recursive=True
        )

    def cmake_args(self):
        """Build the list of CMake options for configuring ScaLAPACK."""
        spec = self.spec

        options = [
            "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else
                                             'OFF'),
            "-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else
                                             'ON')
        ]

        # Make sure we use Spack's Lapack:
        blas = spec['blas'].libs
        lapack = spec['lapack'].libs
        options.extend([
            '-DLAPACK_FOUND=true',
            '-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include,
            '-DLAPACK_LIBRARIES=%s' % (lapack.joined(';')),
            '-DBLAS_LIBRARIES=%s' % (blas.joined(';'))
        ])

        c_flags = []
        if '+pic' in spec:
            c_flags.append(self.compiler.cc_pic_flag)
            options.append(
                "-DCMAKE_Fortran_FLAGS=%s" % self.compiler.fc_pic_flag
            )

        # Work around errors of the form:
        #   error: implicit declaration of function 'BI_smvcopy' is
        #   invalid in C99 [-Werror,-Wimplicit-function-declaration]
        if spec.satisfies('%clang') or spec.satisfies('%apple-clang'):
            c_flags.append('-Wno-error=implicit-function-declaration')

        options.append(
            self.define('CMAKE_C_FLAGS', ' '.join(c_flags))
        )

        return options

    @run_after('install')
    def fix_darwin_install(self):
        """Repair install names of the shared libraries on macOS."""
        # The shared libraries are not installed correctly on Darwin:
        if (sys.platform == 'darwin') and ('+shared' in self.spec):
            fix_darwin_install_name(self.spec.prefix.lib)
class NetlibScalapack(ScalapackBase):
    """ScaLAPACK is a library of high-performance linear algebra routines for
    parallel distributed memory machines
    """

    homepage = "https://www.netlib.org/scalapack/"
    url = "https://www.netlib.org/scalapack/scalapack-2.0.2.tgz"

    tags = ['e4s']

    version('2.1.0', sha256='61d9216cf81d246944720cfce96255878a3f85dec13b9351f1fa0fd6768220a6')
    version('2.0.2', sha256='0c74aeae690fe5ee4db7926f49c5d0bb69ce09eea75beb915e00bba07530395c')
    version('2.0.1', sha256='a9b34278d4e10b40cbe084c6d87d09af8845e874250719bfbbc497b2a88bfde1')
    version('2.0.0', sha256='e51fbd9c3ef3a0dbd81385b868e2355900148eea689bf915c5383d72daf73114')

    # versions before 2.0.0 do not use cmake and require blacs as
    # a separate package
|
Python
|
ECL-2.0
|
Bambi/spack/var/spack/repos/builtin/packages/netlib-scalapack/package.py
|
4fca43ca-004d-46f8-a728-8044fe9916a7
|
[]
|
[]
|
<?php
// Pull in shared session/login helpers and common utility functions
include_once("php_includes/check_login_status.php");
include_once("classes/develop_php_library.php");
session_start();
// If user is logged in, header them away
if(isset($_SESSION["username"])){
header("location: user.php?u=".$_SESSION["username"]);
exit();
}
?><?php
// AJAX CALLS THIS LOGIN CODE TO EXECUTE
if(isset($_POST["e"])){
// CONNECT TO THE DATABASE
// GATHER THE POSTED DATA INTO LOCAL VARIABLES AND SANITIZE
$e = mysqli_real_escape_string($db_conx, $_POST['e']);
// SECURITY NOTE(review): md5() is not an acceptable password hash; migrate to
// password_hash()/password_verify() (requires re-hashing stored passwords)
$p = md5($_POST['p']);
// GET USER IP ADDRESS
$ip = preg_replace('#[^0-9.]#', '', getenv('REMOTE_ADDR'));
// FORM DATA ERROR HANDLING
if($e == "" || $p == ""){
echo "login_failed";
exit();
} else {
// END FORM DATA ERROR HANDLING
// delay every attempt by 3 seconds (presumably to slow brute-force attempts)
sleep(3);
$sql = "SELECT id, username, password FROM users WHERE email='$e' AND activated='1' LIMIT 1";
$query = mysqli_query($db_conx, $sql);
$row = mysqli_fetch_row($query);
$db_id = $row[0];
$db_username = $row[1];
$db_pass_str = $row[2];
// compare submitted md5 hash to the stored value
if($p != $db_pass_str){
echo "login_failed";
exit();
} else {
// CREATE THEIR SESSIONS AND COOKIES
$_SESSION['userid'] = $db_id;
$_SESSION['username'] = $db_username;
$_SESSION['password'] = $db_pass_str;
// SECURITY NOTE(review): storing the password hash in client-side cookies allows
// replay if cookies leak; prefer a random, server-stored remember-me token
setcookie("id", $db_id, strtotime( '+30 days' ), "/", "", "", TRUE);
setcookie("user", $db_username, strtotime( '+30 days' ), "/", "", "", TRUE);
setcookie("pass", $db_pass_str, strtotime( '+30 days' ), "/", "", "", TRUE);
// UPDATE THEIR "IP" AND "LASTLOGIN" FIELDS
$sql = "UPDATE users SET ip='$ip', lastlogin=now() WHERE username='$db_username' LIMIT 1";
$query = mysqli_query($db_conx, $sql);
// the AJAX caller treats any non-"login_failed" response as the username to redirect to
echo $db_username;
exit();
}
}
exit();
}
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Easy Fundraising Ideas</title>
<meta name="description" content="Sign up. Make a post for the things you want. Tell your friends about it. Receive donations.">
<meta name="viewport" content="initial-scale=1.0,width=device-width" />
<meta name="msvalidate.01" content="25A9DC1385D08C4DF90A1DCE8F58723A" />
<meta http-equiv="content-language" content="en-gb">
<link rel="stylesheet" href="style/style.css" />
<link rel="stylesheet" media="only screen and (max-width: 4000px)" href="style/pageTop1.css" />
<link rel="stylesheet" href="style/pageTop3.css" media="only screen and (max-width: 700px)" />
<link rel="stylesheet" media="only screen and (max-width: 4000px)" href="style/pageMiddle1.css" />
<link rel="stylesheet" media="only screen and (max-width: 1250px)" href="style/pageMiddle2.css" />
<link rel="stylesheet" media="only screen and (max-width: 700px)" href="style/pageMiddle3.css" />
<link rel="icon" href="style/tabicon.png" type="image/x-icon" />
<link rel="shortcut icon" href="style/tabicon.png" type="image/x-icon" />
<style type="text/css">
.back-content > h1 {
color: #00B3FF;
}
.back-content > h2 {
color: #00B3FF;
}
.back-content > h3 {
color: #00B3FF;
}
.header {
text-align: center;
color: #00B3FF;
background-color: #fff;
width: 85%;
height: 100px;
margin: 0 auto;
border-bottom: 2px dashed #00B3FF;
margin-bottom:0px;
padding-bottom: 0px;
padding: 5px
}
.back-content {
background-color: #00B3FF;
padding: 5px;
margin-top: -18px;
border-bottom: 3px solid #999;
border: none;
}
#fundraising_sec {
background-color: #fff;
width: 65%;
margin: 0 auto;
text-align: center;
color: #555;
}
#compare {
text-align: center;
width: 100%;
height: 250px;
display: block;
}
.compare-img1 {
height: 200px;
width: 300px;
padding: 5px;
float:left;
transition: all 0.5s ease;
}
.compare-img1:hover {
zoom: 1.1;
transform: rotate(-7deg)
}
.compare-img2 {
height: 200px;
width: 300px;
padding: 5px;
float:right;
transition: all 0.5s ease;
}
.compare-img2:hover {
zoom: 1.1;
transform: rotate(7deg)
}
#compare-p {
display: inline-block;
color: #00B3FF;
font-size: 35px;
font-weight: bold;
text-align: center;
margin: 0 auto;
margin-top: 80px;
}
</style>
<script src="js/main.js"></script>
<script src="js/Ajax.js"></script>
<script src="js/autoScroll.js"></script>
<script src="js/fade.js"></script>
<script src="js/trBackground.js"></script>
<script src="js/trDrop.js"></script>
<script src="js/trSlide.js"></script>
<script>
// Clear the contents of the element with id `x`
// (`_` looks like a getElementById-style helper from js/main.js - confirm)
function emptyElement(x){
_(x).innerHTML = "";
}
// Client-side login entry point: validates the form fields, then shows a
// simple arithmetic "prove you're human" question before the AJAX login runs
function login(){
var e = _("email").value;
var p = _("password").value;
if(e == "" || p == ""){
_("status2").innerHTML = "Please fill out all of the form data";
} else {
// hide the login button and show the spinner while the question is displayed
_("loginbtn").style.display = "none";
_("loading").style.display = 'inline-block';
// two random operands for the addition question (1-20 and 1-10)
var one = Math.floor((Math.random() * 20) + 1);
var two = Math.floor((Math.random() * 10) + 1);
fadeIn('invite','invite_pop');
_('invite_p').innerHTML = "";
_("invite_h2").innerHTML = 'Question';
// NOTE(review): the expected operands are embedded in the onclick handler,
// so this check is trivially bypassable on the client side
_("invite_div").innerHTML = '<h3 style="color: #999">Please answer the question below.</h3><input type="text" style="border: 1px solid #00C7FF;height: 29px;width: 242px;margin: 5px;border-radius: 15px;padding-left: 8px;" class="textBox textBox1" name="answer" placeholder="What\'s '+one+' plus '+two+'?" id="robot" /><button type="submit" style="height: 29px;border-radius: 18px;" class="logIn" id="loginbtn" onclick="answer('+one+', '+two+')">Answer</button>';
}
}
// Verify the anti-robot answer and, if correct, POST the credentials to
// index.php via AJAX; on success the server echoes the username and we redirect
function answer(one, two) {
var total = one + two;
if (_("robot").value != total) {
// wrong answer: restore the login button and show an error
fadeOut('invite','invite_pop');
_("loginbtn").style.display = "inline-block";
_("loading").style.display = 'none';
_("status2").innerHTML = "Please prove you're not a robot.";
return false;
} else {
var ajax = ajaxObj("POST", "index.php");
var e = _("email").value;
var p = _("password").value;
fadeOut('invite','invite_pop');
_("loginbtn").style.display = "none";
_("loading").style.display = 'inline-block';
ajax.onreadystatechange = function() {
if(ajaxReturn(ajax) == true) {
// the server replies "login_failed", "robot", or the username on success
if(ajax.responseText == "login_failed"){
_("loginbtn").style.display = "inline-block";
_("loading").style.display = 'none';
_("status2").innerHTML = "Login unsuccessful, please try again.";
} else if (ajax.responseText == "robot") {
_("loginbtn").style.display = "inline-block";
_("loading").style.display = 'none';
_("status2").innerHTML = "Please prove you're not a robot.";
} else {
window.location = "https://nosettles.com/user.php?u="+ajax.responseText;
}
}
}
ajax.send("e="+e+"&p="+p);
}
}
// Return true when `email` matches a basic address shape:
// local part, '@', dotted domain labels and a 2-6 letter TLD
function validateEmail(email) {
    var pattern = /^([\w-]+(?:\.[\w-]+)*)@((?:[\w-]+\.)*\w[\w-]{0,66})\.([a-z]{2,6}(?:\.[a-z]{2})?)$/i;
    return pattern.test(email);
}
</script>
</head>
<body style="background-color: #eee;">
<div id="pageTop1">
<div id="pageTopWrap">
<div><a class="pageTopLogo" href="/"></a>
</div>
<div id="up">
<div class="right">
<form name="loginform" onSubmit="return false;">
<input type="email" class="textBox textBox1" name="Email" placeholder="Email" id="email" onfocus="emptyElement('status2')" maxlength="88">
</div>
</div>
<div id="middle">
<div class="right">
<input type="password" class="textBox textbox2" placeholder="Password" id="password" onfocus="emptyElement('status2')" maxlength="100" />
<button type="submit" class="logIn" id="loginbtn" onclick="login()">
Log in
</button><img id="loading" src="style/ajax-loader1.gif" class="load" style="height: 27px; width: 47px;" alt="Loading..." />
</form>
</div>
</div>
<div id="down">
<div class="right">
<a class="links link1" href="forgot_pass.php">Forgot Password?</a>
<p id="status2" style="color: #f00"></p>
</div>
</div>
</div>
</div>
</div>
<?php include_once("template_nav.php"); ?>
<div id="java_pop_up"></div>
<div id="pop_up"><p>Secure Checkout</p><div id="wepay_checkout"></div></div>
<div id="invite" onclick="fadeOut('invite','invite_pop')"></div>
<div id="invite_pop" style="padding: 5px;"><h2 id="invite_h2" style="border-bottom: 2px solid #C2BCBC;"></h2><div id="invite_div"></div><p id="invite_p"></p></div>
<script type="text/javascript" src="https://stage.wepay.com/min/js/iframe.wepay.js">
</script>
<div id="pageMiddle" style="margin-top: 0px; padding-top: 25px;">
<div id="fundraising_sec" class="back-content">
<h1 class="header">Easy Fundraising Ideas</h1>
<h2>Are you searching for Easy Fundraising Ideas?</h2>
<p>Have you been searching for easy fundraising ideas for hours and you're now frustrated on how to make a fundraiser. Are you looking at fundraisers and trying to figure out how they got their funds? Are you looking for money but don't know how to start a fundraiser that can get popular and get funded.</p>
<h2>Well, you have found what you are looking for.</h2>
<p>If the answer to the questions above were mostly yes, then you have found the right place. The place where you can find easy fundraising ideas. The place where you can generate funds easily and with the lowest fees possible.</p>
<h3>Introducing NoSettles</h3>
<p>NoSettles is a fund-free website where you can post whatever you want and receive donations for that. You will not need to pay fees that take your money for no reason. Here you are able to sign up for free under 5 minutes and get donations as quickly as possible.</p>
<h2>Start your own fundraiser without having to overthink it.</h2>
<p>If you want to start your own fundraiser but you are looking around in Google, looking for websites where you can find easy fundraising ideas, then you are wasting your time. Instead you could start your fundraiser right away without having to research for hours.</p>
<h3>Don't wait any longer, no need for searching for Easy Fundraising Ideas.</h3>
<p>Just make your own fundraiser. You deserve better. If you want something but you can't afford it, then make a fundraiser. You don't have to settle for cheap products that don't last a month. You don't have to go with low-cost products. You deserve more!</p>
<h2>Judge for yourself</h2>
<div id="compare">
<img src="images/good-laptop.jpg" alt="Easy Fundraising Ideas" class="compare-img1" />
<p id="compare-p">OR</p>
<img src="images/bad-laptop.jpg" alt="Easy Fundraising Ideas" class="compare-img2" />
</div>
<div id="compare">
<img src="images/good-car.jpg" alt="Easy Fundraising Ideas" class="compare-img1" />
<p id="compare-p">OR</p>
<img src="images/bad-car.jpg" alt="Easy Fundraising Ideas" class="compare-img2" />
</div>
<div id="compare">
<img src="images/good-plane.jpg" alt="Easy Fundraising Ideas" class="compare-img1" />
<p id="compare-p">OR</p>
<img src="images/bad-plane.jpg" alt="Easy Fundraising Ideas" class="compare-img2" />
</div>
<?php include_once("template_SignUp.php"); ?>
</div>
</div>
<script>
// Fade in the overlay `el` and the popup `elen` and lock page scrolling
function fadeIn(el, elen){
var element = document.getElementById(elen);
var elem = document.getElementById(el);
element.style.transition = "all 0.5s linear 0s";
elem.style.transition = "all 0.5s linear 0s";
element.style.opacity = 1;
elem.style.opacity = 1;
element.style.visibility = "visible";
elem.style.visibility = "visible";
// prevent the page behind the popup from scrolling
var body=document.getElementsByTagName('body')[0];
body.style.overflow = "hidden";
}
// Fill the popup with an explanation of what an Invitation Code is
function hint() {
_('invite_div').innerHTML = "";
_('invite_h2').innerHTML = "Invitation Code";
_('invite_p').innerHTML = 'Invitation Code is a string that every None-Settler has. If you know any friends that is a member, you can ask them for the code. Otherwise, you can <a href="mailto:contact@nosettles.com">contact us</a>,tell us why you want to join and how you found us, then simply ask for the Invitation Code.';
}
// Fade out the overlay `el` and the popup `elen` and restore page scrolling
function fadeOut(el, elen){
var element = document.getElementById(elen);
var elem = document.getElementById(el);
element.style.opacity = 0;
elem.style.opacity = 0;
element.style.visibility = "hidden";
elem.style.visibility = "hidden";
var body=document.getElementsByTagName('body')[0];
body.style.overflowY = "scroll";
}
// POST an "action=mostRecent" request to index.php and surface a login error
// if the server reports one (no success handling - TODO confirm intent)
function mostRecent() {
var ajax = ajaxObj("POST", "index.php");
ajax.onreadystatechange = function() {
if(ajaxReturn(ajax) == true) {
if(ajax.responseText == "login_failed"){
_("status2").innerHTML = "Login unsuccessful, please try again.";
}
}
}
ajax.send("action=mostRecent");
}
</script>
<div id="pageBottom" style="margin-top: 20px;"><div style="margin-top: -20px;">NoSettles Copyright © 2015</div>
<div style="padding-top: 5px;">
<span id="siteseal">
<script type="text/javascript" src="https://seal.godaddy.com/getSeal?sealID=6EFg6BS5bspEZsmRQHWfXAsaePte8smuuUzp4HtCti6trGWAiijB5qh7GRLG">
</script>
</span>
</div>
</div>
</body>
</html>
|
PHP
|
Apache-2.0
|
DeveloperTheExplorer/NoSettles/easyfundraisingideas.php
|
ea8c15f9-6c20-4b75-b9af-aac4efbea3b3
|
[]
|
[]
|
/*
Copyright (c) 2018, European X-Ray Free-Electron Laser Facility GmbH
All rights reserved.
You should have received a copy of the 3-Clause BSD License along with this
program. If not, see <https://opensource.org/licenses/BSD-3-Clause>
Author: Jun Zhu, zhujun981661@gmail.com
*/
#include <iostream>
#include <QThread>
#include <QStatusBar>
#include <QLayout>
#include <Qt>
#include <QDebug>
#include "mainwindow.hpp"
// Construct the main window: build the widgets first, then wire the signal
// connections (initConnections() creates broker_ and img_proc_), and finally
// hook the broker's output channel into the image processor
dmi::MainWindow::MainWindow(QWidget *parent) : QMainWindow(parent)
{
	initUI();
	initConnections();

	img_proc_->connect(broker_->outputChannel());
	image_analysis_->addProcessor(img_proc_);
}

// Shut down the worker threads cleanly before the window is destroyed
dmi::MainWindow::~MainWindow()
{
	// ask both QThreads to stop ...
	broker_->requestInterruption();
	img_proc_->requestInterruption();
	qDebug() << "Requested interruptions of QThreads!";
	broker_->quit();
	img_proc_->quit();

	// ... and block until they have actually finished
	qDebug() << "Waiting for QThreads to join ...";
	broker_->wait();
	img_proc_->wait();
	qDebug() << "QThreads terminated!";
}
// Build the window layout: toolbar, status bar, and a horizontal splitter with
// a tabbed left panel (data source) and a vertical central splitter
void dmi::MainWindow::initUI()
{
	this->setWindowTitle("DMI");
	this->resize(width_, height_);

	initToolbar();
	initStatusbar();

	cw_ = new QSplitter(this);
	cw_->setChildrenCollapsible(false);
	cw_->setHandleWidth(SPLITTER_HANDLE_WIDTH);

	l_panel_ = new QTabWidget();
	c_panel_ = new QSplitter(Qt::Vertical);
	c_panel_->setChildrenCollapsible(false);
	c_panel_->setHandleWidth(SPLITTER_HANDLE_WIDTH);

	cw_->addWidget(l_panel_);
	cw_->addWidget(c_panel_);
	// initial split: 45% left panel / 55% central panel
	cw_->setSizes({static_cast<int>(width_ * 0.45), static_cast<int>(width_ * 0.55)});
	setCentralWidget(cw_);

	initLeftUI();
	initCentralUI();

	setMinimumSize(640, 480);
}

// Left panel: data source configuration tab, tabs rendered on the west side
void dmi::MainWindow::initLeftUI()
{
	ds_widget_ = new DataSourceWidget();
	l_panel_->setTabPosition(QTabWidget::West);
	l_panel_->addTab(ds_widget_, "Data source");
}

// Central area: analysis tabs on top, logger tab at the bottom
void dmi::MainWindow::initCentralUI()
{
	ct_panel_ = new QTabWidget();
	cb_panel_ = new QTabWidget();
	c_panel_->addWidget(ct_panel_);
	c_panel_->addWidget(cb_panel_);

	// top
	image_analysis_ = new ImageAnalysisWidget(ct_panel_);
	line_analysis_ = new LineAnalysisWidget(ct_panel_);
	ct_panel_->addTab(image_analysis_, "Image Analysis");
	ct_panel_->addTab(line_analysis_, "Line analysis");

	// bottom (logger tab is an empty placeholder widget for now)
	logger_ = new QWidget();
	cb_panel_->setTabPosition(QTabWidget::South);
	cb_panel_->addTab(logger_, "Logger");
}

// Toolbar with the Start/Stop acquisition actions; Stop starts out disabled
// and the two are toggled in onStart()/onStop()
void dmi::MainWindow::initToolbar()
{
	tool_bar_ = addToolBar("View");

	start_act_ = new QAction("&Start", this);
	stop_act_ = new QAction("&Stop", this);
	stop_act_->setEnabled(false);
	tool_bar_->addAction(start_act_);
	tool_bar_->addAction(stop_act_);
}

// Permanent status label on the status bar showing the acquisition state
void dmi::MainWindow::initStatusbar()
{
	status_label_ = new QLabel();
	this->statusBar()->addPermanentWidget(status_label_);
	status_label_->setText("Ready");
}
// Create the worker objects (broker_, img_proc_) and wire all signal/slot
// connections between the window, the data source widget and the workers
void dmi::MainWindow::initConnections()
{
	// toolbar actions drive both this window and the data source widget
	connect(start_act_, &QAction::triggered, this, &dmi::MainWindow::onStart);
	connect(start_act_, &QAction::triggered, ds_widget_, &dmi::DataSourceWidget::onStart);
	connect(stop_act_, &QAction::triggered, this, &dmi::MainWindow::onStop);
	connect(stop_act_, &QAction::triggered, ds_widget_, &dmi::DataSourceWidget::onStop);

	// data broker: configured from the data source widget and started when DAQ starts
	broker_ = new DataBroker(this);
	connect(this, &MainWindow::daqStarted, broker_, [this]()
	{
		this->broker_->setEndpoint(this->ds_widget_->endpoint());
		this->broker_->setSourceType(this->ds_widget_->sourceType());
		this->broker_->start();
	});
	connect(this, &MainWindow::daqStopped, broker_, &DataBroker::stop);

	// image processor (run forever)
	img_proc_ = new ImageProcessor(this);
	img_proc_->start();
	connect(img_proc_, &dmi::ImageProcessor::newFrame, image_analysis_, &ImageAnalysisWidget::updateImage);
	// notify the broker when a frame has been processed (presumably flow control - confirm)
	connect(img_proc_, &dmi::ImageProcessor::imageProcessed, broker_, &dmi::DataBroker::dataProcessed);

	// keep the displayed source list and the broker's source selection in sync
	connect(broker_, &dmi::DataBroker::newSources, [this](const QStringList& srcs)
	{
		this->ds_widget_->updateSourceList(srcs);
	});
	connect(ds_widget_, &DataSourceWidget::sourceToggled, [this](SourceItem item, bool checked)
	{
		this->broker_->updateSources(item, checked);
	});

	connect(broker_, &DataBroker::newLine, line_analysis_, &LineAnalysisWidget::updateChart);
}
void dmi::MainWindow::onStart()
{
start_act_->setEnabled(false);
stop_act_->setEnabled(true);
emit daqStarted();
status_label_->setText("Acquiring ...");
}
/// Handler for the Stop action: requests the broker to stop, blocks until
/// its worker thread has terminated, then restores the idle UI state.
void dmi::MainWindow::onStop()
{
    emit daqStopped();
    // daqStopped() only *requests* the stop; block here until the broker's
    // thread has actually finished before reporting "Ready" to the user.
    broker_->wait();
    status_label_->setText("Ready");
    stop_act_->setEnabled(false);
    start_act_->setEnabled(true);
}
|
C++
|
BSD-3-Clause
|
zhujun98/karabo-bridge-cpp/src/dmi/src/gui/mainwindow.cpp
|
f73cfaa5-6501-4a13-940c-ac42dfd8011b
|
[{"tag": "EMAIL", "value": "zhujun981661@gmail.com", "start": 276, "end": 298, "context": ".org/licenses/BSD-3-Clause>\n\n Author: Jun Zhu, zhujun981661@gmail.com\n*/\n#include <iostream>\n\n#include <QThread>\n#inclu"}, {"tag": "NAME", "value": "Jun Zhu", "start": 267, "end": 274, "context": "pensource.org/licenses/BSD-3-Clause>\n\n Author: Jun Zhu, zhujun981661@gmail.com\n*/\n#include <iostream>\n\n#"}]
|
[{"tag": "EMAIL", "value": "zhujun981661@gmail.com", "start": 276, "end": 298, "context": ".org/licenses/BSD-3-Clause>\n\n Author: Jun Zhu, zhujun981661@gmail.com\n*/\n#include <iostream>\n\n#include <QThread>\n#inclu"}, {"tag": "NAME", "value": "Jun Zhu", "start": 267, "end": 274, "context": "pensource.org/licenses/BSD-3-Clause>\n\n Author: Jun Zhu, zhujun981661@gmail.com\n*/\n#include <iostream>\n\n#"}]
|
<?php
/*********************************************************************************
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2013 SugarCRM Inc.
* SuiteCRM is an extension to SugarCRM Community Edition developed by Salesagility Ltd.
* Copyright (C) 2011 - 2014 Salesagility Ltd.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo and "Supercharged by SuiteCRM" logo. If the display of the logos is not
* reasonably feasible for technical reasons, the Appropriate Legal Notices must
* display the words "Powered by SugarCRM" and "Supercharged by SuiteCRM".
********************************************************************************/
// QuickCreate view definition for the AOD_IndexEvent module.
// Registers a two-column layout (10%/30% label/field widths per column)
// with a single default panel exposing the record name and assigned user.
$module_name = 'AOD_IndexEvent';
$viewdefs[$module_name]['QuickCreate'] = array(
    // Layout template: two columns, each with a 10% label / 30% field split.
    'templateMeta' => array('maxColumns' => '2',
        'widths' => array(
            array('label' => '10', 'field' => '30'),
            array('label' => '10', 'field' => '30')
        ),
    ),
    // Field rows shown on the quick-create form (one row, two fields).
    'panels' =>array (
        'default' =>
        array (
            array (
                'name',
                'assigned_user_name',
            ),
        ),
    ),
);
|
PHP
|
MIT
|
YukioYoshimura/Homestead/src/suitecrm/modules/AOD_IndexEvent/metadata/quickcreatedefs.php
|
6afd793f-3339-459f-b30d-55f76cdc2f4c
|
[{"tag": "EMAIL", "value": "contact@sugarcrm.com", "start": 1477, "end": 1497, "context": "30, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.\n *\n * The interactive user interfaces in modifie"}]
|
[{"tag": "EMAIL", "value": "contact@sugarcrm.com", "start": 1477, "end": 1497, "context": "30, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.\n *\n * The interactive user interfaces in modifie"}]
|
/* Autogenerated file. Do not edit manually. */
/* tslint:disable */
/* eslint-disable */
import {
Signer,
utils,
BigNumberish,
Contract,
ContractFactory,
Overrides,
} from "ethers";
import { Provider, TransactionRequest } from "@ethersproject/providers";
import type {
MasterChefJoeV3,
MasterChefJoeV3Interface,
} from "../MasterChefJoeV3";
// ABI of the MasterChefJoeV3 staking contract: constructor, the emitted
// events (Add/Deposit/EmergencyWithdraw/Harvest/Init/OwnershipTransferred/
// Set/UpdatePool/Withdraw) and the public functions.
// NOTE(review): this file is autogenerated (TypeChain) — regenerate from the
// compiled contract instead of editing the ABI entries by hand.
const _abi = [
  {
    inputs: [
      {
        internalType: "contract IMasterChef",
        name: "_MASTER_CHEF_V2",
        type: "address",
      },
      {
        internalType: "contract IERC20",
        name: "_joe",
        type: "address",
      },
      {
        internalType: "uint256",
        name: "_MASTER_PID",
        type: "uint256",
      },
    ],
    stateMutability: "nonpayable",
    type: "constructor",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "allocPoint",
        type: "uint256",
      },
      {
        indexed: true,
        internalType: "contract IERC20",
        name: "lpToken",
        type: "address",
      },
      {
        indexed: true,
        internalType: "contract IRewarder",
        name: "rewarder",
        type: "address",
      },
    ],
    name: "Add",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "address",
        name: "user",
        type: "address",
      },
      {
        indexed: true,
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
    ],
    name: "Deposit",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "address",
        name: "user",
        type: "address",
      },
      {
        indexed: true,
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
    ],
    name: "EmergencyWithdraw",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "address",
        name: "user",
        type: "address",
      },
      {
        indexed: true,
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
    ],
    name: "Harvest",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [],
    name: "Init",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "address",
        name: "previousOwner",
        type: "address",
      },
      {
        indexed: true,
        internalType: "address",
        name: "newOwner",
        type: "address",
      },
    ],
    name: "OwnershipTransferred",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "allocPoint",
        type: "uint256",
      },
      {
        indexed: true,
        internalType: "contract IRewarder",
        name: "rewarder",
        type: "address",
      },
      {
        indexed: false,
        internalType: "bool",
        name: "overwrite",
        type: "bool",
      },
    ],
    name: "Set",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "lastRewardTimestamp",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "lpSupply",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "accJoePerShare",
        type: "uint256",
      },
    ],
    name: "UpdatePool",
    type: "event",
  },
  {
    anonymous: false,
    inputs: [
      {
        indexed: true,
        internalType: "address",
        name: "user",
        type: "address",
      },
      {
        indexed: true,
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        indexed: false,
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
    ],
    name: "Withdraw",
    type: "event",
  },
  {
    inputs: [],
    name: "JOE",
    outputs: [
      {
        internalType: "contract IERC20",
        name: "",
        type: "address",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [],
    name: "MASTER_CHEF_V2",
    outputs: [
      {
        internalType: "contract IMasterChef",
        name: "",
        type: "address",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [],
    name: "MASTER_PID",
    outputs: [
      {
        internalType: "uint256",
        name: "",
        type: "uint256",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "allocPoint",
        type: "uint256",
      },
      {
        internalType: "contract IERC20",
        name: "_lpToken",
        type: "address",
      },
      {
        internalType: "contract IRewarder",
        name: "_rewarder",
        type: "address",
      },
    ],
    name: "add",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
    ],
    name: "deposit",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
    ],
    name: "emergencyWithdraw",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [],
    name: "harvestFromMasterChef",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "contract IERC20",
        name: "dummyToken",
        type: "address",
      },
    ],
    name: "init",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [],
    name: "joePerSec",
    outputs: [
      {
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256[]",
        name: "pids",
        type: "uint256[]",
      },
    ],
    name: "massUpdatePools",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [],
    name: "owner",
    outputs: [
      {
        internalType: "address",
        name: "",
        type: "address",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "_pid",
        type: "uint256",
      },
      {
        internalType: "address",
        name: "_user",
        type: "address",
      },
    ],
    name: "pendingTokens",
    outputs: [
      {
        internalType: "uint256",
        name: "pendingJoe",
        type: "uint256",
      },
      {
        internalType: "address",
        name: "bonusTokenAddress",
        type: "address",
      },
      {
        internalType: "string",
        name: "bonusTokenSymbol",
        type: "string",
      },
      {
        internalType: "uint256",
        name: "pendingBonusToken",
        type: "uint256",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "",
        type: "uint256",
      },
    ],
    name: "poolInfo",
    outputs: [
      {
        internalType: "contract IERC20",
        name: "lpToken",
        type: "address",
      },
      {
        internalType: "uint256",
        name: "accJoePerShare",
        type: "uint256",
      },
      {
        internalType: "uint256",
        name: "lastRewardTimestamp",
        type: "uint256",
      },
      {
        internalType: "uint256",
        name: "allocPoint",
        type: "uint256",
      },
      {
        internalType: "contract IRewarder",
        name: "rewarder",
        type: "address",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [],
    name: "poolLength",
    outputs: [
      {
        internalType: "uint256",
        name: "pools",
        type: "uint256",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [],
    name: "renounceOwnership",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "_pid",
        type: "uint256",
      },
      {
        internalType: "uint256",
        name: "_allocPoint",
        type: "uint256",
      },
      {
        internalType: "contract IRewarder",
        name: "_rewarder",
        type: "address",
      },
      {
        internalType: "bool",
        name: "overwrite",
        type: "bool",
      },
    ],
    name: "set",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [],
    name: "totalAllocPoint",
    outputs: [
      {
        internalType: "uint256",
        name: "",
        type: "uint256",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "address",
        name: "newOwner",
        type: "address",
      },
    ],
    name: "transferOwnership",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
    ],
    name: "updatePool",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "",
        type: "uint256",
      },
      {
        internalType: "address",
        name: "",
        type: "address",
      },
    ],
    name: "userInfo",
    outputs: [
      {
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
      {
        internalType: "uint256",
        name: "rewardDebt",
        type: "uint256",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    inputs: [
      {
        internalType: "uint256",
        name: "pid",
        type: "uint256",
      },
      {
        internalType: "uint256",
        name: "amount",
        type: "uint256",
      },
    ],
    name: "withdraw",
    outputs: [],
    stateMutability: "nonpayable",
    type: "function",
  },
];
const _bytecode =
"0x60e06040523480156200001157600080fd5b5060405162003167380380620031678339810160408190526200003491620000ae565b600080546001600160a01b031916339081178255604051909182917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0908290a350600180556001600160a01b03928316608052911660a05260c052620000f6565b6001600160a01b0381168114620000ab57600080fd5b50565b600080600060608486031215620000c457600080fd5b8351620000d18162000095565b6020850151909350620000e48162000095565b80925050604084015190509250925092565b60805160a05160c051612fdf62000188600039600081816102900152818161065e015281816109f801526118fc01526000818161038e015281816108550152611ae80152600081816101fd015281816105aa0152818161068b01528181610a25015281816115e3015281816116770152818161170b015281816117af0152818161184501526119260152612fdf6000f3fe608060405234801561001057600080fd5b50600436106101775760003560e01c806361621aaa116100d8578063ab7de0981161008c578063f2fde38b11610066578063f2fde38b14610353578063ffcd426314610366578063ffebad301461038957600080fd5b8063ab7de09814610325578063ca418d2314610338578063e2bbb1581461034057600080fd5b806388bba42f116100bd57806388bba42f146102ba5780638da5cb5b146102cd57806393f1a40b146102de57600080fd5b806361621aaa1461028b578063715018a6146102b257600080fd5b8063441a3e701161012f57806351eb05a61161011457806351eb05a6146102525780635312ea8e1461026557806357a5b58c1461027857600080fd5b8063441a3e70146102375780634f70b15a1461024a57600080fd5b806317caf6f11161016057806317caf6f1146101da57806319ab453c146101e357806327bf88ad146101f857600080fd5b8063081e3eda1461017c5780631526fe2714610193575b600080fd5b6002545b6040519081526020015b60405180910390f35b6101a66101a1366004612a28565b6103b0565b604080516001600160a01b03968716815260208101959095528401929092526060830152909116608082015260a00161018a565b61018060065481565b6101f66101f1366004612a59565b6103fe565b005b61021f7f000000000000000000000000000000000000000000000000000000000000000081565b6040516001600160a01b03909116815260200161018a565b6101f6610245366004612a76565b61071c565b6101f66109d0565b6101f6610260366004612a2
8565b610a8b565b6101f6610273366004612a28565b610d01565b6101f6610286366004612a98565b610eb7565b6101807f000000000000000000000000000000000000000000000000000000000000000081565b6101f6610ef4565b6101f66102c8366004612b1b565b610fb0565b6000546001600160a01b031661021f565b6103106102ec366004612b65565b60056020908152600092835260408084209091529082529020805460019091015482565b6040805192835260208301919091520161018a565b6101f6610333366004612b95565b611254565b6101806115d4565b6101f661034e366004612a76565b6119bb565b6101f6610361366004612a59565b611d99565b610379610374366004612b65565b611ee2565b60405161018a9493929190612c03565b61021f7f000000000000000000000000000000000000000000000000000000000000000081565b600281815481106103c057600080fd5b6000918252602090912060059091020180546001820154600283015460038401546004909401546001600160a01b0393841695509193909290911685565b6000546001600160a01b0316331461045d5760405162461bcd60e51b815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e657260448201526064015b60405180910390fd5b6040517f70a082310000000000000000000000000000000000000000000000000000000081523360048201526000906001600160a01b038316906370a082319060240160206040518083038186803b1580156104b857600080fd5b505afa1580156104cc573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906104f09190612c73565b9050806105655760405162461bcd60e51b815260206004820152602360248201527f4d61737465724368656656323a2042616c616e6365206d75737420657863656560448201527f64203000000000000000000000000000000000000000000000000000000000006064820152608401610454565b61057a6001600160a01b03831633308461225a565b6040517f095ea7b30000000000000000000000000000000000000000000000000000000081526001600160a01b037f0000000000000000000000000000000000000000000000000000000000000000811660048301526024820183905283169063095ea7b390604401602060405180830381600087803b1580156105fd57600080fd5b505af1158015610611573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906106359190612c8c565b506040517fe2bbb158000000000000000
0000000000000000000000000000000000000000081527f00000000000000000000000000000000000000000000000000000000000000006004820152602481018290527f00000000000000000000000000000000000000000000000000000000000000006001600160a01b03169063e2bbb15890604401600060405180830381600087803b1580156106d757600080fd5b505af11580156106eb573d6000803e3d6000fd5b50506040517f57a86f7d14ccde89e22870afe839e3011216827daa9b24e18629f0a1e9d6cc14925060009150a15050565b6002600154141561076f5760405162461bcd60e51b815260206004820152601f60248201527f5265656e7472616e637947756172643a207265656e7472616e742063616c6c006044820152606401610454565b600260015561077c6109d0565b61078582610a8b565b60006002838154811061079a5761079a612ca9565b600091825260208083206040805160a081018252600594850290920180546001600160a01b039081168452600182015484860152600282015484840152600382015460608501526004909101541660808301528785529282528284203385529091529120805491925090156108b55760006108468260010154610840670de0b6b3a764000061083a876020015187600001546123ac90919063ffffffff16565b90612450565b906124ab565b905061087c6001600160a01b037f0000000000000000000000000000000000000000000000000000000000000000163383612507565b604051818152859033907f71bab65ced2e5750775a0613be067df48ef06cf92a496ebf7663ae06609249549060200160405180910390a3505b80546108c190846124ab565b80825560208301516108e191670de0b6b3a76400009161083a91906123ac565b600182015560808201516001600160a01b038116156109775781546040517f1a7af8b000000000000000000000000000000000000000000000000000000000815233600482015260248101919091526001600160a01b03821690631a7af8b090604401600060405180830381600087803b15801561095e57600080fd5b505af1158015610972573d6000803e3d6000fd5b505050505b825161098d906001600160a01b03163386612507565b604051848152859033907ff279e6a1f5e320cca91135676d9cb6e44ca8a08c0b88342bcdb1144f6511b568906020015b60405180910390a3505060018055505050565b6040517fe2bbb1580000000000000000000000000000000000000000000000000000000081527f00000000000000000000000000000000000000000000000000000000000000006004820152600060248201527f0000000000000000000
0000000000000000000000000000000000000000000006001600160a01b03169063e2bbb15890604401600060405180830381600087803b158015610a7157600080fd5b505af1158015610a85573d6000803e3d6000fd5b50505050565b600060028281548110610aa057610aa0612ca9565b60009182526020918290206040805160a081018252600590930290910180546001600160a01b0390811684526001820154948401949094526002810154918301829052600381015460608401526004015490921660808201529150421115610cfd5780516040517f70a082310000000000000000000000000000000000000000000000000000000081523060048201526000916001600160a01b0316906370a082319060240160206040518083038186803b158015610b5e57600080fd5b505afa158015610b72573d6000803e3d6000fd5b505050506040513d601f19601f82011682018060405250810190610b969190612c73565b90508015610c0f576000610bb78360400151426124ab90919063ffffffff16565b90506000610be160065461083a8660600151610bdb610bd46115d4565b87906123ac565b906123ac565b9050610c07610bfc8461083a84670de0b6b3a76400006123ac565b602086015190612650565b602085015250505b4260408301526002805483919085908110610c2c57610c2c612ca9565b6000918252602091829020835160059092020180546001600160a01b039283167fffffffffffffffffffffffff0000000000000000000000000000000000000000918216178255848401516001830155604080860151600284015560608601516003840155608090950151600490920180549290931691161790558382015190840151915185927f3be3541fc42237d611b30329040bfa4569541d156560acdbbae57640d20b8f4692610cf392909186919283526020830191909152604082015260600190565b60405180910390a2505b5050565b60026001541415610d545760405162461bcd60e51b815260206004820152601f60248201527f5265656e7472616e637947756172643a207265656e7472616e742063616c6c006044820152606401610454565b6002600181905550600060028281548110610d7157610d71612ca9565b600091825260208083206040805160a081018252600594850290920180546001600160a01b0390811684526001808301548587015260028301548585015260038301546060860152600490920154811660808501908152898852958552828720338852909452908520805486825591810195909555925190945090811615610e6d576040517f1a7af8b000000000000000000000000000000000000000000000000000000
0008152336004820152600060248201526001600160a01b03821690631a7af8b090604401600060405180830381600087803b158015610e5457600080fd5b505af1158015610e68573d6000803e3d6000fd5b505050505b8351610e83906001600160a01b03163384612507565b604051828152859033907fbb757047c2b5f3974fe26b7c10f732e7bce710b0952a71082702781e62ae0595906020016109bd565b8060005b81811015610a8557610ee4848483818110610ed857610ed8612ca9565b90506020020135610a8b565b610eed81612d07565b9050610ebb565b6000546001600160a01b03163314610f4e5760405162461bcd60e51b815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610454565b600080546040516001600160a01b03909116907f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0908390a3600080547fffffffffffffffffffffffff0000000000000000000000000000000000000000169055565b6000546001600160a01b0316331461100a5760405162461bcd60e51b815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610454565b60006002858154811061101f5761101f612ca9565b60009182526020918290206040805160a081018252600590930290910180546001600160a01b039081168452600182015494840194909452600280820154928401929092526003810154606084015260040154909216608082015281549092506110c39186916110bd91908990811061109a5761109a612ca9565b9060005260206000209060050201600301546006546124ab90919063ffffffff16565b90612650565b60065560608101849052811561115d576040517f1a7af8b000000000000000000000000000000000000000000000000000000000815260006004820181905260248201526001600160a01b03841690631a7af8b090604401600060405180830381600087803b15801561113557600080fd5b505af1158015611149573d6000803e3d6000fd5b5050506001600160a01b0384166080830152505b806002868154811061117157611171612ca9565b600091825260209182902083516005929092020180547fffffffffffffffffffffffff00000000000000000000000000000000000000009081166001600160a01b039384161782559284015160018201556040840151600282015560608401516003820155608090930151600490930180549092169216919091179055816111fd5780608001516111ff565b8
25b6001600160a01b0316857fa54644aae5c48c5971516f334e4fe8ecbc7930e23f34877d4203c6551e67ffaa86856040516112459291909182521515602082015260400190565b60405180910390a35050505050565b6000546001600160a01b031633146112ae5760405162461bcd60e51b815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610454565b6112b96003836126af565b156113065760405162461bcd60e51b815260206004820152601560248201527f6164643a204c5020616c726561647920616464656400000000000000000000006044820152606401610454565b6040517f70a082310000000000000000000000000000000000000000000000000000000081523060048201526001600160a01b038316906370a082319060240160206040518083038186803b15801561135e57600080fd5b505afa158015611372573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906113969190612c73565b506001600160a01b03811615611421576040517f1a7af8b000000000000000000000000000000000000000000000000000000000815260006004820181905260248201526001600160a01b03821690631a7af8b090604401600060405180830381600087803b15801561140857600080fd5b505af115801561141c573d6000803e3d6000fd5b505050505b60065442906114309085612650565b6006556040805160a0810182526001600160a01b038581168252600060208301818152938301858152606084018981528784166080860190815260028054600181018255945294517f405787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace600590940293840180547fffffffffffffffffffffffff000000000000000000000000000000000000000090811692871692909217905595517f405787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5acf84015590517f405787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ad0830155517f405787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ad182015591517f405787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ad29092018054909316911617905561157c6003846126d1565b506002546001600160a01b03808416919085169061159b9060016124ab565b6040518781527f4b16bd2431ad24dc020ab0e1de7fcb6563dead6a24fb10089d6c23e97a70381f9060200160405180910390a450505050565b6000806103e8905060006117a17f0000000
0000000000000000000000000000000000000000000000000000000006001600160a01b0316630735b2086040518163ffffffff1660e01b815260040160206040518083038186803b15801561163a57600080fd5b505afa15801561164e573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906116729190612c73565b6108407f00000000000000000000000000000000000000000000000000000000000000006001600160a01b03166304ef9d586040518163ffffffff1660e01b815260040160206040518083038186803b1580156116ce57600080fd5b505afa1580156116e2573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906117069190612c73565b6108407f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031663fc3c28af6040518163ffffffff1660e01b815260040160206040518083038186803b15801561176257600080fd5b505afa158015611776573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061179a9190612c73565b87906124ab565b9050600061183e8361083a847f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031663ca418d236040518163ffffffff1660e01b815260040160206040518083038186803b15801561180657600080fd5b505afa15801561181a573d6000803e3d6000fd5b505050506040513d601f19601f82011682018060405250810190610bdb9190612c73565b90506119b37f00000000000000000000000000000000000000000000000000000000000000006001600160a01b03166317caf6f16040518163ffffffff1660e01b815260040160206040518083038186803b15801561189c57600080fd5b505afa1580156118b0573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906118d49190612c73565b6040517f1526fe270000000000000000000000000000000000000000000000000000000081527f0000000000000000000000000000000000000000000000000000000000000000600482015261083a907f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031690631526fe279060240160806040518083038186803b15801561197057600080fd5b505afa158015611984573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906119a89190612d6f565b6020015184906123ac565b935050505090565b60026001541415611a0e5760405162461bcd6
0e51b815260206004820152601f60248201527f5265656e7472616e637947756172643a207265656e7472616e742063616c6c006044820152606401610454565b6002600155611a1b6109d0565b611a2482610a8b565b600060028381548110611a3957611a39612ca9565b600091825260208083206040805160a081018252600594850290920180546001600160a01b03908116845260018201548486015260028201548484015260038201546060850152600490910154166080830152878552928252828420338552909152912080549192509015611b48576000611ad98260010154610840670de0b6b3a764000061083a876020015187600001546123ac90919063ffffffff16565b9050611b0f6001600160a01b037f0000000000000000000000000000000000000000000000000000000000000000163383612507565b604051818152859033907f71bab65ced2e5750775a0613be067df48ef06cf92a496ebf7663ae06609249549060200160405180910390a3505b81516040517f70a082310000000000000000000000000000000000000000000000000000000081523060048201526000916001600160a01b0316906370a082319060240160206040518083038186803b158015611ba457600080fd5b505afa158015611bb8573d6000803e3d6000fd5b505050506040513d601f19601f82011682018060405250810190611bdc9190612c73565b8351909150611bf6906001600160a01b031633308761225a565b82516040517f70a08231000000000000000000000000000000000000000000000000000000008152306004820152600091611c909184916001600160a01b0316906370a082319060240160206040518083038186803b158015611c5857600080fd5b505afa158015611c6c573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906108409190612c73565b8354909150611c9f9082612650565b8084556020850151611cbf91670de0b6b3a76400009161083a91906123ac565b600184015560808401516001600160a01b03811615611d555783546040517f1a7af8b000000000000000000000000000000000000000000000000000000000815233600482015260248101919091526001600160a01b03821690631a7af8b090604401600060405180830381600087803b158015611d3c57600080fd5b505af1158015611d50573d6000803e3d6000fd5b505050505b604051828152879033907f90890809c654f11d6e72a28fa60149770a0d11ec6c92319d6ceb2bb0a4ea1a159060200160405180910390a35050600180555050505050565b6000546001600160a01b03163314611df35760405162461bcd60e51b8
15260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610454565b6001600160a01b038116611e6f5760405162461bcd60e51b815260206004820152602660248201527f4f776e61626c653a206e6577206f776e657220697320746865207a65726f206160448201527f64647265737300000000000000000000000000000000000000000000000000006064820152608401610454565b600080546040516001600160a01b03808516939216917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e091a3600080547fffffffffffffffffffffffff0000000000000000000000000000000000000000166001600160a01b0392909216919091179055565b600080606060008060028781548110611efd57611efd612ca9565b600091825260208083206040805160a081018252600594850290920180546001600160a01b039081168452600182015484860190815260028301548585015260038301546060860152600492830154821660808601528e88529585528287208d821688529094528186209451835192517f70a08231000000000000000000000000000000000000000000000000000000008152309281019290925292965093949193919216906370a082319060240160206040518083038186803b158015611fc457600080fd5b505afa158015611fd8573d6000803e3d6000fd5b505050506040513d601f19601f82011682018060405250810190611ffc9190612c73565b905083604001514211801561201057508015155b1561207257600061202e8560400151426124ab90919063ffffffff16565b9050600061204b60065461083a8860600151610bdb610bd46115d4565b905061206d6120668461083a84670de0b6b3a76400006123ac565b8590612650565b935050505b61209d8360010154610840670de0b6b3a764000061083a8688600001546123ac90919063ffffffff16565b60808501519098506001600160a01b03161561224d5783608001516001600160a01b031663f7c618c16040518163ffffffff1660e01b815260040160206040518083038186803b1580156120f057600080fd5b505afa158015612104573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906121289190612dde565b96506121b084608001516001600160a01b031663f7c618c16040518163ffffffff1660e01b815260040160206040518083038186803b15801561216a57600080fd5b505afa15801561217e573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906121a291906
12dde565b6001600160a01b03166126e6565b60808501516040517fc031a66f0000000000000000000000000000000000000000000000000000000081526001600160a01b038c8116600483015292985091169063c031a66f9060240160206040518083038186803b15801561221257600080fd5b505afa158015612226573d6000803e3d6000fd5b505050506040513d601f19601f8201168201806040525081019061224a9190612c73565b94505b5050505092959194509250565b604080516001600160a01b0385811660248301528481166044830152606480830185905283518084039091018152608490920183526020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167f23b872dd0000000000000000000000000000000000000000000000000000000017905291516000928392908816916122ec9190612dfb565b6000604051808303816000865af19150503d8060008114612329576040519150601f19603f3d011682016040523d82523d6000602084013e61232e565b606091505b50915091508180156123585750805115806123585750808060200190518101906123589190612c8c565b6123a45760405162461bcd60e51b815260206004820181905260248201527f426f72696e6745524332303a205472616e7366657246726f6d206661696c65646044820152606401610454565b505050505050565b6000826123bb5750600061244a565b60006123c78385612e17565b9050826123d48583612e54565b146124475760405162461bcd60e51b815260206004820152602160248201527f536166654d6174683a206d756c7469706c69636174696f6e206f766572666c6f60448201527f77000000000000000000000000000000000000000000000000000000000000006064820152608401610454565b90505b92915050565b60008082116124a15760405162461bcd60e51b815260206004820152601a60248201527f536166654d6174683a206469766973696f6e206279207a65726f0000000000006044820152606401610454565b6124478284612e54565b6000828211156124fd5760405162461bcd60e51b815260206004820152601e60248201527f536166654d6174683a207375627472616374696f6e206f766572666c6f7700006044820152606401610454565b6124478284612e8f565b604080516001600160a01b038481166024830152604480830185905283518084039091018152606490920183526020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167fa9059cbb000000000000000000000000000000000000000000000000000000001790529151600092839290871
6916125919190612dfb565b6000604051808303816000865af19150503d80600081146125ce576040519150601f19603f3d011682016040523d82523d6000602084013e6125d3565b606091505b50915091508180156125fd5750805115806125fd5750808060200190518101906125fd9190612c8c565b6126495760405162461bcd60e51b815260206004820152601c60248201527f426f72696e6745524332303a205472616e73666572206661696c6564000000006044820152606401610454565b5050505050565b60008061265d8385612ea6565b9050838110156124475760405162461bcd60e51b815260206004820152601b60248201527f536166654d6174683a206164646974696f6e206f766572666c6f7700000000006044820152606401610454565b6001600160a01b03811660009081526001830160205260408120541515612447565b6000612447836001600160a01b0384166127f1565b60408051600481526024810182526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167f95d89b4100000000000000000000000000000000000000000000000000000000179052905160609160009182916001600160a01b0386169161275b9190612dfb565b600060405180830381855afa9150503d8060008114612796576040519150601f19603f3d011682016040523d82523d6000602084013e61279b565b606091505b5091509150816127e0576040518060400160405280600381526020017f3f3f3f00000000000000000000000000000000000000000000000000000000008152506127e9565b6127e981612840565b949350505050565b60008181526001830160205260408120546128385750815460018181018455600084815260208082209093018490558454848252828601909352604090209190915561244a565b50600061244a565b6060604082511061285f578180602001905181019061244a9190612ebe565b8151602014156129ef5760005b60208160ff161080156128b95750828160ff168151811061288f5761288f612ca9565b01602001517fff000000000000000000000000000000000000000000000000000000000000001615155b156128d057806128c881612f89565b91505061286c565b60008160ff1667ffffffffffffffff8111156128ee576128ee612d40565b6040519080825280601f01601f191660200182016040528015612918576020820181803683370190505b509050600091505b60208260ff1610801561296d5750838260ff168151811061294357612943612ca9565b01602001517fff000000000000000000000000000000000000000000000000000000000000001615155b1
56129e857838260ff168151811061298757612987612ca9565b602001015160f81c60f81b818360ff16815181106129a7576129a7612ca9565b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a905350816129e081612f89565b925050612920565b9392505050565b505060408051808201909152600381527f3f3f3f0000000000000000000000000000000000000000000000000000000000602082015290565b600060208284031215612a3a57600080fd5b5035919050565b6001600160a01b0381168114612a5657600080fd5b50565b600060208284031215612a6b57600080fd5b813561244781612a41565b60008060408385031215612a8957600080fd5b50508035926020909101359150565b60008060208385031215612aab57600080fd5b823567ffffffffffffffff80821115612ac357600080fd5b818501915085601f830112612ad757600080fd5b813581811115612ae657600080fd5b8660208260051b8501011115612afb57600080fd5b60209290920196919550909350505050565b8015158114612a5657600080fd5b60008060008060808587031215612b3157600080fd5b84359350602085013592506040850135612b4a81612a41565b91506060850135612b5a81612b0d565b939692955090935050565b60008060408385031215612b7857600080fd5b823591506020830135612b8a81612a41565b809150509250929050565b600080600060608486031215612baa57600080fd5b833592506020840135612bbc81612a41565b91506040840135612bcc81612a41565b809150509250925092565b60005b83811015612bf2578181015183820152602001612bda565b83811115610a855750506000910152565b8481526001600160a01b03841660208201526080604082015260008351806080840152612c378160a0850160208801612bd7565b606083019390935250601f919091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0160160a0019392505050565b600060208284031215612c8557600080fd5b5051919050565b600060208284031215612c9e57600080fd5b815161244781612b0d565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b60007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff821415612d3957612d39612cd8565b5060010190565b7f4e487b71000000000000000000000000000000000
00000000000000000000000600052604160045260246000fd5b600060808284031215612d8157600080fd5b6040516080810181811067ffffffffffffffff82111715612da457612da4612d40565b6040528251612db281612a41565b808252506020830151602082015260408301516040820152606083015160608201528091505092915050565b600060208284031215612df057600080fd5b815161244781612a41565b60008251612e0d818460208701612bd7565b9190910192915050565b6000817fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0483118215151615612e4f57612e4f612cd8565b500290565b600082612e8a577f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b500490565b600082821015612ea157612ea1612cd8565b500390565b60008219821115612eb957612eb9612cd8565b500190565b600060208284031215612ed057600080fd5b815167ffffffffffffffff80821115612ee857600080fd5b818401915084601f830112612efc57600080fd5b815181811115612f0e57612f0e612d40565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f01168101908382118183101715612f5457612f54612d40565b81604052828152876020848701011115612f6d57600080fd5b612f7e836020830160208801612bd7565b979650505050505050565b600060ff821660ff811415612fa057612fa0612cd8565b6001019291505056fea2646970667358221220e7a439ad2eec4ff6f05ac86d53e16d75af36ffd1779c1cac46b59904b7d2dee064736f6c63430008090033";
// NOTE(review): this follows the TypeChain generated-factory layout
// (paired _abi/_bytecode constants plus typed deploy/attach/connect
// wrappers) — presumably auto-generated; prefer regenerating over editing.
export class MasterChefJoeV3__factory extends ContractFactory {
  // Accepts either a single signer (pairing it with the bundled ABI and
  // bytecode) or the full ethers ContractFactory argument list.
  constructor(
    ...args: [signer: Signer] | ConstructorParameters<typeof ContractFactory>
  ) {
    if (args.length === 1) {
      // Single-argument form: args[0] is the signer.
      super(_abi, _bytecode, args[0]);
    } else {
      super(...args);
    }
  }
  // Deploy a new MasterChefJoeV3 with the given constructor arguments,
  // returning the typed contract instance.
  deploy(
    _MASTER_CHEF_V2: string,
    _joe: string,
    _MASTER_PID: BigNumberish,
    overrides?: Overrides & { from?: string | Promise<string> }
  ): Promise<MasterChefJoeV3> {
    return super.deploy(
      _MASTER_CHEF_V2,
      _joe,
      _MASTER_PID,
      overrides || {}
    ) as Promise<MasterChefJoeV3>;
  }
  // Build (without sending) the deployment transaction for these arguments.
  getDeployTransaction(
    _MASTER_CHEF_V2: string,
    _joe: string,
    _MASTER_PID: BigNumberish,
    overrides?: Overrides & { from?: string | Promise<string> }
  ): TransactionRequest {
    return super.getDeployTransaction(
      _MASTER_CHEF_V2,
      _joe,
      _MASTER_PID,
      overrides || {}
    );
  }
  // Bind this factory's ABI to an already-deployed contract address.
  attach(address: string): MasterChefJoeV3 {
    return super.attach(address) as MasterChefJoeV3;
  }
  // Return a copy of this factory bound to a different signer.
  connect(signer: Signer): MasterChefJoeV3__factory {
    return super.connect(signer) as MasterChefJoeV3__factory;
  }
  static readonly bytecode = _bytecode;
  static readonly abi = _abi;
  // Construct just the typed ABI Interface (no provider needed).
  static createInterface(): MasterChefJoeV3Interface {
    return new utils.Interface(_abi) as MasterChefJoeV3Interface;
  }
  // Attach to a deployed instance with an arbitrary signer or provider.
  static connect(
    address: string,
    signerOrProvider: Signer | Provider
  ): MasterChefJoeV3 {
    return new Contract(address, _abi, signerOrProvider) as MasterChefJoeV3;
  }
}
|
TypeScript
|
MIT
|
vulturefi/vulturefi.github.io/types/ethers-contracts/factories/MasterChefJoeV3__factory.ts
|
2c7e5120-1bc7-43e4-96d9-dce537347361
|
[]
|
[]
|
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:9332")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9332")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Sarnath address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Sarnath address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
|
Python
|
MIT
|
iannkwon/Sarnath/contrib/bitrpc/bitrpc.py
|
4105c1d4-ef81-4469-856a-e06f8cb086ff
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 284, "end": 293, "context": "\nif rpcpass == \"\":\n\taccess = ServiceProxy(\"http://127.0.0.1:9332\")\nelse:\n\taccess = ServiceProxy(\"http://\"+rpc"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 362, "end": 371, "context": "ss = ServiceProxy(\"http://\"+rpcuser+\":\"+rpcpass+\"@127.0.0.1:9332\")\ncmd = sys.argv[1].lower()\n\nif cmd == \"back"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 284, "end": 293, "context": "\nif rpcpass == \"\":\n\taccess = ServiceProxy(\"http://127.0.0.1:9332\")\nelse:\n\taccess = ServiceProxy(\"http://\"+rpc"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 362, "end": 371, "context": "ss = ServiceProxy(\"http://\"+rpcuser+\":\"+rpcpass+\"@127.0.0.1:9332\")\ncmd = sys.argv[1].lower()\n\nif cmd == \"back"}]
|
/* global describe beforeEach it */
import { expect } from 'chai'
import React from 'react'
import enzyme, { shallow } from 'enzyme'
import Adapter from 'enzyme-adapter-react-16'
import {UserHome} from '../user-home'
const adapter = new Adapter()
enzyme.configure({ adapter })
describe('UserHome', () => {
let userHome
beforeEach(() => {
userHome = shallow(<UserHome email="cody@email.com" user={{ reviews: [] }} userOrders={[]} getUserOrders={() => {}} getUserCart={() => {}}/>)
})
it('renders the email in an h2', () => {
expect(userHome.find('h2').text()).to.be.equal('Welcome, cody@email.com')
})
})
|
JavaScript
|
MIT
|
adink829/oregon-trail-outfitters/client/components/tests/user-home.spec.js
|
ca3e7bf2-9af6-49d0-b0ff-79240a0c5abf
|
[{"tag": "EMAIL", "value": "cody@email.com", "start": 602, "end": 616, "context": "userHome.find('h2').text()).to.be.equal('Welcome, cody@email.com')\n })\n})\n"}, {"tag": "EMAIL", "value": "cody@email.com", "start": 386, "end": 400, "context": "h(() => {\n userHome = shallow(<UserHome email=\"cody@email.com\" user={{ reviews: [] }} userOrders={[]} getUserOr"}]
|
[{"tag": "EMAIL", "value": "cody@email.com", "start": 602, "end": 616, "context": "userHome.find('h2').text()).to.be.equal('Welcome, cody@email.com')\n })\n})\n"}, {"tag": "EMAIL", "value": "cody@email.com", "start": 386, "end": 400, "context": "h(() => {\n userHome = shallow(<UserHome email=\"cody@email.com\" user={{ reviews: [] }} userOrders={[]} getUserOr"}]
|
# Generate a Rails app.
# Use the rake tasks in this repository to generate the Rails apps.
# Add sorbet-rails (from this checkout) and the sorbet gems to the Gemfile.
def add_gems
  gem 'sorbet-rails', path: '../../../.'

  pinned = ENV["SORBET_VERSION"]
  if pinned.nil?
    # prefer to test against latest version because sorbet is updated frequently
    gem 'sorbet'
    gem 'sorbet-runtime'
  else
    # mostly used to test against a stable version of Sorbet in Travis.
    gem 'sorbet', pinned
    gem 'sorbet-runtime', pinned
  end
end
# Register the test route exercised by the generated app's smoke tests.
def add_routes
  test_route = "get 'test/index' => 'test#index'"
  route test_route
end
# Write an initializer that loads the custom RBI plugin from lib/ and
# registers it with sorbet-rails' model RBI formatter.
def create_initializers
  initializer "sorbet_rails.rb", <<~RUBY
    # typed: strict
    require(Rails.root.join('lib/mythical_rbi_plugin'))
    SorbetRails::ModelRbiFormatter.register_plugin(MythicalRbiPlugin)
  RUBY
end
# Write a sample sorbet-rails model plugin into lib/. The plugin adds an RBI
# sig for the `mythicals` class method that the Mythical concern injects.
def create_lib
  lib "mythical_rbi_plugin.rb", <<~'RUBY'
    # typed: true
    class MythicalRbiPlugin < SorbetRails::ModelPlugins::Base
      def generate(root)
        return unless @model_class.include?(Mythical)

        model_class_rbi = root.create_class(self.model_class_name)

        # ActiveSupport::Concern class method will be inserted to the class
        # directly. We need to also put the sig in the model class rbi directly
        model_class_rbi.create_method(
          'mythicals',
          class_method: true,
          return_type: "T::Array[#{@model_class.name}]",
        )
      end
    end
  RUBY
end
# Write three empty helper modules; they exist only so helper RBI
# generation has something to pick up.
def create_helpers
  file "app/helpers/foo_helper.rb", <<~RUBY
    module FooHelper
    end
  RUBY
  file "app/helpers/bar_helper.rb", <<~RUBY
    module BarHelper
    end
  RUBY
  file "app/helpers/baz_helper.rb", <<~RUBY
    module BazHelper
    end
  RUBY
end
# Write the fixture models (wizards, wands, spell books, etc.) into the
# generated app. Model contents vary with ENV['RAILS_VERSION'] because
# enum options (_prefix/_suffix) and ActiveStorage attachments are not
# available on every Rails version under test.
def create_models
  # Rails 4.2 doesn't generate ApplicationRecord itself, so create it here.
  if ENV['RAILS_VERSION'] == '4.2'
    file "app/models/application_record.rb", <<~RUBY
      class ApplicationRecord < ActiveRecord::Base
        self.abstract_class = true
      end
    RUBY
  end
  file "app/models/spell_book.rb", <<~RUBY
    class SpellBook < ApplicationRecord
      validates :name, length: { minimum: 5 }, presence: true
      belongs_to :wizard
      enum book_type: {
        unclassified: 0,
        biology: 1,
        dark_art: 999,
      }
    end
  RUBY
  file "app/models/potion.rb", <<~RUBY
    # an abstract class that has no table
    class Potion < ApplicationRecord
      self.abstract_class = true
    end
  RUBY
  file "app/models/wand.rb", <<~RUBY
    class Wand < ApplicationRecord
      include Mythical
      enum core_type: {
        phoenix_feather: 0,
        dragon_heartstring: 1,
        unicorn_tail_hair: 2,
        basilisk_horn: 3,
      }
      belongs_to :wizard
      def wood_type
        'Type ' + super
      end
    end
  RUBY
  # A nasty hack to add has_one_attached and has_many_attached to the models/wizard.rb file.
  attachments = nil
  if ['5.2', '6.0'].include?(ENV["RAILS_VERSION"])
    attachments = "has_one_attached :school_photo\n    has_many_attached :hats"
  end
  # Rails 4.2 lacks enum _prefix/_suffix options, so it gets a reduced model.
  if ENV["RAILS_VERSION"] == "4.2"
    file "app/models/wizard.rb", <<~RUBY
      class Wizard < ApplicationRecord
        validates :name, length: { minimum: 5 }, presence: true
        enum house: {
          Gryffindor: 0,
          Hufflepuff: 1,
          Ravenclaw: 2,
          Slytherin: 3,
        }
        enum professor: {
          "Severus Snape": 0,
          "Minerva McGonagall": 1,
          "Pomona Sprout": 2,
          "Filius Flitwick": 3,
          "Hagrid": 4,
        }
        enum broom: {
          nimbus: 'nimbus',
          firebolt: 'firebolt',
        }
        has_one :wand
        has_many :spell_books
        scope :recent, -> { where('created_at > ?', 1.month.ago) }
      end
    RUBY
  else
    file "app/models/wizard.rb", <<~RUBY
      class Wizard < ApplicationRecord
        validates :name, length: { minimum: 5 }, presence: true
        enum house: {
          Gryffindor: 0,
          Hufflepuff: 1,
          Ravenclaw: 2,
          Slytherin: 3,
        }
        enum professor: {
          "Severus Snape": 0,
          "Minerva McGonagall": 1,
          "Pomona Sprout": 2,
          "Filius Flitwick": 3,
          "Hagrid": 4,
        }
        enum broom: {
          nimbus: 'nimbus',
          firebolt: 'firebolt',
        }, _prefix: true
        enum quidditch_position: {
          keeper: 0,
          seeker: 1,
          beater: 2,
          chaser: 3,
        }, _prefix: :quidditch
        enum hair_color: {
          brown: 0,
          black: 1,
          blonde: 2,
        }, _suffix: :hair
        enum eye_color: {
          brown: 0,
          green: 1,
          blue: 2,
        }, _prefix: :color, _suffix: :eyes
        has_one :wand
        has_many :spell_books
        scope :recent, -> { where('created_at > ?', 1.month.ago) }
        #{attachments}
      end
    RUBY
  end
  # Concern that injects a `mythicals` class method (see create_lib's plugin).
  file "app/models/concerns/mythical.rb", <<~RUBY
    require 'active_support/concern'
    module Mythical
      extend ActiveSupport::Concern
      class_methods do
        def mythicals
          all.to_a # yeah!
        end
      end
    end
  RUBY
  # STI subclass of Wizard, used to test single-table-inheritance RBIs.
  file "app/models/squib.rb", <<~RUBY
    class Squib < Wizard
      def is_magical
        false
      end
    end
  RUBY
end
# Write the schema migrations for the fixture models. The migration
# superclass must be versioned (e.g. ActiveRecord::Migration[5.2]) on
# Rails >= 5, but bare ActiveRecord::Migration on 4.2.
def create_migrations
  if ENV["RAILS_VERSION"] == "4.2"
    migration_superclass = 'ActiveRecord::Migration'
  else
    migration_superclass = "ActiveRecord::Migration[#{ENV['RAILS_VERSION']}]"
  end
  file "db/migrate/20190620000001_create_wizards.rb", <<~RUBY
    class CreateWizards < #{migration_superclass}
      def change
        create_table :wizards do |t|
          t.string :name
          t.integer :house
          t.string :parent_email
          t.text :notes
          t.timestamps
        end
      end
    end
  RUBY
  file "db/migrate/20190620000002_create_wands.rb", <<~RUBY
    class CreateWands < #{migration_superclass}
      def change
        create_table :wands do |t|
          t.references :wizard, unique: true, null: false
          t.string :wood_type
          t.integer :core_type
          t.timestamps
        end
      end
    end
  RUBY
  file "db/migrate/20190620000003_create_spell_books.rb", <<~RUBY
    class CreateSpellBooks < #{migration_superclass}
      def change
        create_table :spell_books do |t|
          t.string :name
          t.references :wizard
          t.integer :book_type, null: false, default: 0
        end
      end
    end
  RUBY
  file "db/migrate/20190620000004_add_more_column_types_to_wands.rb", <<~RUBY
    class AddMoreColumnTypesToWands < #{migration_superclass}
      def change
        add_column :wands, :flexibility, :float, null: false, default: 0.5
        add_column :wands, :hardness, :decimal, null: false, precision: 10, scale: 10, default: 5
        add_column :wands, :reflectance, :decimal, null: false, precision: 10, scale: 0, default: 0.5
        add_column :wands, :broken, :boolean, null: false, default: false
        add_column :wands, :chosen_at_date, :date
        add_column :wands, :chosen_at_time, :time
        # JSON column type is only supported on 5.2 or higher
        unless ['4.2', '5.0', '5.1'].include?(ENV['RAILS_VERSION'])
          add_column :wands, :spell_history, :json
          add_column :wands, :maker_info, :json, null: false, default: '{}'
        end
      end
    end
  RUBY
  file "db/migrate/20190620000005_add_broom_to_wizard.rb", <<~RUBY
    class AddBroomToWizard < #{migration_superclass}
      def change
        add_column :wizards, :broom, :string
      end
    end
  RUBY
  # The extra enum columns exist only on Rails > 4.2 (see create_models).
  if ENV["RAILS_VERSION"] != "4.2"
    file "db/migrate/20190620000006_add_more_enums_to_wizard.rb", <<~RUBY
      class AddMoreEnumsToWizard < #{migration_superclass}
        def change
          add_column :wizards, :quidditch_position, :integer
          add_column :wizards, :hair_color, :integer
          add_column :wizards, :eye_color, :integer
          add_column :wizards, :hair_length, :integer
        end
      end
    RUBY
  end
  # `type` column enables the Squib STI subclass.
  file "db/migrate/20190620000007_add_type_to_wizard.rb", <<~RUBY
    class AddTypeToWizard < #{migration_superclass}
      def change
        add_column :wizards, :type, :string, null: false, default: 'Wizard'
      end
    end
  RUBY
end
# Write the fixture mailers, including sorbet sigs on their methods so
# mailer RBI generation has typed and untyped actions to handle.
def create_mailers
  # Rails 4.2 doesn't generate ApplicationMailer itself, so create it here.
  if ENV['RAILS_VERSION'] == '4.2'
    file "app/mailers/application_mailer.rb", <<~RUBY
      class ApplicationMailer < ActionMailer::Base
      end
    RUBY
  end
  file "app/mailers/hogwarts_acceptance_mailer.rb", <<~RUBY
    class HogwartsAcceptanceMailer < ApplicationMailer
      extend T::Sig
      sig { params(student: Wizard).void }
      def notify(student)
        # TODO: mail acceptance letter to student
      end
      def notify_retry(student)
        # TODO: send more owls!!
      end
    end
  RUBY
  file "app/mailers/daily_prophet_mailer.rb", <<~RUBY
    class DailyProphetMailer < ApplicationMailer
      extend T::Sig
      sig { params(wizards: T::Array[Wizard], hotnews_only: T::Boolean).void }
      def notify_subscribers(wizards:, hotnews_only:)
        # TODO: mail the latest news to wizards!
      end
    end
  RUBY
end
# Copy the sorbet test-case file into the app and mark it `typed: true`
# via a sorbet typed-override file.
def add_sorbet_test_files
  file "typed-override.yaml", <<~YAML
    true:
      - ./sorbet_test_cases.rb
  YAML
  copy_file "./sorbet_test_cases.rb", "sorbet_test_cases.rb"
end
# Template copy operations (file/copy_file) resolve relative paths against
# the directories returned here — i.e. this template's own directory.
def source_paths
  Array(__dir__)
end
# Main setup
source_paths
# On Rails 4.2 / 5.0, rewrite the generated Gemfile in place before bundling.
if ['4.2', '5.0'].include?(ENV["RAILS_VERSION"])
  File.open('Gemfile', 'r+') do |f|
    out = ""
    f.each do |line|
      # We remove sdoc and web-console because they misbehave.
      # sqlite needs to be limited to 1.3.x or it won't work.
      out << line.gsub("gem 'sqlite3'", "gem 'sqlite3', '~> 1.3.6'") unless line =~ /gem \'(sdoc|web-console)\'.*/
    end
    # Rewind and overwrite the file with the filtered contents, truncating
    # any leftover bytes from the (longer) original.
    f.pos = 0
    f.print out
    f.truncate(f.pos)
  end
end
add_gems

# Everything below runs only after `bundle install` succeeds.
after_bundle do
  say "Creating application..."
  add_routes
  create_initializers
  create_lib
  create_helpers
  create_models
  create_migrations
  create_mailers
  add_sorbet_test_files
  # Rails 4.2 requires a pinned bundler; the underscore syntax selects it.
  bundle_version = ENV["RAILS_VERSION"] == "4.2" ? "_1.17.3_" : ""
  Bundler.with_clean_env do
    # Rails 4.2 doesn't have the rails_command method, so just use run.
    run "bundle #{bundle_version} exec rake db:migrate"
  end
  # Optionally bootstrap sorbet and generate all RBIs (skipped when
  # RUN_WITH_SORBET=false, e.g. for faster non-sorbet test runs).
  if ENV["RUN_WITH_SORBET"] != 'false'
    Bundler.with_clean_env do
      run "SRB_YES=true bundle #{bundle_version} exec srb init"
      run "bundle #{bundle_version} exec rake rails_rbi:all"
      run "bundle #{bundle_version} exec srb rbi hidden-definitions"
      run "bundle #{bundle_version} exec srb rbi todo"
    end
  end
  say "Done!"
end
|
Ruby
|
MIT
|
clarkdave/sorbet-rails/spec/generators/rails-template.rb
|
b6896b19-7047-46d7-a70c-a4dbc113d21e
|
[{"tag": "NAME", "value": "Hagrid", "start": 4116, "end": 4122, "context": "t\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n enum broom: {\n n"}, {"tag": "NAME", "value": "Severus Snape", "start": 3287, "end": 3300, "context": ",\n }\n\n enum professor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n "}, {"tag": "NAME", "value": "Filius Flitwick", "start": 3382, "end": 3397, "context": "all\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n en"}, {"tag": "NAME", "value": "Minerva McGonagall", "start": 3317, "end": 3335, "context": "essor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Fil"}, {"tag": "NAME", "value": "Minerva McGonagall", "start": 4019, "end": 4037, "context": "essor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Fil"}, {"tag": "NAME", "value": "Filius Flitwick", "start": 4084, "end": 4099, "context": "all\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n en"}, {"tag": "NAME", "value": "Pomona Sprout", "start": 3352, "end": 3365, "context": " 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"H"}, {"tag": "NAME", "value": "Hagrid", "start": 3414, "end": 3420, "context": "t\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n enum broom: {\n n"}, {"tag": "NAME", "value": "Pomona Sprout", "start": 4054, "end": 4067, "context": " 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"H"}, {"tag": "NAME", "value": "Severus Snape", "start": 3989, "end": 4002, "context": ",\n }\n\n enum professor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n "}]
|
[{"tag": "NAME", "value": "Hagrid", "start": 4116, "end": 4122, "context": "t\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n enum broom: {\n n"}, {"tag": "NAME", "value": "Severus Snape", "start": 3287, "end": 3300, "context": ",\n }\n\n enum professor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n "}, {"tag": "NAME", "value": "Filius Flitwick", "start": 3382, "end": 3397, "context": "all\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n en"}, {"tag": "NAME", "value": "Minerva McGonagall", "start": 3317, "end": 3335, "context": "essor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Fil"}, {"tag": "NAME", "value": "Minerva McGonagall", "start": 4019, "end": 4037, "context": "essor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Fil"}, {"tag": "NAME", "value": "Filius Flitwick", "start": 4084, "end": 4099, "context": "all\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n en"}, {"tag": "NAME", "value": "Pomona Sprout", "start": 3352, "end": 3365, "context": " 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"H"}, {"tag": "NAME", "value": "Hagrid", "start": 3414, "end": 3420, "context": "t\": 2,\n \"Filius Flitwick\": 3,\n \"Hagrid\": 4,\n }\n\n enum broom: {\n n"}, {"tag": "NAME", "value": "Pomona Sprout", "start": 4054, "end": 4067, "context": " 0,\n \"Minerva McGonagall\": 1,\n \"Pomona Sprout\": 2,\n \"Filius Flitwick\": 3,\n \"H"}, {"tag": "NAME", "value": "Severus Snape", "start": 3989, "end": 4002, "context": ",\n }\n\n enum professor: {\n \"Severus Snape\": 0,\n \"Minerva McGonagall\": 1,\n "}]
|
/*
Package: dyncall
Library: test
File: test/callf/main.c
Description:
License:
Copyright (c) 2007-2021 Daniel Adler <dadler@uni-goettingen.de>,
Tassilo Philipp <tphilipp@potion-studios.com>
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/* test dcCallF API */
#include "../../dyncall/dyncall_callf.h"
#include "../common/platformInit.h"
#include "../common/platformInit.c" /* Impl. for functions only used in this translation unit */
#include <stdarg.h>
#if defined(DC_UNIX) && !defined(DC__OS_BeOS)
#include <sys/syscall.h>
#endif
/* Sample callee: prints its arguments and returns nonzero
   iff called with exactly (1, 2, 3). */
int vf_iii(int x,int y,int z)
{
  int ok = ((x == 1) && (y == 2) && (z == 3)) ? 1 : 0;
  printf("%d %d %d: %d", x, y, z, ok);
  return ok;
}
/* Mixed float/int callee: prints its arguments and returns nonzero
   iff they are exactly 1..9 in order. */
int vf_ffiffiffi(float a, float b, int c, float d, float e, int f, float g, float h, int i)
{
  int ok = 0;
  if (a == 1.f && b == 2.f && c == 3)
    if (d == 4.f && e == 5.f && f == 6)
      if (g == 7.f && h == 8.f && i == 9)
        ok = 1;
  printf("%f %f %d %f %f %d %f %f %d: %d", a, b, c, d, e, f, g, h, i, ok);
  return ok;
}
/* Variadic callee: fixed (float, float, int) then six promoted varargs
   pulled as (double, double, int, double, double, int). Prints the
   arguments and returns nonzero iff they are exactly 1..9 in order. */
int vf_ffiV(float a, float b, int c, ...)
{
  va_list args;
  double d, e, g, h;
  int f, i, ok;

  va_start(args, c);
  d = va_arg(args, double);
  e = va_arg(args, double);
  f = va_arg(args, int);
  g = va_arg(args, double);
  h = va_arg(args, double);
  i = va_arg(args, int);
  va_end(args);

  ok = (a == 1.f) && (b == 2.f) && (c == 3)
    && (d == 4.) && (e == 5.) && (f == 6)
    && (g == 7.) && (h == 8.) && (i == 9);
  printf("%f %f %d %f %f %d %f %f %d: %d", a, b, c, d, e, f, g, h, i, ok);
  return ok;
}
/* main */
/* Exercises the dcCallF/dcArgF 'formatted' dyncall APIs against the vf_*
   sample callees above, accumulating per-call success into r and printing
   a final summary line. Returns 0 unconditionally; the pass/fail signal is
   the printed "result: callf: <r>" line. */
int main(int argc, char* argv[])
{
  DCCallVM* vm;
  DCValue ret;
  int r = 1;

  dcTest_initPlatform();

  /* allocate call vm */
  vm = dcNewCallVM(4096);

  /* calls using 'formatted' API */

  dcReset(vm);
  printf("callf iii)i: ");
  /* signature string: three ints in, int out */
  dcCallF(vm, &ret, (void*)&vf_iii, "iii)i", 1, 2, 3);
  r = ret.i && r;

  dcReset(vm);
  printf("\ncallf ffiffiffi)i: ");
  dcCallF(vm, &ret, (void*)&vf_ffiffiffi, "ffiffiffi)i", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = ret.i && r;

  /* same but with calling convention prefix */
  dcReset(vm);
  printf("\ncallf _:ffiffiffi)i: ");
  dcCallF(vm, &ret, (void*)&vf_ffiffiffi, "_:ffiffiffi)i", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = ret.i && r;

  /* vararg call */
  dcReset(vm);
  printf("\ncallf _effi_.ddiddi)i: ");
  /* "_e..._." marks the fixed part then the variadic part of the signature */
  dcCallF(vm, &ret, (void*)&vf_ffiV, "_effi_.ddiddi)i", 1.f, 2.f, 3, 4., 5., 6, 7., 8., 9);
  r = ret.i && r;

  /* arg binding then call using 'formatted' API */
  dcReset(vm);
  /* reset calling convention too */
  dcMode(vm, DC_CALL_C_DEFAULT);
  printf("\nargf iii)i then call: ");
  dcArgF(vm, "iii)i", 1, 2, 3);
  r = r && dcCallInt(vm, (void*)&vf_iii);

  dcReset(vm);
  printf("\nargf iii then call: ");
  dcArgF(vm, "iii", 1, 2, 3);
  r = r && dcCallInt(vm, (void*)&vf_iii);

  dcReset(vm);
  printf("\nargf ffiffiffi)i then call: ");
  dcArgF(vm, "ffiffiffi)i", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = r && dcCallInt(vm, (void*)&vf_ffiffiffi);

  dcReset(vm);
  printf("\nargf ffiffiffi then call: ");
  dcArgF(vm, "ffiffiffi", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = r && dcCallInt(vm, (void*)&vf_ffiffiffi);

#if defined(DC_UNIX) && !defined(DC__OS_MacOSX) && !defined(DC__OS_SunOS) && !defined(DC__OS_BeOS)
  /* testing syscall using calling convention prefix - not available on all platforms */
  dcReset(vm);
  printf("\ncallf _$iZi)i");
  fflush(NULL); /* needed before syscall write as it's immediate, or order might be incorrect */
  dcCallF(vm, &ret, (DCpointer)(ptrdiff_t)SYS_write, "_$iZi)i", 1/*stdout*/, " = syscall: 1", 13);
  r = ret.i == 13 && r;
#endif

  /* free vm */
  dcFree(vm);

  printf("\nresult: callf: %d\n", r);

  dcTest_deInitPlatform();

  return 0;
}
|
C
|
Apache-2.0
|
travisdoor/bl/deps/dyncall-1.2/test/callf/main.c
|
bbbd39d8-9d4b-4d23-99f0-a07ae79c6150
|
[{"tag": "NAME", "value": "Tassilo Philipp", "start": 182, "end": 197, "context": "er@uni-goettingen.de>,\n Tassilo Philipp <tphilipp@potion-studios.com>\n\n Permission to u"}, {"tag": "NAME", "value": "Daniel Adler", "start": 114, "end": 126, "context": "escription:\n License:\n\n Copyright (c) 2007-2021 Daniel Adler <dadler@uni-goettingen.de>,\n "}, {"tag": "EMAIL", "value": "tphilipp@potion-studios.com", "start": 199, "end": 226, "context": ".de>,\n Tassilo Philipp <tphilipp@potion-studios.com>\n\n Permission to use, copy, modify, and distrib"}, {"tag": "EMAIL", "value": "dadler@uni-goettingen.de", "start": 128, "end": 152, "context": "icense:\n\n Copyright (c) 2007-2021 Daniel Adler <dadler@uni-goettingen.de>,\n Tassilo Philipp <tph"}]
|
[{"tag": "NAME", "value": "Tassilo Philipp", "start": 182, "end": 197, "context": "er@uni-goettingen.de>,\n Tassilo Philipp <tphilipp@potion-studios.com>\n\n Permission to u"}, {"tag": "NAME", "value": "Daniel Adler", "start": 114, "end": 126, "context": "escription:\n License:\n\n Copyright (c) 2007-2021 Daniel Adler <dadler@uni-goettingen.de>,\n "}, {"tag": "EMAIL", "value": "tphilipp@potion-studios.com", "start": 199, "end": 226, "context": ".de>,\n Tassilo Philipp <tphilipp@potion-studios.com>\n\n Permission to use, copy, modify, and distrib"}, {"tag": "EMAIL", "value": "dadler@uni-goettingen.de", "start": 128, "end": 152, "context": "icense:\n\n Copyright (c) 2007-2021 Daniel Adler <dadler@uni-goettingen.de>,\n Tassilo Philipp <tph"}]
|
<?php namespace DCarbone\XMLPrimitiveTypes\Types;
/**
* Copyright 2015 Daniel Carbone (daniel.p.carbone@gmail.com)
*
* Interface XMLPrimitiveTypeInterface
* @package DCarbone\XMLPrimitiveTypes\Types
*/
interface XMLPrimitiveTypeInterface
{
    /**
     * Name of the XML data type this implementation represents.
     *
     * @return string
     */
    public function getXMLDataType();
    /**
     * Current value held by this type instance.
     *
     * @return mixed
     */
    public function getValue();
    /**
     * Assign a new value to this type instance.
     *
     * @param mixed $value
     */
    public function setValue($value);
    /**
     * String representation of the contained value.
     *
     * @return string
     */
    public function __toString();
}
|
PHP
|
Apache-2.0
|
dcarbone/xml-primitive-types/src/Types/XMLPrimitiveTypeInterface.php
|
644858ad-a647-4914-b2eb-b249fda8b0ec
|
[{"tag": "NAME", "value": "Daniel Carbone", "start": 73, "end": 87, "context": "e\\XMLPrimitiveTypes\\Types;\n\n/**\n * Copyright 2015 Daniel Carbone (daniel.p.carbone@gmail.com)\n *\n * Interface XMLP"}, {"tag": "EMAIL", "value": "daniel.p.carbone@gmail.com", "start": 89, "end": 115, "context": "pes\\Types;\n\n/**\n * Copyright 2015 Daniel Carbone (daniel.p.carbone@gmail.com)\n *\n * Interface XMLPrimitiveTypeInterface\n * @pa"}]
|
[{"tag": "NAME", "value": "Daniel Carbone", "start": 73, "end": 87, "context": "e\\XMLPrimitiveTypes\\Types;\n\n/**\n * Copyright 2015 Daniel Carbone (daniel.p.carbone@gmail.com)\n *\n * Interface XMLP"}, {"tag": "EMAIL", "value": "daniel.p.carbone@gmail.com", "start": 89, "end": 115, "context": "pes\\Types;\n\n/**\n * Copyright 2015 Daniel Carbone (daniel.p.carbone@gmail.com)\n *\n * Interface XMLPrimitiveTypeInterface\n * @pa"}]
|
"""
DataMeta
DataMeta # noqa: E501
The version of the OpenAPI document: 1.4.0
Contact: leon.kuchenbecker@uni-tuebingen.de
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from datameta_client_lib.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class StagedMetaDataSets(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-constrained attributes on this model.
    allowed_values = {
    }
    # No length/range/regex validations on this model.
    validations = {
    }
    # None: unknown properties are rejected (subject to _configuration).
    additional_properties_type = None
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'metadataset_ids': ([str],),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # This model has no polymorphic discriminator.
        return None
    # Maps pythonic attribute names to their JSON (wire) keys.
    attribute_map = {
        'metadataset_ids': 'metadatasetIds',  # noqa: E501
    }
    _composed_schemas = {}
    # Internal bookkeeping attributes set directly on the instance
    # (everything else goes through the _data_store via setattr).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, metadataset_ids, *args, **kwargs):  # noqa: E501
        """StagedMetaDataSets - a model defined in OpenAPI
        Args:
            metadataset_ids ([str]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
        """
        # Pop framework kwargs first so they are not treated as model fields.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.metadataset_ids = metadataset_ids
        # Remaining kwargs are dynamic model fields; optionally discard
        # unknown keys when the configuration says to.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
|
Python
|
Apache-2.0
|
ghga-de/datameta-client-lib/datameta_client_lib/model/staged_meta_data_sets.py
|
2754377e-b47a-46ec-ae32-b13a4dbc1a7a
|
[{"tag": "EMAIL", "value": "leon.kuchenbecker@uni-tuebingen.de", "start": 106, "end": 140, "context": "rsion of the OpenAPI document: 1.4.0\n Contact: leon.kuchenbecker@uni-tuebingen.de\n Generated by: https://openapi-generator.tech\n"}]
|
[{"tag": "EMAIL", "value": "leon.kuchenbecker@uni-tuebingen.de", "start": 106, "end": 140, "context": "rsion of the OpenAPI document: 1.4.0\n Contact: leon.kuchenbecker@uni-tuebingen.de\n Generated by: https://openapi-generator.tech\n"}]
|
// This is a library to be used to represent a Graph and various measurments for a Graph
// and to perform optimization using Particle Swarm Optimization (PSO)
// Copyright (C) 2008, 2015
// Patrick Olekas - polekas55@gmail.com
// Ali Minai - minaiaa@gmail.com
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package psograph.graph;
import java.io.Serializable;
/**
* This represents a Edge.
*
* There is some commented out code I believe in this file to support DAG and the concept
* of multiple edges between two nodes.
* @author Patrick
*
*/
public class Edge implements Serializable
{
    static final long serialVersionUID = 45L;

    /** Weight carried by this edge. */
    private double m_weight;

    /**
     * Copy Constructor
     * @param ci edge whose weight is copied
     */
    public Edge(Edge ci)
    {
        m_weight = ci.m_weight;
    }

    /**
     * Constructor
     * @param weight initial edge weight
     */
    public Edge(double weight)
    {
        m_weight = weight;
    }

    /**
     * Comparison of two objects.
     *
     * FIX: the previous implementation cast obj to Edge unconditionally,
     * which threw ClassCastException for non-Edge arguments and
     * NullPointerException for null; Object.equals() requires returning
     * false in both cases.
     *
     * @param obj object to compare against
     * @return true iff obj is an Edge with an equal weight
     */
    public boolean equals (Object obj)
    {
        if (this == obj)
            return true;
        if (!(obj instanceof Edge))
            return false;
        Edge e = (Edge) obj;
        // Double.compare treats NaN and -0.0 consistently with hashCode()
        return Double.compare(m_weight, e.getWeight()) == 0;
    }

    /**
     * Hash code consistent with equals(), as required by the
     * java.lang.Object contract (equal objects must hash equally).
     */
    public int hashCode()
    {
        return Double.valueOf(m_weight).hashCode();
    }

    /**
     * Mutator for weight value.
     * @param weight new weight
     */
    public void modifyWeight(double weight)
    {
        m_weight = weight;
    }

    /**
     * Accessor for weight.
     * @return the edge weight
     */
    public double getWeight()
    {
        return m_weight;
    }

    /* Only allow on weight per node to node connection
    ConnectionInfo(ConnectionInfo ci)
    {
        m_weight = new Vector<Integer>(ci.m_weight);
    }
    ConnectionInfo(int weight)
    {
        m_weight = new Vector<Integer>();
        m_weight.add(weight);
    }
    ConnectionInfo(int weight[])
    {
        m_weight = new Vector<Integer>();
        for(int i=0; i < weight.length; i++)
            m_weight.add(weight[i]);
    }
    void addWeight(int weight)
    {
        m_weight.add(weight);
    }
    void addWeights(int weight[])
    {
        m_weight = new Vector<Integer>();
        for(int i=0; i < weight.length; i++)
            m_weight.add(weight[i]);
    }
    void removeWeight(int weight)
    {
        m_weight.remove(new Integer(weight));
    }
    void removeWeights(int weight[])
    {
        for(int i=0; i < weight.length; i++)
            m_weight.remove(new Integer(weight[i]));
    }
    Vector<Integer> m_weight;
    */
}
|
Java
|
MIT
|
mallorbc/Advanced-Algorithms-Heuristic/JavaPSOGraph3/HW/src/psograph/graph/Edge.java
|
14ad1f4f-35cd-423b-aa5c-35b329c80d32
|
[{"tag": "EMAIL", "value": "polekas55@gmail.com", "start": 222, "end": 241, "context": "yright (C) 2008, 2015 \r\n// Patrick Olekas - polekas55@gmail.com\r\n// Ali Minai - minaiaa@gmail.com\r\n//\r\n// "}, {"tag": "NAME", "value": "Patrick", "start": 1214, "end": 1221, "context": " of multiple edges between two nodes.\r\n * @author Patrick\r\n *\r\n */\r\npublic class Edge implements Serializab"}, {"tag": "NAME", "value": "Patrick Olekas", "start": 205, "end": 219, "context": " (PSO)\r\n// Copyright (C) 2008, 2015 \r\n// Patrick Olekas - polekas55@gmail.com\r\n// Ali Minai - minai"}, {"tag": "NAME", "value": "Ali Minai", "start": 252, "end": 261, "context": " Patrick Olekas - polekas55@gmail.com\r\n// Ali Minai - minaiaa@gmail.com\r\n//\r\n// This program is fr"}, {"tag": "EMAIL", "value": "minaiaa@gmail.com", "start": 264, "end": 281, "context": "lekas - polekas55@gmail.com\r\n// Ali Minai - minaiaa@gmail.com\r\n//\r\n// This program is free software: you can"}]
|
[{"tag": "EMAIL", "value": "polekas55@gmail.com", "start": 222, "end": 241, "context": "yright (C) 2008, 2015 \r\n// Patrick Olekas - polekas55@gmail.com\r\n// Ali Minai - minaiaa@gmail.com\r\n//\r\n// "}, {"tag": "NAME", "value": "Patrick", "start": 1214, "end": 1221, "context": " of multiple edges between two nodes.\r\n * @author Patrick\r\n *\r\n */\r\npublic class Edge implements Serializab"}, {"tag": "NAME", "value": "Patrick Olekas", "start": 205, "end": 219, "context": " (PSO)\r\n// Copyright (C) 2008, 2015 \r\n// Patrick Olekas - polekas55@gmail.com\r\n// Ali Minai - minai"}, {"tag": "NAME", "value": "Ali Minai", "start": 252, "end": 261, "context": " Patrick Olekas - polekas55@gmail.com\r\n// Ali Minai - minaiaa@gmail.com\r\n//\r\n// This program is fr"}, {"tag": "EMAIL", "value": "minaiaa@gmail.com", "start": 264, "end": 281, "context": "lekas - polekas55@gmail.com\r\n// Ali Minai - minaiaa@gmail.com\r\n//\r\n// This program is free software: you can"}]
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE122_Heap_Based_Buffer_Overflow__cpp_CWE806_char_memcpy_51a.cpp
Label Definition File: CWE122_Heap_Based_Buffer_Overflow__cpp_CWE806.label.xml
Template File: sources-sink-51a.tmpl.cpp
*/
/*
* @description
* CWE: 122 Heap Based Buffer Overflow
* BadSource: Initialize data as a large string
* GoodSource: Initialize data as a small string
* Sink: memcpy
* BadSink : Copy data to string using memcpy
* Flow Variant: 51 Data flow: data passed as an argument from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <wchar.h>
/* NOTE(review): this is a Juliet test-suite case — the FLAW below is
 * deliberate and must NOT be fixed; analysis tools are tested against it.
 * The 100-byte allocations are never delete[]d here — presumably the sink
 * (in the 51b companion file) or the corpus convention covers that; confirm
 * before flagging as a leak. */
namespace CWE122_Heap_Based_Buffer_Overflow__cpp_CWE806_char_memcpy_51
{
#ifndef OMITBAD
/* bad function declaration */
void badSink(char * data);
/* bad(): hands a 99-char string to a sink that memcpy's into a smaller
 * buffer, producing the heap overflow this case exists to exhibit */
void bad()
{
    char * data;
    data = new char[100];
    /* FLAW: Initialize data as a large buffer that is larger than the small buffer used in the sink */
    memset(data, 'A', 100-1); /* fill with 'A's */
    data[100-1] = '\0'; /* null terminate */
    badSink(data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* good function declarations */
void goodG2BSink(char * data);
/* goodG2B uses the GoodSource with the BadSink */
static void goodG2B()
{
    char * data;
    data = new char[100];
    /* FIX: Initialize data as a small buffer that as small or smaller than the small buffer used in the sink */
    memset(data, 'A', 50-1); /* fill with 'A's */
    data[50-1] = '\0'; /* null terminate */
    goodG2BSink(data);
}
/* good(): entry point aggregating all good variants of this case */
void good()
{
    goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE122_Heap_Based_Buffer_Overflow__cpp_CWE806_char_memcpy_51; /* so that we can use good and bad easily */
/* Standalone driver: only compiled when building this testcase on its own
 * (see comment above); runs the good then the bad variant, each bracketed
 * by printLine() markers so output can be diffed by the harness. */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
|
C++
|
BSD-3-Clause
|
JianpingZeng/xcc/xcc/test/juliet/testcases/CWE122_Heap_Based_Buffer_Overflow/s04/CWE122_Heap_Based_Buffer_Overflow__cpp_CWE806_char_memcpy_51a.cpp
|
01a2b618-7f89-4f5a-973c-9cd4f0b07bf7
|
[]
|
[]
|
// stdweb/cargo-web generated shim: resolves the JS value behind handle $0
// via the private reference table and reports whether it is a Location.
export function __cargo_web_snippet_3d491adb30e232ec587c79baefb825cded12ff76(Module, $0) {
    var value = Module.STDWEB_PRIVATE.acquire_js_reference($0);
    return (value instanceof Location);
}
|
JavaScript
|
MIT
|
Tweoss/econ/client/consumer/static/snippets/stdweb-bb142200b065bd55/inline405.js
|
4d5d588e-938c-4b65-9270-5e7c4b7ea2b2
|
[]
|
[]
|
# IMPORTATION STANDARD
# IMPORTATION THIRDPARTY
import pytest
# IMPORTATION INTERNAL
from openbb_terminal.cryptocurrency.defi import terraengineer_model
@pytest.mark.vcr
@pytest.mark.parametrize(
    "asset,address",
    [("ust", "terra1tmnqgvg567ypvsvk6rwsga3srp7e3lg6u0elp8")],
)
def test_get_history_asset_from_terra_address(asset, address, recorder):
    """Fetch asset history for a Terra address (HTTP replayed from a VCR
    cassette) and snapshot the result via the suite's recorder fixture."""
    df = terraengineer_model.get_history_asset_from_terra_address(
        asset=asset,
        address=address,
    )
    # recorder compares df against the stored golden output
    recorder.capture(df)
|
Python
|
MIT
|
23errg/GamestonkTerminal/tests/openbb_terminal/cryptocurrency/defi/test_terraengineer_model.py
|
e3445d34-5cb8-4f7e-b1f9-93d89af5baa4
|
[{"tag": "API_KEY", "value": "terra1tmnqgvg567ypvsvk6rwsga3srp7e3lg6u0elp8", "start": 235, "end": 279, "context": "k.parametrize(\n \"asset,address\",\n [(\"ust\", \"terra1tmnqgvg567ypvsvk6rwsga3srp7e3lg6u0elp8\")],\n)\ndef test_get_history_asset_from_terra_addre"}]
|
[{"tag": "KEY", "value": "terra1tmnqgvg567ypvsvk6rwsga3srp7e3lg6u0elp8", "start": 235, "end": 279, "context": "k.parametrize(\n \"asset,address\",\n [(\"ust\", \"terra1tmnqgvg567ypvsvk6rwsga3srp7e3lg6u0elp8\")],\n)\ndef test_get_history_asset_from_terra_addre"}]
|
# -*- coding: latin-1 -*-
# -----------------------------------------------------------------------------
# Copyright 2009-2011 Stephen Tiedemann <stephen.tiedemann@googlemail.com>
#
# Licensed under the EUPL, Version 1.1 or - as soon they
# will be approved by the European Commission - subsequent
# versions of the EUPL (the "Licence");
# You may not use this work except in compliance with the
# Licence.
# You may obtain a copy of the Licence at:
#
# http://www.osor.eu/eupl
#
# Unless required by applicable law or agreed to in
# writing, software distributed under the Licence is
# distributed on an "AS IS" basis,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied.
# See the Licence for the specific language governing
# permissions and limitations under the Licence.
# -----------------------------------------------------------------------------
import logging
log = logging.getLogger(__name__)
import time
from types import *
import threading
import collections
import random
import nfc.clf
import nfc.dep
# local imports
from tco import *
from pdu import *
from err import *
from opt import *
# Socket type identifiers accepted by LogicalLinkController.socket().
RAW_ACCESS_POINT, LOGICAL_DATA_LINK, DATA_LINK_CONNECTION = range(3)
# Well-known service names mapped to their fixed service access point
# addresses (sdp=1 is also pre-registered in LogicalLinkController.snl).
wks_map = {
    "urn:nfc:sn:sdp" : 1,
    "urn:nfc:sn:ip"  : 2,
    "urn:nfc:sn:obex": 3,
    "urn:nfc:sn:snep": 4}
class ServiceAccessPoint(object):
    """One LLCP service access point (SAP): holds the sockets bound to a
    single address and queues their outbound PDUs. All shared state is
    guarded by the owning LogicalLinkController's lock (self.llc.lock)."""
    def __init__(self, addr, llc):
        self.llc = llc
        self.addr = addr
        self.sock_list = collections.deque()
        self.send_list = collections.deque()
    def __str__(self):
        return "SAP {0:>2}".format(self.addr)
    @property
    def mode(self):
        # Socket type of this SAP, derived from the first bound socket;
        # 0 when no socket is bound.
        with self.llc.lock:
            try:
                if isinstance(self.sock_list[0], RawAccessPoint):
                    return RAW_ACCESS_POINT
                if isinstance(self.sock_list[0], LogicalDataLink):
                    return LOGICAL_DATA_LINK
                if isinstance(self.sock_list[0], DataLinkConnection):
                    return DATA_LINK_CONNECTION
            except IndexError: return 0
    def insert_socket(self, socket):
        # Bind another socket to this address; all sockets on one SAP must
        # be of the same type. Returns True if the socket was inserted.
        with self.llc.lock:
            try: insertable = type(socket) == type(self.sock_list[0])
            except IndexError: insertable = True
            if insertable:
                socket.bind(self.addr)
                self.sock_list.appendleft(socket)
            else: log.error("can't insert socket of different type")
            return insertable
    def remove_socket(self, socket):
        # Close and detach a socket; drop the whole SAP when it was the last.
        assert socket.addr == self.addr
        socket.close()
        with self.llc.lock:
            try: self.sock_list.remove(socket)
            except ValueError: pass
            if len(self.sock_list) == 0:
                # completely remove this sap
                self.llc.sap[self.addr] = None
    def send(self, pdu):
        # Queue a SAP-level (not socket-level) PDU for transmission.
        self.send_list.append(pdu)
    def shutdown(self):
        # Unbind and close every socket on this SAP.
        while True:
            try: socket = self.sock_list.pop()
            except IndexError: return
            log.debug("shutdown socket %s" % str(socket))
            socket.bind(None); socket.close()
    #
    # enqueue() and dequeue() are called from llc run thread
    #
    def enqueue(self, pdu):
        # Deliver an inbound PDU to the matching socket; Connect PDUs go to
        # a listening socket, others are matched by peer address.
        with self.llc.lock:
            if isinstance(pdu, Connect):
                for socket in self.sock_list:
                    if socket.state.LISTEN:
                        socket.enqueue(pdu)
                        return
            else:
                for socket in self.sock_list:
                    if pdu.ssap == socket.peer or socket.peer is None:
                        socket.enqueue(pdu)
                        return
            # no taker: reject connection-mode PDUs with DM(reason=1)
            if pdu.type in connection_mode_pdu_types:
                self.send(DisconnectedMode(pdu.ssap, pdu.dsap, reason=1))
    def dequeue(self, max_size):
        # Fetch the next outbound PDU (socket queues first, then the
        # SAP-level send_list) that fits into max_size.
        with self.llc.lock:
            for socket in self.sock_list:
                #print "dequeue from", socket
                pdu = socket.dequeue(max_size)
                if pdu: return pdu
            else:
                try: return self.send_list.popleft()
                except IndexError: pass
    def sendack(self, max_size):
        # Give connection-mode sockets a chance to emit an acknowledgement.
        with self.llc.lock:
            for socket in self.sock_list:
                pdu = socket.sendack(max_size)
                if pdu: return pdu
class ServiceDiscovery(object):
    """The service discovery SAP (address 1): resolves service names to
    remote SAP addresses via SNL (service name lookup) PDU exchange and
    answers the peer's lookup requests."""
    def __init__(self, llc):
        self.llc = llc
        self.snl = dict()       # resolved name -> remote SAP address
        self.tids = range(256)  # pool of free transaction ids
        self.resp = threading.Condition(self.llc.lock)
        self.sent = dict()      # in-flight tid -> requested name
        self.sdreq = collections.deque()  # pending outbound requests
        self.sdres = collections.deque()  # pending outbound responses
        self.dmpdu = collections.deque()  # pending DM PDUs (connect-by-name failures)
    def __str__(self):
        return "SAP  1"
    @property
    def mode(self):
        return LOGICAL_DATA_LINK
    def resolve(self, name):
        # Blocking resolve: queue a lookup request and wait on the condition
        # until the answer arrives (or the link shuts down -> snl is None).
        with self.resp:
            if self.snl is None: return None
            log.debug("resolve service name '{0}'".format(name))
            try: return self.snl[name]
            except KeyError: pass
            tid = random.choice(self.tids)
            self.tids.remove(tid)
            self.sdreq.append((tid, name))
            while not self.snl is None and not name in self.snl:
                self.resp.wait()
            return None if self.snl is None else self.snl[name]
    #
    # enqueue() and dequeue() are called from llc run thread
    #
    def enqueue(self, pdu):
        # Process an inbound SNL PDU: record answers to our requests (waking
        # resolvers) and queue answers for the peer's requests.
        with self.llc.lock:
            if isinstance(pdu, ServiceNameLookup) and not self.snl is None:
                for tid, sap in pdu.sdres:
                    try: name = self.sent[tid]
                    except KeyError: pass
                    else:
                        log.debug("resolved '{0}' to remote addr {1}"
                                  .format(name, sap))
                        self.snl[name] = sap
                        self.tids.append(tid)
                        self.resp.notify_all()
                for tid, name in pdu.sdreq:
                    try: sap = self.llc.snl[name]
                    except KeyError: sap = 0
                    self.sdres.append((tid, sap))
    def dequeue(self, max_size):
        # Build the next outbound SNL PDU (responses first, then as many
        # requests as fit); otherwise emit a queued DM PDU if one fits.
        if max_size < 2:
            return None
        with self.llc.lock:
            if len(self.sdres) > 0 or len(self.sdreq) > 0:
                pdu = ServiceNameLookup(dsap=1, ssap=1)
                max_size -= len(pdu)
                while max_size > 0:
                    try: pdu.sdres.append(self.sdres.popleft())
                    except IndexError: break
                for i in range(len(self.sdreq)):
                    tid, name = self.sdreq[0]
                    if 1 + len(name) > max_size:
                        self.sdreq.rotate(-1)
                    else:
                        pdu.sdreq.append(self.sdreq.popleft())
                        self.sent[tid] = name
                return pdu
            if len(self.dmpdu) > 0 and max_size >= 2:
                return self.dmpdu.popleft()
    def shutdown(self):
        # Invalidate the name cache and release every blocked resolve().
        with self.llc.lock:
            self.snl = None
            self.resp.notify_all()
class LogicalLinkController(object):
    def __init__(self, recv_miu=248, send_lto=500, send_agf=True,
                 symm_log=True):
        """Set up link configuration and the fixed SAPs: address 0 is the
        link management SAP, address 1 is service discovery. recv_miu is
        the local maximum information unit, send_lto the link timeout (ms),
        send_agf enables PDU aggregation, symm_log controls SYMM logging."""
        self.lock = threading.RLock()
        self.cfg = dict()
        self.cfg['recv-miu'] = recv_miu
        self.cfg['send-lto'] = send_lto
        self.cfg['send-agf'] = send_agf
        self.cfg['symm-log'] = symm_log
        # sdp is always registered at the well-known address 1
        self.snl = dict({"urn:nfc:sn:sdp" : 1})
        self.sap = 64 * [None]
        self.sap[0] = ServiceAccessPoint(0, self)
        self.sap[1] = ServiceDiscovery(self)
    def __str__(self):
        local = "Local(MIU={miu}, LTO={lto}ms)".format(
            miu=self.cfg.get('recv-miu'), lto=self.cfg.get('send-lto'))
        remote = "Remote(MIU={miu}, LTO={lto}ms)".format(
            miu=self.cfg.get('send-miu'), lto=self.cfg.get('recv-lto'))
        return "LLC: {local} {remote}".format(local=local, remote=remote)
    def activate(self, mac):
        """Activate the LLCP link over an NFC-DEP Initiator or Target.
        Exchanges the 'Ffm' magic + PAX parameters in the general bytes,
        stores the remote parameters in self.cfg and selects the matching
        run loop. Returns True when the link came up."""
        assert type(mac) in (nfc.dep.Initiator, nfc.dep.Target)
        self.mac = None
        miu = self.cfg['recv-miu']
        lto = self.cfg['send-lto']
        # wks bitmap: bit 0 plus one bit per local well-known service < 15
        wks = 1+sum(sorted([1<<sap for sap in self.snl.values() if sap < 15]))
        pax = ParameterExchange(version=(1,1), miu=miu, lto=lto, wks=wks)
        if type(mac) == nfc.dep.Initiator:
            gb = mac.activate(gbi='Ffm'+pax.to_string()[2:])
            self.run = self.run_as_initiator
            role = "Initiator"
        if type(mac) == nfc.dep.Target:
            gb = mac.activate(gbt='Ffm'+pax.to_string()[2:], wt=9)
            self.run = self.run_as_target
            role = "Target"
        # peer's general bytes must carry the LLCP magic and a PAX payload
        if gb is not None and gb.startswith('Ffm') and len(gb) >= 6:
            info = ["LLCP Link established as NFC-DEP {0}".format(role)]
            info.append("Local LLCP Settings")
            info.append("  LLCP Version: {0[0]}.{0[1]}".format(pax.version))
            info.append("  Link Timeout: {0} ms".format(pax.lto))
            info.append("  Max Inf Unit: {0} octet".format(pax.miu))
            info.append("  Service List: {0:016b}".format(pax.wks))
            pax = ProtocolDataUnit.from_string("\x00\x40" + str(gb[3:]))
            info.append("Remote LLCP Settings")
            info.append("  LLCP Version: {0[0]}.{0[1]}".format(pax.version))
            info.append("  Link Timeout: {0} ms".format(pax.lto))
            info.append("  Max Inf Unit: {0} octet".format(pax.miu))
            info.append("  Service List: {0:016b}".format(pax.wks))
            log.info('\n'.join(info))
            self.cfg['rcvd-ver'] = pax.version
            self.cfg['send-miu'] = pax.miu
            self.cfg['recv-lto'] = pax.lto
            self.cfg['send-wks'] = pax.wks
            self.cfg['send-lsc'] = pax.lsc
            log.debug("llc cfg {0}".format(self.cfg))
            if type(mac) == nfc.dep.Initiator and mac.rwt is not None:
                # NFC-DEP response waiting time sanity check (WT=10 bound)
                max_rwt = 4096/13.56E6 * 2**10
                if mac.rwt > max_rwt:
                    log.warning("NFC-DEP RWT {0:.3f} exceeds max {1:.3f} sec"
                                .format(mac.rwt, max_rwt))
            self.mac = mac
        return bool(self.mac)
    def terminate(self, reason):
        """Bring the link down; on "local choice" a DISC(0,0) is sent first.
        All local SAPs are shut down in descending address order."""
        log.debug("llcp link termination caused by {0}".format(reason))
        if reason == "local choice":
            self.exchange(Disconnect(0, 0), timeout=0.1)
            self.mac.deactivate()
        elif reason == "remote choice":
            self.mac.deactivate()
        # shutdown local services
        for i in range(63, -1, -1):
            if not self.sap[i] is None:
                log.debug("closing service access point %d" % i)
                self.sap[i].shutdown()
                self.sap[i] = None
    def exchange(self, pdu, timeout):
        """One MAC round trip: serialize and send pdu (None sends nothing),
        return the decoded response PDU or None on timeout/protocol error.
        SYMM PDUs are only logged when 'symm-log' is enabled."""
        if not isinstance(pdu, Symmetry) or self.cfg.get('symm-log') is True:
            log.debug("SEND {0}".format(pdu))
        data = pdu.to_string() if pdu else None
        try:
            data = self.mac.exchange(data, timeout)
            if data is None: return None
        except nfc.clf.DigitalProtocolError as error:
            log.debug("{0!r}".format(error))
            return None
        pdu = ProtocolDataUnit.from_string(data)
        if not isinstance(pdu, Symmetry) or self.cfg.get('symm-log') is True:
            log.debug("RECV {0}".format(pdu))
        return pdu
    def run_as_initiator(self, terminate=lambda: False):
        """Main link loop when we are the NFC-DEP Initiator: we send first,
        substituting SYMM when nothing is queued. After 10 consecutive SYMM
        exchanges the collect delay is raised to back off an idle link.
        'terminate' is polled each round to request a local shutdown."""
        recv_timeout = 1E-3 * (self.cfg['recv-lto'] + 10)
        symm = 0
        try:
            pdu = self.collect(delay=0.01)
            while not terminate():
                if pdu is None: pdu = Symmetry()
                pdu = self.exchange(pdu, recv_timeout)
                if pdu is None:
                    return self.terminate(reason="link disruption")
                if pdu == Disconnect(0, 0):
                    return self.terminate(reason="remote choice")
                symm = symm + 1 if type(pdu) == Symmetry else 0
                self.dispatch(pdu)
                pdu = self.collect(delay=0.001)
                if pdu is None and symm >= 10:
                    pdu = self.collect(delay=0.05)
            else:
                # while-else: terminate() returned True -> orderly shutdown
                self.terminate(reason="local choice")
        except KeyboardInterrupt:
            print # move to new line (Python 2 print statement)
            self.terminate(reason="local choice")
            raise KeyboardInterrupt
        except IOError:
            self.terminate(reason="input/output error")
            raise SystemExit
        finally:
            log.debug("llc run loop terminated on initiator")
    def run_as_target(self, terminate=lambda: False):
        """Main link loop when we are the NFC-DEP Target: we answer the
        initiator, starting with an empty (receive-only) exchange; otherwise
        mirrors run_as_initiator including the SYMM idle backoff."""
        recv_timeout = 1E-3 * (self.cfg['recv-lto'] + 10)
        symm = 0
        try:
            pdu = None
            while not terminate():
                pdu = self.exchange(pdu, recv_timeout)
                if pdu is None:
                    return self.terminate(reason="link disruption")
                if pdu == Disconnect(0, 0):
                    return self.terminate(reason="remote choice")
                symm = symm + 1 if type(pdu) == Symmetry else 0
                self.dispatch(pdu)
                pdu = self.collect(delay=0.001)
                if pdu is None and symm >= 10:
                    pdu = self.collect(delay=0.05)
                if pdu is None: pdu = Symmetry()
            else:
                # while-else: terminate() returned True -> orderly shutdown
                self.terminate(reason="local choice")
        except KeyboardInterrupt:
            print # move to new line (Python 2 print statement)
            self.terminate(reason="local choice")
            raise KeyboardInterrupt
        except IOError:
            self.terminate(reason="input/output error")
            raise SystemExit
        finally:
            log.debug("llc run loop terminated on target")
    def collect(self, delay=None):
        """Gather the next outbound PDU(s) from all active SAPs, optionally
        sleeping 'delay' seconds first. When 'send-agf' is enabled, multiple
        PDUs that fit within the remote MIU are wrapped in an AggregatedFrame;
        a second pass lets data link connections append acknowledgements.
        Returns a single PDU, an AggregatedFrame, or None when idle."""
        if delay: time.sleep(delay)
        pdu_list = list()
        max_data = None
        with self.lock:
            active_sap_list = [sap for sap in self.sap if sap is not None]
            for sap in active_sap_list:
                #log.debug("query sap {0}, max_data={1}"
                #          .format(sap, max_data))
                pdu = sap.dequeue(max_data if max_data else 2179)
                if pdu is not None:
                    if self.cfg['send-agf'] == False:
                        return pdu
                    pdu_list.append(pdu)
                    if max_data is None:
                        max_data = self.cfg["send-miu"] + 2
                    max_data -= len(pdu)
                    # stop when no further PDU (header + minimum) can fit
                    if max_data < bool(len(pdu_list)==1) * 2 + 2 + 2:
                        break
            else: max_data = self.cfg["send-miu"] + 2
            for sap in active_sap_list:
                if sap.mode == DATA_LINK_CONNECTION:
                    pdu = sap.sendack(max_data)
                    if not pdu is None:
                        if self.cfg['send-agf'] == False:
                            return pdu
                        pdu_list.append(pdu)
                        max_data -= len(pdu)
                        if max_data < bool(len(pdu_list)==1) * 2 + 2 + 3:
                            break
        if len(pdu_list) > 1:
            return AggregatedFrame(aggregate=pdu_list)
        if len(pdu_list) == 1:
            return pdu_list[0]
        return None
    def dispatch(self, pdu):
        """Route an inbound PDU: SYMM is ignored, AGF (on the link SAP) is
        unpacked recursively, CONNECT to SAP 1 is resolved by service name,
        everything else is handed to the destination SAP or discarded."""
        if isinstance(pdu, Symmetry):
            return
        if isinstance(pdu, AggregatedFrame):
            if pdu.dsap == 0 and pdu.ssap == 0:
                [log.debug("  " + str(p)) for p in pdu]
                [self.dispatch(p) for p in pdu]
            return
        if isinstance(pdu, Connect) and pdu.dsap == 1:
            # connect-by-name
            addr = self.snl.get(pdu.sn)
            if not addr or self.sap[addr] is None:
                log.debug("no service named '{0}'".format(pdu.sn))
                pdu = DisconnectedMode(pdu.ssap, 1, reason=2)
                self.sap[1].dmpdu.append(pdu)
                return
            pdu = Connect(dsap=addr, ssap=pdu.ssap, rw=pdu.rw, miu=pdu.miu)
        with self.lock:
            sap = self.sap[pdu.dsap]
            if sap:
                sap.enqueue(pdu)
                return
        log.debug("discard PDU {0}".format(str(pdu)))
        return
    def resolve(self, name):
        """Resolve a service name to the peer's SAP address via SAP 1
        (blocks until answered or the link goes down)."""
        return self.sap[1].resolve(name)
    def socket(self, socket_type):
        """Create an unbound socket of the given type (RAW_ACCESS_POINT,
        LOGICAL_DATA_LINK or DATA_LINK_CONNECTION); None for other values."""
        if socket_type == RAW_ACCESS_POINT:
            return RawAccessPoint(recv_miu=self.cfg["recv-miu"])
        if socket_type == LOGICAL_DATA_LINK:
            return LogicalDataLink(recv_miu=self.cfg["recv-miu"])
        if socket_type == DATA_LINK_CONNECTION:
            return DataLinkConnection(recv_miu=128, recv_win=1)
    def setsockopt(self, socket, option, value):
        """Set a socket option; SO_RCVMIU is capped at the link's local MIU.
        Returns the effective value. Raises Error(ENOTSOCK) for non-sockets."""
        if not isinstance(socket, TransmissionControlObject):
            raise Error(errno.ENOTSOCK)
        if option == SO_RCVMIU:
            value = min(value, self.cfg['recv-miu'])
        socket.setsockopt(option, value)
        return socket.getsockopt(option)
    def getsockopt(self, socket, option):
        """Read a socket option. Raises Error(ENOTSOCK) for non-sockets."""
        if not isinstance(socket, TransmissionControlObject):
            raise Error(errno.ENOTSOCK)
        if isinstance(socket, LogicalDataLink):
            # FIXME: set socket send miu when activated
            socket.send_miu = self.cfg['send-miu']
        if isinstance(socket, RawAccessPoint):
            # FIXME: set socket send miu when activated
            socket.send_miu = self.cfg['send-miu']
        return socket.getsockopt(option)
    def bind(self, socket, addr_or_name=None):
        """Bind a socket to a SAP address: None picks a free address in
        32..63, an int binds that specific address (must be 32..63), a
        string binds by service name (well-known or 16..31). Raises Error
        with the matching errno on any failure."""
        if not isinstance(socket, TransmissionControlObject):
            raise Error(errno.ENOTSOCK)
        if not socket.addr is None:
            raise Error(errno.EINVAL)
        if addr_or_name is None:
            self._bind_by_none(socket)
        elif type(addr_or_name) is IntType:
            self._bind_by_addr(socket, addr_or_name)
        elif type(addr_or_name) is StringType:
            self._bind_by_name(socket, addr_or_name)
        else: raise Error(errno.EFAULT)
    def _bind_by_none(self, socket):
        # grab the first free dynamic address (32..63)
        with self.lock:
            try: addr = 32 + self.sap[32:64].index(None)
            except ValueError: raise Error(errno.EAGAIN)
            else:
                socket.bind(addr)
                self.sap[addr] = ServiceAccessPoint(addr, self)
                self.sap[addr].insert_socket(socket)
    def _bind_by_addr(self, socket, addr):
        # explicit bind: only the dynamic range 32..63 is permitted
        with self.lock:
            if addr in range(32, 64):
                if self.sap[addr] is None:
                    socket.bind(addr)
                    self.sap[addr] = ServiceAccessPoint(addr, self)
                    self.sap[addr].insert_socket(socket)
                else: raise Error(errno.EADDRINUSE)
            else: raise Error(errno.EACCES)
    def _bind_by_name(self, socket, name):
        # bind by service name: well-known names get their fixed address,
        # other valid names get a free address from 16..31; the name is
        # registered in self.snl so the peer can resolve it
        if not (name.startswith("urn:nfc:sn") or
                name.startswith("urn:nfc:xsn") or
                name == "com.android.npp"): # invalid name but legacy
            raise Error(errno.EFAULT)
        with self.lock:
            if self.snl.get(name) != None:
                raise Error(errno.EADDRINUSE)
            addr = wks_map.get(name)
            if addr is None:
                try: addr = 16 + self.sap[16:32].index(None)
                except ValueError: raise Error(errno.EADDRNOTAVAIL)
            socket.bind(addr)
            self.sap[addr] = ServiceAccessPoint(addr, self)
            self.sap[addr].insert_socket(socket)
            self.snl[name] = addr
def connect(self, socket, dest):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not socket.is_bound:
self.bind(socket)
socket.connect(dest)
log.debug("connected ({0} ===> {1})".format(socket.addr, socket.peer))
def listen(self, socket, backlog):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not isinstance(socket, DataLinkConnection):
raise Error(errno.EOPNOTSUPP)
if not type(backlog) == IntType:
raise TypeError("backlog must be integer")
if backlog < 0:
raise ValueError("backlog mmust not be negative")
backlog = min(backlog, 16)
if not socket.is_bound:
self.bind(socket)
socket.listen(backlog)
def accept(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not isinstance(socket, DataLinkConnection):
raise Error(errno.EOPNOTSUPP)
while True:
client = socket.accept()
if not client.is_bound:
self.bind(client)
if self.sap[client.addr].insert_socket(client):
log.debug("new data link connection ({0} <=== {1})"
.format(client.addr, client.peer))
return client
else:
pdu = DisconnectedMode(client.peer, socket.addr, reason=0x20)
super(DataLinkConnection, socket).send(pdu)
def send(self, socket, message):
return self.sendto(socket, message, socket.peer)
def sendto(self, socket, message, dest):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if isinstance(socket, RawAccessPoint):
if not isinstance(message, ProtocolDataUnit):
raise TypeError("message must be a pdu on raw access point")
if not socket.is_bound:
self.bind(socket)
# FIXME: set socket send miu when activated
socket.send_miu = self.cfg['send-miu']
return socket.send(message)
if not type(message) == StringType:
raise TypeError("sendto() argument *message* must be a string")
if isinstance(socket, LogicalDataLink):
if dest is None:
raise Error(errno.EDESTADDRREQ)
if not socket.is_bound:
self.bind(socket)
# FIXME: set socket send miu when activated
socket.send_miu = self.cfg['send-miu']
return socket.sendto(message, dest)
if isinstance(socket, DataLinkConnection):
return socket.send(message)
def recv(self, socket):
message, sender = self.recvfrom(socket)
return message
def recvfrom(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not (socket.addr and self.sap[socket.addr]):
raise Error(errno.EBADF)
if isinstance(socket, RawAccessPoint):
return (socket.recv(), None)
if isinstance(socket, LogicalDataLink):
return socket.recvfrom()
if isinstance(socket, DataLinkConnection):
return (socket.recv(), socket.peer)
def poll(self, socket, event, timeout=None):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not (socket.addr and self.sap[socket.addr]):
raise Error(errno.EBADF)
return socket.poll(event, timeout)
def close(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if socket.is_bound:
self.sap[socket.addr].remove_socket(socket)
else: socket.close()
def getsockname(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
return socket.addr
def getpeername(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
return socket.peer
|
Python
|
MIT
|
javgh/bitpay-brick/src/nfc/llcp/llc.py
|
d4f47474-1705-402f-9d00-66b17aa2c5b0
|
[{"tag": "NAME", "value": "Stephen Tiedemann", "start": 128, "end": 145, "context": "---------------------------\n# Copyright 2009-2011 Stephen Tiedemann <stephen.tiedemann@googlemail.com>\n#\n# Licensed u"}, {"tag": "EMAIL", "value": "stephen.tiedemann@googlemail.com", "start": 147, "end": 179, "context": "--------\n# Copyright 2009-2011 Stephen Tiedemann <stephen.tiedemann@googlemail.com>\n#\n# Licensed under the EUPL, Version 1.1 or - as"}]
|
[{"tag": "NAME", "value": "Stephen Tiedemann", "start": 128, "end": 145, "context": "---------------------------\n# Copyright 2009-2011 Stephen Tiedemann <stephen.tiedemann@googlemail.com>\n#\n# Licensed u"}, {"tag": "EMAIL", "value": "stephen.tiedemann@googlemail.com", "start": 147, "end": 179, "context": "--------\n# Copyright 2009-2011 Stephen Tiedemann <stephen.tiedemann@googlemail.com>\n#\n# Licensed under the EUPL, Version 1.1 or - as"}]
|
cask 'unity-ios-support-for-editor@2019.3.8f1' do
version '2019.3.8f1,4ba98e9386ed'
sha256 :no_check
url "https://download.unity3d.com/download_unity/4ba98e9386ed/MacEditorTargetInstaller/UnitySetup-iOS-Support-for-Editor-2019.3.8f1.pkg"
name 'iOS Build Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-iOS-Support-for-Editor-2019.3.8f1.pkg'
depends_on cask: 'unity@2019.3.8f1'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-2019.3.8f1"
FileUtils.move "/Applications/Unity-2019.3.8f1", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-2019.3.8f1"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-2019.3.8f1/PlaybackEngines/iOSSupport'
end
|
Ruby
|
MIT
|
Larusso/homebrew-unityversions/Casks/unity-ios-support-for-editor@2019.3.8f1.rb
|
a4bb7db8-b8c3-4f86-98a3-e7fa64559386
|
[]
|
[]
|
# typed: false
# frozen_string_literal: true
# This file was generated by GoReleaser. DO NOT EDIT.
class KubectlReap < Formula
desc "kubectl plugin that deletes unused Kubernetes resources"
homepage "https://github.com/micnncim/kubectl-reap"
version "0.11.3"
bottle :unneeded
if OS.mac?
url "https://github.com/micnncim/kubectl-reap/releases/download/v0.11.3/kubectl-reap_0.11.3_darwin_amd64.tar.gz"
sha256 "53c2074e2dcab8c4d513013de9bd9746f4c4504c4bf07fb4956607de4766ed20"
end
if OS.linux? && Hardware::CPU.intel?
url "https://github.com/micnncim/kubectl-reap/releases/download/v0.11.3/kubectl-reap_0.11.3_linux_amd64.tar.gz"
sha256 "ceec75c07a030717f2658a77c459767b653c31b9f204d767633535f28af57295"
end
if OS.linux? && Hardware::CPU.arm? && Hardware::CPU.is_64_bit?
url "https://github.com/micnncim/kubectl-reap/releases/download/v0.11.3/kubectl-reap_0.11.3_linux_arm64.tar.gz"
sha256 "4af1337b93f2098eb5138349a6692b5a616470a7d2372890cd99cb3c30f7f532"
end
def install
bin.install "kubectl-reap"
end
end
|
Ruby
|
Apache-2.0
|
OrquestraDigital/kubectl-reap/Formula/kubectl-reap.rb
|
32599188-4751-48ef-9cfe-91bd5fdc8764
|
[]
|
[]
|
const path = require("path");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
module.exports = {
entry: {
index: path.resolve(__dirname, "./src/index.tsx")
},
output: {
path: path.resolve(__dirname, "./dist"),
filename: "./js/[name].js"
},
plugins: [
new HtmlWebpackPlugin({
filename: "index.html",
template: "src/index.html"
}),
new MiniCssExtractPlugin({
filename: "./css/style.css"
})
],
resolve: {
extensions: [
".ts", // for ts-loader
".tsx", // for ts-loader
".js",
".jsx"
]
},
module: {
rules: [
{
test: /\.tsx?$/,
use: "ts-loader"
},
{
test: /\.scss$/,
use: [MiniCssExtractPlugin.loader, "css-loader", "sass-loader"]
},
{
test: /\.(jpg|png|gif)$/,
use: {
loader: "file-loader",
options: {
name: "./images/[name].[ext]",
outputPath: "./",
publicPath: path => "." + path
}
}
},
{
test: /\.html$/,
use: [
{
loader: "html-loader",
options: { minimize: true }
}
]
}
]
},
devServer: {
contentBase: "./dist",
port: 8081,
inline: true,
host: "0.0.0.0"
}
};
|
JavaScript
|
MIT
|
yoshikawa/gaia/frontend/webpack.config.js
|
e9063990-d9de-4281-8d07-1d4ed72153f0
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 1381, "end": 1388, "context": "st\",\n port: 8081,\n inline: true,\n host: \"0.0.0.0\"\n }\n};\n"}]
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 1381, "end": 1388, "context": "st\",\n port: 8081,\n inline: true,\n host: \"0.0.0.0\"\n }\n};\n"}]
|
package compute_test
import (
"fmt"
"regexp"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance/check"
)
func TestAccLinuxVirtualMachineScaleSet_networkAcceleratedNetworking(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkAcceleratedNetworking(data, true),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkAcceleratedNetworkingUpdated(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkAcceleratedNetworking(data, false),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
Config: r.networkAcceleratedNetworking(data, true),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
Config: r.networkAcceleratedNetworking(data, false),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkApplicationGateway(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkApplicationGateway(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkApplicationSecurityGroup(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkApplicationSecurityGroup(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkApplicationSecurityGroupUpdate(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
// none
Config: r.networkPrivate(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// one
Config: r.networkApplicationSecurityGroup(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// another
Config: r.networkApplicationSecurityGroupUpdated(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// none
Config: r.networkPrivate(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkDNSServers(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkDNSServers(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
Config: r.networkDNSServersUpdated(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkIPForwarding(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
// enabled
Config: r.networkIPForwarding(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// disabled
Config: r.networkPrivate(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// enabled
Config: r.networkIPForwarding(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkIPv6(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkIPv6(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
ExpectError: regexp.MustCompile("Error expanding `network_interface`: An IPv6 Primary IP Configuration is unsupported - instead add a IPv4 IP Configuration as the Primary and make the IPv6 IP Configuration the secondary"),
},
})
}
func TestAccLinuxVirtualMachineScaleSet_networkLoadBalancer(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkLoadBalancer(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkMultipleIPConfigurations(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkMultipleIPConfigurations(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkMultipleIPConfigurationsIPv6(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkMultipleIPConfigurationsIPv6(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkMultipleNICs(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkMultipleNICs(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkMultipleNICsMultipleIPConfigurations(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkMultipleNICsMultipleIPConfigurations(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkMultipleNICsMultiplePublicIPs(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkMultipleNICsMultiplePublicIPs(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkMultipleNICsWithDifferentDNSServers(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkMultipleNICsWithDifferentDNSServers(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkNetworkSecurityGroup(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkNetworkSecurityGroup(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkNetworkSecurityGroupUpdate(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
// without
Config: r.networkPrivate(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// add one
Config: r.networkNetworkSecurityGroup(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// change it
Config: r.networkNetworkSecurityGroupUpdated(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
{
// remove it
Config: r.networkPrivate(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkPrivate(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkPrivate(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkPublicIP(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkPublicIP(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkPublicIPDomainNameLabel(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkPublicIPDomainNameLabel(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkPublicIPFromPrefix(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkPublicIPFromPrefix(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func TestAccLinuxVirtualMachineScaleSet_networkPublicIPTags(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_linux_virtual_machine_scale_set", "test")
r := LinuxVirtualMachineScaleSetResource{}
data.ResourceTest(t, r, []resource.TestStep{
{
Config: r.networkPublicIPTags(data),
Check: resource.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(
"admin_password",
),
})
}
func (r LinuxVirtualMachineScaleSetResource) networkAcceleratedNetworking(data acceptance.TestData, enabled bool) string {
return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F4"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
enable_accelerated_networking = %t
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.test.id
}
}
}
`, r.template(data), data.RandomInteger, enabled)
}
func (LinuxVirtualMachineScaleSetResource) networkApplicationGateway(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
}
resource "azurerm_resource_group" "test" {
name = "acctestRG-%d"
location = "%s"
}
resource "azurerm_virtual_network" "test" {
name = "acctest-vnet-%d"
resource_group_name = "${azurerm_resource_group.test.name}"
address_space = ["10.0.0.0/16"]
location = "${azurerm_resource_group.test.location}"
}
resource "azurerm_subnet" "test" {
name = "subnet-%d"
resource_group_name = "${azurerm_resource_group.test.name}"
virtual_network_name = "${azurerm_virtual_network.test.name}"
address_prefix = "10.0.0.0/24"
}
resource "azurerm_public_ip" "test" {
name = "acctest-pubip-%d"
location = "${azurerm_resource_group.test.location}"
resource_group_name = "${azurerm_resource_group.test.name}"
allocation_method = "Dynamic"
}
# since these variables are re-used - a locals block makes this more maintainable
locals {
backend_address_pool_name = "${azurerm_virtual_network.test.name}-beap"
frontend_port_name = "${azurerm_virtual_network.test.name}-feport"
frontend_ip_configuration_name = "${azurerm_virtual_network.test.name}-feip"
http_setting_name = "${azurerm_virtual_network.test.name}-be-htst"
listener_name = "${azurerm_virtual_network.test.name}-httplstn"
request_routing_rule_name = "${azurerm_virtual_network.test.name}-rqrt"
}
resource "azurerm_application_gateway" "test" {
name = "acctestag-%d"
resource_group_name = "${azurerm_resource_group.test.name}"
location = "${azurerm_resource_group.test.location}"
sku {
name = "Standard_Small"
tier = "Standard"
capacity = 2
}
gateway_ip_configuration {
name = "my-gateway-ip-configuration"
subnet_id = "${azurerm_subnet.test.id}"
}
frontend_port {
name = "${local.frontend_port_name}"
port = 80
}
frontend_ip_configuration {
name = "${local.frontend_ip_configuration_name}"
public_ip_address_id = "${azurerm_public_ip.test.id}"
}
backend_address_pool {
name = "${local.backend_address_pool_name}"
}
backend_http_settings {
name = "${local.http_setting_name}"
cookie_based_affinity = "Disabled"
port = 80
protocol = "Http"
request_timeout = 1
}
http_listener {
name = "${local.listener_name}"
frontend_ip_configuration_name = "${local.frontend_ip_configuration_name}"
frontend_port_name = "${local.frontend_port_name}"
protocol = "Http"
}
request_routing_rule {
name = "${local.request_routing_rule_name}"
rule_type = "Basic"
http_listener_name = "${local.listener_name}"
backend_address_pool_name = "${local.backend_address_pool_name}"
backend_http_settings_name = "${local.http_setting_name}"
}
}
resource "azurerm_subnet" "other" {
name = "other"
resource_group_name = "${azurerm_resource_group.test.name}"
virtual_network_name = "${azurerm_virtual_network.test.name}"
address_prefix = "10.0.1.0/24"
}
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.other.id
application_gateway_backend_address_pool_ids = [azurerm_application_gateway.test.backend_address_pool.0.id]
}
}
}
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
func (r LinuxVirtualMachineScaleSetResource) networkApplicationSecurityGroup(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_application_security_group" "test" {
name = "acctestasg-%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.test.id
application_security_group_ids = [azurerm_application_security_group.test.id]
}
}
}
`, r.template(data), data.RandomInteger, data.RandomInteger)
}
func (r LinuxVirtualMachineScaleSetResource) networkApplicationSecurityGroupUpdated(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_application_security_group" "test" {
name = "acctestasg-%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_application_security_group" "other" {
name = "acctestasg2-%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.test.id
application_security_group_ids = [
azurerm_application_security_group.test.id,
azurerm_application_security_group.other.id,
]
}
}
}
`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
func (r LinuxVirtualMachineScaleSetResource) networkDNSServers(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
dns_servers = ["8.8.8.8"]
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.test.id
}
}
}
`, r.template(data), data.RandomInteger)
}
func (r LinuxVirtualMachineScaleSetResource) networkDNSServersUpdated(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
dns_servers = ["1.1.1.1", "8.8.8.8"]
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.test.id
}
}
}
`, r.template(data), data.RandomInteger)
}
func (r LinuxVirtualMachineScaleSetResource) networkIPForwarding(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
enable_ip_forwarding = true
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.test.id
}
}
}
`, r.template(data), data.RandomInteger)
}
func (r LinuxVirtualMachineScaleSetResource) networkIPv6(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
ip_configuration {
name = "internal"
primary = true
version = "IPv6"
}
}
}
`, r.template(data), data.RandomInteger)
}
func (r LinuxVirtualMachineScaleSetResource) networkLoadBalancer(data acceptance.TestData) string {
return fmt.Sprintf(`
%s
resource "azurerm_public_ip" "test" {
name = "test-ip-%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
allocation_method = "Static"
}
resource "azurerm_lb" "test" {
name = "acctestlb-%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
frontend_ip_configuration {
name = "internal"
public_ip_address_id = azurerm_public_ip.test.id
}
}
resource "azurerm_lb_backend_address_pool" "test" {
name = "test"
resource_group_name = azurerm_resource_group.test.name
loadbalancer_id = azurerm_lb.test.id
}
resource "azurerm_lb_nat_pool" "test" {
name = "test"
resource_group_name = azurerm_resource_group.test.name
loadbalancer_id = azurerm_lb.test.id
frontend_ip_configuration_name = "internal"
protocol = "Tcp"
frontend_port_start = 80
frontend_port_end = 81
backend_port = 8080
}
resource "azurerm_linux_virtual_machine_scale_set" "test" {
name = "acctestvmss-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
sku = "Standard_F2"
instances = 1
admin_username = "adminuser"
admin_password = "P@ssword1234!"
disable_password_authentication = false
source_image_reference {
publisher = "Canonical"
offer = "UbuntuServer"
sku = "16.04-LTS"
version = "latest"
}
os_disk {
storage_account_type = "Standard_LRS"
caching = "ReadWrite"
}
network_interface {
name = "example"
primary = true
ip_configuration {
name = "internal"
primary = true
subnet_id = azurerm_subnet.test.id
load_balancer_backend_address_pool_ids = [azurerm_lb_backend_address_pool.test.id]
load_balancer_inbound_nat_rules_ids = [azurerm_lb_nat_pool.test.id]
}
}
}
`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
// networkMultipleIPConfigurations returns a test configuration with a single
// NIC carrying two IP configurations ("primary" marked primary, "secondary"
// not), both bound to the template's subnet.
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkMultipleIPConfigurations(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "internal"
    primary = true
    ip_configuration {
      name      = "primary"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
    ip_configuration {
      name      = "secondary"
      subnet_id = azurerm_subnet.test.id
    }
  }
}
`, r.template(data), data.RandomInteger)
}
// networkMultipleIPConfigurationsIPv6 returns a test configuration with one
// NIC carrying a primary IPv4 IP configuration (subnet-bound) plus a second
// IPv6 IP configuration (no subnet_id — IPv6 configurations here do not take
// one). Uses Standard_D2s_v3, unlike the Standard_F2 used by the other cases.
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkMultipleIPConfigurationsIPv6(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_D2s_v3"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "first"
      primary   = true
      subnet_id = azurerm_subnet.test.id
      version   = "IPv4"
    }
    ip_configuration {
      name    = "second"
      version = "IPv6"
    }
  }
}
`, r.template(data), data.RandomInteger)
}
// networkMultipleNICs returns a test configuration with two NICs ("primary"
// flagged primary, "secondary" not); each NIC has one subnet-bound IP
// configuration named "internal".
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkMultipleNICs(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "internal"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
  }
  network_interface {
    name = "secondary"
    ip_configuration {
      name      = "internal"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
  }
}
`, r.template(data), data.RandomInteger)
}
// networkMultipleNICsMultipleIPConfigurations returns a test configuration
// combining both multiplicities: two NICs, each carrying two subnet-bound IP
// configurations (one primary per NIC: "first" and "third").
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkMultipleNICsMultipleIPConfigurations(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "first"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
    ip_configuration {
      name      = "second"
      subnet_id = azurerm_subnet.test.id
    }
  }
  network_interface {
    name = "secondary"
    ip_configuration {
      name      = "third"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
    ip_configuration {
      name      = "fourth"
      subnet_id = azurerm_subnet.test.id
    }
  }
}
`, r.template(data), data.RandomInteger)
}
// networkMultipleNICsWithDifferentDNSServers returns a test configuration with
// two NICs that each set a distinct dns_servers list (8.8.8.8 on the primary
// NIC, 1.1.1.1 on the secondary), exercising per-NIC DNS configuration.
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkMultipleNICsWithDifferentDNSServers(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name        = "primary"
    primary     = true
    dns_servers = ["8.8.8.8"]
    ip_configuration {
      name      = "internal"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
  }
  network_interface {
    name        = "secondary"
    dns_servers = ["1.1.1.1"]
    ip_configuration {
      name      = "internal"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
  }
}
`, r.template(data), data.RandomInteger)
}
// networkMultipleNICsMultiplePublicIPs returns a test configuration with two
// NICs whose IP configurations each request an instance-level public IP with
// its own domain name label and a 4 minute idle timeout.
// Sprintf arguments: base template, then data.RandomInteger three times (scale
// set name and both domain name labels).
func (r LinuxVirtualMachineScaleSetResource) networkMultipleNICsMultiplePublicIPs(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "first"
      primary   = true
      subnet_id = azurerm_subnet.test.id
      public_ip_address {
        name                    = "first"
        domain_name_label       = "acctest1-%d"
        idle_timeout_in_minutes = 4
      }
    }
  }
  network_interface {
    name = "secondary"
    ip_configuration {
      name      = "second"
      primary   = true
      subnet_id = azurerm_subnet.test.id
      public_ip_address {
        name                    = "second"
        domain_name_label       = "acctest2-%d"
        idle_timeout_in_minutes = 4
      }
    }
  }
}
`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
// networkNetworkSecurityGroup returns a test configuration that attaches a
// Network Security Group to the scale set's only NIC via
// network_security_group_id.
// Sprintf arguments: base template, then data.RandomInteger twice (NSG name,
// scale set name).
//
// Fix: the NSG resource previously used deprecated 0.11-style
// interpolation-only expressions ("${...}"); switched to bare first-class
// references for consistency with every other resource in this file.
// The generated configuration is semantically identical.
func (r LinuxVirtualMachineScaleSetResource) networkNetworkSecurityGroup(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_network_security_group" "test" {
  name                = "acctestnsg-%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "example"
    primary = true
    network_security_group_id = azurerm_network_security_group.test.id
    ip_configuration {
      name      = "internal"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
  }
}
`, r.template(data), data.RandomInteger, data.RandomInteger)
}
// networkNetworkSecurityGroupUpdated returns the follow-up configuration for
// the NSG update test: both NSGs ("test" and "other") exist, but the NIC now
// points at azurerm_network_security_group.other, exercising an in-place NSG
// swap.
// Sprintf arguments: base template, then data.RandomInteger three times (both
// NSG names, scale set name).
//
// Fix: the NSG resources previously used deprecated 0.11-style
// interpolation-only expressions ("${...}"); switched to bare first-class
// references for consistency with every other resource in this file.
// The generated configuration is semantically identical.
func (r LinuxVirtualMachineScaleSetResource) networkNetworkSecurityGroupUpdated(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_network_security_group" "test" {
  name                = "acctestnsg-%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_network_security_group" "other" {
  name                = "acctestnsg2-%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "example"
    primary = true
    network_security_group_id = azurerm_network_security_group.other.id
    ip_configuration {
      name      = "internal"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
  }
}
`, r.template(data), data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
// networkPrivate returns the baseline private-networking test configuration:
// one NIC, one subnet-bound IP configuration, no public IPs or load balancing.
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkPrivate(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "example"
    primary = true
    ip_configuration {
      name      = "internal"
      primary   = true
      subnet_id = azurerm_subnet.test.id
    }
  }
}
`, r.template(data), data.RandomInteger)
}
// networkPublicIP returns a test configuration whose single IP configuration
// requests an instance-level public IP address with a 4 minute idle timeout
// (no domain name label).
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkPublicIP(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "first"
      primary   = true
      subnet_id = azurerm_subnet.test.id
      public_ip_address {
        name                    = "first"
        idle_timeout_in_minutes = 4
      }
    }
  }
}
`, r.template(data), data.RandomInteger)
}
// networkPublicIPDomainNameLabel returns a test configuration like
// networkPublicIP but with a domain_name_label set on the instance-level
// public IP.
// Sprintf arguments: base template, then data.RandomInteger twice (scale set
// name, domain name label).
func (r LinuxVirtualMachineScaleSetResource) networkPublicIPDomainNameLabel(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "first"
      primary   = true
      subnet_id = azurerm_subnet.test.id
      public_ip_address {
        name                    = "first"
        domain_name_label       = "acctestdnl-%d"
        idle_timeout_in_minutes = 4
      }
    }
  }
}
`, r.template(data), data.RandomInteger, data.RandomInteger)
}
// networkPublicIPFromPrefix returns a test configuration that allocates the
// instance-level public IP out of an azurerm_public_ip_prefix via
// public_ip_prefix_id.
// Sprintf arguments: base template, then data.RandomInteger twice (prefix
// name, scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkPublicIPFromPrefix(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_public_ip_prefix" "test" {
  name                = "acctestpublicipprefix-%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "first"
      primary   = true
      subnet_id = azurerm_subnet.test.id
      public_ip_address {
        name                = "first"
        public_ip_prefix_id = azurerm_public_ip_prefix.test.id
      }
    }
  }
}
`, r.template(data), data.RandomInteger, data.RandomInteger)
}
// networkPublicIPTags returns a test configuration whose instance-level public
// IP carries an ip_tag block (tag "/Sql", type "FirstPartyUsage").
// Sprintf arguments: base template, then data.RandomInteger (scale set name).
func (r LinuxVirtualMachineScaleSetResource) networkPublicIPTags(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_linux_virtual_machine_scale_set" "test" {
  name                = "acctestvmss-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "Standard_F2"
  instances           = 1
  admin_username      = "adminuser"
  admin_password      = "P@ssword1234!"
  disable_password_authentication = false
  source_image_reference {
    publisher = "Canonical"
    offer     = "UbuntuServer"
    sku       = "16.04-LTS"
    version   = "latest"
  }
  os_disk {
    storage_account_type = "Standard_LRS"
    caching              = "ReadWrite"
  }
  network_interface {
    name    = "primary"
    primary = true
    ip_configuration {
      name      = "first"
      primary   = true
      subnet_id = azurerm_subnet.test.id
      public_ip_address {
        name = "first"
        ip_tag {
          tag  = "/Sql"
          type = "FirstPartyUsage"
        }
      }
    }
  }
}
`, r.template(data), data.RandomInteger)
}
|
GO
|
MPL-2.0
|
0svald/terraform-provider-azurerm/azurerm/internal/services/compute/linux_virtual_machine_scale_set_network_resource_test.go
|
424ec2f3-1fd8-4e3f-9883-e04a84e349cf
|
[{"tag": "IP_ADDRESS", "value": "10.0.0.0/16", "start": 14710, "end": 14721, "context": "ource_group.test.name}\"\n address_space = [\"10.0.0.0/16\"]\n location = \"${azurerm_resource_gro"}, {"tag": "USERNAME", "value": "adminuser", "start": 27671, "end": 27680, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 35135, "end": 35148, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 41753, "end": 41762, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "10.0.0.0/24", "start": 15018, "end": 15029, "context": "ual_network.test.name}\"\n address_prefix = \"10.0.0.0/24\"\n}\n\nresource \"azurerm_public_ip\" \"test\" {\n name "}, {"tag": "USERNAME", "value": "adminuser", "start": 17980, "end": 17989, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 30151, "end": 30164, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 29021, "end": 29034, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 36869, "end": 36878, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 40659, "end": 40672, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 43165, "end": 
43174, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "1.1.1.1", "start": 23855, "end": 23862, "context": "ample\"\n primary = true\n dns_servers = [\"1.1.1.1\", \"8.8.8.8\"]\n\n ip_configuration {\n name "}, {"tag": "USERNAME", "value": "adminuser", "start": 31224, "end": 31233, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 23425, "end": 23434, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 30115, "end": 30124, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "8.8.8.8", "start": 34255, "end": 34262, "context": "imary\"\n primary = true\n dns_servers = [\"8.8.8.8\"]\n\n ip_configuration {\n name = \"inte"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 32466, "end": 32479, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 33825, "end": 33834, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 31260, "end": 31273, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 24519, "end": 24532, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 13649, "end": 13662, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = 
false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 35099, "end": 35108, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 39618, "end": 39627, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 44363, "end": 44376, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 24483, "end": 24492, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 18016, "end": 18029, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 41789, "end": 41802, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 32430, "end": 32439, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 25542, "end": 25551, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 25578, "end": 25591, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 43201, "end": 43214, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 28985, "end": 
28994, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 38466, "end": 38475, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 38502, "end": 38515, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 44327, "end": 44336, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "8.8.8.8", "start": 23866, "end": 23873, "context": "primary = true\n dns_servers = [\"1.1.1.1\", \"8.8.8.8\"]\n\n ip_configuration {\n name = \"inte"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 23461, "end": 23474, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 27707, "end": 27720, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 19543, "end": 19552, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "8.8.8.8", "start": 22803, "end": 22810, "context": "ample\"\n primary = true\n dns_servers = [\"8.8.8.8\"]\n\n ip_configuration {\n name = \"inte"}, {"tag": "IP_ADDRESS", "value": "10.0.1.0/24", "start": 17656, "end": 17667, "context": "ual_network.test.name}\"\n address_prefix = \"10.0.1.0/24\"\n}\n\nresource \"azurerm_linux_virtual_machine_scale"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 19579, "end": 19592, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n 
disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 40623, "end": 40632, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 13613, "end": 13622, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 22373, "end": 22382, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 22409, "end": 22422, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 33861, "end": 33874, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 39654, "end": 39667, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 36905, "end": 36918, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}]
|
[{"tag": "IP_ADDRESS", "value": "10.0.0.0/16", "start": 14710, "end": 14721, "context": "ource_group.test.name}\"\n address_space = [\"10.0.0.0/16\"]\n location = \"${azurerm_resource_gro"}, {"tag": "USERNAME", "value": "adminuser", "start": 27671, "end": 27680, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 35135, "end": 35148, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 41753, "end": 41762, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "10.0.0.0/24", "start": 15018, "end": 15029, "context": "ual_network.test.name}\"\n address_prefix = \"10.0.0.0/24\"\n}\n\nresource \"azurerm_public_ip\" \"test\" {\n name "}, {"tag": "USERNAME", "value": "adminuser", "start": 17980, "end": 17989, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 30151, "end": 30164, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 29021, "end": 29034, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 36869, "end": 36878, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 40659, "end": 40672, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 43165, "end": 
43174, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "1.1.1.1", "start": 23855, "end": 23862, "context": "ample\"\n primary = true\n dns_servers = [\"1.1.1.1\", \"8.8.8.8\"]\n\n ip_configuration {\n name "}, {"tag": "USERNAME", "value": "adminuser", "start": 31224, "end": 31233, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 23425, "end": 23434, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 30115, "end": 30124, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "8.8.8.8", "start": 34255, "end": 34262, "context": "imary\"\n primary = true\n dns_servers = [\"8.8.8.8\"]\n\n ip_configuration {\n name = \"inte"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 32466, "end": 32479, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 33825, "end": 33834, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 31260, "end": 31273, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 24519, "end": 24532, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 13649, "end": 13662, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = 
false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 35099, "end": 35108, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 39618, "end": 39627, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 44363, "end": 44376, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 24483, "end": 24492, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 18016, "end": 18029, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 41789, "end": 41802, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 32430, "end": 32439, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 25542, "end": 25551, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 25578, "end": 25591, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 43201, "end": 43214, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 28985, "end": 
28994, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 38466, "end": 38475, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 38502, "end": 38515, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 44327, "end": 44336, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "8.8.8.8", "start": 23866, "end": 23873, "context": "primary = true\n dns_servers = [\"1.1.1.1\", \"8.8.8.8\"]\n\n ip_configuration {\n name = \"inte"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 23461, "end": 23474, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 27707, "end": 27720, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 19543, "end": 19552, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "IP_ADDRESS", "value": "8.8.8.8", "start": 22803, "end": 22810, "context": "ample\"\n primary = true\n dns_servers = [\"8.8.8.8\"]\n\n ip_configuration {\n name = \"inte"}, {"tag": "IP_ADDRESS", "value": "10.0.1.0/24", "start": 17656, "end": 17667, "context": "ual_network.test.name}\"\n address_prefix = \"10.0.1.0/24\"\n}\n\nresource \"azurerm_linux_virtual_machine_scale"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 19579, "end": 19592, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n 
disable_password_authentication = false\n\n so"}, {"tag": "USERNAME", "value": "adminuser", "start": 40623, "end": 40632, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 13613, "end": 13622, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "USERNAME", "value": "adminuser", "start": 22373, "end": 22382, "context": " instances = 1\n admin_username = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disab"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 22409, "end": 22422, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 33861, "end": 33874, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 39654, "end": 39667, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}, {"tag": "PASSWORD", "value": "P@ssword1234!", "start": 36905, "end": 36918, "context": "rname = \"adminuser\"\n admin_password = \"P@ssword1234!\"\n\n disable_password_authentication = false\n\n so"}]
|
/*
* Copyright (C) 2015 - 2016 VREM Software Development <VREMSoftwareDevelopment@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.vrem.wifianalyzer.wifi.graph.channel;
import android.content.Context;
import android.content.res.Resources;
import android.support.v4.util.Pair;
import android.view.View;
import com.jjoe64.graphview.GraphView;
import com.vrem.wifianalyzer.BuildConfig;
import com.vrem.wifianalyzer.Configuration;
import com.vrem.wifianalyzer.RobolectricUtil;
import com.vrem.wifianalyzer.settings.Settings;
import com.vrem.wifianalyzer.wifi.band.WiFiBand;
import com.vrem.wifianalyzer.wifi.band.WiFiChannel;
import com.vrem.wifianalyzer.wifi.graph.tools.GraphLegend;
import com.vrem.wifianalyzer.wifi.graph.tools.GraphViewWrapper;
import com.vrem.wifianalyzer.wifi.model.SortBy;
import com.vrem.wifianalyzer.wifi.model.WiFiConnection;
import com.vrem.wifianalyzer.wifi.model.WiFiData;
import com.vrem.wifianalyzer.wifi.model.WiFiDetail;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.annotation.Config;
import java.util.ArrayList;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class)
public class ChannelGraphViewTest {
private Context context;
private Resources resources;
private Settings settings;
private Configuration configuration;
private GraphViewWrapper graphViewWrapper;
private ChannelGraphView fixture;
@Before
public void setUp() throws Exception {
RobolectricUtil.INSTANCE.getMainActivity();
graphViewWrapper = mock(GraphViewWrapper.class);
context = mock(Context.class);
resources = mock(Resources.class);
settings = mock(Settings.class);
configuration = mock(Configuration.class);
fixture = new ChannelGraphView(WiFiBand.GHZ2, new Pair<>(WiFiChannel.UNKNOWN, WiFiChannel.UNKNOWN));
fixture.setGraphViewWrapper(graphViewWrapper);
fixture.setContext(context);
fixture.setResources(resources);
fixture.setSettings(settings);
fixture.setConfiguration(configuration);
}
@Test
public void testUpdate() throws Exception {
// setup
WiFiData wiFiData = new WiFiData(new ArrayList<WiFiDetail>(), WiFiConnection.EMPTY, new ArrayList<String>());
withSettings();
// execute
fixture.update(wiFiData);
// validate
verify(graphViewWrapper).removeSeries(any(Set.class));
verify(graphViewWrapper).updateLegend(GraphLegend.RIGHT);
verify(graphViewWrapper).setVisibility(View.VISIBLE);
verifySettings();
}
private void verifySettings() {
verify(settings).getChannelGraphLegend();
verify(settings).getSortBy();
verify(settings).getWiFiBand();
}
private void withSettings() {
when(settings.getChannelGraphLegend()).thenReturn(GraphLegend.RIGHT);
when(settings.getSortBy()).thenReturn(SortBy.CHANNEL);
when(settings.getWiFiBand()).thenReturn(WiFiBand.GHZ2);
}
@Test
public void testGetGraphView() throws Exception {
// setup
GraphView expected = mock(GraphView.class);
when(graphViewWrapper.getGraphView()).thenReturn(expected);
// execute
GraphView actual = fixture.getGraphView();
// validate
assertEquals(expected, actual);
verify(graphViewWrapper).getGraphView();
}
}
|
Java
|
Apache-2.0
|
Telecooperation/darmstadt-wifi/WifiAnalyzer/app/src/test/java/com/vrem/wifianalyzer/wifi/graph/channel/ChannelGraphViewTest.java
|
5e408a8c-41e8-467a-b67f-fe5adbef4255
|
[]
|
[]
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
QGIS plugin 'Graphium'
/***************************************************************************
*
* Copyright 2020 Simon Gröchenig @ Salzburg Research
* eMail graphium@salzburgresearch.at
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
"""
from enum import Enum
class OsmHighwayTypes(Enum):
MOTORWAY = 'motorway'
MOTORWAY_LINK = 'motorway_link'
TRUNK = 'trunk'
TRUNK_LINK = 'trunk_link'
PRIMARY = 'primary'
PRIMARY_LINK = 'primary_link'
SECONDARY = 'secondary'
SECONDARY_LINK = 'secondary_link'
TERTIARY = 'tertiary'
TERTIARY_LINK = 'tertiary_link'
UNCLASSIFIED = 'unclassified'
RESIDENTIAL = 'residential'
LIVING_STREET = 'living_street'
SERVICE = 'service'
PEDESTRIAN = 'pedestrian'
TRACK = 'track'
BUS_GUIDEWAY = 'bus_guideway'
FOOTWAY = 'footway'
BRIDLEWAY = 'bridleway'
STEPS = 'steps'
CORRIDOR = 'dorridor'
PATH = 'path'
SIDEWALK = 'sidewalk'
CYCLEWAY = 'cycleway'
|
Python
|
Apache-2.0
|
graphium-project/graphium-qgis-plugin/graphium/graph_management/model/osm_highway_types.py
|
d1f8c72f-dcf9-4d4d-a9b4-9c600b44625d
|
[{"tag": "EMAIL", "value": "graphium@salzburgresearch.at", "start": 277, "end": 305, "context": " Simon Gr\u00f6chenig @ Salzburg Research\n * eMail graphium@salzburgresearch.at\n *\n * Licensed under the Apache License, Version "}, {"tag": "NAME", "value": "Simon Gr\u00f6chenig", "start": 228, "end": 243, "context": "****************************\n *\n * Copyright 2020 Simon Gr\u00f6chenig @ Salzburg Research\n * eMail graphium@salzbur"}]
|
[{"tag": "EMAIL", "value": "graphium@salzburgresearch.at", "start": 277, "end": 305, "context": " Simon Gr\u00f6chenig @ Salzburg Research\n * eMail graphium@salzburgresearch.at\n *\n * Licensed under the Apache License, Version "}, {"tag": "NAME", "value": "Simon Gr\u00f6chenig", "start": 228, "end": 243, "context": "****************************\n *\n * Copyright 2020 Simon Gr\u00f6chenig @ Salzburg Research\n * eMail graphium@salzbur"}]
|
# Microbes
A list of Microbes Analytes for use in interoperability.
----------------------------------------
## Aerobic Bacteria
* ULID: `018NY6XC00LMFPY3XH8NNXM9TH`
* Scientific Name: `aerobic bacteria`
* Common Names: `aerobic`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LMFPY3XH8NNXM9TH",
"scientific_name": "aerobic bacteria",
"common_names": [
"aerobic"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Bile Tolerant Gram Negative Bacteria
* ULID: `018NY6XC00LM638QCGB50ZKYKJ`
* Scientific Name: `bile tolerant gram negative bacteria`
* Common Names: `btgn`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LM638QCGB50ZKYKJ",
"scientific_name": "bile tolerant gram negative bacteria",
"common_names": [
"btgn"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Coliforms
* ULID: `018NY6XC00LMTMR8TN8WE86JVY`
* Scientific Name: `coliforms`
* Common Names: `coliform`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LMTMR8TN8WE86JVY",
"scientific_name": "coliforms",
"common_names": [
"coliform"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## E Coli
* ULID: `018NY6XC00LM7S8H2RT4K4GYME`
* Scientific Name: `e coli`
* Common Names: `e coli`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LM7S8H2RT4K4GYME",
"scientific_name": "e coli",
"common_names": [
"e coli"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Salmonella
* ULID: `018NY6XC00LMS96WE6KHKNP52T`
* Scientific Name: `salmonella`
* Common Names: `salmonella`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LMS96WE6KHKNP52T",
"scientific_name": "salmonella",
"common_names": [
"salmonella"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Yeast And Mold
* ULID: `018NY6XC00LMCPKZ3QB78GQXWP`
* Scientific Name: `yeast and mold`
* Common Names: `mold`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LMCPKZ3QB78GQXWP",
"scientific_name": "yeast and mold",
"common_names": [
"mold"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Aspergillus Fumigatus
* ULID: `018NY6XC00GAHZEZESTB1HSAZ4`
* Scientific Name: `aspergillus fumigatus`
* Common Names: `aspergillus fumigatus`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00GAHZEZESTB1HSAZ4",
"scientific_name": "aspergillus fumigatus",
"common_names": [
"aspergillus fumigatus"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Aspergillus Terreus
* ULID: `018NY6XC00J7N5A4KHT9EX6K50`
* Scientific Name: `aspergillus terreus`
* Common Names: `aspergillus terreus`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00J7N5A4KHT9EX6K50",
"scientific_name": "aspergillus terreus",
"common_names": [
"aspergillus terreus"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Aspergillus Flavus
* ULID: `018NY6XC00LM9CA3V07AK2GE9Q`
* Scientific Name: `aspergillus flavus`
* Common Names: `aspergillus flavus`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LM9CA3V07AK2GE9Q",
"scientific_name": "aspergillus flavus",
"common_names": [
"aspergillus flavus"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Plate Count
* ULID: `018NY6XC00LMX227M153SSW8N9`
* Scientific Name: `plate count`
* Common Names: `plate count`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00LMX227M153SSW8N9",
"scientific_name": "plate count",
"common_names": [
"plate count"
],
"category": "microbe",
"cas_rn": null
}
```
----------------------------------------
## Aspergillus Niger
* ULID: `018NY6XC00MZPYJQH1JN62HT7E`
* Scientific Name: `aspergillus niger`
* Common Names: `aspergillus niger`
* Category: `microbe`
* CAS RN: ``
```json
{
"ulid": "018NY6XC00MZPYJQH1JN62HT7E",
"scientific_name": "aspergillus niger",
"common_names": [
"aspergillus niger"
],
"category": "microbe",
"cas_rn": null
}
```
|
Markdown
|
MIT
|
conflabs/wcia-analytes/docs/Microbes.md
|
6860cc9f-3791-4826-821f-52a3d2a42663
|
[]
|
[]
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad.cpp
Label Definition File: CWE23_Relative_Path_Traversal.label.xml
Template File: sources-sink-83_bad.tmpl.cpp
*/
/*
* @description
* CWE: 23 Relative Path Traversal
* BadSource: connect_socket Read data using a connect socket (client side)
* GoodSource: Use a fixed file name
* Sinks: ifstream
* BadSink : Open the file named in data using ifstream::open()
* Flow Variant: 83 Data flow: data passed to class constructor and destructor by declaring the class object on the stack
*
* */
#ifndef OMITBAD
#include "std_testcase.h"
#include "CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83.h"
#ifdef _WIN32
#include <winsock2.h>
#include <windows.h>
#include <direct.h>
#pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */
#define CLOSE_SOCKET closesocket
#else /* NOT _WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#define INVALID_SOCKET -1
#define SOCKET_ERROR -1
#define CLOSE_SOCKET close
#define SOCKET int
#endif
#define TCP_PORT 27015
#define IP_ADDRESS "127.0.0.1"
#include <fstream>
using namespace std;
namespace CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83
{
CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad::CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad(char * dataCopy)
{
data = dataCopy;
{
#ifdef _WIN32
WSADATA wsaData;
int wsaDataInit = 0;
#endif
int recvResult;
struct sockaddr_in service;
char *replace;
SOCKET connectSocket = INVALID_SOCKET;
size_t dataLen = strlen(data);
do
{
#ifdef _WIN32
if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR)
{
break;
}
wsaDataInit = 1;
#endif
/* POTENTIAL FLAW: Read data using a connect socket */
connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (connectSocket == INVALID_SOCKET)
{
break;
}
memset(&service, 0, sizeof(service));
service.sin_family = AF_INET;
service.sin_addr.s_addr = inet_addr(IP_ADDRESS);
service.sin_port = htons(TCP_PORT);
if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR)
{
break;
}
/* Abort on error or the connection was closed, make sure to recv one
* less char than is in the recv_buf in order to append a terminator */
/* Abort on error or the connection was closed */
recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (FILENAME_MAX - dataLen - 1), 0);
if (recvResult == SOCKET_ERROR || recvResult == 0)
{
break;
}
/* Append null terminator */
data[dataLen + recvResult / sizeof(char)] = '\0';
/* Eliminate CRLF */
replace = strchr(data, '\r');
if (replace)
{
*replace = '\0';
}
replace = strchr(data, '\n');
if (replace)
{
*replace = '\0';
}
}
while (0);
if (connectSocket != INVALID_SOCKET)
{
CLOSE_SOCKET(connectSocket);
}
#ifdef _WIN32
if (wsaDataInit)
{
WSACleanup();
}
#endif
}
}
CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad::~CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad()
{
{
ifstream inputFile;
/* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */
inputFile.open((char *)data);
inputFile.close();
}
}
}
#endif /* OMITBAD */
|
C++
|
MIT
|
AditiAShenoy/Juliet1.3/C/testcases/CWE23_Relative_Path_Traversal/s01/CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad.cpp
|
7a3a55b5-d401-4033-95d4-b463e300e6d6
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1233, "end": 1242, "context": "if\r\n\r\n#define TCP_PORT 27015\r\n#define IP_ADDRESS \"127.0.0.1\"\r\n\r\n#include <fstream>\r\nusing namespace std;\r\n\r\nn"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1233, "end": 1242, "context": "if\r\n\r\n#define TCP_PORT 27015\r\n#define IP_ADDRESS \"127.0.0.1\"\r\n\r\n#include <fstream>\r\nusing namespace std;\r\n\r\nn"}]
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/macros.h"
#include "build/build_config.h"
#include "content/browser/frame_host/frame_tree.h"
#include "content/browser/frame_host/frame_tree_node.h"
#include "content/browser/renderer_host/render_view_host_impl.h"
#include "content/browser/web_contents/web_contents_impl.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_types.h"
#include "content/public/common/content_switches.h"
#include "content/public/common/url_constants.h"
#include "content/public/test/browser_test_utils.h"
#include "content/public/test/content_browser_test.h"
#include "content/public/test/content_browser_test_utils.h"
#include "content/public/test/test_navigation_observer.h"
#include "content/public/test/test_utils.h"
#include "content/shell/browser/shell.h"
#include "content/shell/common/shell_switches.h"
#include "content/test/content_browser_test_utils_internal.h"
#include "net/dns/mock_host_resolver.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
#include "third_party/WebKit/public/web/WebSandboxFlags.h"
#include "url/url_constants.h"
// For fine-grained suppression on flaky tests.
#if defined(OS_WIN)
#include "base/win/windows_version.h"
#endif
namespace content {
namespace {
std::string GetOriginFromRenderer(FrameTreeNode* node) {
std::string origin;
EXPECT_TRUE(ExecuteScriptAndExtractString(
node, "window.domAutomationController.send(document.origin);", &origin));
return origin;
}
} // namespace
class FrameTreeBrowserTest : public ContentBrowserTest {
public:
FrameTreeBrowserTest() {}
void SetUpOnMainThread() override {
host_resolver()->AddRule("*", "127.0.0.1");
SetupCrossSiteRedirector(embedded_test_server());
ASSERT_TRUE(embedded_test_server()->Start());
}
private:
DISALLOW_COPY_AND_ASSIGN(FrameTreeBrowserTest);
};
// Ensures FrameTree correctly reflects page structure during navigations.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, FrameTreeShape) {
GURL base_url = embedded_test_server()->GetURL("A.com", "/site_isolation/");
// Load doc without iframes. Verify FrameTree just has root.
// Frame tree:
// Site-A Root
NavigateToURL(shell(), base_url.Resolve("blank.html"));
FrameTreeNode* root =
static_cast<WebContentsImpl*>(shell()->web_contents())->
GetFrameTree()->root();
EXPECT_EQ(0U, root->child_count());
// Add 2 same-site frames. Verify 3 nodes in tree with proper names.
// Frame tree:
// Site-A Root -- Site-A frame1
// \-- Site-A frame2
WindowedNotificationObserver observer1(
content::NOTIFICATION_LOAD_STOP,
content::Source<NavigationController>(
&shell()->web_contents()->GetController()));
NavigateToURL(shell(), base_url.Resolve("frames-X-X.html"));
observer1.Wait();
ASSERT_EQ(2U, root->child_count());
EXPECT_EQ(0U, root->child_at(0)->child_count());
EXPECT_EQ(0U, root->child_at(1)->child_count());
}
// TODO(ajwong): Talk with nasko and merge this functionality with
// FrameTreeShape.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, FrameTreeShape2) {
NavigateToURL(shell(),
embedded_test_server()->GetURL("/frame_tree/top.html"));
WebContentsImpl* wc = static_cast<WebContentsImpl*>(shell()->web_contents());
FrameTreeNode* root = wc->GetFrameTree()->root();
// Check that the root node is properly created.
ASSERT_EQ(3UL, root->child_count());
EXPECT_EQ(std::string(), root->frame_name());
ASSERT_EQ(2UL, root->child_at(0)->child_count());
EXPECT_STREQ("1-1-name", root->child_at(0)->frame_name().c_str());
// Verify the deepest node exists and has the right name.
ASSERT_EQ(2UL, root->child_at(2)->child_count());
EXPECT_EQ(1UL, root->child_at(2)->child_at(1)->child_count());
EXPECT_EQ(0UL, root->child_at(2)->child_at(1)->child_at(0)->child_count());
EXPECT_STREQ("3-1-name",
root->child_at(2)->child_at(1)->child_at(0)->frame_name().c_str());
// Navigate to about:blank, which should leave only the root node of the frame
// tree in the browser process.
NavigateToURL(shell(), embedded_test_server()->GetURL("/title1.html"));
root = wc->GetFrameTree()->root();
EXPECT_EQ(0UL, root->child_count());
EXPECT_EQ(std::string(), root->frame_name());
}
// Test that we can navigate away if the previous renderer doesn't clean up its
// child frames.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, FrameTreeAfterCrash) {
NavigateToURL(shell(),
embedded_test_server()->GetURL("/frame_tree/top.html"));
// Ensure the view and frame are live.
RenderViewHost* rvh = shell()->web_contents()->GetRenderViewHost();
RenderFrameHostImpl* rfh =
static_cast<RenderFrameHostImpl*>(rvh->GetMainFrame());
EXPECT_TRUE(rvh->IsRenderViewLive());
EXPECT_TRUE(rfh->IsRenderFrameLive());
// Crash the renderer so that it doesn't send any FrameDetached messages.
RenderProcessHostWatcher crash_observer(
shell()->web_contents(),
RenderProcessHostWatcher::WATCH_FOR_PROCESS_EXIT);
NavigateToURL(shell(), GURL(kChromeUICrashURL));
crash_observer.Wait();
// The frame tree should be cleared.
WebContentsImpl* wc = static_cast<WebContentsImpl*>(shell()->web_contents());
FrameTreeNode* root = wc->GetFrameTree()->root();
EXPECT_EQ(0UL, root->child_count());
// Ensure the view and frame aren't live anymore.
EXPECT_FALSE(rvh->IsRenderViewLive());
EXPECT_FALSE(rfh->IsRenderFrameLive());
// Navigate to a new URL.
GURL url(embedded_test_server()->GetURL("/title1.html"));
NavigateToURL(shell(), url);
EXPECT_EQ(0UL, root->child_count());
EXPECT_EQ(url, root->current_url());
// Ensure the view and frame are live again.
EXPECT_TRUE(rvh->IsRenderViewLive());
EXPECT_TRUE(rfh->IsRenderFrameLive());
}
// Test that we can navigate away if the previous renderer doesn't clean up its
// child frames.
// Flaky on Mac. http://crbug.com/452018
#if defined(OS_MACOSX)
#define MAYBE_NavigateWithLeftoverFrames DISABLED_NavigateWithLeftoverFrames
#else
#define MAYBE_NavigateWithLeftoverFrames NavigateWithLeftoverFrames
#endif
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, MAYBE_NavigateWithLeftoverFrames) {
#if defined(OS_WIN)
// Flaky on XP bot http://crbug.com/468713
if (base::win::GetVersion() <= base::win::VERSION_XP)
return;
#endif
GURL base_url = embedded_test_server()->GetURL("A.com", "/site_isolation/");
NavigateToURL(shell(),
embedded_test_server()->GetURL("/frame_tree/top.html"));
// Hang the renderer so that it doesn't send any FrameDetached messages.
// (This navigation will never complete, so don't wait for it.)
shell()->LoadURL(GURL(kChromeUIHangURL));
// Check that the frame tree still has children.
WebContentsImpl* wc = static_cast<WebContentsImpl*>(shell()->web_contents());
FrameTreeNode* root = wc->GetFrameTree()->root();
ASSERT_EQ(3UL, root->child_count());
// Navigate to a new URL. We use LoadURL because NavigateToURL will try to
// wait for the previous navigation to stop.
TestNavigationObserver tab_observer(wc, 1);
shell()->LoadURL(base_url.Resolve("blank.html"));
tab_observer.Wait();
// The frame tree should now be cleared.
EXPECT_EQ(0UL, root->child_count());
}
// Ensure that IsRenderFrameLive is true for main frames and same-site iframes.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, IsRenderFrameLive) {
GURL main_url(embedded_test_server()->GetURL("/frame_tree/top.html"));
NavigateToURL(shell(), main_url);
// It is safe to obtain the root frame tree node here, as it doesn't change.
FrameTreeNode* root = static_cast<WebContentsImpl*>(shell()->web_contents())
->GetFrameTree()->root();
// The root and subframe should each have a live RenderFrame.
EXPECT_TRUE(
root->current_frame_host()->render_view_host()->IsRenderViewLive());
EXPECT_TRUE(root->current_frame_host()->IsRenderFrameLive());
EXPECT_TRUE(root->child_at(0)->current_frame_host()->IsRenderFrameLive());
// Load a same-site page into iframe and it should still be live.
GURL http_url(embedded_test_server()->GetURL("/title1.html"));
NavigateFrameToURL(root->child_at(0), http_url);
EXPECT_TRUE(
root->current_frame_host()->render_view_host()->IsRenderViewLive());
EXPECT_TRUE(root->current_frame_host()->IsRenderFrameLive());
EXPECT_TRUE(root->child_at(0)->current_frame_host()->IsRenderFrameLive());
}
// Ensure that origins are correctly set on navigations.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, OriginSetOnNavigation) {
GURL about_blank(url::kAboutBlankURL);
GURL main_url(
embedded_test_server()->GetURL("a.com", "/frame_tree/top.html"));
EXPECT_TRUE(NavigateToURL(shell(), main_url));
WebContents* contents = shell()->web_contents();
// It is safe to obtain the root frame tree node here, as it doesn't change.
FrameTreeNode* root =
static_cast<WebContentsImpl*>(contents)->GetFrameTree()->root();
// Extra '/' is added because the replicated origin is serialized in RFC 6454
// format, which dictates no trailing '/', whereas GURL::GetOrigin does put a
// '/' at the end.
EXPECT_EQ(main_url.GetOrigin().spec(),
root->current_origin().Serialize() + '/');
EXPECT_EQ(
main_url.GetOrigin().spec(),
root->current_frame_host()->GetLastCommittedOrigin().Serialize() + '/');
// The iframe is inititially same-origin.
EXPECT_TRUE(
root->current_frame_host()->GetLastCommittedOrigin().IsSameOriginWith(
root->child_at(0)->current_frame_host()->GetLastCommittedOrigin()));
EXPECT_EQ(root->current_origin().Serialize(), GetOriginFromRenderer(root));
EXPECT_EQ(root->child_at(0)->current_origin().Serialize(),
GetOriginFromRenderer(root->child_at(0)));
// Navigate the iframe cross-origin.
GURL frame_url(embedded_test_server()->GetURL("b.com", "/title1.html"));
NavigateFrameToURL(root->child_at(0), frame_url);
EXPECT_EQ(frame_url, root->child_at(0)->current_url());
EXPECT_EQ(frame_url.GetOrigin().spec(),
root->child_at(0)->current_origin().Serialize() + '/');
EXPECT_FALSE(
root->current_frame_host()->GetLastCommittedOrigin().IsSameOriginWith(
root->child_at(0)->current_frame_host()->GetLastCommittedOrigin()));
EXPECT_EQ(root->current_origin().Serialize(), GetOriginFromRenderer(root));
EXPECT_EQ(root->child_at(0)->current_origin().Serialize(),
GetOriginFromRenderer(root->child_at(0)));
// Parent-initiated about:blank navigation should inherit the parent's a.com
// origin.
NavigateIframeToURL(contents, "1-1-id", about_blank);
EXPECT_EQ(about_blank, root->child_at(0)->current_url());
EXPECT_EQ(main_url.GetOrigin().spec(),
root->child_at(0)->current_origin().Serialize() + '/');
EXPECT_EQ(root->current_frame_host()->GetLastCommittedOrigin().Serialize(),
root->child_at(0)
->current_frame_host()
->GetLastCommittedOrigin()
.Serialize());
EXPECT_TRUE(
root->current_frame_host()->GetLastCommittedOrigin().IsSameOriginWith(
root->child_at(0)->current_frame_host()->GetLastCommittedOrigin()));
EXPECT_EQ(root->current_origin().Serialize(), GetOriginFromRenderer(root));
EXPECT_EQ(root->child_at(0)->current_origin().Serialize(),
GetOriginFromRenderer(root->child_at(0)));
GURL data_url("data:text/html,foo");
EXPECT_TRUE(NavigateToURL(shell(), data_url));
// Navigating to a data URL should set a unique origin. This is represented
// as "null" per RFC 6454.
EXPECT_EQ("null", root->current_origin().Serialize());
EXPECT_TRUE(contents->GetMainFrame()->GetLastCommittedOrigin().unique());
EXPECT_EQ("null", GetOriginFromRenderer(root));
// Re-navigating to a normal URL should update the origin.
EXPECT_TRUE(NavigateToURL(shell(), main_url));
EXPECT_EQ(main_url.GetOrigin().spec(),
root->current_origin().Serialize() + '/');
EXPECT_EQ(
main_url.GetOrigin().spec(),
contents->GetMainFrame()->GetLastCommittedOrigin().Serialize() + '/');
EXPECT_FALSE(contents->GetMainFrame()->GetLastCommittedOrigin().unique());
EXPECT_EQ(root->current_origin().Serialize(), GetOriginFromRenderer(root));
}
// Tests a cross-origin navigation to a blob URL. The main frame initiates this
// navigation on its grandchild. It should wind up in the main frame's process.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, NavigateGrandchildToBlob) {
WebContents* contents = shell()->web_contents();
FrameTreeNode* root =
static_cast<WebContentsImpl*>(contents)->GetFrameTree()->root();
// First, snapshot the FrameTree for a normal A(B(A)) case where all frames
// are served over http. The blob test should result in the same structure.
EXPECT_TRUE(NavigateToURL(
shell(), embedded_test_server()->GetURL(
"a.com", "/cross_site_iframe_factory.html?a(b(a))")));
std::string reference_tree = FrameTreeVisualizer().DepictFrameTree(root);
GURL main_url(embedded_test_server()->GetURL(
"a.com", "/cross_site_iframe_factory.html?a(b(c))"));
EXPECT_TRUE(NavigateToURL(shell(), main_url));
// The root node will initiate the navigation; its grandchild node will be the
// target of the navigation.
FrameTreeNode* target = root->child_at(0)->child_at(0);
std::string blob_url_string;
RenderFrameDeletedObserver deleted_observer(target->current_frame_host());
EXPECT_TRUE(ExecuteScriptAndExtractString(
root,
"function receiveMessage(event) {"
" document.body.appendChild(document.createTextNode(event.data));"
" domAutomationController.send(event.source.location.href);"
"}"
"window.addEventListener('message', receiveMessage, false);"
"var blob = new Blob(["
" '<html><body><div>This is blob content.</div><script>"
" window.parent.parent.postMessage(\"HI\", document.origin);"
" </script></body></html>'], {type: 'text/html'});"
"var blob_url = URL.createObjectURL(blob);"
"frames[0][0].location.href = blob_url;",
&blob_url_string));
// Wait for the RenderFrame to go away, if this will be cross-process.
if (AreAllSitesIsolatedForTesting())
deleted_observer.WaitUntilDeleted();
EXPECT_EQ(GURL(blob_url_string), target->current_url());
EXPECT_EQ(url::kBlobScheme, target->current_url().scheme());
EXPECT_FALSE(target->current_origin().unique());
EXPECT_EQ("a.com", target->current_origin().host());
EXPECT_EQ(url::kHttpScheme, target->current_origin().scheme());
std::string document_body;
EXPECT_TRUE(ExecuteScriptAndExtractString(
target,
"domAutomationController.send(document.body.children[0].innerHTML);",
&document_body));
EXPECT_EQ("This is blob content.", document_body);
EXPECT_EQ(reference_tree, FrameTreeVisualizer().DepictFrameTree(root));
}
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, NavigateChildToAboutBlank) {
GURL main_url(embedded_test_server()->GetURL(
"a.com", "/cross_site_iframe_factory.html?a(b(c))"));
EXPECT_TRUE(NavigateToURL(shell(), main_url));
WebContentsImpl* contents =
static_cast<WebContentsImpl*>(shell()->web_contents());
// The leaf node (c.com) will be navigated. Its parent node (b.com) will
// initiate the navigation.
FrameTreeNode* target =
contents->GetFrameTree()->root()->child_at(0)->child_at(0);
FrameTreeNode* initiator = target->parent();
// Give the target a name.
EXPECT_TRUE(ExecuteScript(target, "window.name = 'target';"));
// Use window.open(about:blank), then poll the document for access.
std::string about_blank_origin;
EXPECT_TRUE(ExecuteScriptAndExtractString(
initiator,
"var didNavigate = false;"
"var intervalID = setInterval(function() {"
" if (!didNavigate) {"
" didNavigate = true;"
" window.open('about:blank', 'target');"
" }"
" // Poll the document until it doesn't throw a SecurityError.\n"
" try {"
" frames[0].document.write('Hi from ' + document.domain);"
" } catch (e) { return; }"
" clearInterval(intervalID);"
" domAutomationController.send(frames[0].document.origin);"
"}, 16);",
&about_blank_origin));
EXPECT_EQ(GURL(url::kAboutBlankURL), target->current_url());
EXPECT_EQ(url::kAboutScheme, target->current_url().scheme());
EXPECT_FALSE(target->current_origin().unique());
EXPECT_EQ("b.com", target->current_origin().host());
EXPECT_EQ(url::kHttpScheme, target->current_origin().scheme());
EXPECT_EQ(target->current_origin().Serialize(), about_blank_origin);
std::string document_body;
EXPECT_TRUE(ExecuteScriptAndExtractString(
target, "domAutomationController.send(document.body.innerHTML);",
&document_body));
EXPECT_EQ("Hi from b.com", document_body);
}
// Nested iframes, three origins: A(B(C)). Frame A navigates C to about:blank
// (via window.open). This should wind up in A's origin per the spec. Test fails
// because of http://crbug.com/564292
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest,
DISABLED_NavigateGrandchildToAboutBlank) {
GURL main_url(embedded_test_server()->GetURL(
"a.com", "/cross_site_iframe_factory.html?a(b(c))"));
EXPECT_TRUE(NavigateToURL(shell(), main_url));
WebContentsImpl* contents =
static_cast<WebContentsImpl*>(shell()->web_contents());
// The leaf node (c.com) will be navigated. Its grandparent node (a.com) will
// initiate the navigation.
FrameTreeNode* target =
contents->GetFrameTree()->root()->child_at(0)->child_at(0);
FrameTreeNode* initiator = target->parent()->parent();
// Give the target a name.
EXPECT_TRUE(ExecuteScript(target, "window.name = 'target';"));
// Use window.open(about:blank), then poll the document for access.
std::string about_blank_origin;
EXPECT_TRUE(ExecuteScriptAndExtractString(
initiator,
"var didNavigate = false;"
"var intervalID = setInterval(function() {"
" if (!didNavigate) {"
" didNavigate = true;"
" window.open('about:blank', 'target');"
" }"
" // May raise a SecurityError, that's expected.\n"
" frames[0][0].document.write('Hi from ' + document.domain);"
" clearInterval(intervalID);"
" domAutomationController.send(frames[0][0].document.origin);"
"}, 16);",
&about_blank_origin));
EXPECT_EQ(GURL(url::kAboutBlankURL), target->current_url());
EXPECT_EQ(url::kAboutScheme, target->current_url().scheme());
EXPECT_FALSE(target->current_origin().unique());
EXPECT_EQ("a.com", target->current_origin().host());
EXPECT_EQ(url::kHttpScheme, target->current_origin().scheme());
EXPECT_EQ(target->current_origin().Serialize(), about_blank_origin);
std::string document_body;
EXPECT_TRUE(ExecuteScriptAndExtractString(
target, "domAutomationController.send(document.body.innerHTML);",
&document_body));
EXPECT_EQ("Hi from a.com", document_body);
}
// Ensures that iframe with srcdoc is always put in the same origin as its
// parent frame.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, ChildFrameWithSrcdoc) {
  GURL main_url(embedded_test_server()->GetURL(
      "a.com", "/cross_site_iframe_factory.html?a(b)"));
  EXPECT_TRUE(NavigateToURL(shell(), main_url));
  WebContentsImpl* contents =
      static_cast<WebContentsImpl*>(shell()->web_contents());
  FrameTreeNode* root = contents->GetFrameTree()->root();
  EXPECT_EQ(1U, root->child_count());
  FrameTreeNode* child = root->child_at(0);
  // Sanity check: the b.com child frame is currently cross-origin to root.
  std::string frame_origin;
  EXPECT_TRUE(ExecuteScriptAndExtractString(
      child, "domAutomationController.send(document.origin);", &frame_origin));
  EXPECT_TRUE(
      child->current_frame_host()->GetLastCommittedOrigin().IsSameOriginWith(
          url::Origin(GURL(frame_origin))));
  EXPECT_FALSE(
      root->current_frame_host()->GetLastCommittedOrigin().IsSameOriginWith(
          url::Origin(GURL(frame_origin))));
  // Create a new iframe with srcdoc and add it to the main frame. It should
  // be created in the same SiteInstance as the parent.
  {
    std::string script("var f = document.createElement('iframe');"
                       "f.srcdoc = 'some content';"
                       "document.body.appendChild(f)");
    TestNavigationObserver observer(shell()->web_contents());
    EXPECT_TRUE(ExecuteScript(root, script));
    EXPECT_EQ(2U, root->child_count());
    observer.Wait();
    EXPECT_EQ(GURL(kAboutSrcDocURL), root->child_at(1)->current_url());
    // The srcdoc frame reports the root's origin, not the cross-site child's.
    EXPECT_TRUE(ExecuteScriptAndExtractString(
        root->child_at(1), "domAutomationController.send(document.origin);",
        &frame_origin));
    EXPECT_EQ(root->current_frame_host()->GetLastCommittedURL().GetOrigin(),
              GURL(frame_origin));
    EXPECT_NE(child->current_frame_host()->GetLastCommittedURL().GetOrigin(),
              GURL(frame_origin));
  }
  // Set srcdoc on the existing cross-site frame. It should navigate the frame
  // back to the origin of the parent.
  {
    std::string script("var f = document.getElementById('child-0');"
                       "f.srcdoc = 'some content';");
    TestNavigationObserver observer(shell()->web_contents());
    EXPECT_TRUE(ExecuteScript(root, script));
    observer.Wait();
    EXPECT_EQ(GURL(kAboutSrcDocURL), child->current_url());
    EXPECT_TRUE(ExecuteScriptAndExtractString(
        child, "domAutomationController.send(document.origin);",
        &frame_origin));
    EXPECT_EQ(root->current_frame_host()->GetLastCommittedURL().GetOrigin(),
              GURL(frame_origin));
  }
}
// Ensure that sandbox flags are correctly set when child frames are created.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, SandboxFlagsSetForChildFrames) {
  GURL main_url(embedded_test_server()->GetURL("/sandboxed_frames.html"));
  EXPECT_TRUE(NavigateToURL(shell(), main_url));
  // It is safe to obtain the root frame tree node here, as it doesn't change.
  FrameTreeNode* root = static_cast<WebContentsImpl*>(shell()->web_contents())
                            ->GetFrameTree()->root();
  // Verify that sandbox flags are set properly for all FrameTreeNodes.
  // First frame is completely sandboxed; second frame uses "allow-scripts",
  // which resets both SandboxFlags::Scripts and
  // SandboxFlags::AutomaticFeatures bits per blink::parseSandboxPolicy(), and
  // third frame has "allow-scripts allow-same-origin".
  EXPECT_EQ(blink::WebSandboxFlags::None, root->effective_sandbox_flags());
  EXPECT_EQ(blink::WebSandboxFlags::All,
            root->child_at(0)->effective_sandbox_flags());
  EXPECT_EQ(blink::WebSandboxFlags::All & ~blink::WebSandboxFlags::Scripts &
                ~blink::WebSandboxFlags::AutomaticFeatures,
            root->child_at(1)->effective_sandbox_flags());
  EXPECT_EQ(blink::WebSandboxFlags::All & ~blink::WebSandboxFlags::Scripts &
                ~blink::WebSandboxFlags::AutomaticFeatures &
                ~blink::WebSandboxFlags::Origin,
            root->child_at(2)->effective_sandbox_flags());
  // Sandboxed frames should set a unique origin unless they have the
  // "allow-same-origin" directive.
  EXPECT_EQ("null", root->child_at(0)->current_origin().Serialize());
  EXPECT_EQ("null", root->child_at(1)->current_origin().Serialize());
  EXPECT_EQ(main_url.GetOrigin().spec(),
            root->child_at(2)->current_origin().Serialize() + "/");
  // Navigating to a different URL should not clear sandbox flags.
  GURL frame_url(embedded_test_server()->GetURL("/title1.html"));
  NavigateFrameToURL(root->child_at(0), frame_url);
  EXPECT_EQ(blink::WebSandboxFlags::All,
            root->child_at(0)->effective_sandbox_flags());
}
// Ensure that a popup opened from a subframe sets its opener to the subframe's
// FrameTreeNode, and that the opener is cleared if the subframe is destroyed.
IN_PROC_BROWSER_TEST_F(FrameTreeBrowserTest, SubframeOpenerSetForNewWindow) {
  GURL main_url(embedded_test_server()->GetURL("/frame_tree/top.html"));
  EXPECT_TRUE(NavigateToURL(shell(), main_url));
  // It is safe to obtain the root frame tree node here, as it doesn't change.
  FrameTreeNode* root = static_cast<WebContentsImpl*>(shell()->web_contents())
                            ->GetFrameTree()
                            ->root();
  // Open a new window from a subframe.
  ShellAddedObserver new_shell_observer;
  GURL popup_url(embedded_test_server()->GetURL("foo.com", "/title1.html"));
  EXPECT_TRUE(ExecuteScript(root->child_at(0),
                            "window.open('" + popup_url.spec() + "');"));
  Shell* new_shell = new_shell_observer.GetShell();
  WebContents* new_contents = new_shell->web_contents();
  WaitForLoadStop(new_contents);
  // Check that the new window's opener points to the correct subframe on
  // original window.
  FrameTreeNode* popup_root =
      static_cast<WebContentsImpl*>(new_contents)->GetFrameTree()->root();
  EXPECT_EQ(root->child_at(0), popup_root->opener());
  // Close the original window. This should clear the new window's opener.
  shell()->Close();
  EXPECT_EQ(nullptr, popup_root->opener());
}
// Browser-test fixture that enables strict site isolation (every site in its
// own renderer process) and routes all hostnames to the embedded test server.
class CrossProcessFrameTreeBrowserTest : public ContentBrowserTest {
 public:
  CrossProcessFrameTreeBrowserTest() {}
  void SetUpCommandLine(base::CommandLine* command_line) override {
    IsolateAllSitesForTesting(command_line);
  }
  void SetUpOnMainThread() override {
    // Resolve every hostname to localhost so cross-site URLs work in tests.
    host_resolver()->AddRule("*", "127.0.0.1");
    SetupCrossSiteRedirector(embedded_test_server());
    ASSERT_TRUE(embedded_test_server()->Start());
  }
 private:
  DISALLOW_COPY_AND_ASSIGN(CrossProcessFrameTreeBrowserTest);
};
// Ensure that we can complete a cross-process subframe navigation.
IN_PROC_BROWSER_TEST_F(CrossProcessFrameTreeBrowserTest,
                       CreateCrossProcessSubframeProxies) {
  GURL main_url(embedded_test_server()->GetURL("/site_per_process_main.html"));
  // Check the navigation result like every other test in this file does;
  // previously the return value was silently ignored, which could let the
  // test continue against a failed load.
  EXPECT_TRUE(NavigateToURL(shell(), main_url));
  // It is safe to obtain the root frame tree node here, as it doesn't change.
  FrameTreeNode* root = static_cast<WebContentsImpl*>(shell()->web_contents())
                            ->GetFrameTree()->root();
  // There should not be a proxy for the root's own SiteInstance.
  SiteInstance* root_instance = root->current_frame_host()->GetSiteInstance();
  EXPECT_FALSE(root->render_manager()->GetRenderFrameProxyHost(root_instance));
  // Load same-site page into iframe.
  GURL http_url(embedded_test_server()->GetURL("/title1.html"));
  NavigateFrameToURL(root->child_at(0), http_url);
  // Load cross-site page into iframe.
  GURL cross_site_url(
      embedded_test_server()->GetURL("foo.com", "/title2.html"));
  NavigateFrameToURL(root->child_at(0), cross_site_url);
  // Ensure that we have created a new process for the subframe.
  ASSERT_EQ(2U, root->child_count());
  FrameTreeNode* child = root->child_at(0);
  SiteInstance* child_instance = child->current_frame_host()->GetSiteInstance();
  RenderViewHost* rvh = child->current_frame_host()->render_view_host();
  RenderProcessHost* rph = child->current_frame_host()->GetProcess();
  EXPECT_NE(shell()->web_contents()->GetRenderViewHost(), rvh);
  EXPECT_NE(shell()->web_contents()->GetSiteInstance(), child_instance);
  EXPECT_NE(shell()->web_contents()->GetRenderProcessHost(), rph);
  // Ensure that the root node has a proxy for the child node's SiteInstance.
  EXPECT_TRUE(root->render_manager()->GetRenderFrameProxyHost(child_instance));
  // Also ensure that the child has a proxy for the root node's SiteInstance.
  EXPECT_TRUE(child->render_manager()->GetRenderFrameProxyHost(root_instance));
  // The nodes should not have proxies for their own SiteInstance.
  EXPECT_FALSE(root->render_manager()->GetRenderFrameProxyHost(root_instance));
  EXPECT_FALSE(
      child->render_manager()->GetRenderFrameProxyHost(child_instance));
  // Ensure that the RenderViews and RenderFrames are all live.
  EXPECT_TRUE(
      root->current_frame_host()->render_view_host()->IsRenderViewLive());
  EXPECT_TRUE(
      child->current_frame_host()->render_view_host()->IsRenderViewLive());
  EXPECT_TRUE(root->current_frame_host()->IsRenderFrameLive());
  EXPECT_TRUE(root->child_at(0)->current_frame_host()->IsRenderFrameLive());
}
// Verifies that committed origins are tracked correctly in the browser across
// cross-process navigations: about:blank inherits the parent's origin,
// same-site loads keep it, cross-site loads replace it, and data: URLs commit
// a unique ("null") origin.
IN_PROC_BROWSER_TEST_F(CrossProcessFrameTreeBrowserTest,
                       OriginSetOnCrossProcessNavigations) {
  GURL main_url(embedded_test_server()->GetURL("/site_per_process_main.html"));
  EXPECT_TRUE(NavigateToURL(shell(), main_url));
  // It is safe to obtain the root frame tree node here, as it doesn't change.
  FrameTreeNode* root = static_cast<WebContentsImpl*>(shell()->web_contents())
                            ->GetFrameTree()->root();
  EXPECT_EQ(root->current_origin().Serialize() + '/',
            main_url.GetOrigin().spec());
  // First frame is an about:blank frame.  Check that its origin is correctly
  // inherited from the parent.
  EXPECT_EQ(root->child_at(0)->current_origin().Serialize() + '/',
            main_url.GetOrigin().spec());
  // Second frame loads a same-site page.  Its origin should also be the same
  // as the parent.
  EXPECT_EQ(root->child_at(1)->current_origin().Serialize() + '/',
            main_url.GetOrigin().spec());
  // Load cross-site page into the first frame.
  GURL cross_site_url(
      embedded_test_server()->GetURL("foo.com", "/title2.html"));
  NavigateFrameToURL(root->child_at(0), cross_site_url);
  EXPECT_EQ(root->child_at(0)->current_origin().Serialize() + '/',
            cross_site_url.GetOrigin().spec());
  // The root's origin shouldn't have changed.
  EXPECT_EQ(root->current_origin().Serialize() + '/',
            main_url.GetOrigin().spec());
  GURL data_url("data:text/html,foo");
  NavigateFrameToURL(root->child_at(1), data_url);
  // Navigating to a data URL should set a unique origin.  This is represented
  // as "null" per RFC 6454.
  EXPECT_EQ(root->child_at(1)->current_origin().Serialize(), "null");
}
// FrameTreeBrowserTest variant where we isolate http://*.is, Iceland's top
// level domain. This is an analogue to --isolate-extensions that we use inside
// of content_browsertests, where extensions don't exist. Iceland, like an
// extension process, is a special place with magical powers; we want to protect
// it from outsiders.
class IsolateIcelandFrameTreeBrowserTest : public ContentBrowserTest {
 public:
  IsolateIcelandFrameTreeBrowserTest() {}
  void SetUpCommandLine(base::CommandLine* command_line) override {
    // Only *.is sites get dedicated processes; everything else is unisolated.
    command_line->AppendSwitchASCII(switches::kIsolateSitesForTesting, "*.is");
  }
  void SetUpOnMainThread() override {
    // Resolve every hostname to localhost so cross-site URLs work in tests.
    host_resolver()->AddRule("*", "127.0.0.1");
    SetupCrossSiteRedirector(embedded_test_server());
    ASSERT_TRUE(embedded_test_server()->Start());
  }
 private:
  DISALLOW_COPY_AND_ASSIGN(IsolateIcelandFrameTreeBrowserTest);
};
// Regression test for https://crbug.com/644966
IN_PROC_BROWSER_TEST_F(IsolateIcelandFrameTreeBrowserTest,
                       ProcessSwitchForIsolatedBlob) {
  // blink suppresses navigations to blob URLs of origins different from the
  // frame initiating the navigation. We disable those checks for this test, to
  // test what happens in a compromise scenario.
  base::CommandLine::ForCurrentProcess()->AppendSwitch(
      switches::kDisableWebSecurity);
  // Set up an iframe.
  WebContents* contents = shell()->web_contents();
  FrameTreeNode* root =
      static_cast<WebContentsImpl*>(contents)->GetFrameTree()->root();
  GURL main_url(embedded_test_server()->GetURL(
      "a.com", "/cross_site_iframe_factory.html?a(a)"));
  EXPECT_TRUE(NavigateToURL(shell(), main_url));
  // The navigation targets an invalid blob url; that's intentional to trigger
  // an error response. The response should commit in a process dedicated to
  // http://b.is.
  std::string result;
  EXPECT_TRUE(ExecuteScriptAndExtractString(
      root,
      "var iframe_element = document.getElementsByTagName('iframe')[0];"
      "iframe_element.onload = () => {"
      "  domAutomationController.send('done');"
      "};"
      "iframe_element.src = 'blob:http://b.is:2932/';",
      &result));
  WaitForLoadStop(contents);
  // Make sure we did a process transfer back to "b.is".
  EXPECT_EQ(
      " Site A ------------ proxies for B\n"
      "   +--Site B ------- proxies for A\n"
      "Where A = http://a.com/\n"
      "      B = http://b.is/",
      FrameTreeVisualizer().DepictFrameTree(root));
}
} // namespace content
|
C++
|
Apache-2.0
|
Cela-Inc/WebARonARCore/content/browser/frame_host/frame_tree_browsertest.cc
|
944deb19-1372-4400-b6af-2878b0e8f3f3
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1840, "end": 1849, "context": "d() override {\n host_resolver()->AddRule(\"*\", \"127.0.0.1\");\n SetupCrossSiteRedirector(embedded_test_ser"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 30801, "end": 30810, "context": "d() override {\n host_resolver()->AddRule(\"*\", \"127.0.0.1\");\n SetupCrossSiteRedirector(embedded_test_ser"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 25604, "end": 25613, "context": "d() override {\n host_resolver()->AddRule(\"*\", \"127.0.0.1\");\n SetupCrossSiteRedirector(embedded_test_ser"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1840, "end": 1849, "context": "d() override {\n host_resolver()->AddRule(\"*\", \"127.0.0.1\");\n SetupCrossSiteRedirector(embedded_test_ser"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 30801, "end": 30810, "context": "d() override {\n host_resolver()->AddRule(\"*\", \"127.0.0.1\");\n SetupCrossSiteRedirector(embedded_test_ser"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 25604, "end": 25613, "context": "d() override {\n host_resolver()->AddRule(\"*\", \"127.0.0.1\");\n SetupCrossSiteRedirector(embedded_test_ser"}]
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from extensions.rich_text_components import base
# Schema fragment shared by the 'start' and 'end' customization args below:
# an integer validated to be at least 0.
NONNEGATIVE_INT_SCHEMA = {
    'type': 'int',
    'validators': [{
        'id': 'is_at_least',
        'min_value': 0
    }],
}
class Video(base.BaseRichTextComponent):
    """A rich-text component representing a YouTube video."""
    # Display metadata shown in the rich-text editor UI.
    name = 'Video'
    category = 'Basic Input'
    description = 'A YouTube video.'
    frontend_name = 'video'
    tooltip = 'Insert video'
    # Editor-configurable arguments: the YouTube video id, optional start/end
    # offsets in whole seconds (0 means "from the beginning" / "to the end"),
    # and whether playback starts automatically.
    _customization_arg_specs = [{
        'name': 'video_id',
        'description': (
            'The YouTube id for this video. This is the 11-character string '
            'after \'v=\' in the video URL.'),
        'schema': {
            'type': 'unicode',
        },
        'default_value': '',
    }, {
        'name': 'start',
        'description': (
            'Video start time in seconds: (leave at 0 to start at the '
            'beginning.)'),
        'schema': NONNEGATIVE_INT_SCHEMA,
        'default_value': 0
    }, {
        'name': 'end',
        'description': (
            'Video end time in seconds: (leave at 0 to play until the end.)'),
        'schema': NONNEGATIVE_INT_SCHEMA,
        'default_value': 0
    }, {
        'name': 'autoplay',
        'description': (
            'Autoplay this video once the question has loaded?'),
        'schema': {
            'type': 'bool'
        },
        'default_value': False,
    }]
    # Inline base64-encoded PNG used as the toolbar icon for this component.
    icon_data_url = (
        'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAA'
        'ABGdBTUEAAK/INwWK6QAAABl0RVh0%0AU29mdHdhcmUAQWRvYmUgSW1hZ2VSZWFkeXHJZ'
        'TwAAAIfSURBVDjLpZNPaBNBGMXfbrubzBqbg4kL%0A0lJLgiVKE/AP6Kl6UUFQNAeDIAj'
        'VS08aELx59GQPAREV/4BeiqcqROpRD4pUNCJSS21OgloISWME%0AZ/aPb6ARdNeTCz92m'
        'O%2B9N9/w7RphGOJ/nsH%2Bolqtvg%2BCYJR8q9VquThxuVz%2BoJTKeZ63Uq/XC38E%0'
        'A0Jj3ff8%2BOVupVGLbolkzQw5HOqAxQU4wXWWnZrykmYD0QsgAOJe9hpEUcPr8i0GaJ8'
        'n2vs/sL2h8'
        'R66TpVfWTdETHWE6GRGKjGiiKNLii5BSLpN7pBHpgMYhMkm8tPUWz3sL'
        '2D1wFaY/jvnWcTTaE5Dy'
        'jMfTT5J0XIAiTRYn3ASwZ1MKbTmN7z%2BKaHUOYqmb1fcP'
        'iNa4kQBuyvWAHYfcHGzDgYcx9NKrwJYH%0ACAyF21JiPWBnXMAQOea6bmn%2B4ueYGZi8'
        'gtymNVobF7BG5prNpjd%2BeW6X4BSUD0gOdCpzA8MpA/v2%0Av15kl4%2BpK0emwHSbjJ'
        'GBlz%2BvYM1fQeDrYOBTdzOGvDf6EFNr%2BLYjHbBgsaCLxr%2BmoNQjU2vYhRXp%0AgI'
        'UOmSWWnsJRfjlOZhrexgtYDZ/gWbetNRbNs6QT10GJglNk64HMaGgbAkoMo5fiFNy7CKD'
        'QUGqE%0A5r38YktxAfSqW7Zt33l66WtkAkACjuNsaLVaDxlw5HdJ/86aYrG4WCgUZD6fX'
        '%2Bjv/U0ymfxoWVZo%0AmuZyf%2B8XqfGP49CCrBUAAAAASUVORK5CYII%3D%0A'
    )
|
Python
|
Apache-2.0
|
Atlas-Sailed-Co/oppia/extensions/rich_text_components/Video/Video.py
|
6a60566f-f679-4fea-ac12-4a0b808a17ff
|
[]
|
[]
|
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <iostream>
#include <memory>
#include "schema/inner/model_generated.h"
#include "common/common_test.h"
#include "src/common/file_utils.h"
#include "mindspore/lite/src/kernel_registry.h"
#include "nnacl/pack.h"
#include "nnacl/fp32/matmul_fp32.h"
#include "nnacl/int8/deconv_int8.h"
#include "mindspore/lite/src/runtime/kernel/arm/int8/deconvolution_int8.h"
using mindspore::lite::DeviceType;
namespace mindspore {
using mindspore::lite::QuantArg;
using mindspore::lite::Tensor;
// Test fixture for the int8 deconvolution (transposed convolution) kernels.
class TestDeconvInt8 : public mindspore::CommonTest {
 public:
  TestDeconvInt8() {}
};
// Checks PackNHWCToC8HWN8Int8 against a hand-computed golden for a small
// 5x1x2x6 (NHWC) weight tensor; trailing channel slots are zero-padded to 8.
TEST_F(TestDeconvInt8, PackWeight1) {
  int8_t in[] = {-8, 11, 99, -80, 8,   -12, 37,  -45, 31,  -69, -66, 26,  112, 124, -109, 85,  -24, 28,  -46, 100,
                 72, -36, -82, 64, -110, 37, -72, 65,  -124, 91, -43, 99,  3,   100, 19,   51,  -14, -81, 67,  90,
                 4,  -106, 105, 28, -61, -79, 55, -54, 47,  -38, 114, 125, -65, 100, 6,   -72, -33, 60,  109, -68};
  int8_t co[] = {-8,   11, 99, -80, 8,   -12, 0,  0, 112, 124, -109, 85,  -24, 28,  0, 0, -110, 37, -72, 65,
                 -124, 91, 0,  0,   -14, -81, 67, 90, 4,   -106, 0,   0,   47,  -38, 114, 125, -65, 100, 0,  0,
                 37,   -45, 31, -69, -66, 26, 0,  0, -46, 100, 72,  -36, -82, 64,  0, 0, -43,  99, 3,   100,
                 19,   51, 0,  0,   105, 28, -61, -79, 55, -54, 0,   0,   6,   -72, -33, 60, 109, -68, 0,  0};
  int8_t dst[80] = {0};
  /*5*1*2*6 nhwc*/
  PackNHWCToC8HWN8Int8(in, dst, 5, 2, 6);
  ASSERT_EQ(0, CompareOutputData(dst, co, 80, 1));
}
// Checks PackNHWCToC8HWN8Int8 against a golden for a larger 22x1x1x20 tensor,
// exercising both full 8-channel groups and a zero-padded tail group.
TEST_F(TestDeconvInt8, PackWeight2) {
  int8_t in[] = {
    40,   24,   94,   122,  67,   34,   -89,  31,   -43,  121,  48,   -54,  44,   -91,  35,   89,   -37,  114,  -8,   103,
    -22,  32,   26,   112,  -92,  -23,  43,   9,    81,   118,  -73,  -54,  65,   -99,  51,   -90,  121,  -62,  119,  -93,
    21,   -92,  -1,   -82,  -71,  -54,  63,   -93,  92,   -93,  99,   122,  -104, -16,  -8,   -32,  90,   -126, 51,   91,
    4,    70,   -7,   116,  99,   81,   -79,  124,  -14,  28,   97,   9,    -97,  99,   88,   -15,  54,   26,   77,   -25,
    113,  119,  119,  -75,  -17,  7,    7,    1,    69,   66,   40,   -13,  80,   -115, -98,  -8,   -17,  31,   88,   65,
    -1,   -15,  -98,  77,   56,   119,  -20,  -32,  -54,  -58,  -16,  52,   121,  126,  -33,  43,   92,   -34,  -17,  -52,
    104,  -52,  -91,  76,   79,   105,  102,  -65,  43,   32,   13,   15,   -38,  95,   -18,  -82,  -7,   118,  -79,  -85,
    120,  -15,  2,    32,   -94,  111,  115,  102,  -18,  121,  -106, 54,   63,   111,  -16,  92,   82,   -23,  111,  53,
    1,    -48,  45,   19,   -4,   -15,  -72,  41,   80,   -51,  116,  31,   94,   101,  -10,  18,   0,    -49,  108,  28,
    -36,  47,   -14,  -2,   -10,  31,   -92,  -84,  74,   -114, -107, 66,   99,   -121, -107, 31,   -38,  56,   -30,  109,
    -7,   28,   -22,  -17,  -3,   -2,   27,   -3,   108,  -84,  -23,  -71,  -54,  20,   -45,  109,  -42,  78,   -79,  98,
    -10,  57,   52,   1,    25,   73,   21,   -78,  46,   121,  66,   92,   24,   55,   4,    -110, -37,  112,  -18,  10,
    -42,  16,   -9,   31,   39,   -70,  108,  -3,   -90,  -60,  -121, 11,   50,   -88,  -104, -29,  -89,  94,   64,   -91,
    -101, -7,   23,   -57,  93,   16,   17,   35,   -48,  -25,  13,   -121, 73,   -68,  -54,  -122, -20,  12,   64,   20,
    -11,  -6,   -71,  -52,  -97,  109,  116,  -107, 117,  -124, 56,   80,   -108, 30,   123,  56,   -80,  39,   -18,  -97,
    -103, 122,  114,  -10,  -31,  97,   -92,  105,  -61,  -25,  10,   -119, -106, 41,   77,   -117, 55,   -83,  -29,  14,
    27,   -106, -86,  41,   43,   23,   11,   -76,  -34,  121,  94,   18,   69,   73,   100,  54,   43,   32,   13,   15,
    -38,  95,   -18,  -82,  -7,   118,  -79,  -85,  120,  -15,  2,    32,   -94,  111,  115,  102,  -18,  121,  -106, 54,
    63,   111,  -16,  92,   82,   -23,  111,  53,   1,    -48,  45,   19,   -4,   -15,  -72,  41,   80,   -51,  116,  31,
    94,   101,  -10,  18,   0,    -49,  108,  28,   -36,  47,   -14,  -2,   -10,  31,   -92,  -84,  74,   -114, -107, 66,
    99,   -121, -107, 31,   -38,  56,   -30,  109,  -7,   28,   -22,  -17,  -3,   -2,   27,   -3,   108,  -84,  -23,  -71,
    -54,  20,   -45,  109,  -42,  78,   -79,  98,   -10,  57,   52,   1,    25,   73,   21,   -78,  46,   121,  66,   92};
  int8_t co[] = {
    40,   24,   94,   122,  67,   34,   -89,  31,   -22,  32,   26,   112,  -92,  -23,  43,   9,    21,   -92,  -1,   -82,
    -71,  -54,  63,   -93,  4,    70,   -7,   116,  99,   81,   -79,  124,  113,  119,  119,  -75,  -17,  7,    7,    1,
    -1,   -15,  -98,  77,   56,   119,  -20,  -32,  104,  -52,  -91,  76,   79,   105,  102,  -65,  120,  -15,  2,    32,
    -94,  111,  115,  102,  1,    -48,  45,   19,   -4,   -15,  -72,  41,   -36,  47,   -14,  -2,   -10,  31,   -92,  -84,
    -7,   28,   -22,  -17,  -3,   -2,   27,   -3,   -10,  57,   52,   1,    25,   73,   21,   -78,  -42,  16,   -9,   31,
    39,   -70,  108,  -3,   -101, -7,   23,   -57,  93,   16,   17,   35,   -11,  -6,   -71,  -52,  -97,  109,  116,  -107,
    -103, 122,  114,  -10,  -31,  97,   -92,  105,  27,   -106, -86,  41,   43,   23,   11,   -76,  -38,  95,   -18,  -82,
    -7,   118,  -79,  -85,  63,   111,  -16,  92,   82,   -23,  111,  53,   94,   101,  -10,  18,   0,    -49,  108,  28,
    99,   -121, -107, 31,   -38,  56,   -30,  109,  -54,  20,   -45,  109,  -42,  78,   -79,  98,   -43,  121,  48,   -54,
    44,   -91,  35,   89,   81,   118,  -73,  -54,  65,   -99,  51,   -90,  92,   -93,  99,   122,  -104, -16,  -8,   -32,
    -14,  28,   97,   9,    -97,  99,   88,   -15,  69,   66,   40,   -13,  80,   -115, -98,  -8,   -54,  -58,  -16,  52,
    121,  126,  -33,  43,   43,   32,   13,   15,   -38,  95,   -18,  -82,  -18,  121,  -106, 54,   63,   111,  -16,  92,
    80,   -51,  116,  31,   94,   101,  -10,  18,   74,   -114, -107, 66,   99,   -121, -107, 31,   108,  -84,  -23,  -71,
    -54,  20,   -45,  109,  46,   121,  66,   92,   24,   55,   4,    -110, -90,  -60,  -121, 11,   50,   -88,  -104, -29,
    -48,  -25,  13,   -121, 73,   -68,  -54,  -122, 117,  -124, 56,   80,   -108, 30,   123,  56,   -61,  -25,  10,   -119,
    -106, 41,   77,   -117, -34,  121,  94,   18,   69,   73,   100,  54,   120,  -15,  2,    32,   -94,  111,  115,  102,
    1,    -48,  45,   19,   -4,   -15,  -72,  41,   -36,  47,   -14,  -2,   -10,  31,   -92,  -84,  -7,   28,   -22,  -17,
    -3,   -2,   27,   -3,   -10,  57,   52,   1,    25,   73,   21,   -78,  -37,  114,  -8,   103,  0,    0,    0,    0,
    121,  -62,  119,  -93,  0,    0,    0,    0,    90,   -126, 51,   91,   0,    0,    0,    0,    54,   26,   77,   -25,
    0,    0,    0,    0,    -17,  31,   88,   65,   0,    0,    0,    0,    92,   -34,  -17,  -52,  0,    0,    0,    0,
    -7,   118,  -79,  -85,  0,    0,    0,    0,    82,   -23,  111,  53,   0,    0,    0,    0,    0,    -49,  108,  28,
    0,    0,    0,    0,    -38,  56,   -30,  109,  0,    0,    0,    0,    -42,  78,   -79,  98,   0,    0,    0,    0,
    -37,  112,  -18,  10,   0,    0,    0,    0,    -89,  94,   64,   -91,  0,    0,    0,    0,    -20,  12,   64,   20,
    0,    0,    0,    0,    -80,  39,   -18,  -97,  0,    0,    0,    0,    55,   -83,  -29,  14,   0,    0,    0,    0,
    43,   32,   13,   15,   0,    0,    0,    0,    -18,  121,  -106, 54,   0,    0,    0,    0,    80,   -51,  116,  31,
    0,    0,    0,    0,    74,   -114, -107, 66,   0,    0,    0,    0,    108,  -84,  -23,  -71,  0,    0,    0,    0,
    46,   121,  66,   92,   0,    0,    0,    0};
  int8_t dst[528] = {0};
  PackNHWCToC8HWN8Int8(in, dst, 22, 1, 20);
  ASSERT_EQ(0, CompareOutputData(dst, co, 528, 1));
}
// Checks RowMajor2Row16x4MajorInt8 for a 6x20 row-major matrix: output is
// tiled into 16x4 blocks with out-of-range rows/columns zero-filled.
TEST_F(TestDeconvInt8, PackInputTest1) {
  /* 6 x 20 */
  int8_t in[] = {40,  24,  94,  122, 67,  34,  -89, 31,  -43, 121, 48,  -54, 44,  -91, 35,  89,  -37, 114, -8,  103,
                 -22, 32,  26,  112, -92, -23, 43,  9,   81,  118, -73, -54, 65,  -99, 51,  -90, 121, -62, 119, -93,
                 21,  -92, -1,  -82, -71, -54, 63,  -93, 92,  -93, 99,  122, -104, -16, -8, -32, 90,  -126, 51, 91,
                 4,   70,  -7,  116, 99,  81,  -79, 124, -14, 28,  97,  9,   -97, 99,  88,  -15, 54,  26,  77,  -25,
                 113, 119, 119, -75, -17, 7,   7,   1,   69,  66,  40,  -13, 80,  -115, -98, -8, -17, 31,  88,  65,
                 -1,  -15, -98, 77,  56,  119, -20, -32, -54, -58, -16, 52,  121, 126, -33, 43,  92,  -34, -17, -52};
  int8_t co[] = {40,  24,  94,  122, 67,  34,  -89, 31,  -43, 121, 48,  -54, 44,  -91, 35,  89,  -22, 32,  26,  112,
                 -92, -23, 43,  9,   81,  118, -73, -54, 65,  -99, 51,  -90, 21,  -92, -1,  -82, -71, -54, 63,  -93,
                 92,  -93, 99,  122, -104, -16, -8, -32, 4,   70,  -7,  116, 99,  81,  -79, 124, -14, 28,  97,  9,
                 -97, 99,  88,  -15, -37, 114, -8,  103, 0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,
                 121, -62, 119, -93, 0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   90,  -126, 51, 91,
                 0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   54,  26,  77,  -25, 0,   0,   0,   0,
                 0,   0,   0,   0,   0,   0,   0,   0,   113, 119, 119, -75, -17, 7,   7,   1,   69,  66,  40,  -13,
                 80,  -115, -98, -8, -1,  -15, -98, 77,  56,  119, -20, -32, -54, -58, -16, 52,  121, 126, -33, 43,
                 0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,
                 0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   -17, 31,  88,  65,  0,   0,   0,   0,
                 0,   0,   0,   0,   0,   0,   0,   0,   92,  -34, -17, -52, 0,   0,   0,   0,   0,   0,   0,   0,
                 0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,
                 0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0};
  int8_t dst[8 * 32] = {0};
  RowMajor2Row16x4MajorInt8(in, dst, 6, 20);
  ASSERT_EQ(0, CompareOutputData(dst, co, 8 * 32, 1));
}
// Checks DeConvPackInputSum: per-row sums of the already-packed input,
// multiplied by the filter zero point, at two different row-block splits.
TEST_F(TestDeconvInt8, InputSumTest1) {
  int8_t packed_a[] = {
    -6,  76,  32,  80,  -73, 8,   -85, -3,  114, 80,  30,  42,  15,  15,  15,  15,  -41, 117, 62,  -76, -77, -111,
    88,  105, 68,  105, -74, 13,  15,  15,  15,  15,  51,  94,  31,  -52, -92, -4,  -35, -71, 101, -93, 46,  -65,
    15,  15,  15,  15,  57,  -41, -51, 77,  1,   9,   73,  -19, -36, 57,  81,  -24, 15,  15,  15,  15,  40,  103,
    112, 109, -41, -68, 57,  61,  55,  -20, 3,   2,   15,  15,  15,  15,  17,  -16, -31, 58,  -4,  67,  -4,  -95,
    -5,  -72, 81,  15,  15,  15,  15,  15,  -7,  -16, -47, 112, 114, -26, -98, 53,  15,  -49, 26,  19,  15,  15,
    15,  15,  19,  8,   -57, -35, -79, 118, 29,  21,  37,  -48, 83,  7,   15,  15,  15,  15,  124, 113, -5,  15,
    -8,  107, -65, -88, 50,  -47, -80, -84, 15,  15,  15,  15,  3,   -45, 92,  42,  -20, -101, 106, -10, 89, 67,
    55,  10,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,
    15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15};
  int32_t filter_zp = -20;
  // 12 rows of 16 columns each.
  int32_t input_sum[12] = {0};
  int32_t correct_input_sum[] = {-7100, -4780, 580, -4880, -9460, -1420, -3120, -3260, -1840, -6960, -4800, -4800};
  DeConvPackInputSum(packed_a, input_sum, filter_zp, 12, 16, true);
  ASSERT_EQ(0, CompareOutputData(input_sum, correct_input_sum, 12, 0));
  // Same data, but summed as 4 rows of 48 (16 * 3) columns each.
  int32_t input_sum_4[4] = {0};
  int32_t correct_input_sum_4[] = {-18400, -13160, -7340, -12940};
  DeConvPackInputSum(packed_a, input_sum_4, filter_zp, 4, 16 * 3, true);
  ASSERT_EQ(0, CompareOutputData(input_sum_4, correct_input_sum_4, 4, 0));
}
// End-to-end check of the int8 deconv matmul pipeline: pack input, pack
// weight, compute input/weight zero-point sums, then run MatMulInt8_16x4 and
// compare each intermediate against a hand-computed golden.
TEST_F(TestDeconvInt8, MatMulOptTest1) {
  int8_t a_src_ptr[] = {-6,  76,  32,  80,  -73, 8,   -85, -3,  114, 80,  30,  42,  -41, 117, 62,  -76, -77, -111,
                        88,  105, 68,  105, -74, 13,  51,  94,  31,  -52, -92, -4,  -35, -71, 101, -93, 46,  -65,
                        57,  -41, -51, 77,  1,   9,   73,  -19, -36, 57,  81,  -24, 40,  103, 112, 109, -41, -68,
                        57,  61,  55,  -20, 3,   2,   17,  -16, -31, 58,  -4,  67,  -4,  -95, -5,  -72, 81,  15,
                        -7,  -16, -47, 112, 114, -26, -98, 53,  15,  -49, 26,  19,  19,  8,   -57, -35, -79, 118,
                        29,  21,  37,  -48, 83,  7,   124, 113, -5,  15,  -8,  107, -65, -88, 50,  -47, -80, -84,
                        3,   -45, 92,  42,  -20, -101, 106, -10, 89, 67,  55,  10};
  int32_t input_zp = 15;
  int8_t b_src_ptr[] = {
    92,   27,  22,   52,  -112, -20, -57, -2,  89,   32,  93,   -66, -25, -54, 94,  -97, -119, -98, 101,  -99,
    77,   -83, 76,   95,  59,   97,  8,   40,  -109, -20, 67,   -107, 37, -6,  -54, -20, -30,  36,  -106, -103,
    -3,   -86, -82,  59,  4,    -75, -50, -106, 55,  104, -117, -71, -20, -85, -77, 16,  -25,  -58, 4,    80,
    -75,  94,  32,   -68, 2,    40,  56,  -103, 11,  -98, -70,  -69, 0,   57,  -6,  82,  66,   -112, -61, 33,
    -77,  -53, 95,   -38, 87,   -46, -3,  81,  -47,  43,  21,   26,  -45, -57, 50,  -24, -82,  -114, 61,  46,
    -53,  78,  -24,  31,  -7,   37,  29,  38,  45,   106, 52,   -42, 31,  -6,  -61, -87, 2,    79,  -5,   -42,
    43,   -106, -104, 7,  91,   -63, 58,  97,  -15,  74,  -96,  15,  -23, -3,  -47, -97, 100,  -54, 26,   -46,
    35,   26,  100,  -80, 34,   -25, 96,  -67, -80,  -27, 66,   41,  41,  -43, -43, -38, -4,   -64, 31,   7,
    -8,   6,   -2,   39,  -119, 53,  75,  -91, -44,  77,  -62,  22,  -44, 78,  -67, -48, -115, -4,  43,   81,
    40,   -20, -5,   -89, 60,   -62, -4,  -48, 66,   -64, -69,  62,  17,  -89, 1,   87,  81,   32,  -29,  51,
    40,   27,  66,   67,  11,   -69, 85,  -79, -106, 55,  22,   -23, 62,  69,  -74, 49};
  int32_t filter_zp = -20;
  /*
   * ---------------------- pack input ------------------------- */
  int8_t packed_a[12 * 16] = {0};
  // Pad slots are filled with the input zero point, not zero.
  memset(packed_a, static_cast<int8_t>(input_zp), 12 * 16);
  int8_t correct_packed_a[] = {
    -6,  76,  32,  80,  -73, 8,   -85, -3,  114, 80,  30,  42,  15,  15,  15,  15,  -41, 117, 62,  -76, -77, -111,
    88,  105, 68,  105, -74, 13,  15,  15,  15,  15,  51,  94,  31,  -52, -92, -4,  -35, -71, 101, -93, 46,  -65,
    15,  15,  15,  15,  57,  -41, -51, 77,  1,   9,   73,  -19, -36, 57,  81,  -24, 15,  15,  15,  15,  40,  103,
    112, 109, -41, -68, 57,  61,  55,  -20, 3,   2,   15,  15,  15,  15,  17,  -16, -31, 58,  -4,  67,  -4,  -95,
    -5,  -72, 81,  15,  15,  15,  15,  15,  -7,  -16, -47, 112, 114, -26, -98, 53,  15,  -49, 26,  19,  15,  15,
    15,  15,  19,  8,   -57, -35, -79, 118, 29,  21,  37,  -48, 83,  7,   15,  15,  15,  15,  124, 113, -5,  15,
    -8,  107, -65, -88, 50,  -47, -80, -84, 15,  15,  15,  15,  3,   -45, 92,  42,  -20, -101, 106, -10, 89, 67,
    55,  10,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,
    15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15,  15};
  RowMajor2Row16x4MajorInt8(a_src_ptr, packed_a, 10, 12);
  ASSERT_EQ(0, CompareOutputData(packed_a, correct_packed_a, 16 * 12, 0));
  /*
   * ---------------------- pack weight ------------------------- */
  int8_t packed_b[16 * 3 * 8] = {0};
  // Weight pad slots are filled with the filter zero point.
  memset(packed_b, static_cast<int8_t>(filter_zp), 16 * 3 * 8);
  int8_t correct_packed_b[] = {
    92,   101, -30,  -77, 0,   21,  45,  58,  34,   -2,  40,  -29, -20, -20, -20, -20, 27,  -99, 36,  16,  57,
    26,   106, 97,   -25, 39,  -20, 51,  -20, -20,  -20, -20, 22,  77,  -106, -25, -6, -45, 52,  -15, 96,  -119,
    -5,   40,  -20,  -20, -20, -20, 52,  -83, -103, -58, 82,  -57, -42, 74,  -67, 53,  -89, 27,  -20, -20, -20,
    -20,  -112, 76,  -3,  4,   66,  50,  31,  -96,  -80, 75,  60,  66,  -20, -20, -20, -20, -20, 95,  -86, 80,
    -112, -24, -6,   15,  -27, -91, -62, 67,  -20,  -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20,
    -20,  -20, -20,  -20, -20, -20, -20, -20, -20,  -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20,
    -20,  -20, -57,  59,  -82, -75, -61, -82, -61,  -23, 66,  -44, -4,  11,  -20, -20, -20, -20, -2,  97,  59,
    94,   33,  -114, -87, -3,  41,  77,  -48, -69,  -20, -20, -20, -20, 89,  8,   4,   32,  -77, 61,  2,   -47,
    41,   -62, 66,   85,  -20, -20, -20, -20, 32,   40,  -75, -68, -53, 46,  79,  -97, -43, 22,  -64, -79, -20,
    -20,  -20, -20,  93,  -109, -50, 2,  95,  -53,  -5,  100, -43, -44, -69, -106, -20, -20, -20, -20, -66, -20,
    -106, 40,  -38,  78,  -42, -54, -38, 78,  62,   55,  -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20,
    -20,  -20, -20,  -20, -20, -20, -20, -20, -20,  -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20,
    -20,  -20, -20,  -20, -25, 67,  55,  56,  87,   -24, 43,  26,  -4,  -67, 17,  22,  -20, -20, -20, -20, -54,
    -107, 104, -103, -46, 31,  -106, -46, -64, -48, -89, -23, -20, -20, -20, -20, 94,  37,  -117, 11, -3,  -7,
    -104, 35,  31,   -115, 1,  62,  -20, -20, -20,  -20, -97, -6,  -71, -98, 81,  37,  7,   26,  7,   -4,  87,
    69,   -20, -20,  -20, -20, -119, -54, -20, -70, -47, 29,  91,  100, -8,  43,  81,  -74, -20, -20, -20, -20,
    -98,  -20, -85,  -69, 43,  38,  -63, -80, 6,    81,  32,  49,  -20, -20, -20, -20, -20, -20, -20, -20, -20,
    -20,  -20, -20,  -20, -20, -20, -20, -20, -20,  -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20,
    -20,  -20, -20,  -20, -20, -20};
  DeConvWeightTransInt8(b_src_ptr, packed_b, 12, 6, 3, true);
  /* kernel : 12x1x3x6 nhwc */
  ASSERT_EQ(0, CompareOutputData(packed_b, correct_packed_b, 16 * 3 * 8, 0));
  /*
   * ---------------------- calculate input_sum ------------------------- */
  int32_t input_sum[12] = {0};
  int32_t correct_input_sum[] = {-7100, -4780, 580, -4880, -9460, -1420, -3120, -3260, -1840, -6960, -4800, -4800};
  DeConvPackInputSum(packed_a, input_sum, filter_zp, 12, 16, true);
  ASSERT_EQ(0, CompareOutputData(input_sum, correct_input_sum, 12, 0));
  /*
   * ---------------------- calculate weight_sum ------------------------- */
  int32_t weight_sum[3 * 8] = {0};
  int32_t correct_weight_sum[] = {-7395, -8265, -3090, -435,  -5655, -1035, 0, 0, 1695,  -4770, -6630, 300,
                                  -765,  -2835, 0,     0,     -7395, 4665,  -2475, -4170, -2880, -1110, 0, 0};
  DeConvPackWeightSum(packed_b, weight_sum, input_zp, filter_zp, 16, 24, true);
  ASSERT_EQ(0, CompareOutputData(weight_sum, correct_weight_sum, 3 * 8, 0));
  /*
   * ---------------------- do matmul ------------------------- */
  int32_t tmp_output[12 * 24] = {0};
  int32_t correct_tmp_output[] = {
    -1624,  -19061, 1795,   -17119, 14706,  417,    7306,   1357,   9653,   -44022, 19414,  -36187, -2041,  6874,
    -5766,  3072,   9842,   2395,   12464,  -18826, -12267, -17853, 4617,   -19468, -15734, -6112,  2122,   14259,
    11098,  -9520,  12407,  -15239, 10309,  -34271, 9740,   -14607, -5027,  12313,  -508,   -10808, 0,      0,
    0,      0,      0,      0,      0,      0,      1604,   14898,  0,      0,      -8212,  9471,   0,      0,
    -23430, 6343,   0,      0,      4020,   -3740,  0,      0,      -9730,  22378,  0,      0,      4702,   4740,
    0,      0,      -7541,  5461,   0,      0,      -6633,  8356,   0,      0,      -16854, 9147,   0,      0,
    -4018,  -11524, 0,      0,      0,      0,      0,      0,      0,      0,      0,      0,      17194,  28501,
    13376,  -9359,  21454,  22425,  -21049, 6603,   23479,  -658,   12866,  9739,   -12173, -7558,  3862,   10238,
    4110,   31945,  10069,  -7376,  -1948,  -20322, 16439,  3260,   1712,   12743,  -8132,  -27744, 7633,   -33916,
    18755,  11300,  3686,   9222,   10103,  26102,  17,     13135,  785,    -6305,  0,      0,      0,      0,
    0,      0,      0,      0,      -27325, 14957,  0,      0,      -12191, -21866, 0,      0,      -21690, -18554,
    0,      0,      8737,   14529,  0,      0,      -1774,  -19575, 0,      0,      -12761, 13286,  0,      0,
    20523,  2488,   0,      0,      -12782, 12688,  0,      0,      -1194,  -10523, 0,      0,      -4044,  -9671,
    0,      0,      0,      0,      0,      0,      0,      0,      0,      0,      -4671,  -4173,  8675,   -8560,
    -1597,  -4946,  -20214, -6752,  -11439, 5138,   11119,  -17661, -6690,  -17301, -5541,  -4356,  22347,  -11778,
    2389,   -22030, -5176,  -242,   8786,   -994,   9104,   -7208,  24117,  3724,   -13648, -1840,  12265,  10347,
    -10325, 7184,   19374,  -29001, 3979,   -6704,  -23278, -8124,  0,      0,      0,      0,      0,      0,
    0,      0,      -9132,  8560,   0,      0,      19264,  -10169, 0,      0,      -15133, -13678, 0,      0,
    7894,   -51,    0,      0,      -4775,  -29785, 0,      0,      -12597, 4088,   0,      0,      -17420, 1815,
    0,      0,      15796,  3101,   0,      0,      -37969, -10818, 0,      0,      12714,  -7827,  0,      0,
    0,      0,      0,      0,      0,      0,      0,      0};
  MatMulInt8_16x4(packed_a, packed_b, tmp_output, 12, 24, 16, input_sum, weight_sum);
  ASSERT_EQ(0, CompareOutputData(tmp_output, correct_tmp_output, 12 * 3 * 8, 0));
}
/* Builds the int8 input/weight/output tensors and ConvParameter for the
 * DeConvInt8Test1 case and returns the output element count.
 * The caller owns the tensors pushed into inputs_/outputs_ and the
 * malloc'd *correct buffer (freed with free()). */
int DeConvInt8TestInit1(std::vector<lite::Tensor *> *inputs_, std::vector<lite::Tensor *> *outputs_,
                        ConvParameter *conv_param, int8_t **correct) {
  /* float data from deconv fp32 testcase : DeConvTestInit2 */
  /* vq = (vi - zp) * s     vi = vq / s + zp */
  auto *in_t = new Tensor(kNumberTypeInt8, {1, 4, 2, 3}, mindspore::NHWC, lite::Tensor::Category::VAR);
  in_t->MallocData();
  int8_t in[] = {6, 43, 38, 24, -8, 12, 41, -24, -20, 41, -19, -6, -26, -6, 23, -31, 34, 45, 8, 45, -39, -27, -48, 12};
  memcpy(in_t->MutableData(), in, sizeof(int8_t) * in_t->ElementsNum());
  auto *in_quant_arg = new QuantArg();
  in_quant_arg->zeroPoint = -19, in_quant_arg->scale = 0.31228156;
  in_t->AddQuantParam(*in_quant_arg);
  /* AddQuantParam takes the arg by value/copy; release the temporary to avoid a leak
   * (original code never freed these QuantArg allocations). */
  delete in_quant_arg;
  inputs_->push_back(in_t);
  auto *weight_t = new Tensor(kNumberTypeInt8, {3, 3, 3, 2}, mindspore::NHWC, lite::Tensor::Category::CONST_TENSOR);
  weight_t->MallocData();
  int8_t weight[] = {66, 89, 98, 74, 95, 86, 125, 95, 105, 83, 116, 94, 90, 80, 86, 59, 72, 92,
                     64, 76, 92, 80, 90, 87, 106, 55, 105, 60, 75, 53, 81, 81, 98, 81, 86, 59,
                     74, 82, 97, 105, 71, 67, 79, 87, 72, 79, 80, 76, 96, 80, 83, 71, 61, 79};
  memcpy(weight_t->MutableData(), weight, sizeof(int8_t) * weight_t->ElementsNum());
  auto *w_quant_arg = new QuantArg();
  w_quant_arg->zeroPoint = 83, w_quant_arg->scale = 0.023649725490196;
  weight_t->AddQuantParam(*w_quant_arg);
  delete w_quant_arg;
  inputs_->push_back(weight_t);
  auto *out_t = new Tensor(kNumberTypeInt8, {1, 7, 3, 2}, mindspore::NHWC, lite::Tensor::Category::VAR);
  out_t->MallocData();
  auto *out_quant_arg = new QuantArg();
  out_quant_arg->zeroPoint = 31, out_quant_arg->scale = 0.3439215686275;
  out_t->AddQuantParam(*out_quant_arg);
  delete out_quant_arg;
  outputs_->push_back(out_t);
  /* Expected result, stored NCHW and repacked to NHWC to match the kernel output layout. */
  *correct = reinterpret_cast<int8_t *>(malloc(out_t->ElementsNum() * sizeof(int8_t)));
  int8_t co_nchw[] = {57, 76, 49, 71, 8,  61, 57, 127, 56, 46, -11, 61, 23, 31, 34, 50, 59, 49, 78, 17, 6,
                      -3, -5, 23, -11, 6, -5, 33, 64, 30, 21, 18, 25, 21, -15, 0, 4, 31, 36, 2, 17, 43};
  PackNCHWToNHWCInt8(co_nchw, *correct, out_t->Batch(), out_t->Width() * out_t->Height(), out_t->Channel());
  /* 3x3 kernel, stride 2, pad 1, dilation 1 — matches the fp32 reference case. */
  conv_param->kernel_h_ = conv_param->kernel_w_ = 3;
  conv_param->pad_u_ = conv_param->pad_l_ = 1;
  conv_param->stride_h_ = conv_param->stride_w_ = 2;
  conv_param->dilation_h_ = conv_param->dilation_w_ = 1;
  return out_t->ElementsNum();
}
// End-to-end test of the int8 deconvolution CPU kernel: builds quantized
// tensors via DeConvInt8TestInit1, runs the kernel single-threaded, and
// compares against the precomputed expected output with a tolerance of 3.
TEST_F(TestDeconvInt8, DeConvInt8Test1) {
  std::vector<lite::Tensor *> inputs_;
  std::vector<lite::Tensor *> outputs_;
  auto deconv_param = new ConvParameter();
  auto *ctx = new lite::InnerContext;
  ctx->thread_num_ = 1;  // single thread keeps the result deterministic
  ASSERT_EQ(lite::RET_OK, ctx->Init());
  int8_t *correct;
  int total_size = DeConvInt8TestInit1(&inputs_, &outputs_, deconv_param, &correct);
  auto *deconv =
    new mindspore::kernel::DeConvInt8CPUKernel(reinterpret_cast<OpParameter *>(deconv_param), inputs_, outputs_, ctx);
  // NOTE(review): Init()/Run() return codes are not checked — consider asserting RET_OK.
  deconv->Init();
  deconv->Run();
  // Quantized output may differ slightly from the reference; allow an absolute error of 3.
  ASSERT_EQ(0, CompareOutputData(reinterpret_cast<int8_t *>(outputs_[0]->MutableData()), correct, total_size, 3));
  delete deconv_param;
  delete deconv;
  delete ctx;
  for (auto t : inputs_) delete t;
  for (auto t : outputs_) delete t;
  free(correct);
}
} // namespace mindspore
|
C++
|
Apache-2.0
|
Ming-blue/mindspore/mindspore/lite/test/ut/src/runtime/kernel/arm/int8/deconv_int8_tests.cc
|
91182544-3c03-404f-aceb-d0290ec8b033
|
[]
|
[]
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.store.kahadb.scheduler;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import org.apache.activemq.broker.scheduler.JobScheduler;
import org.apache.activemq.broker.scheduler.JobSchedulerStore;
import org.apache.activemq.protobuf.Buffer;
import org.apache.activemq.store.kahadb.AbstractKahaDBStore;
import org.apache.activemq.store.kahadb.JournalCommand;
import org.apache.activemq.store.kahadb.KahaDBMetaData;
import org.apache.activemq.store.kahadb.Visitor;
import org.apache.activemq.store.kahadb.data.KahaAddScheduledJobCommand;
import org.apache.activemq.store.kahadb.data.KahaDestroySchedulerCommand;
import org.apache.activemq.store.kahadb.data.KahaRemoveScheduledJobCommand;
import org.apache.activemq.store.kahadb.data.KahaRemoveScheduledJobsCommand;
import org.apache.activemq.store.kahadb.data.KahaRescheduleJobCommand;
import org.apache.activemq.store.kahadb.data.KahaTraceCommand;
import org.apache.activemq.store.kahadb.disk.index.BTreeVisitor;
import org.apache.activemq.store.kahadb.disk.journal.DataFile;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
import org.apache.activemq.store.kahadb.scheduler.legacy.LegacyStoreReplayer;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.IOHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/*
* @org.apache.xbean.XBean element="kahaDBJobScheduler"
*/
public class JobSchedulerStoreImpl extends AbstractKahaDBStore implements JobSchedulerStore {
private static final Logger LOG = LoggerFactory.getLogger(JobSchedulerStoreImpl.class);
private JobSchedulerKahaDBMetaData metaData = new JobSchedulerKahaDBMetaData(this);
private final MetaDataMarshaller metaDataMarshaller = new MetaDataMarshaller(this);
private final Map<String, JobSchedulerImpl> schedulers = new HashMap<String, JobSchedulerImpl>();
private File legacyStoreArchiveDirectory;
/**
* The Scheduler Token is used to identify base revisions of the Scheduler store. A store
* based on the initial scheduler design will not have this tag in it's meta-data and will
* indicate an update is needed. Later versions of the scheduler can also change this value
* to indicate incompatible store bases which require complete meta-data and journal rewrites
* instead of simpler meta-data updates.
*/
static final UUID SCHEDULER_STORE_TOKEN = UUID.fromString("57ed642b-1ee3-47b3-be6d-b7297d500409");
/**
* The default scheduler store version. All new store instance will be given this version and
* earlier versions will be updated to this version.
*/
static final int CURRENT_VERSION = 1;
    /**
     * Returns the named JobScheduler, lazily creating, indexing and (if the
     * store is already started) starting it on first access. Creation is
     * performed under the index write lock inside a page-file transaction.
     */
    @Override
    public JobScheduler getJobScheduler(final String name) throws Exception {
        this.indexLock.writeLock().lock();
        try {
            JobSchedulerImpl result = this.schedulers.get(name);
            if (result == null) {
                final JobSchedulerImpl js = new JobSchedulerImpl(this);
                js.setName(name);
                getPageFile().tx().execute(new Transaction.Closure<IOException>() {
                    @Override
                    public void execute(Transaction tx) throws IOException {
                        // Allocate the scheduler's indexes and register it in the meta-data map.
                        js.createIndexes(tx);
                        js.load(tx);
                        metaData.getJobSchedulers().put(tx, name, js);
                    }
                });
                result = js;
                this.schedulers.put(name, js);
                if (isStarted()) {
                    result.start();
                }
                // Persist the newly created scheduler's pages immediately.
                this.pageFile.flush();
            }
            return result;
        } finally {
            this.indexLock.writeLock().unlock();
        }
    }
    /**
     * Stops and removes the named scheduler along with all of its indexed
     * jobs. Returns false when no scheduler with that name exists.
     */
    @Override
    public boolean removeJobScheduler(final String name) throws Exception {
        boolean result = false;
        this.indexLock.writeLock().lock();
        try {
            final JobSchedulerImpl js = this.schedulers.remove(name);
            result = js != null;
            if (result) {
                // Stop before touching the index so no new jobs are dispatched mid-removal.
                js.stop();
                getPageFile().tx().execute(new Transaction.Closure<IOException>() {
                    @Override
                    public void execute(Transaction tx) throws IOException {
                        metaData.getJobSchedulers().remove(tx, name);
                        js.removeAll(tx);
                    }
                });
            }
        } finally {
            this.indexLock.writeLock().unlock();
        }
        return result;
    }
/**
* Sets the directory where the legacy scheduler store files are archived before an
* update attempt is made. Both the legacy index files and the journal files are moved
* to this folder prior to an upgrade attempt.
*
* @param directory
* The directory to move the legacy Scheduler Store files to.
*/
public void setLegacyStoreArchiveDirectory(File directory) {
this.legacyStoreArchiveDirectory = directory;
}
/**
* Gets the directory where the legacy Scheduler Store files will be archived if the
* broker is started and an existing Job Scheduler Store from an old version is detected.
*
* @return the directory where scheduler store legacy files are archived on upgrade.
*/
public File getLegacyStoreArchiveDirectory() {
if (this.legacyStoreArchiveDirectory == null) {
this.legacyStoreArchiveDirectory = new File(getDirectory(), "legacySchedulerStore");
}
return this.legacyStoreArchiveDirectory.getAbsoluteFile();
}
    /**
     * Opens the journal and page file. Two recovery paths exist: an
     * UnknownStoreVersionException triggers a full upgrade from the legacy
     * store format; any other failure falls back to rebuilding the index by
     * journal replay (archiving or deleting the corrupt index first).
     */
    @Override
    public void load() throws IOException {
        if (opened.compareAndSet(false, true)) {
            getJournal().start();
            try {
                loadPageFile();
            } catch (UnknownStoreVersionException ex) {
                LOG.info("Can't start until store update is performed.");
                upgradeFromLegacy();
                // Restart with the updated store
                getJournal().start();
                loadPageFile();
                LOG.info("Update from legacy Scheduler store completed successfully.");
            } catch (Throwable t) {
                LOG.warn("Index corrupted. Recovering the index through journal replay. Cause: {}", t.toString());
                LOG.debug("Index load failure", t);
                // try to recover index
                try {
                    pageFile.unload();
                } catch (Exception ignore) {
                }
                if (isArchiveCorruptedIndex()) {
                    pageFile.archive();
                } else {
                    pageFile.delete();
                }
                // Start from fresh meta-data; recover() below rebuilds from the journal.
                metaData = new JobSchedulerKahaDBMetaData(this);
                pageFile = null;
                loadPageFile();
            }
            startCheckpoint();
            recover();
        }
        LOG.info("{} started.", this);
    }
    /**
     * Shuts the store down: stops all schedulers, marks the meta-data CLOSED
     * and stores it, runs a final checkpoint, joins the checkpoint thread,
     * then unloads the page file and closes the journal.
     */
    @Override
    public void unload() throws IOException {
        if (opened.compareAndSet(true, false)) {
            for (JobSchedulerImpl js : this.schedulers.values()) {
                try {
                    js.stop();
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }
            this.indexLock.writeLock().lock();
            try {
                if (pageFile != null && pageFile.isLoaded()) {
                    // CLOSED_STATE marks a clean shutdown so the next load can skip replay.
                    metaData.setState(KahaDBMetaData.CLOSED_STATE);
                    if (metaData.getPage() != null) {
                        pageFile.tx().execute(new Transaction.Closure<IOException>() {
                            @Override
                            public void execute(Transaction tx) throws IOException {
                                tx.store(metaData.getPage(), metaDataMarshaller, true);
                            }
                        });
                    }
                }
            } finally {
                this.indexLock.writeLock().unlock();
            }
            checkpointLock.writeLock().lock();
            try {
                if (metaData.getPage() != null) {
                    checkpointUpdate(getCleanupOnStop());
                }
            } finally {
                checkpointLock.writeLock().unlock();
            }
            synchronized (checkpointThreadLock) {
                if (checkpointThread != null) {
                    try {
                        checkpointThread.join();
                        checkpointThread = null;
                    } catch (InterruptedException e) {
                        // NOTE(review): interrupt status is swallowed here — presumably
                        // intentional during shutdown, but worth confirming.
                    }
                }
            }
            if (pageFile != null) {
                pageFile.unload();
                pageFile = null;
            }
            if (this.journal != null) {
                journal.close();
                journal = null;
            }
            // Reset meta-data so a subsequent load() starts from a clean object.
            metaData = new JobSchedulerKahaDBMetaData(this);
        }
        LOG.info("{} stopped.", this);
    }
    /**
     * Loads (or initializes) the page file under the index write lock. A
     * zero-page file gets a fresh meta-data page allocated at page 0;
     * otherwise page 0 is read back as the meta-data. All registered
     * schedulers are then loaded and started.
     */
    private void loadPageFile() throws IOException {
        this.indexLock.writeLock().lock();
        try {
            final PageFile pageFile = getPageFile();
            pageFile.load();
            pageFile.tx().execute(new Transaction.Closure<IOException>() {
                @Override
                public void execute(Transaction tx) throws IOException {
                    if (pageFile.getPageCount() == 0) {
                        // Brand-new store: meta-data always lives at page 0.
                        Page<JobSchedulerKahaDBMetaData> page = tx.allocate();
                        assert page.getPageId() == 0;
                        page.set(metaData);
                        metaData.setPage(page);
                        metaData.setState(KahaDBMetaData.CLOSED_STATE);
                        metaData.initialize(tx);
                        tx.store(metaData.getPage(), metaDataMarshaller, true);
                    } else {
                        // Existing store: read the meta-data back from page 0.
                        Page<JobSchedulerKahaDBMetaData> page = null;
                        page = tx.load(0, metaDataMarshaller);
                        metaData = page.get();
                        metaData.setPage(page);
                    }
                    metaData.load(tx);
                    metaData.loadScheduler(tx, schedulers);
                    for (JobSchedulerImpl js : schedulers.values()) {
                        try {
                            js.start();
                        } catch (Exception e) {
                            JobSchedulerStoreImpl.LOG.error("Failed to load " + js.getName(), e);
                        }
                    }
                }
            });
            pageFile.flush();
        } finally {
            this.indexLock.writeLock().unlock();
        }
    }
    /**
     * Upgrades an old-format scheduler store: archives the legacy index and
     * journal files, rebuilds a fresh store, then replays every scheduled job
     * from the archived files as add operations into the new store.
     */
    private void upgradeFromLegacy() throws IOException {
        // Close everything so the legacy files can be moved safely.
        journal.close();
        journal = null;
        try {
            pageFile.unload();
            pageFile = null;
        } catch (Exception ignore) {}
        File storeDir = getDirectory().getAbsoluteFile();
        File storeArchiveDir = getLegacyStoreArchiveDirectory();
        LOG.info("Attempting to move old store files from {} to {}", storeDir, storeArchiveDir);
        // Move only the known store files, locks and other items left in place.
        IOHelper.moveFiles(storeDir, storeArchiveDir, new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                // .data/.redo are index files, .log are journal files.
                if (name.endsWith(".data") || name.endsWith(".redo") || name.endsWith(".log")) {
                    return true;
                }
                return false;
            }
        });
        // We reset everything to clean state, then we can read from the old
        // scheduler store and replay the scheduled jobs into this one as adds.
        getJournal().start();
        metaData = new JobSchedulerKahaDBMetaData(this);
        pageFile = null;
        loadPageFile();
        LegacyStoreReplayer replayer = new LegacyStoreReplayer(getLegacyStoreArchiveDirectory());
        replayer.load();
        replayer.startReplay(this);
        // Cleanup after replay and store what we've done.
        pageFile.tx().execute(new Transaction.Closure<IOException>() {
            @Override
            public void execute(Transaction tx) throws IOException {
                tx.store(metaData.getPage(), metaDataMarshaller, true);
            }
        });
        checkpointUpdate(true);
        // Caller (load) restarts the journal and reloads the page file afterwards.
        getJournal().close();
        getPageFile().unload();
    }
    /**
     * Persists the meta-data page and, when cleanup is requested, garbage
     * collects journal files. A file survives GC if it (a) holds the last
     * update location, (b) is referenced by the journal reference-count
     * index, or (c) contains a remove command whose corresponding add command
     * still lives in an existing journal file.
     */
    @Override
    protected void checkpointUpdate(Transaction tx, boolean cleanup) throws IOException {
        LOG.debug("Job Scheduler Store Checkpoint started.");
        // reflect last update exclusive of current checkpoint
        Location lastUpdate = metaData.getLastUpdateLocation();
        metaData.setState(KahaDBMetaData.OPEN_STATE);
        tx.store(metaData.getPage(), metaDataMarshaller, true);
        pageFile.flush();
        if (cleanup) {
            // Start with every journal file as a GC candidate and whittle down.
            final TreeSet<Integer> completeFileSet = new TreeSet<Integer>(journal.getFileMap().keySet());
            final TreeSet<Integer> gcCandidateSet = new TreeSet<Integer>(completeFileSet);
            LOG.trace("Last update: {}, full gc candidates set: {}", lastUpdate, gcCandidateSet);
            if (lastUpdate != null) {
                gcCandidateSet.remove(lastUpdate.getDataFileId());
            }
            // Any file with a positive reference count is still in use.
            this.metaData.getJournalRC().visit(tx, new BTreeVisitor<Integer, Integer>() {
                @Override
                public void visit(List<Integer> keys, List<Integer> values) {
                    for (Integer key : keys) {
                        if (gcCandidateSet.remove(key)) {
                            LOG.trace("Removed referenced file: {} from GC set", key);
                        }
                    }
                }
                @Override
                public boolean isInterestedInKeysBetween(Integer first, Integer second) {
                    return true;
                }
            });
            LOG.trace("gc candidates after reference check: {}", gcCandidateSet);
            // If there are GC candidates then check the remove command location to see
            // if any of them can go or if they must stay in order to ensure proper recover.
            //
            // A log containing any remove commands must be kept until all the logs with the
            // add commands for all the removed jobs have been dropped.
            if (!gcCandidateSet.isEmpty()) {
                Iterator<Entry<Integer, List<Integer>>> removals = metaData.getRemoveLocationTracker().iterator(tx);
                List<Integer> orphans = new ArrayList<Integer>();
                while (removals.hasNext()) {
                    boolean orphanedRemove = true;
                    Entry<Integer, List<Integer>> entry = removals.next();
                    // If this log is not a GC candidate then there's no need to do a check to rule it out
                    if (gcCandidateSet.contains(entry.getKey())) {
                        for (Integer addLocation : entry.getValue()) {
                            if (completeFileSet.contains(addLocation)) {
                                LOG.trace("A remove in log {} has an add still in existance in {}.", entry.getKey(), addLocation);
                                orphanedRemove = false;
                                break;
                            }
                        }
                        // If it's not orphaned than we can't remove it, otherwise we
                        // stop tracking it it's log will get deleted on the next check.
                        if (!orphanedRemove) {
                            gcCandidateSet.remove(entry.getKey());
                        } else {
                            LOG.trace("All removes in log {} are orphaned, file can be GC'd", entry.getKey());
                            orphans.add(entry.getKey());
                        }
                    }
                }
                // Drop all orphaned removes from the tracker.
                for (Integer orphan : orphans) {
                    metaData.getRemoveLocationTracker().remove(tx, orphan);
                }
            }
            LOG.trace("gc candidates after removals check: {}", gcCandidateSet);
            if (!gcCandidateSet.isEmpty()) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Cleanup removing the data files: " + gcCandidateSet);
                }
                journal.removeDataFiles(gcCandidateSet);
            }
        }
        LOG.debug("Job Scheduler Store Checkpoint complete.");
    }
/**
* Adds a reference for the journal log file pointed to by the given Location value.
*
* To prevent log files in the journal that still contain valid data that needs to be
* kept in order to allow for recovery the logs must have active references. Each Job
* scheduler should ensure that the logs are accurately referenced.
*
* @param tx
* The TX under which the update is to be performed.
* @param location
* The location value to update the reference count of.
*
* @throws IOException if an error occurs while updating the journal references table.
*/
protected void incrementJournalCount(Transaction tx, Location location) throws IOException {
int logId = location.getDataFileId();
Integer val = metaData.getJournalRC().get(tx, logId);
int refCount = val != null ? val.intValue() + 1 : 1;
metaData.getJournalRC().put(tx, logId, refCount);
}
/**
* Removes one reference for the Journal log file indicated in the given Location value.
*
* The references are used to track which log files cannot be GC'd. When the reference count
* on a log file reaches zero the file id is removed from the tracker and the log will be
* removed on the next check point update.
*
* @param tx
* The TX under which the update is to be performed.
* @param location
* The location value to update the reference count of.
*
* @throws IOException if an error occurs while updating the journal references table.
*/
protected void decrementJournalCount(Transaction tx, Location location) throws IOException {
int logId = location.getDataFileId();
Integer refCount = metaData.getJournalRC().get(tx, logId);
if (refCount != null) {
int refCountValue = refCount;
refCountValue--;
if (refCountValue <= 0) {
metaData.getJournalRC().remove(tx, logId);
} else {
metaData.getJournalRC().put(tx, logId, refCountValue);
}
}
}
/**
* Updates the Job removal tracking index with the location of a remove command and the
* original JobLocation entry.
*
* The JobLocation holds the locations in the logs where the add and update commands for
* a job stored. The log file containing the remove command can only be discarded after
* both the add and latest update log files have also been discarded.
*
* @param tx
* The TX under which the update is to be performed.
* @param location
* The location value to reference a remove command.
* @param removedJob
* The original JobLocation instance that holds the add and update locations
*
* @throws IOException if an error occurs while updating the remove location tracker.
*/
protected void referenceRemovedLocation(Transaction tx, Location location, JobLocation removedJob) throws IOException {
int logId = location.getDataFileId();
List<Integer> removed = this.metaData.getRemoveLocationTracker().get(tx, logId);
if (removed == null) {
removed = new ArrayList<Integer>();
}
removed.add(removedJob.getLocation().getDataFileId());
this.metaData.getRemoveLocationTracker().put(tx, logId, removed);
}
    /**
     * Retrieve the scheduled Job's byte blob from the journal.
     *
     * @param location
     *      The location of the KahaAddScheduledJobCommand that originated the Job.
     *
     * @return a ByteSequence containing the payload of the scheduled Job.
     *
     * @throws IOException if an error occurs while reading the payload value.
     */
    protected ByteSequence getPayload(Location location) throws IOException {
        // The command at this location is expected to be an add command; the
        // cast throws ClassCastException if the journal entry is a different type.
        KahaAddScheduledJobCommand job = (KahaAddScheduledJobCommand) this.load(location);
        Buffer payload = job.getPayload();
        return new ByteSequence(payload.getData(), payload.getOffset(), payload.getLength());
    }
    /** Acquires the index read lock (for callers iterating scheduler state). */
    public void readLockIndex() {
        this.indexLock.readLock().lock();
    }
    /** Releases the index read lock. */
    public void readUnlockIndex() {
        this.indexLock.readLock().unlock();
    }
    /** Acquires the index write lock (for callers mutating scheduler state). */
    public void writeLockIndex() {
        this.indexLock.writeLock().lock();
    }
    /** Releases the index write lock. */
    public void writeUnlockIndex() {
        this.indexLock.writeLock().unlock();
    }
    @Override
    public String toString() {
        return "JobSchedulerStore: " + getDirectory();
    }
    /** Base file name for this store's page file ("scheduleDB.data" etc.). */
    @Override
    protected String getPageFileName() {
        return "scheduleDB";
    }
    /** Default data directory when none is configured explicitly. */
    @Override
    protected File getDefaultDataDirectory() {
        return new File(IOHelper.getDefaultDataDirectory(), "delayedDB");
    }
    /**
     * Marshals the store's meta-data page to and from the page file; the
     * owning store is needed so deserialized meta-data can reference it.
     */
    private class MetaDataMarshaller extends VariableMarshaller<JobSchedulerKahaDBMetaData> {
        private final JobSchedulerStoreImpl store;
        MetaDataMarshaller(JobSchedulerStoreImpl store) {
            this.store = store;
        }
        @Override
        public JobSchedulerKahaDBMetaData readPayload(DataInput dataIn) throws IOException {
            JobSchedulerKahaDBMetaData rc = new JobSchedulerKahaDBMetaData(store);
            rc.read(dataIn);
            return rc;
        }
        @Override
        public void writePayload(JobSchedulerKahaDBMetaData object, DataOutput dataOut) throws IOException {
            object.write(dataOut);
        }
    }
    /**
     * Called during index recovery to rebuild the index from the last known good location. For
     * entries that occur before the last known good position we just ignore them and move on.
     *
     * @param data
     *      the command read from the Journal which should be used to update the index.
     * @param location
     *      the location in the journal where the command was read.
     * @param inDoubtlocation
     *      the last location known to be reflected in the index; commands at or
     *      after it are replayed.
     *
     * @throws IOException if an error occurs while recovering the index.
     */
    protected void doRecover(JournalCommand<?> data, final Location location, final Location inDoubtlocation) throws IOException {
        if (inDoubtlocation != null && location.compareTo(inDoubtlocation) >= 0) {
            process(data, location);
        }
    }
    /**
     * Called during recovery to allow the store to rebuild from scratch.
     *
     * Each scheduled-job command is dispatched to the owning scheduler under
     * the index write lock inside a page-file transaction, and the last
     * update location is advanced afterwards.
     *
     * @param data
     *      The command to process, which was read from the Journal.
     * @param location
     *      The location of the command in the Journal.
     *
     * @throws IOException if an error occurs during command processing.
     */
    @Override
    protected void process(JournalCommand<?> data, final Location location) throws IOException {
        data.visit(new Visitor() {
            @Override
            public void visit(final KahaAddScheduledJobCommand command) throws IOException {
                final JobSchedulerImpl scheduler;
                indexLock.writeLock().lock();
                try {
                    try {
                        // getJobScheduler lazily creates the scheduler if it doesn't exist yet.
                        scheduler = (JobSchedulerImpl) getJobScheduler(command.getScheduler());
                    } catch (Exception e) {
                        throw new IOException(e);
                    }
                    getPageFile().tx().execute(new Transaction.Closure<IOException>() {
                        @Override
                        public void execute(Transaction tx) throws IOException {
                            scheduler.process(tx, command, location);
                        }
                    });
                    processLocation(location);
                } finally {
                    indexLock.writeLock().unlock();
                }
            }
            @Override
            public void visit(final KahaRemoveScheduledJobCommand command) throws IOException {
                final JobSchedulerImpl scheduler;
                indexLock.writeLock().lock();
                try {
                    try {
                        scheduler = (JobSchedulerImpl) getJobScheduler(command.getScheduler());
                    } catch (Exception e) {
                        throw new IOException(e);
                    }
                    getPageFile().tx().execute(new Transaction.Closure<IOException>() {
                        @Override
                        public void execute(Transaction tx) throws IOException {
                            scheduler.process(tx, command, location);
                        }
                    });
                    processLocation(location);
                } finally {
                    indexLock.writeLock().unlock();
                }
            }
            @Override
            public void visit(final KahaRemoveScheduledJobsCommand command) throws IOException {
                final JobSchedulerImpl scheduler;
                indexLock.writeLock().lock();
                try {
                    try {
                        scheduler = (JobSchedulerImpl) getJobScheduler(command.getScheduler());
                    } catch (Exception e) {
                        throw new IOException(e);
                    }
                    getPageFile().tx().execute(new Transaction.Closure<IOException>() {
                        @Override
                        public void execute(Transaction tx) throws IOException {
                            scheduler.process(tx, command, location);
                        }
                    });
                    processLocation(location);
                } finally {
                    indexLock.writeLock().unlock();
                }
            }
            @Override
            public void visit(final KahaRescheduleJobCommand command) throws IOException {
                final JobSchedulerImpl scheduler;
                indexLock.writeLock().lock();
                try {
                    try {
                        scheduler = (JobSchedulerImpl) getJobScheduler(command.getScheduler());
                    } catch (Exception e) {
                        throw new IOException(e);
                    }
                    getPageFile().tx().execute(new Transaction.Closure<IOException>() {
                        @Override
                        public void execute(Transaction tx) throws IOException {
                            scheduler.process(tx, command, location);
                        }
                    });
                    processLocation(location);
                } finally {
                    indexLock.writeLock().unlock();
                }
            }
            @Override
            public void visit(final KahaDestroySchedulerCommand command) {
                try {
                    removeJobScheduler(command.getScheduler());
                } catch (Exception e) {
                    // Best-effort: a failed destroy is logged but does not abort replay.
                    LOG.warn("Failed to remove scheduler: {}", command.getScheduler());
                }
                processLocation(location);
            }
            @Override
            public void visit(KahaTraceCommand command) {
                // Trace entries carry no state; just advance the last update location.
                processLocation(location);
            }
        });
    }
    /** Records the given journal location as the store's last update, under the index write lock. */
    protected void processLocation(final Location location) {
        indexLock.writeLock().lock();
        try {
            this.metaData.setLastUpdateLocation(location);
        } finally {
            indexLock.writeLock().unlock();
        }
    }
    /**
     * We recover from the Journal logs as needed to restore the index.
     *
     * Replays journal commands from the recovery position forward, then runs
     * recoverIndex to undo index entries not backed by the journal.
     *
     * @throws IllegalStateException
     * @throws IOException
     */
    private void recover() throws IllegalStateException, IOException {
        this.indexLock.writeLock().lock();
        try {
            long start = System.currentTimeMillis();
            Location lastIndoubtPosition = getRecoveryPosition();
            Location recoveryPosition = lastIndoubtPosition;
            if (recoveryPosition != null) {
                int redoCounter = 0;
                LOG.info("Recovering from the scheduled job journal @" + recoveryPosition);
                while (recoveryPosition != null) {
                    try {
                        JournalCommand<?> message = load(recoveryPosition);
                        metaData.setLastUpdateLocation(recoveryPosition);
                        doRecover(message, recoveryPosition, lastIndoubtPosition);
                        redoCounter++;
                    } catch (IOException failedRecovery) {
                        if (isIgnoreMissingJournalfiles()) {
                            LOG.debug("Failed to recover data at position:" + recoveryPosition, failedRecovery);
                            // track this dud location
                            journal.corruptRecoveryLocation(recoveryPosition);
                        } else {
                            throw new IOException("Failed to recover data at position:" + recoveryPosition, failedRecovery);
                        }
                    }
                    recoveryPosition = journal.getNextLocation(recoveryPosition);
                    // Periodic progress logging for very large journals.
                    if (LOG.isInfoEnabled() && redoCounter % 100000 == 0) {
                        LOG.info("@ {}, {} entries recovered ..", recoveryPosition, redoCounter);
                    }
                }
                long end = System.currentTimeMillis();
                LOG.info("Recovery replayed {} operations from the journal in {} seconds.",
                         redoCounter, ((end - start) / 1000.0f));
            }
            // We may have to undo some index updates.
            pageFile.tx().execute(new Transaction.Closure<IOException>() {
                @Override
                public void execute(Transaction tx) throws IOException {
                    recoverIndex(tx);
                }
            });
        } finally {
            this.indexLock.writeLock().unlock();
        }
    }
private Location getRecoveryPosition() throws IOException {
// This loads the first position and we completely rebuild the index if we
// do not override it with some known recovery start location.
Location result = null;
if (!isForceRecoverIndex()) {
if (metaData.getLastUpdateLocation() != null) {
result = metaData.getLastUpdateLocation();
}
}
return journal.getNextLocation(result);
}
    /**
     * Reconciles the index with the journal after replay: drops jobs indexed
     * past the journal's last append point, then detects missing/corrupt
     * journal files and either fails or (when configured to ignore them)
     * removes every job that references them.
     */
    private void recoverIndex(Transaction tx) throws IOException {
        long start = System.currentTimeMillis();
        // It is possible index updates got applied before the journal updates..
        // in that case we need to removed references to Jobs that are not in the journal
        final Location lastAppendLocation = journal.getLastAppendLocation();
        long undoCounter = 0;
        // Go through all the jobs in each scheduler and check if any are added after
        // the last appended location and remove those.  For now we ignore the update
        // location since the scheduled job will update itself after the next fire and
        // a new update will replace any existing update.
        for (Iterator<Map.Entry<String, JobSchedulerImpl>> i = metaData.getJobSchedulers().iterator(tx); i.hasNext();) {
            Map.Entry<String, JobSchedulerImpl> entry = i.next();
            JobSchedulerImpl scheduler = entry.getValue();
            List<JobLocation> jobs = scheduler.getAllScheduledJobs(tx);
            for (JobLocation job : jobs) {
                if (job.getLocation().compareTo(lastAppendLocation) >= 0) {
                    if (scheduler.removeJobAtTime(tx, job.getJobId(), job.getNextTime())) {
                        LOG.trace("Removed Job past last appened in the journal: {}", job.getJobId());
                        undoCounter++;
                    }
                }
            }
        }
        if (undoCounter > 0) {
            // The rolled back operations are basically in flight journal writes.  To avoid getting
            // these the end user should do sync writes to the journal.
            long end = System.currentTimeMillis();
            LOG.info("Rolled back {} messages from the index in {} seconds.", undoCounter, ((end - start) / 1000.0f));
            undoCounter = 0;
        }
        // Now we check for missing and corrupt journal files.
        // 1. Collect the set of all referenced journal files based on the Location of the
        //    the scheduled jobs and the marked last update field.
        HashSet<Integer> missingJournalFiles = new HashSet<Integer>();
        for (Iterator<Map.Entry<String, JobSchedulerImpl>> i = metaData.getJobSchedulers().iterator(tx); i.hasNext();) {
            Map.Entry<String, JobSchedulerImpl> entry = i.next();
            JobSchedulerImpl scheduler = entry.getValue();
            List<JobLocation> jobs = scheduler.getAllScheduledJobs(tx);
            for (JobLocation job : jobs) {
                missingJournalFiles.add(job.getLocation().getDataFileId());
                if (job.getLastUpdate() != null) {
                    missingJournalFiles.add(job.getLastUpdate().getDataFileId());
                }
            }
        }
        // 2. Remove from that set all known data file Id's in the journal and what's left
        //    is the missing set which will soon also contain the corrupted set.
        missingJournalFiles.removeAll(journal.getFileMap().keySet());
        if (!missingJournalFiles.isEmpty()) {
            LOG.info("Some journal files are missing: {}", missingJournalFiles);
        }
        // 3. Now check all references in the journal logs for corruption and add any
        //    corrupt journal files to the missing set.
        HashSet<Location> corruptedLocations = new HashSet<Location>();
        if (isCheckForCorruptJournalFiles()) {
            Collection<DataFile> dataFiles = journal.getFileMap().values();
            for (DataFile dataFile : dataFiles) {
                int id = dataFile.getDataFileId();
                for (long offset : dataFile.getCorruptedBlocks()) {
                    corruptedLocations.add(new Location(id, (int) offset));
                }
            }
            if (!corruptedLocations.isEmpty()) {
                LOG.debug("Found some corrupted data blocks in the journal: {}", corruptedLocations.size());
            }
        }
        // 4. Now we either fail or we remove all references to missing or corrupt journal
        //    files from the various JobSchedulerImpl instances.  We only remove the Job if
        //    the initial Add operation is missing when the ignore option is set, the updates
        //    could be lost but that's price you pay when ignoring the missing logs.
        if (!missingJournalFiles.isEmpty() || !corruptedLocations.isEmpty()) {
            if (!isIgnoreMissingJournalfiles()) {
                throw new IOException("Detected missing/corrupt journal files.");
            }
            // Remove all Jobs that reference an Location that is either missing or corrupt.
            undoCounter = removeJobsInMissingOrCorruptJounralFiles(tx, missingJournalFiles, corruptedLocations);
            // Clean up the Journal Reference count Map.
            removeJournalRCForMissingFiles(tx, missingJournalFiles);
        }
        if (undoCounter > 0) {
            long end = System.currentTimeMillis();
            LOG.info("Detected missing/corrupt journal files.  Dropped {} jobs from the " +
                     "index in {} seconds.", undoCounter, ((end - start) / 1000.0f));
        }
    }
private void removeJournalRCForMissingFiles(Transaction tx, Set<Integer> missing) throws IOException {
List<Integer> matches = new ArrayList<Integer>();
Iterator<Entry<Integer, Integer>> references = metaData.getJournalRC().iterator(tx);
while (references.hasNext()) {
int dataFileId = references.next().getKey();
if (missing.contains(dataFileId)) {
matches.add(dataFileId);
}
}
for (Integer match : matches) {
metaData.getJournalRC().remove(tx, match);
}
}
    /**
     * Drops every scheduled job whose add command lives in a missing journal
     * file or in a corrupted region of an existing file, returning the number
     * of jobs removed. (Method name keeps the historical "Jounral" typo; it
     * is referenced elsewhere in this class.)
     */
    private int removeJobsInMissingOrCorruptJounralFiles(Transaction tx, Set<Integer> missing, Set<Location> corrupted) throws IOException {
        int removed = 0;
        // Remove Jobs that reference missing or corrupt files.
        // Remove Reference counts to missing or corrupt files.
        // Remove and remove command markers to missing or corrupt files.
        for (Iterator<Map.Entry<String, JobSchedulerImpl>> i = metaData.getJobSchedulers().iterator(tx); i.hasNext();) {
            Map.Entry<String, JobSchedulerImpl> entry = i.next();
            JobSchedulerImpl scheduler = entry.getValue();
            List<JobLocation> jobs = scheduler.getAllScheduledJobs(tx);
            for (JobLocation job : jobs) {
                // Remove all jobs in missing log files.
                if (missing.contains(job.getLocation().getDataFileId())) {
                    scheduler.removeJobAtTime(tx, job.getJobId(), job.getNextTime());
                    removed++;
                    continue;
                }
                // Remove all jobs in corrupted parts of log files.
                if (corrupted.contains(job.getLocation())) {
                    scheduler.removeJobAtTime(tx, job.getJobId(), job.getNextTime());
                    removed++;
                }
            }
        }
        return removed;
    }
}
|
Java
|
Apache-2.0
|
MrTutao/activemq/activemq-kahadb-store/src/main/java/org/apache/activemq/store/kahadb/scheduler/JobSchedulerStoreImpl.java
|
c4f5f50b-057c-45f2-977c-be8a00956fed
|
[{"tag": "API_KEY", "value": "57ed642b-1ee3-47b3-be6d-b7297d500409", "start": 3842, "end": 3878, "context": "nal UUID SCHEDULER_STORE_TOKEN = UUID.fromString(\"57ed642b-1ee3-47b3-be6d-b7297d500409\");\n\n /**\n * The default scheduler store ve"}]
|
[{"tag": "KEY", "value": "57ed642b-1ee3-47b3-be6d-b7297d500409", "start": 3842, "end": 3878, "context": "nal UUID SCHEDULER_STORE_TOKEN = UUID.fromString(\"57ed642b-1ee3-47b3-be6d-b7297d500409\");\n\n /**\n * The default scheduler store ve"}]
|
#
# MythBox for XBMC - http://mythbox.googlecode.com
# Copyright (C) 2011 analogue@yahoo.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import logging
import xbmc
from mythbox.bus import Event
from mythbox.util import run_async
from mythbox.mythtv.conn import EventConnection, inject_conn
log = logging.getLogger('mythbox.core')
class MythEventPublisher(object):
    """Listens on a MythTV backend event connection and republishes
    selected backend events onto the application bus.

    NOTE: this is Python 2 code (``except Exception, e``) running inside XBMC;
    ``startup`` is spawned on its own thread by the ``@run_async`` decorator.
    """

    # Example raw events as read from the backend, kept for reference:
    #
    # Before recording starts:
    #
    # [u'BACKEND_MESSAGE', u'SYSTEM_EVENT REC_PENDING SECS 120 CARDID 7 CHANID 4282 STARTTIME 2011-05-27T20:00:00 SENDER athena', u'empty']
    #
    # Delete recording
    #
    # [u'BACKEND_MESSAGE', u'RECORDING_LIST_CHANGE DELETE 1071 2011-05-27T15:30:00', u'empty']
    #
    # Create/edit/delete schedule
    #
    # [u'BACKEND_MESSAGE', u'SCHEDULE_CHANGE', u'empty']
    #

    def __init__(self, *args, **kwargs):
        # Copy only the collaborators we need out of kwargs onto self.
        [setattr(self, k, v) for k,v in kwargs.items() if k in ['bus', 'settings','translator','platform']]
        self.closed = False

    @inject_conn
    def supportsSystemEvents(self):
        # Delegates to the injected connection's platform capability check.
        return self.conn().platform.supportsSystemEvents()

    @run_async
    def startup(self):
        """Open the event connection and pump backend events onto the bus
        until shutdown() is called or XBMC aborts."""
        log.debug('Starting MythEventPublisher..')
        self.eventConn = EventConnection(settings=self.settings, translator=self.translator, platform=self.platform, bus=self.bus)
        while not self.closed and not xbmc.abortRequested:
            try:
                tokens = self.eventConn.readEvent()
                # UPDATE_FILE_SIZE events are too chatty to log.
                if len(tokens) >= 2 and not tokens[1].startswith(u'UPDATE_FILE_SIZE'):
                    log.debug('EVENT: %s' % tokens)
                # Translate the backend events we care about into bus events.
                if len(tokens)>=3 and tokens[0] == 'BACKEND_MESSAGE':
                    if tokens[1].startswith('SYSTEM_EVENT') and 'SCHEDULER_RAN' in tokens[1]:
                        self.bus.publish({'id':Event.SCHEDULER_RAN})
                    elif tokens[1].startswith('COMMFLAG_START'):
                        self.bus.publish({'id':Event.COMMFLAG_START})
                    elif tokens[1].startswith('SCHEDULE_CHANGE'):
                        self.bus.publish({'id':Event.SCHEDULE_CHANGED})
            except Exception, e:
                # Keep the pump alive on any single read/publish failure.
                log.exception(e)
        log.debug('Exiting MythEventPublisher')

    def shutdown(self):
        """Signal the startup loop to exit and close the event connection."""
        self.closed = True
        try:
            self.eventConn.close()
        except:
            # Best effort: the connection may never have been opened.
            log.exception('On shutting down MythEventPublisher')
|
Python
|
Apache-2.0
|
C6SUMMER/allinclusive-kodi-pi/.kodi/addons/script.mythbox/resources/src/mythbox/mythtv/publish.py
|
303f00fc-63db-435e-be9b-5fb23b4879ac
|
[{"tag": "EMAIL", "value": "analogue@yahoo.com", "start": 76, "end": 94, "context": "tp://mythbox.googlecode.com\n# Copyright (C) 2011 analogue@yahoo.com\n# \n# This program is free software; you can redi"}]
|
[{"tag": "EMAIL", "value": "analogue@yahoo.com", "start": 76, "end": 94, "context": "tp://mythbox.googlecode.com\n# Copyright (C) 2011 analogue@yahoo.com\n# \n# This program is free software; you can redi"}]
|
/*
*/
package behavioral.nullobject;
/**
*
* @author Keeyana Jones <keeyanajones@yahoo.com>
*/
/**
 * Demo driver for the Null Object pattern: looks up a mix of customer
 * names via the factory and prints whatever name each result reports.
 */
public class NullPattern {

    public static void main(String[] args) {
        // Same four lookups as before, driven from a table instead of
        // four separate local variables.
        String[] requestedNames = {"Rob", "Bob", "Julie", "Laura"};
        AbstractCustomer[] customers = new AbstractCustomer[requestedNames.length];
        for (int i = 0; i < requestedNames.length; i++) {
            customers[i] = CustomerFactory.getCustomer(requestedNames[i]);
        }

        System.out.println("Customers");
        for (AbstractCustomer customer : customers) {
            System.out.println(customer.getName());
        }
    }
}
|
Java
|
MIT
|
keeyanajones/Java-Design-Patterns/src/behavioral/nullobject/NullPattern.java
|
2c3a5df1-7b6d-4df3-8274-cfa4e58307a1
|
[{"tag": "EMAIL", "value": "keeyanajones@yahoo.com", "start": 77, "end": 99, "context": "ral.nullobject;\n\n/**\n *\n * @author Keeyana Jones <keeyanajones@yahoo.com>\n */\npublic class NullPattern {\n public static "}, {"tag": "NAME", "value": "Julie", "start": 383, "end": 388, "context": "Customer customer3 = CustomerFactory.getCustomer(\"Julie\");\n AbstractCustomer customer4 = CustomerFac"}, {"tag": "NAME", "value": "Rob", "start": 241, "end": 244, "context": "Customer customer1 = CustomerFactory.getCustomer(\"Rob\");\n AbstractCustomer customer2 = CustomerFac"}, {"tag": "NAME", "value": "Bob", "start": 312, "end": 315, "context": "Customer customer2 = CustomerFactory.getCustomer(\"Bob\");\n AbstractCustomer customer3 = CustomerFac"}, {"tag": "NAME", "value": "Keeyana Jones", "start": 62, "end": 75, "context": "package behavioral.nullobject;\n\n/**\n *\n * @author Keeyana Jones <keeyanajones@yahoo.com>\n */\npublic class NullPat"}, {"tag": "NAME", "value": "Laura", "start": 456, "end": 461, "context": "Customer customer4 = CustomerFactory.getCustomer(\"Laura\");\n\n System.out.println(\"Customers\");\n "}]
|
[{"tag": "EMAIL", "value": "keeyanajones@yahoo.com", "start": 77, "end": 99, "context": "ral.nullobject;\n\n/**\n *\n * @author Keeyana Jones <keeyanajones@yahoo.com>\n */\npublic class NullPattern {\n public static "}, {"tag": "NAME", "value": "Julie", "start": 383, "end": 388, "context": "Customer customer3 = CustomerFactory.getCustomer(\"Julie\");\n AbstractCustomer customer4 = CustomerFac"}, {"tag": "NAME", "value": "Rob", "start": 241, "end": 244, "context": "Customer customer1 = CustomerFactory.getCustomer(\"Rob\");\n AbstractCustomer customer2 = CustomerFac"}, {"tag": "NAME", "value": "Bob", "start": 312, "end": 315, "context": "Customer customer2 = CustomerFactory.getCustomer(\"Bob\");\n AbstractCustomer customer3 = CustomerFac"}, {"tag": "NAME", "value": "Keeyana Jones", "start": 62, "end": 75, "context": "package behavioral.nullobject;\n\n/**\n *\n * @author Keeyana Jones <keeyanajones@yahoo.com>\n */\npublic class NullPat"}, {"tag": "NAME", "value": "Laura", "start": 456, "end": 461, "context": "Customer customer4 = CustomerFactory.getCustomer(\"Laura\");\n\n System.out.println(\"Customers\");\n "}]
|
//========================================================================
//Copyright 2007-2010 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package io.protostuff.mojo;
import java.io.File;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import io.protostuff.compiler.CachingProtoLoader;
import io.protostuff.compiler.CompilerMain;
/**
* Compiles proto files to java/gwt/etc.
*
* @author David Yu
*
* @goal compile
* @configurator include-project-dependencies
* @requiresDependencyResolution compile+runtime
*/
public class ProtoCompilerMojo extends AbstractMojo
{

    /**
     * The current Maven project.
     *
     * @parameter default-value="${project}"
     * @readonly
     * @required
     * @since 1.0.1
     */
    protected MavenProject project;

    /**
     * When {@code true}, skip the execution.
     *
     * @parameter expression="${protostuff.compiler.skip}" default-value="false"
     * @since 1.0.1
     */
    private boolean skip;

    /**
     * When {@code true}, the protos are cached for re-use. This matters when a certain proto is also used/imported by
     * other modules.
     *
     * @parameter expression="${protostuff.compiler.cache_protos}" default-value="false"
     * @since 1.0.5
     */
    private boolean cacheProtos;

    /**
     * Usually most of protostuff mojos will not get executed on parent poms (i.e. projects with packaging type 'pom').
     * Setting this parameter to {@code true} will force the execution of this mojo, even if it would usually get
     * skipped in this case.
     *
     * @parameter expression="${protostuff.compiler.force}" default-value="false"
     * @required
     * @since 1.0.1
     */
    private boolean forceMojoExecution;

    /**
     * The properties file that contains the modules
     *
     * @parameter
     */
    protected File modulesFile;

    /**
     * If not specified, the directory where the file is located will be used as its base dir.
     * <p>
     * This is only relevent when {@link #modulesFile is provided}.
     *
     * @parameter
     * @since 1.0.8
     */
    protected File sourceBaseDir;

    /**
     * If not specified, the directory where the file is located will be used as its base dir.
     * <p>
     * This is only relevent when {@link #modulesFile is provided}.
     *
     * @parameter
     * @since 1.0.8
     */
    protected File outputBaseDir;

    /**
     * The modules to generate code from
     *
     * @parameter
     */
    protected ProtoModule[] protoModules;

    /**
     * @parameter expression="${project.basedir}"
     * @required
     */
    protected File baseDir;

    /**
     * Compiles the configured proto modules.
     * <p>
     * Two configuration styles are supported: inline {@code <modules>}
     * ({@link #protoModules}) and/or an external {@link #modulesFile}. When
     * neither {@code modulesFile} is set nor {@code protoModules} is present,
     * the build fails. Generated source directories are added to the
     * project's compile source roots unless a module opts out.
     *
     * @throws MojoExecutionException if configuration is missing/invalid or compilation fails
     * @throws MojoFailureException declared by the Mojo contract
     */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException
    {
        if (skipMojo())
        {
            return;
        }

        assert baseDir != null && baseDir.exists() && baseDir.isDirectory();

        // Share one proto loader across modules when caching is enabled.
        CachingProtoLoader loader = cacheProtos ? new CachingProtoLoader() : null;

        if (modulesFile == null)
        {
            if (protoModules == null)
            {
                throw new MojoExecutionException("Either <modules> or <modulesFile> " +
                        "should be provided.");
            }
            try
            {
                for (ProtoModule m : protoModules)
                {
                    m.setCachingProtoLoader(loader);

                    // For a custom (non-built-in) output, resolve the path
                    // against baseDir when this mojo runs in a child pom
                    // (i.e. the current dir is not the project base dir).
                    if (!CompilerMain.isAvailableOutput(m.getOutput()) &&
                            !baseDir.getAbsoluteFile().equals(
                                    new File(".").getAbsoluteFile()))
                    {
                        // custom stg output executed on a child pom
                        try
                        {
                            File relativePath = new File(baseDir, m.getOutput());
                            if (relativePath.exists())
                            {
                                // update the path module.
                                m.setOutput(relativePath.getCanonicalPath());
                            }
                        }
                        catch (Exception e)
                        {
                            // ignore ... <output> might be an absolute path
                        }
                    }
                    CompilerMain.compile(m);

                    // enabled by default unless overridden
                    if (m.isAddToCompileSourceRoot())
                    {
                        // Include generated directory to the list of compilation sources
                        project.addCompileSourceRoot(m.getOutputDir().getAbsolutePath());
                    }
                }
            }
            catch (Exception e)
            {
                throw new MojoExecutionException(e.getMessage(), e);
            }
        }
        else
        {
            try
            {
                // Inline modules (if any) are compiled first, then the
                // modules declared in the external properties file.
                if (protoModules != null)
                {
                    for (ProtoModule m : protoModules)
                    {
                        m.setCachingProtoLoader(loader);
                        CompilerMain.compile(m);

                        // enabled by default unless overridden
                        if (m.isAddToCompileSourceRoot())
                        {
                            // Include generated directory to the list of compilation sources
                            project.addCompileSourceRoot(m.getOutputDir().getAbsolutePath());
                        }
                    }
                }

                if (!modulesFile.exists())
                    throw new MojoExecutionException(modulesFile + " does not exist.");

                File parent = modulesFile.getParentFile();

                // Default both base dirs to the modules file's directory.
                File sourceBaseDir = this.sourceBaseDir, outputBaseDir = this.outputBaseDir;
                if (sourceBaseDir == null)
                    sourceBaseDir = parent;
                if (outputBaseDir == null)
                    outputBaseDir = parent;

                CompilerMain.compile(CompilerMain.loadModules(modulesFile,
                        sourceBaseDir, outputBaseDir));
            }
            catch (Exception e)
            {
                throw new MojoExecutionException(e.getMessage(), e);
            }
        }
    }

    /**
     * <p>
     * Determine if the mojo execution should get skipped.
     * </p>
     * This is the case if:
     * <ul>
     * <li>{@link #skip} is <code>true</code></li>
     * <li>if the mojo gets executed on a project with packaging type 'pom' and {@link #forceMojoExecution} is
     * <code>false</code></li>
     * </ul>
     *
     * @return <code>true</code> if the mojo execution should be skipped.
     * @since 1.0.1
     */
    protected boolean skipMojo()
    {
        if (skip)
        {
            getLog().info("Skipping protostuff mojo execution");
            return true;
        }

        if (!forceMojoExecution && "pom".equals(this.project.getPackaging()))
        {
            getLog().info("Skipping protostuff mojo execution for project with packaging type 'pom'");
            return true;
        }

        return false;
    }
}
|
Java
|
Apache-2.0
|
guilhermejccavalcanti/protostuff/protostuff-maven-plugin/src/main/java/io/protostuff/mojo/ProtoCompilerMojo.java
|
78e23312-3c82-45c8-8ca8-5e658dd59b31
|
[{"tag": "NAME", "value": "David Yu", "start": 98, "end": 106, "context": "==========================\r\n//Copyright 2007-2010 David Yu dyuproject@gmail.com\r\n//-------------------------"}, {"tag": "NAME", "value": "David Yu", "start": 1250, "end": 1258, "context": "les proto files to java/gwt/etc.\r\n * \r\n * @author David Yu\r\n *\r\n * @goal compile\r\n * @configurator include-p"}, {"tag": "EMAIL", "value": "dyuproject@gmail.com", "start": 107, "end": 127, "context": "=================\r\n//Copyright 2007-2010 David Yu dyuproject@gmail.com\r\n//----------------------------------------------"}]
|
[{"tag": "NAME", "value": "David Yu", "start": 98, "end": 106, "context": "==========================\r\n//Copyright 2007-2010 David Yu dyuproject@gmail.com\r\n//-------------------------"}, {"tag": "NAME", "value": "David Yu", "start": 1250, "end": 1258, "context": "les proto files to java/gwt/etc.\r\n * \r\n * @author David Yu\r\n *\r\n * @goal compile\r\n * @configurator include-p"}, {"tag": "EMAIL", "value": "dyuproject@gmail.com", "start": 107, "end": 127, "context": "=================\r\n//Copyright 2007-2010 David Yu dyuproject@gmail.com\r\n//----------------------------------------------"}]
|
package com.bitdubai.fermat_api.layer.all_definition.common.system.annotations;
import com.bitdubai.fermat_api.layer.all_definition.enums.Developers;
import com.bitdubai.fermat_api.layer.all_definition.enums.Layers;
import com.bitdubai.fermat_api.layer.all_definition.enums.Platforms;
import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* The annotation <code>NeededPluginReference</code>
* contains all the data needed to build the plugin version reference to assign it to the plugin.
* <p/>
* Created by Leon Acosta - (laion.cj91@gmail.com) on 28/10/2015.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface NeededPluginReference {

    /** Platform of the referenced plugin. */
    Platforms platform();

    /** Layer of the referenced plugin. */
    Layers layer();

    /** The referenced plugin itself. */
    Plugins plugin();

    /** Developer of the referenced plugin; defaults to BITDUBAI. */
    Developers developer() default Developers.BITDUBAI;

    /** Version string of the referenced plugin; defaults to "1.0.0". */
    String version() default "1.0.0";
}
|
Java
|
MIT
|
jorgeejgonzalez/fermat/fermat-api/src/main/java/com/bitdubai/fermat_api/layer/all_definition/common/system/annotations/NeededPluginReference.java
|
ccd63340-c17b-4bc7-ade9-2858db578934
|
[{"tag": "NAME", "value": "laion.cj91@gmail.com", "start": 708, "end": 728, "context": " the plugin.\n * <p/>\n * Created by Leon Acosta - (laion.cj91@gmail.com) on 28/10/2015.\n */\n@Retention(RetentionPolicy.RU"}, {"tag": "NAME", "value": "Leon Acosta", "start": 693, "end": 704, "context": "to assign it to the plugin.\n * <p/>\n * Created by Leon Acosta - (laion.cj91@gmail.com) on 28/10/2015.\n */\n@Rete"}]
|
[{"tag": "NAME", "value": "laion.cj91@gmail.com", "start": 708, "end": 728, "context": " the plugin.\n * <p/>\n * Created by Leon Acosta - (laion.cj91@gmail.com) on 28/10/2015.\n */\n@Retention(RetentionPolicy.RU"}, {"tag": "NAME", "value": "Leon Acosta", "start": 693, "end": 704, "context": "to assign it to the plugin.\n * <p/>\n * Created by Leon Acosta - (laion.cj91@gmail.com) on 28/10/2015.\n */\n@Rete"}]
|
/*
* steam.c
*
* Copyright (C) 2014 Tomasz Bujlow <tomasz@skatnet.dk>
*
* The signature is mostly based on the Libprotoident library
* except the detection of HTTP Steam flows.
*
* This file is part of nDPI, an open source deep packet inspection
* library based on the OpenDPI and PACE technology by ipoque GmbH
*
* nDPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* nDPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with nDPI. If not, see <http://www.gnu.org/licenses/>.
*
*/
#include "ndpi_api.h"
#ifdef NDPI_PROTOCOL_STEAM
/* Mark the flow as STEAM (classified as a real protocol). */
static void ndpi_int_steam_add_connection(struct ndpi_detection_module_struct *ndpi_struct, struct ndpi_flow_struct *flow) {
  ndpi_int_add_connection(ndpi_struct, flow, NDPI_PROTOCOL_STEAM, NDPI_REAL_PROTOCOL);
}
/* HTTP detection: classify the flow as STEAM when the parsed User-Agent
 * header starts with the Steam client's signature string. */
static void ndpi_check_steam_http(struct ndpi_detection_module_struct *ndpi_struct, struct ndpi_flow_struct *flow) {
  struct ndpi_packet_struct *packet = &flow->packet;

  /* Bail out unless a User-Agent line is present and long enough to
   * carry the 23-byte Steam signature. */
  if (packet->user_agent_line.ptr == NULL || packet->user_agent_line.len < 23)
    return;
  if (memcmp(packet->user_agent_line.ptr, "Valve/Steam HTTP Client", 23) != 0)
    return;

  NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
  ndpi_int_steam_add_connection(ndpi_struct, flow);
}
/* TCP detection: a two-packet request/response exchange of 1/4/5-byte
 * payloads — one side sends 01 00 00 00, the other answers with zeros in
 * bytes 0..2 (or vice versa). flow->steam_stage encodes both progress and
 * the request's direction so only a packet from the opposite direction is
 * accepted as the response. */
static void ndpi_check_steam_tcp(struct ndpi_detection_module_struct *ndpi_struct, struct ndpi_flow_struct *flow) {
  struct ndpi_packet_struct *packet = &flow->packet;
  u_int32_t payload_len = packet->payload_packet_len;
  if (flow->steam_stage == 0) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage 0: \n");
    if (((payload_len == 1) || (payload_len == 4) || (payload_len == 5)) && match_first_bytes(packet->payload, "\x01\x00\x00\x00")) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Possible STEAM request detected, we will look further for the response...\n");
      /* Encode the direction of the packet in the stage, so we will know when we need to look for the response packet. */
      flow->steam_stage = packet->packet_direction + 1; // packet_direction 0: stage 1, packet_direction 1: stage 2
      return;
    }
    if (((payload_len == 1) || (payload_len == 4) || (payload_len == 5)) && (packet->payload[0] == 0x00) && (packet->payload[1] == 0x00) && (packet->payload[2] == 0x00)) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Possible STEAM request detected, we will look further for the response...\n");
      /* Encode the direction of the packet in the stage, so we will know when we need to look for the response packet. */
      flow->steam_stage = packet->packet_direction + 3; // packet_direction 0: stage 3, packet_direction 1: stage 4
      return;
    }
  } else if ((flow->steam_stage == 1) || (flow->steam_stage == 2)) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage %u: \n", flow->steam_stage);
    /* At first check, if this is for sure a response packet (in another direction. If not, do nothing now and return. */
    if ((flow->steam_stage - packet->packet_direction) == 1) {
      return;
    }
    /* This is a packet in another direction. Check if we find the proper response. */
    if (((payload_len == 1) || (payload_len == 4) || (payload_len == 5)) && (packet->payload[0] == 0x00) && (packet->payload[1] == 0x00) && (packet->payload[2] == 0x00)) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
      ndpi_int_steam_add_connection(ndpi_struct, flow);
    } else {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "The reply did not seem to belong to STEAM, resetting the stage to 0...\n");
      flow->steam_stage = 0;
    }
  } else if ((flow->steam_stage == 3) || (flow->steam_stage == 4)) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage %u: \n", flow->steam_stage);
    /* At first check, if this is for sure a response packet (in another direction. If not, do nothing now and return. */
    if ((flow->steam_stage - packet->packet_direction) == 3) {
      return;
    }
    /* This is a packet in another direction. Check if we find the proper response. */
    if (((payload_len == 1) || (payload_len == 4) || (payload_len == 5)) && match_first_bytes(packet->payload, "\x01\x00\x00\x00")) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
      ndpi_int_steam_add_connection(ndpi_struct, flow);
    } else {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "The reply did not seem to belong to STEAM, resetting the stage to 0...\n");
      flow->steam_stage = 0;
    }
  }
}
/* UDP detection, variant 1: an immediate match on a "VS01" payload, or a
 * request/response pairing of 31 ff 30 2e with ff ff ff ff (either order),
 * tracked across directions via flow->steam_stage1 (same direction-encoding
 * scheme as the TCP check). */
static void ndpi_check_steam_udp1(struct ndpi_detection_module_struct *ndpi_struct, struct ndpi_flow_struct *flow) {
  struct ndpi_packet_struct *packet = &flow->packet;
  u_int32_t payload_len = packet->payload_packet_len;
  if ((payload_len > 0) && match_first_bytes(packet->payload, "VS01")) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
    ndpi_int_steam_add_connection(ndpi_struct, flow);
    return;
  }
  /* Check if we so far detected the protocol in the request or not. */
  if (flow->steam_stage1 == 0) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage 0: \n");
    if ((payload_len > 0) && match_first_bytes(packet->payload, "\x31\xff\x30\x2e")) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Possible STEAM request detected, we will look further for the response...\n");
      /* Encode the direction of the packet in the stage, so we will know when we need to look for the response packet. */
      flow->steam_stage1 = packet->packet_direction + 1; // packet_direction 0: stage 1, packet_direction 1: stage 2
      return;
    }
    if ((payload_len > 0) && match_first_bytes(packet->payload, "\xff\xff\xff\xff")) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Possible STEAM request detected, we will look further for the response...\n");
      /* Encode the direction of the packet in the stage, so we will know when we need to look for the response packet. */
      flow->steam_stage1 = packet->packet_direction + 3; // packet_direction 0: stage 3, packet_direction 1: stage 4
      return;
    }
  } else if ((flow->steam_stage1 == 1) || (flow->steam_stage1 == 2)) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage %u: \n", flow->steam_stage1);
    /* At first check, if this is for sure a response packet (in another direction. If not, do nothing now and return. */
    if ((flow->steam_stage1 - packet->packet_direction) == 1) {
      return;
    }
    /* This is a packet in another direction. Check if we find the proper response. */
    if ((payload_len > 0) && match_first_bytes(packet->payload, "\xff\xff\xff\xff")) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
      ndpi_int_steam_add_connection(ndpi_struct, flow);
    } else {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "The reply did not seem to belong to STEAM, resetting the stage to 0...\n");
      flow->steam_stage1 = 0;
    }
  } else if ((flow->steam_stage1 == 3) || (flow->steam_stage1 == 4)) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage %u: \n", flow->steam_stage1);
    /* At first check, if this is for sure a response packet (in another direction. If not, do nothing now and return. */
    if ((flow->steam_stage1 - packet->packet_direction) == 3) {
      return;
    }
    /* This is a packet in another direction. Check if we find the proper response. */
    if ((payload_len > 0) && match_first_bytes(packet->payload, "\x31\xff\x30\x2e")) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
      ndpi_int_steam_add_connection(ndpi_struct, flow);
    } else {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "The reply did not seem to belong to STEAM, resetting the stage to 0...\n");
      flow->steam_stage1 = 0;
    }
  }
}
/* UDP detection, variant 2: a 25-byte request starting ff ff ff ff,
 * answered from the other direction by an empty payload or another
 * ff ff ff ff packet. flow->steam_stage2 records progress plus the
 * request's direction. */
static void ndpi_check_steam_udp2(struct ndpi_detection_module_struct *ndpi_struct, struct ndpi_flow_struct *flow) {
  struct ndpi_packet_struct *packet = &flow->packet;
  u_int32_t len = packet->payload_packet_len;

  if (flow->steam_stage2 == 0) {
    /* No request seen yet on this flow. */
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage 0: \n");
    if ((len == 25) && match_first_bytes(packet->payload, "\xff\xff\xff\xff")) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Possible STEAM request detected, we will look further for the response...\n");
      /* Encode the request's direction in the stage value so only a
       * packet from the opposite side is treated as the response. */
      flow->steam_stage2 = packet->packet_direction + 1; // packet_direction 0: stage 1, packet_direction 1: stage 2
    }
    return;
  }

  NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage %u: \n", flow->steam_stage2);
  if ((flow->steam_stage2 - packet->packet_direction) == 1)
    return; /* Same direction as the request; keep waiting for a reply. */

  /* Packet from the other direction: accept it as the response or reset. */
  if ((len == 0) || match_first_bytes(packet->payload, "\xff\xff\xff\xff")) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
    ndpi_int_steam_add_connection(ndpi_struct, flow);
  } else {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "The reply did not seem to belong to STEAM, resetting the stage to 0...\n");
    flow->steam_stage2 = 0;
  }
}
/* UDP detection, variant 3: a 4-byte request 39 18 00 00 answered from the
 * other direction by an empty payload or an 8-byte packet starting
 * 3a 18 00 00. flow->steam_stage3 records progress plus the request's
 * direction. */
static void ndpi_check_steam_udp3(struct ndpi_detection_module_struct *ndpi_struct, struct ndpi_flow_struct *flow) {
  struct ndpi_packet_struct *packet = &flow->packet;
  u_int32_t payload_len = packet->payload_packet_len;
  /* Check if we so far detected the protocol in the request or not. */
  if (flow->steam_stage3 == 0) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage 0: \n");
    if ((payload_len == 4) && (packet->payload[0] == 0x39) && (packet->payload[1] == 0x18) && (packet->payload[2] == 0x00) && (packet->payload[3] == 0x00)) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Possible STEAM request detected, we will look further for the response...\n");
      /* Encode the direction of the packet in the stage, so we will know when we need to look for the response packet. */
      flow->steam_stage3 = packet->packet_direction + 1; // packet_direction 0: stage 1, packet_direction 1: stage 2
    }
  } else {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM stage %u: \n", flow->steam_stage3);
    /* At first check, if this is for sure a response packet (in another direction. If not, do nothing now and return. */
    if ((flow->steam_stage3 - packet->packet_direction) == 1) {
      return;
    }
    /* This is a packet in another direction. Check if we find the proper response. */
    if ((payload_len == 0) || ((payload_len == 8) && (packet->payload[0] == 0x3a) && (packet->payload[1] == 0x18) && (packet->payload[2] == 0x00) && (packet->payload[3] == 0x00))) {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Found STEAM.\n");
      ndpi_int_steam_add_connection(ndpi_struct, flow);
    } else {
      NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "The reply did not seem to belong to STEAM, resetting the stage to 0...\n");
      flow->steam_stage3 = 0;
    }
  }
}
/* Entry point for STEAM detection. Excludes the protocol after 20 packets,
 * skips TCP retransmissions, and runs the HTTP/TCP/UDP checks in order,
 * stopping as soon as one of them classifies the flow. */
void ndpi_search_steam(struct ndpi_detection_module_struct *ndpi_struct, struct ndpi_flow_struct *flow) {
  struct ndpi_packet_struct *packet = &flow->packet;
  /* Break after 20 packets. */
  if (flow->packet_counter > 20) {
    NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "Exclude STEAM.\n");
    NDPI_ADD_PROTOCOL_TO_BITMASK(flow->excluded_protocol_bitmask, NDPI_PROTOCOL_STEAM);
    return;
  }
  /* skip marked or retransmitted packets */
  if (packet->tcp_retransmission != 0) {
    return;
  }
  /* Already classified: nothing to do. */
  if (packet->detected_protocol_stack[0] == NDPI_PROTOCOL_STEAM) {
    return;
  }
  NDPI_LOG(NDPI_PROTOCOL_STEAM, ndpi_struct, NDPI_LOG_DEBUG, "STEAM detection...\n");
  /* Run each detector in turn; bail out as soon as one matches. */
  ndpi_check_steam_http(ndpi_struct, flow);
  if (packet->detected_protocol_stack[0] == NDPI_PROTOCOL_STEAM) {
    return;
  }
  ndpi_check_steam_tcp(ndpi_struct, flow);
  if (packet->detected_protocol_stack[0] == NDPI_PROTOCOL_STEAM) {
    return;
  }
  ndpi_check_steam_udp1(ndpi_struct, flow);
  if (packet->detected_protocol_stack[0] == NDPI_PROTOCOL_STEAM) {
    return;
  }
  ndpi_check_steam_udp2(ndpi_struct, flow);
  if (packet->detected_protocol_stack[0] == NDPI_PROTOCOL_STEAM) {
    return;
  }
  ndpi_check_steam_udp3(ndpi_struct, flow);
}
#endif
|
C
|
Apache-2.0
|
CN-UPB/Cloud-NFV-Orchestration/son-examples/vnfs/sonata-vtc-vnf-docker/nDPI/src/lib/protocols/steam.c
|
e39fbe95-2d22-49bf-89ab-1db4a92941b9
|
[{"tag": "NAME", "value": "Tomasz Bujlow", "start": 39, "end": 52, "context": "/*\n * steam.c\n *\n * Copyright (C) 2014 Tomasz Bujlow <tomasz@skatnet.dk>\n * \n * The signature is mostl"}, {"tag": "EMAIL", "value": "tomasz@skatnet.dk", "start": 54, "end": 71, "context": "* steam.c\n *\n * Copyright (C) 2014 Tomasz Bujlow <tomasz@skatnet.dk>\n * \n * The signature is mostly based on the Libp"}]
|
[{"tag": "NAME", "value": "Tomasz Bujlow", "start": 39, "end": 52, "context": "/*\n * steam.c\n *\n * Copyright (C) 2014 Tomasz Bujlow <tomasz@skatnet.dk>\n * \n * The signature is mostl"}, {"tag": "EMAIL", "value": "tomasz@skatnet.dk", "start": 54, "end": 71, "context": "* steam.c\n *\n * Copyright (C) 2014 Tomasz Bujlow <tomasz@skatnet.dk>\n * \n * The signature is mostly based on the Libp"}]
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package gameshop.advance.technicalservices;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author Lorenzo Di Giuseppe <lorenzo.digiuseppe88@gmail.com>
*/
/**
 * Lazily-initialized singleton wrapper around java.util.logging used to
 * record exceptions at SEVERE level.
 */
public class LoggerSingleton {

    // Single shared instance; access is guarded by the class lock (see
    // getInstance), which fixes the race where two threads calling
    // getInstance() concurrently could each create an instance.
    private static LoggerSingleton instance;

    private LoggerSingleton(){
    }

    /**
     * Returns the shared instance, creating it on first use. Synchronized
     * so that concurrent first calls cannot observe a partially-initialized
     * field or create two instances.
     *
     * @return istanza di LoggerSingleton
     */
    public static synchronized LoggerSingleton getInstance()
    {
        if(instance == null)
            instance = new LoggerSingleton();
        return instance;
    }

    /**
     * Logs the given exception at SEVERE level via java.util.logging,
     * after announcing itself on stderr.
     *
     * @param ex the exception to record
     */
    public void log(Exception ex)
    {
        System.err.println("Logger Singleton says:\n");
        Logger.getLogger(LoggerSingleton.class.getName()).log(Level.SEVERE, null, ex);
    }
}
|
Java
|
MIT
|
GameShopAdvance/GameShop-Advance/GameShop Advance Server/src/gameshop/advance/technicalservices/LoggerSingleton.java
|
85939af4-d646-4901-8611-0e1fe961338e
|
[{"tag": "NAME", "value": "Lorenzo Di Giuseppe", "start": 315, "end": 334, "context": "port java.util.logging.Logger;\n\n/**\n *\n * @author Lorenzo Di Giuseppe <lorenzo.digiuseppe88@gmail.com>\n */\npublic class"}, {"tag": "EMAIL", "value": "lorenzo.digiuseppe88@gmail.com", "start": 336, "end": 366, "context": "g.Logger;\n\n/**\n *\n * @author Lorenzo Di Giuseppe <lorenzo.digiuseppe88@gmail.com>\n */\npublic class LoggerSingleton {\n \n priv"}]
|
[{"tag": "NAME", "value": "Lorenzo Di Giuseppe", "start": 315, "end": 334, "context": "port java.util.logging.Logger;\n\n/**\n *\n * @author Lorenzo Di Giuseppe <lorenzo.digiuseppe88@gmail.com>\n */\npublic class"}, {"tag": "EMAIL", "value": "lorenzo.digiuseppe88@gmail.com", "start": 336, "end": 366, "context": "g.Logger;\n\n/**\n *\n * @author Lorenzo Di Giuseppe <lorenzo.digiuseppe88@gmail.com>\n */\npublic class LoggerSingleton {\n \n priv"}]
|
"""
A module that contains utility functions to load the 'classical' workspace configuration.
This configuration may have three meaningful files:
.remote (required) - information about the connection options
.remoteindex (optional) - information about which connection from options above to use
.remoteignore (optional) - information about files that should be ignore when syncing files
"""
import os
import re
from collections import defaultdict
from dataclasses import asdict
from pathlib import Path
from typing import Dict, List, Tuple
from remote.exceptions import ConfigurationError
from . import ConfigurationMedium, RemoteConfig, SyncRules, WorkspaceConfig
from .shared import DEFAULT_REMOTE_ROOT, hash_path
CONFIG_FILE_NAME = ".remote"
INDEX_FILE_NAME = ".remoteindex"
IGNORE_FILE_NAME = ".remoteignore"
IGNORE_SECTION_REGEX = re.compile(r"^(push|pull|both)\s*:$")
BASE_IGNORES = (CONFIG_FILE_NAME, INDEX_FILE_NAME, IGNORE_FILE_NAME)
DEFAULT_SHELL = "sh"
DEFAULT_SHELL_OPTIONS = ""
def _extract_shell_info(line: str, env_vars: List[str]) -> Tuple[str, str]:
if not env_vars:
return DEFAULT_SHELL, DEFAULT_SHELL_OPTIONS
vars_string = env_vars[0]
env = {}
items = vars_string.split()
index = 0
while index < len(items):
key, value = items[index].split("=")
if value.startswith("'") or value.startswith('"'):
control_character = value[0]
while index < len(items) - 1:
if value[-1] == control_character:
break
index += 1
value += " " + items[index]
if not value[-1] == control_character:
raise ConfigurationError(f"Config line {line} is corrupted. Cannot parse end {key}={value}")
env[key] = value.strip("\"'")
index += 1
print(env)
# TODO: these shell types are not used in new implementation, need to remove them
shell = env.pop("RSHELL", DEFAULT_SHELL)
shell_options = env.pop("RSHELL_OPTS", DEFAULT_SHELL_OPTIONS)
if env:
raise ConfigurationError(
f"Config line {line} contains unexpected env variables: {env}. Only RSHELL and RSHELL_OPTS can be used"
)
return shell, shell_options
def parse_config_line(line: str) -> RemoteConfig:
    """Turn one ``host:remote_dir [ENV=...]`` config line into a RemoteConfig.

    A typical line:
    sdas-ld2:.remotes/814f27f15f4e7a0842cada353dfc765a RSHELL=zsh
    """
    entry, *env_items = line.split(maxsplit=1)
    shell, shell_options = _extract_shell_info(line, env_items)

    parts = entry.split(":")
    if len(parts) == 2:
        host, directory = parts
        return RemoteConfig(host=host, directory=Path(directory), shell=shell, shell_options=shell_options)

    raise ConfigurationError(
        f"The configuration string is malformed: {parts}. Please use host-name:remote_dir format"
    )
def load_configurations(workspace_root: Path) -> List[RemoteConfig]:
    """Read every connection entry from the workspace's .remote file."""
    config_file = workspace_root / CONFIG_FILE_NAME
    stripped = (raw.strip() for raw in config_file.read_text().splitlines())
    # Blank lines and '#' comments are not configuration entries.
    return [text for text in stripped if text and not text.startswith("#")] and [
        parse_config_line(text)
        for text in (raw.strip() for raw in config_file.read_text().splitlines())
        if text and not text.startswith("#")
    ] or []
def load_default_configuration_num(workspace_root: Path) -> int:
    """Return the 0-based index of the remote configuration to use.

    Resolution order:
      1. The REMOTE_HOST_INDEX environment variable, used as-is.
         NOTE(review): unlike .remoteindex, this value is not converted from
         a 1-based number — confirm that is intended.
      2. The workspace's .remoteindex file (stored 1-based, returned 0-based).
      3. 0 when neither source is present.

    Fix vs. original: corrected the "coorect" typo in the error message.

    Raises:
        ConfigurationError: if either source is not a valid integer.
    """
    # If REMOTE_HOST_INDEX is set, that overrides settings in .remoteindex
    env_index = os.environ.get("REMOTE_HOST_INDEX")
    if env_index:
        try:
            return int(env_index)
        except ValueError:
            raise ConfigurationError(
                f"REMOTE_HOST_INDEX env variable contains symbols other than numbers: '{env_index}'. "
                "Please set the correct index value to continue"
            )

    index_file = workspace_root / INDEX_FILE_NAME
    if not index_file.exists():
        return 0

    # The file stores a 1-based index; callers expect 0-based.
    text = index_file.read_text().strip()
    try:
        return int(text) - 1
    except ValueError:
        raise ConfigurationError(
            f"File {index_file} contains symbols other than numbers: '{text}'. "
            "Please remove it or replace the value to continue"
        )
def _postprocess(ignores):
    """Build a SyncRules from the per-section lists, rejecting unknown sections."""
    known = {section: ignores.pop(section, []) for section in ("pull", "push", "both")}
    if ignores:
        # Anything left over was not one of the three recognized sections.
        raise ConfigurationError(
            f"{IGNORE_FILE_NAME} file has unexpected sections: {', '.join(ignores.keys())}. Please remove them"
        )
    return SyncRules(pull=known["pull"], push=known["push"], both=known["both"])
def load_ignores(workspace_root: Path) -> SyncRules:
    """Load sync-ignore patterns from the workspace's .remoteignore file.

    Two file formats are supported:
      * flat: every line is a pattern, applied to the "both" direction;
      * sectioned: ``push:``/``pull:``/``both:`` headers group the patterns.
    Mixing them (patterns before the first section header) is an error.

    Fix vs. original: corrected the "patters" typo in the error message.

    Raises:
        ConfigurationError: when flat patterns precede a section header.
    """
    ignores: Dict[str, List[str]] = defaultdict(list)
    # The config files themselves must never be synced.
    ignores["both"].extend(BASE_IGNORES)
    ignore_file = workspace_root / IGNORE_FILE_NAME
    if not ignore_file.exists():
        return _postprocess(ignores)

    active_section = "both"
    # None until the first meaningful line decides which format the file uses.
    is_new_format = None
    for line in ignore_file.read_text().splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        matcher = IGNORE_SECTION_REGEX.match(line)
        if matcher is None:
            if is_new_format is None:
                is_new_format = False
            ignores[active_section].append(line)
        else:
            if is_new_format is None:
                is_new_format = True
            elif not is_new_format:
                raise ConfigurationError(
                    f"Few ignore patterns were listed in {IGNORE_FILE_NAME} before the first section {matcher.group(1)} appeared. "
                    "Please list all ignored files after a section declaration if you use new ignore format"
                )
            active_section = matcher.group(1)

    return _postprocess(ignores)
def save_general_config(config_file: Path, configurations: List[RemoteConfig]):
    """Write the connection entries back out in .remote file format."""
    with config_file.open("w") as f:
        for item in configurations:
            entry = f"{item.host}:{item.directory}"
            # Only non-default shell settings are persisted.
            if item.shell != "sh":
                entry += f" RSHELL={item.shell}"
            if item.shell_options:
                entry += f" RSHELL_OPTS='{item.shell_options}'"
            f.write(entry + "\n")
def save_ignores(config_file: Path, ignores: SyncRules):
    """Persist sync rules to .remoteignore, deleting the file when nothing remains."""
    ignores.both.extend(BASE_IGNORES)
    ignores.trim()
    if ignores.is_empty():
        # An absent file and an empty file mean the same thing; prefer absent.
        if config_file.exists():
            config_file.unlink()
        return

    with config_file.open("w") as f:
        for section, patterns in asdict(ignores).items():
            f.write(f"{section}:\n")
            for pattern in patterns:
                f.write(f"{pattern}\n")
def save_index(config_file: Path, index: int):
    """Persist the default configuration index (written 1-based).

    Index 0 is the default and is represented by the file being absent.
    """
    if index != 0:
        config_file.write_text(f"{index + 1}\n")
        return
    if config_file.exists():
        config_file.unlink()
class ClassicConfigurationMedium(ConfigurationMedium):
    """Loads and saves the 'classical' workspace configuration.

    The configuration is spread over up to three files:
    .remote (required) - information about the connection options
    .remoteindex (optional) - information about which connection from options above to use
    .remoteignore (optional) - information about files that should be ignore when syncing files
    """

    def load_config(self, workspace_root: Path) -> WorkspaceConfig:
        """Assemble a WorkspaceConfig from the three on-disk files."""
        remotes = load_configurations(workspace_root)
        default_num = load_default_configuration_num(workspace_root)
        if default_num > len(remotes) - 1:
            raise ConfigurationError(
                f"Configuration #{default_num + 1} requested but there are only {len(remotes)} declared"
            )

        return WorkspaceConfig(
            root=workspace_root,
            configurations=remotes,
            default_configuration=default_num,
            ignores=load_ignores(workspace_root),
            includes=SyncRules.new(),
        )

    def save_config(self, config: WorkspaceConfig) -> None:
        """Write each configuration aspect back to its own file."""
        root = config.root
        save_general_config(root / CONFIG_FILE_NAME, config.configurations)
        save_ignores(root / IGNORE_FILE_NAME, config.ignores)
        save_index(root / INDEX_FILE_NAME, config.default_configuration)

    def is_workspace_root(self, path: Path) -> bool:
        """A directory is a workspace root iff it contains a .remote file."""
        return (path / CONFIG_FILE_NAME).exists()

    def generate_remote_directory(self, config: WorkspaceConfig) -> Path:
        """Derive a unique remote path from the workspace name plus a hash of its local path."""
        digest = hash_path(config.root)
        return Path(f"{DEFAULT_REMOTE_ROOT}/{config.root.name}_{digest}")
|
Python
|
BSD-2-Clause
|
cdoronc/remote/src/remote/configuration/classic.py
|
11d2b9a9-a7ce-4997-a75d-4259da99d7a4
|
[]
|
[]
|
/*
* Copyright (c) 2013 Calvin Rien
*
* Based on the JSON parser by Patrick van Bergen
* http://techblog.procurios.nl/k/618/news/view/14605/14863/How-do-I-write-my-own-parser-for-JSON.html
*
* Simplified it so that it doesn't throw exceptions
* and can be used in Unity iPhone with maximum code stripping.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
namespace LeanplumSDK.MiniJSON {
// Example usage:
//
// using UnityEngine;
// using System.Collections;
// using System.Collections.Generic;
// using MiniJSON;
//
// public class MiniJSONTest : MonoBehaviour {
// void Start () {
// var jsonString = "{ \"array\": [1.44,2,3], " +
// "\"object\": {\"key1\":\"value1\", \"key2\":256}, " +
// "\"string\": \"The quick brown fox \\\"jumps\\\" over the lazy dog \", " +
// "\"unicode\": \"\\u3041 Men\u00fa sesi\u00f3n\", " +
// "\"int\": 65536, " +
// "\"float\": 3.1415926, " +
// "\"bool\": true, " +
// "\"null\": null }";
//
// var dict = Json.Deserialize(jsonString) as Dictionary<string,object>;
//
// Debug.Log("deserialized: " + dict.GetType());
// Debug.Log("dict['array'][0]: " + ((List<object>) dict["array"])[0]);
// Debug.Log("dict['string']: " + (string) dict["string"]);
// Debug.Log("dict['float']: " + (double) dict["float"]); // floats come out as doubles
// Debug.Log("dict['int']: " + (long) dict["int"]); // ints come out as longs
// Debug.Log("dict['unicode']: " + (string) dict["unicode"]);
//
// var str = Json.Serialize(dict);
//
// Debug.Log("serialized: " + str);
// }
// }
/// <summary>
/// This class encodes and decodes JSON strings.
/// Spec. details, see http://www.json.org/
///
/// JSON uses Arrays and Objects. These correspond here to the datatypes IList and IDictionary.
/// All numbers are parsed to doubles.
/// </summary>
public static class Json {
/// <summary>
/// Parses a JSON string into plain .NET values.
/// </summary>
/// <param name="json">A JSON string.</param>
/// <returns>A List<object>, a Dictionary<string, object>, a double, a long, a string, null, true, or false</returns>
public static object Deserialize(string json) {
    // A null input has nothing to parse; mirror it back as null.
    return json == null ? null : Parser.Parse(json);
}
// Hand-rolled recursive-descent JSON parser over a StringReader.
// On malformed input it returns null instead of throwing.
sealed class Parser : IDisposable {
    // Characters that terminate a bare word (numbers, true/false/null).
    const string WORD_BREAK = "{}[],:\"";

    public static bool IsWordBreak(char c) {
        return Char.IsWhiteSpace(c) || WORD_BREAK.IndexOf(c) != -1;
    }

    // Lexical token categories recognized by NextToken.
    enum TOKEN {
        NONE,
        CURLY_OPEN,
        CURLY_CLOSE,
        SQUARED_OPEN,
        SQUARED_CLOSE,
        COLON,
        COMMA,
        STRING,
        NUMBER,
        TRUE,
        FALSE,
        NULL
    };

    // Input stream; consumed one character at a time.
    StringReader json;

    Parser(string jsonString) {
        json = new StringReader(jsonString);
    }

    // Entry point: parse one JSON value from the string.
    public static object Parse(string jsonString) {
        using (var instance = new Parser(jsonString)) {
            return instance.ParseValue();
        }
    }

    public void Dispose() {
        json.Dispose();
        json = null;
    }

    // Parses a JSON object into a Dictionary; returns null on malformed input.
    Dictionary<string, object> ParseObject() {
        Dictionary<string, object> table = new Dictionary<string, object>();

        // ditch opening brace
        json.Read();

        // {
        while (true) {
            switch (NextToken) {
            case TOKEN.NONE:
                return null;
            case TOKEN.COMMA:
                continue;
            case TOKEN.CURLY_CLOSE:
                return table;
            default:
                // name
                string name = ParseString();
                if (name == null) {
                    return null;
                }

                // :
                if (NextToken != TOKEN.COLON) {
                    return null;
                }
                // ditch the colon
                json.Read();

                // value
                table[name] = ParseValue();
                break;
            }
        }
    }

    // Parses a JSON array into a List; returns null on malformed input.
    List<object> ParseArray() {
        List<object> array = new List<object>();

        // ditch opening bracket
        json.Read();

        // [
        var parsing = true;
        while (parsing) {
            TOKEN nextToken = NextToken;

            switch (nextToken) {
            case TOKEN.NONE:
                return null;
            case TOKEN.COMMA:
                continue;
            case TOKEN.SQUARED_CLOSE:
                parsing = false;
                break;
            default:
                object value = ParseByToken(nextToken);

                array.Add(value);
                break;
            }
        }

        return array;
    }

    object ParseValue() {
        TOKEN nextToken = NextToken;
        return ParseByToken(nextToken);
    }

    // Dispatch to the right parse routine for the token just seen.
    object ParseByToken(TOKEN token) {
        switch (token) {
        case TOKEN.STRING:
            return ParseString();
        case TOKEN.NUMBER:
            return ParseNumber();
        case TOKEN.CURLY_OPEN:
            return ParseObject();
        case TOKEN.SQUARED_OPEN:
            return ParseArray();
        case TOKEN.TRUE:
            return true;
        case TOKEN.FALSE:
            return false;
        case TOKEN.NULL:
            return null;
        default:
            return null;
        }
    }

    // Parses a quoted string, handling backslash escapes and \uXXXX sequences.
    string ParseString() {
        StringBuilder s = new StringBuilder();
        char c;

        // ditch opening quote
        json.Read();

        bool parsing = true;
        while (parsing) {

            if (json.Peek() == -1) {
                parsing = false;
                break;
            }

            c = NextChar;
            switch (c) {
            case '"':
                parsing = false;
                break;
            case '\\':
                if (json.Peek() == -1) {
                    parsing = false;
                    break;
                }

                c = NextChar;
                switch (c) {
                case '"':
                case '\\':
                case '/':
                    s.Append(c);
                    break;
                case 'b':
                    s.Append('\b');
                    break;
                case 'f':
                    s.Append('\f');
                    break;
                case 'n':
                    s.Append('\n');
                    break;
                case 'r':
                    s.Append('\r');
                    break;
                case 't':
                    s.Append('\t');
                    break;
                case 'u':
                    // Four hex digits follow; convert to the UTF-16 code unit.
                    var hex = new char[4];

                    for (int i=0; i< 4; i++) {
                        hex[i] = NextChar;
                    }

                    s.Append((char) Convert.ToInt32(new string(hex), 16));
                    break;
                }
                break;
            default:
                s.Append(c);
                break;
            }
        }

        return s.ToString();
    }

    // Numbers without a '.' become long; everything else becomes double.
    object ParseNumber() {
        string number = NextWord;

        if (number.IndexOf('.') == -1) {
            long parsedInt;
            Int64.TryParse(number, out parsedInt);
            return parsedInt;
        }

        double parsedDouble;
        Double.TryParse(number, out parsedDouble);
        return parsedDouble;
    }

    void EatWhitespace() {
        while (Char.IsWhiteSpace(PeekChar)) {
            json.Read();

            if (json.Peek() == -1) {
                break;
            }
        }
    }

    // Next character without consuming it.
    char PeekChar {
        get {
            return Convert.ToChar(json.Peek());
        }
    }

    // Next character, consumed.
    char NextChar {
        get {
            return Convert.ToChar(json.Read());
        }
    }

    // Consumes and returns characters up to the next word break.
    string NextWord {
        get {
            StringBuilder word = new StringBuilder();

            while (!IsWordBreak(PeekChar)) {
                word.Append(NextChar);

                if (json.Peek() == -1) {
                    break;
                }
            }

            return word.ToString();
        }
    }

    // Classifies the next token; consumes single-character punctuation
    // (except '{', '[', '"' and ':' handled by the parse routines).
    TOKEN NextToken {
        get {
            EatWhitespace();

            if (json.Peek() == -1) {
                return TOKEN.NONE;
            }

            switch (PeekChar) {
            case '{':
                return TOKEN.CURLY_OPEN;
            case '}':
                json.Read();
                return TOKEN.CURLY_CLOSE;
            case '[':
                return TOKEN.SQUARED_OPEN;
            case ']':
                json.Read();
                return TOKEN.SQUARED_CLOSE;
            case ',':
                json.Read();
                return TOKEN.COMMA;
            case '"':
                return TOKEN.STRING;
            case ':':
                return TOKEN.COLON;
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
            case '-':
                return TOKEN.NUMBER;
            }

            switch (NextWord) {
            case "false":
                return TOKEN.FALSE;
            case "true":
                return TOKEN.TRUE;
            case "null":
                return TOKEN.NULL;
            }

            return TOKEN.NONE;
        }
    }
}
/// <summary>
/// Converts a IDictionary / IList object or a simple type (string, int, etc.) into a JSON string
/// </summary>
/// <param name="json">A Dictionary<string, object> / List<object></param>
/// <returns>A JSON encoded string, or null if object 'json' is not serializable</returns>
public static string Serialize(object obj) {
    // Each call uses a fresh Serializer (and its own StringBuilder).
    return Serializer.Serialize(obj);
}
// Recursively writes a value graph into a StringBuilder as JSON text.
sealed class Serializer {
    StringBuilder builder;

    Serializer() {
        builder = new StringBuilder();
    }

    public static string Serialize(object obj) {
        var instance = new Serializer();

        instance.SerializeValue(obj);

        return instance.builder.ToString();
    }

    // Dispatches on the runtime type of `value`.
    void SerializeValue(object value) {
        IList asList;
        IDictionary asDict;
        string asStr;

        if (value == null) {
            builder.Append("null");
        } else if ((asStr = value as string) != null) {
            SerializeString(asStr);
        } else if (value is bool) {
            builder.Append((bool) value ? "true" : "false");
        } else if ((asList = value as IList) != null) {
            SerializeArray(asList);
        } else if ((asDict = value as IDictionary) != null) {
            SerializeObject(asDict);
        } else if (value is char) {
            SerializeString(new string((char) value, 1));
        } else {
            SerializeOther(value);
        }
    }

    void SerializeObject(IDictionary obj) {
        bool first = true;

        builder.Append('{');

        foreach (object e in obj.Keys) {
            if (!first) {
                builder.Append(',');
            }

            // Keys are stringified via ToString, whatever their type.
            SerializeString(e.ToString());
            builder.Append(':');

            SerializeValue(obj[e]);

            first = false;
        }

        builder.Append('}');
    }

    void SerializeArray(IList anArray) {
        builder.Append('[');

        bool first = true;

        foreach (object obj in anArray) {
            if (!first) {
                builder.Append(',');
            }

            SerializeValue(obj);

            first = false;
        }

        builder.Append(']');
    }

    // Emits a quoted string, escaping control/quote characters and anything
    // outside printable ASCII (32..126) as \uXXXX.
    void SerializeString(string str) {
        builder.Append('\"');

        char[] charArray = str.ToCharArray();
        foreach (var c in charArray) {
            switch (c) {
            case '"':
                builder.Append("\\\"");
                break;
            case '\\':
                builder.Append("\\\\");
                break;
            case '\b':
                builder.Append("\\b");
                break;
            case '\f':
                builder.Append("\\f");
                break;
            case '\n':
                builder.Append("\\n");
                break;
            case '\r':
                builder.Append("\\r");
                break;
            case '\t':
                builder.Append("\\t");
                break;
            default:
                int codepoint = Convert.ToInt32(c);
                if ((codepoint >= 32) && (codepoint <= 126)) {
                    builder.Append(c);
                } else {
                    builder.Append("\\u");
                    builder.Append(codepoint.ToString("x4"));
                }
                break;
            }
        }

        builder.Append('\"');
    }

    void SerializeOther(object value) {
        // NOTE: decimals lose precision during serialization.
        // They always have, I'm just letting you know.
        // Previously floats and doubles lost precision too.
        if (value is float) {
            // "R" round-trip format; en-US keeps '.' as the decimal separator.
            builder.Append(((float) value).ToString("R", new CultureInfo("en-US")));
        } else if (value is int
            || value is uint
            || value is long
            || value is sbyte
            || value is byte
            || value is short
            || value is ushort
            || value is ulong) {
            builder.Append(value);
        } else if (value is double
            || value is decimal) {
            builder.Append(Convert.ToDouble(value).ToString("R", new CultureInfo("en-US")));
        } else {
            // Fallback: serialize any other type via its ToString representation.
            SerializeString(value.ToString());
        }
    }
}
}
}
|
C#
|
Apache-2.0
|
Nezz/Leanplum-Unity-SDK-1/LeanplumSample/Assets/LeanplumSDK/MiniJSON.cs
|
5a6ce60a-d9ce-482d-bfe4-f728b2b0b211
|
[{"tag": "NAME", "value": "Patrick van Bergen", "start": 71, "end": 89, "context": "013 Calvin Rien\n *\n * Based on the JSON parser by Patrick van Bergen\n * http://techblog.procurios.nl/k/618/news/view/1"}, {"tag": "NAME", "value": "Calvin Rien", "start": 25, "end": 36, "context": "/*\n * Copyright (c) 2013 Calvin Rien\n *\n * Based on the JSON parser by Patrick van Ber"}]
|
[{"tag": "NAME", "value": "Patrick van Bergen", "start": 71, "end": 89, "context": "013 Calvin Rien\n *\n * Based on the JSON parser by Patrick van Bergen\n * http://techblog.procurios.nl/k/618/news/view/1"}, {"tag": "NAME", "value": "Calvin Rien", "start": 25, "end": 36, "context": "/*\n * Copyright (c) 2013 Calvin Rien\n *\n * Based on the JSON parser by Patrick van Ber"}]
|
import { Component, OnInit } from '@angular/core';
import { Recipe } from '../recipe.model';
@Component({
  selector: 'app-recipe-list',
  templateUrl: './recipe-list.component.html',
  styleUrls: ['./recipe-list.component.css']
})
export class RecipeListComponent implements OnInit {
  // Hard-coded placeholder recipes rendered by the list template.
  // NOTE(review): presumably to be replaced by a service-backed source — confirm.
  public recipes: Recipe[] = [
    new Recipe('A test recipe', 'Recipe desc', 'https://www.elmundoeats.com/wp-content/uploads/2020/12/FP-10-Minute-Eggless-Tiramisu-For-Two-2.jpg'),
    new Recipe('A test recipe', 'Recipe desc', 'https://www.elmundoeats.com/wp-content/uploads/2020/12/FP-10-Minute-Eggless-Tiramisu-For-Two-2.jpg')
  ];

  constructor() { }

  // Angular lifecycle hook; no initialization needed yet.
  ngOnInit(): void {
  }
}
|
TypeScript
|
MIT
|
TiagoBG/recipe-book-angular/src/app/components/RecipeBook/RecipeList/recipe-list.component.ts
|
f1ac7c1d-8296-4b1d-b22c-ade7586eb38d
|
[]
|
[]
|
/**
* Tiny LRU cache for Client or Server
*
* @author Jason Mulligan <jason.mulligan@avoidwork.com>
* @copyright 2018
* @license BSD-3-Clause
* @link https://github.com/avoidwork/tiny-lru
* @version 5.0.5
*/
"use strict";
(function (global) {
const empty = null;
class LRU {
constructor (max, ttl) {
this.clear();
this.max = max;
this.ttl = ttl;
}
clear () {
this.cache = {};
this.first = empty;
this.last = empty;
this.length = 0;
return this;
}
delete (key, bypass = false) {
return this.remove(key, bypass);
}
evict () {
if (this.length > 0) {
this.remove(this.last, true);
}
return this;
}
get (key) {
let result;
if (this.has(key) === true) {
const item = this.cache[key];
if (item.expiry === -1 || item.expiry > Date.now()) {
result = item.value;
this.set(key, result, true);
} else {
this.remove(key, true);
}
}
return result;
}
has (key) {
return key in this.cache;
}
remove (key, bypass = false) {
if (bypass === true || this.has(key) === true) {
const item = this.cache[key];
delete this.cache[key];
this.length--;
if (item.next !== empty) {
this.cache[item.next].prev = item.prev;
}
if (item.prev !== empty) {
this.cache[item.prev].next = item.next;
}
if (this.first === key) {
this.first = item.next;
}
if (this.last === key) {
this.last = item.prev;
}
}
return this;
}
set (key, value, bypass = false) {
if (bypass === true || this.has(key) === true) {
const item = this.cache[key];
item.value = value;
if (this.first !== key) {
const p = item.prev,
n = item.next,
f = this.cache[this.first];
item.prev = empty;
item.next = this.first;
f.prev = key;
if (p !== empty) {
this.cache[p].next = n;
}
if (n !== empty) {
this.cache[n].prev = p;
}
if (this.last === key) {
this.last = p;
}
}
} else {
if (this.length === this.max) {
this.evict();
}
this.length++;
this.cache[key] = {
expiry: this.ttl > 0 ? new Date().getTime() + this.ttl : -1,
prev: empty,
next: this.first,
value: value
};
if (this.length === 1) {
this.last = key;
} else {
this.cache[this.first].prev = key;
}
}
this.first = key;
return this;
}
}
function factory (max = 1000, ttl = 0) {
return new LRU(max, ttl);
}
// Node, AMD & window supported
if (typeof exports !== "undefined") {
module.exports = factory;
} else if (typeof define === "function" && define.amd !== void 0) {
define(() => factory);
} else {
global.lru = factory;
}
}(typeof window !== "undefined" ? window : global));
|
JavaScript
|
BSD-3-Clause
|
runk/tiny-lru/lib/tiny-lru.js
|
ee7169a0-420b-4f27-8d38-43fb470d7fa3
|
[{"tag": "EMAIL", "value": "jason.mulligan@avoidwork.com", "start": 73, "end": 101, "context": "or Client or Server\n *\n * @author Jason Mulligan <jason.mulligan@avoidwork.com>\n * @copyright 2018\n * @license BSD-3-Clause\n * @"}, {"tag": "USERNAME", "value": "avoidwork", "start": 175, "end": 184, "context": "@license BSD-3-Clause\n * @link https://github.com/avoidwork/tiny-lru\n * @version 5.0.5\n */\n\"use strict\";\n\n(fu"}, {"tag": "NAME", "value": "Jason Mulligan", "start": 57, "end": 71, "context": "Tiny LRU cache for Client or Server\n *\n * @author Jason Mulligan <jason.mulligan@avoidwork.com>\n * @copyright 2018"}]
|
[{"tag": "EMAIL", "value": "jason.mulligan@avoidwork.com", "start": 73, "end": 101, "context": "or Client or Server\n *\n * @author Jason Mulligan <jason.mulligan@avoidwork.com>\n * @copyright 2018\n * @license BSD-3-Clause\n * @"}, {"tag": "USERNAME", "value": "avoidwork", "start": 175, "end": 184, "context": "@license BSD-3-Clause\n * @link https://github.com/avoidwork/tiny-lru\n * @version 5.0.5\n */\n\"use strict\";\n\n(fu"}, {"tag": "NAME", "value": "Jason Mulligan", "start": 57, "end": 71, "context": "Tiny LRU cache for Client or Server\n *\n * @author Jason Mulligan <jason.mulligan@avoidwork.com>\n * @copyright 2018"}]
|
// Copyright (c) 2016 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "bench.h"
#include "chainparams.h"
#include "validation.h"
#include "streams.h"
#include "consensus/validation.h"
namespace block_bench {
#include "bench/data/block413567.raw.h"
}
// These are the two major time-sinks which happen after we have fully received
// a block off the wire, but before we can relay the block on to peers using
// compact block relay.
// Searchcoin uses block height 878439, hash 0babe680f55a55d54339511226755f0837261da89a4e78eba4d6436a63026df8
// which contains 3808 transactions.
// Measures raw deserialization of a full mainnet block from its wire bytes.
static void DeserializeBlockTest(benchmark::State& state)
{
    // Wrap the embedded serialized block in a stream we can rewind per iteration.
    CDataStream stream((const char*)block_bench::block413567,
                       (const char*)&block_bench::block413567[sizeof(block_bench::block413567)],
                       SER_NETWORK, PROTOCOL_VERSION);
    char padding;
    stream.write(&padding, 1); // Prevent compaction

    while (state.KeepRunning()) {
        CBlock block;
        stream >> block;
        assert(stream.Rewind(sizeof(block_bench::block413567)));
    }
}
// Same as above, but each iteration additionally runs consensus CheckBlock().
static void DeserializeAndCheckBlockTest(benchmark::State& state)
{
    CDataStream stream((const char*)block_bench::block413567,
                       (const char*)&block_bench::block413567[sizeof(block_bench::block413567)],
                       SER_NETWORK, PROTOCOL_VERSION);
    char padding;
    stream.write(&padding, 1); // Prevent compaction

    // Consensus parameters are loop-invariant; fetch them once.
    Consensus::Params params = Params(CBaseChainParams::MAIN).GetConsensus();
    while (state.KeepRunning()) {
        // CBlock caches its checked state, so a fresh instance is needed each time.
        CBlock block;
        stream >> block;
        assert(stream.Rewind(sizeof(block_bench::block413567)));

        CValidationState validationState;
        assert(CheckBlock(block, validationState, params));
    }
}
// Register both variants with the benchmark framework.
BENCHMARK(DeserializeBlockTest);
BENCHMARK(DeserializeAndCheckBlockTest);
|
C++
|
MIT
|
searchcoin/searchcoin/src/bench/checkblock.cpp
|
1703e3e7-4ad2-43c5-8a2f-24b7a81f5673
|
[]
|
[]
|
package org.jboss.windup.rules.apps.javaee.service;
import javax.inject.Inject;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.windup.graph.GraphContext;
import org.jboss.windup.graph.GraphContextFactory;
import org.jboss.windup.graph.model.ProjectModel;
import org.jboss.windup.graph.service.ProjectService;
import org.jboss.windup.rules.apps.javaee.AbstractTest;
import org.jboss.windup.rules.apps.javaee.model.HibernateConfigurationFileModel;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.google.common.collect.Iterables;
/**
* @author <a href="mailto:jesse.sightler@gmail.com">Jesse Sightler</a>
*/
@RunWith(Arquillian.class)
public class HibernateConfigurationFileServiceTest extends AbstractTest
{
    @Inject
    GraphContextFactory factory;

    /**
     * Verifies that hibernate configuration file models are correctly
     * filtered per application project, while findAll() sees every model.
     */
    @Test
    public void testHibernateConfigurationFindByProject() throws Exception
    {
        try (GraphContext context = factory.create(true))
        {
            ProjectService projects = new ProjectService(context);
            ProjectModel firstApp = projects.create();
            firstApp.setName("app1");
            ProjectModel secondApp = projects.create();
            secondApp.setName("app2");

            HibernateConfigurationFileService service = new HibernateConfigurationFileService(context);
            HibernateConfigurationFileModel firstModel = service.create();
            firstApp.addFileModel(firstModel);
            HibernateConfigurationFileModel secondModel = service.create();
            secondApp.addFileModel(secondModel);
            // A third model that is attached to no application at all.
            service.create();

            Assert.assertEquals(3, Iterables.size(service.findAll()));
            Assert.assertEquals(1, Iterables.size(service.findAllByApplication(firstApp)));
            Assert.assertEquals(1, Iterables.size(service.findAllByApplication(secondApp)));
            Assert.assertTrue(Iterables.contains(service.findAllByApplication(firstApp), firstModel));
            Assert.assertTrue(Iterables.contains(service.findAllByApplication(secondApp), secondModel));
        }
    }
}
|
Java
|
EPL-1.0
|
ItsLeon15/windup/rules-java-ee/tests/src/test/java/org/jboss/windup/rules/apps/javaee/service/HibernateConfigurationFileServiceTest.java
|
8daea9e4-5a66-45fb-bedc-dcf27fdfb51c
|
[{"tag": "EMAIL", "value": "jesse.sightler@gmail.com", "start": 622, "end": 646, "context": "ollect.Iterables;\n\n/**\n * @author <a href=\"mailto:jesse.sightler@gmail.com\">Jesse Sightler</a>\n */\n@RunWith(Arquillian.class"}, {"tag": "NAME", "value": "Jesse Sightler", "start": 648, "end": 662, "context": "@author <a href=\"mailto:jesse.sightler@gmail.com\">Jesse Sightler</a>\n */\n@RunWith(Arquillian.class)\npublic class H"}]
|
[{"tag": "EMAIL", "value": "jesse.sightler@gmail.com", "start": 622, "end": 646, "context": "ollect.Iterables;\n\n/**\n * @author <a href=\"mailto:jesse.sightler@gmail.com\">Jesse Sightler</a>\n */\n@RunWith(Arquillian.class"}, {"tag": "NAME", "value": "Jesse Sightler", "start": 648, "end": 662, "context": "@author <a href=\"mailto:jesse.sightler@gmail.com\">Jesse Sightler</a>\n */\n@RunWith(Arquillian.class)\npublic class H"}]
|
//------------------------------------------------------------------------------
// <auto-generated>
// Этот код создан программным средством.
// Версия среды выполнения: 4.0.30319.42000
//
// Изменения в этом файле могут привести к неправильному поведению и будут утрачены, если
// код создан повторно.
// </auto-generated>
//------------------------------------------------------------------------------
namespace VkStickerUploader.Properties
{
    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file and then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources
    {
        // Lazily-created shared ResourceManager for this assembly's resources.
        private static global::System.Resources.ResourceManager resourceMan;

        // Culture override used for lookups; null means the thread's CurrentUICulture.
        private static global::System.Globalization.CultureInfo resourceCulture;

        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources()
        {
        }

        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager
        {
            get
            {
                if ((resourceMan == null))
                {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("VkStickerUploader.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }

        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups made through this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture
        {
            get
            {
                return resourceCulture;
            }
            set
            {
                resourceCulture = value;
            }
        }
    }
}
|
C#
|
MIT
|
KonstantinMitish/VkStickerUploader/Properties/Resources.Designer.cs
|
8637b2fa-aa30-475b-aec6-88280f720b7d
|
[{"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 988, "end": 995, "context": "m.Resources.Tools.StronglyTypedResourceBuilder\", \"4.0.0.0\")]\n [global::System.Diagnostics.DebuggerNonUserC"}]
|
[{"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 988, "end": 995, "context": "m.Resources.Tools.StronglyTypedResourceBuilder\", \"4.0.0.0\")]\n [global::System.Diagnostics.DebuggerNonUserC"}]
|
// Defer initialization until the DOM is ready, then hand control to main().
$(function() {
  consoleInit(main)
});
const BANKSY_CHEF_ABI = [{"inputs":[{"internalType":"contract BanksyTokenV3","name":"_banksy","type":"address"},{"internalType":"address","name":"_feeAddress","type":"address"},{"internalType":"address","name":"_treasuryAddress","type":"address"},{"internalType":"uint256","name":"_banksyPerSecond","type":"uint256"},{"internalType":"uint256","name":"_startTime","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":true,"internalType":"uint256","name":"pid","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":true,"internalType":"uint256","name":"pid","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"EmergencyWithdraw","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"caller","type":"address"},{"indexed":false,"internalType":"uint256","name":"previousAmount","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"newAmount","type":"uint256"}],"name":"SetEmissionRate","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":true,"internalType":"address","name":"newAddress","type":"address"}],"name":"SetFeeAddress","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":true,"internalType":"address","name":"newAddress","type":"address"}],"name":"SetTreasuryAddress","type":"even
t"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"newStartTime","type":"uint256"}],"name":"SetstartTime","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":true,"internalType":"uint256","name":"pid","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Withdraw","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint256","name":"pid","type":"uint256"},{"indexed":false,"internalType":"address","name":"lpToken","type":"address"},{"indexed":false,"internalType":"uint256","name":"allocPoint","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"depositFeeBP","type":"uint256"}],"name":"addPool","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint256","name":"pid","type":"uint256"},{"indexed":false,"internalType":"address","name":"lpToken","type":"address"},{"indexed":false,"internalType":"uint256","name":"allocPoint","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"depositFeeBP","type":"uint256"}],"name":"setPool","type":"event"},{"inputs":[],"name":"MAX_EMISSION_RATE","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_allocPoint","type":"uint256"},{"internalType":"contract IERC20","name":"_lpToken","type":"address"},{"internalType":"uint16","name":"_depositFeeBP","type":"uint16"},{"internalType":"bool","name":"_withUpdate","type":"bool"}],"name":"add","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"banksy","outputs":[{"internalType":"contract 
BanksyTokenV3","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"banksyMaximumSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"banksyPerSecond","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_pid","type":"uint256"},{"internalType":"uint256","name":"_amount","type":"uint256"}],"name":"deposit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_pid","type":"uint256"}],"name":"emergencyWithdraw","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"emmissionEndTime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"feeAddress","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_from","type":"uint256"},{"internalType":"uint256","name":"_to","type":"uint256"}],"name":"getMultiplier","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"massUpdatePools","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_pid","type":"uint256"},{"internalType":"address","name":"_user","type":"address"}],"name":"pendingBanksy","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"contract 
IERC20","name":"","type":"address"}],"name":"poolExistence","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"poolInfo","outputs":[{"internalType":"contract IERC20","name":"lpToken","type":"address"},{"internalType":"uint256","name":"allocPoint","type":"uint256"},{"internalType":"uint256","name":"lastRewardTime","type":"uint256"},{"internalType":"uint256","name":"accBanksyPerShare","type":"uint256"},{"internalType":"uint16","name":"depositFeeBP","type":"uint16"},{"internalType":"uint256","name":"lpSupply","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"poolLength","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_pid","type":"uint256"},{"internalType":"uint256","name":"_allocPoint","type":"uint256"},{"internalType":"uint16","name":"_depositFeeBP","type":"uint16"},{"internalType":"bool","name":"_withUpdate","type":"bool"}],"name":"set","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_banksyPerSecond","type":"uint256"}],"name":"setEmissionRate","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_feeAddress","type":"address"}],"name":"setFeeAddress","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"_newstartTime","type":"uint256"}],"name":"setStartTime","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"_treasuryAddress","type":"address"}],"name":"setTreasuryAddress","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"startTime","outputs":[{"in
ternalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalAllocPoint","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"treasuryAddress","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_pid","type":"uint256"}],"name":"updatePool","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"},{"internalType":"address","name":"","type":"address"}],"name":"userInfo","outputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"uint256","name":"rewardDebt","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"_pid","type":"uint256"},{"internalType":"uint256","name":"_amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"}]
async function main() {
const App = await init_ethers();
_print(`Initialized ${App.YOUR_ADDRESS}\n`);
_print("Reading smart contracts...\n");
const BANKSY_CHEF_ADDR = "0x6daa10F9D8F3EBAc21BEcA9edC8b86EE32E33cD0";
const rewardTokenTicker = "BANKSY";
const BANKSY_CHEF = new ethers.Contract(BANKSY_CHEF_ADDR, BANKSY_CHEF_ABI, App.provider);
const rewardsPerWeek = await BANKSY_CHEF.banksyPerSecond() / 1e18 * 86400*7;
const tokens = {};
const prices = await getFantomPrices();
await loadFantomChefContract(App, tokens, prices, BANKSY_CHEF, BANKSY_CHEF_ADDR, BANKSY_CHEF_ABI, rewardTokenTicker,
"banksy", null, rewardsPerWeek, "pendingBanksy");
hideLoading();
}
|
JavaScript
|
MIT
|
ThorusFi/vfat-tools/src/static/js/fantom_banksyfarm.js
|
27b1c546-5f68-4b45-8613-2f99b587d357
|
[{"tag": "USERNAME", "value": "BANKSY", "start": 9137, "end": 9143, "context": "edC8b86EE32E33cD0\";\n\n const rewardTokenTicker = \"BANKSY\";\n const BANKSY_CHEF = new ethers.Contract(BANKS"}]
|
[{"tag": "USERNAME", "value": "BANKSY", "start": 9137, "end": 9143, "context": "edC8b86EE32E33cD0\";\n\n const rewardTokenTicker = \"BANKSY\";\n const BANKSY_CHEF = new ethers.Contract(BANKS"}]
|
import sys
import logging
import urlparse
import urllib
import redis
from flask import Flask, current_app
from flask_sslify import SSLify
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.routing import BaseConverter
from statsd import StatsClient
from flask_mail import Mail
from flask_limiter import Limiter
from flask_limiter.util import get_ipaddr
from flask_migrate import Migrate
from redash import settings
from redash.query_runner import import_query_runners
from redash.destinations import import_destinations
__version__ = '7.0.0-beta'
import os
if os.environ.get("REMOTE_DEBUG"):
import ptvsd
ptvsd.enable_attach(address=('0.0.0.0', 5678))
def setup_logging():
handler = logging.StreamHandler(sys.stdout if settings.LOG_STDOUT else sys.stderr)
formatter = logging.Formatter(settings.LOG_FORMAT)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(settings.LOG_LEVEL)
# Make noisy libraries less noisy
if settings.LOG_LEVEL != "DEBUG":
logging.getLogger("passlib").setLevel("ERROR")
logging.getLogger("requests.packages.urllib3").setLevel("ERROR")
logging.getLogger("snowflake.connector").setLevel("ERROR")
logging.getLogger('apiclient').setLevel("ERROR")
def create_redis_connection():
logging.debug("Creating Redis connection (%s)", settings.REDIS_URL)
redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.scheme == 'redis+socket':
qs = urlparse.parse_qs(redis_url.query)
if 'virtual_host' in qs:
db = qs['virtual_host'][0]
else:
db = 0
client = redis.StrictRedis(unix_socket_path=redis_url.path, db=db)
else:
if redis_url.path:
redis_db = redis_url.path[1]
else:
redis_db = 0
# Redis passwords might be quoted with special characters
redis_password = redis_url.password and urllib.unquote(redis_url.password)
client = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_password)
return client
setup_logging()
redis_connection = create_redis_connection()
mail = Mail()
migrate = Migrate()
mail.init_mail(settings.all_settings())
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE)
import_query_runners(settings.QUERY_RUNNERS)
import_destinations(settings.DESTINATIONS)
from redash.version_check import reset_new_version_status
reset_new_version_status()
class SlugConverter(BaseConverter):
def to_python(self, value):
# This is ay workaround for when we enable multi-org and some files are being called by the index rule:
# for path in settings.STATIC_ASSETS_PATHS:
# full_path = safe_join(path, value)
# if os.path.isfile(full_path):
# raise ValidationError()
return value
def to_url(self, value):
return value
def create_app():
from redash import authentication, extensions, handlers
from redash.handlers.webpack import configure_webpack
from redash.handlers import chrome_logger
from redash.models import db, users
from redash.metrics.request import provision_app
from redash.utils import sentry
sentry.init()
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
# Make sure we get the right referral address even behind proxies like nginx.
app.wsgi_app = ProxyFix(app.wsgi_app, settings.PROXIES_COUNT)
app.url_map.converters['org_slug'] = SlugConverter
if settings.ENFORCE_HTTPS:
SSLify(app, skips=['ping'])
# configure our database
app.config['SQLALCHEMY_DATABASE_URI'] = settings.SQLALCHEMY_DATABASE_URI
app.config.update(settings.all_settings())
provision_app(app)
db.init_app(app)
migrate.init_app(app, db)
mail.init_app(app)
authentication.init_app(app)
limiter.init_app(app)
handlers.init_app(app)
configure_webpack(app)
extensions.init_extensions(app)
chrome_logger.init_app(app)
users.init_app(app)
return app
|
Python
|
BSD-2-Clause
|
CodeGerm/redash/redash/__init__.py
|
5785cebb-c28c-432d-9df5-f015f4300d08
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 657, "end": 664, "context": " import ptvsd\n ptvsd.enable_attach(address=('0.0.0.0', 5678))\n\n\ndef setup_logging():\n handler = log"}]
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 657, "end": 664, "context": " import ptvsd\n ptvsd.enable_attach(address=('0.0.0.0', 5678))\n\n\ndef setup_logging():\n handler = log"}]
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : utils_node.py
@Time : 2022/03/08 14:35:13
@Author : Jianwen Chen
@Version : 1.0
@Contact : chenjw48@mail2.sysu.edu.cn
@License : (C)Copyright 2021-2022, SAIL-Lab
'''
######################################## import area ########################################
# common library
import os
import random
import torch
import torch.nn as nn
import numpy as np
from tqdm import tqdm
from sklearn import metrics
from torch.optim.lr_scheduler import _LRScheduler
######################################## function area ########################################
def seed_everything(seed=2021):
os.environ['PYTHONHASHSEED'] = str(seed)
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
def initialize_weights(model):
"""
Initializes the weights of a model in place.
:param model: An nn.Module.
"""
for param in model.parameters():
if param.dim() > 1:
nn.init.xavier_normal_(param)
def loop(data_loader, model, optimizer, scheduler, device):
batch_size = data_loader.batch_size
data_loader = tqdm(data_loader) if optimizer is not None else data_loader
loss_sum, y_true, y_pred = 0.0, list(), list()
for batch in data_loader:
smiles, mols, batch_node_features, batch_edge_features, batch_distance_matrix, labels = batch
# add mask
batch_masks = torch.sum(torch.abs(batch_node_features), dim=-1) != 0
# (batch, max_length, node_dim)
batch_node_features = batch_node_features.to(device)
# (batch, max_length, max_length, edge_dim)
batch_edge_features = batch_edge_features.to(device)
# (batch, max_length, max_length)
batch_distance_matrix = batch_distance_matrix.to(device)
# (batch, max_length)
batch_masks = batch_masks.to(device)
# (batch, max_length, 1)
labels = labels.to(device)
# (batch, max_length, 1)
outputs = model(batch_node_features, batch_edge_features, batch_distance_matrix, batch_masks, device)
# loss calculation
loss = cal_loss(y_true=labels, y_pred=outputs, device=device)
loss_sum += loss.item()
if optimizer is not None:
# clear gradients for this training step
optimizer.zero_grad()
# back propagation, compute gradients
loss.backward()
# apply gradients
optimizer.step()
# NormLR need step every batch
if scheduler is not None:
scheduler.step()
# collect result
labels = labels.detach().cpu().numpy()
outputs = outputs.detach().cpu().numpy()
y_true.append([])
y_pred.append([])
for label, output in zip(labels, outputs):
label, output = label.flatten(), output.flatten()
for l, o in zip(label, output):
if l != 0.0:
y_true[-1].append(l)
y_pred[-1].append(o)
# clear cuda cache
torch.cuda.empty_cache()
# metric calculation
results = cal_metric(y_true=y_true, y_pred=y_pred)
results['loss'] = loss_sum / (len(data_loader) * batch_size)
return results
def cal_loss(y_true, y_pred, device):
y_true, y_pred = y_true.flatten(), y_pred.flatten()
y_mask = torch.where(y_true != 0.0, torch.full_like(y_true, 1), torch.full_like(y_true, 0))
loss = torch.sum(torch.abs(y_true - y_pred) * y_mask) / torch.sum(y_mask)
return loss
def cal_metric(y_true, y_pred):
concatenate_true, concatenate_pred = np.concatenate(y_true, axis=-1), np.concatenate(y_pred, axis=-1)
mae = metrics.mean_absolute_error(concatenate_true, concatenate_pred)
r2 = metrics.r2_score(concatenate_true, concatenate_pred)
return {'mae':mae, 'r2':r2}
class NoamLR(_LRScheduler):
"""
Noam learning rate scheduler with piecewise linear increase and exponential decay.
The learning rate increases linearly from init_lr to max_lr over the course of
the first warmup_steps (where warmup_steps = warmup_epochs * steps_per_epoch).
Then the learning rate decreases exponentially from max_lr to final_lr over the
course of the remaining total_steps - warmup_steps (where total_steps =
total_epochs * steps_per_epoch). This is roughly based on the learning rate
schedule from Attention is All You Need, section 5.3 (https://arxiv.org/abs/1706.03762).
"""
def __init__(self, optimizer, warmup_epochs, total_epochs, steps_per_epoch, init_lr, max_lr, final_lr):
"""
Initializes the learning rate scheduler.
:param optimizer: A PyTorch optimizer.
:param warmup_epochs: The number of epochs during which to linearly increase the learning rate.
:param total_epochs: The total number of epochs.
:param steps_per_epoch: The number of steps (batches) per epoch.
:param init_lr: The initial learning rate.
:param max_lr: The maximum learning rate (achieved after warmup_epochs).
:param final_lr: The final learning rate (achieved after total_epochs).
"""
assert len(optimizer.param_groups) == len(warmup_epochs) == len(total_epochs) == len(init_lr) == len(max_lr) == len(final_lr)
self.num_lrs = len(optimizer.param_groups)
self.optimizer = optimizer
self.warmup_epochs = np.array(warmup_epochs)
self.total_epochs = np.array(total_epochs)
self.steps_per_epoch = steps_per_epoch
self.init_lr = np.array(init_lr)
self.max_lr = np.array(max_lr)
self.final_lr = np.array(final_lr)
self.current_step = 0
self.lr = init_lr
self.warmup_steps = (self.warmup_epochs * self.steps_per_epoch).astype(int)
self.total_steps = self.total_epochs * self.steps_per_epoch
self.linear_increment = (self.max_lr - self.init_lr) / self.warmup_steps
self.exponential_gamma = (self.final_lr / self.max_lr) ** (1 / (self.total_steps - self.warmup_steps))
super(NoamLR, self).__init__(optimizer)
def get_lr(self):
"""Gets a list of the current learning rates."""
return list(self.lr)
def step(self, current_step: int = None):
"""
Updates the learning rate by taking a step.
:param current_step: Optionally specify what step to set the learning rate to.
If None, current_step = self.current_step + 1.
"""
if current_step is not None:
self.current_step = current_step
else:
self.current_step += 1
for i in range(self.num_lrs):
if self.current_step <= self.warmup_steps[i]:
self.lr[i] = self.init_lr[i] + self.current_step * self.linear_increment[i]
elif self.current_step <= self.total_steps[i]:
self.lr[i] = self.max_lr[i] * (self.exponential_gamma[i] ** (self.current_step - self.warmup_steps[i]))
else: # theoretically this case should never be reached since training should stop at total_steps
self.lr[i] = self.final_lr[i]
self.optimizer.param_groups[i]['lr'] = self.lr[i]
|
Python
|
MIT
|
jcchan23/SAIL/Repeat/CoMPT/utils_node.py
|
c8c95b6b-352f-4b34-ad0c-71e47819251d
|
[{"tag": "NAME", "value": "Jianwen Chen", "start": 125, "end": 137, "context": ".py\n@Time : 2022/03/08 14:35:13\n@Author : Jianwen Chen\n@Version : 1.0\n@Contact : chenjw48@mail2.sysu"}, {"tag": "EMAIL", "value": "chenjw48@mail2.sysu.edu.cn", "start": 168, "end": 194, "context": "r : Jianwen Chen\n@Version : 1.0\n@Contact : chenjw48@mail2.sysu.edu.cn\n@License : (C)Copyright 2021-2022, SAIL-Lab\n'''"}]
|
[{"tag": "NAME", "value": "Jianwen Chen", "start": 125, "end": 137, "context": ".py\n@Time : 2022/03/08 14:35:13\n@Author : Jianwen Chen\n@Version : 1.0\n@Contact : chenjw48@mail2.sysu"}, {"tag": "EMAIL", "value": "chenjw48@mail2.sysu.edu.cn", "start": 168, "end": 194, "context": "r : Jianwen Chen\n@Version : 1.0\n@Contact : chenjw48@mail2.sysu.edu.cn\n@License : (C)Copyright 2021-2022, SAIL-Lab\n'''"}]
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
import { shallow } from 'enzyme';
import { UserList } from '.';
import * as i18n from '../case_view/translations';
describe('UserList ', () => {
const title = 'Case Title';
const caseLink = 'http://reddit.com';
const user = { username: 'username', fullName: 'Full Name', email: 'testemail@elastic.co' };
const open = jest.fn();
beforeAll(() => {
window.open = open;
});
beforeEach(() => {
jest.resetAllMocks();
});
it('triggers mailto when email icon clicked', () => {
const wrapper = shallow(
<UserList
email={{
subject: i18n.EMAIL_SUBJECT(title),
body: i18n.EMAIL_BODY(caseLink),
}}
headline={i18n.REPORTER}
users={[user]}
/>
);
wrapper.find('[data-test-subj="user-list-email-button"]').simulate('click');
expect(open).toBeCalledWith(
`mailto:${user.email}?subject=${i18n.EMAIL_SUBJECT(title)}&body=${i18n.EMAIL_BODY(caseLink)}`,
'_blank'
);
});
});
|
TypeScript
|
Apache-2.0
|
AlexanderWert/kibana/x-pack/plugins/security_solution/public/cases/components/user_list/index.test.tsx
|
990d1f73-8111-4c74-8065-ef8996d63ace
|
[]
|
[]
|
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
/*
|--------------------------------------------------------------------------
| Base Site URL
|--------------------------------------------------------------------------
|
| URL to your CodeIgniter root. Typically this will be your base URL,
| WITH a trailing slash:
|
| http://example.com/
|
| If this is not set then CodeIgniter will try guess the protocol, domain
| and path to your installation. However, you should always configure this
| explicitly and never rely on auto-guessing, especially in production
| environments.
|
*/
// $config['base_url'] = 'http://localhost:8000/';
$config['base_url'] = 'http://'.$_SERVER['SERVER_NAME'].'/~mar158/365/polls';;
/*
|--------------------------------------------------------------------------
| Index File
|--------------------------------------------------------------------------
|
| Typically this will be your index.php file, unless you've renamed it to
| something else. If you are using mod_rewrite to remove the page set this
| variable so that it is blank.
|
*/
$config['index_page'] = '';
/*
|--------------------------------------------------------------------------
| URI PROTOCOL
|--------------------------------------------------------------------------
|
| This item determines which server global should be used to retrieve the
| URI string. The default setting of 'REQUEST_URI' works for most servers.
| If your links do not seem to work, try one of the other delicious flavors:
|
| 'REQUEST_URI' Uses $_SERVER['REQUEST_URI']
| 'QUERY_STRING' Uses $_SERVER['QUERY_STRING']
| 'PATH_INFO' Uses $_SERVER['PATH_INFO']
|
| WARNING: If you set this to 'PATH_INFO', URIs will always be URL-decoded!
*/
$config['uri_protocol'] = 'REQUEST_URI';
/*
|--------------------------------------------------------------------------
| URL suffix
|--------------------------------------------------------------------------
|
| This option allows you to add a suffix to all URLs generated by CodeIgniter.
| For more information please see the user guide:
|
| http://codeigniter.com/user_guide/general/urls.html
*/
$config['url_suffix'] = '';
/*
|--------------------------------------------------------------------------
| Default Language
|--------------------------------------------------------------------------
|
| This determines which set of language files should be used. Make sure
| there is an available translation if you intend to use something other
| than english.
|
*/
$config['language'] = 'english';
/*
|--------------------------------------------------------------------------
| Default Character Set
|--------------------------------------------------------------------------
|
| This determines which character set is used by default in various methods
| that require a character set to be provided.
|
| See http://php.net/htmlspecialchars for a list of supported charsets.
|
*/
$config['charset'] = 'UTF-8';
/*
|--------------------------------------------------------------------------
| Enable/Disable System Hooks
|--------------------------------------------------------------------------
|
| If you would like to use the 'hooks' feature you must enable it by
| setting this variable to TRUE (boolean). See the user guide for details.
|
*/
$config['enable_hooks'] = FALSE;
/*
|--------------------------------------------------------------------------
| Class Extension Prefix
|--------------------------------------------------------------------------
|
| This item allows you to set the filename/classname prefix when extending
| native libraries. For more information please see the user guide:
|
| http://codeigniter.com/user_guide/general/core_classes.html
| http://codeigniter.com/user_guide/general/creating_libraries.html
|
*/
$config['subclass_prefix'] = 'MY_';
/*
|--------------------------------------------------------------------------
| Composer auto-loading
|--------------------------------------------------------------------------
|
| Enabling this setting will tell CodeIgniter to look for a Composer
| package auto-loader script in application/vendor/autoload.php.
|
| $config['composer_autoload'] = TRUE;
|
| Or if you have your vendor/ directory located somewhere else, you
| can opt to set a specific path as well:
|
| $config['composer_autoload'] = '/path/to/vendor/autoload.php';
|
| For more information about Composer, please visit http://getcomposer.org/
|
| Note: This will NOT disable or override the CodeIgniter-specific
| autoloading (application/config/autoload.php)
*/
$config['composer_autoload'] = FALSE;
/*
|--------------------------------------------------------------------------
| Allowed URL Characters
|--------------------------------------------------------------------------
|
| This lets you specify which characters are permitted within your URLs.
| When someone tries to submit a URL with disallowed characters they will
| get a warning message.
|
| As a security measure you are STRONGLY encouraged to restrict URLs to
| as few characters as possible. By default only these are allowed: a-z 0-9~%.:_-
|
| Leave blank to allow all characters -- but only if you are insane.
|
| The configured value is actually a regular expression character group
| and it will be executed as: ! preg_match('/^[<permitted_uri_chars>]+$/i
|
| DO NOT CHANGE THIS UNLESS YOU FULLY UNDERSTAND THE REPERCUSSIONS!!
|
*/
$config['permitted_uri_chars'] = 'a-z 0-9~%.:_\-';
/*
|--------------------------------------------------------------------------
| Enable Query Strings
|--------------------------------------------------------------------------
|
| By default CodeIgniter uses search-engine friendly segment based URLs:
| example.com/who/what/where/
|
| By default CodeIgniter enables access to the $_GET array. If for some
| reason you would like to disable it, set 'allow_get_array' to FALSE.
|
| You can optionally enable standard query string based URLs:
| example.com?who=me&what=something&where=here
|
| Options are: TRUE or FALSE (boolean)
|
| The other items let you set the query string 'words' that will
| invoke your controllers and its functions:
| example.com/index.php?c=controller&m=function
|
| Please note that some of the helpers won't work as expected when
| this feature is enabled, since CodeIgniter is designed primarily to
| use segment based URLs.
|
*/
$config['allow_get_array'] = TRUE;
$config['enable_query_strings'] = FALSE;
$config['controller_trigger'] = 'c';
$config['function_trigger'] = 'm';
$config['directory_trigger'] = 'd';
/*
|--------------------------------------------------------------------------
| Error Logging Threshold
|--------------------------------------------------------------------------
|
| If you have enabled error logging, you can set an error threshold to
| determine what gets logged. Threshold options are:
| You can enable error logging by setting a threshold over zero. The
| threshold determines what gets logged. Threshold options are:
|
| 0 = Disables logging, Error logging TURNED OFF
| 1 = Error Messages (including PHP errors)
| 2 = Debug Messages
| 3 = Informational Messages
| 4 = All Messages
|
| You can also pass an array with threshold levels to show individual error types
|
| array(2) = Debug Messages, without Error Messages
|
| For a live site you'll usually only enable Errors (1) to be logged otherwise
| your log files will fill up very fast.
|
*/
$config['log_threshold'] = 0;
/*
|--------------------------------------------------------------------------
| Error Logging Directory Path
|--------------------------------------------------------------------------
|
| Leave this BLANK unless you would like to set something other than the default
| application/logs/ directory. Use a full server path with trailing slash.
|
*/
$config['log_path'] = '';
/*
|--------------------------------------------------------------------------
| Log File Extension
|--------------------------------------------------------------------------
|
| The default filename extension for log files. The default 'php' allows for
| protecting the log files via basic scripting, when they are to be stored
| under a publicly accessible directory.
|
| Note: Leaving it blank will default to 'php'.
|
*/
$config['log_file_extension'] = '';
/*
|--------------------------------------------------------------------------
| Log File Permissions
|--------------------------------------------------------------------------
|
| The file system permissions to be applied on newly created log files.
|
| IMPORTANT: This MUST be an integer (no quotes) and you MUST use octal
|            integer notation (i.e. 0700, 0644, etc.)
*/
$config['log_file_permissions'] = 0644;
/*
|--------------------------------------------------------------------------
| Date Format for Logs
|--------------------------------------------------------------------------
|
| Each item that is logged has an associated date. You can use PHP date
| codes to set your own date formatting
|
*/
$config['log_date_format'] = 'Y-m-d H:i:s';
/*
|--------------------------------------------------------------------------
| Error Views Directory Path
|--------------------------------------------------------------------------
|
| Leave this BLANK unless you would like to set something other than the default
| application/views/errors/ directory.  Use a full server path with trailing slash.
|
*/
$config['error_views_path'] = '';
/*
|--------------------------------------------------------------------------
| Cache Directory Path
|--------------------------------------------------------------------------
|
| Leave this BLANK unless you would like to set something other than the default
| application/cache/ directory.  Use a full server path with trailing slash.
|
*/
$config['cache_path'] = '';
/*
|--------------------------------------------------------------------------
| Cache Include Query String
|--------------------------------------------------------------------------
|
| Set this to TRUE if you want to use different cache files depending on the
| URL query string. Please be aware this might result in numerous cache files.
|
*/
$config['cache_query_string'] = FALSE;
/*
|--------------------------------------------------------------------------
| Encryption Key
|--------------------------------------------------------------------------
|
| If you use the Encryption class, you must set an encryption key.
| See the user guide for more info.
|
| http://codeigniter.com/user_guide/libraries/encryption.html
|
*/
$config['encryption_key'] = '';
/*
|--------------------------------------------------------------------------
| Session Variables
|--------------------------------------------------------------------------
|
| 'sess_driver'
|
|	The storage driver to use: files, database, redis, memcached
|
| 'sess_cookie_name'
|
|	The session cookie name, must contain only [0-9a-z_-] characters
|
| 'sess_expiration'
|
|	The number of SECONDS you want the session to last.
|	Setting to 0 (zero) means expire when the browser is closed.
|
| 'sess_save_path'
|
|	The location to save sessions to, driver dependent.
|
|	For the 'files' driver, it's a path to a writable directory.
|	WARNING: Only absolute paths are supported!
|
|	For the 'database' driver, it's a table name.
|	Please read up the manual for the format with other session drivers.
|
|	IMPORTANT: You are REQUIRED to set a valid save path!
|
| 'sess_match_ip'
|
|	Whether to match the user's IP address when reading the session data.
|
| 'sess_time_to_update'
|
|	How many seconds between CI regenerating the session ID.
|
| 'sess_regenerate_destroy'
|
|	Whether to destroy session data associated with the old session ID
|	when auto-regenerating the session ID. When set to FALSE, the data
|	will be later deleted by the garbage collector.
|
| Other session cookie settings are shared with the rest of the application,
| except for 'cookie_prefix' and 'cookie_httponly', which are ignored here.
|
*/
$config['sess_driver'] = 'files';
$config['sess_cookie_name'] = 'ci_session';
$config['sess_expiration'] = 7200;
$config['sess_save_path'] = NULL;
$config['sess_match_ip'] = FALSE;
$config['sess_time_to_update'] = 300;
$config['sess_regenerate_destroy'] = FALSE;
/*
|--------------------------------------------------------------------------
| Cookie Related Variables
|--------------------------------------------------------------------------
|
| 'cookie_prefix'   = Set a cookie name prefix if you need to avoid collisions
| 'cookie_domain'   = Set to .your-domain.com for site-wide cookies
| 'cookie_path'     = Typically will be a forward slash
| 'cookie_secure'   = Cookie will only be set if a secure HTTPS connection exists.
| 'cookie_httponly' = Cookie will only be accessible via HTTP(S) (no javascript)
|
| Note: These settings (with the exception of 'cookie_prefix' and
|       'cookie_httponly') will also affect sessions.
|
*/
$config['cookie_prefix']	= '';
$config['cookie_domain']	= '';
$config['cookie_path']		= '/';
$config['cookie_secure']	= FALSE;
$config['cookie_httponly'] 	= FALSE;
/*
|--------------------------------------------------------------------------
| Standardize newlines
|--------------------------------------------------------------------------
|
| Determines whether to standardize newline characters in input data,
| meaning to replace \r\n, \r, \n occurrences with the PHP_EOL value.
|
| This is particularly useful for portability between UNIX-based OSes,
| (usually \n) and Windows (\r\n).
|
*/
$config['standardize_newlines'] = FALSE;
/*
|--------------------------------------------------------------------------
| Global XSS Filtering
|--------------------------------------------------------------------------
|
| Determines whether the XSS filter is always active when GET, POST or
| COOKIE data is encountered
|
| WARNING: This feature is DEPRECATED and currently available only
|          for backwards compatibility purposes!
|
*/
$config['global_xss_filtering'] = FALSE;
/*
|--------------------------------------------------------------------------
| Cross Site Request Forgery
|--------------------------------------------------------------------------
| Enables a CSRF cookie token to be set. When set to TRUE, token will be
| checked on a submitted form. If you are accepting user data, it is strongly
| recommended CSRF protection be enabled.
|
| 'csrf_token_name'   = The token name
| 'csrf_cookie_name'  = The cookie name
| 'csrf_expire'       = The number in seconds the token should expire.
| 'csrf_regenerate'   = Regenerate token on every submission
| 'csrf_exclude_uris' = Array of URIs which ignore CSRF checks
*/
$config['csrf_protection'] = FALSE;
$config['csrf_token_name'] = 'csrf_test_name';
$config['csrf_cookie_name'] = 'csrf_cookie_name';
$config['csrf_expire'] = 7200;
$config['csrf_regenerate'] = TRUE;
$config['csrf_exclude_uris'] = array();
/*
|--------------------------------------------------------------------------
| Output Compression
|--------------------------------------------------------------------------
|
| Enables Gzip output compression for faster page loads. When enabled,
| the output class will test whether your server supports Gzip.
| Even if it does, however, not all browsers support compression
| so enable only if you are reasonably sure your visitors can handle it.
|
| Only used if zlib.output_compression is turned off in your php.ini.
| Please do not use it together with httpd-level output compression.
|
| VERY IMPORTANT: If you are getting a blank page when compression is enabled it
| means you are prematurely outputting something to your browser. It could
| even be a line of whitespace at the end of one of your scripts. For
| compression to work, nothing can be sent before the output buffer is called
| by the output class. Do not 'echo' any values with compression enabled.
|
*/
$config['compress_output'] = FALSE;
/*
|--------------------------------------------------------------------------
| Master Time Reference
|--------------------------------------------------------------------------
|
| Options are 'local' or any PHP supported timezone. This preference tells
| the system whether to use your server's local time as the master 'now'
| reference, or convert it to the configured timezone. See the 'date
| helper' page of the user guide for information regarding date handling.
|
*/
$config['time_reference'] = 'local';
/*
|--------------------------------------------------------------------------
| Rewrite PHP Short Tags
|--------------------------------------------------------------------------
|
| If your PHP installation does not have short tag support enabled CI
| can rewrite the tags on-the-fly, enabling you to utilize that syntax
| in your view files.  Options are TRUE or FALSE (boolean)
|
*/
$config['rewrite_short_tags'] = FALSE;
/*
|--------------------------------------------------------------------------
| Reverse Proxy IPs
|--------------------------------------------------------------------------
|
| If your server is behind a reverse proxy, you must whitelist the proxy
| IP addresses from which CodeIgniter should trust headers such as
| HTTP_X_FORWARDED_FOR and HTTP_CLIENT_IP in order to properly identify
| the visitor's IP address.
|
| You can use both an array or a comma-separated list of proxy addresses,
| as well as specifying whole subnets. Here are a few examples:
|
| Comma-separated:	'10.0.1.200,192.168.5.0/24'
| Array:		array('10.0.1.200', '192.168.5.0/24')
*/
$config['proxy_ips'] = '';
|
PHP
|
MIT
|
mrattner/polls/application/config/config.php
|
98f5e2aa-464d-4de7-8b9e-873611cf0b1d
|
[{"tag": "IP_ADDRESS", "value": "192.168.5.0/24", "start": 17486, "end": 17500, "context": " a few examples:\n|\n| Comma-separated:\t'10.0.1.200,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '192.168.5.0/24')\n"}, {"tag": "IP_ADDRESS", "value": "10.0.1.200", "start": 17475, "end": 17485, "context": "s. Here are a few examples:\n|\n| Comma-separated:\t'10.0.1.200,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '19"}, {"tag": "IP_ADDRESS", "value": "10.0.1.200", "start": 17519, "end": 17529, "context": "ted:\t'10.0.1.200,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '192.168.5.0/24')\n*/\n$config['proxy_ips'] = '';"}, {"tag": "IP_ADDRESS", "value": "192.168.5.0/24", "start": 17533, "end": 17547, "context": "00,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '192.168.5.0/24')\n*/\n$config['proxy_ips'] = '';\n"}]
|
[{"tag": "IP_ADDRESS", "value": "192.168.5.0/24", "start": 17486, "end": 17500, "context": " a few examples:\n|\n| Comma-separated:\t'10.0.1.200,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '192.168.5.0/24')\n"}, {"tag": "IP_ADDRESS", "value": "10.0.1.200", "start": 17475, "end": 17485, "context": "s. Here are a few examples:\n|\n| Comma-separated:\t'10.0.1.200,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '19"}, {"tag": "IP_ADDRESS", "value": "10.0.1.200", "start": 17519, "end": 17529, "context": "ted:\t'10.0.1.200,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '192.168.5.0/24')\n*/\n$config['proxy_ips'] = '';"}, {"tag": "IP_ADDRESS", "value": "192.168.5.0/24", "start": 17533, "end": 17547, "context": "00,192.168.5.0/24'\n| Array:\t\tarray('10.0.1.200', '192.168.5.0/24')\n*/\n$config['proxy_ips'] = '';\n"}]
|
# Copyright 2011 Justin Santa Barbara
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import errno
import hashlib
import os
import os.path
import shutil
import tempfile
import jinja2
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import netutils
import six
from ironic.common import exception
from ironic.common import utils
from ironic.tests import base
CONF = cfg.CONF
class BareMetalUtilsTestCase(base.TestCase):
    """Tests for the symlink helper in ironic.common.utils."""

    def test_create_link(self):
        # Happy path: the helper delegates straight to os.symlink.
        with mock.patch.object(os, "symlink", autospec=True) as mock_symlink:
            mock_symlink.return_value = None
            utils.create_link_without_raise("/fake/source", "/fake/link")
            mock_symlink.assert_called_once_with("/fake/source", "/fake/link")

    def test_create_link_EEXIST(self):
        # An already-existing link must be tolerated: os.symlink raising
        # EEXIST must not propagate out of create_link_without_raise().
        with mock.patch.object(os, "symlink", autospec=True) as mock_symlink:
            mock_symlink.side_effect = OSError(errno.EEXIST)
            utils.create_link_without_raise("/fake/source", "/fake/link")
            mock_symlink.assert_called_once_with("/fake/source", "/fake/link")
class ExecuteTestCase(base.TestCase):
    """Tests for utils.execute(), ironic's wrapper around
    oslo_concurrency.processutils.execute().
    """

    @mock.patch.object(processutils, 'execute', autospec=True)
    @mock.patch.object(os.environ, 'copy', return_value={}, autospec=True)
    def test_execute_use_standard_locale_no_env_variables(self, env_mock,
                                                          execute_mock):
        # With no caller-supplied environment, use_standard_locale must
        # inject LC_ALL=C into the child's environment.
        utils.execute('foo', use_standard_locale=True)
        execute_mock.assert_called_once_with('foo',
                                             env_variables={'LC_ALL': 'C'})

    @mock.patch.object(processutils, 'execute', autospec=True)
    def test_execute_use_standard_locale_with_env_variables(self,
                                                            execute_mock):
        # LC_ALL=C is merged with (not replacing) caller-supplied variables.
        utils.execute('foo', use_standard_locale=True,
                      env_variables={'foo': 'bar'})
        execute_mock.assert_called_once_with('foo',
                                             env_variables={'LC_ALL': 'C',
                                                            'foo': 'bar'})

    @mock.patch.object(processutils, 'execute', autospec=True)
    def test_execute_not_use_standard_locale(self, execute_mock):
        # Without use_standard_locale the environment is passed through as-is.
        utils.execute('foo', use_standard_locale=False,
                      env_variables={'foo': 'bar'})
        execute_mock.assert_called_once_with('foo',
                                             env_variables={'foo': 'bar'})

    def test_execute_get_root_helper(self):
        # run_as_root=True must forward the configured root helper command.
        with mock.patch.object(
                processutils, 'execute', autospec=True) as execute_mock:
            helper = utils._get_root_helper()
            utils.execute('foo', run_as_root=True)
            execute_mock.assert_called_once_with('foo', run_as_root=True,
                                                 root_helper=helper)

    def test_execute_without_root_helper(self):
        # run_as_root=False must not attach a root_helper argument.
        with mock.patch.object(
                processutils, 'execute', autospec=True) as execute_mock:
            utils.execute('foo', run_as_root=False)
            execute_mock.assert_called_once_with('foo', run_as_root=False)
class GenericUtilsTestCase(base.TestCase):
    """Tests for assorted helpers in ironic.common.utils: hashing,
    identifier validation (datapath IDs, hostnames, logical names, MACs),
    string utilities, and the no_proxy validator.
    """

    @mock.patch.object(utils, 'hashlib', autospec=True)
    def test__get_hash_object(self, hashlib_mock):
        # Every advertised algorithm must map to the matching hashlib
        # constructor.
        algorithms_available = ('md5', 'sha1', 'sha224',
                                'sha256', 'sha384', 'sha512')
        hashlib_mock.algorithms_guaranteed = algorithms_available
        hashlib_mock.algorithms = algorithms_available
        # | WHEN |
        utils._get_hash_object('md5')
        utils._get_hash_object('sha1')
        utils._get_hash_object('sha224')
        utils._get_hash_object('sha256')
        utils._get_hash_object('sha384')
        utils._get_hash_object('sha512')
        # | THEN |
        calls = [mock.call.md5(), mock.call.sha1(), mock.call.sha224(),
                 mock.call.sha256(), mock.call.sha384(), mock.call.sha512()]
        hashlib_mock.assert_has_calls(calls)

    def test__get_hash_object_throws_for_invalid_or_unsupported_hash_name(
            self):
        # | WHEN | & | THEN |
        self.assertRaises(exception.InvalidParameterValue,
                          utils._get_hash_object,
                          'hickory-dickory-dock')

    def test_hash_file_for_md5(self):
        # | GIVEN |
        data = b'Mary had a little lamb, its fleece as white as snow'
        file_like_object = six.BytesIO(data)
        expected = hashlib.md5(data).hexdigest()
        # | WHEN |
        actual = utils.hash_file(file_like_object)  # using default, 'md5'
        # | THEN |
        self.assertEqual(expected, actual)

    def test_hash_file_for_md5_not_binary(self):
        # Text-mode (str) input must hash the same bytes as its UTF-8
        # encoding.
        # | GIVEN |
        data = u'Mary had a little lamb, its fleece as white as sno\u0449'
        file_like_object = six.StringIO(data)
        expected = hashlib.md5(data.encode('utf-8')).hexdigest()
        # | WHEN |
        actual = utils.hash_file(file_like_object)  # using default, 'md5'
        # | THEN |
        self.assertEqual(expected, actual)

    def test_hash_file_for_sha1(self):
        # | GIVEN |
        data = b'Mary had a little lamb, its fleece as white as snow'
        file_like_object = six.BytesIO(data)
        expected = hashlib.sha1(data).hexdigest()
        # | WHEN |
        actual = utils.hash_file(file_like_object, 'sha1')
        # | THEN |
        self.assertEqual(expected, actual)

    def test_hash_file_for_sha512(self):
        # | GIVEN |
        data = b'Mary had a little lamb, its fleece as white as snow'
        file_like_object = six.BytesIO(data)
        expected = hashlib.sha512(data).hexdigest()
        # | WHEN |
        actual = utils.hash_file(file_like_object, 'sha512')
        # | THEN |
        self.assertEqual(expected, actual)

    def test_hash_file_throws_for_invalid_or_unsupported_hash(self):
        # | GIVEN |
        data = b'Mary had a little lamb, its fleece as white as snow'
        file_like_object = six.BytesIO(data)
        # | WHEN | & | THEN |
        self.assertRaises(exception.InvalidParameterValue, utils.hash_file,
                          file_like_object, 'hickory-dickory-dock')

    def test_file_has_content_equal(self):
        data = b'Mary had a little lamb, its fleece as white as snow'
        ref = data
        with mock.patch('ironic.common.utils.open',
                        mock.mock_open(read_data=data)) as mopen:
            self.assertTrue(utils.file_has_content('foo', ref))
            # The file must be opened in binary mode for the comparison.
            mopen.assert_called_once_with('foo', 'rb')

    def test_file_has_content_equal_not_binary(self):
        data = u'Mary had a little lamb, its fleece as white as sno\u0449'
        ref = data
        with mock.patch('ironic.common.utils.open',
                        mock.mock_open(read_data=data)) as mopen:
            self.assertTrue(utils.file_has_content('foo', ref))
            mopen.assert_called_once_with('foo', 'rb')

    def test_file_has_content_differ(self):
        data = b'Mary had a little lamb, its fleece as white as snow'
        ref = data + b'!'
        with mock.patch('ironic.common.utils.open',
                        mock.mock_open(read_data=data)) as mopen:
            self.assertFalse(utils.file_has_content('foo', ref))
            mopen.assert_called_once_with('foo', 'rb')

    def test_is_valid_datapath_id(self):
        # A valid OpenFlow datapath ID is exactly 16 hex digits, any case.
        self.assertTrue(utils.is_valid_datapath_id("525400cf2d319fdf"))
        self.assertTrue(utils.is_valid_datapath_id("525400CF2D319FDF"))
        self.assertFalse(utils.is_valid_datapath_id("52"))
        self.assertFalse(utils.is_valid_datapath_id("52:54:00:cf:2d:31"))
        self.assertFalse(utils.is_valid_datapath_id("notadatapathid00"))
        self.assertFalse(utils.is_valid_datapath_id("5525400CF2D319FDF"))

    def test_is_hostname_safe(self):
        # RFC-952/1123-style hostname validation: lowercase labels,
        # digits and hyphens only, 63-char labels, 255-char total.
        self.assertTrue(utils.is_hostname_safe('spam'))
        self.assertFalse(utils.is_hostname_safe('spAm'))
        self.assertFalse(utils.is_hostname_safe('SPAM'))
        self.assertFalse(utils.is_hostname_safe('-spam'))
        self.assertFalse(utils.is_hostname_safe('spam-'))
        self.assertTrue(utils.is_hostname_safe('spam-eggs'))
        self.assertFalse(utils.is_hostname_safe('spam_eggs'))
        self.assertFalse(utils.is_hostname_safe('spam eggs'))
        self.assertTrue(utils.is_hostname_safe('spam.eggs'))
        self.assertTrue(utils.is_hostname_safe('9spam'))
        self.assertTrue(utils.is_hostname_safe('spam7'))
        self.assertTrue(utils.is_hostname_safe('br34kf4st'))
        self.assertFalse(utils.is_hostname_safe('$pam'))
        self.assertFalse(utils.is_hostname_safe('egg$'))
        self.assertFalse(utils.is_hostname_safe('spam#eggs'))
        self.assertFalse(utils.is_hostname_safe(' eggs'))
        self.assertFalse(utils.is_hostname_safe('spam '))
        self.assertTrue(utils.is_hostname_safe('s'))
        self.assertTrue(utils.is_hostname_safe('s' * 63))
        self.assertFalse(utils.is_hostname_safe('s' * 64))
        self.assertFalse(utils.is_hostname_safe(''))
        self.assertFalse(utils.is_hostname_safe(None))
        # Need to ensure a binary response for success or fail
        self.assertIsNotNone(utils.is_hostname_safe('spam'))
        self.assertIsNotNone(utils.is_hostname_safe('-spam'))
        self.assertTrue(utils.is_hostname_safe('www.rackspace.com'))
        self.assertTrue(utils.is_hostname_safe('www.rackspace.com.'))
        self.assertTrue(utils.is_hostname_safe('http._sctp.www.example.com'))
        self.assertTrue(utils.is_hostname_safe('mail.pets_r_us.net'))
        self.assertTrue(utils.is_hostname_safe('mail-server-15.my_host.org'))
        self.assertFalse(utils.is_hostname_safe('www.nothere.com_'))
        self.assertFalse(utils.is_hostname_safe('www.nothere_.com'))
        self.assertFalse(utils.is_hostname_safe('www..nothere.com'))
        long_str = 'a' * 63 + '.' + 'b' * 63 + '.' + 'c' * 63 + '.' + 'd' * 63
        self.assertTrue(utils.is_hostname_safe(long_str))
        self.assertFalse(utils.is_hostname_safe(long_str + '.'))
        self.assertFalse(utils.is_hostname_safe('a' * 255))

    def test_is_valid_logical_name(self):
        # Logical names are looser than hostnames: mixed case plus
        # ~ . _ - are allowed; whitespace and shell metacharacters are not.
        valid = (
            'spam', 'spAm', 'SPAM', 'spam-eggs', 'spam.eggs', 'spam_eggs',
            'spam~eggs', '9spam', 'spam7', '~spam', '.spam', '.~-_', '~',
            'br34kf4st', 's', 's' * 63, 's' * 255)
        invalid = (
            ' ', 'spam eggs', '$pam', 'egg$', 'spam#eggs',
            ' eggs', 'spam ', '', None, 'spam%20')
        for hostname in valid:
            result = utils.is_valid_logical_name(hostname)
            # Need to ensure a binary response for success. assertTrue
            # is too generous, and would pass this test if, for
            # instance, a regex Match object were returned.
            self.assertIs(result, True,
                          "%s is unexpectedly invalid" % hostname)
        for hostname in invalid:
            result = utils.is_valid_logical_name(hostname)
            # Need to ensure a binary response for
            # success. assertFalse is too generous and would pass this
            # test if None were returned.
            self.assertIs(result, False,
                          "%s is unexpectedly valid" % hostname)

    def test_validate_and_normalize_mac(self):
        # Normalization lower-cases a MAC that passed validation.
        mac = 'AA:BB:CC:DD:EE:FF'
        with mock.patch.object(netutils, 'is_valid_mac',
                               autospec=True) as m_mock:
            m_mock.return_value = True
            self.assertEqual(mac.lower(),
                             utils.validate_and_normalize_mac(mac))

    def test_validate_and_normalize_datapath_id(self):
        datapath_id = 'AA:BB:CC:DD:EE:FF'
        with mock.patch.object(utils, 'is_valid_datapath_id',
                               autospec=True) as m_mock:
            m_mock.return_value = True
            self.assertEqual(datapath_id.lower(),
                             utils.validate_and_normalize_datapath_id(
                                 datapath_id))

    def test_validate_and_normalize_mac_invalid_format(self):
        with mock.patch.object(netutils, 'is_valid_mac',
                               autospec=True) as m_mock:
            m_mock.return_value = False
            self.assertRaises(exception.InvalidMAC,
                              utils.validate_and_normalize_mac, 'invalid-mac')

    def test_safe_rstrip(self):
        value = '/test/'
        rstripped_value = '/test'
        not_rstripped = '/'
        self.assertEqual(rstripped_value, utils.safe_rstrip(value, '/'))
        self.assertEqual(not_rstripped, utils.safe_rstrip(not_rstripped, '/'))

    def test_safe_rstrip_not_raises_exceptions(self):
        # Supplying an integer should normally raise an exception because it
        # does not save the rstrip() method.
        value = 10
        # In the case of raising an exception safe_rstrip() should return the
        # original value.
        self.assertEqual(value, utils.safe_rstrip(value))

    @mock.patch.object(os.path, 'getmtime', return_value=1439465889.4964755,
                       autospec=True)
    def test_unix_file_modification_datetime(self, mtime_mock):
        # The float mtime must be converted to a (truncated-microsecond)
        # naive datetime.
        expected = datetime.datetime(2015, 8, 13, 11, 38, 9, 496475)
        self.assertEqual(expected,
                         utils.unix_file_modification_datetime('foo'))
        mtime_mock.assert_called_once_with('foo')

    def test_is_valid_no_proxy(self):
        # Valid values for 'no_proxy'
        valid_no_proxy = [
            ('a' * 63 + '.' + '0' * 63 + '.c.' + 'd' * 61 + '.' + 'e' * 61),
            ('A' * 63 + '.' + '0' * 63 + '.C.' + 'D' * 61 + '.' + 'E' * 61),
            ('.' + 'a' * 62 + '.' + '0' * 62 + '.c.' + 'd' * 61 + '.'
             + 'e' * 61),
            ',,example.com:3128,',
            '192.168.1.1',  # IP should be valid
        ]
        # Test each one individually, so if failure easier to determine which
        # one failed.
        for no_proxy in valid_no_proxy:
            self.assertTrue(
                utils.is_valid_no_proxy(no_proxy),
                msg="'no_proxy' value should be valid: {}".format(no_proxy))
        # Test valid when joined together
        self.assertTrue(utils.is_valid_no_proxy(','.join(valid_no_proxy)))
        # Test valid when joined together with whitespace
        self.assertTrue(utils.is_valid_no_proxy(' , '.join(valid_no_proxy)))
        # empty string should also be valid
        self.assertTrue(utils.is_valid_no_proxy(''))
        # Invalid values for 'no_proxy'
        invalid_no_proxy = [
            ('A' * 64 + '.' + '0' * 63 + '.C.' + 'D' * 61 + '.'
             + 'E' * 61),  # too long (> 253)
            ('a' * 100),
            'a..com',
            ('.' + 'a' * 63 + '.' + '0' * 62 + '.c.' + 'd' * 61 + '.'
             + 'e' * 61),  # too long (> 251 after deleting .)
            ('*.' + 'a' * 60 + '.' + '0' * 60 + '.c.' + 'd' * 61 + '.'
             + 'e' * 61),  # starts with *.
            'c.-a.com',
            'c.a-.com',
        ]
        for no_proxy in invalid_no_proxy:
            self.assertFalse(
                utils.is_valid_no_proxy(no_proxy),
                msg="'no_proxy' value should be invalid: {}".format(no_proxy))

    @mock.patch.object(utils, 'LOG', autospec=True)
    def test_warn_about_deprecated_extra_vif_port_id(self, mock_log):
        # The deprecation warning must be emitted only once even when the
        # helper is called repeatedly.
        # Set variable to default value
        utils.warn_deprecated_extra_vif_port_id = False
        utils.warn_about_deprecated_extra_vif_port_id()
        utils.warn_about_deprecated_extra_vif_port_id()
        self.assertEqual(1, mock_log.warning.call_count)
        self.assertIn("extra['vif_port_id'] is deprecated and will not",
                      mock_log.warning.call_args[0][0])
class TempFilesTestCase(base.TestCase):
    """Tests for utils.tempdir() and the check_dir()/disk-space helpers."""

    def test_tempdir(self):
        # The context manager yields an existing directory and removes it
        # on exit.
        dirname = None
        with utils.tempdir() as tempdir:
            self.assertTrue(os.path.isdir(tempdir))
            dirname = tempdir
        self.assertFalse(os.path.exists(dirname))

    @mock.patch.object(shutil, 'rmtree', autospec=True)
    @mock.patch.object(tempfile, 'mkdtemp', autospec=True)
    def test_tempdir_mocked(self, mkdtemp_mock, rmtree_mock):
        # Keyword arguments are forwarded to mkdtemp; the created directory
        # is removed with rmtree on exit.
        self.config(tempdir='abc')
        mkdtemp_mock.return_value = 'temp-dir'
        kwargs = {'dir': 'b'}
        with utils.tempdir(**kwargs) as tempdir:
            self.assertEqual('temp-dir', tempdir)
            tempdir_created = tempdir
        mkdtemp_mock.assert_called_once_with(**kwargs)
        rmtree_mock.assert_called_once_with(tempdir_created)

    @mock.patch.object(utils, 'LOG', autospec=True)
    @mock.patch.object(shutil, 'rmtree', autospec=True)
    @mock.patch.object(tempfile, 'mkdtemp', autospec=True)
    def test_tempdir_mocked_error_on_rmtree(self, mkdtemp_mock, rmtree_mock,
                                            log_mock):
        # A failed cleanup is logged, not raised to the caller.
        self.config(tempdir='abc')
        mkdtemp_mock.return_value = 'temp-dir'
        rmtree_mock.side_effect = OSError
        with utils.tempdir() as tempdir:
            self.assertEqual('temp-dir', tempdir)
            tempdir_created = tempdir
        rmtree_mock.assert_called_once_with(tempdir_created)
        self.assertTrue(log_mock.error.called)

    @mock.patch.object(os.path, 'exists', autospec=True)
    @mock.patch.object(utils, '_check_dir_writable', autospec=True)
    @mock.patch.object(utils, '_check_dir_free_space', autospec=True)
    def test_check_dir_with_pass_in(self, mock_free_space, mock_dir_writable,
                                    mock_exists):
        mock_exists.return_value = True
        # test passing in a directory and size
        utils.check_dir(directory_to_check='/fake/path', required_space=5)
        mock_exists.assert_called_once_with('/fake/path')
        mock_dir_writable.assert_called_once_with('/fake/path')
        mock_free_space.assert_called_once_with('/fake/path', 5)

    @mock.patch.object(utils, '_check_dir_writable', autospec=True)
    @mock.patch.object(utils, '_check_dir_free_space', autospec=True)
    def test_check_dir_no_dir(self, mock_free_space, mock_dir_writable):
        self.config(tempdir='/fake/path')
        # NOTE(dtantsur): self.config uses os.path.exists, so we cannot mock
        # on the method level.
        with mock.patch.object(os.path, 'exists',
                               autospec=True) as mock_exists:
            mock_exists.return_value = False
            self.assertRaises(exception.PathNotFound, utils.check_dir)
            mock_exists.assert_called_once_with(CONF.tempdir)
        self.assertFalse(mock_free_space.called)
        self.assertFalse(mock_dir_writable.called)

    @mock.patch.object(utils, '_check_dir_writable', autospec=True)
    @mock.patch.object(utils, '_check_dir_free_space', autospec=True)
    def test_check_dir_ok(self, mock_free_space, mock_dir_writable):
        self.config(tempdir='/fake/path')
        # NOTE(dtantsur): self.config uses os.path.exists, so we cannot mock
        # on the method level.
        with mock.patch.object(os.path, 'exists',
                               autospec=True) as mock_exists:
            mock_exists.return_value = True
            utils.check_dir()
            mock_exists.assert_called_once_with(CONF.tempdir)
        mock_dir_writable.assert_called_once_with(CONF.tempdir)
        # Defaults to requiring 1 MiB of free space when no size is given.
        mock_free_space.assert_called_once_with(CONF.tempdir, 1)

    @mock.patch.object(os, 'access', autospec=True)
    def test__check_dir_writable_ok(self, mock_access):
        mock_access.return_value = True
        self.assertIsNone(utils._check_dir_writable("/fake/path"))
        mock_access.assert_called_once_with("/fake/path", os.W_OK)

    @mock.patch.object(os, 'access', autospec=True)
    def test__check_dir_writable_not_writable(self, mock_access):
        mock_access.return_value = False
        self.assertRaises(exception.DirectoryNotWritable,
                          utils._check_dir_writable, "/fake/path")
        mock_access.assert_called_once_with("/fake/path", os.W_OK)

    @mock.patch.object(os, 'statvfs', autospec=True)
    def test__check_dir_free_space_ok(self, mock_stat):
        # f_bsize * f_bavail = 5 * 1 MiB blocks: plenty of space.
        statvfs_mock_return = mock.MagicMock()
        statvfs_mock_return.f_bsize = 5
        statvfs_mock_return.f_frsize = 0
        statvfs_mock_return.f_blocks = 0
        statvfs_mock_return.f_bfree = 0
        statvfs_mock_return.f_bavail = 1024 * 1024
        statvfs_mock_return.f_files = 0
        statvfs_mock_return.f_ffree = 0
        statvfs_mock_return.f_favail = 0
        statvfs_mock_return.f_flag = 0
        statvfs_mock_return.f_namemax = 0
        mock_stat.return_value = statvfs_mock_return
        utils._check_dir_free_space("/fake/path")
        mock_stat.assert_called_once_with("/fake/path")

    @mock.patch.object(os, 'statvfs', autospec=True)
    def test_check_dir_free_space_raises(self, mock_stat):
        # Only 1 KiB available: below the required minimum, so it raises.
        statvfs_mock_return = mock.MagicMock()
        statvfs_mock_return.f_bsize = 1
        statvfs_mock_return.f_frsize = 0
        statvfs_mock_return.f_blocks = 0
        statvfs_mock_return.f_bfree = 0
        statvfs_mock_return.f_bavail = 1024
        statvfs_mock_return.f_files = 0
        statvfs_mock_return.f_ffree = 0
        statvfs_mock_return.f_favail = 0
        statvfs_mock_return.f_flag = 0
        statvfs_mock_return.f_namemax = 0
        mock_stat.return_value = statvfs_mock_return
        self.assertRaises(exception.InsufficientDiskSpace,
                          utils._check_dir_free_space, "/fake/path")
        mock_stat.assert_called_once_with("/fake/path")
class GetUpdatedCapabilitiesTestCase(base.TestCase):
    """Tests for utils.get_updated_capabilities() ('k1:v1,k2:v2' merging)
    and utils.validate_network_port().
    """

    def test_get_updated_capabilities(self):
        capabilities = {'ilo_firmware_version': 'xyz'}
        cap_string = 'ilo_firmware_version:xyz'
        cap_returned = utils.get_updated_capabilities(None, capabilities)
        self.assertEqual(cap_string, cap_returned)
        self.assertIsInstance(cap_returned, str)

    def test_get_updated_capabilities_multiple_keys(self):
        capabilities = {'ilo_firmware_version': 'xyz',
                        'foo': 'bar', 'somekey': 'value'}
        cap_string = 'ilo_firmware_version:xyz,foo:bar,somekey:value'
        cap_returned = utils.get_updated_capabilities(None, capabilities)
        # Compare as sets: the helper does not guarantee pair ordering.
        set1 = set(cap_string.split(','))
        set2 = set(cap_returned.split(','))
        self.assertEqual(set1, set2)
        self.assertIsInstance(cap_returned, str)

    def test_get_updated_capabilities_invalid_capabilities(self):
        # An existing-capabilities string without 'key:value' form is
        # rejected.
        capabilities = 'ilo_firmware_version'
        self.assertRaises(ValueError,
                          utils.get_updated_capabilities,
                          capabilities, {})

    def test_get_updated_capabilities_capabilities_not_dict(self):
        # The new capabilities must be a dict, not a list of strings.
        capabilities = ['ilo_firmware_version:xyz', 'foo:bar']
        self.assertRaises(ValueError,
                          utils.get_updated_capabilities,
                          None, capabilities)

    def test_get_updated_capabilities_add_to_existing_capabilities(self):
        new_capabilities = {'BootMode': 'uefi'}
        expected_capabilities = 'BootMode:uefi,foo:bar'
        cap_returned = utils.get_updated_capabilities('foo:bar',
                                                      new_capabilities)
        set1 = set(expected_capabilities.split(','))
        set2 = set(cap_returned.split(','))
        self.assertEqual(set1, set2)
        self.assertIsInstance(cap_returned, str)

    def test_get_updated_capabilities_replace_to_existing_capabilities(self):
        # A key already present in the existing string is overwritten.
        new_capabilities = {'BootMode': 'bios'}
        expected_capabilities = 'BootMode:bios'
        cap_returned = utils.get_updated_capabilities('BootMode:uefi',
                                                      new_capabilities)
        set1 = set(expected_capabilities.split(','))
        set2 = set(cap_returned.split(','))
        self.assertEqual(set1, set2)
        self.assertIsInstance(cap_returned, str)

    def test_validate_network_port(self):
        # Boundary values 0 and 65535 are accepted and returned as ints.
        port = utils.validate_network_port('0', 'message')
        self.assertEqual(0, port)
        port = utils.validate_network_port('65535')
        self.assertEqual(65535, port)

    def test_validate_network_port_fail(self):
        # Out-of-range or non-numeric values raise, and the error message
        # uses the caller-supplied port name when one is given.
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'Port "65536" is not a valid port.',
                               utils.validate_network_port,
                               '65536')
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'fake_port "-1" is not a valid port.',
                               utils.validate_network_port,
                               '-1',
                               'fake_port')
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'Port "invalid" is not a valid port.',
                               utils.validate_network_port,
                               'invalid')
class JinjaTemplatingTestCase(base.TestCase):
    """Tests for utils.render_template() with both string and file input."""

    def setUp(self):
        super(JinjaTemplatingTestCase, self).setUp()
        self.template = '{{ foo }} {{ bar }}'
        self.params = {'foo': 'spam', 'bar': 'ham'}
        self.expected = 'spam ham'

    def test_render_string(self):
        # is_file=False renders the first argument as an inline template.
        self.assertEqual(self.expected,
                         utils.render_template(self.template,
                                               self.params,
                                               is_file=False))

    @mock.patch('ironic.common.utils.jinja2.FileSystemLoader', autospec=True)
    def test_render_file(self, jinja_fsl_mock):
        # Substituting a DictLoader for the FileSystemLoader lets the
        # file path be rendered without touching the filesystem; the
        # loader must be constructed with the template's directory.
        path = '/path/to/template.j2'
        jinja_fsl_mock.return_value = jinja2.DictLoader(
            {'template.j2': self.template})
        self.assertEqual(self.expected,
                         utils.render_template(path,
                                               self.params))
        jinja_fsl_mock.assert_called_once_with('/path/to')
|
Python
|
Apache-2.0
|
jovial/ironic/ironic/tests/unit/common/test_utils.py
|
9129fb43-3141-4704-b76d-af81a4044132
|
[{"tag": "NAME", "value": "Mary", "start": 7647, "end": 7651, "context": "t_file_has_content_differ(self):\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 5791, "end": 5795, "context": "_sha1(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 7271, "end": 7275, "context": "_content_equal_not_binary(self):\n data = u'Mary had a little lamb, its fleece as white as sno\\u04"}, {"tag": "NAME", "value": "Mary", "start": 6890, "end": 6894, "context": "st_file_has_content_equal(self):\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 6557, "end": 6561, "context": "_hash(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 4999, "end": 5003, "context": "r_md5(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Justin Santa Barbara", "start": 17, "end": 37, "context": "# Copyright 2011 Justin Santa Barbara\n# Copyright 2012 Hewlett-Packard Development Comp"}, {"tag": "NAME", "value": "Mary", "start": 5389, "end": 5393, "context": "inary(self):\n # | GIVEN |\n data = u'Mary had a little lamb, its fleece as white as sno\\u04"}, {"tag": "IP_ADDRESS", "value": "192.168.1.1", "start": 14469, "end": 14480, "context": ",\n ',,example.com:3128,',\n '192.168.1.1', # IP should be valid\n ]\n # Test "}, {"tag": "NAME", "value": "Mary", "start": 6158, "end": 6162, "context": "ha512(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}]
|
[{"tag": "NAME", "value": "Mary", "start": 7647, "end": 7651, "context": "t_file_has_content_differ(self):\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 5791, "end": 5795, "context": "_sha1(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 7271, "end": 7275, "context": "_content_equal_not_binary(self):\n data = u'Mary had a little lamb, its fleece as white as sno\\u04"}, {"tag": "NAME", "value": "Mary", "start": 6890, "end": 6894, "context": "st_file_has_content_equal(self):\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 6557, "end": 6561, "context": "_hash(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Mary", "start": 4999, "end": 5003, "context": "r_md5(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}, {"tag": "NAME", "value": "Justin Santa Barbara", "start": 17, "end": 37, "context": "# Copyright 2011 Justin Santa Barbara\n# Copyright 2012 Hewlett-Packard Development Comp"}, {"tag": "NAME", "value": "Mary", "start": 5389, "end": 5393, "context": "inary(self):\n # | GIVEN |\n data = u'Mary had a little lamb, its fleece as white as sno\\u04"}, {"tag": "IP_ADDRESS", "value": "192.168.1.1", "start": 14469, "end": 14480, "context": ",\n ',,example.com:3128,',\n '192.168.1.1', # IP should be valid\n ]\n # Test "}, {"tag": "NAME", "value": "Mary", "start": 6158, "end": 6162, "context": "ha512(self):\n # | GIVEN |\n data = b'Mary had a little lamb, its fleece as white as snow'\n "}]
|
import os
import pandas as pd
COMPETITION_NAME = "tabular-playground-series-sep-2021"
SUBMISSION_DIR = "."
SUBMISSION_FILE = "sub_blending_1_my_rank_004-2o-lightgbm-colsample_81830_my_ranking_81790_0926_1918.csv"
SUBMISSION_MESSAGE = '"004-2o-lightgbm-colsample-tps-sep-2021 + stacking_lgb_xbg_cat_imputer_no_imputer"'
df = pd.read_csv(f"{SUBMISSION_DIR}/{SUBMISSION_FILE}")
print(df.head())
submission_string = f"kaggle competitions submit {COMPETITION_NAME} -f {SUBMISSION_DIR}/{SUBMISSION_FILE} -m {SUBMISSION_MESSAGE}"
print(submission_string)
os.system(submission_string)
|
Python
|
Apache-2.0
|
arnabbiswas1/k_tab_sept_roc_auc_binary_classification_KFold/submissions/submissions_22.py
|
e8401a02-8066-49c4-882d-2bd9bcdf4da4
|
[]
|
[]
|
/******************************************************************************
*
* Project: Contour Generator
* Purpose: Contour Generator mainline.
* Author: Frank Warmerdam <warmerdam@pobox.com>
*
******************************************************************************
* Copyright (c) 2003, Applied Coherent Technology (www.actgate.com).
* Copyright (c) 2008-2013, Even Rouault <even dot rouault at mines-paris dot org>
* Copyright (c) 2018, Oslandia <infos at oslandia dot com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
****************************************************************************/
#include "cpl_conv.h"
#include "cpl_string.h"
#include "gdal_version.h"
#include "gdal.h"
#include "gdal_alg.h"
#include "ogr_api.h"
#include "ogr_srs_api.h"
#include "commonutils.h"
CPL_CVSID("$Id: gdal_contour.cpp 3895dc399ec85aa97a1a88949c6acdfac807d745 2019-04-30 13:00:55 +0200 Even Rouault $")
/************************************************************************/
/* ArgIsNumeric() */
/************************************************************************/
static bool ArgIsNumeric( const char *pszArg )
{
return CPLGetValueType(pszArg) != CPL_VALUE_STRING;
}
/************************************************************************/
/* Usage() */
/************************************************************************/
static void Usage(const char* pszErrorMsg = nullptr)
{
printf(
"Usage: gdal_contour [-b <band>] [-a <attribute_name>] [-amin <attribute_name>] [-amax <attribute_name>]\n"
" [-3d] [-inodata] [-snodata n] [-f <formatname>] [-i <interval>]\n"
" [[-dsco NAME=VALUE] ...] [[-lco NAME=VALUE] ...]\n"
" [-off <offset>] [-fl <level> <level>...] [-e <exp_base>]\n"
" [-nln <outlayername>] [-q] [-p]\n"
" <src_filename> <dst_filename>\n" );
if( pszErrorMsg != nullptr )
fprintf(stderr, "\nFAILURE: %s\n", pszErrorMsg);
exit( 1 );
}
static void CreateElevAttrib(const char* pszElevAttrib, OGRLayerH hLayer)
{
OGRFieldDefnH hFld = OGR_Fld_Create( pszElevAttrib, OFTReal );
OGRErr eErr = OGR_L_CreateField( hLayer, hFld, FALSE );
OGR_Fld_Destroy( hFld );
if( eErr == OGRERR_FAILURE )
{
exit( 1 );
}
}
/************************************************************************/
/* main() */
/************************************************************************/
#define CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(nExtraArg) \
do { if (i + nExtraArg >= argc) \
Usage(CPLSPrintf("%s option requires %d argument(s)", \
argv[i], nExtraArg)); } while( false )
MAIN_START(argc, argv)
{
bool b3D = false;
int bNoDataSet = FALSE;
bool bIgnoreNoData = false;
int nBandIn = 1;
double dfInterval = 0.0;
double dfNoData = 0.0;
double dfOffset = 0.0;
double dfExpBase = 0.0;
const char *pszSrcFilename = nullptr;
const char *pszDstFilename = nullptr;
const char *pszElevAttrib = nullptr;
const char *pszElevAttribMin = nullptr;
const char *pszElevAttribMax = nullptr;
const char *pszFormat = nullptr;
char **papszDSCO = nullptr;
char **papszLCO = nullptr;
double adfFixedLevels[1000];
int nFixedLevelCount = 0;
const char *pszNewLayerName = "contour";
bool bQuiet = false;
GDALProgressFunc pfnProgress = nullptr;
bool bPolygonize = false;
// Check that we are running against at least GDAL 1.4.
// Note to developers: if we use newer API, please change the requirement.
if (atoi(GDALVersionInfo("VERSION_NUM")) < 1400)
{
fprintf(stderr,
"At least, GDAL >= 1.4.0 is required for this version of %s, "
"which was compiled against GDAL %s\n",
argv[0], GDAL_RELEASE_NAME);
exit(1);
}
GDALAllRegister();
OGRRegisterAll();
argc = GDALGeneralCmdLineProcessor( argc, &argv, 0 );
/* -------------------------------------------------------------------- */
/* Parse arguments. */
/* -------------------------------------------------------------------- */
for( int i = 1; i < argc; i++ )
{
if( EQUAL(argv[i], "--utility_version") )
{
printf("%s was compiled against GDAL %s and "
"is running against GDAL %s\n",
argv[0], GDAL_RELEASE_NAME, GDALVersionInfo("RELEASE_NAME"));
CSLDestroy( argv );
return 0;
}
else if( EQUAL(argv[i], "--help") )
Usage();
else if( EQUAL(argv[i],"-a") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
pszElevAttrib = argv[++i];
}
else if( EQUAL(argv[i],"-amin") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
pszElevAttribMin = argv[++i];
}
else if( EQUAL(argv[i],"-amax") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
pszElevAttribMax = argv[++i];
}
else if( EQUAL(argv[i],"-off") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
dfOffset = CPLAtof(argv[++i]);
}
else if( EQUAL(argv[i],"-i") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
dfInterval = CPLAtof(argv[++i]);
}
else if( EQUAL(argv[i],"-e") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
dfExpBase = CPLAtof(argv[++i]);
}
else if( EQUAL(argv[i],"-p") )
{
bPolygonize = true;
}
else if( EQUAL(argv[i],"-fl") )
{
if( i >= argc-1 )
Usage(CPLSPrintf("%s option requires at least 1 argument",
argv[i]));
while( i < argc-1
&& nFixedLevelCount
< static_cast<int>(sizeof(adfFixedLevels)/sizeof(double))
&& ArgIsNumeric(argv[i+1]) )
adfFixedLevels[nFixedLevelCount++] = CPLAtof(argv[++i]);
}
else if( EQUAL(argv[i],"-b") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
nBandIn = atoi(argv[++i]);
}
else if( EQUAL(argv[i],"-f") || EQUAL(argv[i],"-of") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
pszFormat = argv[++i];
}
else if( EQUAL(argv[i],"-dsco") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
papszDSCO = CSLAddString(papszDSCO, argv[++i] );
}
else if( EQUAL(argv[i],"-lco") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
papszLCO = CSLAddString(papszLCO, argv[++i] );
}
else if( EQUAL(argv[i],"-3d") )
{
b3D = true;
}
else if( EQUAL(argv[i],"-snodata") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
bNoDataSet = TRUE;
dfNoData = CPLAtof(argv[++i]);
}
else if( EQUAL(argv[i],"-nln") )
{
CHECK_HAS_ENOUGH_ADDITIONAL_ARGS(1);
pszNewLayerName = argv[++i];
}
else if( EQUAL(argv[i],"-inodata") )
{
bIgnoreNoData = true;
}
else if ( EQUAL(argv[i],"-q") || EQUAL(argv[i],"-quiet") )
{
bQuiet = TRUE;
}
else if( pszSrcFilename == nullptr )
{
pszSrcFilename = argv[i];
}
else if( pszDstFilename == nullptr )
{
pszDstFilename = argv[i];
}
else
Usage("Too many command options.");
}
if( dfInterval == 0.0 && nFixedLevelCount == 0 && dfExpBase == 0.0 )
{
Usage("Neither -i nor -fl nor -e are specified.");
}
if (pszSrcFilename == nullptr)
{
Usage("Missing source filename.");
}
if (pszDstFilename == nullptr)
{
Usage("Missing destination filename.");
}
if (!bQuiet)
pfnProgress = GDALTermProgress;
/* -------------------------------------------------------------------- */
/* Open source raster file. */
/* -------------------------------------------------------------------- */
GDALDatasetH hSrcDS = GDALOpen(pszSrcFilename, GA_ReadOnly);
if( hSrcDS == nullptr )
exit( 2 );
GDALRasterBandH hBand = GDALGetRasterBand( hSrcDS, nBandIn );
if( hBand == nullptr )
{
CPLError( CE_Failure, CPLE_AppDefined,
"Band %d does not exist on dataset.",
nBandIn );
exit(2);
}
if( !bNoDataSet && !bIgnoreNoData )
dfNoData = GDALGetRasterNoDataValue( hBand, &bNoDataSet );
/* -------------------------------------------------------------------- */
/* Try to get a coordinate system from the raster. */
/* -------------------------------------------------------------------- */
OGRSpatialReferenceH hSRS = nullptr;
const char *pszWKT = GDALGetProjectionRef( hSrcDS );
if( pszWKT != nullptr && strlen(pszWKT) != 0 )
hSRS = OSRNewSpatialReference( pszWKT );
/* -------------------------------------------------------------------- */
/* Create the output file. */
/* -------------------------------------------------------------------- */
CPLString osFormat;
if( pszFormat == nullptr )
{
std::vector<CPLString> aoDrivers =
GetOutputDriversFor(pszDstFilename, GDAL_OF_VECTOR);
if( aoDrivers.empty() )
{
CPLError( CE_Failure, CPLE_AppDefined,
"Cannot guess driver for %s", pszDstFilename);
exit( 10 );
}
else
{
if( aoDrivers.size() > 1 )
{
CPLError( CE_Warning, CPLE_AppDefined,
"Several drivers matching %s extension. Using %s",
CPLGetExtension(pszDstFilename), aoDrivers[0].c_str() );
}
osFormat = aoDrivers[0];
}
}
else
{
osFormat = pszFormat;
}
OGRSFDriverH hDriver = OGRGetDriverByName( osFormat.c_str() );
if( hDriver == nullptr )
{
fprintf( stderr, "Unable to find format driver named %s.\n",
osFormat.c_str() );
exit( 10 );
}
OGRDataSourceH hDS =
OGR_Dr_CreateDataSource(hDriver, pszDstFilename, papszDSCO);
if( hDS == nullptr )
exit(1);
OGRLayerH hLayer =
OGR_DS_CreateLayer(hDS, pszNewLayerName, hSRS,
bPolygonize ? (b3D ? wkbMultiPolygon25D : wkbMultiPolygon )
: (b3D ? wkbLineString25D : wkbLineString),
papszLCO);
if( hLayer == nullptr )
exit( 1 );
OGRFieldDefnH hFld = OGR_Fld_Create("ID", OFTInteger);
OGR_Fld_SetWidth( hFld, 8 );
OGR_L_CreateField( hLayer, hFld, FALSE );
OGR_Fld_Destroy( hFld );
if( pszElevAttrib )
{
CreateElevAttrib( pszElevAttrib, hLayer );
}
if( pszElevAttribMin )
{
CreateElevAttrib( pszElevAttribMin, hLayer );
}
if( pszElevAttribMax )
{
CreateElevAttrib( pszElevAttribMax, hLayer );
}
/* -------------------------------------------------------------------- */
/* Invoke. */
/* -------------------------------------------------------------------- */
int iIDField = OGR_FD_GetFieldIndex( OGR_L_GetLayerDefn( hLayer ), "ID" );
int iElevField = (pszElevAttrib == nullptr) ? -1 :
OGR_FD_GetFieldIndex( OGR_L_GetLayerDefn( hLayer ),
pszElevAttrib );
int iElevFieldMin = (pszElevAttribMin == nullptr) ? -1 :
OGR_FD_GetFieldIndex( OGR_L_GetLayerDefn( hLayer ),
pszElevAttribMin );
int iElevFieldMax = (pszElevAttribMax == nullptr) ? -1 :
OGR_FD_GetFieldIndex( OGR_L_GetLayerDefn( hLayer ),
pszElevAttribMax );
char** options = nullptr;
if ( nFixedLevelCount > 0 ) {
std::string values = "FIXED_LEVELS=";
for ( int i = 0; i < nFixedLevelCount; i++ ) {
const int sz = 32;
char* newValue = new char[sz+1];
if ( i == nFixedLevelCount - 1 ) {
CPLsnprintf( newValue, sz+1, "%f", adfFixedLevels[i] );
}
else {
CPLsnprintf( newValue, sz+1, "%f,", adfFixedLevels[i] );
}
values = values + std::string( newValue );
delete[] newValue;
}
options = CSLAddString( options, values.c_str() );
}
else if ( dfExpBase != 0.0 ) {
options = CSLAppendPrintf( options, "LEVEL_EXP_BASE=%f", dfExpBase );
}
else if ( dfInterval != 0.0 ) {
options = CSLAppendPrintf( options, "LEVEL_INTERVAL=%f", dfInterval );
}
if ( dfOffset != 0.0 ) {
options = CSLAppendPrintf( options, "LEVEL_BASE=%f", dfOffset );
}
if ( bNoDataSet ) {
options = CSLAppendPrintf( options, "NODATA=%.19g", dfNoData );
}
if ( iIDField != -1 ) {
options = CSLAppendPrintf( options, "ID_FIELD=%d", iIDField );
}
if ( iElevField != -1 ) {
options = CSLAppendPrintf( options, "ELEV_FIELD=%d", iElevField );
}
if ( iElevFieldMin != -1 ) {
options = CSLAppendPrintf( options, "ELEV_FIELD_MIN=%d", iElevFieldMin );
}
if ( iElevFieldMax != -1 ) {
options = CSLAppendPrintf( options, "ELEV_FIELD_MAX=%d", iElevFieldMax );
}
if ( bPolygonize ) {
options = CSLAppendPrintf( options, "POLYGONIZE=YES" );
}
CPLErr eErr = GDALContourGenerateEx( hBand, hLayer, options, pfnProgress, nullptr );
CSLDestroy( options );
OGR_DS_Destroy( hDS );
GDALClose( hSrcDS );
if (hSRS)
OSRDestroySpatialReference( hSRS );
CSLDestroy( argv );
CSLDestroy( papszDSCO );
CSLDestroy( papszLCO );
GDALDestroyDriverManager();
OGRCleanupAll();
return (eErr == CE_None) ? 0 : 1;
}
MAIN_END
|
C++
|
Apache-2.0
|
caldwellc/node-gdal/deps/libgdal/gdal/apps/gdal_contour.cpp
|
da5cfaf4-c80a-4506-ba6f-387d1e19d214
|
[{"tag": "EMAIL", "value": "warmerdam@pobox.com", "start": 185, "end": 204, "context": "Generator mainline.\n * Author: Frank Warmerdam <warmerdam@pobox.com>\n *\n ********************************************"}, {"tag": "NAME", "value": "Even Rouault", "start": 387, "end": 399, "context": "gy (www.actgate.com).\n * Copyright (c) 2008-2013, Even Rouault <even dot rouault at mines-paris dot org>\n * Copy"}, {"tag": "NAME", "value": "Frank Warmerdam", "start": 168, "end": 183, "context": "urpose: Contour Generator mainline.\n * Author: Frank Warmerdam <warmerdam@pobox.com>\n *\n ***********************"}]
|
[{"tag": "EMAIL", "value": "warmerdam@pobox.com", "start": 185, "end": 204, "context": "Generator mainline.\n * Author: Frank Warmerdam <warmerdam@pobox.com>\n *\n ********************************************"}, {"tag": "NAME", "value": "Even Rouault", "start": 387, "end": 399, "context": "gy (www.actgate.com).\n * Copyright (c) 2008-2013, Even Rouault <even dot rouault at mines-paris dot org>\n * Copy"}, {"tag": "NAME", "value": "Frank Warmerdam", "start": 168, "end": 183, "context": "urpose: Contour Generator mainline.\n * Author: Frank Warmerdam <warmerdam@pobox.com>\n *\n ***********************"}]
|
import React from 'react';
import Link from 'gatsby-link';
import Projects from '../components/Projects';
import StyledLink from '../components/StyledLink';
import { styling, colors, delay } from '../utils/style';
import { Container } from '../utils/shared';
import styled, { keyframes } from 'styled-components';
const HelloText = styled.h1`
color: ${colors.primaryColor};
`;
const IndexPage = () => (
<Container>
<div>
<HelloText>Hey! 👋</HelloText>
<HelloText>I'm Leo, a Front End Developer in Washington, DC.</HelloText>
</div>
<div>
<p>I'm currently working on a variety of Front End projects over at <StyledLink href='https://www.weddingwire.com/' text='WeddingWire'/>.</p>
<p>If you want to get in touch with me, you can find me on <StyledLink href='https://twitter.com/itsLeeOhGee' text='Twitter' />, <StyledLink href='https://www.linkedin.com/in/leogenerali/' text='LinkedIn' />, or <StyledLink href='https://github.com/leo-generali' text='Github' />. If you want to say hello, you can email me <StyledLink href='mailto:me@leogenerali.com?Subject=Hello!' text='here' />.</p>
<p>If I'm not coding, I'm probably out running. I try and post all of my runs on Strava. If that sounds like your type of thing, you can check that out over <StyledLink href='https://www.strava.com/athletes/11876587' text='here' />.</p>
<p>I also enjoy building tools that solve problems. I get to help others out, and I learn a thing or two in the process. Here are some of the cooler things I've made:</p>
<Projects />
</div>
</Container>
)
export default IndexPage;
|
JavaScript
|
MIT
|
leo-generali/leo-generali.github.io/src/pages/index.js
|
59c58f63-6349-4626-a3e1-6e3b06c0a760
|
[{"tag": "NAME", "value": "Leo", "start": 495, "end": 498, "context": "lloText>Hey! \ud83d\udc4b</HelloText>\n <HelloText>I'm Leo, a Front End Developer in Washington, DC.</HelloT"}, {"tag": "USERNAME", "value": "itsLeeOhGee", "start": 825, "end": 836, "context": " find me on <StyledLink href='https://twitter.com/itsLeeOhGee' text='Twitter' />, <StyledLink href='https://www"}, {"tag": "EMAIL", "value": "me@leogenerali.com", "start": 1077, "end": 1095, "context": " hello, you can email me <StyledLink href='mailto:me@leogenerali.com?Subject=Hello!' text='here' />.</p>\n <p>If I"}, {"tag": "USERNAME", "value": "leogenerali", "start": 903, "end": 914, "context": "/>, <StyledLink href='https://www.linkedin.com/in/leogenerali/' text='LinkedIn' />, or <StyledLink href='https:"}, {"tag": "USERNAME", "value": "leo-generali", "start": 977, "end": 989, "context": "edIn' />, or <StyledLink href='https://github.com/leo-generali' text='Github' />. If you want to say hello, you "}]
|
[{"tag": "NAME", "value": "Leo", "start": 495, "end": 498, "context": "lloText>Hey! \ud83d\udc4b</HelloText>\n <HelloText>I'm Leo, a Front End Developer in Washington, DC.</HelloT"}, {"tag": "USERNAME", "value": "itsLeeOhGee", "start": 825, "end": 836, "context": " find me on <StyledLink href='https://twitter.com/itsLeeOhGee' text='Twitter' />, <StyledLink href='https://www"}, {"tag": "EMAIL", "value": "me@leogenerali.com", "start": 1077, "end": 1095, "context": " hello, you can email me <StyledLink href='mailto:me@leogenerali.com?Subject=Hello!' text='here' />.</p>\n <p>If I"}, {"tag": "USERNAME", "value": "leogenerali", "start": 903, "end": 914, "context": "/>, <StyledLink href='https://www.linkedin.com/in/leogenerali/' text='LinkedIn' />, or <StyledLink href='https:"}, {"tag": "USERNAME", "value": "leo-generali", "start": 977, "end": 989, "context": "edIn' />, or <StyledLink href='https://github.com/leo-generali' text='Github' />. If you want to say hello, you "}]
|
#define TH_MIN -2000
#define TH_MAX 2000
#define TH_NUM 1000
#define EN_MIN 0.0
#define EN_MAX 10.0
#define EN_NUM 3000
#include <iostream>
#include <sstream>
#include <stdlib.h>
#include "stdio.h"
#include <unistd.h>
using std::cout;
using std::endl;
const int parId = 4;
const int eid = ENERGY;
const int wid = 0;
const double E = 1.022;
float emean;
float esigma;
TH1F* h_Z;
TH1F* h_E;
TH2F* th2_energy;
TProfile* pro_energy;
//#define VResCheck
#define ERes
void RecAnalysis_User_eplus()
{
void RecAnalysisSingleEnergy(const char* simFilePath, const char* recFilePath, const char* elecFilePath, int evtNum, int jobNum, int baseline);
emean = 0;
esigma = 0;
//std::string topdir = "/junofs/users/wenjie/job_SLC6/optical_model/Positron/uniform_J16v1r4/";
//std::string subdir = "e+_1.022MeV";
int evtnum = 500;
int jobnum = FILENUM;
int Energy[11] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
int seed = 0;
TString recdir = Form("DATAPATH/e+/e+_Uniform/%dMeV/recQTMLE/user-root", Energy[eid]);
TString simdir = Form("DATAPATH/e+/e+_Uniform/%dMeV/detsim/user-root", Energy[eid]);
TString elecdir = Form("DATAPATH/e+/e+_Uniform/%dMeV/elecsim/user-root", Energy[eid]);
RecAnalysisSingleEnergy(simdir, recdir, elecdir, evtnum, jobnum, seed);
}
void RecAnalysisSingleEnergy(const char* simFilePath, const char* recFilePath, const char* elecFilePath, int evtNum, int jobNum, int baseline){
gStyle->SetOptFit(1111);
h_Z = new TH1F("h_Z_1.022MeV","Z_{rec}-Z_{edep}_1.022MeV",600,0,6000);
h_E = new TH1F("h_E_1.022MeV","E_{rec}_1.022MeV",2000,0,20000);
pro_energy = new TProfile("profile_1.022MeV_energy","profile_1.022MeV_energy",100,0,18,0,3);
TH1D* hSignal = new TH1D("hSignal", "hSignal", 2000, 0., 10.);
TH1D* hSpec = new TH1D("hSpec", "hSpec", 2000, 0., 10.);
TH1D* hBkg = new TH1D("hBkg", "hBkg", 2000, 0., 10.);
TH1D* hNPESignal = new TH1D("hNPESignal", "hNPESignal", 2000, 0., 10.);
TChain ch_rec("TRec");
TChain ch_sim("evt");
TChain ch_elec("eventindex");
ifstream failIDin;
failIDin.open("errorfileid.txt", ios::in);
vector<int> vFailId;
int idtemp = 1;
while(failIDin>>idtemp) {
vFailId.push_back(idtemp);
}
for(int j=0;j<jobNum;j++){
string n_flag;
stringstream ss;
int k = baseline + j;
bool isContinue = false;
for(int id = 0;id<vFailId.size(); id++) {
if(vFailId[id]==k) {isContinue = true;break;}
}
if(isContinue) continue;
ss << k;
ss >> n_flag;
TString recFileAdd = Form("%s/user-recQTMLE-%s.root",recFilePath,n_flag.c_str());
TString simFileAdd = Form("%s/user-detsim-%s.root",simFilePath,n_flag.c_str());
TString elecFileAdd = Form("%s/user-elecsim-%s.root",elecFilePath,n_flag.c_str());
ch_rec.Add(recFileAdd);
ch_sim.Add(simFileAdd);
ch_elec.Add(elecFileAdd);
}
TObjArray* recFileElements = ch_rec.GetListOfFiles();
TIter recnext(recFileElements);
TChainElement* recChEl = 0;
TObjArray* simFileElements = ch_sim.GetListOfFiles();
TIter simnext(simFileElements);
TChainElement* simChEl = 0;
TObjArray* elecFileElements = ch_elec.GetListOfFiles();
TIter elecnext(elecFileElements);
TChainElement* elecChEl = 0;
#ifdef ERes
TFile* sfile = new TFile("Assemb.root","RECREATE");
#endif
#ifdef VResCheck
TFile* sfile = new TFile("Assemb_VResCheck.root","RECREATE");
#endif
TTree* rectree = new TTree("evt","evt");
Float_t recx, recy, recz, recQx, recQy, recQz, recQTx, recQTy, recQTz;
Float_t timeRecx, timeRecy, timeRecz, timeRecE, timeRect0, Truex, Truey, Truez, TrueQx, TrueQy, TrueQz;
Float_t QEn, QTEn, NQE, TrueR, TrueQR, QTR, QR, QTt0, Tt0, m_TR, Qtime, Ttime, QTtime, nPhotons;
Float_t Qstat, Tstat, QTstat, memory, edep, edepX, edepY, edepZ;
Double_t triggerT, dTriggerT, corrEvtT;
rectree->Branch("recx",&recx,"recx/F");
rectree->Branch("recy",&recy,"recy/F");
rectree->Branch("recz",&recz,"recz/F");
rectree->Branch("recQx",&recQx,"recQx/F");
rectree->Branch("recQy",&recQy,"recQy/F");
rectree->Branch("recQz",&recQz,"recQz/F");
rectree->Branch("recQTx",&recQTx,"recQTx/F");
rectree->Branch("recQTy",&recQTy,"recQTy/F");
rectree->Branch("recQTz",&recQTz,"recQTz/F");
rectree->Branch("timeRecx",&timeRecx,"timeRecx/F");
rectree->Branch("timeRecy",&timeRecy,"timeRecy/F");
rectree->Branch("timeRecz",&timeRecz,"timeRecz/F");
rectree->Branch("timeRecE",&timeRecE,"timeRecE/F");
rectree->Branch("timeRect0",&timeRect0,"timeRect0/F");
rectree->Branch("Truex",&Truex,"Truex/F");
rectree->Branch("Truey",&Truey,"Truey/F");
rectree->Branch("Truez",&Truez,"Truez/F");
rectree->Branch("TrueQx",&TrueQx,"TrueQx/F");
rectree->Branch("TrueQy",&TrueQy,"TrueQy/F");
rectree->Branch("TrueQz",&TrueQz,"TrueQz/F");
rectree->Branch("NQE",&NQE,"NQE");
rectree->Branch("m_QEn",&QEn,"QEn/F");
rectree->Branch("m_QTEn",&QTEn,"TQEn/F");
rectree->Branch("m_Qstat",&Qstat,"Qstat/F");
rectree->Branch("m_Tstat",&Tstat,"Tstat/F");
rectree->Branch("m_QTstat",&QTstat,"QTstat/F");
rectree->Branch("TrueR",&TrueR,"TrueR/F");
rectree->Branch("m_QR",&QR,"QR/F");
rectree->Branch("m_TR",&m_TR,"m_TR/F");
rectree->Branch("m_QTR",&QTR,"QTR/F");
rectree->Branch("QTt0",&QTt0,"QTt0/F");
rectree->Branch("Qtime",&Qtime,"Qtime/F");
rectree->Branch("Ttime",&Ttime,"Ttime/F");
rectree->Branch("QTtime",&QTtime,"QTtime/F");
rectree->Branch("triggerT",&triggerT,"triggerT/F");
rectree->Branch("dTriggerT",&dTriggerT,"dTriggerT/F");
rectree->Branch("corrEvtT",&corrEvtT,"corrEvtT/D");
rectree->Branch("memory",&memory,"memory/F");
rectree->Branch("edep", &edep,"edep/F");
rectree->Branch("edepX",&edepX,"edepX/F");
rectree->Branch("edepY",&edepY,"edepY/F");
rectree->Branch("edepZ",&edepZ,"edepZ/F");
while (( recChEl=(TChainElement*)recnext() )){
TFile* recf = new TFile(recChEl->GetTitle());
cout << recChEl->GetTitle() << endl;
if(recf->TestBit(TFile::kRecovered)) continue;
if(recf->IsZombie()) continue;
TTree* rec_ch = (TTree*)recf -> Get("TRec");
if(rec_ch->IsZombie()) continue;
rec_ch -> SetBranchAddress("recx",&recx);
rec_ch -> SetBranchAddress("recy",&recy);
rec_ch -> SetBranchAddress("recz",&recz);
rec_ch -> SetBranchAddress("recQx",&recQx);
rec_ch -> SetBranchAddress("recQy",&recQy);
rec_ch -> SetBranchAddress("recQz",&recQz);
rec_ch -> SetBranchAddress("timeRecx",&timeRecx);
rec_ch -> SetBranchAddress("timeRecy",&timeRecy);
rec_ch -> SetBranchAddress("timeRecz",&timeRecz);
rec_ch -> SetBranchAddress("timeRecE",&timeRecE);
rec_ch -> SetBranchAddress("timeRect0",&timeRect0);
// rec_ch -> SetBranchAddress("Truex",&Truex);
// rec_ch -> SetBranchAddress("Truey",&Truey);
// rec_ch -> SetBranchAddress("Truez",&Truez);
rec_ch -> SetBranchAddress("TrueQx",&TrueQx);
rec_ch -> SetBranchAddress("TrueQy",&TrueQy);
rec_ch -> SetBranchAddress("TrueQz",&TrueQz);
rec_ch -> SetBranchAddress("m_NQE",&NQE);
rec_ch -> SetBranchAddress("m_QEn",&QEn);
rec_ch -> SetBranchAddress("m_QTEn",&QTEn);
rec_ch -> SetBranchAddress("m_TR",&m_TR);
rec_ch -> SetBranchAddress("m_QR",&QR);
rec_ch -> SetBranchAddress("m_QTR",&QTR);
// rec_ch -> SetBranchAddress("TrueR",&TrueR);
rec_ch -> SetBranchAddress("TrueQR",&TrueQR);
rec_ch -> SetBranchAddress("recQTt0",&QTt0);
rec_ch -> SetBranchAddress("m_Qstat",&Qstat);
rec_ch -> SetBranchAddress("m_Tstat",&Tstat);
rec_ch -> SetBranchAddress("m_QTstat",&QTstat);
rec_ch -> SetBranchAddress("m_Qtime",&Qtime);
rec_ch -> SetBranchAddress("m_Ttime",&Ttime);
rec_ch -> SetBranchAddress("m_QTtime",&QTtime);
rec_ch -> SetBranchAddress("m_triggerT",&triggerT);
rec_ch -> SetBranchAddress("m_dTriggerT",&dTriggerT);
rec_ch -> SetBranchAddress("m_FadcEvtT",&corrEvtT);
rec_ch -> SetBranchAddress("m_memory",&memory);
// rec_ch -> SetBranchAddress("edep", &edep);
simChEl=(TChainElement*)simnext();
TFile* simf = new TFile(simChEl->GetTitle());
cout << simChEl->GetTitle() << endl;
if(simf->TestBit(TFile::kRecovered)) continue;
if(simf->IsZombie()) continue;
TTree* sim_ch = (TTree*)simf -> Get("evt");
if(sim_ch->IsZombie()) continue;
sim_ch -> SetBranchAddress("edep",&edep);
sim_ch -> SetBranchAddress("edepX",&edepX);
sim_ch -> SetBranchAddress("edepY",&edepY);
sim_ch -> SetBranchAddress("edepZ",&edepZ);
elecChEl=(TChainElement*)elecnext();
TFile* elecf = new TFile(elecChEl->GetTitle());
cout << elecChEl->GetTitle() << endl;
if(elecf->TestBit(TFile::kRecovered)) continue;
if(elecf->IsZombie()) continue;
TTree* elec_ch = (TTree*)elecf -> Get("eventindex");
if(elec_ch->IsZombie()) continue;
vector<int>* entries = new vector<int>;
elec_ch -> SetBranchAddress("entries",&entries);
cout<<sim_ch->GetEntries()<< '\t' << elec_ch->GetEntries()<< '\t' << rec_ch->GetEntries()<< endl;
for(int i=0; i<rec_ch->GetEntries(); i++){
elec_ch->GetEntry(i);
rec_ch->GetEntry(i);
int simk = entries->at(0);
vector<int>::iterator iter = entries->begin();
while(iter!=entries->end()) {
if(*iter<simk) simk = *iter;
iter++;
}
sim_ch->GetEntry(simk);
TrueR = sqrt(edepX*edepX+edepY*edepY+edepZ*edepZ);
#ifdef VResCheck
bool IsBadFile = false;
if(TMath::Abs(QR - TrueR)>2000) {cout << "####### Wrong File! #######" << recChEl->GetTitle() << endl;IsBadFile = true; break;}
if(IsBadFile) continue;
#endif
Truex=edepX;
Truey=edepY;
Truez=edepZ;
rectree->Fill();
Float_t edep_r = sqrt(recx*recx + recy*recy + recz*recz);
pro_energy->Fill(pow(edep_r/1000.,3), QTEn,1);
if(edep_r < 17200.) {
hSignal->Fill(QEn);
hNPESignal->Fill(nPhotons);
}
}
delete rec_ch;
delete elec_ch;
delete sim_ch;
delete recf;
delete elecf;
delete simf;
}
sfile->cd();
rectree->Write();
hBkg->Write();
//hSpec->Write();
hSignal->Write();
hNPESignal->Write();
sfile->Close();
TCanvas* myc=new TCanvas("myc","a canvas",10,10,700,500);
h_Z->GetXaxis()->SetTitle("R^{3}[m^{3}]");
h_Z->Draw();
TCanvas* myc1=new TCanvas("myc1","a canvas",10,10,700,500);
//h_E->Fit("gaus","W","C",EN_MIN,EN_MAX);
//TF1* fitE = (TF1*)h_E->GetFunction("gaus");
//Float_t energy_mean = fitE ->GetParameter(1);
//Float_t energy_sigma = fitE->GetParameter(2)/fitE->GetParameter(1);
h_E->GetXaxis()->SetTitle("energy[MeV]");
h_E->Draw();
TCanvas* myc_energy=new TCanvas("myc_energy","energy distribution",10,10,900,500);
myc_energy->Divide(2,1);
pro_energy->GetXaxis()->SetTitle("R_{edep}^{3}[m^{3}]");
pro_energy->GetYaxis()->SetTitle("energy[MeV]");
pro_energy->GetYaxis()->SetTitleOffset(1.2);
pro_energy->GetYaxis()->CenterTitle();
pro_energy->SetLineColor(2);
myc_energy->cd(1);
myc_energy->cd(2);
pro_energy->GetYaxis()->SetRangeUser(0,3);
pro_energy->Draw();
// emean = energy_mean;
// esigma = 100*energy_sigma;
// cout << "*******************************************" << endl;
// cout << " energy mean:" << energy_mean << " MeV" << endl;
// cout << " energy resolution: " << 100*energy_sigma << " % " << endl;
// cout << "*******************************************" << endl;
cout << "Successfully" << endl;
}
|
C++
|
Apache-2.0
|
jiangw425/JUNOOfflineProcess/recQTMLE/SampleFiles/RecAnalysis_User_eplus.C
|
f6638035-9a12-4dc5-a61e-1a4af484e0d7
|
[]
|
[]
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _pure = require('recompose/pure');
var _pure2 = _interopRequireDefault(_pure);
var _SvgIcon = require('material-ui/SvgIcon');
var _SvgIcon2 = _interopRequireDefault(_SvgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var SvgIconCustom = global.__MUI_SvgIcon__ || _SvgIcon2.default;
var _ref = _react2.default.createElement('path', { d: 'M0 7.72V9.4l3-1V18h2V6h-.25L0 7.72zm23.78 6.65c-.14-.28-.35-.53-.63-.74-.28-.21-.61-.39-1.01-.53s-.85-.27-1.35-.38c-.35-.07-.64-.15-.87-.23-.23-.08-.41-.16-.55-.25-.14-.09-.23-.19-.28-.3-.05-.11-.08-.24-.08-.39 0-.14.03-.28.09-.41.06-.13.15-.25.27-.34.12-.1.27-.18.45-.24s.4-.09.64-.09c.25 0 .47.04.66.11.19.07.35.17.48.29.13.12.22.26.29.42.06.16.1.32.1.49h1.95c0-.39-.08-.75-.24-1.09-.16-.34-.39-.63-.69-.88-.3-.25-.66-.44-1.09-.59C21.49 9.07 21 9 20.46 9c-.51 0-.98.07-1.39.21-.41.14-.77.33-1.06.57-.29.24-.51.52-.67.84-.16.32-.23.65-.23 1.01s.08.69.23.96c.15.28.36.52.64.73.27.21.6.38.98.53.38.14.81.26 1.27.36.39.08.71.17.95.26s.43.19.57.29c.13.1.22.22.27.34.05.12.07.25.07.39 0 .32-.13.57-.4.77-.27.2-.66.29-1.17.29-.22 0-.43-.02-.64-.08-.21-.05-.4-.13-.56-.24-.17-.11-.3-.26-.41-.44-.11-.18-.17-.41-.18-.67h-1.89c0 .36.08.71.24 1.05.16.34.39.65.7.93.31.27.69.49 1.15.66.46.17.98.25 1.58.25.53 0 1.01-.06 1.44-.19.43-.13.8-.31 1.11-.54.31-.23.54-.51.71-.83.17-.32.25-.67.25-1.06-.02-.4-.09-.74-.24-1.02zm-9.96-7.32c-.34-.4-.75-.7-1.23-.88-.47-.18-1.01-.27-1.59-.27-.58 0-1.11.09-1.59.27-.48.18-.89.47-1.23.88-.34.41-.6.93-.79 1.59-.18.65-.28 1.45-.28 2.39v1.92c0 .94.09 1.74.28 2.39.19.66.45 1.19.8 1.6.34.41.75.71 1.23.89.48.18 1.01.28 1.59.28.59 0 1.12-.09 1.59-.28.48-.18.88-.48 1.22-.89.34-.41.6-.94.78-1.6.18-.65.28-1.45.28-2.39v-1.92c0-.94-.09-1.74-.28-2.39-.18-.66-.44-1.19-.78-1.59zm-.92 6.17c0 .6-.04 1.11-.12 1.53-.08.42-.2.76-.36 1.02-.16.26-.36.45-.59.57-.23.12-.51.18-.82.18-.3 0-.58-.06-.82-.18s-.44-.31-.6-.57c-.16-.26-.29-.6-.38-1.02-.09-.42-.13-.93-.13-1.53v-2.5c0-.6.04-1.11.13-1.52.09-.41.21-.74.38-1 .16-.25.36-.43.6-.55.24-.11.51-.17.81-.17.31 0 .58.06.81.17.24.11.44.29.6.55.16.25.29.58.37.99.08.41.13.92.13 1.52v2.51z' });
var Timer10 = function Timer10(props) {
return _react2.default.createElement(
SvgIconCustom,
props,
_ref
);
};
Timer10 = (0, _pure2.default)(Timer10);
Timer10.muiName = 'SvgIcon';
exports.default = Timer10;
|
JavaScript
|
MIT
|
Akshat-Singh/BuildMyTrip/node_modules/material-ui-icons/Timer10.js
|
f0933138-f3dc-4ede-98a2-cb8e3183b2da
|
[]
|
[]
|
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.xslf.usermodel;
import junit.framework.TestCase;
import org.apache.poi.xslf.XSLFTestDataSamples;
import org.openxmlformats.schemas.drawingml.x2006.main.CTTextBodyProperties;
import org.openxmlformats.schemas.drawingml.x2006.main.CTTextCharacterProperties;
import org.openxmlformats.schemas.drawingml.x2006.main.CTTextParagraphProperties;
import org.openxmlformats.schemas.drawingml.x2006.main.STTextAlignType;
import org.openxmlformats.schemas.presentationml.x2006.main.CTPlaceholder;
import org.openxmlformats.schemas.presentationml.x2006.main.STPlaceholderType;
import java.awt.Color;
/**
* @author Yegor Kozlov
*/
public class TestXSLFTextShape extends TestCase {
/**
 * Opens the sample presentation "layouts.pptx" and verifies the text
 * properties of a representative subset of its slides, one helper per
 * slide layout under test.
 */
public void testLayouts(){
    XMLSlideShow show = XSLFTestDataSamples.openSampleDocument("layouts.pptx");
    XSLFSlide[] slides = show.getSlides();
    verifySlide1(slides[0]);
    verifySlide2(slides[1]);
    verifySlide3(slides[2]);
    verifySlide4(slides[3]);
    verifySlide7(slides[6]);
    verifySlide8(slides[7]);
    verifySlide10(slides[9]);
}
/**
 * Verifies slide 1, which uses the "Title Slide" layout: a centered title
 * and a subtitle. Neither placeholder defines its own anchor or body
 * properties; both are inherited from the matching placeholder in the
 * slide layout.
 *
 * @param slide the first slide of layouts.pptx
 */
void verifySlide1(XSLFSlide slide){
    XSLFSlideLayout layout = slide.getSlideLayout();
    XSLFShape[] shapes = slide.getShapes();
    assertEquals("Title Slide",layout.getName());

    // shape 1: centered-title placeholder
    XSLFTextShape shape1 = (XSLFTextShape)shapes[0];
    CTPlaceholder ph1 = shape1.getCTPlaceholder();
    assertEquals(STPlaceholderType.CTR_TITLE, ph1.getType());
    // anchor is not defined in the shape
    assertNull(shape1.getSpPr().getXfrm());

    // the anchor is therefore resolved from the layout's placeholder
    XSLFTextShape masterShape1 = (XSLFTextShape)layout.getPlaceholder(ph1);
    assertNotNull(masterShape1.getSpPr().getXfrm());
    assertEquals(masterShape1.getAnchor(), shape1.getAnchor());

    CTTextBodyProperties bodyPr1 = shape1.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr1.isSetLIns() && !bodyPr1.isSetRIns() &&
        !bodyPr1.isSetBIns() && !bodyPr1.isSetTIns() &&
        !bodyPr1.isSetAnchor()
    );
    // inherited insets, returned in points
    assertEquals(7.2, shape1.getLeftInset());  // 0.1"
    assertEquals(7.2, shape1.getRightInset()); // 0.1"
    assertEquals(3.6, shape1.getTopInset());  // 0.05"
    assertEquals(3.6, shape1.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.MIDDLE, shape1.getVerticalAlignment());

    // now check text properties
    assertEquals("Centered Title", shape1.getText());
    XSLFTextRun r1 = shape1.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals("Calibri", r1.getFontFamily());
    assertEquals(44.0, r1.getFontSize());
    assertEquals(Color.black, r1.getFontColor());

    // shape 2: subtitle placeholder
    XSLFTextShape shape2 = (XSLFTextShape)shapes[1];
    CTPlaceholder ph2 = shape2.getCTPlaceholder();
    assertEquals(STPlaceholderType.SUB_TITLE, ph2.getType());
    // anchor is not defined in the shape
    assertNull(shape2.getSpPr().getXfrm());

    XSLFTextShape masterShape2 = (XSLFTextShape)layout.getPlaceholder(ph2);
    assertNotNull(masterShape2.getSpPr().getXfrm());
    assertEquals(masterShape2.getAnchor(), shape2.getAnchor());

    CTTextBodyProperties bodyPr2 = shape2.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr2.isSetLIns() && !bodyPr2.isSetRIns() &&
        !bodyPr2.isSetBIns() && !bodyPr2.isSetTIns() &&
        !bodyPr2.isSetAnchor()
    );
    assertEquals(7.2, shape2.getLeftInset());  // 0.1"
    assertEquals(7.2, shape2.getRightInset()); // 0.1"
    assertEquals(3.6, shape2.getTopInset());  // 0.05"
    assertEquals(3.6, shape2.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.TOP, shape2.getVerticalAlignment());

    assertEquals("subtitle", shape2.getText());
    XSLFTextRun r2 = shape2.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals("Calibri", r2.getFontFamily());
    assertEquals(32.0, r2.getFontSize());
    // TODO fix calculation of tint
    //assertEquals(new Color(137, 137, 137), r2.getFontColor());
}
/**
 * Verifies slide 2, which uses the "Title and Content" layout. Here the
 * anchor is not defined on the slide OR the layout, so resolution falls
 * all the way back to the slide master. The content placeholder is
 * identified by index only ({@code <p:ph idx="1"/>}) and its paragraphs
 * exercise bullet/indent inheritance for outline levels 1-5.
 *
 * @param slide the second slide of layouts.pptx
 */
void verifySlide2(XSLFSlide slide){
    XSLFSlideLayout layout = slide.getSlideLayout();
    XSLFShape[] shapes = slide.getShapes();
    assertEquals("Title and Content",layout.getName());

    // shape 1: title placeholder
    XSLFTextShape shape1 = (XSLFTextShape)shapes[0];
    CTPlaceholder ph1 = shape1.getCTPlaceholder();
    assertEquals(STPlaceholderType.TITLE, ph1.getType());
    // anchor is not defined in the shape
    assertNull(shape1.getSpPr().getXfrm());

    XSLFTextShape masterShape1 = (XSLFTextShape)layout.getPlaceholder(ph1);
    // layout does not have anchor info either, it is in the slide master
    assertNull(masterShape1.getSpPr().getXfrm());
    masterShape1 = (XSLFTextShape)layout.getSlideMaster().getPlaceholder(ph1);
    assertNotNull(masterShape1.getSpPr().getXfrm());
    assertEquals(masterShape1.getAnchor(), shape1.getAnchor());

    CTTextBodyProperties bodyPr1 = shape1.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr1.isSetLIns() && !bodyPr1.isSetRIns() &&
        !bodyPr1.isSetBIns() && !bodyPr1.isSetTIns() &&
        !bodyPr1.isSetAnchor()
    );
    assertEquals(7.2, shape1.getLeftInset());  // 0.1"
    assertEquals(7.2, shape1.getRightInset()); // 0.1"
    assertEquals(3.6, shape1.getTopInset());  // 0.05"
    assertEquals(3.6, shape1.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.MIDDLE, shape1.getVerticalAlignment());

    // now check text properties
    assertEquals("Title", shape1.getText());
    XSLFTextRun r1 = shape1.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals("Calibri", r1.getFontFamily());
    assertEquals(44.0, r1.getFontSize());
    assertEquals(Color.black, r1.getFontColor());

    // shape 2: body content placeholder, addressed by index only
    XSLFTextShape shape2 = (XSLFTextShape)shapes[1];
    CTPlaceholder ph2 = shape2.getCTPlaceholder();
    assertFalse(ph2.isSetType());  // <p:ph idx="1"/>
    assertTrue(ph2.isSetIdx());
    assertEquals(1, ph2.getIdx());
    // anchor is not defined in the shape
    assertNull(shape2.getSpPr().getXfrm());

    XSLFTextShape masterShape2 = (XSLFTextShape)layout.getPlaceholder(ph2);
    // anchor of the body text is missing in the slide layout, lookup in the slide master
    assertNull(masterShape2.getSpPr().getXfrm());
    masterShape2 = (XSLFTextShape)layout.getSlideMaster().getPlaceholder(ph2);
    assertNotNull(masterShape2.getSpPr().getXfrm());
    assertEquals(masterShape2.getAnchor(), shape2.getAnchor());

    CTTextBodyProperties bodyPr2 = shape2.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr2.isSetLIns() && !bodyPr2.isSetRIns() &&
        !bodyPr2.isSetBIns() && !bodyPr2.isSetTIns() &&
        !bodyPr2.isSetAnchor()
    );
    assertEquals(7.2, shape2.getLeftInset());  // 0.1"
    assertEquals(7.2, shape2.getRightInset()); // 0.1"
    assertEquals(3.6, shape2.getTopInset());  // 0.05"
    assertEquals(3.6, shape2.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.TOP, shape2.getVerticalAlignment());

    // outline level 1: round bullet, 32pt, 27pt indent
    XSLFTextRun pr1 = shape2.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(0, pr1.getParentParagraph().getLevel());
    assertEquals("Content", pr1.getText());
    assertEquals("Calibri", pr1.getFontFamily());
    assertEquals(32.0, pr1.getFontSize());
    assertEquals(27.0, pr1.getParentParagraph().getLeftMargin());
    assertEquals("\u2022", pr1.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr1.getParentParagraph().getBulletFont());

    // outline level 2: en-dash bullet, smaller font, deeper indent
    XSLFTextRun pr2 = shape2.getTextParagraphs().get(1).getTextRuns().get(0);
    assertEquals(1, pr2.getParentParagraph().getLevel());
    assertEquals("Level 2", pr2.getText());
    assertEquals("Calibri", pr2.getFontFamily());
    assertEquals(28.0, pr2.getFontSize());
    assertEquals(58.5, pr2.getParentParagraph().getLeftMargin());
    assertEquals("\u2013", pr2.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr2.getParentParagraph().getBulletFont());

    // outline level 3
    XSLFTextRun pr3 = shape2.getTextParagraphs().get(2).getTextRuns().get(0);
    assertEquals(2, pr3.getParentParagraph().getLevel());
    assertEquals("Level 3", pr3.getText());
    assertEquals("Calibri", pr3.getFontFamily());
    assertEquals(24.0, pr3.getFontSize());
    assertEquals(90.0, pr3.getParentParagraph().getLeftMargin());
    assertEquals("\u2022", pr3.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr3.getParentParagraph().getBulletFont());

    // outline level 4
    XSLFTextRun pr4 = shape2.getTextParagraphs().get(3).getTextRuns().get(0);
    assertEquals(3, pr4.getParentParagraph().getLevel());
    assertEquals("Level 4", pr4.getText());
    assertEquals("Calibri", pr4.getFontFamily());
    assertEquals(20.0, pr4.getFontSize());
    assertEquals(126.0, pr4.getParentParagraph().getLeftMargin());
    assertEquals("\u2013", pr4.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr4.getParentParagraph().getBulletFont());

    // outline level 5: right-pointing double angle bullet
    XSLFTextRun pr5 = shape2.getTextParagraphs().get(4).getTextRuns().get(0);
    assertEquals(4, pr5.getParentParagraph().getLevel());
    assertEquals("Level 5", pr5.getText());
    assertEquals("Calibri", pr5.getFontFamily());
    assertEquals(20.0, pr5.getFontSize());
    assertEquals(162.0, pr5.getParentParagraph().getLeftMargin());
    assertEquals("\u00bb", pr5.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr5.getParentParagraph().getBulletFont());
}
/**
 * Verifies slide 3, which uses the "Section Header" layout: a bold title
 * anchored at the top and a body placeholder anchored at the bottom. Both
 * shapes inherit their anchors from the slide layout.
 *
 * @param slide the third slide of layouts.pptx
 */
void verifySlide3(XSLFSlide slide){
    XSLFSlideLayout layout = slide.getSlideLayout();
    XSLFShape[] shapes = slide.getShapes();
    assertEquals("Section Header",layout.getName());

    // shape 1: section title placeholder
    XSLFTextShape shape1 = (XSLFTextShape)shapes[0];
    CTPlaceholder ph1 = shape1.getCTPlaceholder();
    assertEquals(STPlaceholderType.TITLE, ph1.getType());
    // anchor is not defined in the shape
    assertNull(shape1.getSpPr().getXfrm());

    XSLFTextShape masterShape1 = (XSLFTextShape)layout.getPlaceholder(ph1);
    assertNotNull(masterShape1.getSpPr().getXfrm());
    assertEquals(masterShape1.getAnchor(), shape1.getAnchor());

    CTTextBodyProperties bodyPr1 = shape1.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr1.isSetLIns() && !bodyPr1.isSetRIns() &&
        !bodyPr1.isSetBIns() && !bodyPr1.isSetTIns() &&
        !bodyPr1.isSetAnchor()
    );
    assertEquals(7.2, shape1.getLeftInset());  // 0.1"
    assertEquals(7.2, shape1.getRightInset()); // 0.1"
    assertEquals(3.6, shape1.getTopInset());  // 0.05"
    assertEquals(3.6, shape1.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.TOP, shape1.getVerticalAlignment());

    // now check text properties
    assertEquals("Section Title", shape1.getText());
    XSLFTextRun r1 = shape1.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(TextAlign.LEFT, r1.getParentParagraph().getTextAlign());
    assertEquals("Calibri", r1.getFontFamily());
    assertEquals(40.0, r1.getFontSize());
    assertEquals(Color.black, r1.getFontColor());
    assertTrue(r1.isBold());
    assertFalse(r1.isItalic());
    assertFalse(r1.isUnderline());

    // shape 2: body placeholder, bottom-anchored
    XSLFTextShape shape2 = (XSLFTextShape)shapes[1];
    CTPlaceholder ph2 = shape2.getCTPlaceholder();
    assertEquals(STPlaceholderType.BODY, ph2.getType());
    // anchor is not defined in the shape
    assertNull(shape2.getSpPr().getXfrm());

    XSLFTextShape masterShape2 = (XSLFTextShape)layout.getPlaceholder(ph2);
    assertNotNull(masterShape2.getSpPr().getXfrm());
    assertEquals(masterShape2.getAnchor(), shape2.getAnchor());

    CTTextBodyProperties bodyPr2 = shape2.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr2.isSetLIns() && !bodyPr2.isSetRIns() &&
        !bodyPr2.isSetBIns() && !bodyPr2.isSetTIns() &&
        !bodyPr2.isSetAnchor()
    );
    assertEquals(7.2, shape2.getLeftInset());  // 0.1"
    assertEquals(7.2, shape2.getRightInset()); // 0.1"
    assertEquals(3.6, shape2.getTopInset());  // 0.05"
    assertEquals(3.6, shape2.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.BOTTOM, shape2.getVerticalAlignment());

    assertEquals("Section Header", shape2.getText());
    XSLFTextRun r2 = shape2.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(TextAlign.LEFT, r2.getParentParagraph().getTextAlign());
    assertEquals("Calibri", r2.getFontFamily());
    assertEquals(20.0, r2.getFontSize());
    // TODO fix calculation of tint
    //assertEquals(new Color(137, 137, 137), r2.getFontColor());
}
/**
 * Verifies slide 4, which uses the "Two Content" layout: a title plus two
 * side-by-side content placeholders. The title's anchor resolves through
 * the slide master; the left content placeholder exercises bullet/indent
 * inheritance for outline levels 1-4.
 *
 * @param slide the fourth slide of layouts.pptx
 */
void verifySlide4(XSLFSlide slide){
    XSLFSlideLayout layout = slide.getSlideLayout();
    XSLFShape[] shapes = slide.getShapes();
    assertEquals("Two Content",layout.getName());

    // shape 1: title placeholder
    XSLFTextShape shape1 = (XSLFTextShape)shapes[0];
    CTPlaceholder ph1 = shape1.getCTPlaceholder();
    assertEquals(STPlaceholderType.TITLE, ph1.getType());
    // anchor is not defined in the shape
    assertNull(shape1.getSpPr().getXfrm());

    XSLFTextShape masterShape1 = (XSLFTextShape)layout.getPlaceholder(ph1);
    // layout does not have anchor info either, it is in the slide master
    assertNull(masterShape1.getSpPr().getXfrm());
    masterShape1 = (XSLFTextShape)layout.getSlideMaster().getPlaceholder(ph1);
    assertNotNull(masterShape1.getSpPr().getXfrm());
    assertEquals(masterShape1.getAnchor(), shape1.getAnchor());

    CTTextBodyProperties bodyPr1 = shape1.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr1.isSetLIns() && !bodyPr1.isSetRIns() &&
        !bodyPr1.isSetBIns() && !bodyPr1.isSetTIns() &&
        !bodyPr1.isSetAnchor()
    );
    assertEquals(7.2, shape1.getLeftInset());  // 0.1"
    assertEquals(7.2, shape1.getRightInset()); // 0.1"
    assertEquals(3.6, shape1.getTopInset());  // 0.05"
    assertEquals(3.6, shape1.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.MIDDLE, shape1.getVerticalAlignment());

    // now check text properties
    assertEquals("Title", shape1.getText());
    XSLFTextRun r1 = shape1.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(TextAlign.CENTER, r1.getParentParagraph().getTextAlign());
    assertEquals("Calibri", r1.getFontFamily());
    assertEquals(44.0, r1.getFontSize());
    assertEquals(Color.black, r1.getFontColor());

    // shape 2: left content placeholder, addressed by index only
    XSLFTextShape shape2 = (XSLFTextShape)shapes[1];
    CTPlaceholder ph2 = shape2.getCTPlaceholder();
    assertFalse(ph2.isSetType());
    assertTrue(ph2.isSetIdx());
    assertEquals(1, ph2.getIdx());  //<p:ph sz="half" idx="1"/>
    // anchor is not defined in the shape
    assertNull(shape2.getSpPr().getXfrm());

    XSLFTextShape masterShape2 = (XSLFTextShape)layout.getPlaceholder(ph2);
    assertNotNull(masterShape2.getSpPr().getXfrm());
    assertEquals(masterShape2.getAnchor(), shape2.getAnchor());

    CTTextBodyProperties bodyPr2 = shape2.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr2.isSetLIns() && !bodyPr2.isSetRIns() &&
        !bodyPr2.isSetBIns() && !bodyPr2.isSetTIns() &&
        !bodyPr2.isSetAnchor()
    );
    assertEquals(7.2, shape2.getLeftInset());  // 0.1"
    assertEquals(7.2, shape2.getRightInset()); // 0.1"
    assertEquals(3.6, shape2.getTopInset());  // 0.05"
    assertEquals(3.6, shape2.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.TOP, shape2.getVerticalAlignment());

    // outline level 1
    XSLFTextRun pr1 = shape2.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(0, pr1.getParentParagraph().getLevel());
    assertEquals("Left", pr1.getText());
    assertEquals("Calibri", pr1.getFontFamily());
    assertEquals(28.0, pr1.getFontSize());
    assertEquals(27.0, pr1.getParentParagraph().getLeftMargin());
    assertEquals("\u2022", pr1.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr1.getParentParagraph().getBulletFont());

    // outline level 2
    XSLFTextRun pr2 = shape2.getTextParagraphs().get(1).getTextRuns().get(0);
    assertEquals(1, pr2.getParentParagraph().getLevel());
    assertEquals("Level 2", pr2.getParentParagraph().getText());
    assertEquals("Calibri", pr2.getFontFamily());
    assertEquals(24.0, pr2.getFontSize());
    assertEquals(58.5, pr2.getParentParagraph().getLeftMargin());
    assertEquals("\u2013", pr2.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr2.getParentParagraph().getBulletFont());

    // outline level 3
    XSLFTextRun pr3 = shape2.getTextParagraphs().get(2).getTextRuns().get(0);
    assertEquals(2, pr3.getParentParagraph().getLevel());
    assertEquals("Level 3", pr3.getParentParagraph().getText());
    assertEquals("Calibri", pr3.getFontFamily());
    assertEquals(20.0, pr3.getFontSize());
    assertEquals(90.0, pr3.getParentParagraph().getLeftMargin());
    assertEquals("\u2022", pr3.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr3.getParentParagraph().getBulletFont());

    // outline level 4
    XSLFTextRun pr4 = shape2.getTextParagraphs().get(3).getTextRuns().get(0);
    assertEquals(3, pr4.getParentParagraph().getLevel());
    assertEquals("Level 4", pr4.getParentParagraph().getText());
    assertEquals("Calibri", pr4.getFontFamily());
    assertEquals(18.0, pr4.getFontSize());
    assertEquals(126.0, pr4.getParentParagraph().getLeftMargin());
    assertEquals("\u2013", pr4.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr4.getParentParagraph().getBulletFont());

    // shape 3: right content placeholder
    XSLFTextShape shape3 = (XSLFTextShape)shapes[2];
    XSLFTextRun pr5 = shape3.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(0, pr5.getParentParagraph().getLevel());
    assertEquals("Right", pr5.getText());
    assertEquals("Calibri", pr5.getFontFamily());
    assertEquals(Color.black, pr5.getFontColor());
}
/**
 * Placeholder check for the fifth sample slide; assertions are not
 * implemented yet (see the TODO below).
 */
void verifySlide5(XSLFSlide slide){
    XSLFSlideLayout slideLayout = slide.getSlideLayout();
    XSLFShape[] slideShapes = slide.getShapes();
    // TODO: add assertions for this layout
}
/**
 * Verifies slide 7, which uses the "Blank" layout but still carries a
 * default title, a default text box, and a multi-level text box. All
 * runs fall back to the default Calibri styling.
 *
 * @param slide the seventh slide of layouts.pptx
 */
void verifySlide7(XSLFSlide slide){
    XSLFSlideLayout layout = slide.getSlideLayout();
    XSLFShape[] shapes = slide.getShapes();
    assertEquals("Blank",layout.getName());

    // shape 1: title placeholder with default formatting
    XSLFTextShape shape1 = (XSLFTextShape)shapes[0];
    CTPlaceholder ph1 = shape1.getCTPlaceholder();
    assertEquals(STPlaceholderType.TITLE, ph1.getType());
    // anchor is not defined in the shape
    assertNull(shape1.getSpPr().getXfrm());

    CTTextBodyProperties bodyPr1 = shape1.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr1.isSetLIns() && !bodyPr1.isSetRIns() &&
        !bodyPr1.isSetBIns() && !bodyPr1.isSetTIns() &&
        !bodyPr1.isSetAnchor()
    );
    assertEquals(7.2, shape1.getLeftInset());  // 0.1"
    assertEquals(7.2, shape1.getRightInset()); // 0.1"
    assertEquals(3.6, shape1.getTopInset());  // 0.05"
    assertEquals(3.6, shape1.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.MIDDLE, shape1.getVerticalAlignment());

    // now check text properties
    assertEquals("Blank with Default Title", shape1.getText());
    XSLFTextRun r1 = shape1.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(TextAlign.CENTER, r1.getParentParagraph().getTextAlign());
    assertEquals("Calibri", r1.getFontFamily());
    assertEquals(44.0, r1.getFontSize());
    assertEquals(Color.black, r1.getFontColor());
    assertFalse(r1.isBold());

    // shape 2: plain text box with default text
    XSLFTextShape shape2 = (XSLFTextShape)shapes[1];

    CTTextBodyProperties bodyPr2 = shape2.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr2.isSetLIns() && !bodyPr2.isSetRIns() &&
        !bodyPr2.isSetBIns() && !bodyPr2.isSetTIns() &&
        !bodyPr2.isSetAnchor()
    );
    assertEquals(7.2, shape2.getLeftInset());  // 0.1"
    assertEquals(7.2, shape2.getRightInset()); // 0.1"
    assertEquals(3.6, shape2.getTopInset());  // 0.05"
    assertEquals(3.6, shape2.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.TOP, shape2.getVerticalAlignment());

    XSLFTextRun pr1 = shape2.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(0, pr1.getParentParagraph().getLevel());
    assertEquals("Default Text", pr1.getText());
    assertEquals("Calibri", pr1.getFontFamily());
    assertEquals(18.0, pr1.getFontSize());

    // shape 3: text box with five paragraphs at different outline levels;
    // every run uses the default 18pt Calibri
    XSLFTextShape shape3 = (XSLFTextShape)shapes[2];
    assertEquals("Default", shape3.getTextParagraphs().get(0).getText());
    assertEquals("Text with levels", shape3.getTextParagraphs().get(1).getText());
    assertEquals("Level 1", shape3.getTextParagraphs().get(2).getText());
    assertEquals("Level 2", shape3.getTextParagraphs().get(3).getText());
    assertEquals("Level 3", shape3.getTextParagraphs().get(4).getText());
    for(int p = 0; p < 5; p++) {
        XSLFTextParagraph pr = shape3.getTextParagraphs().get(p);
        assertEquals("Calibri", pr.getTextRuns().get(0).getFontFamily());
        assertEquals(18.0, pr.getTextRuns().get(0).getFontSize());
    }
}
/**
 * Verifies slide 8, which uses the "Content with Caption" layout: a bold
 * bottom-anchored caption title, a multi-level content placeholder, and a
 * small caption text shape.
 *
 * @param slide the eighth slide of layouts.pptx
 */
void verifySlide8(XSLFSlide slide){
    XSLFSlideLayout layout = slide.getSlideLayout();
    XSLFShape[] shapes = slide.getShapes();
    assertEquals("Content with Caption",layout.getName());

    // shape 1: caption title placeholder
    XSLFTextShape shape1 = (XSLFTextShape)shapes[0];
    CTPlaceholder ph1 = shape1.getCTPlaceholder();
    assertEquals(STPlaceholderType.TITLE, ph1.getType());
    // anchor is not defined in the shape
    assertNull(shape1.getSpPr().getXfrm());

    XSLFTextShape masterShape1 = (XSLFTextShape)layout.getPlaceholder(ph1);
    // layout does not have anchor info either, it is in the slide master
    assertNotNull(masterShape1.getSpPr().getXfrm());
    assertEquals(masterShape1.getAnchor(), shape1.getAnchor());

    CTTextBodyProperties bodyPr1 = shape1.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr1.isSetLIns() && !bodyPr1.isSetRIns() &&
        !bodyPr1.isSetBIns() && !bodyPr1.isSetTIns() &&
        !bodyPr1.isSetAnchor()
    );
    assertEquals(7.2, shape1.getLeftInset());  // 0.1"
    assertEquals(7.2, shape1.getRightInset()); // 0.1"
    assertEquals(3.6, shape1.getTopInset());  // 0.05"
    assertEquals(3.6, shape1.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.BOTTOM, shape1.getVerticalAlignment());

    // now check text properties
    assertEquals("Caption", shape1.getText());
    XSLFTextRun r1 = shape1.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(TextAlign.LEFT, r1.getParentParagraph().getTextAlign());
    assertEquals("Calibri", r1.getFontFamily());
    assertEquals(20.0, r1.getFontSize());
    assertEquals(Color.black, r1.getFontColor());
    assertTrue(r1.isBold());

    // shape 2: content placeholder, addressed by index only
    XSLFTextShape shape2 = (XSLFTextShape)shapes[1];
    CTPlaceholder ph2 = shape2.getCTPlaceholder();
    assertFalse(ph2.isSetType());
    assertTrue(ph2.isSetIdx());
    assertEquals(1, ph2.getIdx());
    // anchor is not defined in the shape
    assertNull(shape2.getSpPr().getXfrm());

    XSLFTextShape masterShape2 = (XSLFTextShape)layout.getPlaceholder(ph2);
    assertNotNull(masterShape2.getSpPr().getXfrm());
    assertEquals(masterShape2.getAnchor(), shape2.getAnchor());

    CTTextBodyProperties bodyPr2 = shape2.getTextBodyPr();
    // none of the following properties are set in the shapes and fetched from the master shape
    assertTrue(
        !bodyPr2.isSetLIns() && !bodyPr2.isSetRIns() &&
        !bodyPr2.isSetBIns() && !bodyPr2.isSetTIns() &&
        !bodyPr2.isSetAnchor()
    );
    assertEquals(7.2, shape2.getLeftInset());  // 0.1"
    assertEquals(7.2, shape2.getRightInset()); // 0.1"
    assertEquals(3.6, shape2.getTopInset());  // 0.05"
    assertEquals(3.6, shape2.getBottomInset()); // 0.05"
    assertEquals(VerticalAlignment.TOP, shape2.getVerticalAlignment());

    // outline level 1
    XSLFTextRun pr1 = shape2.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(0, pr1.getParentParagraph().getLevel());
    assertEquals("Level 1", pr1.getText());
    assertEquals("Calibri", pr1.getFontFamily());
    assertEquals(32.0, pr1.getFontSize());
    assertEquals(27.0, pr1.getParentParagraph().getLeftMargin());
    assertEquals("\u2022", pr1.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr1.getParentParagraph().getBulletFont());

    // outline level 2
    XSLFTextRun pr2 = shape2.getTextParagraphs().get(1).getTextRuns().get(0);
    assertEquals(1, pr2.getParentParagraph().getLevel());
    assertEquals("Level 2", pr2.getParentParagraph().getText());
    assertEquals("Calibri", pr2.getFontFamily());
    assertEquals(28.0, pr2.getFontSize());
    assertEquals(58.5, pr2.getParentParagraph().getLeftMargin());
    assertEquals("\u2013", pr2.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr2.getParentParagraph().getBulletFont());

    // outline level 3
    XSLFTextRun pr3 = shape2.getTextParagraphs().get(2).getTextRuns().get(0);
    assertEquals(2, pr3.getParentParagraph().getLevel());
    assertEquals("Level 3", pr3.getParentParagraph().getText());
    assertEquals("Calibri", pr3.getFontFamily());
    assertEquals(24.0, pr3.getFontSize());
    assertEquals(90.0, pr3.getParentParagraph().getLeftMargin());
    assertEquals("\u2022", pr3.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr3.getParentParagraph().getBulletFont());

    // outline level 4
    XSLFTextRun pr4 = shape2.getTextParagraphs().get(3).getTextRuns().get(0);
    assertEquals(3, pr4.getParentParagraph().getLevel());
    assertEquals("Level 4", pr4.getParentParagraph().getText());
    assertEquals("Calibri", pr4.getFontFamily());
    assertEquals(20.0, pr4.getFontSize());
    assertEquals(126.0, pr4.getParentParagraph().getLeftMargin());
    assertEquals("\u2013", pr4.getParentParagraph().getBulletCharacter());
    assertEquals("Arial", pr4.getParentParagraph().getBulletFont());

    // shape 3: small caption body
    XSLFTextShape shape3 = (XSLFTextShape)shapes[2];
    assertEquals(VerticalAlignment.TOP, shape3.getVerticalAlignment());
    assertEquals("Content with caption", shape3.getText());

    pr1 = shape3.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(0, pr1.getParentParagraph().getLevel());
    assertEquals("Content with caption", pr1.getText());
    assertEquals("Calibri", pr1.getFontFamily());
    assertEquals(14.0, pr1.getFontSize());
}
/**
 * Verifies slide 10: the footer, date and slide-number placeholders that
 * are inherited from the slide master.
 */
void verifySlide10(XSLFSlide slide){
    // footer placeholder: centered 12pt Calibri text
    XSLFTextShape footerShape = (XSLFTextShape)slide.getPlaceholderByType(STPlaceholderType.INT_FTR);
    assertEquals("Apache Software Foundation", footerShape.getText());
    assertEquals(VerticalAlignment.MIDDLE, footerShape.getVerticalAlignment());

    XSLFTextRun footerRun = footerShape.getTextParagraphs().get(0).getTextRuns().get(0);
    assertEquals(TextAlign.CENTER, footerRun.getParentParagraph().getTextAlign());
    assertEquals("Calibri", footerRun.getFontFamily());
    assertEquals(12.0, footerRun.getFontSize());
    // TODO calculation of tint is incorrect
    assertEquals(new Color(64,64,64), footerRun.getFontColor());

    // date placeholder
    XSLFTextShape dateShape = (XSLFTextShape)slide.getPlaceholderByType(STPlaceholderType.INT_DT);
    assertEquals("Friday, October 21, 2011", dateShape.getText());

    // slide-number placeholder
    XSLFTextShape numberShape = (XSLFTextShape)slide.getPlaceholderByType(STPlaceholderType.INT_SLD_NUM);
    assertEquals("10", numberShape.getText());
}
/**
 * Verifies the five-level resolution order for title text properties,
 * from lowest to highest precedence:
 * <ol>
 *   <li>default title style on the slide master (p:titleStyle)</li>
 *   <li>title placeholder on the slide master</li>
 *   <li>title placeholder on the slide layout</li>
 *   <li>the shape's own list style (a:lstStyle)</li>
 *   <li>properties set directly on the text run (a:rPr)</li>
 * </ol>
 * Each step overrides the previous one and is asserted immediately after
 * the underlying XML is mutated.
 */
public void testTitleStyles(){
    XMLSlideShow ppt = new XMLSlideShow();

    XSLFSlideMaster master = ppt.getSlideMasters()[0];
    XSLFTheme theme = master.getTheme();
    XSLFSlideLayout layout = master.getLayout(SlideLayout.TITLE);
    XSLFSlide slide = ppt.createSlide(layout) ;
    assertSame(layout, slide.getSlideLayout());
    assertSame(master, slide.getSlideMaster());

    XSLFTextShape titleShape = (XSLFTextShape)slide.getPlaceholder(0);
    titleShape.setText("Apache POI");
    XSLFTextParagraph paragraph = titleShape.getTextParagraphs().get(0);
    XSLFTextRun textRun = paragraph.getTextRuns().get(0);

    // level 1 : default title style on the master slide
    // /p:sldMaster/p:txStyles/p:titleStyle/a:lvl1pPr
    CTTextParagraphProperties lv1PPr = master.getXmlObject().getTxStyles().getTitleStyle().getLvl1PPr();
    CTTextCharacterProperties lv1CPr = lv1PPr.getDefRPr();
    // font size is stored in hundredths of a point (4400 == 44pt)
    assertEquals(4400, lv1CPr.getSz());
    assertEquals(44.0, textRun.getFontSize());
    // "+mj-lt" resolves to the theme's major latin font
    assertEquals("+mj-lt", lv1CPr.getLatin().getTypeface());
    assertEquals("Calibri", theme.getMajorFont());
    assertEquals("Calibri", textRun.getFontFamily());
    lv1CPr.setSz(3200);
    assertEquals(32.0, textRun.getFontSize());
    lv1CPr.getLatin().setTypeface("Arial");
    assertEquals("Arial", textRun.getFontFamily());
    assertEquals(STTextAlignType.CTR, lv1PPr.getAlgn());
    assertEquals(TextAlign.CENTER, paragraph.getTextAlign());
    lv1PPr.setAlgn(STTextAlignType.L);
    assertEquals(TextAlign.LEFT, paragraph.getTextAlign());

    // level 2: title placeholder on the master slide
    // /p:sldMaster/p:cSld/p:spTree/p:sp/p:nvPr/p:ph[@type="title"]
    XSLFTextShape tx2 = master.getPlaceholder(0);
    CTTextParagraphProperties lv2PPr = tx2.getTextBody(true).getLstStyle().addNewLvl1PPr();
    CTTextCharacterProperties lv2CPr = lv2PPr.addNewDefRPr();
    lv2CPr.setSz(3300);
    assertEquals(33.0, textRun.getFontSize());
    lv2CPr.addNewLatin().setTypeface("Times");
    assertEquals("Times", textRun.getFontFamily());
    lv2PPr.setAlgn(STTextAlignType.R);
    assertEquals(TextAlign.RIGHT, paragraph.getTextAlign());

    // level 3: title placeholder on the slide layout
    // /p:sldLayout /p:cSld/p:spTree/p:sp/p:nvPr/p:ph[@type="ctrTitle"]
    XSLFTextShape tx3 = layout.getPlaceholder(0);
    CTTextParagraphProperties lv3PPr = tx3.getTextBody(true).getLstStyle().addNewLvl1PPr();
    CTTextCharacterProperties lv3CPr = lv3PPr.addNewDefRPr();
    lv3CPr.setSz(3400);
    assertEquals(34.0, textRun.getFontSize());
    lv3CPr.addNewLatin().setTypeface("Courier New");
    assertEquals("Courier New", textRun.getFontFamily());
    lv3PPr.setAlgn(STTextAlignType.CTR);
    assertEquals(TextAlign.CENTER, paragraph.getTextAlign());

    // level 4: default text properties in the shape itself
    // ./p:sp/p:txBody/a:lstStyle/a:lvl1pPr
    CTTextParagraphProperties lv4PPr = titleShape.getTextBody(true).getLstStyle().addNewLvl1PPr();
    CTTextCharacterProperties lv4CPr = lv4PPr.addNewDefRPr();
    lv4CPr.setSz(3500);
    assertEquals(35.0, textRun.getFontSize());
    lv4CPr.addNewLatin().setTypeface("Arial");
    assertEquals("Arial", textRun.getFontFamily());
    lv4PPr.setAlgn(STTextAlignType.L);
    assertEquals(TextAlign.LEFT, paragraph.getTextAlign());

    // level 5: text properties are defined in the text run
    CTTextParagraphProperties lv5PPr = paragraph.getXmlObject().addNewPPr();
    CTTextCharacterProperties lv5CPr = textRun.getXmlObject().getRPr();
    lv5CPr.setSz(3600);
    assertEquals(36.0, textRun.getFontSize());
    lv5CPr.addNewLatin().setTypeface("Calibri");
    assertEquals("Calibri", textRun.getFontFamily());
    lv5PPr.setAlgn(STTextAlignType.CTR);
    assertEquals(TextAlign.CENTER, paragraph.getTextAlign());
}
public void testBodyStyles(){
XMLSlideShow ppt = new XMLSlideShow();
XSLFSlideMaster master = ppt.getSlideMasters()[0];
XSLFTheme theme = master.getTheme();
XSLFSlideLayout layout = master.getLayout(SlideLayout.TITLE_AND_CONTENT);
XSLFSlide slide = ppt.createSlide(layout) ;
assertSame(layout, slide.getSlideLayout());
assertSame(master, slide.getSlideMaster());
XSLFTextShape tx1 = (XSLFTextShape)slide.getPlaceholder(1);
tx1.clearText();
XSLFTextParagraph p1 = tx1.addNewTextParagraph();
assertEquals(0, p1.getLevel());
XSLFTextRun r1 = p1.addNewTextRun();
r1.setText("Apache POI");
XSLFTextParagraph p2 = tx1.addNewTextParagraph();
p2.setLevel(1);
assertEquals(1, p2.getLevel());
XSLFTextRun r2 = p2.addNewTextRun();
r2.setText("HSLF");
XSLFTextParagraph p3 = tx1.addNewTextParagraph();
p3.setLevel(2);
assertEquals(2, p3.getLevel());
XSLFTextRun r3 = p3.addNewTextRun();
r3.setText("XSLF");
// level 1 : default title style on the master slide
// /p:sldMaster/p:txStyles/p:bodyStyle/a:lvl1pPr
CTTextParagraphProperties lv1PPr = master.getXmlObject().getTxStyles().getBodyStyle().getLvl1PPr();
CTTextCharacterProperties lv1CPr = lv1PPr.getDefRPr();
CTTextParagraphProperties lv2PPr = master.getXmlObject().getTxStyles().getBodyStyle().getLvl2PPr();
CTTextCharacterProperties lv2CPr = lv2PPr.getDefRPr();
CTTextParagraphProperties lv3PPr = master.getXmlObject().getTxStyles().getBodyStyle().getLvl3PPr();
CTTextCharacterProperties lv3CPr = lv3PPr.getDefRPr();
// lv1
assertEquals(3200, lv1CPr.getSz());
assertEquals(32.0, r1.getFontSize());
assertEquals("+mn-lt", lv1CPr.getLatin().getTypeface());
assertEquals("Calibri", theme.getMinorFont());
assertEquals("Calibri", r1.getFontFamily());
lv1CPr.setSz(3300);
assertEquals(33.0, r1.getFontSize());
lv1CPr.getLatin().setTypeface("Arial");
assertEquals("Arial", r1.getFontFamily());
assertEquals(STTextAlignType.L, lv1PPr.getAlgn());
assertEquals(TextAlign.LEFT, p1.getTextAlign());
lv1PPr.setAlgn(STTextAlignType.R);
assertEquals(TextAlign.RIGHT, p1.getTextAlign());
//lv2
assertEquals(2800, lv2CPr.getSz());
assertEquals(28.0, r2.getFontSize());
lv2CPr.setSz(3300);
assertEquals(33.0, r2.getFontSize());
lv2CPr.getLatin().setTypeface("Times");
assertEquals("Times", r2.getFontFamily());
assertEquals(STTextAlignType.L, lv2PPr.getAlgn());
assertEquals(TextAlign.LEFT, p2.getTextAlign());
lv2PPr.setAlgn(STTextAlignType.R);
assertEquals(TextAlign.RIGHT, p2.getTextAlign());
//lv3
assertEquals(2400, lv3CPr.getSz());
assertEquals(24.0, r3.getFontSize());
lv3CPr.setSz(2500);
assertEquals(25.0, r3.getFontSize());
lv3CPr.getLatin().setTypeface("Courier New");
assertEquals("Courier New", r3.getFontFamily());
assertEquals(STTextAlignType.L, lv3PPr.getAlgn());
assertEquals(TextAlign.LEFT, p3.getTextAlign());
lv3PPr.setAlgn(STTextAlignType.R);
assertEquals(TextAlign.RIGHT, p3.getTextAlign());
// level 2: body placeholder on the master slide
// /p:sldMaster/p:cSld/p:spTree/p:sp/p:nvPr/p:ph[@type="body"]
XSLFTextShape tx2 = master.getPlaceholder(1);
assertEquals(Placeholder.BODY, tx2.getTextType());
lv1PPr = tx2.getTextBody(true).getLstStyle().addNewLvl1PPr();
lv1CPr = lv1PPr.addNewDefRPr();
lv2PPr = tx2.getTextBody(true).getLstStyle().addNewLvl2PPr();
lv2CPr = lv2PPr.addNewDefRPr();
lv3PPr = tx2.getTextBody(true).getLstStyle().addNewLvl3PPr();
lv3CPr = lv3PPr.addNewDefRPr();
lv1CPr.setSz(3300);
assertEquals(33.0, r1.getFontSize());
lv1CPr.addNewLatin().setTypeface("Times");
assertEquals("Times", r1.getFontFamily());
lv1PPr.setAlgn(STTextAlignType.L);
assertEquals(TextAlign.LEFT, p1.getTextAlign());
lv2CPr.setSz(3300);
assertEquals(33.0, r2.getFontSize());
lv2CPr.addNewLatin().setTypeface("Times");
assertEquals("Times", r2.getFontFamily());
lv2PPr.setAlgn(STTextAlignType.L);
assertEquals(TextAlign.LEFT, p2.getTextAlign());
lv3CPr.setSz(3300);
assertEquals(33.0, r3.getFontSize());
lv3CPr.addNewLatin().setTypeface("Times");
assertEquals("Times", r3.getFontFamily());
lv3PPr.setAlgn(STTextAlignType.L);
assertEquals(TextAlign.LEFT, p3.getTextAlign());
// level 3: body placeholder on the slide layout
// /p:sldLayout /p:cSld/p:spTree/p:sp/p:nvPr/p:ph[@type="ctrTitle"]
XSLFTextShape tx3 = layout.getPlaceholder(1);
assertEquals(Placeholder.BODY, tx2.getTextType());
lv1PPr = tx3.getTextBody(true).getLstStyle().addNewLvl1PPr();
lv1CPr = lv1PPr.addNewDefRPr();
lv2PPr = tx3.getTextBody(true).getLstStyle().addNewLvl2PPr();
lv2CPr = lv2PPr.addNewDefRPr();
lv3PPr = tx3.getTextBody(true).getLstStyle().addNewLvl3PPr();
lv3CPr = lv3PPr.addNewDefRPr();
lv1CPr.setSz(3400);
assertEquals(34.0, r1.getFontSize());
lv1CPr.addNewLatin().setTypeface("Courier New");
assertEquals("Courier New", r1.getFontFamily());
lv1PPr.setAlgn(STTextAlignType.CTR);
assertEquals(TextAlign.CENTER, p1.getTextAlign());
lv2CPr.setSz(3400);
assertEquals(34.0, r2.getFontSize());
lv2CPr.addNewLatin().setTypeface("Courier New");
assertEquals("Courier New", r2.getFontFamily());
lv2PPr.setAlgn(STTextAlignType.CTR);
assertEquals(TextAlign.CENTER, p2.getTextAlign());
lv3CPr.setSz(3400);
assertEquals(34.0, r3.getFontSize());
lv3CPr.addNewLatin().setTypeface("Courier New");
assertEquals("Courier New", r3.getFontFamily());
lv3PPr.setAlgn(STTextAlignType.CTR);
assertEquals(TextAlign.CENTER, p3.getTextAlign());
// level 4: default text properties in the shape itself
// ./p:sp/p:txBody/a:lstStyle/a:lvl1pPr
lv1PPr = tx1.getTextBody(true).getLstStyle().addNewLvl1PPr();
lv1CPr = lv1PPr.addNewDefRPr();
lv2PPr = tx1.getTextBody(true).getLstStyle().addNewLvl2PPr();
lv2CPr = lv2PPr.addNewDefRPr();
lv3PPr = tx1.getTextBody(true).getLstStyle().addNewLvl3PPr();
lv3CPr = lv3PPr.addNewDefRPr();
lv1CPr.setSz(3500);
assertEquals(35.0, r1.getFontSize());
lv1CPr.addNewLatin().setTypeface("Arial");
assertEquals("Arial", r1.getFontFamily());
lv1PPr.setAlgn(STTextAlignType.L);
assertEquals(TextAlign.LEFT, p1.getTextAlign());
lv2CPr.setSz(3500);
assertEquals(35.0, r2.getFontSize());
lv2CPr.addNewLatin().setTypeface("Arial");
assertEquals("Arial", r2.getFontFamily());
lv2PPr.setAlgn(STTextAlignType.L);
assertEquals(TextAlign.LEFT, p2.getTextAlign());
lv3CPr.setSz(3500);
assertEquals(35.0, r3.getFontSize());
lv3CPr.addNewLatin().setTypeface("Arial");
assertEquals("Arial", r3.getFontFamily());
lv3PPr.setAlgn(STTextAlignType.L);
assertEquals(TextAlign.LEFT, p3.getTextAlign());
// level 5: text properties are defined in the text run
lv1PPr = p1.getXmlObject().isSetPPr() ? p1.getXmlObject().getPPr() : p1.getXmlObject().addNewPPr();
lv1CPr = r1.getXmlObject().getRPr();
lv2PPr = p2.getXmlObject().isSetPPr() ? p2.getXmlObject().getPPr() : p2.getXmlObject().addNewPPr();
lv2CPr = r2.getXmlObject().getRPr();
lv3PPr = p3.getXmlObject().isSetPPr() ? p3.getXmlObject().getPPr() : p3.getXmlObject().addNewPPr();
lv3CPr = r3.getXmlObject().getRPr();
lv1CPr.setSz(3600);
assertEquals(36.0, r1.getFontSize());
lv1CPr.addNewLatin().setTypeface("Calibri");
assertEquals("Calibri", r1.getFontFamily());
lv1PPr.setAlgn(STTextAlignType.CTR);
assertEquals(TextAlign.CENTER, p1.getTextAlign());
lv2CPr.setSz(3600);
assertEquals(36.0, r2.getFontSize());
lv2CPr.addNewLatin().setTypeface("Calibri");
assertEquals("Calibri", r2.getFontFamily());
lv2PPr.setAlgn(STTextAlignType.CTR);
assertEquals(TextAlign.CENTER, p2.getTextAlign());
lv3CPr.setSz(3600);
assertEquals(36.0, r3.getFontSize());
lv3CPr.addNewLatin().setTypeface("Calibri");
assertEquals("Calibri", r3.getFontFamily());
lv3PPr.setAlgn(STTextAlignType.CTR);
assertEquals(TextAlign.CENTER, p3.getTextAlign());
}
}
|
Java
|
Apache-2.0
|
BBN-D/poi/src/ooxml/testcases/org/apache/poi/xslf/usermodel/TestXSLFTextShape.java
|
01f02a1b-74e2-43f2-b202-e9f6cb285019
|
[{"tag": "NAME", "value": "Yegor Kozlov\r", "start": 1593, "end": 1606, "context": "ype;\r\n\r\nimport java.awt.Color;\r\n\r\n/**\r\n * @author Yegor Kozlov\r\n */\r\npublic class TestXSLFTextShape extends TestC"}]
|
[{"tag": "NAME", "value": "Yegor Kozlov\r", "start": 1593, "end": 1606, "context": "ype;\r\n\r\nimport java.awt.Color;\r\n\r\n/**\r\n * @author Yegor Kozlov\r\n */\r\npublic class TestXSLFTextShape extends TestC"}]
|
/*
* Generated by the Jasper component of Apache Tomcat
* Version: Apache Tomcat/8.5.35
* Generated at: 2019-01-16 02:42:16 UTC
* Note: The last modified time of this file was set to
* the last modified time of the source file after
* generation to assist with modification tracking.
*/
package org.apache.jsp.html;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
public final class copyright_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent,
org.apache.jasper.runtime.JspSourceImports {
private static final javax.servlet.jsp.JspFactory _jspxFactory =
javax.servlet.jsp.JspFactory.getDefaultFactory();
private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants;
static {
_jspx_dependants = new java.util.HashMap<java.lang.String,java.lang.Long>(2);
_jspx_dependants.put("jar:file:/C:/Users/Ienovo/Desktop/server/webapps/ROOT/WEB-INF/lib/standard-1.1.2.jar!/META-INF/c.tld", Long.valueOf(1098682290000L));
_jspx_dependants.put("/WEB-INF/lib/standard-1.1.2.jar", Long.valueOf(1546872448000L));
}
private static final java.util.Set<java.lang.String> _jspx_imports_packages;
private static final java.util.Set<java.lang.String> _jspx_imports_classes;
static {
_jspx_imports_packages = new java.util.HashSet<>();
_jspx_imports_packages.add("javax.servlet");
_jspx_imports_packages.add("javax.servlet.http");
_jspx_imports_packages.add("javax.servlet.jsp");
_jspx_imports_classes = null;
}
private volatile javax.el.ExpressionFactory _el_expressionfactory;
private volatile org.apache.tomcat.InstanceManager _jsp_instancemanager;
public java.util.Map<java.lang.String,java.lang.Long> getDependants() {
return _jspx_dependants;
}
public java.util.Set<java.lang.String> getPackageImports() {
return _jspx_imports_packages;
}
public java.util.Set<java.lang.String> getClassImports() {
return _jspx_imports_classes;
}
public javax.el.ExpressionFactory _jsp_getExpressionFactory() {
if (_el_expressionfactory == null) {
synchronized (this) {
if (_el_expressionfactory == null) {
_el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
}
}
}
return _el_expressionfactory;
}
public org.apache.tomcat.InstanceManager _jsp_getInstanceManager() {
if (_jsp_instancemanager == null) {
synchronized (this) {
if (_jsp_instancemanager == null) {
_jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig());
}
}
}
return _jsp_instancemanager;
}
public void _jspInit() {
}
public void _jspDestroy() {
}
public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response)
throws java.io.IOException, javax.servlet.ServletException {
final java.lang.String _jspx_method = request.getMethod();
if (!"GET".equals(_jspx_method) && !"POST".equals(_jspx_method) && !"HEAD".equals(_jspx_method) && !javax.servlet.DispatcherType.ERROR.equals(request.getDispatcherType())) {
response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "JSPs only permit GET POST or HEAD");
return;
}
final javax.servlet.jsp.PageContext pageContext;
javax.servlet.http.HttpSession session = null;
final javax.servlet.ServletContext application;
final javax.servlet.ServletConfig config;
javax.servlet.jsp.JspWriter out = null;
final java.lang.Object page = this;
javax.servlet.jsp.JspWriter _jspx_out = null;
javax.servlet.jsp.PageContext _jspx_page_context = null;
try {
response.setContentType("text/html;charset=UTF-8");
pageContext = _jspxFactory.getPageContext(this, request, response,
null, true, 8192, true);
_jspx_page_context = pageContext;
application = pageContext.getServletContext();
config = pageContext.getServletConfig();
session = pageContext.getSession();
out = pageContext.getOut();
_jspx_out = out;
out.write('\r');
out.write('\n');
out.write("\r\n");
out.write("\r\n");
out.write("<html>\r\n");
out.write("<head>\r\n");
out.write(" <title>Title</title>\r\n");
out.write(" <script>\r\n");
out.write(" function extend(num){\r\n");
out.write("\r\n");
out.write(" if(num==1)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('first');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==2)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('second');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==3)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('third');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==4)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('fourth');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==5)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('fifth');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==6){\r\n");
out.write(" var obj = document.getElementById('sixth');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==7){\r\n");
out.write(" var obj = document.getElementById('seven');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write(" else{\r\n");
out.write(" var obj = document.getElementById('eighth');\r\n");
out.write(" obj.style.display = \"block\";\r\n");
out.write(" }\r\n");
out.write("\r\n");
out.write(" }\r\n");
out.write("\r\n");
out.write(" function out(num){\r\n");
out.write(" if(num==1)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('first');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==2)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('second');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==3)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('third');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==4)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('fourth');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==5)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('fifth');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==6)\r\n");
out.write(" {\r\n");
out.write(" var obj = document.getElementById('sixth');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" else if(num==7){\r\n");
out.write(" var obj = document.getElementById('seven');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" else{\r\n");
out.write(" var obj = document.getElementById('eighth');\r\n");
out.write(" obj.style.display = \"none\";\r\n");
out.write(" }\r\n");
out.write(" }\r\n");
out.write(" </script>\r\n");
out.write("</head>\r\n");
out.write("\r\n");
out.write("<body>\r\n");
out.write("<div class=\"mainWrap\">\r\n");
out.write(" <div class=\"link\">\r\n");
out.write(" <ul>\r\n");
out.write(" <li>友情链接</li>\r\n");
out.write(" <li><a href=\"http://www.dgut.edu.cn\">东莞理工学院</a></li>\r\n");
out.write(" <li><a href=\"http://jwc.dgut.edu.cn\">东莞理工学院教务处</a></li>\r\n");
out.write(" <li><a href=\"http://sw.dgut.edu.cn\">东莞理工学院计算机与网络安全学院</a></li>\r\n");
out.write(" </ul>\r\n");
out.write(" </div>\r\n");
out.write(" <div class=\"final\">\r\n");
out.write(" <ul>\r\n");
out.write(" <li class=\"left\">© 2018,UML Group 2</li>\r\n");
out.write(" <li class=\"right\">联系我们</li>\r\n");
out.write(" </ul>\r\n");
out.write(" </div>\r\n");
out.write("</div>\r\n");
out.write("</body>\r\n");
out.write("</html>\r\n");
} catch (java.lang.Throwable t) {
if (!(t instanceof javax.servlet.jsp.SkipPageException)){
out = _jspx_out;
if (out != null && out.getBufferSize() != 0)
try {
if (response.isCommitted()) {
out.flush();
} else {
out.clearBuffer();
}
} catch (java.io.IOException e) {}
if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
else throw new ServletException(t);
}
} finally {
_jspxFactory.releasePageContext(_jspx_page_context);
}
}
}
|
Java
|
MIT
|
Scurt123/DearHouAimin/server/work/Catalina/localhost/ROOT/org/apache/jsp/html/copyright_jsp.java
|
042f0b51-c7c6-41c8-90c4-7bd2357e9576
|
[]
|
[]
|
from sqlalchemy.sql import func as fn
from sqlalchemy import sql
from ..translate import (
SqlTranslator,
extend_base,
sql_scalar,
sql_agg,
win_agg,
win_cumul,
annotate
)
#from .postgresql import PostgresqlColumn, PostgresqlColumnAgg
from .base import SqlColumn, SqlColumnAgg
from . import _dt_generics as _dt
# Data ----
class SnowflakeColumn(SqlColumn): pass
class SnowflakeColumnAgg(SqlColumnAgg, SnowflakeColumn): pass
# Translations ================================================================
@_dt.sql_func_last_day_in_period.register
def sql_func_last_day_in_period(codata: SnowflakeColumn, col, period):
return _dt.date_trunc(codata, col, period) \
+ sql.text("interval '1 %s'" % period) \
- sql.text("interval '1 day'")
# Scalar ----
extend_base(
SnowflakeColumn,
__floordiv__ = lambda _, x, y: fn.floor(x / y),
__rfloordiv__ = lambda _, x, y: fn.floor(y / x),
# connector has a bug with %
# see: https://github.com/snowflakedb/snowflake-sqlalchemy/issues/246
__mod__ = lambda _, x, y: fn.mod(x, y),
__rmod__ = lambda _, x, y: fn.mod(y, x),
mod = lambda _, x, y: fn.mod(x,y),
rmod = lambda _, x, y: fn.mod(y,x),
# TODO: str.contains
)
# Window ----
extend_base(
SnowflakeColumn,
all = win_agg("booland_agg"),
any = win_agg("boolor_agg"),
count = win_agg("count"),
cumsum = annotate(win_cumul("sum"), result_type="variable"),
# note that the number of decimal places Snowflake returns, and whether
# the result is numeric depends on the input. mark as variable, so tests
# do not check dtype
# see https://community.snowflake.com/s/question/0D50Z000079hpxvSAA/numeric-calculations-truncated-to-3-decimal-places
mean = annotate(win_agg("avg"), result_type="variable"),
std = win_agg("stddev_samp"),
sum = annotate(win_agg("sum"), result_type="variable"),
var = win_agg("var_samp"),
# str.contains
# dt methods are more like base
)
# Agg ----
extend_base(
SnowflakeColumnAgg,
all = sql_agg("booland_agg"),
any = sql_agg("boolor_agg"),
count = sql_agg("count"),
std = sql_agg("stddev_samp"),
var = sql_agg("var_samp"),
)
translator = SqlTranslator.from_mappings(
SnowflakeColumn, SnowflakeColumnAgg
)
|
Python
|
MIT
|
Techzune/siuba/siuba/sql/dialects/snowflake.py
|
0f9bde6f-b333-4e9f-a115-2c36cb93a1e4
|
[]
|
[]
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Nettbutikk.Model;
namespace DAL.Account
{
// For testing
public class AccountRepoStub : IAccountRepo
{
public bool AddPerson(PersonModel person, Role role, string password)
{
if (person.Email == "")
return false;
return true;
}
public bool DeletePerson(int personId)
{
if (personId == 0)
return false;
return true;
}
public AdminModel GetAdmin(int adminId)
{
if (adminId == 0)
{
var admin = new AdminModel()
{
Email=""
};
return admin;
}
else
{
var admin = new AdminModel()
{
Email = "ole@gmail.com",
Firstname = "Ole",
Lastname = "Olsen",
Address = "Persveien 5",
Zipcode = "0123",
City = "Oslo",
AdminId = 1
};
return admin;
}
}
public List<PersonModel> GetAllPeople()
{
var list = new List<PersonModel>();
var person = new PersonModel()
{
Email = "ole@gmail.com",
Firstname = "Ole",
Lastname = "Olsen",
Address = "Persveien 5",
Zipcode = "0123",
City = "Oslo"
};
list.Add(person);
list.Add(person);
list.Add(person);
return list;
}
public PersonModel GetPerson(string email)
{
if (email == "")
{
var person = new PersonModel()
{
Email=""
};
return person;
}
else
{
var person = new PersonModel()
{
Email = "ole@gmail.com",
Firstname = "Ole",
Lastname = "Olsen",
Address = "Persveien 5",
Zipcode = "0123",
City = "Oslo"
};
return person;
}
}
public bool UpdatePerson(PersonModel personUpdate, int personId)
{
if (personId == 0)
return false;
return true;
}
public bool AttemptLogin(int personId, string password)
{
if (personId == 0)
return false;
if (password == "")
return false;
return true;
}
public bool AttemptLogin(string email, string password)
{
if (email == "")
return false;
if (password == "")
return false;
return true;
}
private byte[] CreateHash(string password)
{
throw new NotImplementedException();
}
public CustomerModel GetCustomer(string email)
{
throw new NotImplementedException();
}
public int GetPersonId(string email)
{
throw new NotImplementedException();
}
public bool ChangePassword(int personId, string newPassword)
{
throw new NotImplementedException();
}
public bool ChangePassword(string email, string newPassword)
{
throw new NotImplementedException();
}
public AdminModel GetAdmin(string email)
{
return new AdminModel { AdminId = 1, Email = email};
}
public bool UpdatePerson(PersonModel personUpdate, string email)
{
if (email == "")
return false;
return true;
}
public bool DeletePerson(string email)
{
throw new NotImplementedException();
}
public bool CreateCredentials(string email, string password)
{
throw new NotImplementedException();
}
public bool SetRole(string email, Role role, bool isRole)
{
throw new NotImplementedException();
}
public bool isAdmin(string email)
{
throw new NotImplementedException();
}
}
}
|
C#
|
Unlicense
|
maglunde/Nettbutikk/DAL/Account/AccountRepoStub.cs
|
b5f284f4-e6dd-495f-b1aa-f5c2bd9f0390
|
[{"tag": "EMAIL", "value": "ole@gmail.com", "start": 965, "end": 978, "context": "()\n {\n Email = \"ole@gmail.com\",\n Firstname = \"Ole\",\n "}, {"tag": "NAME", "value": "Ole", "start": 1014, "end": 1017, "context": "\"ole@gmail.com\",\n Firstname = \"Ole\",\n Lastname = \"Olsen\",\n "}, {"tag": "NAME", "value": "Ole", "start": 2252, "end": 2255, "context": "\"ole@gmail.com\",\n Firstname = \"Ole\",\n Lastname = \"Olsen\",\n "}, {"tag": "NAME", "value": "Olsen", "start": 1052, "end": 1057, "context": "irstname = \"Ole\",\n Lastname = \"Olsen\",\n Address = \"Persveien 5\",\n "}, {"tag": "NAME", "value": "Ole", "start": 1517, "end": 1520, "context": "l = \"ole@gmail.com\",\n Firstname = \"Ole\",\n Lastname = \"Olsen\",\n "}, {"tag": "NAME", "value": "Olsen", "start": 2290, "end": 2295, "context": "irstname = \"Ole\",\n Lastname = \"Olsen\",\n Address = \"Persveien 5\",\n "}, {"tag": "EMAIL", "value": "ole@gmail.com", "start": 1472, "end": 1485, "context": "sonModel()\n {\n Email = \"ole@gmail.com\",\n Firstname = \"Ole\",\n "}, {"tag": "NAME", "value": "Olsen", "start": 1551, "end": 1556, "context": " Firstname = \"Ole\",\n Lastname = \"Olsen\",\n Address = \"Persveien 5\",\n "}, {"tag": "EMAIL", "value": "ole@gmail.com", "start": 2203, "end": 2216, "context": "()\n {\n Email = \"ole@gmail.com\",\n Firstname = \"Ole\",\n "}]
|
[{"tag": "EMAIL", "value": "ole@gmail.com", "start": 965, "end": 978, "context": "()\n {\n Email = \"ole@gmail.com\",\n Firstname = \"Ole\",\n "}, {"tag": "NAME", "value": "Ole", "start": 1014, "end": 1017, "context": "\"ole@gmail.com\",\n Firstname = \"Ole\",\n Lastname = \"Olsen\",\n "}, {"tag": "NAME", "value": "Ole", "start": 2252, "end": 2255, "context": "\"ole@gmail.com\",\n Firstname = \"Ole\",\n Lastname = \"Olsen\",\n "}, {"tag": "NAME", "value": "Olsen", "start": 1052, "end": 1057, "context": "irstname = \"Ole\",\n Lastname = \"Olsen\",\n Address = \"Persveien 5\",\n "}, {"tag": "NAME", "value": "Ole", "start": 1517, "end": 1520, "context": "l = \"ole@gmail.com\",\n Firstname = \"Ole\",\n Lastname = \"Olsen\",\n "}, {"tag": "NAME", "value": "Olsen", "start": 2290, "end": 2295, "context": "irstname = \"Ole\",\n Lastname = \"Olsen\",\n Address = \"Persveien 5\",\n "}, {"tag": "EMAIL", "value": "ole@gmail.com", "start": 1472, "end": 1485, "context": "sonModel()\n {\n Email = \"ole@gmail.com\",\n Firstname = \"Ole\",\n "}, {"tag": "NAME", "value": "Olsen", "start": 1551, "end": 1556, "context": " Firstname = \"Ole\",\n Lastname = \"Olsen\",\n Address = \"Persveien 5\",\n "}, {"tag": "EMAIL", "value": "ole@gmail.com", "start": 2203, "end": 2216, "context": "()\n {\n Email = \"ole@gmail.com\",\n Firstname = \"Ole\",\n "}]
|
/* ---------------------------------------------------------------- *
Copyright (c) 2018 Kuu
Antti Jumpponen <kuumies@gmail.com>
The implementation of kuu::jpad::Settings struct.
* ---------------------------------------------------------------- */
#include "settings.h"
#include <QtCore/QDebug>
#include <QtCore/QSettings>
namespace kuu
{
namespace jpad
{
namespace
{
/* ---------------------------------------------------------------- *
Definitions
* ---------------------------------------------------------------- */
struct Strings
{
static const QString Organization;
static const QString Application;
};
const QString Strings::Organization = "kuu";
const QString Strings::Application = "dpad";
} // anonymous namespace
/* ---------------------------------------------------------------- *
Loads the settings from file or uses the defaults if no
file on the disk.
* ---------------------------------------------------------------- */
Settings::Settings()
{
QSettings settings(Strings::Organization, Strings::Application);
if (settings.allKeys().isEmpty())
return;
font.family = settings.value("font/family",
font.family).toString();
font.size = settings.value("font/size",
font.size).toInt();
font.antialias = settings.value("font/antialias",
font.antialias).toBool();
textBuffer.path = settings.value("text_buffer/path",
textBuffer.path).toString();
textBuffer.temp = settings.value("text_buffer/temp",
textBuffer.temp).toBool();
}
/* ---------------------------------------------------------------- *
Saves the settings into file.
* ---------------------------------------------------------------- */
Settings::~Settings()
{
QSettings settings(Strings::Organization, Strings::Application);
settings.setValue("font/family", font.family);
settings.setValue("font/size", font.size);
settings.setValue("font/antialias", font.antialias);
settings.setValue("text_buffer/path", textBuffer.path);
settings.setValue("text_buffer/temp", textBuffer.temp);
}
/* ---------------------------------------------------------------- *
Constructs the default font.
* ---------------------------------------------------------------- */
Settings::Font::Font()
: family("Arial")
, size(14)
, antialias(true)
{}
/* ---------------------------------------------------------------- *
Returns font as Qt font.
* ---------------------------------------------------------------- */
QFont Settings::Font::toQFont() const
{
return QFont(family, size);
}
/* ---------------------------------------------------------------- *
Constructs the default text buffer.
* ---------------------------------------------------------------- */
Settings::TextBuffer::TextBuffer()
: temp(true)
{}
} // namespace jpad
} // namespace kuu
|
C++
|
MIT
|
kuumies/jpad/settings.cpp
|
d313a8db-eed9-430b-ba8c-10848066700e
|
[{"tag": "NAME", "value": "Antti Jumpponen", "start": 99, "end": 114, "context": "------------------ *\n Copyright (c) 2018 Kuu\n Antti Jumpponen <kuumies@gmail.com>\n\n The implementation of kuu"}, {"tag": "EMAIL", "value": "kuumies@gmail.com", "start": 116, "end": 133, "context": "- *\n Copyright (c) 2018 Kuu\n Antti Jumpponen <kuumies@gmail.com>\n\n The implementation of kuu::jpad::Settings st"}]
|
[{"tag": "NAME", "value": "Antti Jumpponen", "start": 99, "end": 114, "context": "------------------ *\n Copyright (c) 2018 Kuu\n Antti Jumpponen <kuumies@gmail.com>\n\n The implementation of kuu"}, {"tag": "EMAIL", "value": "kuumies@gmail.com", "start": 116, "end": 133, "context": "- *\n Copyright (c) 2018 Kuu\n Antti Jumpponen <kuumies@gmail.com>\n\n The implementation of kuu::jpad::Settings st"}]
|
import * as React from 'react';
import createSvgIcon from '../utils/createSvgIcon';
const BlogIcon = createSvgIcon({
svg: ({ classes }) => (
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 2048 2048" className={classes.svg}>
<path d="M2048 0v1536H731l-475 475v-475H0V0h2048zM128 128v256h1792V128H128zm1792 1280V512H128v896h256v293l293-293h1243zm-640-768h512v640h-512V640zm128 512h256V768h-256v384zM256 768h896v128H256V768zm0 256h896v128H256v-128z" />
</svg>
),
displayName: 'BlogIcon',
});
export default BlogIcon;
|
TypeScript
|
MIT
|
17jlee2/fluentui/packages/react-icons/src/components/BlogIcon.tsx
|
ba90e2c3-01db-4d20-94d5-647f7a976568
|
[]
|
[]
|
"""
Test command line commands.
"""
from pathlib import Path
from subprocess import PIPE, Popen
__author__ = "Sergey Vartanov"
__email__ = "me@enzet.ru"
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
from map_machine.ui.cli import COMMAND_LINES
LOG: bytes = (
b"INFO Constructing ways...\n"
b"INFO Constructing nodes...\n"
b"INFO Drawing ways...\n"
b"INFO Drawing main icons...\n"
b"INFO Drawing extra icons...\n"
b"INFO Drawing texts...\n"
)
def error_run(arguments: list[str], message: bytes) -> None:
"""Run command that should fail and check error message."""
with Popen(["map-machine"] + arguments, stderr=PIPE) as pipe:
_, error = pipe.communicate()
assert pipe.returncode != 0
assert error == message
def run(arguments: list[str], message: bytes) -> None:
"""Run command that should fail and check error message."""
with Popen(["map-machine"] + arguments, stderr=PIPE) as pipe:
_, error = pipe.communicate()
assert pipe.returncode == 0
assert error == message
def test_wrong_render_arguments() -> None:
"""Test `render` command with wrong arguments."""
error_run(
["render", "-z", "17"],
b"CRITICAL Specify either --input, or --boundary-box, or --coordinates "
b"and --size.\n",
)
def test_render() -> None:
"""Test `render` command."""
run(
COMMAND_LINES["render"] + ["--cache", "tests/data"],
LOG + b"INFO Writing output SVG to out/map.svg...\n",
)
with Path("out/map.svg").open(encoding="utf-8") as output_file:
root: Element = ElementTree.parse(output_file).getroot()
# 4 expected elements: `defs`, `rect` (background), `g` (outline),
# `g` (icon), 4 `text` elements (credits).
assert len(root) == 8
assert len(root[3][0]) == 0
assert root.get("width") == "186.0"
assert root.get("height") == "198.0"
def test_render_with_tooltips() -> None:
"""Test `render` command."""
run(
COMMAND_LINES["render_with_tooltips"] + ["--cache", "tests/data"],
LOG + b"INFO Writing output SVG to out/map.svg...\n",
)
with Path("out/map.svg").open(encoding="utf-8") as output_file:
root: Element = ElementTree.parse(output_file).getroot()
# 4 expected elements: `defs`, `rect` (background), `g` (outline),
# `g` (icon), 4 `text` elements (credits).
assert len(root) == 8
assert len(root[3][0]) == 1
assert root[3][0][0].text == "natural: tree"
assert root.get("width") == "186.0"
assert root.get("height") == "198.0"
def test_icons() -> None:
"""Test `icons` command."""
run(
COMMAND_LINES["icons"],
b"INFO Icons are written to out/icons_by_name and out/icons_by_id.\n"
b"INFO Icon grid is written to out/icon_grid.svg.\n"
b"INFO Icon grid is written to doc/grid.svg.\n",
)
assert (Path("out") / "icon_grid.svg").is_file()
assert (Path("out") / "icons_by_name").is_dir()
assert (Path("out") / "icons_by_id").is_dir()
assert (Path("out") / "icons_by_name" / "Röntgen apple.svg").is_file()
assert (Path("out") / "icons_by_id" / "apple.svg").is_file()
def test_mapcss() -> None:
"""Test `mapcss` command."""
run(
COMMAND_LINES["mapcss"],
b"INFO MapCSS 0.2 scheme is written to out/map_machine_mapcss.\n",
)
assert (Path("out") / "map_machine_mapcss").is_dir()
assert (Path("out") / "map_machine_mapcss" / "icons").is_dir()
assert (
Path("out") / "map_machine_mapcss" / "icons" / "apple.svg"
).is_file()
assert (Path("out") / "map_machine_mapcss" / "map_machine.mapcss").is_file()
def test_element() -> None:
"""Test `element` command."""
run(
COMMAND_LINES["element"],
b"INFO Element is written to out/element.svg.\n",
)
assert (Path("out") / "element.svg").is_file()
def test_tile() -> None:
"""Test `tile` command."""
run(
COMMAND_LINES["tile"] + ["--cache", "tests/data"],
LOG + b"INFO Tile is drawn to out/tiles/tile_18_160199_88904.svg.\n"
b"INFO SVG file is rasterized to out/tiles/tile_18_160199_88904.png.\n",
)
assert (Path("out") / "tiles" / "tile_18_160199_88904.svg").is_file()
assert (Path("out") / "tiles" / "tile_18_160199_88904.png").is_file()
|
Python
|
MIT
|
LaoshuBaby/map-machine/tests/test_command_line.py
|
3c9ae9b4-c18f-40f7-ba1a-8454665677f5
|
[{"tag": "NAME", "value": "Sergey Vartanov", "start": 111, "end": 126, "context": "from subprocess import PIPE, Popen\n\n__author__ = \"Sergey Vartanov\"\n__email__ = \"me@enzet.ru\"\n\nfrom xml.etree import"}, {"tag": "NAME", "value": "me@enzet.ru", "start": 141, "end": 152, "context": "open\n\n__author__ = \"Sergey Vartanov\"\n__email__ = \"me@enzet.ru\"\n\nfrom xml.etree import ElementTree\nfrom xml.etre"}]
|
[{"tag": "NAME", "value": "Sergey Vartanov", "start": 111, "end": 126, "context": "from subprocess import PIPE, Popen\n\n__author__ = \"Sergey Vartanov\"\n__email__ = \"me@enzet.ru\"\n\nfrom xml.etree import"}, {"tag": "NAME", "value": "me@enzet.ru", "start": 141, "end": 152, "context": "open\n\n__author__ = \"Sergey Vartanov\"\n__email__ = \"me@enzet.ru\"\n\nfrom xml.etree import ElementTree\nfrom xml.etre"}]
|
<?php
namespace app\common\model;
use think\Db;
use think\Model;
class Api extends Model
{
/*
* 获取地区
*/
public function getRegion()
{
$parent_id = I('get.parent_id/d');
$selected = I('get.selected', 0);
$data = M('region')->where("parent_id", $parent_id)->select();
$html = '';
if ($data) {
foreach ($data as $h) {
if ($h['id'] == $selected) {
$html .= "<option value='{$h['id']}' selected>{$h['name']}</option>";
}
$html .= "<option value='{$h['id']}'>{$h['name']}</option>";
}
}
echo $html;
}
public function getTwon()
{
$parent_id = I('get.parent_id/d');
$data = M('region')->where("parent_id", $parent_id)->select();
$html = '';
if ($data) {
foreach ($data as $h) {
$html .= "<option value='{$h['id']}'>{$h['name']}</option>";
}
}
if (empty($html)) {
echo '0';
} else {
echo $html;
}
}
/**
* 获取省
*/
public function getProvince()
{
$province = Db::name('region')->field('id,name')->where(array('level' => 1))->cache(true)->select();
$res = array('status' => 1, 'msg' => '获取成功', 'result' => $province);
exit(json_encode($res));
}
public function area()
{
$province_id = input('province_id/d');
$city_id = input('city_id/d');
$district_id = input('district_id/d');
$province_list = Db::name('region')->field('id,name')->where('level', 1)->cache(true)->select();
$city_list = Db::name('region')->field('id,name')->where('parent_id', $province_id)->cache(true)->select();
$district_list = Db::name('region')->field('id,name')->where('parent_id', $city_id)->cache(true)->select();
$town_list = Db::name('region')->field('id,name')->where('parent_id', $district_id)->cache(true)->select();
$this->ajaxReturn(['status' => 1, 'msg' => '获取成功',
'result' => ['province_list' => $province_list, 'city_list' => $city_list, 'district_list' => $district_list, 'town_list' => $town_list]]);
}
/**
* 获取市或者区
*/
public function getRegionByParentId()
{
$parent_id = input('parent_id');
$res = array('status' => 0, 'msg' => '获取失败,参数错误', 'result' => '');
if ($parent_id) {
$region_list = Db::name('region')->field('id,name')->where(['parent_id' => $parent_id])->select();
$res = array('status' => 1, 'msg' => '获取成功', 'result' => $region_list);
}
exit(json_encode($res));
}
/*
* 获取下级分类
*/
public function get_category()
{
$parent_id = I('get.parent_id/d'); // 商品分类 父id
$list = M('goods_category')->where("parent_id", $parent_id)->select();
if ($list) {
$this->ajaxReturn(['status' => 1, 'msg' => '获取成功!', 'result' => $list]);
}
$this->ajaxReturn(['status' => -1, 'msg' => '获取失败!', 'result' =>[]]);
}
/**
* 前端发送短信方法: /WAP/PC 共用发送方法
*/
public function send_validate_code()
{
$res = $this->private_send_validate_code();
ajaxReturn($res);
}
/**
* 前端发送短信方法: APP/WAP/PC 共用发送方法
*/
public function app_send_validate_code()
{
$res = $this->private_send_validate_code('app');
if($res['status'] == 1){
$this->ajaxReturn(['status' => 0 , 'msg'=>$res['msg'],'data'=>null]);
}else
$this->ajaxReturn(['status' => $res['status'] , 'msg'=>$res['msg'],'data'=>null]);
}
/**
* 验证短信验证码: APP/WAP/PC 共用发送方法
*/
public function check_validate_code()
{
$code = I('post.code');
$mobile = I('mobile');
$send = I('send');
$sender = empty($mobile) ? $send : $mobile;
$type = I('type');
$session_id = I('unique_id', session_id());
$scene = I('scene', -1);
$logic = new UsersLogic();
$res = $logic->check_validate_code($code, $sender, $type, $session_id, $scene);
ajaxReturn($res);
}
/**
* 检测手机号是否已经存在
*/
public function issetMobile()
{
$mobile = I("get.mobile");
$users = M('users')->where('mobile', $mobile)->find();
if ($users)
exit ('1');
else
exit ('0');
}
public function issetMobileOrEmail()
{
$mobile = I("mobile", '0');
$users = M('users')->where("email", $mobile)->whereOr('mobile', $mobile)->find();
if ($users)
exit ('1');
else
exit ('0');
}
/**
* 查询物流
*/
public function queryExpress($shipping_code, $invoice_no)
{
// $shipping_code = I('shipping_code');
// $invoice_no = I('invoice_no');
//判断变量是否为空
if((!$shipping_code) or (!$invoice_no)){
return ['status' => -1, 'message' => '参数有误', 'result' => ''];
}
//快递公司转换
switch ($shipping_code) {
case 'YD':
$shipping_code = 'YUNDA';
break;
case 'shunfeng':
$shipping_code = 'SFEXPRESS';
break;
case 'YZPY':
$shipping_code = 'CHINAPOST';
break;
case 'YTO':
$shipping_code = 'YTO';
break;
case 'ZTO':
$shipping_code = 'ZTO';
break;
default:
$shipping_code = '';
break;
}
$condition = array(
'shipping_code' => $shipping_code,
'invoice_no' => $invoice_no,
);
$is_exists = Db::name('delivery_express')->where($condition)->find();
//判断物流记录表是否已有记录,没有则去请求新数据
if($is_exists){
$result = unserialize($is_exists['result']);
//1为订单签收状态,订1单已经签收,已签收则不去请求新数据
if($is_exists['issign'] == 1){
return $result;
}
$pre_time = time();
$flag_time = (int)$is_exists['update_time'];
$space_time = $pre_time - $flag_time;
//请求状态正常的数据请求时间间隔小于两小时则不请求新数据
//其他数据请求时间间隔小于半小时则不请求新数据
if($result['status'] == 0){
if($space_time < 7200){
return $result;
}
}else{
if($space_time < 1800){
return $result;
}
}
$result = $this->getDelivery($shipping_code, $invoice_no);
print_r($result);die;
$result = json_decode($result, true);
//更新表数据
$flag = $this->updateData($result, $is_exists['id']);
return $result;
}else{
$result = $this->getDelivery($shipping_code, $invoice_no);
$result = json_decode($result, true);
$flag = $this->insertData($result, $shipping_code, $invoice_no);
return $result;
}
// $express_switch = tpCache('express.express_switch');
// $express_switch_input = input('express_switch/d');
// $express_switch = is_null($express_switch_input) ? $express_switch : $express_switch_input;
// if ($express_switch == 1) {
// require_once(PLUGIN_PATH . 'kdniao/kdniao.php');
// $kdniao = new \kdniao();
// $data['OrderCode'] = empty(I('order_sn')) ? date('YmdHis') : I('order_sn');
// $data['ShipperCode'] = I('shipping_code');
// $data['LogisticCode'] = I('invoice_no');
// $res = $kdniao->getOrderTracesByJson(json_encode($data));
// $res = json_decode($res, true);
// if ($res['State'] == 3) {
// foreach ($res['Traces'] as $val) {
// $tmp['context'] = $val['AcceptStation'];
// $tmp['time'] = $val['AcceptTime'];
// $res['data'][] = $tmp;
// }
// $res['status'] = "200";
// } else {
// $res['message'] = $res['Reason'];
// }
// return json($res);
// } else {
// $shipping_code = input('shipping_code');
// $invoice_no = input('invoice_no');
// if (empty($shipping_code) || empty($invoice_no)) {
// return json(['status' => 0, 'message' => '参数有误', 'result' => '']);
// }
// return json(queryExpress($shipping_code, $invoice_no));
// }
}
//物流插表
public function insertData($result, $shipping_code, $invoice_no)
{
$data = array(
'shipping_code' => $shipping_code,
'invoice_no' => $invoice_no,
'result' => serialize($result),
// 'issign' => $result['result']['issign'],
'update_time' => time(),
);
if(isset($result['result']['issign'])){
$data['issign'] = $result['result']['issign'];
}
return Db::name('delivery_express')->strict(false)->insert($data);
}
//物流表更新
public function updateData($result, $id)
{
$data = array(
'result' => serialize($result),
// 'issign' => $result['result']['issign'],
'update_time' => time(),
);
if(isset($result['result']['issign'])){
$data['issign'] = $result['result']['issign'];
}
return Db::name('delivery_express')->where('id', $id)->strict(false)->update($data);
}
/**
*物流接口
*/
private function getDelivery($shipping_code, $invoice_no)
{
$host = "https://wuliu.market.alicloudapi.com";//api访问链接
$path = "/kdi";//API访问后缀
$method = "GET";
//物流
$appcode = 'c5ccb196109848fe8ea5e1668410132a';//替换成自己的阿里云appcode
$headers = array();
array_push($headers, "Authorization:APPCODE " . $appcode);
$querys = "no=".$invoice_no."&type=".$shipping_code; //参数写在这里
$bodys = "";
$url = $host . $path . "?" . $querys;//url拼接
$curl = curl_init();
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, $method);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_HTTPHEADER, $headers);
curl_setopt($curl, CURLOPT_FAILONERROR, false);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
//curl_setopt($curl, CURLOPT_HEADER, true); 如不输出json, 请打开这行代码,打印调试头部状态码。
//状态码: 200 正常;400 URL无效;401 appCode错误; 403 次数用完; 500 API网管错误
if (1 == strpos("$".$host, "https://"))
{
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, false);
}
return curl_exec($curl);
}
/**
* 检查订单状态
*/
public function check_order_pay_status()
{
$order_id = I('order_id/d');
if (empty($order_id)) {
$res = ['message' => '参数错误', 'status' => -1, 'result' => ''];
$this->AjaxReturn($res);
}
$recharge = I('recharge/d');
if ($recharge == 1) {
// 充值查询
$order = M('recharge')->field('pay_status')->where(['order_id' => $order_id])->find();
if ($order['pay_status'] == 1) {
$res = ['message' => '已支付', 'status' => 1, 'result' => $order];
} else {
$res = ['message' => '未支付', 'status' => 0, 'result' => $order];
}
}else{
$order = M('order')->field('pay_status')->where(['order_id' => $order_id])->find();
if ($order['pay_status'] != 0) {
$res = ['message' => '已支付', 'status' => 1, 'result' => $order];
} else {
$res = ['message' => '未支付', 'status' => 0, 'result' => $order];
}
}
$this->AjaxReturn($res);
}
/**
* 广告位js
*/
public function ad_show()
{
$pid = I('pid/d', 1);
$where = array(
'pid' => $pid,
'enable' => 1,
'start_time' => array('lt', strtotime(date('Y-m-d H:00:00'))),
'end_time' => array('gt', strtotime(date('Y-m-d H:00:00'))),
);
$ad = D("ad")->where($where)->order("orderby desc")->cache(true, TPSHOP_CACHE_TIME)->find();
$this->assign('ad', $ad);
return $this->fetch();
}
/**
* 搜索关键字
* @return array
*/
public function searchKey()
{
$searchKey = input('key');
$searchKeyList = Db::name('search_word')
->where('keywords', 'like', $searchKey . '%')
->whereOr('pinyin_full', 'like', $searchKey . '%')
->whereOr('pinyin_simple', 'like', $searchKey . '%')
->limit(10)
->select();
if ($searchKeyList) {
return json(['status' => 1, 'msg' => '搜索成功', 'result' => $searchKeyList]);
} else {
return json(['status' => 0, 'msg' => '没记录', 'result' => $searchKeyList]);
}
}
/**
* 根据ip设置获取的地区来设置地区缓存
*/
public function doCookieArea()
{
// $ip = '183.147.30.238';//测试ip
$address = input('address/a', []);
if (empty($address) || empty($address['province'])) {
$this->setCookieArea();
return;
}
$province_id = Db::name('region')->where(['level' => 1, 'name' => ['like', '%' . $address['province'] . '%']])->limit('1')->value('id');
if (empty($province_id)) {
$this->setCookieArea();
return;
}
if (empty($address['city'])) {
$city_id = Db::name('region')->where(['level' => 2, 'parent_id' => $province_id])->limit('1')->order('id')->value('id');
} else {
$city_id = Db::name('region')->where(['level' => 2, 'parent_id' => $province_id, 'name' => ['like', '%' . $address['city'] . '%']])->limit('1')->value('id');
}
if (empty($address['district'])) {
$district_id = Db::name('region')->where(['level' => 3, 'parent_id' => $city_id])->limit('1')->order('id')->value('id');
} else {
$district_id = Db::name('region')->where(['level' => 3, 'parent_id' => $city_id, 'name' => ['like', '%' . $address['district'] . '%']])->limit('1')->value('id');
}
$this->setCookieArea($province_id, $city_id, $district_id);
}
/**
* 设置地区缓存
* @param $province_id
* @param $city_id
* @param $district_id
*/
private function setCookieArea($province_id = 1, $city_id = 2, $district_id = 3)
{
Cookie::set('province_id', $province_id);
Cookie::set('city_id', $city_id);
Cookie::set('district_id', $district_id);
}
public function shop()
{
$province_id = input('province_id/d', 0);
$city_id = input('city_id/d', 0);
$district_id = input('district_id/d', 0);
$shop_address = input('shop_address/s', '');
$longitude = input('longitude/s', 0);
$latitude = input('latitude/s', 0);
if (empty($province_id) && empty($province_id) && empty($district_id)) {
$this->ajaxReturn([]);
}
$where = ['deleted' => 0, 'shop_status' => 1, 'province_id' => $province_id, 'city_id' => $city_id, 'district_id' => $district_id];
$field = '*';
$order = 'shop_id desc';
if ($longitude) {
$field .= ',round(SQRT((POW(((' . $longitude . ' - longitude)* 111),2))+ (POW(((' . $latitude . ' - latitude)* 111),2))),2) AS distance';
$order = 'distance ASC';
}
if($shop_address){
$where['shop_name|shop_address'] = ['like', '%'.$shop_address.'%'];
}
$Shop = new Shop();
$shop_list = $Shop->field($field)->where($where)->order($order)->select();
$origins = $destinations = [];
if ($shop_list) {
$shop_list = collection($shop_list)->append(['phone','area_list','work_time','work_day'])->toArray();
$shop_list_length = count($shop_list);
for ($shop_cursor = 0; $shop_cursor < $shop_list_length; $shop_cursor++) {
$origin = $latitude . ',' . $longitude;
array_push($origins, $origin);
$destination = $shop_list[$shop_cursor]['latitude'] . ',' . $shop_list[$shop_cursor]['longitude'];
array_push($destinations, $destination);
}
$url = 'http://api.map.baidu.com/routematrix/v2/driving?output=json&origins=' . implode('|', $origins) . '&destinations=' . implode('|', $destinations) . '&ak=Sgg73Hgc2HizzMiL74TUj42o0j3vM5AL';
$result = httpRequest($url, "get");
$data = json_decode($result, true);
if (!empty($data['result'])) {
for ($shop_cursor = 0; $shop_cursor < $shop_list_length; $shop_cursor++) {
$shop_list[$shop_cursor]['distance_text'] = $data['result'][$shop_cursor]['distance']['text'];
}
}else{
for ($shop_cursor = 0; $shop_cursor < $shop_list_length; $shop_cursor++) {
$shop_list[$shop_cursor]['distance_text'] = $data['message'];
}
}
}
$this->ajaxReturn($shop_list);
}
/**
* 检查绑定账号的合法性
*/
public function checkBindMobile()
{
$mobile = input('mobile/s');
if(empty($mobile)){
$this->ajaxReturn(['status' => 0, 'msg' => '参数错误', 'result' => '']);
}
if(!check_mobile($mobile)){
$this->ajaxReturn(['status' => 0, 'msg' => '手机格式错误', 'result' => '']);
}
//1.检查账号密码是否正确
$users = Users::get(['mobile'=>$mobile]);
if (empty($users)) {
$this->ajaxReturn(['status' => 0, 'msg' => '账号不存在', 'result' => '']);
}
$user = new User();
try{
$user->setUser($users);
$user->checkOauthBind();
$this->ajaxReturn(['status' => 1, 'msg' => '该手机可绑定', 'result' => '']);
}catch (TpshopException $t){
$error = $t->getErrorArr();
$this->ajaxReturn($error);
}
}
/**
* 检查注册账号的合法性
*/
public function checkRegMobile()
{
$mobile = input('mobile/s');
if(empty($mobile)){
$this->ajaxReturn(['status' => 0, 'msg' => '参数错误', 'result' => '']);
}
if(!check_mobile($mobile)){
$this->ajaxReturn(['status' => 0, 'msg' => '手机格式错误', 'result' => '']);
}
//1.检查账号密码是否正确
$users = Db::name('users')->where("mobile", $mobile)->find();
if ($users) {
$this->ajaxReturn(['status' => 0, 'msg' => '该手机号已被注册', 'result' => '']);
}
$this->ajaxReturn(['status' => 1, 'msg' => '该手机可注册', 'result' => '']);
}
//------------------------------------------------------------------------------------------
private function private_send_validate_code($bool=false)
{
$this->send_scene = C('SEND_SCENE');
$type = I('type'); //email|其他
$scene = I('scene',0); //发送短信验证码使用场景,1:注册,2:找回密码,3:客户下单,4:客户支付,5:商家发货,6:身份验证,7:购买虚拟商品通知
$mobile = I('mobile'); //手机号码
$sender = I('send');
$verify_code = I('verify_code'); //图像验证码
$mobile = !empty($mobile) ? $mobile : $sender;
$session_id = I('unique_id', session_id());
if($bool)session("scene", $scene);
if($scene == 1){
$userinfo = M('users')->where(['mobile'=>$mobile])->count();
if($userinfo)return array('status' => -1, 'msg' => '该手机号码已存在');
}
//注册
if ($scene == 1 && !empty($verify_code)) {
$verify = new Verify();
if (!$verify->check($verify_code, 'user_reg')) {
return array('status' => -1, 'msg' => '图像验证码错误');
}
}
if ($type == 'email') {
//发送邮件验证码
$logic = new UsersLogic();
$res = $logic->send_email_code($sender);
return $res;
} else {
//发送短信验证码
$res = checkEnableSendSms($scene);
if ($res['status'] != 1) {
return $res;
}
//判断是否存在验证码
$data = M('sms_log')->where(array('mobile' => $mobile, 'session_id' => $session_id, 'status' => 1))->order('id DESC')->find();
//获取时间配置
$sms_time_out = tpCache('sms.sms_time_out');
$sms_time_out = $sms_time_out ? $sms_time_out : 120;
//120秒以内不可重复发送
if ($data && (time() - $data['add_time']) < $sms_time_out) {
$return_arr = array('status' => -1, 'msg' => $sms_time_out . '秒内不允许重复发送');
return $return_arr;
}
//随机一个验证码
$code = rand(1000, 9999);
$params['code'] = $code;
//发送短信
$resp = sendSms($scene, $mobile, $params, $session_id);
if ($resp['status'] == 1) {
//发送成功, 修改发送状态位成功
M('sms_log')->where(array('mobile' => $mobile, 'code' => $code, 'session_id' => $session_id, 'status' => 0))->save(array('status' => 1, 'add_time'=>time()));
$return_arr = array('status' => 1, 'msg' => '发送成功,请注意查收');
} else {
$return_arr = array('status' => -1, 'msg' => '发送失败' . $resp['msg']);
}
return $return_arr;
}
}
}
|
PHP
|
Apache-2.0
|
peofy/niuniu/application/common/model/Api.php
|
d09fd554-b9c0-47a6-9dbe-224242a30b25
|
[{"tag": "IP_ADDRESS", "value": "183.147.30.238", "start": 13249, "end": 13263, "context": "ic function doCookieArea()\n {\n// $ip = '183.147.30.238';//\u6d4b\u8bd5ip\n $address = input('address/a', [])"}, {"tag": "API_KEY", "value": "c5ccb196109848fe8ea5e1668410132a", "start": 9864, "end": 9896, "context": "$method = \"GET\";\n //\u7269\u6d41\n $appcode = 'c5ccb196109848fe8ea5e1668410132a';//\u66ff\u6362\u6210\u81ea\u5df1\u7684\u963f\u91cc\u4e91appcode\n $headers = array();\n "}, {"tag": "API_KEY", "value": "Sgg73Hgc2HizzMiL74TUj42o0j3vM5AL", "start": 16725, "end": 16757, "context": "stinations=' . implode('|', $destinations) . '&ak=Sgg73Hgc2HizzMiL74TUj42o0j3vM5AL';\n $result = httpRequest($url, \"get\");"}]
|
[{"tag": "IP_ADDRESS", "value": "183.147.30.238", "start": 13249, "end": 13263, "context": "ic function doCookieArea()\n {\n// $ip = '183.147.30.238';//\u6d4b\u8bd5ip\n $address = input('address/a', [])"}, {"tag": "KEY", "value": "c5ccb196109848fe8ea5e1668410132a", "start": 9864, "end": 9896, "context": "$method = \"GET\";\n //\u7269\u6d41\n $appcode = 'c5ccb196109848fe8ea5e1668410132a';//\u66ff\u6362\u6210\u81ea\u5df1\u7684\u963f\u91cc\u4e91appcode\n $headers = array();\n "}, {"tag": "KEY", "value": "Sgg73Hgc2HizzMiL74TUj42o0j3vM5AL", "start": 16725, "end": 16757, "context": "stinations=' . implode('|', $destinations) . '&ak=Sgg73Hgc2HizzMiL74TUj42o0j3vM5AL';\n $result = httpRequest($url, \"get\");"}]
|
#created by Angus Clark on 8/01/2017
# toDo incoperate the saving program into this_dir
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = '130.56.253.43'
print host # remove when done debugging
port = 5201 # edit when port for comm is decided
s.bind((host,port))
f = open('temp.json','wb')
s.listen(5)
while True:
c, addr = s.accept()
while(l):
f.write(l)
l = c.recv(1024)
f.close()
c.close()
|
Python
|
BSD-2-Clause
|
wmizzi/tn2capstone/ServerScript/recievejson(legacy).py
|
a135a5db-a9ec-47a7-b6f7-62839f48a99d
|
[{"tag": "NAME", "value": "Angus Clark", "start": 12, "end": 23, "context": "#created by Angus Clark on 8/01/2017\n# toDo incoperate the saving program"}, {"tag": "IP_ADDRESS", "value": "130.56.253.43", "start": 166, "end": 179, "context": "ocket(socket.AF_INET, socket.SOCK_STREAM)\nhost = '130.56.253.43'\nprint host # remove when done debugging\nport = 5"}]
|
[{"tag": "NAME", "value": "Angus Clark", "start": 12, "end": 23, "context": "#created by Angus Clark on 8/01/2017\n# toDo incoperate the saving program"}, {"tag": "IP_ADDRESS", "value": "130.56.253.43", "start": 166, "end": 179, "context": "ocket(socket.AF_INET, socket.SOCK_STREAM)\nhost = '130.56.253.43'\nprint host # remove when done debugging\nport = 5"}]
|
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('chart_data_labels17.xlsx')
self.ignore_elements = {'xl/charts/chart1.xml': ['<c:formatCode']}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'stock'})
date_format = workbook.add_format({'num_format': 14})
chart.axis_ids = [45740032, 45747200]
data = [
[39083, 39084, 39085, 39086, 39087],
[27.2, 25.03, 19.05, 20.34, 18.5],
[23.49, 19.55, 15.12, 17.84, 16.34],
[25.45, 23.05, 17.32, 20.45, 17.34],
]
for row in range(5):
worksheet.write(row, 0, data[0][row], date_format)
worksheet.write(row, 1, data[1][row])
worksheet.write(row, 2, data[2][row])
worksheet.write(row, 3, data[3][row])
worksheet.set_column('A:D', 11)
chart.add_series({
'categories': '=Sheet1!$A$1:$A$5',
'values': '=Sheet1!$B$1:$B$5',
})
chart.add_series({
'categories': '=Sheet1!$A$1:$A$5',
'values': '=Sheet1!$C$1:$C$5',
})
chart.add_series({
'categories': '=Sheet1!$A$1:$A$5',
'values': '=Sheet1!$D$1:$D$5',
'data_labels': {'value': 1, 'position': 'right'},
})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
|
Python
|
BSD-2-Clause
|
hugovk/XlsxWriter/xlsxwriter/test/comparison/test_chart_data_labels17.py
|
c3e7a2d2-67ad-4a24-8e9f-4f1762c95fad
|
[{"tag": "EMAIL", "value": "jmcnamara@cpan.org", "start": 191, "end": 209, "context": "Clause\n# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org\n#\n\nfrom ..excel_comparison_test import ExcelCompa"}, {"tag": "NAME", "value": "John McNamara", "start": 176, "end": 189, "context": "ntifier: BSD-2-Clause\n# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org\n#\n\nfrom ..excel_comparison_te"}]
|
[{"tag": "EMAIL", "value": "jmcnamara@cpan.org", "start": 191, "end": 209, "context": "Clause\n# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org\n#\n\nfrom ..excel_comparison_test import ExcelCompa"}, {"tag": "NAME", "value": "John McNamara", "start": 176, "end": 189, "context": "ntifier: BSD-2-Clause\n# Copyright (c), 2013-2022, John McNamara, jmcnamara@cpan.org\n#\n\nfrom ..excel_comparison_te"}]
|
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Validate
* @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: Ean14.php 20785 2010-01-31 09:43:03Z mikaelkael $
*/
/**
* @see Zend_Validate_Barcode_AdapterAbstract
*/
require_once 'Zend/Validate/Barcode/AdapterAbstract.php';
/**
* @category Zend
* @package Zend_Validate
* @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Zend_Validate_Barcode_Ean14 extends Zend_Validate_Barcode_AdapterAbstract
{
/**
* Allowed barcode lengths
* @var integer
*/
protected $_length = 14;
/**
* Allowed barcode characters
* @var string
*/
protected $_characters = '0123456789';
/**
* Checksum function
* @var string
*/
protected $_checksum = '_gtin';
}
|
PHP
|
BSD-3-Clause
|
FayeWilliams/ZF/library/Zend/Validate/Barcode/Ean14.php
|
a529f83f-6007-40d2-8ba8-2ba9cdbf9039
|
[]
|
[]
|
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Tag
* @copyright Copyright (c) 2013 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
* Tag resourse model
*
* @category Mage
* @package Mage_Tag
* @author Magento Core Team <core@magentocommerce.com>
*/
class Mage_Tag_Model_Mysql4_Tag extends Mage_Tag_Model_Resource_Tag
{
}
|
PHP
|
MIT
|
N3Works/bororoeducacao/public/loja/app/code/core/Mage/Tag/Model/Mysql4/Tag.php
|
5040764e-3c3c-4d41-b872-84a1702ccd7b
|
[{"tag": "EMAIL", "value": "core@magentocommerce.com", "start": 1071, "end": 1095, "context": "e Mage_Tag\n * @author Magento Core Team <core@magentocommerce.com>\n */\nclass Mage_Tag_Model_Mysql4_Tag extends Mage"}]
|
[{"tag": "EMAIL", "value": "core@magentocommerce.com", "start": 1071, "end": 1095, "context": "e Mage_Tag\n * @author Magento Core Team <core@magentocommerce.com>\n */\nclass Mage_Tag_Model_Mysql4_Tag extends Mage"}]
|
package command
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/x509"
"crypto/x509/pkix"
"encoding/base64"
"fmt"
"io/ioutil"
"math/big"
mathrand "math/rand"
"net"
"path/filepath"
"sort"
"strings"
"time"
"github.com/hashicorp/vault/helper/namespace"
"github.com/hashicorp/vault/helper/testhelpers"
vaulthttp "github.com/hashicorp/vault/http"
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/vault"
"github.com/hashicorp/vault/version"
testing "github.com/mitchellh/go-testing-interface"
"github.com/pkg/errors"
)
func (c *ServerCommand) enableFourClusterDev(base *vault.CoreConfig, info map[string]string, infoKeys []string, devListenAddress, tempDir string) int {
var err error
ctx := namespace.RootContext(nil)
clusters := map[string]*vault.TestCluster{}
if base.DevToken == "" {
base.DevToken = "root"
}
base.EnableRaw = true
// Without setting something in the future we get bombarded with warnings which are quite annoying during testing
base.DevLicenseDuration = 6 * time.Hour
// Set a base temp dir
if tempDir == "" {
tempDir, err = ioutil.TempDir("", "vault-test-cluster-")
if err != nil {
c.UI.Error(fmt.Sprintf("failed to create top-level temp dir: %s", err))
return 1
}
}
caKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
if err != nil {
c.UI.Error(fmt.Sprintf("Failed to generate CA key: %s", err))
return 1
}
certIPs := []net.IP{
net.IPv6loopback,
net.ParseIP("127.0.0.1"),
}
caCertTemplate := &x509.Certificate{
Subject: pkix.Name{
CommonName: "localhost",
},
DNSNames: []string{"localhost"},
IPAddresses: certIPs,
KeyUsage: x509.KeyUsage(x509.KeyUsageCertSign | x509.KeyUsageCRLSign),
SerialNumber: big.NewInt(mathrand.Int63()),
NotBefore: time.Now().Add(-30 * time.Second),
NotAfter: time.Now().Add(262980 * time.Hour),
BasicConstraintsValid: true,
IsCA: true,
}
caBytes, err := x509.CreateCertificate(rand.Reader, caCertTemplate, caCertTemplate, caKey.Public(), caKey)
if err != nil {
c.UI.Error(fmt.Sprintf("Failed to generate certificate: %s", err))
return 1
}
getCluster := func(name string) error {
factory := c.PhysicalBackends["inmem_transactional_ha"]
backend, err := factory(nil, c.logger)
if err != nil {
c.UI.Error(fmt.Sprintf("Error initializing storage of type %s: %s", "inmem_transactional_ha", err))
return errors.New("")
}
base.Physical = backend
base.Seal = vault.NewDefaultSeal()
testCluster := vault.NewTestCluster(&testing.RuntimeT{}, base, &vault.TestClusterOptions{
HandlerFunc: vaulthttp.Handler,
//BaseListenAddress: c.flagDevListenAddr,
Logger: c.logger.Named(name),
TempDir: fmt.Sprintf("%s/%s", tempDir, name),
CAKey: caKey,
CACert: caBytes,
})
clusters[name] = testCluster
for i, core := range testCluster.Cores {
info[fmt.Sprintf("%s node %d redirect address", name, i)] = fmt.Sprintf("https://%s", core.Listeners[0].Address.String())
infoKeys = append(infoKeys, fmt.Sprintf("%s node %d redirect address", name, i))
core.Server.Handler = vaulthttp.Handler(&vault.HandlerProperties{
Core: core.Core,
})
core.SetClusterHandler(core.Server.Handler)
}
testCluster.Start()
req := &logical.Request{
ID: "dev-gen-root",
Operation: logical.UpdateOperation,
ClientToken: testCluster.RootToken,
Path: "auth/token/create",
Data: map[string]interface{}{
"id": base.DevToken,
"policies": []string{"root"},
"no_parent": true,
"no_default_policy": true,
},
}
resp, err := testCluster.Cores[0].HandleRequest(ctx, req)
if err != nil {
c.UI.Error(fmt.Sprintf("failed to create root token with ID %s: %s", base.DevToken, err))
return errors.New("")
}
if resp == nil {
c.UI.Error(fmt.Sprintf("nil response when creating root token with ID %s", base.DevToken))
return errors.New("")
}
if resp.Auth == nil {
c.UI.Error(fmt.Sprintf("nil auth when creating root token with ID %s", base.DevToken))
return errors.New("")
}
testCluster.RootToken = resp.Auth.ClientToken
req.ID = "dev-revoke-init-root"
req.Path = "auth/token/revoke-self"
req.Data = nil
resp, err = testCluster.Cores[0].HandleRequest(ctx, req)
if err != nil {
c.UI.Output(fmt.Sprintf("failed to revoke initial root token: %s", err))
return errors.New("")
}
for _, core := range testCluster.Cores {
core.Client.SetToken(base.DevToken)
}
return nil
}
err = getCluster("perf-pri")
if err != nil {
return 1
}
err = getCluster("perf-pri-dr")
if err != nil {
return 1
}
err = getCluster("perf-sec")
if err != nil {
return 1
}
err = getCluster("perf-sec-dr")
if err != nil {
return 1
}
clusterCleanup := func() {
for name, cluster := range clusters {
cluster.Cleanup()
// Shutdown will wait until after Vault is sealed, which means the
// request forwarding listeners will also be closed (and also
// waited for).
for _, core := range cluster.Cores {
if err := core.Shutdown(); err != nil {
c.UI.Error(fmt.Sprintf("Error with cluster %s core shutdown: %s", name, err))
}
}
}
}
defer c.cleanupGuard.Do(clusterCleanup)
info["cluster parameters path"] = tempDir
infoKeys = append(infoKeys, "cluster parameters path")
verInfo := version.GetVersion()
info["version"] = verInfo.FullVersionNumber(false)
infoKeys = append(infoKeys, "version")
if verInfo.Revision != "" {
info["version sha"] = strings.Trim(verInfo.Revision, "'")
infoKeys = append(infoKeys, "version sha")
}
infoKeys = append(infoKeys, "cgo")
info["cgo"] = "disabled"
if version.CgoEnabled {
info["cgo"] = "enabled"
}
// Server configuration output
padding := 40
sort.Strings(infoKeys)
c.UI.Output("==> Vault server configuration:\n")
for _, k := range infoKeys {
c.UI.Output(fmt.Sprintf(
"%s%s: %s",
strings.Repeat(" ", padding-len(k)),
strings.Title(k),
info[k]))
}
c.UI.Output("")
// Set the token
tokenHelper, err := c.TokenHelper()
if err != nil {
c.UI.Error(fmt.Sprintf("Error getting token helper: %s", err))
return 1
}
if err := tokenHelper.Store(base.DevToken); err != nil {
c.UI.Error(fmt.Sprintf("Error storing in token helper: %s", err))
return 1
}
if err := ioutil.WriteFile(filepath.Join(tempDir, "root_token"), []byte(base.DevToken), 0755); err != nil {
c.UI.Error(fmt.Sprintf("Error writing token to tempfile: %s", err))
return 1
}
c.UI.Output(fmt.Sprintf(
"\nRoot Token: %s\n", base.DevToken,
))
for i, key := range clusters["perf-pri"].BarrierKeys {
c.UI.Output(fmt.Sprintf(
"Unseal Key %d: %s",
i+1, base64.StdEncoding.EncodeToString(key),
))
}
c.UI.Output(fmt.Sprintf(
"\nUseful env vars:\n"+
"export VAULT_TOKEN=%s\n"+
"export VAULT_CACERT=%s/perf-pri/ca_cert.pem\n",
base.DevToken,
tempDir,
))
c.UI.Output(fmt.Sprintf("Addresses of initial active nodes:"))
clusterNames := []string{}
for name := range clusters {
clusterNames = append(clusterNames, name)
}
sort.Strings(clusterNames)
for _, name := range clusterNames {
c.UI.Output(fmt.Sprintf(
"%s:\n"+
"export VAULT_ADDR=%s\n",
name,
clusters[name].Cores[0].Client.Address(),
))
}
// Output the header that the server has started
c.UI.Output("==> Vault clusters started! Log data will stream in below:\n")
// Inform any tests that the server is ready
select {
case c.startedCh <- struct{}{}:
default:
}
// Release the log gate.
c.logGate.Flush()
testhelpers.SetupFourClusterReplication(&testing.RuntimeT{},
clusters["perf-pri"],
clusters["perf-sec"],
clusters["perf-pri-dr"],
clusters["perf-sec-dr"],
)
// Wait for shutdown
shutdownTriggered := false
for !shutdownTriggered {
select {
case <-c.ShutdownCh:
c.UI.Output("==> Vault shutdown triggered")
// Stop the listeners so that we don't process further client requests.
c.cleanupGuard.Do(clusterCleanup)
shutdownTriggered = true
case <-c.SighupCh:
c.UI.Output("==> Vault reload triggered")
for name, cluster := range clusters {
for _, core := range cluster.Cores {
if err := c.Reload(core.ReloadFuncsLock, core.ReloadFuncs, nil); err != nil {
c.UI.Error(fmt.Sprintf("Error(s) were encountered during reload of cluster %s cores: %s", name, err))
}
}
}
}
}
return 0
}
|
GO
|
MPL-2.0
|
SuriDotInc/vault/command/server_devfourcluster.go
|
a6296c20-eea2-48c4-bb6e-4af83a68f29e
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1484, "end": 1493, "context": "s := []net.IP{\n\t\tnet.IPv6loopback,\n\t\tnet.ParseIP(\"127.0.0.1\"),\n\t}\n\tcaCertTemplate := &x509.Certificate{\n\t\tSub"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1484, "end": 1493, "context": "s := []net.IP{\n\t\tnet.IPv6loopback,\n\t\tnet.ParseIP(\"127.0.0.1\"),\n\t}\n\tcaCertTemplate := &x509.Certificate{\n\t\tSub"}]
|
# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Denis Engemann <denis.engemann@gmail.com>
# Andrew Dykstra <andrew.r.dykstra@gmail.com>
# Mads Jensen <mje.mads@gmail.com>
#
# License: BSD (3-clause)
import os.path as op
from copy import deepcopy
import warnings
import numpy as np
from scipy import fftpack
from numpy.testing import (assert_array_almost_equal, assert_equal,
assert_array_equal, assert_allclose)
from nose.tools import assert_true, assert_raises, assert_not_equal
from mne import (equalize_channels, pick_types, read_evokeds, write_evokeds,
grand_average, combine_evoked)
from mne.evoked import _get_peak, EvokedArray
from mne.epochs import EpochsArray
from mne.utils import _TempDir, requires_pandas, slow_test, requires_version
from mne.io.meas_info import create_info
from mne.externals.six.moves import cPickle as pickle
# Surface every warning occurrence so tests that capture warnings see them all.
warnings.simplefilter('always')
# Paths to the bundled evoked test files (plain and gzip-compressed FIF).
fname = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data',
                'test-ave.fif')
fname_gz = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data',
                   'test-ave.fif.gz')
@requires_version('scipy', '0.14')
def test_savgol_filter():
    """Test Savitzky-Golay filtering of evoked data.

    Verifies that spectral power below the cutoff is preserved and power
    well above the cutoff is strongly attenuated.
    """
    h_freq = 10.
    evoked = read_evokeds(fname, 0)
    freqs = fftpack.fftfreq(len(evoked.times), 1. / evoked.info['sfreq'])
    data = np.abs(fftpack.fft(evoked.data))
    # Frequencies that should survive vs. be suppressed by the filter.
    match_mask = np.logical_and(freqs >= 0, freqs <= h_freq / 2.)
    mismatch_mask = np.logical_and(freqs >= h_freq * 2, freqs < 50.)
    # A cutoff at the sampling frequency is invalid and must raise.
    assert_raises(ValueError, evoked.savgol_filter, evoked.info['sfreq'])
    evoked.savgol_filter(h_freq)
    data_filt = np.abs(fftpack.fft(evoked.data))
    # decent in pass-band
    assert_allclose(np.mean(data[:, match_mask], 0),
                    np.mean(data_filt[:, match_mask], 0),
                    rtol=1e-4, atol=1e-2)
    # suppression in stop-band
    assert_true(np.mean(data[:, mismatch_mask]) >
                np.mean(data_filt[:, mismatch_mask]) * 5)
def test_hash_evoked():
    """Test that Evoked hashing tracks the underlying data."""
    first = read_evokeds(fname, 0)
    second = read_evokeds(fname, 0)
    # Two identical reads must hash (and pickle) identically.
    assert_equal(hash(first), hash(second))
    # do NOT use assert_equal here, failing output is terrible
    assert_true(pickle.dumps(first) == pickle.dumps(second))
    # Perturbing a single sample must change the hash.
    second.data[0, 0] -= 1
    assert_not_equal(hash(first), hash(second))
@slow_test
def test_io_evoked():
    """Test round-trip IO for evoked data (fif + gz) with int and str args.

    Covers plain and gzip-compressed files, condition selection by index,
    by name, and by list, and the warning emitted for bad filenames.
    """
    tempdir = _TempDir()
    ave = read_evokeds(fname, 0)
    write_evokeds(op.join(tempdir, 'evoked-ave.fif'), ave)
    ave2 = read_evokeds(op.join(tempdir, 'evoked-ave.fif'))[0]
    # This not being assert_array_equal due to windows rounding
    assert_true(np.allclose(ave.data, ave2.data, atol=1e-16, rtol=1e-3))
    assert_array_almost_equal(ave.times, ave2.times)
    assert_equal(ave.nave, ave2.nave)
    assert_equal(ave._aspect_kind, ave2._aspect_kind)
    assert_equal(ave.kind, ave2.kind)
    assert_equal(ave.last, ave2.last)
    assert_equal(ave.first, ave2.first)
    assert_true(repr(ave))
    # test compressed i/o
    ave2 = read_evokeds(fname_gz, 0)
    assert_true(np.allclose(ave.data, ave2.data, atol=1e-16, rtol=1e-8))
    # test str access
    condition = 'Left Auditory'
    # 'stderr'/'standard_error' are not valid kind spellings here.
    assert_raises(ValueError, read_evokeds, fname, condition, kind='stderr')
    assert_raises(ValueError, read_evokeds, fname, condition,
                  kind='standard_error')
    ave3 = read_evokeds(fname, condition)
    assert_array_almost_equal(ave.data, ave3.data, 19)
    # test read_evokeds and write_evokeds
    types = ['Left Auditory', 'Right Auditory', 'Left visual', 'Right visual']
    aves1 = read_evokeds(fname)
    aves2 = read_evokeds(fname, [0, 1, 2, 3])
    aves3 = read_evokeds(fname, types)
    write_evokeds(op.join(tempdir, 'evoked-ave.fif'), aves1)
    aves4 = read_evokeds(op.join(tempdir, 'evoked-ave.fif'))
    # All selection styles must yield the same evoked datasets.
    for aves in [aves2, aves3, aves4]:
        for [av1, av2] in zip(aves1, aves):
            assert_array_almost_equal(av1.data, av2.data)
            assert_array_almost_equal(av1.times, av2.times)
            assert_equal(av1.nave, av2.nave)
            assert_equal(av1.kind, av2.kind)
            assert_equal(av1._aspect_kind, av2._aspect_kind)
            assert_equal(av1.last, av2.last)
            assert_equal(av1.first, av2.first)
            assert_equal(av1.comment, av2.comment)
    # test warnings on bad filenames
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        fname2 = op.join(tempdir, 'test-bad-name.fif')
        write_evokeds(fname2, ave)
        read_evokeds(fname2)
    # One warning each for writing and reading the non-standard name.
    assert_true(len(w) == 2)
def test_shift_time_evoked():
    """Test relative and absolute shifting of the evoked time axis."""
    tempdir = _TempDir()
    # Shift backward
    ave = read_evokeds(fname, 0)
    ave.shift_time(-0.1, relative=True)
    write_evokeds(op.join(tempdir, 'evoked-ave.fif'), ave)
    # Shift forward twice the amount
    ave_bshift = read_evokeds(op.join(tempdir, 'evoked-ave.fif'), 0)
    ave_bshift.shift_time(0.2, relative=True)
    write_evokeds(op.join(tempdir, 'evoked-ave.fif'), ave_bshift)
    # Shift backward again
    ave_fshift = read_evokeds(op.join(tempdir, 'evoked-ave.fif'), 0)
    ave_fshift.shift_time(-0.1, relative=True)
    write_evokeds(op.join(tempdir, 'evoked-ave.fif'), ave_fshift)
    ave_normal = read_evokeds(fname, 0)
    ave_relative = read_evokeds(op.join(tempdir, 'evoked-ave.fif'), 0)
    # Net relative shift is zero (-0.1 + 0.2 - 0.1), so the result must
    # match the untouched original.
    assert_true(np.allclose(ave_normal.data, ave_relative.data,
                            atol=1e-16, rtol=1e-3))
    assert_array_almost_equal(ave_normal.times, ave_relative.times, 10)
    assert_equal(ave_normal.last, ave_relative.last)
    assert_equal(ave_normal.first, ave_relative.first)
    # Absolute time shift
    ave = read_evokeds(fname, 0)
    ave.shift_time(-0.3, relative=False)
    write_evokeds(op.join(tempdir, 'evoked-ave.fif'), ave)
    ave_absolute = read_evokeds(op.join(tempdir, 'evoked-ave.fif'), 0)
    # Data are unchanged; only the first-sample index moves to -0.3 s.
    assert_true(np.allclose(ave_normal.data, ave_absolute.data,
                            atol=1e-16, rtol=1e-3))
    assert_equal(ave_absolute.first, int(-0.3 * ave.info['sfreq']))
def test_evoked_resample():
    """Test up/down resampling of evoked data and round-trip recovery."""
    tempdir = _TempDir()
    # upsample, write it out, read it in
    ave = read_evokeds(fname, 0)
    sfreq_normal = ave.info['sfreq']
    ave.resample(2 * sfreq_normal)
    write_evokeds(op.join(tempdir, 'evoked-ave.fif'), ave)
    ave_up = read_evokeds(op.join(tempdir, 'evoked-ave.fif'), 0)
    # compare it to the original
    ave_normal = read_evokeds(fname, 0)
    # and compare the original to the downsampled upsampled version
    ave_new = read_evokeds(op.join(tempdir, 'evoked-ave.fif'), 0)
    ave_new.resample(sfreq_normal)
    # Upsample followed by downsample should approximately recover the data.
    assert_array_almost_equal(ave_normal.data, ave_new.data, 2)
    assert_array_almost_equal(ave_normal.times, ave_new.times)
    assert_equal(ave_normal.nave, ave_new.nave)
    assert_equal(ave_normal._aspect_kind, ave_new._aspect_kind)
    assert_equal(ave_normal.kind, ave_new.kind)
    assert_equal(ave_normal.last, ave_new.last)
    assert_equal(ave_normal.first, ave_new.first)
    # for the above to work, the upsampling just about had to, but
    # we'll add a couple extra checks anyway
    assert_true(len(ave_up.times) == 2 * len(ave_normal.times))
    assert_true(ave_up.data.shape[1] == 2 * ave_normal.data.shape[1])
def test_evoked_detrend():
    """Test that order-0 detrending equals manual mean removal."""
    detrended = read_evokeds(fname, 0)
    reference = read_evokeds(fname, 0)
    detrended.detrend(0)
    # Order-0 detrend is just channel-wise mean subtraction.
    reference.data -= np.mean(reference.data, axis=1)[:, np.newaxis]
    sel = pick_types(detrended.info, meg=True, eeg=True, exclude='bads')
    assert_true(np.allclose(detrended.data[sel], reference.data[sel],
                            rtol=1e-8, atol=1e-16))
@requires_pandas
def test_to_data_frame():
    """Test the evoked-to-pandas DataFrame exporter."""
    ave = read_evokeds(fname, 0)
    # Picks beyond the number of available channels must be rejected.
    assert_raises(ValueError, ave.to_data_frame, picks=np.arange(400))
    df = ave.to_data_frame()
    # Columns should be the channel names, in order.
    assert_true((df.columns == ave.ch_names).all())
    df = ave.to_data_frame(index=None).reset_index('time')
    assert_true('time' in df.columns)
    # Exported values are scaled by fixed unit factors (1e13 / 1e15);
    # presumably grad/mag unit conversions — verify against to_data_frame.
    assert_array_equal(df.values[:, 1], ave.data[0] * 1e13)
    assert_array_equal(df.values[:, 3], ave.data[2] * 1e15)
def test_evoked_proj():
    """Test SSP projection operations on evoked data.

    Checks add/delete restrictions when projections are already applied,
    bookkeeping of the projs list, and that apply_proj matches an explicit
    projector matrix multiplication.
    """
    for proj in [True, False]:
        ave = read_evokeds(fname, condition=0, proj=proj)
        # Loaded projs should all be (in)active according to `proj`.
        assert_true(all(p['active'] == proj for p in ave.info['projs']))
        # test adding / deleting proj
        if proj:
            # Already-applied projections can no longer be modified.
            assert_raises(ValueError, ave.add_proj, [],
                          {'remove_existing': True})
            assert_raises(ValueError, ave.del_proj, 0)
        else:
            projs = deepcopy(ave.info['projs'])
            n_proj = len(ave.info['projs'])
            ave.del_proj(0)
            assert_true(len(ave.info['projs']) == n_proj - 1)
            ave.add_proj(projs, remove_existing=False)
            assert_true(len(ave.info['projs']) == 2 * n_proj - 1)
            ave.add_proj(projs, remove_existing=True)
            assert_true(len(ave.info['projs']) == n_proj)
    # Applying the projector must equal the explicit matrix product.
    ave = read_evokeds(fname, condition=0, proj=False)
    data = ave.data.copy()
    ave.apply_proj()
    assert_allclose(np.dot(ave._projector, data), ave.data)
def test_get_peak():
    """Test the Evoked peak getter and the _get_peak helper."""
    evoked = read_evokeds(fname, condition=0, proj=True)
    # Invalid time windows / modes / channel types must raise.
    assert_raises(ValueError, evoked.get_peak, ch_type='mag', tmin=1)
    assert_raises(ValueError, evoked.get_peak, ch_type='mag', tmax=0.9)
    assert_raises(ValueError, evoked.get_peak, ch_type='mag', tmin=0.02,
                  tmax=0.01)
    assert_raises(ValueError, evoked.get_peak, ch_type='mag', mode='foo')
    assert_raises(RuntimeError, evoked.get_peak, ch_type=None, mode='foo')
    assert_raises(ValueError, evoked.get_peak, ch_type='misc', mode='foo')
    # Default return is (channel name, peak time).
    ch_idx, time_idx = evoked.get_peak(ch_type='mag')
    assert_true(ch_idx in evoked.ch_names)
    assert_true(time_idx in evoked.times)
    # With time_as_index=True the time is returned as a sample index.
    ch_idx, time_idx = evoked.get_peak(ch_type='mag',
                                       time_as_index=True)
    assert_true(time_idx < len(evoked.times))
    # Check _get_peak directly on a toy array for all three modes.
    data = np.array([[0., 1., 2.],
                     [0., -3., 0]])
    times = np.array([.1, .2, .3])
    ch_idx, time_idx = _get_peak(data, times, mode='abs')
    assert_equal(ch_idx, 1)
    assert_equal(time_idx, 1)
    ch_idx, time_idx = _get_peak(data * -1, times, mode='neg')
    assert_equal(ch_idx, 0)
    assert_equal(time_idx, 2)
    ch_idx, time_idx = _get_peak(data, times, mode='pos')
    assert_equal(ch_idx, 0)
    assert_equal(time_idx, 2)
    # pos/neg modes require data of the matching sign to exist.
    assert_raises(ValueError, _get_peak, data + 1e3, times, mode='neg')
    assert_raises(ValueError, _get_peak, data - 1e3, times, mode='pos')
def test_drop_channels_mixin():
    """Test Evoked.drop_channels in both copying and in-place modes."""
    evoked = read_evokeds(fname, condition=0, proj=True)
    to_drop = evoked.ch_names[:3]
    remaining = evoked.ch_names[3:]
    original_names = evoked.ch_names
    # copy=True must leave the source object untouched.
    copied = evoked.drop_channels(to_drop, copy=True)
    assert_equal(remaining, copied.ch_names)
    assert_equal(original_names, evoked.ch_names)
    assert_equal(len(original_names), len(evoked.data))
    # In-place drop mutates the object itself.
    evoked.drop_channels(to_drop)
    assert_equal(remaining, evoked.ch_names)
    assert_equal(len(remaining), len(evoked.data))
def test_pick_channels_mixin():
    """Test channel-picking functionality (copy, in-place, and by type)."""
    evoked = read_evokeds(fname, condition=0, proj=True)
    ch_names = evoked.ch_names[:3]
    ch_names_orig = evoked.ch_names
    # copy=True must leave the source object untouched.
    dummy = evoked.pick_channels(ch_names, copy=True)
    assert_equal(ch_names, dummy.ch_names)
    assert_equal(ch_names_orig, evoked.ch_names)
    assert_equal(len(ch_names_orig), len(evoked.data))
    # In-place picking mutates the object itself.
    evoked.pick_channels(ch_names)
    assert_equal(ch_names, evoked.ch_names)
    assert_equal(len(ch_names), len(evoked.data))
    # __contains__ reflects the channel types currently present.
    evoked = read_evokeds(fname, condition=0, proj=True)
    assert_true('meg' in evoked)
    assert_true('eeg' in evoked)
    evoked.pick_types(meg=False, eeg=True)
    assert_true('meg' not in evoked)
    assert_true('eeg' in evoked)
    assert_true(len(evoked.ch_names) == 60)
def test_equalize_channels():
    """Test that equalize_channels reduces instances to common channels."""
    first = read_evokeds(fname, condition=0, proj=True)
    second = first.copy()
    # After dropping different leading channels, only those from index 2
    # onward remain common to both instances.
    expected = first.ch_names[2:]
    first.drop_channels(first.ch_names[:1])
    second.drop_channels(second.ch_names[1:2])
    pair = [first, second]
    equalize_channels(pair)
    for inst in pair:
        assert_equal(expected, inst.ch_names)
def test_evoked_arithmetic():
    """Test evoked arithmetic (+, -, combine_evoked, grand_average)."""
    ev = read_evokeds(fname, condition=0)
    # Two synthetic evokeds with data +1 / -1 and nave 20 / 10.
    ev1 = EvokedArray(np.ones_like(ev.data), ev.info, ev.times[0], nave=20)
    ev2 = EvokedArray(-np.ones_like(ev.data), ev.info, ev.times[0], nave=10)
    # combine_evoked([ev1, ev2]) should be the same as ev1 + ev2:
    # data should be added according to their `nave` weights
    # nave = ev1.nave + ev2.nave
    ev = ev1 + ev2
    assert_equal(ev.nave, ev1.nave + ev2.nave)
    # (20 * 1 + 10 * -1) / 30 == 1/3
    assert_allclose(ev.data, 1. / 3. * np.ones_like(ev.data))
    ev = ev1 - ev2
    assert_equal(ev.nave, ev1.nave + ev2.nave)
    assert_equal(ev.comment, ev1.comment + ' - ' + ev2.comment)
    assert_allclose(ev.data, np.ones_like(ev1.data))
    # default comment behavior if evoked.comment is None
    old_comment1 = ev1.comment
    old_comment2 = ev2.comment
    ev1.comment = None
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('always')
        ev = ev1 - ev2
        assert_equal(ev.comment, 'unknown')
    ev1.comment = old_comment1
    ev2.comment = old_comment2
    # equal weighting
    ev = combine_evoked([ev1, ev2], weights='equal')
    assert_allclose(ev.data, np.zeros_like(ev1.data))
    # combine_evoked([ev1, ev2], weights=[1, 0]) should yield the same as ev1
    ev = combine_evoked([ev1, ev2], weights=[1, 0])
    assert_equal(ev.nave, ev1.nave)
    assert_allclose(ev.data, ev1.data)
    # simple subtraction (like in oddball)
    ev = combine_evoked([ev1, ev2], weights=[1, -1])
    assert_allclose(ev.data, 2 * np.ones_like(ev1.data))
    # Invalid weight specifications must raise.
    assert_raises(ValueError, combine_evoked, [ev1, ev2], weights='foo')
    assert_raises(ValueError, combine_evoked, [ev1, ev2], weights=[1])
    # grand average
    evoked1, evoked2 = read_evokeds(fname, condition=[0, 1], proj=True)
    ch_names = evoked1.ch_names[2:]
    evoked1.info['bads'] = ['EEG 008']  # test interpolation
    evoked1.drop_channels(evoked1.ch_names[:1])
    evoked2.drop_channels(evoked2.ch_names[1:2])
    gave = grand_average([evoked1, evoked2])
    # Grand average keeps only the channels common to both inputs.
    assert_equal(gave.data.shape, [len(ch_names), evoked1.data.shape[1]])
    assert_equal(ch_names, gave.ch_names)
    assert_equal(gave.nave, 2)
def test_array_epochs():
    """Test creating Evoked from a plain array (EvokedArray).

    Round-trips through disk, compares with the average of an equivalent
    single-epoch EpochsArray, and checks info/data shape validation.
    """
    tempdir = _TempDir()
    # creating
    rng = np.random.RandomState(42)
    data1 = rng.randn(20, 60)
    sfreq = 1e3
    ch_names = ['EEG %03d' % (i + 1) for i in range(20)]
    types = ['eeg'] * 20
    info = create_info(ch_names, sfreq, types)
    evoked1 = EvokedArray(data1, info, tmin=-0.01)
    # save, read, and compare evokeds
    tmp_fname = op.join(tempdir, 'evkdary-ave.fif')
    evoked1.save(tmp_fname)
    evoked2 = read_evokeds(tmp_fname)[0]
    data2 = evoked2.data
    assert_allclose(data1, data2)
    assert_allclose(evoked1.times, evoked2.times)
    assert_equal(evoked1.first, evoked2.first)
    assert_equal(evoked1.last, evoked2.last)
    assert_equal(evoked1.kind, evoked2.kind)
    assert_equal(evoked1.nave, evoked2.nave)
    # now compare with EpochsArray (with single epoch)
    data3 = data1[np.newaxis, :, :]
    events = np.c_[10, 0, 1]
    # Averaging a single epoch must reproduce the EvokedArray exactly.
    evoked3 = EpochsArray(data3, info, events=events, tmin=-0.01).average()
    assert_allclose(evoked1.data, evoked3.data)
    assert_allclose(evoked1.times, evoked3.times)
    assert_equal(evoked1.first, evoked3.first)
    assert_equal(evoked1.last, evoked3.last)
    assert_equal(evoked1.kind, evoked3.kind)
    assert_equal(evoked1.nave, evoked3.nave)
    # test match between channels info and data
    ch_names = ['EEG %03d' % (i + 1) for i in range(19)]
    types = ['eeg'] * 19
    info = create_info(ch_names, sfreq, types)
    # Info describing 19 channels cannot be paired with 20-channel data.
    assert_raises(ValueError, EvokedArray, data1, info, tmin=-0.01)
def test_add_channels():
    """Test evoked splitting / re-appending channel types.

    Splits an evoked dataset by channel type, checks that add_channels
    restores the expected channels and data, then exercises the error
    paths for mismatched metadata and bad argument types.
    """
    evoked = read_evokeds(fname, condition=0)
    evoked.info['buffer_size_sec'] = None
    evoked_eeg = evoked.pick_types(meg=False, eeg=True, copy=True)
    evoked_meg = evoked.pick_types(meg=True, copy=True)
    evoked_stim = evoked.pick_types(meg=False, stim=True, copy=True)
    evoked_eeg_meg = evoked.pick_types(meg=True, eeg=True, copy=True)
    evoked_new = evoked_meg.add_channels([evoked_eeg, evoked_stim], copy=True)
    assert_true(all(ch in evoked_new.ch_names
                    for ch in evoked_stim.ch_names + evoked_meg.ch_names))
    evoked_new = evoked_meg.add_channels([evoked_eeg], copy=True)
    # Bug fix: the original asserted the truthiness of a bare generator
    # expression, which is always True regardless of its contents.  Check
    # membership of the channels that were actually combined (MEG + EEG).
    assert_true(all(ch in evoked_new.ch_names
                    for ch in evoked_meg.ch_names + evoked_eeg.ch_names))
    assert_array_equal(evoked_new.data, evoked_eeg_meg.data)
    assert_true(all(ch not in evoked_new.ch_names
                    for ch in evoked_stim.ch_names))
    # Now test errors
    evoked_badsf = evoked_eeg.copy()
    evoked_badsf.info['sfreq'] = 3.1415927  # mismatched sampling rate
    evoked_eeg = evoked_eeg.crop(-.1, .1)  # mismatched time span
    assert_raises(RuntimeError, evoked_meg.add_channels, [evoked_badsf])
    assert_raises(AssertionError, evoked_meg.add_channels, [evoked_eeg])
    assert_raises(ValueError, evoked_meg.add_channels, [evoked_meg])
    # Passing a non-list must also fail.
    assert_raises(AssertionError, evoked_meg.add_channels, evoked_badsf)
|
Python
|
BSD-3-Clause
|
rajegannathan/grasp-lift-eeg-cat-dog-solution-updated/python-packages/mne-python-0.10/mne/tests/test_evoked.py
|
28d7469d-6f37-43fa-a2c5-835797e84081
|
[{"tag": "NAME", "value": "Alexandre Gramfort", "start": 10, "end": 28, "context": "# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>\n# "}, {"tag": "NAME", "value": "Andrew Dykstra", "start": 133, "end": 147, "context": "nis Engemann <denis.engemann@gmail.com>\n# Andrew Dykstra <andrew.r.dykstra@gmail.com>\n# Mads Jense"}, {"tag": "EMAIL", "value": "denis.engemann@gmail.com", "start": 97, "end": 121, "context": "t@telecom-paristech.fr>\n# Denis Engemann <denis.engemann@gmail.com>\n# Andrew Dykstra <andrew.r.dykstra@gmail"}, {"tag": "EMAIL", "value": "mje.mads@gmail.com", "start": 200, "end": 218, "context": "ndrew.r.dykstra@gmail.com>\n# Mads Jensen <mje.mads@gmail.com>\n#\n# License: BSD (3-clause)\n\nimport os.path as o"}, {"tag": "NAME", "value": "Denis Engemann", "start": 81, "end": 95, "context": "lexandre.gramfort@telecom-paristech.fr>\n# Denis Engemann <denis.engemann@gmail.com>\n# Andrew Dykst"}, {"tag": "NAME", "value": "Mads Jensen", "start": 187, "end": 198, "context": "ew Dykstra <andrew.r.dykstra@gmail.com>\n# Mads Jensen <mje.mads@gmail.com>\n#\n# License: BSD (3-clause)\n"}, {"tag": "EMAIL", "value": "andrew.r.dykstra@gmail.com", "start": 149, "end": 175, "context": "nis.engemann@gmail.com>\n# Andrew Dykstra <andrew.r.dykstra@gmail.com>\n# Mads Jensen <mje.mads@gmail.com>\n#\n# L"}, {"tag": "EMAIL", "value": "alexandre.gramfort@telecom-paristech.fr", "start": 30, "end": 69, "context": "# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>\n# Denis Engemann <denis.engemann@gmail.c"}]
|
[{"tag": "NAME", "value": "Alexandre Gramfort", "start": 10, "end": 28, "context": "# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>\n# "}, {"tag": "NAME", "value": "Andrew Dykstra", "start": 133, "end": 147, "context": "nis Engemann <denis.engemann@gmail.com>\n# Andrew Dykstra <andrew.r.dykstra@gmail.com>\n# Mads Jense"}, {"tag": "EMAIL", "value": "denis.engemann@gmail.com", "start": 97, "end": 121, "context": "t@telecom-paristech.fr>\n# Denis Engemann <denis.engemann@gmail.com>\n# Andrew Dykstra <andrew.r.dykstra@gmail"}, {"tag": "EMAIL", "value": "mje.mads@gmail.com", "start": 200, "end": 218, "context": "ndrew.r.dykstra@gmail.com>\n# Mads Jensen <mje.mads@gmail.com>\n#\n# License: BSD (3-clause)\n\nimport os.path as o"}, {"tag": "NAME", "value": "Denis Engemann", "start": 81, "end": 95, "context": "lexandre.gramfort@telecom-paristech.fr>\n# Denis Engemann <denis.engemann@gmail.com>\n# Andrew Dykst"}, {"tag": "NAME", "value": "Mads Jensen", "start": 187, "end": 198, "context": "ew Dykstra <andrew.r.dykstra@gmail.com>\n# Mads Jensen <mje.mads@gmail.com>\n#\n# License: BSD (3-clause)\n"}, {"tag": "EMAIL", "value": "andrew.r.dykstra@gmail.com", "start": 149, "end": 175, "context": "nis.engemann@gmail.com>\n# Andrew Dykstra <andrew.r.dykstra@gmail.com>\n# Mads Jensen <mje.mads@gmail.com>\n#\n# L"}, {"tag": "EMAIL", "value": "alexandre.gramfort@telecom-paristech.fr", "start": 30, "end": 69, "context": "# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>\n# Denis Engemann <denis.engemann@gmail.c"}]
|
# Rainbow 2, by Al Sweigart al@inventwithpython.com
# Shows a simple squiggle rainbow animation.

import time, random, sys

try:
    import bext
except ImportError:
    print("""This program requires the bext module, which you can install by
opening a Terminal window (on macOS & Linux) and running:
    python3 -m pip install --user bext
or a Command Prompt window (on Windows) and running:
    python -m pip install --user bext""")
    sys.exit()

# Colors of the rainbow bands, printed left to right.
RAINBOW_COLORS = ('red', 'yellow', 'green', 'blue', 'cyan', 'purple')

indent = 10  # How many spaces to indent.

while True:
    # Print the indentation, then one colored '##' pair per band.
    print(' ' * indent, end='')
    for color in RAINBOW_COLORS[:-1]:
        bext.fg(color)
        print('##', end='')
    bext.fg(RAINBOW_COLORS[-1])
    print('##')  # The last band ends the line.

    # Randomly drift the indentation, clamped to the range [0, 20].
    if random.randint(0, 1) == 0:
        indent = min(indent + 1, 20)
    else:
        indent = max(indent - 1, 0)

    time.sleep(0.05)  # Add a slight pause.
|
Python
|
MIT
|
skinzor/PythonStdioGames/src/gamesbyexample/rainbow2.py
|
386c6bc4-1543-4d86-9f3b-08eb78ad62b8
|
[{"tag": "NAME", "value": "Al Sweigart", "start": 16, "end": 27, "context": "# Rainbow 2, by Al Sweigart al@inventwithpython.com\n# Shows a simple squiggle"}, {"tag": "EMAIL", "value": "al@inventwithpython.com", "start": 28, "end": 51, "context": "# Rainbow 2, by Al Sweigart al@inventwithpython.com\n# Shows a simple squiggle rainbow animation.\n\nimp"}]
|
[{"tag": "NAME", "value": "Al Sweigart", "start": 16, "end": 27, "context": "# Rainbow 2, by Al Sweigart al@inventwithpython.com\n# Shows a simple squiggle"}, {"tag": "EMAIL", "value": "al@inventwithpython.com", "start": 28, "end": 51, "context": "# Rainbow 2, by Al Sweigart al@inventwithpython.com\n# Shows a simple squiggle rainbow animation.\n\nimp"}]
|
"""fix_parser.py - parse V1.0 fixprotocol sbe xml files described
by xsd https://github.com/FIXTradingCommunity/
fix-simple-binary-encoding/blob/master/v1-0-STANDARD/resources/sbe.xsd
"""
import xml.etree.ElementTree as etree
from pysbe.schema.constants import (
SBE_TYPES_TYPE,
STRING_ENUM_MAP,
VALID_TYPE_PRIMITIVE_TYPE,
TYPE_PRIMITIVE_TYPE_MAP,
PRESENCE_MAP,
QUALIFIED_NAME_RE,
SYMBOLIC_NAME_RE,
)
from pysbe.schema.builder import createMessageSchema
from pysbe.schema.types import (
createType,
createComposite,
createEnum,
createValidValue,
TypeCollection,
createRef,
createSet,
createChoice,
createMessage,
createField,
FieldCollection,
createGroup,
)
from pysbe.schema.exceptions import UnknownReference
# XML namespace URI of the SBE schema elements.
SBE_NS = "http://fixprotocol.io/2016/sbe"
# The dicts below mirror the attribute groups of sbe.xsd: each maps an
# attribute name to its parsing rules (python type, default value, regex
# pattern, value map, minimum, required/optional use, XML attribute name).
SEMANTIC_ATTRIBUTES = {
    "semanticType": {"type": str, "use": "optional"},
    "description": {"type": str, "use": "optional"},
}
VERSION_ATTRIBUTES = {
    "sinceVersion": {"type": int, "default": 0, "minimumValue": 0, "use": "optional"},
    # deprecated is itself deprecated in RC4
    "deprecated": {"type": int, "minimumValue": 0, "use": "optional"},
}
ALIGNMENT_ATTRIBUTES = {"offset": {"type": int, "minimumValue": 0, "use": "optional"}}
PRESENCE_ATTRIBUTES = {
    "presence": {"type": str, "default": "required", "map": PRESENCE_MAP},
    "valueRef": {"type": str, "use": "optional", "pattern": QUALIFIED_NAME_RE},
}
TYPE_ATTRIBUTES = {
    "name": {"type": str, "pattern": SYMBOLIC_NAME_RE},
    "primitiveType": {"type": str, "map": TYPE_PRIMITIVE_TYPE_MAP},
    "nullValue": {"type": str, "use": "optional"},
    "minValue": {"type": str, "use": "optional"},
    "maxValue": {"type": str, "use": "optional"},
    "characterEncoding": {"type": str, "use": "optional"},
    "length": {"type": int, "minimumValue": 0, "use": "optional", "default": 1},
}
ENUM_ATTRIBUTES = {"encodingType": {"type": str, "pattern": SYMBOLIC_NAME_RE}}
REF_ATTRIBUTES = {"type": {"type": str}}
# "id" attributes are exposed under python-friendly names via attribute_name.
MESSAGE_ATTRIBUTES = {
    "blockLength": {"type": int, "use": "optional"},
    "message_id": {"type": int, "attribute_name": "id"},
}
FIELD_ATTRIBUTES = {
    "field_id": {"type": int, "attribute_name": "id"},
    "field_type": {"type": str, "pattern": SYMBOLIC_NAME_RE, "attribute_name": "type"},
}
GROUP_ATTRIBUTES = {
    "group_id": {"type": int, "attribute_name": "id"},
    "dimensionType": {"type": str, "pattern": SYMBOLIC_NAME_RE, "use": "optional"},
}
# Union of every attribute group; BaseParser.parse_common_attributes looks
# rules up here by attribute name.
ALL_ATTRIBUTES_MAP = {
    **SEMANTIC_ATTRIBUTES,
    **VERSION_ATTRIBUTES,
    **ALIGNMENT_ATTRIBUTES,
    **PRESENCE_ATTRIBUTES,
    **TYPE_ATTRIBUTES,
    **ENUM_ATTRIBUTES,
    **REF_ATTRIBUTES,
    **MESSAGE_ATTRIBUTES,
    **FIELD_ATTRIBUTES,
    **GROUP_ATTRIBUTES,
}
# Per-element attribute name lists: which attributes each XML element takes.
TYPE_ATTRIBUTES_LIST = list(SEMANTIC_ATTRIBUTES) + list(VERSION_ATTRIBUTES) + list(
    ALIGNMENT_ATTRIBUTES
) + list(
    PRESENCE_ATTRIBUTES
) + list(
    TYPE_ATTRIBUTES
)
COMPOSITE_ATTRIBUTES_LIST = ["name"] + list(SEMANTIC_ATTRIBUTES) + list(
    ALIGNMENT_ATTRIBUTES
) + list(
    VERSION_ATTRIBUTES
)
ENUM_ATTRIBUTES_LIST = ["name"] + list(ENUM_ATTRIBUTES) + list(
    ALIGNMENT_ATTRIBUTES
) + list(
    SEMANTIC_ATTRIBUTES
) + list(
    VERSION_ATTRIBUTES
)
ENUM_VALID_VALUES_ATTRIBUTES_LIST = (
    "name", "description", "sinceVersion", "deprecated"
)
REF_ATTRIBUTES_LIST = ("name", "type", "offset", "sinceVersion", "deprecated")
SET_ATTRIBUTES_LIST = (
    "name", "description", "encodingType", "sinceVersion", "deprecated", "offset"
)
SET_CHOICE_ATTRIBUTES_LIST = ("name", "description", "sinceVersion", "deprecated")
# Child element tags allowed inside a <composite>.
VALID_COMPOSITE_CHILD_ELEMENTS = ("type", "enum", "set", "composite", "ref")
MESSAGE_ATTRIBUTES_LIST = (
    "name",
    "message_id",
    "description",
    "blockLength",
    "semanticType",
    "sinceVersion",
    "deprecated",
)
FIELD_ATTRIBUTES_LIST = (
    "name",
    "field_id",
    "field_type",
    "description",
    "offset",
    "presence",
    "valueRef",
    "sinceVersion",
    "deprecated",
)
GROUP_ATTRIBUTES_LIST = (
    "name",
    "group_id",
    "description",
    "blockLength",
    "semanticType",
    "sinceVersion",
    "deprecated",
    "dimensionType",
)
# Sentinel distinguishing "attribute absent" from legitimate falsy values.
MISSING = object()
class BaseParser:
    """Shared attribute-parsing functionality for the SBE XML parsers."""
    # Namespace map used for ElementTree find()/findall() lookups.
    NS = {"sbe": SBE_NS}
    def parse_common_attributes(self, element, attributes):
        """Parse *attributes* from *element* and return them as a dict.

        Each name in *attributes* is looked up in ALL_ATTRIBUTES_MAP to get
        its parsing rules (python type, default, minimum value, regex
        pattern, value map, optional/required use, XML attribute name).
        Raises ValueError when a required attribute is missing or a value
        fails validation.
        """
        result_attributes = {}
        for attribute in attributes:
            attrib_info = ALL_ATTRIBUTES_MAP[attribute]
            # Use the declared default when the attribute is absent.
            if attrib_info.get("default", MISSING) is not MISSING:
                default_value = attrib_info["default"]
            else:
                default_value = MISSING
            # Some attributes are stored under a different XML name
            # (e.g. "message_id" -> XML attribute "id").
            attribute_name = attrib_info.get("attribute_name", attribute)
            value = element.attrib.get(attribute_name, default_value)
            if value is MISSING or value == "":
                if attrib_info.get("use") == "optional":
                    continue
                else:
                    raise ValueError(
                        f"element {element.tag} missing required "
                        f"attribute {attribute_name}"
                    )
            # Coerce the raw string to the declared python type.
            if attrib_info.get("type"):
                try:
                    value = attrib_info["type"](value)
                except ValueError as exc:
                    raise ValueError(
                        f"element {element.tag} invalid value "
                        f"{repr(value)} for attribute {attribute_name}"
                    ) from exc
            if attrib_info.get("minimumValue"):
                if value < attrib_info["minimumValue"]:
                    raise ValueError(
                        f"element {element.tag} invalid value {repr(value)}"
                        f" for attribute {attribute_name},"
                        "less than allowed minimum "
                        f"{repr(attrib_info['minimumValue'])}"
                    )
            if attrib_info.get("pattern"):
                if not attrib_info["pattern"].match(value):
                    raise ValueError(
                        f"element {element.tag} invalid value {repr(value)} "
                        f"for attribute {attribute_name},"
                        "does not match expected pattern "
                        f"{repr(attrib_info['pattern'])}"
                    )
            # Map enumerated string values to their target representation.
            if attrib_info.get("map"):
                try:
                    value = attrib_info["map"][value]
                except (KeyError, IndexError) as exc:
                    raise ValueError(
                        f"element {element.tag} invalid value {repr(value)} "
                        f"for attribute {attribute_name}"
                        f", must be one of {repr(attrib_info['map'].keys())}"
                    ) from exc
            # Optionally store the value under a renamed key.
            if attrib_info.get("rename"):
                attribute = attrib_info["rename"]
            result_attributes[attribute] = value
        return result_attributes
class SBESpecParser(BaseParser):
    """Top-level parser for SBE message-schema XML documents."""
    def __init__(self):
        pass
    def parseFile(self, file_or_object):
        """Parse *file_or_object* (path or file-like) into a MessageSchema.

        Raises ValueError if the document root is not sbe:messageSchema.
        """
        root = etree.parse(file_or_object)
        element_name = "{%s}messageSchema" % SBE_NS
        # for some reason root.find('sbe:messageSchema') returns None
        # work around that
        messageSchema_element = root.getroot()
        if messageSchema_element.tag != element_name:
            # Bug fix: the second string literal was missing its f-prefix,
            # so "{repr(...)}" was emitted verbatim instead of interpolated.
            raise ValueError(
                f"root element is not sbe:messageSchema,"
                f" found {repr(messageSchema_element)} instead"
            )
        return self.processSchema(messageSchema_element)
    def processSchema(self, messageSchema_element):
        """Process XML elements beginning with the root messageSchema element.

        Builds the MessageSchema from root attributes, then delegates the
        <types> sections to TypesParser and sbe:message elements to
        MessageParser.
        """
        attrib = messageSchema_element.attrib
        version = parse_version(attrib.get("version"))
        # byteOrder and headerType fall back to the spec defaults.
        byteOrder = parse_byteOrder(attrib.get("byteOrder") or "littleEndian")
        package = parse_optionalString(attrib.get("package"))
        semanticVersion = parse_optionalString(attrib.get("semanticVersion"))
        description = parse_optionalString(attrib.get("description"))
        headerType = parse_optionalString(attrib.get("headerType") or "messageHeader")
        messageSchema = createMessageSchema(
            version=version,
            byteOrder=byteOrder,
            package=package,
            semanticVersion=semanticVersion,
            description=description,
            headerType=headerType,
        )
        types_elements = messageSchema_element.findall("types")
        types_parser = TypesParser()
        for element in types_elements:
            types_parser.parse_types(messageSchema, element)
        message_elements = messageSchema_element.findall(
            "sbe:message", namespaces=self.NS
        )
        message_parser = MessageParser()
        for element in message_elements:
            message_parser.parse_message(messageSchema, element)
        return messageSchema
class TypesParser(BaseParser):
    """Parser for the <types> sections of an SBE schema."""
    # which child elements may appear in types
    VALID_TYPES_ELEMENTS = ("type", "composite", "enum", "set")
    def parse_types(self, messageSchema, element):
        """Parse one <types> element (may be repeated in the document).

        Dispatches each child to the matching parse_types_<tag> method;
        raises ValueError for unknown child tags.
        """
        for child_element in element:
            if child_element.tag not in self.VALID_TYPES_ELEMENTS:
                raise ValueError(
                    f"invalid types child element {repr(child_element.tag)}"
                )
            parser = getattr(self, f"parse_types_{child_element.tag}", None)
            if not parser:
                raise RuntimeError(
                    f"unsupported types parser {repr(child_element.tag)}"
                )
            parser(messageSchema, child_element)
    def parse_types_type(self, parent: TypeCollection, element):
        """Parse a types/type element and add it to *parent*."""
        attributes = self.parse_common_attributes(
            element, attributes=TYPE_ATTRIBUTES_LIST
        )
        sbe_type = createType(**attributes)
        parent.addType(sbe_type)
    def parse_types_ref(self, parent: TypeCollection, element):
        """Parse a composite/ref element; the referenced type must exist."""
        attributes = self.parse_common_attributes(
            element, attributes=REF_ATTRIBUTES_LIST
        )
        sbe_ref = createRef(**attributes)
        # A ref must resolve against a previously declared type name.
        reference_type = parent.lookupName(sbe_ref.type)
        if not reference_type:
            raise UnknownReference(
                f"composite {parent.name} ref {sbe_ref.name}"
                f" references unknown encodingType {sbe_ref.type}"
            )
        parent.addType(sbe_ref)
    def parse_types_composite(self, parent: TypeCollection, element):
        """Parse a types/composite element and its child type definitions."""
        attributes = self.parse_common_attributes(
            element, attributes=COMPOSITE_ATTRIBUTES_LIST
        )
        sbe_composite = createComposite(**attributes)
        parent.addType(sbe_composite)
        # now iterate over composite children
        for child_element in element:
            tag = child_element.tag
            if tag not in VALID_COMPOSITE_CHILD_ELEMENTS:
                raise ValueError(
                    f"invalid child element {repr(tag)} in "
                    f"composite element {repr(sbe_composite.name)}"
                )
            # Children are parsed with the composite as their parent, so
            # nested types are registered on the composite itself.
            parser = getattr(self, f"parse_types_{tag}", None)
            if not parser:
                raise RuntimeError(
                    f"unsupported types parser {repr(child_element.tag)}"
                )
            parser(sbe_composite, child_element)
    def parse_types_set(self, parent: TypeCollection, element):
        """Parse a types/set element and its <choice> children."""
        attributes = self.parse_common_attributes(
            element, attributes=SET_ATTRIBUTES_LIST
        )
        sbe_set = createSet(**attributes)
        parent.addType(sbe_set)
        for child_element in element.findall("choice"):
            choice = self.parse_set_choice(sbe_set=sbe_set, element=child_element)
            sbe_set.addChoice(choice)
    def parse_set_choice(self, sbe_set, element):
        """Parse and return one set <choice>; its text must parse as int."""
        attributes = self.parse_common_attributes(
            element, attributes=SET_CHOICE_ATTRIBUTES_LIST
        )
        value = element.text
        try:
            value = int(element.text)
        except ValueError as exc:
            raise ValueError(
                f"invalid value for set {sbe_set.name} choice "
                f"{attributes.get('name')}"
            ) from exc
        choice = createChoice(value=value, **attributes)
        return choice
    def parse_types_enum(self, parent: TypeCollection, element):
        """Parse a types/enum element and its <validValue> children."""
        attributes = self.parse_common_attributes(
            element, attributes=ENUM_ATTRIBUTES_LIST
        )
        sbe_enum = createEnum(**attributes)
        parent.addType(sbe_enum)
        for child_element in element.findall("validValue"):
            valid_value = self.parse_enum_valid_value(
                sbe_enum=sbe_enum, element=child_element
            )
            sbe_enum.addValidValue(valid_value)
    def parse_enum_valid_value(self, sbe_enum, element):
        """Parse and return one enum <validValue>.

        The value is taken verbatim from the element text (kept as str).
        """
        attributes = self.parse_common_attributes(
            element, attributes=ENUM_VALID_VALUES_ATTRIBUTES_LIST
        )
        value = element.text
        enum_valid_value = createValidValue(value=value, **attributes)
        return enum_valid_value
class MessageParser(BaseParser):
    """Parser for sbe:message definitions and their field/group children."""
    # which child elements may appear in message
    # NOTE(review): "data" is accepted here but no parse_message_data method
    # exists, so a <data> child currently raises RuntimeError — confirm
    # whether data elements are meant to be supported.
    VALID_MESSAGE_TYPES = ("field", "group", "data")
    def parse_message(self, messageSchema, element):
        """Parse one sbe:message element (may be repeated) into the schema."""
        attributes = self.parse_common_attributes(
            element, attributes=MESSAGE_ATTRIBUTES_LIST
        )
        message = createMessage(**attributes)
        messageSchema.addMessage(message)
        self.parse_field_children(messageSchema, message, element)
    def parse_field_children(self, messageSchema, parent: FieldCollection, element):
        """Parse child elements (field/group/data) into a FieldCollection.

        Shared by messages and nested groups; dispatches each child to the
        matching parse_message_<tag> method.
        """
        for child_element in element:
            if child_element.tag not in self.VALID_MESSAGE_TYPES:
                raise ValueError(
                    f"invalid message/group child element {repr(child_element.tag)}"
                )
            parser = getattr(self, f"parse_message_{child_element.tag}", None)
            if not parser:
                raise RuntimeError(
                    f"unsupported message parser {repr(child_element.tag)}"
                )
            parser(messageSchema, parent, child_element)
    def parse_message_field(
        self, messageSchema, parent: FieldCollection, element
    ) -> None:
        """Parse a <field> element, validate it, and add it to *parent*."""
        attributes = self.parse_common_attributes(
            element, attributes=FIELD_ATTRIBUTES_LIST
        )
        field = createField(**attributes)
        field.validate(messageSchema)
        parent.addField(field)
    def parse_message_group(
        self, messageSchema, parent: FieldCollection, element
    ) -> None:
        """Parse a repeating <group> element and recurse into its children."""
        attributes = self.parse_common_attributes(
            element, attributes=GROUP_ATTRIBUTES_LIST
        )
        group = createGroup(**attributes)
        group.validate(messageSchema)
        parent.addField(group)
        # Groups may themselves contain fields and nested groups.
        self.parse_field_children(messageSchema, group, element)
def parse_byteOrder(byteOrder):
"""convert byteOrder to enum"""
if byteOrder is None or byteOrder == "":
return None
value = STRING_ENUM_MAP.get(byteOrder)
if value is None:
raise ValueError(
f"invalid byteOrder {repr(value)},"
"expected one of {SBE_STRING_ENUM_MAP.keys()}"
)
return value
def parse_version(version):
"""convert version to int"""
if version is None:
raise ValueError("sbe:messageSchema/@version is required")
return int(version)
def parse_optionalString(value):
"""parse an optional string"""
if not value:
return None
return value
|
Python
|
Apache-2.0
|
bkc/pysbe/pysbe/parser/fix_parser.py
|
4b36b02b-9d06-4dbb-bca3-06e5b0207207
|
[]
|
[]
|
/**
* \file main.cpp
* \brief An example and benchmark of AmgX and PETSc with Poisson system.
*
* The Poisson equation we solve here is
* \nabla^2 u(x, y) = -8\pi^2 \cos{2\pi x} \cos{2\pi y}
* for 2D. And
* \nabla^2 u(x, y, z) = -12\pi^2 \cos{2\pi x} \cos{2\pi y} \cos{2\pi z}
* for 3D.
*
* The exact solutions are
* u(x, y) = \cos{2\pi x} \cos{2\pi y}
* for 2D. And
* u(x, y, z) = \cos{2\pi x} \cos{2\pi y} \cos{2\pi z}
* for 3D.
*
* \author Pi-Yueh Chuang (pychuang@gwu.edu)
* \date 2017-06-26
*/
// PETSc
# include <petsctime.h>
# include <petscsys.h>
# include <petscmat.h>
# include <petscvec.h>
# include <petscksp.h>
// headers
# include "helper.h"
// constants
# define Nx -100
# define Ny -100
# define Nz -100
int main(int argc, char **argv)
{
PetscErrorCode ierr; // error codes returned by PETSc routines
DM da; // DM object
DMDALocalInfo info; // partitioning info
Vec lhs, // left hand side
rhs, // right hand side
exact; // exact solution
Mat A; // coefficient matrix
KSP ksp; // PETSc KSP solver instance
KSPConvergedReason reason; // KSP convergence/divergence reason
PetscInt Niters; // iterations used to converge
PetscReal res, // final residual
Linf; // maximum norm
PetscLogDouble start, // time at the begining
initSys, // time after init the sys
initSolver, // time after init the solver
solve; // time after solve
char config[PETSC_MAX_PATH_LEN]; // config file name
// initialize MPI and PETSc
ierr = MPI_Init(&argc, &argv); CHKERRQ(ierr);
ierr = PetscInitialize(&argc, &argv, nullptr, nullptr); CHKERRQ(ierr);
// allow PETSc to read run-time options from a file
ierr = PetscOptionsGetString(nullptr, nullptr, "-config",
config, PETSC_MAX_PATH_LEN, nullptr); CHKERRQ(ierr);
ierr = PetscOptionsInsertFile(PETSC_COMM_WORLD,
nullptr, config, PETSC_FALSE); CHKERRQ(ierr);
// get time
ierr = PetscTime(&start); CHKERRQ(ierr);
// prepare the linear system
ierr = createSystem(Nx, Ny, Nz, da, A, lhs, rhs, exact); CHKERRQ(ierr);
// get system info
ierr = DMDAGetLocalInfo(da, &info); CHKERRQ(ierr);
// get time
ierr = PetscTime(&initSys); CHKERRQ(ierr);
// create a solver
ierr = KSPCreate(PETSC_COMM_WORLD, &ksp); CHKERRQ(ierr);
ierr = KSPSetOperators(ksp, A, A); CHKERRQ(ierr);
ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr);
ierr = KSPSetReusePreconditioner(ksp, PETSC_TRUE); CHKERRQ(ierr);
ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
ierr = KSPSetUp(ksp); CHKERRQ(ierr);
// get time
ierr = PetscTime(&initSolver); CHKERRQ(ierr);
// solve the system
ierr = KSPSolve(ksp, rhs, lhs); CHKERRQ(ierr);
// get time
ierr = PetscTime(&solve); CHKERRQ(ierr);
// check if the solver converged
ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr);
if (reason < 0) SETERRQ1(PETSC_COMM_WORLD,
PETSC_ERR_CONV_FAILED, "Diverger reason: %d\n", reason);
// get the number of iterations
ierr = KSPGetIterationNumber(ksp, &Niters); CHKERRQ(ierr);
// get the L2 norm of final residual
ierr = KSPGetResidualNorm(ksp, &res);
// calculate error norm (maximum norm)
ierr = VecAXPY(lhs, -1.0, exact); CHKERRQ(ierr);
ierr = VecNorm(lhs, NORM_INFINITY, &Linf); CHKERRQ(ierr);
// print result
ierr = PetscPrintf(PETSC_COMM_WORLD,
"[Nx, Ny, Nz]: [%d, %d, %d]\n" "Number of iterations: %d\n"
"L2 norm of final residual: %f\n" "Maximum norm of error: %f\n"
"Time [init, create solver, solve]: [%f, %f, %f]\n",
info.mx, info.my, info.mz, Niters, res, Linf,
initSys-start, initSolver-initSys, solve-initSolver); CHKERRQ(ierr);
// destroy KSP solver
ierr = KSPDestroy(&ksp); CHKERRQ(ierr);
// destroy the linear system
ierr = destroySystem(da, A, lhs, rhs, exact); CHKERRQ(ierr);
// finalize PETSc and MPI
ierr = PetscFinalize(); CHKERRQ(ierr);
ierr = MPI_Finalize(); CHKERRQ(ierr);
return ierr;
}
|
C++
|
MIT
|
olcf/PETSC-OpenACC/src/main_ksp.cpp
|
ee168477-9f11-4c9a-9817-44cd84af0a04
|
[{"tag": "NAME", "value": "Pi-Yueh Chuang", "start": 479, "end": 493, "context": "cos{2\\pi y} \\cos{2\\pi z}\n * for 3D.\n *\n * \\author Pi-Yueh Chuang (pychuang@gwu.edu)\n * \\date 2017-06-26\n */\n\n\n// P"}, {"tag": "EMAIL", "value": "pychuang@gwu.edu", "start": 495, "end": 511, "context": "{2\\pi z}\n * for 3D.\n *\n * \\author Pi-Yueh Chuang (pychuang@gwu.edu)\n * \\date 2017-06-26\n */\n\n\n// PETSc\n# include <pe"}]
|
[{"tag": "NAME", "value": "Pi-Yueh Chuang", "start": 479, "end": 493, "context": "cos{2\\pi y} \\cos{2\\pi z}\n * for 3D.\n *\n * \\author Pi-Yueh Chuang (pychuang@gwu.edu)\n * \\date 2017-06-26\n */\n\n\n// P"}, {"tag": "EMAIL", "value": "pychuang@gwu.edu", "start": 495, "end": 511, "context": "{2\\pi z}\n * for 3D.\n *\n * \\author Pi-Yueh Chuang (pychuang@gwu.edu)\n * \\date 2017-06-26\n */\n\n\n// PETSc\n# include <pe"}]
|
<?php
/*
* Copyright (C) 2015-2017 Deciso B.V.
* Copyright (C) 2015 Jos Schellevis
* Copyright (C) 2017 Fabian Franz
* Copyright (C) 2017 Michael Muenz <m.muenz@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
* OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
namespace OPNsense\Quagga\Api;
use \OPNsense\Quagga\OSPF6;
use \OPNsense\Core\Config;
use \OPNsense\Base\ApiMutableModelControllerBase;
use \OPNsense\Base\UIModelGrid;
class Ospf6settingsController extends ApiMutableModelControllerBase
{
protected static $internalModelName = 'ospf6';
protected static $internalModelClass = '\OPNsense\Quagga\OSPF6';
public function searchInterfaceAction()
{
return $this->searchBase('interfaces.interface', array("enabled", "interfacename", "area", "networktype"));
}
public function getInterfaceAction($uuid = null)
{
$this->sessionClose();
return $this->getBase('interface', 'interfaces.interface', $uuid);
}
public function addInterfaceAction()
{
return $this->addBase('interface', 'interfaces.interface');
}
public function delInterfaceAction($uuid)
{
return $this->delBase('interfaces.interface', $uuid);
}
public function setInterfaceAction($uuid)
{
return $this->setBase('interface', 'interfaces.interface', $uuid);
}
public function toggleInterfaceAction($uuid)
{
return $this->toggleBase('interfaces.interface', $uuid);
}
}
|
PHP
|
BSD-2-Clause
|
Northguy/plugins/net/frr/src/opnsense/mvc/app/controllers/OPNsense/Quagga/Api/Ospf6settingsController.php
|
f73588b0-5a65-4cfe-a641-af0518a2b121
|
[{"tag": "NAME", "value": "Fabian Franz", "start": 117, "end": 129, "context": " (C) 2015 Jos Schellevis\n * Copyright (C) 2017 Fabian Franz\n * Copyright (C) 2017 Michael Muenz <m.muenz@g"}, {"tag": "NAME", "value": "Jos Schellevis", "start": 77, "end": 91, "context": "C) 2015-2017 Deciso B.V.\n * Copyright (C) 2015 Jos Schellevis\n * Copyright (C) 2017 Fabian Franz\n * Copyr"}, {"tag": "EMAIL", "value": "m.muenz@gmail.com", "start": 170, "end": 187, "context": "ian Franz\n * Copyright (C) 2017 Michael Muenz <m.muenz@gmail.com>\n * All rights reserved.\n *\n * Redistributi"}, {"tag": "NAME", "value": "Michael Muenz", "start": 155, "end": 168, "context": "ht (C) 2017 Fabian Franz\n * Copyright (C) 2017 Michael Muenz <m.muenz@gmail.com>\n * All rights reserved.\n *"}]
|
[{"tag": "NAME", "value": "Fabian Franz", "start": 117, "end": 129, "context": " (C) 2015 Jos Schellevis\n * Copyright (C) 2017 Fabian Franz\n * Copyright (C) 2017 Michael Muenz <m.muenz@g"}, {"tag": "NAME", "value": "Jos Schellevis", "start": 77, "end": 91, "context": "C) 2015-2017 Deciso B.V.\n * Copyright (C) 2015 Jos Schellevis\n * Copyright (C) 2017 Fabian Franz\n * Copyr"}, {"tag": "EMAIL", "value": "m.muenz@gmail.com", "start": 170, "end": 187, "context": "ian Franz\n * Copyright (C) 2017 Michael Muenz <m.muenz@gmail.com>\n * All rights reserved.\n *\n * Redistributi"}, {"tag": "NAME", "value": "Michael Muenz", "start": 155, "end": 168, "context": "ht (C) 2017 Fabian Franz\n * Copyright (C) 2017 Michael Muenz <m.muenz@gmail.com>\n * All rights reserved.\n *"}]
|
<?php
namespace App\Notifications;
use Illuminate\Bus\Queueable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Notifications\Messages\MailMessage;
use Illuminate\Notifications\Notification;
class EnvoiEmailAvisCreatedNotification extends Notification
{
use Queueable;
public $avis;
/**
* Create a new notification instance.
*
* @return void
*/
public function __construct($avis)
{
$this->avis = $avis ;
}
/**
* Get the notification's delivery channels.
*
* @param mixed $notifiable
* @return array
*/
public function via($notifiable)
{
return ['mail'];
}
/**
* Get the mail representation of the notification.
*
* @param mixed $notifiable
* @return \Illuminate\Notifications\Messages\MailMessage
*/
public function toMail($notifiable)
{
$first_name = ($this->avis->first_name_client != Null) ? $this->avis->first_name_client : $this->avis->user->getFirstNameClient() ;
return (new MailMessage)
->subject('Salut '. $first_name . '!')
->markdown('mail.avis.avis', [
'lien' => $this->avis->id,
'user' => $this->avis->user_id,
'first_name' => $first_name,
]);
/* Mail::send('mail.avis.avis', ['user' => $first_name], function ($m) use ($user) {
$m->from('hello@app.com', 'Your Application');
$m->to($user->user_email, $user->nicename)->subject('Salut ' . $first_name);
});
return (new MailMessage)
->line('The introduction to the notification.')
->action('Notification Action', url('/'))
->line('Thank you for using our application!');*/
}
/**
* Get the array representation of the notification.
*
* @param mixed $notifiable
* @return array
*/
public function toArray($notifiable)
{
return [
//
];
}
}
|
PHP
|
MIT
|
clubcouleurs/mglab/app/Notifications/EnvoiEmailAvisCreatedNotification.php
|
6f925f81-9c28-487e-922d-0dc6f72169d2
|
[]
|
[]
|
package org.hisp.dhis.program;
/*
* Copyright (c) 2004-2019, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import org.hisp.dhis.common.AuditType;
/**
* @author Abyot Asalefew Gizaw <abyota@gmail.com>
*
*/
public class ProgramInstanceAuditQueryParams
{
/**
* Program instances to fetch audits for
*/
private Set<ProgramInstance> programInstances = new HashSet<>();
/**
* Programs to fetch audits for
*/
private Set<Program> programs = new HashSet<>();
/**
* Users to fetch audits for
*/
private Set<String> users = new HashSet<>();
/**
* AuditType to fetch for
*/
private AuditType auditType;
/**
* Starting date.
*/
private Date startDate = null;
/**
* Ending date.
*/
private Date endDate = null;
/**
* Program instance audit count start
*/
private int first;
/**
* Program instance audit count end
*/
private int max;
/**
* Program instance audit skip paging or not
*/
private boolean skipPaging;
// -------------------------------------------------------------------------
// Constructors
// -------------------------------------------------------------------------
public ProgramInstanceAuditQueryParams()
{
}
// -------------------------------------------------------------------------
// Logic
// -------------------------------------------------------------------------
public boolean hasProgramInstances()
{
return programInstances != null && !programInstances.isEmpty();
}
public boolean hasPrograms()
{
return programs != null && !programs.isEmpty();
}
public boolean hasUsers()
{
return users != null && !users.isEmpty();
}
public boolean hasAuditType()
{
return auditType != null;
}
public boolean hasStartDate()
{
return startDate != null;
}
public boolean hasEndDate()
{
return endDate != null;
}
// -------------------------------------------------------------------------
// Getters and setters
// -------------------------------------------------------------------------
public Set<ProgramInstance> getProgramInstances()
{
return programInstances;
}
public void setProgramInstances( Set<ProgramInstance> programInstances )
{
this.programInstances = programInstances;
}
public Set<Program> getPrograms()
{
return programs;
}
public void setPrograms( Set<Program> programs )
{
this.programs = programs;
}
public Set<String> getUsers()
{
return users;
}
public void setUsers( Set<String> users )
{
this.users = users;
}
public AuditType getAuditType()
{
return auditType;
}
public void setAuditType( AuditType auditType )
{
this.auditType = auditType;
}
public Date getStartDate()
{
return startDate;
}
public void setStartDate( Date startDate )
{
this.startDate = startDate;
}
public Date getEndDate()
{
return endDate;
}
public void setEndDate( Date endDate )
{
this.endDate = endDate;
}
public int getFirst()
{
return first;
}
public void setFirst( int first )
{
this.first = first;
}
public int getMax()
{
return max;
}
public void setMax( int max )
{
this.max = max;
}
public boolean isSkipPaging()
{
return skipPaging;
}
public void setSkipPaging( boolean skipPaging )
{
this.skipPaging = skipPaging;
}
}
|
Java
|
BSD-3-Clause
|
SivHMA/dhis2-core/dhis-2/dhis-api/src/main/java/org/hisp/dhis/program/ProgramInstanceAuditQueryParams.java
|
49c1c0c8-38d1-443e-bd8b-c56ae16d4fcc
|
[{"tag": "EMAIL", "value": "abyota@gmail.com", "start": 1739, "end": 1755, "context": ".AuditType;\n\n/**\n * @author Abyot Asalefew Gizaw <abyota@gmail.com>\n *\n */\npublic class ProgramInstanceAuditQueryPar"}]
|
[{"tag": "EMAIL", "value": "abyota@gmail.com", "start": 1739, "end": 1755, "context": ".AuditType;\n\n/**\n * @author Abyot Asalefew Gizaw <abyota@gmail.com>\n *\n */\npublic class ProgramInstanceAuditQueryPar"}]
|
/*
* spurtcommerce API
* version 2.1
* http://api.spurtcommerce.com
*
* Copyright (c) 2019 piccosoft ltd
* Author piccosoft ltd <support@piccosoft.com>
* Licensed under the MIT license.
*/
import { EntityRepository, Repository } from 'typeorm';
import { Country } from '../models/country';
@EntityRepository(Country)
export class CountryRepository extends Repository<Country> {
}
|
TypeScript
|
MIT
|
VM8198/e-commerce/src/api/repositories/countryRepository.ts
|
3163dfb3-4f5a-420c-94b5-96dacc9190b1
|
[]
|
[]
|
---
layout: post
title: "Yoga Fitness Starts the New Year with Two New Classes"
permalink: /archives/2014/12/yoga_fitness_starts_the_new_year_with_two_new_clas.html
commentfile: 2014-12-13-yoga_fitness_starts_the_new_year_with_two_new_clas
category: around_town
date: 2014-12-13 09:33:56
image: "/assets/images/2014/yoga_fitness_201412_thumb.jpg"
excerpt: |
The most wonderful thing about yoga is that 'Yoga is for everyone', virtually anyone can practice it, with a modified practice to suit you. Forget visions of Cirque du Soleil type students bending around in your local community hall, yoga is for people of all shapes, sizes, ages and abilities.
---
<a href="/assets/images/2014/yoga_fitness_201412.jpg" title="See larger version of - yoga fitness 201412"><img src="/assets/images/2014/yoga_fitness_201412_thumb.jpg" width="250" height="187" alt="yoga fitness 201412" class="photo right" /></a>
*5 good reasons to start 'Complete Beginners' yoga in 2015*
If you are interested in trying out yoga, but are intimidated by the thought of exotic bends and legs wrapped behind heads, then read on...
The most wonderful thing about yoga is that 'Yoga is for everyone', virtually anyone can practice it, with a modified practice to suit you. Forget visions of Cirque du Soleil type students bending around in your local community hall, yoga is for people of all shapes, sizes, ages and abilities.
Walking into an established yoga class however, especially when you have never done yoga before, can be really daunting, and this is why Yoga-Fitness has developed the 'Complete Beginners' Yoga Course. Only for the complete and utter beginner, (so all students in the class are the same level), this five week course will introduce you into the world of yoga via the key yoga poses (asana), basic breathing (pranayama), and relaxation techniques, all in a safe and friendly environment. The physical postures will help to improve tone, strength, flexibility and balance, whilst the breathing and relaxation techniques will help you to rejuvenate and relax.
You won't come out a fully enlightened yogi by the end of the course, but you will certainly know your way around a downward dog, know how to breathe using the full capacity of the lungs, and have a confidence you didn't have before.
If you need more encouragement to roll out your mat and get started, read below for an insight into just some of the benefits of this ancient practice, (there are so many it is impossible to list them all):
#### Some of the benefits of yoga
1. Stress relief - Yoga encourages you to relax, slow your breath, and focus on the present. By encouraging relaxation and performing the breathing exercises, yoga helps to lower the levels of the stress hormone cortisol. Related benefits include lowering blood pressure and heart rate, improving digestion and boosting the immune system as well as easing symptoms of conditions such as anxiety, depression, fatigue, asthma and insomnia
2. Flexibility - Improved flexibility is one of the most obvious benefits of yoga. During your first class you probably won't be able to touch your toes. But if you stick with it, you'll notice a gradual loosening, and eventually, seemingly impossible poses will become possible
3. Muscular and Cardiovascular Health - As a result of practicing yoga, your entire muscular system can become stronger and more elastic, and therefore thus less susceptible to injury. Standing and balancing postures strengthen and lengthen the big muscle groups, whilst floor postures strengthen the muscles that support the spine and head. Even a gentle yoga practice can provide cardiovascular benefits by lowering resting heart rate, increasing endurance and improving oxygen uptake during the class
4. Space and Balance - Regularly practicing yoga increases proprioception (the ability to feel what your body is doing and where it is in space) and improves balance. People with bad posture or dysfunctional movement patterns usually have poor proprioception, which has been linked to knee problems and back pain
5. Better breathing - Yoga also promotes breathing through the nose, which filters the air, warms it and humidifies it, removing pollen and dirt and other things you'd rather not take into your lungs. It also encourages people to take slower, deeper breaths, using the full capacity of the lungs. This can help to improve lung function and increase the amount of oxygen available to the body, nourishing the cells.
In a nutshell, yoga just makes you feel better, its effects are almost immediate and you don't often know why. When practiced regularly, yoga can increase our sense of physical health, emotional well-being and mental clarity
#### The Yoga-Fitness 'Complete Beginners Course'
Voted 'Netmums Favourite Adult Class in Richmond 2014' runs throughout 2015. See below for the courses due to start in January 2015.
- WHERE: ETNA Community Centre, East Twickenham
- WHEN: Sunday eve 8:05pm - 9:15pm on the following dates: Jan 11<sup>th</sup>, 18<sup>th</sup>, 25<sup>th</sup> AND Feb 1<sup>st</sup>, 8th
OR
- WHERE: The Training Works, 120 St Margarets Road, Twickenham, TW1 2AA
- WHEN: Thursday eve 8:05pm - 9:15pm on the following dates: Jan 8<sup>th</sup>, 15<sup>th</sup>, 22<sup>nd</sup>, 29<sup>th</sup> AND Feb 5th
### Pregnancy Yoga Classes in Twickenham and Isleworth
If you would like to give it a try, Sammie Mason will be starting a new pregnancy yoga class in Twickenham, from January 10<sup>th</sup> 2015 as well as continuing an evening class in Isleworth. See details below:
- Saturday mornings: 9:45am - 11am, Heart Twickenham, 1<sup>st</sup> Floor, 67a Holly Rd, TW1 4HF (behind M&S)
- Monday evenings: 6:30pm - 7:45pm, St Johns Church Hall, St Johns Road, Isleworth, TW7 6NY
Pregnancy yoga classes are suitable for anyone from 15 weeks of pregnancy, and no previous experience of yoga is necessary. Sammie Mason is a qualified Yoga teacher with the 'British Wheel of Yoga'.
#### For further details or to book contact Sammie:
- <contact@yoga-fitness.co.uk>
- 07758 866834
- [www.yoga-fitness.co.uk](http://www.yoga-fitness.co.uk)
|
Markdown
|
CC0-1.0
|
anthonydillon/stmgrts/_posts/2014/2014-12-13-yoga_fitness_starts_the_new_year_with_two_new_clas.md
|
5751098b-9e4c-46fd-9f4f-0c6a08bb826d
|
[{"tag": "EMAIL", "value": "contact@yoga-fitness.co.uk", "start": 6073, "end": 6099, "context": " further details or to book contact Sammie:\n\n- <contact@yoga-fitness.co.uk>\n- 07758 866834\n- [www.yoga-fitness.co.uk](ht"}]
|
[{"tag": "EMAIL", "value": "contact@yoga-fitness.co.uk", "start": 6073, "end": 6099, "context": " further details or to book contact Sammie:\n\n- <contact@yoga-fitness.co.uk>\n- 07758 866834\n- [www.yoga-fitness.co.uk](ht"}]
|
<?php
/*
* This file is part of the PHPBench package
*
* (c) Daniel Leech <daniel@dantleech.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
*/
namespace PhpBench\Tests\Unit\Progress\Logger;
use PhpBench\Model\Benchmark;
use PhpBench\Model\Iteration;
use PhpBench\Model\ParameterSet;
use PhpBench\Model\Subject;
use PhpBench\Model\Variant;
use PhpBench\Progress\Logger\HistogramLogger;
use PhpBench\Tests\Util\TestUtil;
use PhpBench\Util\TimeUnit;
use PHPUnit\Framework\TestCase;
use Symfony\Component\Console\Output\BufferedOutput;
class HistogramLoggerTest extends TestCase
{
public function setUp()
{
$this->output = new BufferedOutput();
$this->timeUnit = new TimeUnit(TimeUnit::MICROSECONDS, TimeUnit::MILLISECONDS);
$this->logger = new HistogramLogger($this->timeUnit);
$this->logger->setOutput($this->output);
$this->benchmark = $this->prophesize(Benchmark::class);
$this->subject = $this->prophesize(Subject::class);
$this->iteration = $this->prophesize(Iteration::class);
$this->variant = new Variant(
$this->subject->reveal(),
new ParameterSet(),
1,
0
);
$this->variant->spawnIterations(4);
$this->benchmark->getSubjects()->willReturn([
$this->subject->reveal(),
]);
$this->benchmark->getClass()->willReturn('BenchmarkTest');
$this->subject->getName()->willReturn('benchSubject');
$this->subject->getIndex()->willReturn(1);
$this->subject->getOutputTimeUnit()->willReturn('milliseconds');
$this->subject->getOutputMode()->willReturn('time');
$this->subject->getRetryThreshold()->willReturn(10);
$this->subject->getOutputTimePrecision()->willReturn(3);
}
/**
* It should show the benchmark name and list all of the subjects.
*/
public function testBenchmarkStart()
{
$this->logger->benchmarkStart($this->benchmark->reveal());
$display = $this->output->fetch();
$this->assertContains('BenchmarkTest', $display);
$this->assertContains('#1 benchSubject', $display);
}
/**
* Test iteration start.
*/
public function testIterationStart()
{
$this->iteration->getIndex()->willReturn(1);
$this->iteration->getVariant()->willReturn($this->variant);
$this->logger->iterationStart($this->iteration->reveal());
$display = $this->output->fetch();
$this->assertContains('it 1/4', $display);
}
/**
* It should show information at the start of the variant.
*/
public function testIterationsStart()
{
$this->logger->variantStart($this->variant);
$display = $this->output->fetch();
$this->assertContains(
'1 (σ = 0.000ms ) -2σ [ ] +2σ',
$display
);
$this->assertContains(
'benchSubject',
$display
);
$this->assertContains(
'parameters []',
$display
);
}
/**
* It should show an error if the iteration has an exception.
*/
public function testIterationException()
{
$this->variant->setException(new \Exception('foo'));
$this->logger->variantEnd($this->variant);
$this->assertContains('ERROR', $this->output->fetch());
}
/**
* It should show the histogram and statistics when an iteration is
* completed (and there were no rejections).
*/
public function testIterationEnd()
{
foreach ($this->variant as $iteration) {
foreach (TestUtil::createResults(10, 10) as $result) {
$iteration->setResult($result);
}
}
$this->variant->computeStats();
$this->logger->variantEnd($this->variant);
$display = $this->output->fetch();
$this->assertContains(
'1 (σ = 0.000ms ) -2σ [ █ ] +2σ [μ Mo]/r: 0.010 0.010 μRSD/r: 0.00%',
$display
);
}
}
|
PHP
|
MIT
|
DQNEO/phpbench/tests/Unit/Progress/Logger/HistogramLoggerTest.php
|
98d05a07-9c54-40e1-a67c-8d7a70ed22e9
|
[{"tag": "EMAIL", "value": "daniel@dantleech.com", "start": 79, "end": 99, "context": "t of the PHPBench package\n *\n * (c) Daniel Leech <daniel@dantleech.com>\n *\n * For the full copyright and license informa"}, {"tag": "NAME", "value": "Daniel Leech", "start": 65, "end": 77, "context": "is file is part of the PHPBench package\n *\n * (c) Daniel Leech <daniel@dantleech.com>\n *\n * For the full copyrig"}]
|
[{"tag": "EMAIL", "value": "daniel@dantleech.com", "start": 79, "end": 99, "context": "t of the PHPBench package\n *\n * (c) Daniel Leech <daniel@dantleech.com>\n *\n * For the full copyright and license informa"}, {"tag": "NAME", "value": "Daniel Leech", "start": 65, "end": 77, "context": "is file is part of the PHPBench package\n *\n * (c) Daniel Leech <daniel@dantleech.com>\n *\n * For the full copyrig"}]
|
// This file is part of libigl, a simple c++ geometry processing library.
//
// Copyright (C) 2018 Zhongshi Jiang <jiangzs@nyu.edu>
//
// This Source Code Form is subject to the terms of the Mozilla Public License
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
#include "mapping_energy_with_jacobians.h"
#include "polar_svd.h"
IGL_INLINE double igl::mapping_energy_with_jacobians(
const Eigen::MatrixXd &Ji,
const Eigen::VectorXd &areas,
igl::MappingEnergyType slim_energy,
double exp_factor){
double energy = 0;
if (Ji.cols() == 4)
{
Eigen::Matrix<double, 2, 2> ji;
for (int i = 0; i < Ji.rows(); i++)
{
ji(0, 0) = Ji(i, 0);
ji(0, 1) = Ji(i, 1);
ji(1, 0) = Ji(i, 2);
ji(1, 1) = Ji(i, 3);
typedef Eigen::Matrix<double, 2, 2> Mat2;
typedef Eigen::Matrix<double, 2, 1> Vec2;
Mat2 ri, ti, ui, vi;
Vec2 sing;
igl::polar_svd(ji, ri, ti, ui, sing, vi);
double s1 = sing(0);
double s2 = sing(1);
switch (slim_energy)
{
case igl::MappingEnergyType::ARAP:
{
energy += areas(i) * (pow(s1 - 1, 2) + pow(s2 - 1, 2));
break;
}
case igl::MappingEnergyType::SYMMETRIC_DIRICHLET:
{
energy += areas(i) * (pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2));
break;
}
case igl::MappingEnergyType::EXP_SYMMETRIC_DIRICHLET:
{
energy += areas(i) * exp(exp_factor * (pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2)));
break;
}
case igl::MappingEnergyType::LOG_ARAP:
{
energy += areas(i) * (pow(log(s1), 2) + pow(log(s2), 2));
break;
}
case igl::MappingEnergyType::CONFORMAL:
{
energy += areas(i) * ((pow(s1, 2) + pow(s2, 2)) / (2 * s1 * s2));
break;
}
case igl::MappingEnergyType::EXP_CONFORMAL:
{
energy += areas(i) * exp(exp_factor * ((pow(s1, 2) + pow(s2, 2)) / (2 * s1 * s2)));
break;
}
}
}
}
else
{
Eigen::Matrix<double, 3, 3> ji;
for (int i = 0; i < Ji.rows(); i++)
{
ji(0, 0) = Ji(i, 0);
ji(0, 1) = Ji(i, 1);
ji(0, 2) = Ji(i, 2);
ji(1, 0) = Ji(i, 3);
ji(1, 1) = Ji(i, 4);
ji(1, 2) = Ji(i, 5);
ji(2, 0) = Ji(i, 6);
ji(2, 1) = Ji(i, 7);
ji(2, 2) = Ji(i, 8);
typedef Eigen::Matrix<double, 3, 3> Mat3;
typedef Eigen::Matrix<double, 3, 1> Vec3;
Mat3 ri, ti, ui, vi;
Vec3 sing;
igl::polar_svd(ji, ri, ti, ui, sing, vi);
double s1 = sing(0);
double s2 = sing(1);
double s3 = sing(2);
switch (slim_energy)
{
case igl::MappingEnergyType::ARAP:
{
energy += areas(i) * (pow(s1 - 1, 2) + pow(s2 - 1, 2) + pow(s3 - 1, 2));
break;
}
case igl::MappingEnergyType::SYMMETRIC_DIRICHLET:
{
energy += areas(i) * (pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2) + pow(s3, 2) + pow(s3, -2));
break;
}
case igl::MappingEnergyType::EXP_SYMMETRIC_DIRICHLET:
{
energy += areas(i) * exp(exp_factor *
(pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2) + pow(s3, 2) + pow(s3, -2)));
break;
}
case igl::MappingEnergyType::LOG_ARAP:
{
energy += areas(i) * (pow(log(s1), 2) + pow(log(std::abs(s2)), 2) + pow(log(std::abs(s3)), 2));
break;
}
case igl::MappingEnergyType::CONFORMAL:
{
energy += areas(i) * ((pow(s1, 2) + pow(s2, 2) + pow(s3, 2)) / (3 * pow(s1 * s2 * s3, 2. / 3.)));
break;
}
case igl::MappingEnergyType::EXP_CONFORMAL:
{
energy += areas(i) * exp((pow(s1, 2) + pow(s2, 2) + pow(s3, 2)) / (3 * pow(s1 * s2 * s3, 2. / 3.)));
break;
}
}
}
}
return energy;
}
#ifdef IGL_STATIC_LIBRARY
// Explicit template instantiation
#endif
|
C++
|
Apache-2.0
|
AdhamJ/EngineForAnimationCourse/igl/mapping_energy_with_jacobians.cpp
|
1b53168b-8098-4b42-964a-0abce5cc0cf6
|
[{"tag": "NAME", "value": "Zhongshi Jiang", "start": 100, "end": 114, "context": "try processing library.\n// \n// Copyright (C) 2018 Zhongshi Jiang <jiangzs@nyu.edu>\n// \n// This Source Code Form is"}, {"tag": "EMAIL", "value": "jiangzs@nyu.edu", "start": 116, "end": 131, "context": "ibrary.\n// \n// Copyright (C) 2018 Zhongshi Jiang <jiangzs@nyu.edu>\n// \n// This Source Code Form is subject to the t"}]
|
[{"tag": "NAME", "value": "Zhongshi Jiang", "start": 100, "end": 114, "context": "try processing library.\n// \n// Copyright (C) 2018 Zhongshi Jiang <jiangzs@nyu.edu>\n// \n// This Source Code Form is"}, {"tag": "EMAIL", "value": "jiangzs@nyu.edu", "start": 116, "end": 131, "context": "ibrary.\n// \n// Copyright (C) 2018 Zhongshi Jiang <jiangzs@nyu.edu>\n// \n// This Source Code Form is subject to the t"}]
|
// Copyright 2015 Stefano Pogliani <stefano@spogliani.net>
#include <gtest/gtest.h>
#include "core/exceptions/base.h"
#include "core/registry/base.h"
using sf::core::exception::DuplicateInjection;
using sf::core::exception::FactoryNotFound;
using sf::core::registry::Registry;
typedef int*(*test_factory)();
typedef Registry<test_factory> TestRegistry;
// Test factory: heap-allocates an int initialized to 1.
// The caller owns (and must delete) the returned pointer.
int* make_one() {
  int* boxed = new int(1);
  return boxed;
}
// Fix: test suite name was misspelled "Regisrty"; renamed to "Registry"
// to match the correctly spelled suites later in this file.
TEST(Registry, CannotGetMissingFactory) {
  TestRegistry reg;
  // Looking up a name that was never registered must throw.
  ASSERT_THROW(reg.get("none"), FactoryNotFound);
}
// Fix: test suite name was misspelled "Regisrty"; renamed to "Registry".
TEST(Registry, CannotSetFactoryTwice) {
  TestRegistry reg;
  reg.registerFactory("one", make_one);
  // Registering the same name a second time must be rejected.
  ASSERT_THROW(reg.registerFactory("one", make_one), DuplicateInjection);
}
// Fix: test suite name was misspelled "Regisrty"; renamed to "Registry".
TEST(Registry, GetFactory) {
  TestRegistry reg;
  reg.registerFactory("one", make_one);

  // A registered factory is returned as-is and produces its value.
  test_factory factory = reg.get("one");
  int* result = factory();
  EXPECT_EQ(1, *result);
  delete result;
}
// instance() must always hand back the same registry object.
TEST(Registry, Singleton) {
  ASSERT_EQ(TestRegistry::instance(), TestRegistry::instance());
}
// The static RegisterFactory/Get proxies operate on the singleton instance.
TEST(Registry, SingletonProxy) {
  TestRegistry::RegisterFactory("one", make_one);

  test_factory one_factory = TestRegistry::Get("one");
  int* one = one_factory();
  EXPECT_EQ(1, *one);
  delete one;
}
|
C++
|
BSD-3-Clause
|
ArcticNature/core/registry/base/tests/base.cpp
|
dc71c95a-2c7c-4e22-855e-b1e1c67a7ee2
|
[{"tag": "EMAIL", "value": "stefano@spogliani.net", "start": 36, "end": 57, "context": "// Copyright 2015 Stefano Pogliani <stefano@spogliani.net>\n#include <gtest/gtest.h>\n\n#include \"core/excepti"}, {"tag": "NAME", "value": "Stefano Pogliani", "start": 18, "end": 34, "context": "// Copyright 2015 Stefano Pogliani <stefano@spogliani.net>\n#include <gtest/gtest.h>\n"}]
|
[{"tag": "EMAIL", "value": "stefano@spogliani.net", "start": 36, "end": 57, "context": "// Copyright 2015 Stefano Pogliani <stefano@spogliani.net>\n#include <gtest/gtest.h>\n\n#include \"core/excepti"}, {"tag": "NAME", "value": "Stefano Pogliani", "start": 18, "end": 34, "context": "// Copyright 2015 Stefano Pogliani <stefano@spogliani.net>\n#include <gtest/gtest.h>\n"}]
|
<?php
/**
* Breadcrumb Trail - A breadcrumb menu script for WordPress.
*
* Breadcrumb Trail is a script for showing a breadcrumb trail for any type of page. It tries to
* anticipate any type of structure and display the best possible trail that matches your site's
* permalink structure. While not perfect, it attempts to fill in the gaps left by many other
* breadcrumb scripts.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation; either version 2 of the License,
* or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*
* @package BreadcrumbTrail
* @version 1.0.0
* @author Justin Tadlock <justin@justintadlock.com>
* @copyright Copyright (c) 2008 - 2015, Justin Tadlock
* @link http://themehybrid.com/plugins/breadcrumb-trail
* @license http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*/
/**
 * Displays a breadcrumb menu for the current page.
 *
 * Template-facing wrapper for the Breadcrumb_Trail class. The
 * `breadcrumb_trail_object` filter lets plugins supply their own
 * breadcrumb object before the default one is created.
 *
 * @since  0.1.0
 * @access public
 * @param  array $args Arguments to pass to Breadcrumb_Trail.
 * @return void
 */
function breadcrumb_trail( $args = array() ) {

	// Give plugins a chance to short-circuit with a custom object.
	$trail_object = apply_filters( 'breadcrumb_trail_object', null, $args );

	if ( ! is_object( $trail_object ) )
		$trail_object = new Breadcrumb_Trail( $args );

	return $trail_object->trail();
}
/**
* Creates a breadcrumbs menu for the site based on the current page that's being viewed by the user.
*
* @since 0.6.0
* @access public
*/
class Breadcrumb_Trail {
	/**
	 * Breadcrumb items (HTML strings) collected for the current view,
	 * in display order. Filled in by add_items() and consumed by trail().
	 *
	 * @since  0.1.0
	 * @access public
	 * @var    array
	 */
	public $items = array();

	/**
	 * Parsed arguments used to build the breadcrumb trail. See
	 * Breadcrumb_Trail::__construct() for the supported keys.
	 *
	 * @since  0.1.0
	 * @access public
	 * @var    array
	 */
	public $args = array();

	/**
	 * Translatable text labels, filled in by set_labels().
	 *
	 * @since  1.0.0
	 * @access public
	 * @var    array
	 */
	public $labels = array();

	/**
	 * Map of post type (key) to taxonomy (value) whose terms are shown on
	 * single post views. Filled in by set_post_taxonomy().
	 *
	 * @since  1.0.0
	 * @access public
	 * @var    array
	 */
	public $post_taxonomy = array();
/* ====== Magic Methods ====== */
	/**
	 * Magic method used when the object is cast to a string: returns the
	 * breadcrumb trail HTML.
	 *
	 * NOTE(review): trail() echoes instead of returning when the 'echo'
	 * argument is true (the default), so casting to a string only yields
	 * the markup when 'echo' is false — confirm callers expect this.
	 *
	 * @since  1.0.0
	 * @access public
	 * @return string
	 */
	public function __toString() {
		return $this->trail();
	}
	/**
	 * Sets up the breadcrumb trail properties and calls
	 * `Breadcrumb_Trail::add_items()` to create the array of breadcrumb items.
	 *
	 * @since  0.6.0
	 * @access public
	 * @param  array $args {
	 *     @type string $container     Container HTML element. nav|div
	 *     @type string $before        String to output before breadcrumb menu.
	 *     @type string $after         String to output after breadcrumb menu.
	 *     @type bool   $show_on_front Whether to show when `is_front_page()`.
	 *     @type bool   $network       Whether to link to the network main site (multisite only).
	 *     @type bool   $show_title    Whether to show the title (last item) in the trail.
	 *     @type bool   $show_browse   Whether to show the breadcrumb menu header.
	 *     @type array  $labels        Text labels. @see Breadcrumb_Trail::set_labels()
	 *     @type array  $post_taxonomy Taxonomies to use for post types. @see Breadcrumb_Trail::set_post_taxonomy()
	 *     @type bool   $echo          Whether to print or return the breadcrumbs.
	 * }
	 * @return void
	 */
	public function __construct( $args = array() ) {

		$defaults = array(
			'container'     => 'nav',
			'before'        => '',
			'after'         => '',
			'show_on_front' => true,
			'network'       => false,
			'show_title'    => true,
			'show_browse'   => true,
			'labels'        => array(),
			'post_taxonomy' => array(),
			'echo'          => true
		);

		// Parse the arguments with the defaults; filterable by plugins.
		$this->args = apply_filters( 'breadcrumb_trail_args', wp_parse_args( $args, $defaults ) );

		// Set the labels and post taxonomy properties.
		$this->set_labels();
		$this->set_post_taxonomy();

		// Build the list of breadcrumb items for the current view.
		$this->add_items();
	}
/* ====== Public Methods ====== */
	/**
	 * Formats and outputs the breadcrumb trail HTML.
	 *
	 * Builds schema.org BreadcrumbList/ListItem microdata from $this->items.
	 * Echoes the markup unless the 'echo' argument is false, in which case
	 * the string is returned instead.
	 *
	 * @since  0.6.0
	 * @access public
	 * @return string
	 */
	public function trail() {

		// Set up variables that we'll need.
		$breadcrumb    = '';
		$item_count    = count( $this->items );
		$item_position = 0;

		// Connect the breadcrumb trail if there are items in the trail.
		if ( 0 < $item_count ) {

			// Add 'browse' label if it should be shown.
			if ( true === $this->args['show_browse'] )
				$breadcrumb .= sprintf( '<h2 class="trail-browse">%s</h2>', $this->labels['browse'] );

			// Open the unordered list.
			$breadcrumb .= '<ul class="trail-items" itemscope itemtype="http://schema.org/BreadcrumbList">';

			// Add the number of items and item list order schema.
			// NOTE(review): these use name="..." rather than itemprop="..." —
			// confirm this is intentional for the microdata consumers in use.
			$breadcrumb .= sprintf( '<meta name="numberOfItems" content="%d" />', absint( $item_count ) );
			$breadcrumb .= '<meta name="itemListOrder" content="Ascending" />';

			// Loop through the items and add them to the list.
			foreach ( $this->items as $item ) {

				// Iterate the item position.
				++$item_position;

				// Check if the item is linked (captures open tag, text, close tag).
				preg_match( '/(<a.*?>)(.*?)(<\/a>)/i', $item, $matches );

				// Wrap the item text with the schema.org "name" itemprop,
				// inside the anchor when the item is a link.
				$item = !empty( $matches ) ? sprintf( '%s<span itemprop="name">%s</span>%s', $matches[1], $matches[2], $matches[3] ) : sprintf( '<span itemprop="name">%s</span>', $item );

				// Add list item classes: first item gets 'trail-begin' (only
				// when there is more than one item), last gets 'trail-end'.
				$item_class = 'trail-item';

				if ( 1 === $item_position && 1 < $item_count )
					$item_class .= ' trail-begin';

				elseif ( $item_count === $item_position )
					$item_class .= ' trail-end';

				// Create list item attributes.
				$attributes = 'itemprop="itemListElement" itemscope itemtype="http://schema.org/ListItem" class="' . $item_class . '"';

				// Build the meta position HTML (1-based position in the list).
				$meta = sprintf( '<meta itemprop="position" content="%s" />', absint( $item_position ) );

				// Build the list item.
				$breadcrumb .= sprintf( '<li %s>%s%s</li>', $attributes, $item, $meta );
			}

			// Close the unordered list.
			$breadcrumb .= '</ul>';

			// Wrap the breadcrumb trail in the configured container element.
			$breadcrumb = sprintf(
				'<%1$s role="navigation" aria-label="%2$s" class="breadcrumb-trail breadcrumbs" itemprop="breadcrumb">%3$s%4$s%5$s</%1$s>',
				tag_escape( $this->args['container'] ),
				esc_attr( $this->labels['aria_label'] ),
				$this->args['before'],
				$breadcrumb,
				$this->args['after']
			);
		}

		// Allow developers to filter the breadcrumb trail HTML.
		$breadcrumb = apply_filters( 'breadcrumb_trail', $breadcrumb, $this->args );

		if ( false === $this->args['echo'] )
			return $breadcrumb;

		echo $breadcrumb;
	}
/* ====== Protected Methods ====== */
	/**
	 * Sets the labels property. Parses the inputted labels array with the
	 * defaults below; the result is filterable via `breadcrumb_trail_labels`.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function set_labels() {

		$defaults = array(
			'browse'              => esc_html__( 'Browse:', 'uni-education' ),
			'aria_label'          => esc_attr_x( 'Breadcrumbs', 'breadcrumbs aria label', 'uni-education' ),
			'home'                => esc_html__( 'Home', 'uni-education' ),
			'error_404'           => esc_html__( '404 Not Found', 'uni-education' ),
			'archives'            => esc_html__( 'Archives', 'uni-education' ),
			// Translators: %s is the search query, wrapped in curly quotes.
			'search'              => esc_html__( 'Search results for “%s”', 'uni-education' ),
			// Translators: %s is the page number.
			'paged'               => esc_html__( 'Page %s', 'uni-education' ),
			// Translators: Minute archive title. %s is the minute time format.
			'archive_minute'      => esc_html__( 'Minute %s', 'uni-education' ),
			// Translators: Weekly archive title. %s is the week date format.
			'archive_week'        => esc_html__( 'Week %s', 'uni-education' ),
			// "%s" is replaced with the translated date/time format.
			'archive_minute_hour' => '%s',
			'archive_hour'        => '%s',
			'archive_day'         => '%s',
			'archive_month'       => '%s',
			'archive_year'        => '%s',
		);

		$this->labels = apply_filters( 'breadcrumb_trail_labels', wp_parse_args( $this->args['labels'], $defaults ) );
	}
/**
* Sets the `$post_taxonomy` property. This is an array of post types (key) and taxonomies (value).
* The taxonomy's terms are shown on the singular post view if set.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function set_post_taxonomy() {
$defaults = array();
// If post permalink is set to `%postname%`, use the `category` taxonomy.
if ( '%postname%' === trim( get_option( 'permalink_structure' ), '/' ) )
$defaults['post'] = 'category';
$this->post_taxonomy = apply_filters( 'breadcrumb_trail_post_taxonomy', wp_parse_args( $this->args['post_taxonomy'], $defaults ) );
}
	/**
	 * Runs through the various WordPress conditional tags to check the
	 * current page being viewed. Once a condition is met, a specific method
	 * is launched to add items to the `$items` array.
	 *
	 * The branch order matters: more specific conditions (minute/hour query
	 * vars) are tested before the broader date conditionals.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_items() {

		// If viewing the front page.
		if ( is_front_page() ) {
			$this->add_front_page_items();
		}

		// If not viewing the front page.
		else {

			// Add the network and site home links.
			$this->add_network_home_link();
			$this->add_site_home_link();

			// If viewing the home/blog page.
			if ( is_home() ) {
				$this->add_posts_page_items();
			}

			// If viewing a single post.
			elseif ( is_singular() ) {
				$this->add_singular_items();
			}

			// If viewing an archive page, dispatch by archive type.
			elseif ( is_archive() ) {

				if ( is_post_type_archive() )
					$this->add_post_type_archive_items();

				elseif ( is_category() || is_tag() || is_tax() )
					$this->add_term_archive_items();

				elseif ( is_author() )
					$this->add_user_archive_items();

				elseif ( get_query_var( 'minute' ) && get_query_var( 'hour' ) )
					$this->add_minute_hour_archive_items();

				elseif ( get_query_var( 'minute' ) )
					$this->add_minute_archive_items();

				elseif ( get_query_var( 'hour' ) )
					$this->add_hour_archive_items();

				elseif ( is_day() )
					$this->add_day_archive_items();

				elseif ( get_query_var( 'w' ) )
					$this->add_week_archive_items();

				elseif ( is_month() )
					$this->add_month_archive_items();

				elseif ( is_year() )
					$this->add_year_archive_items();

				else
					$this->add_default_archive_items();
			}

			// If viewing a search results page.
			elseif ( is_search() ) {
				$this->add_search_items();
			}

			// If viewing the 404 page.
			elseif ( is_404() ) {
				$this->add_404_items();
			}
		}

		// Add paged items if they exist.
		$this->add_paged_items();

		// Allow developers to overwrite the items for the breadcrumb trail.
		// array_unique() also drops accidental duplicate entries.
		$this->items = array_unique( apply_filters( 'breadcrumb_trail_items', $this->items, $this->args ) );
	}
/**
* Gets front items based on $wp_rewrite->front.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_rewrite_front_items() {
global $wp_rewrite;
if ( $wp_rewrite->front )
$this->add_path_parents( $wp_rewrite->front );
}
	/**
	 * Adds the page/paged number to the items array: "Page N" for paged
	 * singular posts (`page` query var) and paged archive-type pages
	 * (`paged` query var). Only applies when 'show_title' is enabled.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_paged_items() {

		// If viewing a paged singular post.
		if ( is_singular() && 1 < get_query_var( 'page' ) && true === $this->args['show_title'] )
			$this->items[] = sprintf( $this->labels['paged'], number_format_i18n( absint( get_query_var( 'page' ) ) ) );

		// If viewing a paged archive-type page.
		elseif ( is_paged() && true === $this->args['show_title'] )
			$this->items[] = sprintf( $this->labels['paged'], number_format_i18n( absint( get_query_var( 'paged' ) ) ) );
	}
/**
* Adds the network (all sites) home page link to the items array.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_network_home_link() {
if ( is_multisite() && !is_main_site() && true === $this->args['network'] )
$this->items[] = sprintf( '<a href="%s" rel="home">%s</a>', esc_url( network_home_url() ), $this->labels['home'] );
}
/**
* Adds the current site's home page link to the items array.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_site_home_link() {
$network = is_multisite() && !is_main_site() && true === $this->args['network'];
$label = $network ? get_bloginfo( 'name' ) : $this->labels['home'];
$rel = $network ? '' : ' rel="home"';
$this->items[] = sprintf( '<a href="%s"%s>%s</a>', esc_url( home_url() ), $rel, $label );
}
	/**
	 * Adds items for the front page to the items array. Items are shown on
	 * the front page itself only when 'show_on_front' is set, but always on
	 * paged views of it.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_front_page_items() {

		// Only show front items if the 'show_on_front' argument is set to 'true',
		// or if this is a paged view of the front page.
		if ( true === $this->args['show_on_front'] || is_paged() || ( is_singular() && 1 < get_query_var( 'page' ) ) ) {

			// Add network home link.
			$this->add_network_home_link();

			// If on a paged view, add the site home link (the page number
			// becomes the trail end via add_paged_items()).
			if ( is_paged() )
				$this->add_site_home_link();

			// If on the main front page, add the network home title.
			elseif ( true === $this->args['show_title'] )
				$this->items[] = is_multisite() && true === $this->args['network'] ? get_bloginfo( 'name' ) : $this->labels['home'];
		}
	}
	/**
	 * Adds items for the posts page (i.e., is_home()) to the items array:
	 * the page's parents (if any) followed by its title, linked on paged
	 * views.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_posts_page_items() {

		// Get the post ID and post.
		$post_id = get_queried_object_id();
		$post    = get_post( $post_id );

		// If the post has parents, add them to the trail.
		if ( 0 < $post->post_parent )
			$this->add_post_parents( $post->post_parent );

		// Get the page title.
		$title = get_the_title( $post_id );

		// Add the posts page item: linked when paged, plain text otherwise.
		if ( is_paged() )
			$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_permalink( $post_id ) ), $title );

		elseif ( $title && true === $this->args['show_title'] )
			$this->items[] = $title;
	}
	/**
	 * Adds singular post items to the items array: the post's parent chain
	 * (or its post type hierarchy when it has no parent), optional taxonomy
	 * terms, then the post title.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_singular_items() {

		// Get the queried post.
		$post    = get_queried_object();
		$post_id = get_queried_object_id();

		// If the post has a parent, follow the parent trail.
		if ( 0 < $post->post_parent )
			$this->add_post_parents( $post->post_parent );

		// If the post doesn't have a parent, get its hierarchy based off the post type.
		else
			$this->add_post_hierarchy( $post_id );

		// Display terms for specific post type taxonomy if requested
		// (see $this->post_taxonomy, set in set_post_taxonomy()).
		if ( !empty( $this->post_taxonomy[ $post->post_type ] ) )
			$this->add_post_terms( $post_id, $this->post_taxonomy[ $post->post_type ] );

		// End with the post title: linked on paged views, plain otherwise.
		if ( $post_title = single_post_title( '', false ) ) {

			if ( 1 < get_query_var( 'page' ) || is_paged() )
				$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_permalink( $post_id ) ), $post_title );

			elseif ( true === $this->args['show_title'] )
				$this->items[] = $post_title;
		}
	}
	/**
	 * Adds the items to the trail items array for taxonomy term archives:
	 * rewrite-front parents, a matching post type archive link (when one can
	 * be inferred from the taxonomy's rewrite slug or its single object
	 * type), ancestor terms for hierarchical taxonomies, then the term name.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @global object $wp_rewrite
	 * @return void
	 */
	protected function add_term_archive_items() {
		global $wp_rewrite;

		// Get some taxonomy and term variables.
		$term           = get_queried_object();
		$taxonomy       = get_taxonomy( $term->taxonomy );
		$done_post_type = false;

		// If there are rewrite rules for the taxonomy.
		if ( false !== $taxonomy->rewrite ) {

			// If 'with_front' is true, add $wp_rewrite->front to the trail.
			if ( $taxonomy->rewrite['with_front'] && $wp_rewrite->front )
				$this->add_rewrite_front_items();

			// Get parent pages by path if they exist.
			$this->add_path_parents( $taxonomy->rewrite['slug'] );

			// Add post type archive if its 'has_archive' matches the taxonomy rewrite 'slug'.
			if ( $taxonomy->rewrite['slug'] ) {

				$slug = trim( $taxonomy->rewrite['slug'], '/' );

				// Deals with the situation if the slug has a '/' between multiple
				// strings. For example, "movies/genres" where "movies" is the post
				// type archive.
				$matches = explode( '/', $slug );

				// If matches are found for the path.
				if ( isset( $matches ) ) {

					// Reverse the array of matches to search for posts in the proper order.
					$matches = array_reverse( $matches );

					// Loop through each of the path matches.
					foreach ( $matches as $match ) {

						// If a match is found.
						$slug = $match;

						// Get public post types that match the rewrite slug.
						$post_types = $this->get_post_types_by_slug( $match );

						if ( !empty( $post_types ) ) {

							$post_type_object = $post_types[0];

							// Add support for a non-standard label of 'archive_title' (special use case).
							$label = !empty( $post_type_object->labels->archive_title ) ? $post_type_object->labels->archive_title : $post_type_object->labels->name;

							// Core filter hook.
							$label = apply_filters( 'post_type_archive_title', $label, $post_type_object->name );

							// Add the post type archive link to the trail.
							$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_post_type_archive_link( $post_type_object->name ) ), $label );

							$done_post_type = true;

							// Break out of the loop.
							break;
						}
					}
				}
			}
		}

		// If there's a single post type for the taxonomy, use it.
		if ( false === $done_post_type && 1 === count( $taxonomy->object_type ) && post_type_exists( $taxonomy->object_type[0] ) ) {

			// If the post type is 'post', link to the configured posts page
			// (when the front page is static).
			if ( 'post' === $taxonomy->object_type[0] ) {
				$post_id = get_option( 'page_for_posts' );

				if ( 'posts' !== get_option( 'show_on_front' ) && 0 < $post_id )
					$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_permalink( $post_id ) ), get_the_title( $post_id ) );

			// If the post type is not 'post', link to its archive.
			} else {
				$post_type_object = get_post_type_object( $taxonomy->object_type[0] );

				$label = !empty( $post_type_object->labels->archive_title ) ? $post_type_object->labels->archive_title : $post_type_object->labels->name;

				// Core filter hook.
				$label = apply_filters( 'post_type_archive_title', $label, $post_type_object->name );

				$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_post_type_archive_link( $post_type_object->name ) ), $label );
			}
		}

		// If the taxonomy is hierarchical, list its parent terms.
		if ( is_taxonomy_hierarchical( $term->taxonomy ) && $term->parent )
			$this->add_term_parents( $term->parent, $term->taxonomy );

		// Add the term name to the trail end: linked when paged.
		if ( is_paged() )
			$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_term_link( $term, $term->taxonomy ) ), single_term_title( '', false ) );

		elseif ( true === $this->args['show_title'] )
			$this->items[] = single_term_title( '', false );
	}
	/**
	 * Adds the items to the trail items array for post type archives:
	 * rewrite-front parents, rewrite-slug parent pages, then the post type's
	 * archive title (linked on paged views).
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_post_type_archive_items() {

		// Get the post type object.
		$post_type_object = get_post_type_object( get_query_var( 'post_type' ) );

		if ( false !== $post_type_object->rewrite ) {

			// If 'with_front' is true, add $wp_rewrite->front to the trail.
			if ( $post_type_object->rewrite['with_front'] )
				$this->add_rewrite_front_items();

			// If there's a rewrite slug, check for parents.
			if ( !empty( $post_type_object->rewrite['slug'] ) )
				$this->add_path_parents( $post_type_object->rewrite['slug'] );
		}

		// Add the post type [plural] name to the trail end.
		if ( is_paged() )
			$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_post_type_archive_link( $post_type_object->name ) ), post_type_archive_title( '', false ) );

		elseif ( true === $this->args['show_title'] )
			$this->items[] = post_type_archive_title( '', false );
	}
	/**
	 * Adds the items to the trail items array for user (author) archives:
	 * rewrite-front parents, author-base parent pages, then the author's
	 * display name (linked on paged views).
	 *
	 * @since  1.0.0
	 * @access protected
	 * @global object $wp_rewrite
	 * @return void
	 */
	protected function add_user_archive_items() {
		global $wp_rewrite;

		// Add $wp_rewrite->front to the trail.
		$this->add_rewrite_front_items();

		// Get the user ID.
		$user_id = get_query_var( 'author' );

		// If $author_base exists, check for parent pages.
		if ( !empty( $wp_rewrite->author_base ) )
			$this->add_path_parents( $wp_rewrite->author_base );

		// Add the author's display name to the trail end.
		if ( is_paged() )
			$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_author_posts_url( $user_id ) ), get_the_author_meta( 'display_name', $user_id ) );

		elseif ( true === $this->args['show_title'] )
			$this->items[] = get_the_author_meta( 'display_name', $user_id );
	}
/**
* Adds the items to the trail items array for minute + hour archives.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_minute_hour_archive_items() {
// Add $wp_rewrite->front to the trail.
$this->add_rewrite_front_items();
// Add the minute + hour item.
if ( true === $this->args['show_title'] )
$this->items[] = sprintf( $this->labels['archive_minute_hour'], get_the_time( esc_html_x( 'g:i a', 'minute and hour archives time format', 'uni-education' ) ) );
}
/**
* Adds the items to the trail items array for minute archives.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_minute_archive_items() {
// Add $wp_rewrite->front to the trail.
$this->add_rewrite_front_items();
// Add the minute item.
if ( true === $this->args['show_title'] )
$this->items[] = sprintf( $this->labels['archive_minute'], get_the_time( esc_html_x( 'i', 'minute archives time format', 'uni-education' ) ) );
}
/**
* Adds the items to the trail items array for hour archives.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_hour_archive_items() {
// Add $wp_rewrite->front to the trail.
$this->add_rewrite_front_items();
// Add the hour item.
if ( true === $this->args['show_title'] )
$this->items[] = sprintf( $this->labels['archive_hour'], get_the_time( esc_html_x( 'g a', 'hour archives time format', 'uni-education' ) ) );
}
/**
* Adds the items to the trail items array for day archives.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_day_archive_items() {
// Add $wp_rewrite->front to the trail.
$this->add_rewrite_front_items();
// Get year, month, and day.
$year = sprintf( $this->labels['archive_year'], get_the_time( esc_html_x( 'Y', 'yearly archives date format', 'uni-education' ) ) );
$month = sprintf( $this->labels['archive_month'], get_the_time( esc_html_x( 'F', 'monthly archives date format', 'uni-education' ) ) );
$day = sprintf( $this->labels['archive_day'], get_the_time( esc_html_x( 'j', 'daily archives date format', 'uni-education' ) ) );
// Add the year and month items.
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_year_link( get_the_time( 'Y' ) ) ), $year );
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_month_link( get_the_time( 'Y' ), get_the_time( 'm' ) ) ), $month );
// Add the day item.
if ( is_paged() )
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_day_link( get_the_time( 'Y' ) ), get_the_time( 'm' ), get_the_time( 'd' ) ), $day );
elseif ( true === $this->args['show_title'] )
$this->items[] = $day;
}
/**
* Adds the items to the trail items array for week archives.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_week_archive_items() {
// Add $wp_rewrite->front to the trail.
$this->add_rewrite_front_items();
// Get the year and week.
$year = sprintf( $this->labels['archive_year'], get_the_time( esc_html_x( 'Y', 'yearly archives date format', 'uni-education' ) ) );
$week = sprintf( $this->labels['archive_week'], get_the_time( esc_html_x( 'W', 'weekly archives date format', 'uni-education' ) ) );
// Add the year item.
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_year_link( get_the_time( 'Y' ) ) ), $year );
// Add the week item.
if ( is_paged() )
$this->items[] = esc_url( get_archives_link( add_query_arg( array( 'm' => get_the_time( 'Y' ), 'w' => get_the_time( 'W' ) ), home_url() ), $week, false ) );
elseif ( true === $this->args['show_title'] )
$this->items[] = $week;
}
/**
* Adds the items to the trail items array for month archives.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_month_archive_items() {
// Add $wp_rewrite->front to the trail.
$this->add_rewrite_front_items();
// Get the year and month.
$year = sprintf( $this->labels['archive_year'], get_the_time( esc_html_x( 'Y', 'yearly archives date format', 'uni-education' ) ) );
$month = sprintf( $this->labels['archive_month'], get_the_time( esc_html_x( 'F', 'monthly archives date format', 'uni-education' ) ) );
// Add the year item.
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_year_link( get_the_time( 'Y' ) ) ), $year );
// Add the month item.
if ( is_paged() )
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_month_link( get_the_time( 'Y' ), get_the_time( 'm' ) ) ), $month );
elseif ( true === $this->args['show_title'] )
$this->items[] = $month;
}
/**
* Adds the items to the trail items array for year archives.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_year_archive_items() {
// Add $wp_rewrite->front to the trail.
$this->add_rewrite_front_items();
// Get the year.
$year = sprintf( $this->labels['archive_year'], get_the_time( esc_html_x( 'Y', 'yearly archives date format', 'uni-education' ) ) );
// Add the year item.
if ( is_paged() )
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_year_link( get_the_time( 'Y' ) ) ), $year );
elseif ( true === $this->args['show_title'] )
$this->items[] = $year;
}
	/**
	 * Adds the items to the trail items array for archives that don't have a
	 * more specific method defined in this class: a generic "Archives" label.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_default_archive_items() {

		// If this is a date-/time-based archive, add $wp_rewrite->front to the trail.
		if ( is_date() || is_time() )
			$this->add_rewrite_front_items();

		if ( true === $this->args['show_title'] )
			$this->items[] = $this->labels['archives'];
	}
/**
* Adds the items to the trail items array for search results.
*
* @since 1.0.0
* @access protected
* @return void
*/
protected function add_search_items() {
if ( is_paged() )
$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_search_link() ), sprintf( $this->labels['search'], get_search_query() ) );
elseif ( true === $this->args['show_title'] )
$this->items[] = sprintf( $this->labels['search'], get_search_query() );
}
	/**
	 * Adds the items to the trail items array for 404 pages: the "404 Not
	 * Found" label.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @return void
	 */
	protected function add_404_items() {

		if ( true === $this->args['show_title'] )
			$this->items[] = $this->labels['error_404'];
	}
	/**
	 * Adds a specific post's parents (linked) to the items array, walking up
	 * the post_parent chain, then the final ancestor's post type hierarchy
	 * and (optionally) its taxonomy terms.
	 *
	 * NOTE(review): callers pass a non-zero $post_id, so the loop body runs
	 * at least once and $post is defined afterwards — confirm no caller can
	 * pass 0.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @param  int $post_id
	 * @return void
	 */
	protected function add_post_parents( $post_id ) {
		$parents = array();

		while ( $post_id ) {

			// Get the post by ID.
			$post = get_post( $post_id );

			// If we hit a page that's set as the front page, bail.
			if ( 'page' == $post->post_type && 'page' == get_option( 'show_on_front' ) && $post_id == get_option( 'page_on_front' ) )
				break;

			// Add the formatted post link to the array of parents.
			$parents[] = sprintf( '<a href="%s">%s</a>', esc_url( get_permalink( $post_id ) ), get_the_title( $post_id ) );

			// If there's no longer a post parent, break out of the loop.
			if ( 0 >= $post->post_parent )
				break;

			// Change the post ID to the parent post to continue looping.
			$post_id = $post->post_parent;
		}

		// Get the post hierarchy based off the final parent post.
		$this->add_post_hierarchy( $post_id );

		// Display terms for specific post type taxonomy if requested.
		if ( !empty( $this->post_taxonomy[ $post->post_type ] ) )
			$this->add_post_terms( $post_id, $this->post_taxonomy[ $post->post_type ] );

		// Merge the parent items into the items array, closest ancestor last.
		$this->items = array_merge( $this->items, array_reverse( $parents ) );
	}
	/**
	 * Adds a specific post's hierarchy to the items array. The hierarchy is
	 * determined by the post type's rewrite arguments and whether it has an
	 * archive page.
	 *
	 * @since  1.0.0
	 * @access protected
	 * @param  int $post_id
	 * @return void
	 */
	protected function add_post_hierarchy( $post_id ) {

		// Get the post type.
		$post_type        = get_post_type( $post_id );
		$post_type_object = get_post_type_object( $post_type );

		// If this is the 'post' post type, get the rewrite front items and map the rewrite tags.
		if ( 'post' === $post_type ) {

			// Add $wp_rewrite->front to the trail.
			$this->add_rewrite_front_items();

			// Map the rewrite tags (e.g. %year%, %category%) from the
			// permalink structure to trail items.
			$this->map_rewrite_tags( $post_id, get_option( 'permalink_structure' ) );
		}

		// If the post type has rewrite rules.
		elseif ( false !== $post_type_object->rewrite ) {

			// If 'with_front' is true, add $wp_rewrite->front to the trail.
			if ( $post_type_object->rewrite['with_front'] )
				$this->add_rewrite_front_items();

			// If there's a path, check for parents.
			if ( !empty( $post_type_object->rewrite['slug'] ) )
				$this->add_path_parents( $post_type_object->rewrite['slug'] );
		}

		// If there's an archive page, add it to the trail.
		if ( $post_type_object->has_archive ) {

			// Add support for a non-standard label of 'archive_title' (special use case).
			$label = !empty( $post_type_object->labels->archive_title ) ? $post_type_object->labels->archive_title : $post_type_object->labels->name;

			// Core filter hook.
			$label = apply_filters( 'post_type_archive_title', $label, $post_type_object->name );

			$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_post_type_archive_link( $post_type ) ), $label );
		}
	}
/**
 * Gets post types by slug. This is needed because the get_post_types() function doesn't exactly
 * match the 'has_archive' argument when it's set as a string instead of a boolean.
 *
 * @since 0.6.0
 * @access protected
 * @param string $slug The post type archive slug to search for.
 * @return object[] Post type objects whose archive matches the given slug.
 */
protected function get_post_types_by_slug( $slug ) {

	$return = array();

	$post_types = get_post_types( array(), 'objects' );

	foreach ( $post_types as $type ) {
		// Match either a string 'has_archive' equal to the slug, or a boolean
		// 'has_archive' whose rewrite slug equals it.
		if ( $slug === $type->has_archive || ( true === $type->has_archive && $slug === $type->rewrite['slug'] ) )
			$return[] = $type;
	}

	return $return;
}
/**
 * Adds a post's terms from a specific taxonomy to the items array.
 *
 * @since 1.0.0
 * @access protected
 * @param int    $post_id  The ID of the post to get the terms for.
 * @param string $taxonomy The taxonomy to get the terms from.
 * @return void
 */
protected function add_post_terms( $post_id, $taxonomy ) {

	// Get the post's terms for the given taxonomy.
	$terms = get_the_terms( $post_id, $taxonomy );

	// Check that terms were returned and no error occurred.
	if ( $terms && ! is_wp_error( $terms ) ) {

		// Sort the terms by ID and get the first term. wp_list_sort() returns
		// a new sorted array, so its result must be captured — the previous
		// code discarded the return value, leaving the terms unsorted.
		$terms = wp_list_sort( $terms, 'term_id', 'ASC' );

		$term = get_term( $terms[0], $taxonomy );

		// If the term has a parent, add the hierarchy to the trail.
		if ( 0 < $term->parent )
			$this->add_term_parents( $term->parent, $taxonomy );

		// Add the term archive link to the trail.
		$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_term_link( $term, $taxonomy ) ), $term->name );
	}
}
/**
 * Get parent posts by path. Currently, this method only supports getting parents of the 'page'
 * post type. The goal of this function is to create a clear path back to home given what would
 * normally be a "ghost" directory. If any page matches the given path, it'll be added.
 *
 * @since 1.0.0
 * @access public
 * @param string $path The path (slug) to search for posts by.
 * @return void
 */
function add_path_parents( $path ) {

	// Trim '/' off $path in case we just got a simple '/' instead of a real path.
	$path = trim( $path, '/' );

	// If there's no path, return.
	if ( empty( $path ) )
		return;

	// Get parent post by the path.
	$post = get_page_by_path( $path );

	if ( !empty( $post ) ) {
		$this->add_post_parents( $post->ID );
	}

	elseif ( is_null( $post ) ) {

		// Separate post names into separate paths by '/'.
		$path = trim( $path, '/' );
		// NOTE(review): preg_match_all() always populates $matches, so the
		// isset() check below is always true; the regex captures the trailing
		// '/segment' of the path on each pass.
		preg_match_all( "/\/.*?\z/", $path, $matches );

		// If matches are found for the path.
		if ( isset( $matches ) ) {

			// Reverse the array of matches to search for posts in the proper order.
			$matches = array_reverse( $matches );

			// Loop through each of the path matches, progressively stripping
			// trailing segments until a real page is found.
			foreach ( $matches as $match ) {

				// If a match is found.
				if ( isset( $match[0] ) ) {

					// Get the parent post by the given path.
					$path = str_replace( $match[0], '', $path );
					$post = get_page_by_path( trim( $path, '/' ) );

					// If a parent post is found, set the $post_id and break out of the loop.
					if ( !empty( $post ) && 0 < $post->ID ) {
						$this->add_post_parents( $post->ID );
						break;
					}
				}
			}
		}
	}
}
/**
 * Searches for term parents of hierarchical taxonomies. This function is similar to the WordPress
 * function get_category_parents() but handles any type of taxonomy.
 *
 * @since 1.0.0
 * @param int    $term_id  ID of the term to get the parents of.
 * @param string $taxonomy Name of the taxonomy for the given term.
 * @return void
 */
function add_term_parents( $term_id, $taxonomy ) {

	// Set up some default arrays.
	$parents = array();

	// While there is a parent ID, add the parent term link to the $parents array.
	while ( $term_id ) {

		// Get the parent term.
		$term = get_term( $term_id, $taxonomy );

		// Add the formatted term link to the array of parent terms.
		$parents[] = sprintf( '<a href="%s">%s</a>', esc_url( get_term_link( $term, $taxonomy ) ), $term->name );

		// Set the parent term's parent as the parent ID.
		$term_id = $term->parent;
	}

	// If we have parent terms, reverse the array to put them in the proper order for the trail.
	// $parents is built child-first, so it must actually be reversed here; the
	// previous code merged it as-is (root-last), contradicting this comment and
	// the way add_post_parents() merges its reversed parent list.
	if ( !empty( $parents ) )
		$this->items = array_merge( $this->items, array_reverse( $parents ) );
}
/**
 * Turns %tag% from permalink structures into usable links for the breadcrumb trail. This feels kind of
 * hackish for now because we're checking for specific %tag% examples and only doing it for the 'post'
 * post type. In the future, maybe it'll handle a wider variety of possibilities, especially for custom post
 * types.
 *
 * @since 0.6.0
 * @access protected
 * @param int    $post_id ID of the post whose permalink tags are mapped.
 * @param string $path    Permalink structure to scan for rewrite tags.
 * @return void
 */
protected function map_rewrite_tags( $post_id, $path ) {

	$post = get_post( $post_id );

	// If the post doesn't have the `post` post type, bail.
	if ( 'post' !== $post->post_type )
		return;

	// Trim '/' from both sides of the $path.
	$path = trim( $path, '/' );

	// Split the $path into an array of strings.
	$matches = explode( '/', $path );

	// If matches are found for the path.
	if ( is_array( $matches ) ) {

		// Loop through each of the matches, adding each to the $trail array.
		foreach ( $matches as $match ) {

			// Trim any '/' from the $match.
			$tag = trim( $match, '/' );

			// If using the %year% tag, add a link to the yearly archive.
			if ( '%year%' == $tag )
				$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_year_link( get_the_time( 'Y', $post_id ) ) ), sprintf( $this->labels['archive_year'], get_the_time( esc_html_x( 'Y', 'yearly archives date format', 'uni-education' ) ) ) );

			// If using the %monthnum% tag, add a link to the monthly archive.
			elseif ( '%monthnum%' == $tag )
				$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_month_link( get_the_time( 'Y', $post_id ), get_the_time( 'm', $post_id ) ) ), sprintf( $this->labels['archive_month'], get_the_time( esc_html_x( 'F', 'monthly archives date format', 'uni-education' ) ) ) );

			// If using the %day% tag, add a link to the daily archive.
			elseif ( '%day%' == $tag )
				$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_day_link( get_the_time( 'Y', $post_id ), get_the_time( 'm', $post_id ), get_the_time( 'd', $post_id ) ) ), sprintf( $this->labels['archive_day'], get_the_time( esc_html_x( 'j', 'daily archives date format', 'uni-education' ) ) ) );

			// If using the %author% tag, add a link to the post author archive.
			elseif ( '%author%' == $tag )
				$this->items[] = sprintf( '<a href="%s">%s</a>', esc_url( get_author_posts_url( $post->post_author ) ), get_the_author_meta( 'display_name', $post->post_author ) );

			// If using the %category% tag, add a link to the first category archive to match permalinks.
			elseif ( '%category%' == $tag ) {

				// Force override terms in this post type.
				$this->post_taxonomy[ $post->post_type ] = false;

				// Add the post categories.
				$this->add_post_terms( $post_id, 'category' );
			}
		}
	}
}
}
|
PHP
|
MIT
|
ke975/Escuela-faren/wordpress/wp-content/themes/uni-education/inc/breadcrumb.php
|
4950c3bb-e3fb-4c3f-83bf-28519e0f6d8e
|
[{"tag": "NAME", "value": "Justin Tadlock", "start": 896, "end": 910, "context": "BreadcrumbTrail\n * @version 1.0.0\n * @author Justin Tadlock <justin@justintadlock.com>\n * @copyright Copyrigh"}, {"tag": "EMAIL", "value": "justin@justintadlock.com", "start": 912, "end": 936, "context": " * @version 1.0.0\n * @author Justin Tadlock <justin@justintadlock.com>\n * @copyright Copyright (c) 2008 - 2015, Justin "}, {"tag": "NAME", "value": "Justin Tadlock", "start": 979, "end": 993, "context": "ock.com>\n * @copyright Copyright (c) 2008 - 2015, Justin Tadlock\n * @link http://themehybrid.com/plugins/brea"}]
|
[{"tag": "NAME", "value": "Justin Tadlock", "start": 896, "end": 910, "context": "BreadcrumbTrail\n * @version 1.0.0\n * @author Justin Tadlock <justin@justintadlock.com>\n * @copyright Copyrigh"}, {"tag": "EMAIL", "value": "justin@justintadlock.com", "start": 912, "end": 936, "context": " * @version 1.0.0\n * @author Justin Tadlock <justin@justintadlock.com>\n * @copyright Copyright (c) 2008 - 2015, Justin "}, {"tag": "NAME", "value": "Justin Tadlock", "start": 979, "end": 993, "context": "ock.com>\n * @copyright Copyright (c) 2008 - 2015, Justin Tadlock\n * @link http://themehybrid.com/plugins/brea"}]
|
# Homebrew cask for DB Browser for SQLite (sqlitebrowser).
cask "db-browser-for-sqlite" do
  version "3.12.2"
  sha256 "546d57b6c88c2be7517759c016c0bf0313dfcc14adfcb43967f3c5d24657f366"

  # Upstream GitHub release artifact; `verified:` is required because the
  # download host differs from the homepage domain.
  url "https://github.com/sqlitebrowser/sqlitebrowser/releases/download/v#{version}/DB.Browser.for.SQLite-#{version}.dmg",
      verified: "github.com/sqlitebrowser/sqlitebrowser/"
  name "DB Browser for SQLite"
  desc "Browser for SQLite databases"
  homepage "https://sqlitebrowser.org/"

  # Detect new versions by scraping .dmg links on the releases page.
  livecheck do
    url "https://github.com/sqlitebrowser/sqlitebrowser/releases"
    strategy :page_match
    regex(%r{href=.*?/DB\.Browser\.for\.SQLite-(\d+(?:\.\d+)+)(?:-v\d+)?\.dmg}i)
  end

  app "DB Browser for SQLite.app"

  # Leftover files removed by `brew uninstall --zap`.
  zap trash: [
    "~/Library/Preferences/com.sqlitebrowser.sqlitebrowser.plist",
    "~/Library/Saved Application State/net.sourceforge.sqlitebrowser.savedState",
  ]
end
|
Ruby
|
BSD-2-Clause
|
108EAA0A/homebrew-cask/Casks/db-browser-for-sqlite.rb
|
93438df7-9088-4cca-bc5a-eea6a0c694d6
|
[{"tag": "SSH_KEY", "value": "546d57b6c88c2be7517759c016c0bf0313dfcc14adfcb43967f3c5d24657f366", "start": 61, "end": 125, "context": "owser-for-sqlite\" do\n version \"3.12.2\"\n sha256 \"546d57b6c88c2be7517759c016c0bf0313dfcc14adfcb43967f3c5d24657f366\"\n\n url \"https://github.com/sqlitebrowser/sqliteb"}]
|
[{"tag": "KEY", "value": "546d57b6c88c2be7517759c016c0bf0313dfcc14adfcb43967f3c5d24657f366", "start": 61, "end": 125, "context": "owser-for-sqlite\" do\n version \"3.12.2\"\n sha256 \"546d57b6c88c2be7517759c016c0bf0313dfcc14adfcb43967f3c5d24657f366\"\n\n url \"https://github.com/sqlitebrowser/sqliteb"}]
|
/*
*
* Copyright (c) 2020 Project CHIP Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "Server.h"
#include <errno.h>
#include <utility>
#include <lib/dnssd/minimal_mdns/core/DnsHeader.h>
namespace mdns {
namespace Minimal {
namespace {
// RAII guard: shuts the given server down in its destructor unless
// ReturnSuccess() was called first. Used to unwind partially-initialized
// listen state when ServerBase::Listen() fails midway.
class ShutdownOnError
{
public:
    ShutdownOnError(ServerBase * s) : mServer(s) {}
    ~ShutdownOnError()
    {
        if (mServer != nullptr)
        {
            mServer->Shutdown();
        }
    }

    // Disarms the guard and returns CHIP_NO_ERROR for convenient `return` use.
    CHIP_ERROR ReturnSuccess()
    {
        mServer = nullptr;
        return CHIP_NO_ERROR;
    }

private:
    ServerBase * mServer;
};
/**
 * Extracts the Listening UDP Endpoint from an underlying ServerBase::EndpointInfo
 */
class ListenSocketPickerDelegate : public ServerBase::BroadcastSendDelegate
{
public:
    // Always picks the endpoint's multicast-listen socket.
    chip::Inet::UDPEndPoint * Accept(ServerBase::EndpointInfo * info) override { return info->listen_udp; }
};
#if CHIP_MINMDNS_USE_EPHEMERAL_UNICAST_PORT

/**
 * Extracts the Querying UDP Endpoint from an underlying ServerBase::EndpointInfo
 */
class QuerySocketPickerDelegate : public ServerBase::BroadcastSendDelegate
{
public:
    // Picks the ephemeral-port socket dedicated to unicast queries.
    chip::Inet::UDPEndPoint * Accept(ServerBase::EndpointInfo * info) override { return info->unicast_query_udp; }
};

#else

// Without an ephemeral unicast port, queries go out over the listen socket.
using QuerySocketPickerDelegate = ListenSocketPickerDelegate;

#endif
/**
 * Validates that an endpoint belongs to a specific interface/ip address type before forwarding the
 * endpoint accept logic to another BroadcastSendDelegate.
 *
 * Usage like:
 *
 * SomeDelegate *child = ....;
 * InterfaceTypeFilterDelegate filter(interfaceId, IPAddressType::IPv6, child);
 *
 * UDPEndPoint *udp = filter.Accept(endpointInfo);
 */
class InterfaceTypeFilterDelegate : public ServerBase::BroadcastSendDelegate
{
public:
    InterfaceTypeFilterDelegate(chip::Inet::InterfaceId interface, chip::Inet::IPAddressType type,
                                ServerBase::BroadcastSendDelegate * child) :
        mInterface(interface),
        mAddressType(type), mChild(child)
    {}

    chip::Inet::UDPEndPoint * Accept(ServerBase::EndpointInfo * info) override
    {
        // Endpoints bound to the null (any) interface always pass the filter.
        if ((info->interfaceId != mInterface) && (info->interfaceId != chip::Inet::InterfaceId::Null()))
        {
            return nullptr;
        }

        // kAny in the filter accepts every address type.
        if ((mAddressType != chip::Inet::IPAddressType::kAny) && (info->addressType != mAddressType))
        {
            return nullptr;
        }

        // Delegate the actual socket selection to the wrapped delegate.
        return mChild->Accept(info);
    }

private:
    chip::Inet::InterfaceId mInterface;
    chip::Inet::IPAddressType mAddressType;
    ServerBase::BroadcastSendDelegate * mChild = nullptr;
};
} // namespace
namespace BroadcastIpAddresses {

// Get standard mDNS Broadcast addresses

// Writes the well-known mDNS IPv6 multicast address (FF02::FB) into dest.
void GetIpv6Into(chip::Inet::IPAddress & dest)
{
    if (!chip::Inet::IPAddress::FromString("FF02::FB", dest))
    {
        ChipLogError(Discovery, "Failed to parse standard IPv6 broadcast address");
    }
}

// Writes the well-known mDNS IPv4 multicast address (224.0.0.251) into dest.
void GetIpv4Into(chip::Inet::IPAddress & dest)
{
    if (!chip::Inet::IPAddress::FromString("224.0.0.251", dest))
    {
        ChipLogError(Discovery, "Failed to parse standard IPv4 broadcast address");
    }
}

} // namespace BroadcastIpAddresses
namespace {
// Joins the standard mDNS multicast group (IPv4 or IPv6, depending on
// addressType) on the given interface for the endpoint.
// Returns CHIP_ERROR_INVALID_ARGUMENT for unsupported address types.
CHIP_ERROR JoinMulticastGroup(chip::Inet::InterfaceId interfaceId, chip::Inet::UDPEndPoint * endpoint,
                              chip::Inet::IPAddressType addressType)
{
    chip::Inet::IPAddress address;

    if (addressType == chip::Inet::IPAddressType::kIPv6)
    {
        BroadcastIpAddresses::GetIpv6Into(address);
#if INET_CONFIG_ENABLE_IPV4
    }
    else if (addressType == chip::Inet::IPAddressType::kIPv4)
    {
        BroadcastIpAddresses::GetIpv4Into(address);
#endif // INET_CONFIG_ENABLE_IPV4
    }
    else
    {
        return CHIP_ERROR_INVALID_ARGUMENT;
    }

    return endpoint->JoinMulticastGroup(interfaceId, address);
}
// Human-readable name for an IP address type, used in log messages only.
const char * AddressTypeStr(chip::Inet::IPAddressType addressType)
{
    switch (addressType)
    {
    case chip::Inet::IPAddressType::kIPv6:
        return "IPv6";
#if INET_CONFIG_ENABLE_IPV4
    case chip::Inet::IPAddressType::kIPv4:
        return "IPv4";
#endif // INET_CONFIG_ENABLE_IPV4
    default:
        return "UNKNOWN";
    }
}
// Frees and nulls out the endpoint's sockets; safe to call on an endpoint
// that was never (or only partially) initialized.
void ShutdownEndpoint(mdns::Minimal::ServerBase::EndpointInfo & aEndpoint)
{
    if (aEndpoint.listen_udp != nullptr)
    {
        aEndpoint.listen_udp->Free();
        aEndpoint.listen_udp = nullptr;
    }

#if CHIP_MINMDNS_USE_EPHEMERAL_UNICAST_PORT
    if (aEndpoint.unicast_query_udp != nullptr)
    {
        aEndpoint.unicast_query_udp->Free();
        aEndpoint.unicast_query_udp = nullptr;
    }
#endif
}
} // namespace
ServerBase::~ServerBase()
{
    Shutdown();
}

// Frees every endpoint's sockets; idempotent.
void ServerBase::Shutdown()
{
    for (size_t i = 0; i < mEndpointCount; i++)
    {
        ShutdownEndpoint(mEndpoints[i]);
    }
}

// True when at least one listen socket is currently open.
bool ServerBase::IsListening() const
{
    for (size_t i = 0; i < mEndpointCount; i++)
    {
        if (mEndpoints[i].listen_udp != nullptr)
        {
            return true;
        }
    }
    return false;
}
// Creates one listen socket (and, when enabled, one ephemeral unicast-query
// socket) per interface/address-type pair yielded by the iterator, joining
// the mDNS multicast group on each. Failure to join the group is non-fatal
// (unicast-only operation); any other failure shuts everything down via the
// ShutdownOnError guard.
CHIP_ERROR ServerBase::Listen(chip::Inet::InetLayer * inetLayer, ListenIterator * it, uint16_t port)
{
    Shutdown(); // ensure everything starts fresh

    size_t endpointIndex = 0;
    chip::Inet::InterfaceId interfaceId = chip::Inet::InterfaceId::Null();
    chip::Inet::IPAddressType addressType;

    ShutdownOnError autoShutdown(this);

    while (it->Next(&interfaceId, &addressType))
    {
        ReturnErrorCodeIf(endpointIndex >= mEndpointCount, CHIP_ERROR_NO_MEMORY);

        EndpointInfo * info = &mEndpoints[endpointIndex];
        info->addressType = addressType;
        info->interfaceId = interfaceId;

        ReturnErrorOnFailure(inetLayer->GetUDPEndPointManager()->NewEndPoint(&info->listen_udp));

        ReturnErrorOnFailure(info->listen_udp->Bind(addressType, chip::Inet::IPAddress::Any, port, interfaceId));

        ReturnErrorOnFailure(info->listen_udp->Listen(OnUdpPacketReceived, nullptr /*OnReceiveError*/, this));

        CHIP_ERROR err = JoinMulticastGroup(interfaceId, info->listen_udp, addressType);
        if (err != CHIP_NO_ERROR)
        {
            char interfaceName[chip::Inet::InterfaceId::kMaxIfNameLength];
            interfaceId.GetInterfaceName(interfaceName, sizeof(interfaceName));

            // Log only as non-fatal error. Failure to join will mean we reply to unicast queries only.
            ChipLogError(DeviceLayer, "MDNS failed to join multicast group on %s for address type %s: %s", interfaceName,
                         AddressTypeStr(addressType), chip::ErrorStr(err));
            ShutdownEndpoint(mEndpoints[endpointIndex]);
        }
        else
        {
            endpointIndex++;
        }

#if CHIP_MINMDNS_USE_EPHEMERAL_UNICAST_PORT
        // Separate UDP endpoint for unicast queries, bound to 0 (i.e. pick random ephemeral port)
        //   - helps in not having conflicts on port 5353, will receive unicast replies directly
        //   - has a *DRAWBACK* of unicast queries being considered LEGACY by mdns since they do
        //     not originate from 5353 and the answers will include a query section.
        ReturnErrorOnFailure(inetLayer->GetUDPEndPointManager()->NewEndPoint(&info->unicast_query_udp));
        ReturnErrorOnFailure(info->unicast_query_udp->Bind(addressType, chip::Inet::IPAddress::Any, 0, interfaceId));
        ReturnErrorOnFailure(info->unicast_query_udp->Listen(OnUdpPacketReceived, nullptr /*OnReceiveError*/, this));
#endif
    }

    return autoShutdown.ReturnSuccess();
}
// Sends `data` to a specific address/port using the first listen socket whose
// address type matches and whose bound interface (if any) matches `interface`.
// Returns CHIP_ERROR_NOT_CONNECTED when no suitable socket exists.
CHIP_ERROR ServerBase::DirectSend(chip::System::PacketBufferHandle && data, const chip::Inet::IPAddress & addr, uint16_t port,
                                  chip::Inet::InterfaceId interface)
{
    for (size_t i = 0; i < mEndpointCount; i++)
    {
        EndpointInfo * info = &mEndpoints[i];
        if (info->listen_udp == nullptr)
        {
            continue;
        }

        if (info->addressType != addr.Type())
        {
            continue;
        }

        chip::Inet::InterfaceId boundIf = info->listen_udp->GetBoundInterface();

        if ((boundIf.IsPresent()) && (boundIf != interface))
        {
            continue;
        }

        return info->listen_udp->SendTo(addr, port, std::move(data));
    }

    return CHIP_ERROR_NOT_CONNECTED;
}
// Broadcasts a query over the dedicated query sockets (all endpoints).
CHIP_ERROR ServerBase::BroadcastUnicastQuery(chip::System::PacketBufferHandle && data, uint16_t port)
{
    QuerySocketPickerDelegate socketPicker;
    return BroadcastImpl(std::move(data), port, &socketPicker);
}

// Broadcasts a query over query sockets filtered by interface/address type.
CHIP_ERROR ServerBase::BroadcastUnicastQuery(chip::System::PacketBufferHandle && data, uint16_t port,
                                             chip::Inet::InterfaceId interface, chip::Inet::IPAddressType addressType)
{
    QuerySocketPickerDelegate socketPicker;
    InterfaceTypeFilterDelegate filter(interface, addressType, &socketPicker);

    return BroadcastImpl(std::move(data), port, &filter);
}

// Broadcasts over listen sockets filtered by interface/address type.
CHIP_ERROR ServerBase::BroadcastSend(chip::System::PacketBufferHandle && data, uint16_t port, chip::Inet::InterfaceId interface,
                                     chip::Inet::IPAddressType addressType)
{
    ListenSocketPickerDelegate socketPicker;
    InterfaceTypeFilterDelegate filter(interface, addressType, &socketPicker);

    return BroadcastImpl(std::move(data), port, &filter);
}

// Broadcasts over the listen sockets of all endpoints.
CHIP_ERROR ServerBase::BroadcastSend(chip::System::PacketBufferHandle && data, uint16_t port)
{
    ListenSocketPickerDelegate socketPicker;
    return BroadcastImpl(std::move(data), port, &socketPicker);
}
// Sends `data` to the standard mDNS multicast address on every endpoint whose
// socket the delegate accepts. Succeeds if at least one send succeeds.
CHIP_ERROR ServerBase::BroadcastImpl(chip::System::PacketBufferHandle && data, uint16_t port, BroadcastSendDelegate * delegate)
{
    // Broadcast requires sending data multiple times, each of which may error
    // out, yet broadcast only has a single error code.
    //
    // The general logic of error handling is:
    //    - if no send done at all, return error
    //    - if at least one broadcast succeeds, assume success overall
    //      + some internal consistency validations for state error.

    bool hadSuccesfulSend = false;
    CHIP_ERROR lastError  = CHIP_ERROR_NO_ENDPOINT;

    for (size_t i = 0; i < mEndpointCount; i++)
    {
        EndpointInfo * info           = &mEndpoints[i];
        chip::Inet::UDPEndPoint * udp = delegate->Accept(info);

        if (udp == nullptr)
        {
            continue;
        }

        CHIP_ERROR err;

        /// The same packet needs to be sent over potentially multiple interfaces.
        /// LWIP does not like having a pbuf sent over serparate interfaces, hence we create a copy
        /// for sending via `CloneData`
        ///
        /// TODO: this wastes one copy of the data and that could be optimized away
        if (info->addressType == chip::Inet::IPAddressType::kIPv6)
        {
            err = udp->SendTo(mIpv6BroadcastAddress, port, data.CloneData(), udp->GetBoundInterface());
        }
#if INET_CONFIG_ENABLE_IPV4
        else if (info->addressType == chip::Inet::IPAddressType::kIPv4)
        {
            err = udp->SendTo(mIpv4BroadcastAddress, port, data.CloneData(), udp->GetBoundInterface());
        }
#endif
        else
        {
            // This is a general error of internal consistency: every address has a known type
            // Fail completely otherwise.
            return CHIP_ERROR_INCORRECT_STATE;
        }

        if (err == CHIP_NO_ERROR)
        {
            hadSuccesfulSend = true;
        }
        else
        {
            ChipLogError(Discovery, "Attempt to mDNS broadcast failed:  %s", chip::ErrorStr(err));
            lastError = err;
        }
    }

    if (!hadSuccesfulSend)
    {
        return lastError;
    }

    return CHIP_NO_ERROR;
}
// Static UDP receive callback: validates the packet is large enough to hold a
// DNS header, then dispatches it to the server's delegate as either a query or
// a response based on the header's QR flag. `endPoint->mAppState` carries the
// owning ServerBase instance.
void ServerBase::OnUdpPacketReceived(chip::Inet::UDPEndPoint * endPoint, chip::System::PacketBufferHandle && buffer,
                                     const chip::Inet::IPPacketInfo * info)
{
    ServerBase * srv = static_cast<ServerBase *>(endPoint->mAppState);
    if (!srv->mDelegate)
    {
        return;
    }

    mdns::Minimal::BytesRange data(buffer->Start(), buffer->Start() + buffer->DataLength());
    if (data.Size() < HeaderRef::kSizeBytes)
    {
        // Corrected log-message typo ("to small" -> "too small").
        ChipLogError(Discovery, "Packet too small for mDNS data: %d bytes", static_cast<int>(data.Size()));
        return;
    }

    if (HeaderRef(const_cast<uint8_t *>(data.Start())).GetFlags().IsQuery())
    {
        srv->mDelegate->OnQuery(data, info);
    }
    else
    {
        srv->mDelegate->OnResponse(data, info);
    }
}
} // namespace Minimal
} // namespace mdns
|
C++
|
Apache-2.0
|
sritej20/connectedhomeip/src/lib/dnssd/minimal_mdns/Server.cpp
|
fe2569b6-a9ec-4f2e-9f2b-c1e8ce9429e5
|
[]
|
[]
|
买卖股票的最佳时机 II
我们必须确定通过交易能够获得的最大利润(对于交易次数没有限制)。为此,我们需要找出那些共同使得利润最大化的买入及卖出价格。
解决方案
方法一:暴力法
这种情况下,我们只需要计算与所有可能的交易组合相对应的利润,并找出它们中的最大利润。
class Solution {
    /** Best profit with unlimited transactions — brute-force entry point. */
    public int maxProfit(int[] prices) {
        return calculate(prices, 0);
    }

    /**
     * Recursively tries every buy/sell pair starting at index s and returns
     * the best total profit achievable from day s onward.
     */
    public int calculate(int prices[], int s) {
        if (s >= prices.length)
            return 0;
        int max = 0;
        for (int start = s; start < prices.length; start++) {
            int maxprofit = 0;
            for (int i = start + 1; i < prices.length; i++) {
                if (prices[start] < prices[i]) {
                    // Profit from buying at `start`, selling at `i`, plus the
                    // best profit obtainable from the remaining days.
                    int profit = calculate(prices, i + 1) + prices[i] - prices[start];
                    if (profit > maxprofit)
                        maxprofit = profit;
                }
            }
            if (maxprofit > max)
                max = maxprofit;
        }
        return max;
    }
}
复杂度分析
时间复杂度:O(n^n)O(n
n
),调用递归函数 n^nn
n
次。
空间复杂度:O(n)O(n),递归的深度为 nn。
方法二:峰谷法
算法
假设给定的数组为:
[7, 1, 5, 3, 6, 4]
如果我们在图表上绘制给定数组中的数字,我们将会得到:
Profit Graph
如果我们分析图表,那么我们的兴趣点是连续的峰和谷。
https://pic.leetcode-cn.com/d447f96d20d1cfded20a5d08993b3658ed08e295ecc9aea300ad5e3f4466e0fe-file_1555699515174
用数学语言描述为:
Total Profit= \sum_{i}(height(peak_i)-height(valley_i))
TotalProfit=
i
∑
(height(peak
i
)−height(valley
i
))
关键是我们需要考虑到紧跟谷的每一个峰值以最大化利润。如果我们试图跳过其中一个峰值来获取更多利润,那么我们最终将失去其中一笔交易中获得的利润,从而导致总利润的降低。
例如,在上述情况下,如果我们跳过 peak_ipeak
i
和 valley_jvalley
j
试图通过考虑差异较大的点以获取更多的利润,获得的净利润总是会小于包含它们而获得的净利润,因为 CC 总是小于 A+BA+B。
class Solution {
    /**
     * Peak-valley scan: walk the prices, descending to each local valley and
     * then climbing to the following peak, accumulating every peak-valley
     * difference as profit.
     */
    public int maxProfit(int[] prices) {
        final int last = prices.length - 1;
        int low = prices[0];
        int high = prices[0];
        int profit = 0;
        int idx = 0;
        while (idx < last) {
            // Slide down to the next valley.
            while (idx < last && prices[idx] >= prices[idx + 1]) {
                idx++;
            }
            low = prices[idx];
            // Climb up to the next peak.
            while (idx < last && prices[idx] <= prices[idx + 1]) {
                idx++;
            }
            high = prices[idx];
            profit += high - low;
        }
        return profit;
    }
}
复杂度分析
时间复杂度:O(n)O(n)。遍历一次。
空间复杂度:O(1)O(1)。需要常量的空间。
方法三:简单的一次遍历
算法
该解决方案遵循 方法二 的本身使用的逻辑,但有一些轻微的变化。在这种情况下,我们可以简单地继续在斜坡上爬升并持续增加从连续交易中获得的利润,而不是在谷之后寻找每个峰值。最后,我们将有效地使用峰值和谷值,但我们不需要跟踪峰值和谷值对应的成本以及最大利润,但我们可以直接继续增加加数组的连续数字之间的差值,如果第二个数字大于第一个数字,我们获得的总和将是最大利润。这种方法将简化解决方案。
这个例子可以更清楚地展现上述情况:
[1, 7, 2, 3, 6, 7, 6, 7]
与此数组对应的图形是:
Profit Graph
https://pic.leetcode-cn.com/6eaf01901108809ca5dfeaef75c9417d6b287c841065525083d1e2aac0ea1de4-file_1555699697692
从上图中,我们可以观察到 A+B+CA+B+C 的和等于差值 DD 所对应的连续峰和谷的高度之差。
class Solution {
    /** One pass: sum every positive day-over-day price difference. */
    public int maxProfit(int[] prices) {
        int total = 0;
        for (int day = 1; day < prices.length; day++) {
            int delta = prices[day] - prices[day - 1];
            if (delta > 0) {
                total += delta;
            }
        }
        return total;
    }
}
复杂度分析
时间复杂度:O(n)O(n),遍历一次。
空间复杂度:O(1)O(1),需要常量的空间。
|
Markdown
|
MIT
|
IThawk/learnCode/javaStructures/leecode-learn/src/main/java/com/ithawk/learn/leetCode/array/readerma.md
|
1cde64e1-91dd-4f5a-88d7-6c5e6c6d7ee7
|
[]
|
[]
|
[@bjerkio/crayon-api](../README.md) / [Exports](../modules.md) / ApiV1SubscriptionsSubscriptionIdConversionsPostRequest
# Interface: ApiV1SubscriptionsSubscriptionIdConversionsPostRequest
## Table of contents
### Properties
- [subscriptionConversion](ApiV1SubscriptionsSubscriptionIdConversionsPostRequest.md#subscriptionconversion)
- [subscriptionId](ApiV1SubscriptionsSubscriptionIdConversionsPostRequest.md#subscriptionid)
## Properties
### subscriptionConversion
• `Optional` **subscriptionConversion**: [`SubscriptionConversion`](SubscriptionConversion.md)
#### Defined in
[src/apis/SubscriptionsApi.ts:128](https://github.com/bjerkio/crayon-api-js/blob/22cd66d/src/apis/SubscriptionsApi.ts#L128)
___
### subscriptionId
• **subscriptionId**: `number`
#### Defined in
[src/apis/SubscriptionsApi.ts:127](https://github.com/bjerkio/crayon-api-js/blob/22cd66d/src/apis/SubscriptionsApi.ts#L127)
|
Markdown
|
MIT
|
bjerkio/crayon-api-js/docs/interfaces/ApiV1SubscriptionsSubscriptionIdConversionsPostRequest.md
|
7ab4915e-0fb5-4034-b4ff-bef8f331e215
|
[{"tag": "USERNAME", "value": "bjerkio", "start": 840, "end": 847, "context": "/apis/SubscriptionsApi.ts:127](https://github.com/bjerkio/crayon-api-js/blob/22cd66d/src/apis/Subscriptions"}, {"tag": "USERNAME", "value": "bjerkio", "start": 641, "end": 648, "context": "/apis/SubscriptionsApi.ts:128](https://github.com/bjerkio/crayon-api-js/blob/22cd66d/src/apis/Subscriptions"}]
|
[{"tag": "USERNAME", "value": "bjerkio", "start": 840, "end": 847, "context": "/apis/SubscriptionsApi.ts:127](https://github.com/bjerkio/crayon-api-js/blob/22cd66d/src/apis/Subscriptions"}, {"tag": "USERNAME", "value": "bjerkio", "start": 641, "end": 648, "context": "/apis/SubscriptionsApi.ts:128](https://github.com/bjerkio/crayon-api-js/blob/22cd66d/src/apis/Subscriptions"}]
|
<?php
/**
* DestinyBreakerType
*
* PHP version 5
*
* @category Class
* @package Bungie
* @author OpenAPI Generator team
* @link https://openapi-generator.tech
*/
/**
* Bungie.Net API
*
* These endpoints constitute the functionality exposed by Bungie.net, both for more traditional website functionality and for connectivity to Bungie video games and their related functionality.
*
* OpenAPI spec version: 2.9.0
* Contact: support@bungie.com
* Generated by: https://openapi-generator.tech
* OpenAPI Generator version: 3.3.4-SNAPSHOT
*/
/**
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
namespace Bungie\Model\Destiny;
use \Bungie\ObjectSerializer;
/**
 * DestinyBreakerType Class Doc Comment
 *
 * @category Class
 * @description A plug can optionally have a \"Breaker Type\": a special ability that can affect units in unique ways. Activating this plug can grant one of these types.
 * @package Bungie
 * @author OpenAPI Generator team
 * @link https://openapi-generator.tech
 */
class DestinyBreakerType
{
    /**
     * Possible values of this enum
     */
    const DESTINY_BREAKER_TYPE_NONE = 0;
    const DESTINY_BREAKER_TYPE_SHIELDPIERCING = 1;
    const DESTINY_BREAKER_TYPE_DISRUPTION = 2;
    const DESTINY_BREAKER_TYPE_STAGGER = 3;

    /**
     * Gets allowable values of the enum
     *
     * @return int[] All enum constant values, in declaration order.
     */
    public static function getAllowableEnumValues()
    {
        return [
            self::DESTINY_BREAKER_TYPE_NONE,
            self::DESTINY_BREAKER_TYPE_SHIELDPIERCING,
            self::DESTINY_BREAKER_TYPE_DISRUPTION,
            self::DESTINY_BREAKER_TYPE_STAGGER,
        ];
    }
}
|
PHP
|
BSD-3-Clause
|
Yogarine/bungie-sdk-php/src/Model/Destiny/DestinyBreakerType.php
|
f15ef5bc-63cc-42d5-8fa9-4f19831616d7
|
[{"tag": "EMAIL", "value": "support@bungie.com", "start": 445, "end": 463, "context": "ty.\n *\n * OpenAPI spec version: 2.9.0\n * Contact: support@bungie.com\n * Generated by: https://openapi-generator.tech\n "}]
|
[{"tag": "EMAIL", "value": "support@bungie.com", "start": 445, "end": 463, "context": "ty.\n *\n * OpenAPI spec version: 2.9.0\n * Contact: support@bungie.com\n * Generated by: https://openapi-generator.tech\n "}]
|
/*
* UDP server wrapper class.
*
* @author Michel Megens
* @email dev@bietje.net
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <lwiot.h>
#include <lwiot/types.h>
#include <lwiot/log.h>
#include <lwiot/stl/string.h>
#include <lwiot/error.h>
#include <lwiot/network/stdnet.h>
#include <lwiot/network/udpclient.h>
#include <lwiot/network/socketudpclient.h>
namespace lwiot
{
	// Default-constructed client has no socket; one is created lazily by the
	// read/write paths or explicitly via begin().
	SocketUdpClient::SocketUdpClient() : UdpClient(), _socket(nullptr), _noclose(false)
	{
	}

	// Wraps an existing server socket when srv is non-null; the socket is then
	// borrowed (_noclose) and will not be closed by this object.
	SocketUdpClient::SocketUdpClient(const IPAddress &addr, uint16_t port, socket_t* srv) :
		UdpClient(addr, port), _socket(srv)
	{
		if(srv == nullptr)
			this->init();
		else
			this->_noclose = true;
	}

	// Host-name constructor; creates an owned socket immediately.
	SocketUdpClient::SocketUdpClient(const lwiot::String &host, uint16_t port) : UdpClient(host, port)
	{
		this->init();
	}

	// (Re)starts the client: resolves the host, drops any owned socket and
	// creates a fresh one.
	void SocketUdpClient::begin()
	{
		this->resolve();
		this->close();
		this->init();
	}

	void SocketUdpClient::begin(const lwiot::String &host, uint16_t port)
	{
		this->_host = host;
		// Port is stored in network byte order (to_netorders).
		this->_port = to_netorders(port);
		this->begin();
	}

	void SocketUdpClient::begin(const lwiot::IPAddress &addr, uint16_t port)
	{
		this->_host = "";
		this->_remote = addr;
		this->_port = to_netorders(port);
		this->begin();
	}

	// Creates an owned UDP socket targeting the client's remote address.
	void SocketUdpClient::init()
	{
		remote_addr_t remote;

		this->address().toRemoteAddress(remote);
		this->_socket = udp_socket_create(&remote);
		this->_noclose = false;
	}

	SocketUdpClient::~SocketUdpClient()
	{
		this->close();
	}

	// Closes the socket unless it is borrowed (_noclose).
	void SocketUdpClient::close()
	{
		if(!this->_noclose && this->_socket != nullptr) {
			socket_close(this->_socket);
			this->_socket = nullptr;
		}
	}

	void SocketUdpClient::setTimeout(time_t seconds)
	{
		UdpClient::setTimeout(seconds);
		socket_set_timeout(this->_socket, seconds);
	}

	// Sends `length` bytes to the configured remote. Lazily resolves the host
	// and creates the socket on first use; returns -EINVALID when no socket
	// can be created.
	ssize_t SocketUdpClient::write(const void *buffer, const size_t& length)
	{
		remote_addr_t remote;

		if(this->_socket == nullptr) {
			this->resolve();
			this->init();
		}

		if(this->_socket == nullptr)
			return -EINVALID;

		this->address().toRemoteAddress(remote);
		remote.version = this->address().version();
		// NOTE(review): port() presumably returns the stored network-order
		// value set in begin() — confirm udp_send_to expects that order.
		remote.port = this->port();

		return udp_send_to(this->_socket, buffer, length, &remote);
	}

	// Receives up to `length` bytes; same lazy socket creation as write().
	ssize_t SocketUdpClient::read(void *buffer, const size_t& length)
	{
		remote_addr_t remote;

		if(this->_socket == nullptr) {
			this->resolve();
			this->init();
		}

		if(this->_socket == nullptr)
			return -EINVALID;

		remote.version = this->address().version();
		return udp_recv_from(this->_socket, buffer, length, &remote);
	}

	// Number of bytes ready to read; -EINVALID (wrapped in size_t) when no
	// socket exists.
	size_t SocketUdpClient::available() const
	{
		if(this->_socket == nullptr)
			return -EINVALID;

		return udp_socket_available(this->_socket);
	}
}
|
C++
|
Apache-2.0
|
lwIoT/lwIoT/source/net/udp/socketudpclient.cpp
|
cb927d25-8c11-4f6a-a4a7-09186857af9f
|
[{"tag": "EMAIL", "value": "dev@bietje.net", "start": 75, "end": 89, "context": " class.\r\n *\r\n * @author Michel Megens\r\n * @email dev@bietje.net\r\n */\r\n\r\n#include <stdlib.h>\r\n#include <stdio.h>\r\n"}, {"tag": "NAME", "value": "Michel Megens", "start": 49, "end": 62, "context": "/*\r\n * UDP server wrapper class.\r\n *\r\n * @author Michel Megens\r\n * @email dev@bietje.net\r\n */\r\n\r\n#include <stdl"}]
|
[{"tag": "EMAIL", "value": "dev@bietje.net", "start": 75, "end": 89, "context": " class.\r\n *\r\n * @author Michel Megens\r\n * @email dev@bietje.net\r\n */\r\n\r\n#include <stdlib.h>\r\n#include <stdio.h>\r\n"}, {"tag": "NAME", "value": "Michel Megens", "start": 49, "end": 62, "context": "/*\r\n * UDP server wrapper class.\r\n *\r\n * @author Michel Megens\r\n * @email dev@bietje.net\r\n */\r\n\r\n#include <stdl"}]
|
import { ComponentFixture, TestBed, async } from '@angular/core/testing';
import { Observable } from 'rxjs/Observable';
import { GatewayServiceTestModule } from '../../../test.module';
import { Principal, AccountService } from '../../../../../../main/webapp/app/shared';
import { SettingsComponent } from '../../../../../../main/webapp/app/account/settings/settings.component';
describe('Component Tests', () => {

    // Unit tests for the account settings page component.
    describe('SettingsComponent', () => {

        let comp: SettingsComponent;
        let fixture: ComponentFixture<SettingsComponent>;
        let mockAuth: any;
        let mockPrincipal: any;

        beforeEach(async(() => {
            TestBed.configureTestingModule({
                imports: [GatewayServiceTestModule],
                declarations: [SettingsComponent],
                providers: [
                ]
            })
            // Render with an empty template: only the class logic is under test.
            .overrideTemplate(SettingsComponent, '')
            .compileComponents();
        }));

        beforeEach(() => {
            fixture = TestBed.createComponent(SettingsComponent);
            comp = fixture.componentInstance;
            // Mock services come from GatewayServiceTestModule and expose spies
            // (saveSpy, identitySpy, setResponse).
            mockAuth = fixture.debugElement.injector.get(AccountService);
            mockPrincipal = fixture.debugElement.injector.get(Principal);
        });

        it('should send the current identity upon save', () => {
            // GIVEN
            const accountValues = {
                firstName: 'John',
                lastName: 'Doe',

                activated: true,
                email: 'john.doe@mail.com',
                langKey: 'en',
                login: 'john'
            };
            mockPrincipal.setResponse(accountValues);

            // WHEN
            comp.settingsAccount = accountValues;
            comp.save();

            // THEN
            expect(mockPrincipal.identitySpy).toHaveBeenCalled();
            expect(mockAuth.saveSpy).toHaveBeenCalledWith(accountValues);
            expect(comp.settingsAccount).toEqual(accountValues);
        });

        it('should notify of success upon successful save', () => {
            // GIVEN
            const accountValues = {
                firstName: 'John',
                lastName: 'Doe'
            };
            mockPrincipal.setResponse(accountValues);

            // WHEN
            comp.save();

            // THEN
            expect(comp.error).toBeNull();
            expect(comp.success).toBe('OK');
        });

        it('should notify of error upon failed save', () => {
            // GIVEN
            mockAuth.saveSpy.and.returnValue(Observable.throw('ERROR'));

            // WHEN
            comp.save();

            // THEN
            expect(comp.error).toEqual('ERROR');
            expect(comp.success).toBeNull();
        });
    });
});
|
TypeScript
|
MIT
|
jeffersonxavier/jhipster-microservices/gateway-service/src/test/javascript/spec/app/account/settings/settings.component.spec.ts
|
1132e079-5915-42a9-a077-ab117ab9e3b1
|
[{"tag": "NAME", "value": "John", "start": 2146, "end": 2150, "context": "nst accountValues = {\n firstName: 'John',\n lastName: 'Doe'\n };\n"}, {"tag": "NAME", "value": "john", "start": 1583, "end": 1587, "context": " langKey: 'en',\n login: 'john'\n };\n mockPrincipal.setResp"}, {"tag": "NAME", "value": "Doe", "start": 1444, "end": 1447, "context": " firstName: 'John',\n lastName: 'Doe',\n\n activated: true,\n "}, {"tag": "NAME", "value": "Doe", "start": 2180, "end": 2183, "context": " firstName: 'John',\n lastName: 'Doe'\n };\n mockPrincipal.setResp"}, {"tag": "EMAIL", "value": "john.doe@mail.com", "start": 1508, "end": 1525, "context": " activated: true,\n email: 'john.doe@mail.com',\n langKey: 'en',\n "}, {"tag": "NAME", "value": "John", "start": 1410, "end": 1414, "context": "nst accountValues = {\n firstName: 'John',\n lastName: 'Doe',\n\n "}]
|
[{"tag": "NAME", "value": "John", "start": 2146, "end": 2150, "context": "nst accountValues = {\n firstName: 'John',\n lastName: 'Doe'\n };\n"}, {"tag": "NAME", "value": "john", "start": 1583, "end": 1587, "context": " langKey: 'en',\n login: 'john'\n };\n mockPrincipal.setResp"}, {"tag": "NAME", "value": "Doe", "start": 1444, "end": 1447, "context": " firstName: 'John',\n lastName: 'Doe',\n\n activated: true,\n "}, {"tag": "NAME", "value": "Doe", "start": 2180, "end": 2183, "context": " firstName: 'John',\n lastName: 'Doe'\n };\n mockPrincipal.setResp"}, {"tag": "EMAIL", "value": "john.doe@mail.com", "start": 1508, "end": 1525, "context": " activated: true,\n email: 'john.doe@mail.com',\n langKey: 'en',\n "}, {"tag": "NAME", "value": "John", "start": 1410, "end": 1414, "context": "nst accountValues = {\n firstName: 'John',\n lastName: 'Doe',\n\n "}]
|
<?php
// +----------------------------------------------------------------------
// | ThinkPHP [ WE CAN DO IT JUST THINK ]
// +----------------------------------------------------------------------
// | Copyright (c) 2006-2016 http://thinkphp.cn All rights reserved.
// +----------------------------------------------------------------------
// | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 )
// +----------------------------------------------------------------------
// | Author: liu21st <liu21st@gmail.com>
// +----------------------------------------------------------------------
// [ Application entry file ]
// Define the application directory.
define('APP_PATH', __DIR__ . '/../app/');
// Load the ThinkPHP framework bootstrap file.
require __DIR__ . '/../thinkphp/start.php';
// Optionally pre-build a module's directory structure:
//\think\Build::module('Console');
// Optionally bind every incoming request to the console module:
//define('BIND_MODULE','console/index/index');
// Optionally place the config directory at the same level as the app directory:
//define('CONF_PATH', __DIR__.'/../config/');
|
PHP
|
Apache-2.0
|
Dduan666/Super/public/index.php
|
94863db1-4159-44fa-9daf-98c2cef1b60a
|
[{"tag": "EMAIL", "value": "liu21st@gmail.com", "start": 500, "end": 517, "context": "---------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +--------------------------------------------"}, {"tag": "USERNAME", "value": "liu21st", "start": 491, "end": 498, "context": "------------------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +-------------------------"}]
|
[{"tag": "EMAIL", "value": "liu21st@gmail.com", "start": 500, "end": 517, "context": "---------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +--------------------------------------------"}, {"tag": "USERNAME", "value": "liu21st", "start": 491, "end": 498, "context": "------------------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +-------------------------"}]
|
#! /usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Packaging script for the Avro Python implementation.
# Prefer setuptools; fall back to distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import sys

# Python <= 2.5 ships without a usable json module, so require simplejson.
install_requires = (['simplejson >= 2.0.9']
                    if sys.version_info[:2] <= (2, 5) else [])

setup(
    name='avro',
    version='1.7.6',
    packages=['avro'],
    package_dir={'avro': 'src/avro'},
    scripts=['./scripts/avro'],
    # Ensure simplejson gets installed or upgraded on the target machine.
    install_requires=install_requires,
    # Metadata for upload to PyPI.
    author='Apache Avro',
    author_email='avro-dev@hadoop.apache.org',
    description='Avro is a serialization and RPC framework.',
    license='Apache License 2.0',
    keywords='avro serialization rpc',
    url='http://hadoop.apache.org/avro',
    extras_require={
        'snappy': ['python-snappy'],
    },
)
|
Python
|
Apache-2.0
|
Albertsss/hue/desktop/core/ext-py/avro-1.7.6/setup.py
|
7a9fcf45-2c9a-4278-b847-2b4c421956ba
|
[{"tag": "NAME", "value": "Apache Avro", "start": 1356, "end": 1367, "context": "res,\n\n # metadata for upload to PyPI\n author = 'Apache Avro',\n author_email = 'avro-dev@hadoop.apache.org',\n"}, {"tag": "EMAIL", "value": "avro-dev@hadoop.apache.org", "start": 1388, "end": 1414, "context": " PyPI\n author = 'Apache Avro',\n author_email = 'avro-dev@hadoop.apache.org',\n description = 'Avro is a serialization and RP"}]
|
[{"tag": "NAME", "value": "Apache Avro", "start": 1356, "end": 1367, "context": "res,\n\n # metadata for upload to PyPI\n author = 'Apache Avro',\n author_email = 'avro-dev@hadoop.apache.org',\n"}, {"tag": "EMAIL", "value": "avro-dev@hadoop.apache.org", "start": 1388, "end": 1414, "context": " PyPI\n author = 'Apache Avro',\n author_email = 'avro-dev@hadoop.apache.org',\n description = 'Avro is a serialization and RP"}]
|
# Copyright (c) 2014, Raphael Kubo da Costa <rakuco@FreeBSD.org>
# Redistribution and use is allowed according to the terms of the BSD license.
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
import PyKDE4.kdecore
if __name__ == '__main__':
    # Prefer the legacy pykdeconfig module.  PyQt4 >= 4.10.0 was built
    # with configure-ng.py instead of configure.py, so pyqtconfig.py and
    # pykdeconfig.py are not installed there; fall back to the
    # configuration dictionary exported by PyKDE4.kdecore.
    try:
        import PyKDE4.pykdeconfig

        config = PyKDE4.pykdeconfig.Configuration()
        sip_dir = config.pykde_sip_dir
        sip_flags = config.pykde_kde_sip_flags
    except ImportError:
        configuration = PyKDE4.kdecore.PYKDE_CONFIGURATION
        sip_dir = configuration['sip_dir']
        sip_flags = configuration['sip_flags']

    # Emit one "key:value" line per setting for the CMake caller to parse.
    print('pykde_version:%06.x' % PyKDE4.kdecore.version())
    print('pykde_version_str:%s' % PyKDE4.kdecore.versionString())
    print('pykde_sip_dir:%s' % sip_dir)
    print('pykde_sip_flags:%s' % sip_flags)
|
Python
|
BSD-3-Clause
|
KDE-China/extra-cmake-modules/attic/modules/FindPyKDE4.py
|
5c2052b2-e05b-4280-81f5-97dd71c400e0
|
[{"tag": "NAME", "value": "Raphael Kubo da Costa", "start": 22, "end": 43, "context": "# Copyright (c) 2014, Raphael Kubo da Costa <rakuco@FreeBSD.org>\n# Redistribution and use is "}, {"tag": "EMAIL", "value": "rakuco@FreeBSD.org", "start": 45, "end": 63, "context": "# Copyright (c) 2014, Raphael Kubo da Costa <rakuco@FreeBSD.org>\n# Redistribution and use is allowed according to"}]
|
[{"tag": "NAME", "value": "Raphael Kubo da Costa", "start": 22, "end": 43, "context": "# Copyright (c) 2014, Raphael Kubo da Costa <rakuco@FreeBSD.org>\n# Redistribution and use is "}, {"tag": "EMAIL", "value": "rakuco@FreeBSD.org", "start": 45, "end": 63, "context": "# Copyright (c) 2014, Raphael Kubo da Costa <rakuco@FreeBSD.org>\n# Redistribution and use is allowed according to"}]
|
---
layout: slide
title: ""
---
<section data-background-image="assets/images/Slide16.png" data-background-size="90%" data-background-position="center"></section>
<section markdown="1">
[http://blog.plot.ly/post/125942000947/how-to-analyze-data-6-useful-ways-to-use-color-in](http://blog.plot.ly/post/125942000947/how-to-analyze-data-6-useful-ways-to-use-color-in){:target="_blank"}
</section>
|
Markdown
|
MIT
|
ahgraber/design_language/_posts/0000-02-12-slide16.md
|
f397b8f3-594a-4462-b805-1833834aa5b5
|
[]
|
[]
|
# -*- coding=utf-8 -*-
"""
# library: jionlp
# author: dongrixinyu
# license: Apache License 2.0
# Email: dongrixinyu.89@163.com
# github: https://github.com/dongrixinyu/JioNLP
# description: Preprocessing tool for Chinese NLP
"""
# Package version string.
__version__ = '1.3.49'
import os
from jionlp.util.logger import set_logger
from jionlp.util.zip_file import unzip_file, UNZIP_FILE_LIST
# Module-level logger; log files go under the '.jionlp_logs' directory.
logging = set_logger(level='INFO', log_dir_name='.jionlp_logs')
# unzip dictionary files
# Absolute path of the installed jionlp package directory.
DIR_PATH = os.path.dirname(os.path.abspath(__file__))
# Extract bundled dictionary archives on first import if any file is
# missing.  NOTE(review): unzip_file() takes no arguments, so it is
# presumably re-invoked for each still-missing file — confirm it
# extracts everything in one call.
for file_name in UNZIP_FILE_LIST:
    if not os.path.exists(os.path.join(DIR_PATH, 'dictionary', file_name)):
        unzip_file()
history = """
╭──────────────────────────────────────────────────────────────────────────╮
│ • • • ░░░░░░░░░░░░░░░░░░░░░ History Messages ░░░░░░░░░░░░░░░░░░░░░░░░░ │
├──────────────────────────────────────────────────────────────────────────┤
│ │
│ JioNLP, a python tool for Chinese NLP preprocessing & parsing. │
│ URL: https://github.com/dongrixinyu/JioNLP │
│ │
│ | date | updated funcs and info | │
│ | ---------- | --------------------------------------------------- | │
│ | 2020-03-13 | first push | │
│ | 2020-03-18 | update rules | │
│ | 2020-03-24 | add traditional and simplified conversion | │
│ | 2020-03-26 | add location parser 2019 | │
│ | 2020-03-31 | add sentences splitter | │
│ | 2020-04-02 | add id chard parser | │
│ | 2020-04-03 | add stopwords remover | │
│ | 2020-04-26 | add pinyin and location recognizer | │
│ | 2020-05-26 | add chinese word, char, xiehouyu dict | │
│ | 2020-06-01 | add ner tools | │
│ | 2020-06-10 | add location recognizer | │
│ | 2020-06-30 | add char radical parser | │
│ | 2020-07-07 | add ner acceleration tools and lexicon ner tool | │
│ | 2020-07-13 | add sim hash tool | │
│ | 2020-07-14 | add sentiment analysis | │
│ | 2020-07-27 | add key phrase extraction - ckpe | │
│ | 2020-08-24 | update pinyin | │
│ | 2020-09-14 | add back translation for data augmentation | │
│ | 2020-10-16 | update 2020 china location dictionary | │
│ | 2020-10-19 | add zip_file for compressing the size of dict files | │
│ | 2020-11-10 | add extractive summary func | │
│ | 2020-11-24 | add phone location recognition | │
│ | 2020-12-18 | add idiom solitaire | │
│ | 2020-12-28 | add help searching tool | │
│ | 2021-01-19 | add money number to character tool | │
│ | 2021-01-22 | update outdated china location conversion | │
│ | 2021-02-01 | acquire 400 stars and 58 forks on Github | │
│ | 2021-02-02 | add swap char position text augmentation | │
│ | 2021-02-09 | add homophone and add & delete text augmentation | │
│ | 2021-02-10 | update dictionaries | │
│ | 2021-03-15 | update chinese char dictionaries | │
│ | 2021-03-18 | add replace entity text augmentation | │
│ | 2021-03-24 | update extract money and standardization | │
│ | 2021-04-21 | add solar lunar date conversion | │
│ | 2021-06-23 | add time parser | │
│ | 2021-07-04 | update time parser | │
│ | 2021-07-18 | update time parser | │
│ | 2021-09-01 | add jionlp online version | │
│ | 2021-10-25 | update extract money and parse money | │
│ | 2021-11-10 | add logger tuner | │
│ | 2021-12-04 | add chinese word segmentor tools | │
│ │
╰──────────────────────────────────────────────────────────────────────────╯
"""
from jionlp.util import *
from jionlp.dictionary import *
from jionlp.rule import *
from jionlp.gadget import *
from jionlp.textaug import *
from jionlp.algorithm import *
# from jionlp.util.fast_loader import FastLoader
# rule = FastLoader('rule', globals(), 'jionlp.rule')
|
Python
|
Apache-2.0
|
FYWinds/JioNLP/jionlp/__init__.py
|
2b9b1faf-3a8f-4fe9-84b1-90feec5b1860
|
[{"tag": "EMAIL", "value": "dongrixinyu.89@163.com", "start": 106, "end": 128, "context": "ongrixinyu\n# license: Apache License 2.0\n# Email: dongrixinyu.89@163.com\n# github: https://github.com/dongrixinyu/JioNLP\n#"}]
|
[{"tag": "EMAIL", "value": "dongrixinyu.89@163.com", "start": 106, "end": 128, "context": "ongrixinyu\n# license: Apache License 2.0\n# Email: dongrixinyu.89@163.com\n# github: https://github.com/dongrixinyu/JioNLP\n#"}]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.