language (stringclasses, 15 values) | src_encoding (stringclasses, 34 values) | length_bytes (int64, 6–7.85M) | score (float64, 1.5–5.69) | int_score (int64, 2–5) | detected_licenses (listlengths, 0–160) | license_type (stringclasses, 2 values) | text (stringlengths, 9–7.85M) |
---|---|---|---|---|---|---|---|
Python
|
UTF-8
| 12,383 | 2.625 | 3 |
[] |
no_license
|
import xgboost
import pandas as pd
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import ParameterGrid
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import accuracy_score
import datetime
import scipy.optimize as optimize
def ranking(predictions, split_index):
"""
Rank classification results in accordance with a splitter
:param predictions:
:param split_index:
:return: classified ranked predictions
"""
# print predictions
ranked_predictions = np.ones(predictions.shape)
for i in range(1, len(split_index)):
cond = (split_index[i-1] <= predictions) * 1 * (predictions < split_index[i])
ranked_predictions[cond.astype('bool')] = i
cond = (predictions >= split_index[-1])
ranked_predictions[cond] = len(split_index)
# print cond
# print ranked_predictions
return ranked_predictions
def opt_cut_global(predictions, results):
"""
Find brute force optimized cutter
:param predictions:
:param results:
:return: global coarse optimized cutter
"""
print(predictions)
print(results)
print('start quadratic splitter optimization')
x0_range = np.arange(0, 1.0, 0.05)
x1_range = np.arange(0.5, 1.5, 0.1)
bestcase = np.array(ranking(predictions, [0.5, 1.5])).astype('int')
bestscore = accuracy_score(results, bestcase)
print('The starting score is %f' % bestscore)
best_splitter = 0
# optimize classifier
for x0 in x0_range:
for x1 in x1_range:
case = np.array(ranking(predictions, (x0 + x1 * riskless_splitter))).astype('int')
score = accuracy_score(results, case)
if score > bestscore:
bestscore = score
best_splitter = x0 + x1 * riskless_splitter
print('For splitter ', (x0 + x1 * riskless_splitter))
print('Variables x0 = %f, x1 = %f' % (x0, x1))
print('The score is %f' % bestscore)
return best_splitter
def opt_cut_local(x, *args):
"""
Find local optimized cutter
:param x: current cutter
:param args: predictions, results
:return: current result
"""
predictions, results = args
case = np.array(ranking(predictions, x)).astype('int')
score = -1 * accuracy_score(results, case)
# print score
return score
def date_parser(df):
date_recorder = list(map(lambda x: datetime.datetime.strptime(str(x), '%Y-%m-%d'),
df['date_recorded'].values))
df['year_recorder'] = list(map(lambda x: int(x.strftime('%Y')), date_recorder))
df['weekday_recorder'] = list(map(lambda x: int(x.strftime('%w')), date_recorder))
df['yearly_week_recorder'] = list(map(lambda x: int(x.strftime('%W')), date_recorder))
df['month_recorder'] = list(map(lambda x: int(x.strftime('%m')), date_recorder))
df['age'] = df['year_recorder'].values - df['construction_year'].values
del df['date_recorded']
return df
"""
Import data
"""
train = pd.read_csv('train.csv', index_col=0, parse_dates=True)
SIZE = 4609
# train = pd.read_csv('train.csv', index_col=0, parse_dates=True)
# train = pd.read_csv('train.csv', index_col=0, parse_dates=True)
# train = train.head(SIZE)
train_index = train.index.values
# test = pd.DataFrame.from_csv('test.csv')
test = pd.read_csv('test.csv', index_col=0, parse_dates=True)
test_index = test.index.values
# combining train and test data
# helps working on all the data and removes factorization problems between train and test
dataframe = pd.concat([train, test], axis=0)
train_labels = pd.read_csv('labels.csv', index_col=0, parse_dates=True)
# train_labels = pd.DataFrame.from_csv('labels.csv')
# train_labels = train_labels.head(SIZE)
# train_labels = pd.DataFrame.from_csv('labels2.csv')
# submission_file = pd.DataFrame.from_csv("SubmissionFormat.csv")
submission_file = pd.read_csv("SubmissionFormat.csv", index_col=0, parse_dates=True)
"""
Preprocess
"""
# Change labels to ints in order to use as y vector
label_encoder = LabelEncoder()
# print(train_labels.iloc[:, 0])
train_labels.iloc[:, 0] = label_encoder.fit_transform(train_labels.values.flatten())
# print(train_labels.iloc[:, 0])
# for row in train_labels.iloc[:, 0]:
# if row ==3:
# print(row)
# # print(row)
# #
# exit(0)
# if row[1]==3:
# print(row)
# exit(0)
# Parse date (removing is the easiest)
dataframe = date_parser(dataframe)
# Factorize str columns
print(dataframe.columns.values)
for col in dataframe.columns.values:
if dataframe[col].dtype.name == 'object':
dataframe[col] = dataframe[col].factorize()[0]
"""
Split into train and test
"""
print(dataframe)
train = dataframe.loc[train_index]
test = dataframe.loc[test_index]
"""
CV
"""
riskless_splitter = np.array([0.5, 1.5])
best_score = 0
best_params = 0
best_train_prediction = 0
best_prediction = 0
meta_solvers_train = []
meta_solvers_test = []
best_train = 0
best_test = 0
# Optimization parameters
early_stopping = 50
param_grid = [
{
'silent': [1],
'nthread': [3],
'eval_metric': ['rmse'],
'eta': [0.1],
'objective': ['reg:linear'],
'max_depth': [6],
'num_round': [2000],
'gamma': [0],
'subsample': [1.0],
'colsample_bytree': [1.0],
'n_monte_carlo': [1],
'cv_n': [2],
'test_rounds_fac': [1.2],
'count_n': [0],
'mc_test': [True]
}
]
print('start CV optimization')
mc_round_list = []
mc_acc_mean = []
mc_acc_sd = []
params_list = []
print_results = []
for params in ParameterGrid(param_grid):
print(params)
params_list.append(params)
train_predictions = np.ones((train.shape[0],))
print('There are %d columns' % train.shape[1])
# CV
mc_auc = []
mc_round = []
mc_train_pred = []
# Use monte carlo simulation if needed to find small improvements
for i_mc in range(params['n_monte_carlo']):
cv_n = params['cv_n']
kf = StratifiedKFold(n_splits=cv_n, shuffle=True, random_state=i_mc ** 3)
kf = list(kf.split(train, train_labels))  # materialize the folds so they can be iterated over twice below
xgboost_rounds = []
for cv_train_index, cv_test_index in kf:
X_train, X_test = train.values[cv_train_index, :], train.values[cv_test_index, :]
y_train = train_labels.iloc[cv_train_index].values.flatten()
y_test = train_labels.iloc[cv_test_index].values.flatten()
# train machine learning
xg_train = xgboost.DMatrix(X_train, label=y_train)
xg_test = xgboost.DMatrix(X_test, label=y_test)
watchlist = [(xg_train, 'train'), (xg_test, 'test')]
num_round = params['num_round']
xgclassifier = xgboost.train(params, xg_train, num_round, watchlist,
early_stopping_rounds=early_stopping
);
xgboost_rounds.append(xgclassifier.best_iteration)
num_round = int(np.mean(xgboost_rounds))
print('The best n_rounds is %d' % num_round)
for cv_train_index, cv_test_index in kf:
X_train, X_test = train.values[cv_train_index, :], train.values[cv_test_index, :]
y_train = train_labels.iloc[cv_train_index].values.flatten()
y_test = train_labels.iloc[cv_test_index].values.flatten()
# train machine learning
xg_train = xgboost.DMatrix(X_train, label=y_train)
xg_test = xgboost.DMatrix(X_test, label=y_test)
watchlist = [(xg_train, 'train'), (xg_test, 'test')]
xgclassifier = xgboost.train(params, xg_train, num_round, watchlist);
# predict
predicted_results = xgclassifier.predict(xg_test)
train_predictions[cv_test_index] = predicted_results
print('Calculating final splitter')
splitter = opt_cut_global(train_predictions, train_labels.values.flatten())
# train machine learning
res = optimize.minimize(opt_cut_local, splitter, args=(train_predictions, train_labels.values.flatten()),
method='Nelder-Mead',
# options={'disp': True}
)
classified_predicted_results = np.array(ranking(train_predictions, res.x)).astype('int')
# print(classified_predicted_results.value_counts())
print('Accuracy score ', accuracy_score(train_labels.values, classified_predicted_results))
mc_auc.append(accuracy_score(train_labels.values, classified_predicted_results))
mc_train_pred.append(classified_predicted_results)
mc_round.append(num_round)
mc_train_pred = np.mean(np.array(mc_train_pred), axis=0)
mc_round_list.append(int(np.mean(mc_round)))
mc_acc_mean.append(np.mean(mc_auc))
mc_acc_sd.append(np.std(mc_auc))
print('The accuracy range is: %.5f to %.5f and best n_round: %d' %
(mc_acc_mean[-1] - mc_acc_sd[-1], mc_acc_mean[-1] + mc_acc_sd[-1], mc_round_list[-1]))
print_results.append('The accuracy range is: %.5f to %.5f and best n_round: %d' %
(mc_acc_mean[-1] - mc_acc_sd[-1], mc_acc_mean[-1] + mc_acc_sd[-1], mc_round_list[-1]))
print('For ', mc_auc)
print('The accuracy of the average prediction is: %.5f' % accuracy_score(train_labels.values,
(mc_train_pred + 0.5).astype(int)))
meta_solvers_train.append(mc_train_pred)
# train machine learning
xg_train = xgboost.DMatrix(train.values, label=train_labels.values)
xg_test = xgboost.DMatrix(test.values)
if params['mc_test']:
watchlist = [(xg_train, 'train')]
num_round = int(mc_round_list[-1] * params['test_rounds_fac'])
mc_pred = []
for i_mc in range(params['n_monte_carlo']):
params['seed'] = i_mc
xg_train = xgboost.DMatrix(data=train, label=train_labels.values.flatten())
xg_test = xgboost.DMatrix(test)
watchlist = [(xg_train, 'train')]
xgclassifier = xgboost.train(params, xg_train, num_round, watchlist);
predicted_results = xgclassifier.predict(xg_test)
# print(predicted_results)
# exit(0)
# if predicted_results ==3:
# print(predicted_results)
# exit(0)
mc_pred.append(predicted_results)
print(predicted_results)
""""
+0.5 eklenmeli @@@@@@@@@@@@@@@@@@@@@@@@@
"""
meta_solvers_test.append((np.mean(np.array(mc_pred), axis=0) +0.5 ).astype(int))
# TODO:
# we need to map the values that come out greater than 2
# print(meta_solvers_test)
# exit(0)
# for row in meta_solvers_test[0]:
# for i in range(len(meta_solvers_test[0])):
#
# if meta_solvers_test[0][i] > 2:
# # row =2
# print(i,">>",meta_solvers_test[0][i])
# meta_solvers_test[0][i]=2
# print(">>>",row)
# exit(0)
""" Write opt solution """
print('writing to file')
mc_train_pred = label_encoder.inverse_transform(mc_train_pred.astype(int))
print(meta_solvers_test[-1])
# print(meta_solvers_test[-1])
meta_solvers_test[-1] = label_encoder.inverse_transform(meta_solvers_test[-1])
pd.DataFrame(mc_train_pred).to_csv('results/train_xgboost_d6_reg.csv')
submission_file['status_group'] = meta_solvers_test[-1]
submission_file.to_csv("results/test_xgboost_d6_reg.csv")
if mc_acc_mean[-1] > best_score:
print('new best accuracy')
best_score = mc_acc_mean[-1]
best_params = params
best_train_prediction = mc_train_pred
if params['mc_test']:
best_prediction = meta_solvers_test[-1]
print(best_score)
print(best_params)
print(params_list)
print(print_results)
print(mc_acc_mean)
print(mc_acc_sd)
"""
Final Solution
"""
# optimizing:
# CV = 4
# No date (The only, cv=5): 0.53988215488215485
# Added measurement year, weekday, month, week of the year and age: 0.540050505051
# Regression:
|
Python
|
UTF-8
| 2,360 | 3 | 3 |
[] |
no_license
|
# coding: utf-8
# @author: Shaw
# @datetime: 2019-02-26 13:15
# @Name: KMeans_test.py
# KMeans is part of sklearn.cluster
from sklearn.cluster import KMeans
# KMeans(n_clusters=8, init='k-means++', n_init=10, max_iter=300, tol=0.0001, precompute_distances='auto', verbose=0, random_state=None, copy_x=True, n_jobs=1, algorithm='auto')
# n_clusters is the value of K; max_iter is the maximum number of iterations
# n_init: how many times the centroid initialization is run, default 10. How quickly the algorithm converges depends strongly on the choice of initial centroids
# init: how the initial centroids are chosen; the default uses the optimized k-means++ scheme
# algorithm: the k-means implementation to use, one of "auto", "full", "elkan"
from sklearn import preprocessing
import pandas as pd
import PIL.Image as image
import numpy as np
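# Illustrative example only: constructing KMeans with the parameters described
# in the comments above; the specific values here are arbitrary.
kmeans_example = KMeans(n_clusters=3, init='k-means++', n_init=10, max_iter=300)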
if __name__ == "__main1__":
data = pd.read_csv('./kmeans/data.csv', encoding="gbk")
feature_col = ['2019年国际排名', '2018世界杯', '2015亚洲杯']
train_x = data[feature_col]
df = pd.DataFrame(train_x)
kmeans = KMeans(n_clusters=3)
min_max = preprocessing.MinMaxScaler()
train_x = min_max.fit_transform(train_x)
kmeans.fit(train_x)
predit_y = kmeans.predict(train_x)
result = pd.concat((data, pd.DataFrame(predit_y)), axis=1)
result.rename({0: u'聚类'}, axis=1, inplace=True)
if __name__ == "__main__":
def load_data(file_path):
with open(file_path, 'rb') as f:
# read the file
data =[]
# get the pixels of the image file
img = image.open(f)
# get the image size
width, height = img.size
for x in range(width):
for y in range(height):
c1, c2, c3 = img.getpixel((x, y))
data.append([c1, c2, c3])
min_max = preprocessing.MinMaxScaler()
data = min_max.fit_transform(data)
return np.mat(data), width, height
img, width, height = load_data('./kmeans/weixin.jpg')
# use K-Means to cluster the image into 2 clusters
kmeans = KMeans(n_clusters=2)
kmeans.fit(img)
label = kmeans.predict(img)
label = label.reshape([width, height])
pic_make = image.new("L", (width, height))
for x in range(width):
for y in range(height):
pic_make.putpixel((x, y), int(256/(label[x][y]+1))-1)
pic_make.save("weixin_reshape.jpg", "JPEG")
|
Python
|
UTF-8
| 4,130 | 4.4375 | 4 |
[] |
no_license
|
## this is at the top of every turtle file
from turtle import *
speed(0)
shape("turtle")
"""
### then first step is things like drawing a square
forward(100)
right(90)
forward(100)
right(90)
forward(100)
right(90)
forward(100)
right(90)
clear()
### then drawing a square through a for loop
for count in range(4):
forward(100)
right(90)
### making a function
def square():
for count in range(4):
forward(100)
right(90)
square()
### making a better function
def square(length):
for count in range(4):
forward(length)
right(90)
square(100)
### then let's draw a shape
def shape(sides, length, angle):
for count in range(sides):
forward(length)
right(angle)
shape(5, 100, 360/5)
# can you draw a triangle, a hexagon, a pentagon?
# try and write the following shape function that works out the angle.
def shape(sides, length):
angle = 360/sides
for count in range(sides):
forward(length)
right(angle)
shape(23, 5)
### fractals.
# let's draw a snowflake
# level 0
forward(100)
penup(); backward(100); right(90); forward(80); left(90); pendown();
# level 1
forward(30)
left(60)
forward(30)
right(120)
forward(30)
left(60)
forward(30)
penup(); backward(100); right(90); forward(80); left(90); pendown();
# level 2
forward(10)
left(60)
forward(10)
right(120)
forward(10)
left(60)
forward(10)
left(60)
forward(10)
left(60)
forward(10)
right(120)
forward(10)
left(60)
forward(10)
right(120)
forward(10)
left(60)
forward(10)
right(120)
forward(10)
left(60)
forward(10)
left(60)
forward(10)
left(60)
forward(10)
right(120)
forward(10)
left(60)
forward(10)
# this sucks, let's try again
# recursion.
def line(depth, length):
if depth == 0:
forward(length)
else:
line(depth-1, length)
left(60)
line(depth-1, length)
right(120)
line(depth-1, length)
left(60)
line(depth-1, length)
line(2,1)
def snowflake(depth, length):
line(depth, length)
right(120)
line(depth, length)
right(120)
line(depth, length)
right(120)
snowflake(2, 5)
reset()
# challenge, replace line with a new fractal
# from ____ ---> _/\_
# _
# to ____ ---> _| |_
# makes the X fractal.
def line(depth, length):
if depth == 0:
forward(length)
else:
line(depth-1, length)
left(90)
line(depth-1, length)
right(90)
line(depth-1, length)
right(90)
line(depth-1, length)
left(90)
line(depth-1, length)
#line(2,5)
speed(999)
# however we can draw it in a square, instead of a triangle
# this is called the x-fractal
def xfractal(depth, length):
line(depth, length)
left(90)
line(depth, length)
left(90)
line(depth, length)
left(90)
line(depth, length)
left(90)
#xfractal(2,2)
reset()
speed(0)
# zelda/triforce fractal
# we're going to have to illustrate these
def triforce(depth, length):
if depth == 0:
pendown()
forward(length)
left(120)
forward(length)
left(120)
forward(length)
left(120)
penup()
else:
penup()
newlength = length/2
newdepth = depth - 1
triforce(newdepth, newlength)
forward(newlength)
triforce(newdepth, newlength)
left(120)
forward(newlength)
right(120)
triforce(newdepth, newlength)
right(120)
forward(newlength)
left(120)
reset()
speed(0)
penup()
setpos(-255,-255)
triforce(7, 512)
"""
def bubble(depth, length):
if depth == 0:
pendown()
circle(length/2)
penup()
else:
penup()
newlength = length/2
newdepth = depth - 1
bubble(newdepth, newlength)
forward(newlength)
bubble(newdepth, newlength)
left(120)
forward(newlength)
right(120)
bubble(newdepth, newlength)
right(120)
forward(newlength)
left(120)
reset()
speed(0)
penup()
setpos(-255,-255)
bubble(6, 512)
|
C++
|
UTF-8
| 473 | 2.671875 | 3 |
[] |
no_license
|
#include <iostream>
#include <functional>
#include <vector>
#include <queue>
#include <fstream>
#include <string>
#include <bitset>
#include <sstream>
#include <climits>
#include <cmath>
using namespace std;
class Solution {
public:
int maxProfit(vector<int> &prices) {
int maxPro = 0;
int minPrice = INT_MAX;
for (int i = 0; i < prices.size(); i++) {
minPrice = min(minPrice, prices[i]);
maxPro = max(maxPro, prices[i] - minPrice);
}
return maxPro;
}
};
|
Markdown
|
UTF-8
| 439 | 2.515625 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
Source code for my [Ensuring That a Linux Program Is Running at Most Once by Using Abstract Sockets](https://blog.petrzemek.net/2017/07/24/ensuring-that-a-linux-program-is-running-at-most-once-by-using-abstract-sockets/) English blog post.
Works only on Linux. Requires at least Rust 1.19 due to the use of the [`eprintln`](https://doc.rust-lang.org/std/macro.eprintln.html) macro.
To build and run the program, use
```
$ cargo run
```
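For reference, the abstract-socket idea itself can be sketched in a few lines of Python (a hedged illustration, not the blog post's Rust code; the socket name below is an arbitrary placeholder):

```python
import socket
import sys

def ensure_single_instance(name: str = "my-program-lock") -> socket.socket:
    """Bind an abstract Unix socket; exit if another instance already holds it."""
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        # A leading NUL byte places the name in the Linux abstract namespace:
        # no filesystem entry is created and the name disappears when the process exits.
        sock.bind("\0" + name)
    except OSError:
        print("another instance is already running", file=sys.stderr)
        sys.exit(1)
    return sock  # keep a reference so the socket stays bound for the process lifetime

if __name__ == "__main__":
    _lock = ensure_single_instance()
    print("running as the only instance")
```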
|
Markdown
|
UTF-8
| 2,557 | 2.921875 | 3 |
[] |
no_license
|
Chloe called me dad consistently from age 3 - 6.
Only when placed in Donna's custody did she return
to John, which she also used when she was 2.
"Grandma says Mom doesn't go to church because
they took out the part of her brain that makes her
want to go to church." - Feb or Mar 2014. This is
the kind of "believe what I prefer to believe"
thinking Donna specializes in. It is untrue and
unfair.
Darin gave full support for adoption during
November 2013 meeting.
Emily, Chloe and I enjoyed visiting Festival of
Dance Improvisation (SFADI) in Capitol Hill,
Seattle each Sunday, where we and other families
developed movement awareness and trust in a bright
studio with a mirror wall. (I have been a member
of this dance group for eleven years.) It is
clearly absolutely foreign and frightening to
Chloe that upon her mother's death, her
interaction with me is heavily regulated and
limited. She's accustomed to gymnastic activities
with me. I've done my best to comply with CFS
guidelines about contact, but my primary goal has
been to meet Chloe's emotional need for
reassurance in this difficult period. It's telling
that this report contains a high number of
reported violations of contact policy, all of
which begin with Chloe approaching me. She clearly
enjoys dancing and derives comfort and joy from
the experience. This visit was not unusual in this
respect. The only change is that Sarah chose to
pile on every perceived violation of the policy.
(Which only explicitly applies to lap-sitting.)
I did not, and do not, add any information beyond
what Chloe says about her future plans. The
quotation is incorrect. Chloe said she wanted to
live in the same house, and I affirmed I would
like that, too. I did not add "the same house"; it
was in her original statement. I also strongly
support her claims of independence, and her wishes
for her adulthood. I have never suggested--not in
all my life--what she might do when she's an
adult, except attend college and establish a
career. But when she asserts that freedom can be
hers at a certain age, I hear a child longing for
her choices to be in her control, and
philosophically I have a tough time failing to
validate that thinking.
"When I first got to grandmas I had nightmares.
When I am old enough to choose where I live I will
live with you in Seattle."
Story In sand sculptures: "the queen died and the
castle fell on the king and the princess."
"You should get a job like Daniel (a child play
psychotherapist) so you can help other kids. "
|
Markdown
|
UTF-8
| 2,042 | 2.609375 | 3 |
[] |
no_license
|
# CloudStorage
## Course project "Network Storage"
# Status:
Done so far: only a skeleton of the client form, plus reading of the current server and client directories.
I have not managed to do much more because of a heavy workload at my job: client skeleton (swing), server skeleton (netty), client profile skeleton (postgresql + liquibase)
#### Core functionality:
<li>AAA - authentication and authorization are mandatory
<li>Password change, account deletion
<li>File upload and download
<li>1 repository - 1 user
<li>Copying, moving, deleting and sorting files. Creating folders
<li>File search
<li>Mark for deletion / recycle bin
<li>Size limit
#### Optional extras:
<li>1. Password encryption ** 2. Tree structure (optional) *** 3. Statistics collection (your choice)
## A bit about the architecture
A PostgreSQL database will be used for authorization; it will store only the user profile together with the authentication and authorization data.
The profile will store login|password, the storage size limit and current storage usage, and the user identifier.
All files will be stored on disk in a separate per-user folder whose name corresponds to the user identifier.
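As a rough illustration of the storage layout described above (a hedged sketch in Python rather than the project's Java; the root directory name and helper function below are assumptions, not part of the project):

```python
from pathlib import Path

# Assumed root location for all user repositories (placeholder name).
STORAGE_ROOT = Path("storage")

def user_folder(user_id: int) -> Path:
    """Return (and create if needed) the per-user folder named after the user id."""
    folder = STORAGE_ROOT / str(user_id)
    folder.mkdir(parents=True, exist_ok=True)
    return folder

print(user_folder(42))  # -> storage/42
```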
|
Markdown
|
UTF-8
| 5,763 | 3.203125 | 3 |
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
# feedparser
feedparser gems - web feed parser and normalizer (RSS 2.0, Atom, etc.)
* home :: [github.com/feedreader/feed.parser](https://github.com/feedreader/feed.parser)
* bugs :: [github.com/feedreader/feed.parser/issues](https://github.com/feedreader/feed.parser/issues)
* gem :: [rubygems.org/gems/feedparser](https://rubygems.org/gems/feedparser)
* rdoc :: [rubydoc.info/gems/feedparser](http://rubydoc.info/gems/feedparser)
* forum :: [groups.google.com/group/feedreader](http://groups.google.com/group/feedreader)
## Usage
### Structs
Feed • Item

### `Feed` Struct
#### Mappings
Note: uses question mark (`?`) for optional elements (otherwise assume required elements)
**Title 'n' Summary**
Note: The Feed parser will remove all html tags and attributes from the title (RSS 2.0+Atom),
description (RSS 2.0) and subtitle (Atom) content and will unescape HTML entities e.g. `&amp;` becomes `&` and so on - always
resulting in plain vanilla text.
| Feed Struct | RSS 2.0 | Notes | Atom | Notes |
| ------------------ | ----------------- | ------------------- | ------------- | ------------------- |
| `feed.title` | `title` | plain vanilla text | `title` | plain vanilla text |
| `feed.summary` | `description` | plain vanilla text | `subtitle`? | plain vanilla text |
**Dates**
| Feed Struct | RSS 2.0 | Notes | Atom | Notes |
| ------------------ | ------------------- | ----------------- | ---------- | --------------- |
| `feed.updated` | `lastBuildDate`? | RFC-822 format | `updated` | ISO 8601 format |
| `feed.published` | `pubDate`? | RFC-822 format | - | |
Note: Check - for RSS 2.0 set feed.updated to pubDate or lastBuildDate if only one present? if both present - map as above.
RFC-822 date format e.g. Wed, 14 Jan 2015 19:48:57 +0100
ISO-8601 date format e.g. 2015-01-11T09:30:16Z
~~~
class Feed
attr_accessor :format # e.g. atom|rss 2.0|etc.
attr_accessor :title # note: always plain vanilla text - if present html tags will get stripped and html entities unescaped
attr_accessor :url
attr_accessor :items
attr_accessor :summary # note: is description in RSS 2.0 and subtitle in Atom; always plain vanilla text
attr_accessor :updated # note: is lastBuildDate in RSS 2.0
attr_accessor :published # note: is pubDate in RSS 2.0; not available in Atom
attr_accessor :generator
attr_accessor :generator_version # e.g. @version (atom)
attr_accessor :generator_uri # e.g. @uri (atom) - use alias url/link ???
end
~~~
### `Item` Struct
**Title 'n' Summary**
Note: The Feed parser will remove all html tags and attributes from the title (RSS 2.0+Atom),
description (RSS 2.0) and summary (Atom) content
and will unescape HTML entities e.g. `&amp;` becomes `&` and so on - always
resulting in plain vanilla text.
Note: In plain vanilla RSS 2.0 there's no difference between (full) content and summary - everything is wrapped
in a description element; however, best practice is using the content "module" from RSS 1.0 inside RSS 2.0.
If there's no content module present the feed parser will "clone" the description and use one version for `item.summary` and
the clone for `item.content`.
Note: The content element will assume html content.
| Feed Struct | RSS 2.0 | Notes | Atom | Notes |
| ------------------ | ----------------- | ------------------- | ------------- | ------------------- |
| `item.title` | `title` | plain vanilla text | `title` | plain vanilla text |
| `item.summary` | `description` | plain vanilla text | `summary`? | plain vanilla text |
| `item.content` | `content`? | html | `content`? | html |
**Dates**
| Item Struct | RSS 2.0 | Notes | Atom | Notes |
| ------------------ | ------------------- | ----------------- | ------------- | --------------- |
| `item.updated` | `pubDate`? | RFC-822 format | `updated` | ISO 8601 format |
| `item.published` | - | RFC-822 format | `published`? | ISO 8601 format |
Note: In plain vanilla RSS 2.0 there's only one `pubDate` for items, thus, it's not possible to differentiate between published and updated dates for items; note - the `item.pubDate` will get mapped to `item.updated`. To set the published date in RSS 2.0 use the dublin core module e.g `dc:created`, for example.
~~~
class Item
attr_accessor :title # note: always plain vanilla text - if present html tags will get stripped and html entities unescaped
attr_accessor :url
attr_accessor :content
attr_accessor :content_type # optional for now (text|html|html-escaped|binary-base64) - not yet set
attr_accessor :summary
attr_accessor :updated # note: is pubDate in RSS 2.0 and updated in Atom
attr_accessor :published # note: is published in Atom; not available in RSS 2.0 (use dc:created ??)
attr_accessor :guid # todo: rename to id (use alias) ??
end
~~~
### Read Feed Example
~~~
require 'open-uri'
require 'feedparser'
xml = open( 'http://openfootball.github.io/atom.xml' ).read
feed = FeedParser::Parser.parse( xml )
pp feed
~~~
## Install
Just install the gem:
$ gem install feedparser
## License
The `feedparser` scripts are dedicated to the public domain.
Use it as you please with no restrictions whatsoever.
## Questions? Comments?
Send them along to the [Planet Pluto and Friends Forum/Mailing List](http://groups.google.com/group/feedreader).
Thanks!
|
C#
|
UTF-8
| 443 | 3 | 3 |
[] |
no_license
|
using System;
namespace Enemies
{
/// <summary>
/// Class named Zombie
/// </summary>
class Zombie
{
/// <summary>
/// Public field that has no value
/// </summary>
public int health;
/// <summary>
/// Public constructor, set the health value to 0
/// </summary>
public Zombie()
{
health = 0;
}
}
}
|
Java
|
UTF-8
| 696 | 2.75 | 3 |
[] |
no_license
|
package org.dspace.install.model;
public enum Language {
PT("Portuguese (Portugal)", "pt-PT"),
PT_BR("Portuguese (Brazil)", "pt-BR"),
EN_US("English (US)", "en-US"),
EN_UK("English (UK)", "en-UK");
public static Language fromCode (String code) {
for (Language g : Language.values()) {
if (g.getCode().equals(code))
return g;
}
return EN_US;
}
private String label;
private String code;
Language (String label, String code) {
this.label = label;
this.code = code;
}
/**
* @return the label
*/
public String getLabel() {
return label;
}
/**
* @return the code
*/
public String getCode() {
return code;
}
}
|
Swift
|
UTF-8
| 3,022 | 2.546875 | 3 |
[] |
no_license
|
//
// AttractionTableViewCell.swift
// PlanTrip
//
// Created by Ju Young Kim on 6/14/17.
// Copyright © 2017 Ju Young Kim. All rights reserved.
//
import UIKit
class AttractionTableViewCell: UITableViewCell {
var delegate:MyCustomCellDelegator!
var days_VC:DaysViewController? = nil
@IBOutlet weak var title: UILabel!
@IBOutlet weak var attraction_collectionView: UICollectionView!
var curr_day:Day? = nil
var attraction_list:[Attraction] = []
@IBAction func new_item(_ sender: Any) {
if(curr_day?.location == nil){
let alertController = UIAlertController(title: "Oops!", message: "You need to tell us where you're going!", preferredStyle: UIAlertControllerStyle.alert)
let okAction = UIAlertAction(title: "OK", style: UIAlertActionStyle.default)
{
(result : UIAlertAction) -> Void in
print("You pressed OK")
}
alertController.addAction(okAction)
self.days_VC?.present(alertController, animated: true, completion: nil)
}else{
var new_data = my_data(title: "Attraction", curr_day: curr_day!, city: (curr_day?.location!)!)
if(self.delegate != nil){
self.delegate.callSegueFromAttractCell(myData: new_data)
}
}
}
}
extension AttractionTableViewCell: UICollectionViewDataSource, UICollectionViewDelegate {
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return self.attraction_list.count
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = self.attraction_collectionView.dequeueReusableCell(withReuseIdentifier: "attract_cell", for: indexPath) as! AttractCollectionViewCell
cell.place_name.text = attraction_list[indexPath.row].name
cell.img.sd_setImage(with: URL(string: (attraction_list[indexPath.row].img_url!)))
cell.curr_day = self.curr_day
cell.curr_attract = self.attraction_list[indexPath.row]
cell.days_VC = self.days_VC
cell.collect_view = self.attraction_collectionView
return cell
}
func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
var curr_attraction:Attraction = self.attraction_list[indexPath.row]
var curr_business:Business = Business.init(name: curr_attraction.name, location: curr_attraction.location, img_url: URL(string: curr_attraction.img_url!), rating: curr_attraction.rating, phone_number: curr_attraction.phone_number, business_url: URL(string: curr_attraction.business_url!), categories: [], isClosed: curr_attraction.isClosed, reviewCount: UInt(curr_attraction.reviewCnt), lat: curr_attraction.lat, long: curr_attraction.long, type: "Attraction")
self.delegate.callSegueFromCollectionCell(myData: curr_business)
}
}
|
C++
|
UTF-8
| 1,494 | 2.890625 | 3 |
[] |
no_license
|
#pragma once
#ifndef __MEnginePathfindCriteria_H__
#define __MEnginePathfindCriteria_H__
/**
* This interface defines the methods that any class that is to be used as pathFind criteria must implement
*/
class MEnginePathfindCriteria
{
public:
MEnginePathfindCriteria();
~MEnginePathfindCriteria();
/**
* This method returns the origin point for this search
* @return The origin point
*/
fPoint3d getOrigin();
/**
* This method returns the destiny point for this search
* @return The destiny point
*/
fPoint3d getDestiny();
/**
* This method returns the origin cell for this search
* @return The origin cell
*/
MCell getOriginCell();
/**
* This method returns the destiny cell for this search
* @return The destiny cell
*/
MCell getDestinyCell();
/**
* Returns a heuristic value for any cell in the scene. The engine works with cell precision: any point inside the same cell
* as the destination point has to be considered the destination point.
*
* @param cell The cell for which we must calculate its heuristic
* @return The heuristic score for this cell. A value of 0 indicates that we reached our objective
*/
float getHeuristic(MCell cell);
/**
* Returns a weighted list of a cell's accessible neighbours. This method updates each cell in the returned list, setting
* its "cost" temporal property with the cost associated to move from the input cell into that cell.
*
* @return An array of fCells.
*/
Array getAccessibleFrom(MCell cell);
};
#endif
|
Markdown
|
UTF-8
| 1,784 | 2.78125 | 3 |
[] |
no_license
|
+++
title = "Evidence of an increasing NO$_2$/NO$_x$ emissions ratio from road traffic emissions"
date = 2005-01-01
authors = ["D.C. Carslaw"]
publication_types = ["2"]
abstract = "A statistical analysis of roadside concentrations of nitrogen oxides (NO$_x$) and nitrogen dioxide (NO$_2$) in London shows that from 1997 to 2003 there has been a statistically significant downward trend (at the p 0.004 level) in NO$_x$ averaged across a network of 36 sites. Conversely, there has been no statistically significant trend in the concentrations of NO$_2$ over the same period. Hourly modelling using a simple constrained chemical model shows that the NO$_2$/NO$_x$ emissions ratio from road traffic has increased markedly from a mean of about 5–6 vol% in 1997 to about 17 vol% in 2003. Calculations show that if the NO$_2$/NO$_x$ emissions ratio had remained the same as that towards the beginning of each time series, 14 out of the 36 sites would have shown a statistically significant downward trend in NO$_2$ at the p 0.10 level compared with only five that did. The increase in the NO$_2$/NO$_x$ emissions ratio from road traffic in recent years has therefore had a significant effect on recent trends in roadside NO$_2$ concentrations. It is shown that the increased use of certain types of diesel particulate filters fitted to buses is likely to have made an important contribution to the increasing trends in the NO$_2$/NO$_x$ emissions ratio. However, it is unlikely that these filters account for all of the observed increase and other effects could be important, such as the increased penetration of diesel cars in the passenger car fleet and new light- and heavy-duty engine technologies and management approaches."
featured = false
publication = "*Atmospheric Environment*"
+++
|
Java
|
UTF-8
| 3,110 | 2.5 | 2 |
[] |
no_license
|
package com.datasetup;
import java.util.HashMap;
import java.util.Map;
import org.testng.annotations.DataProvider;
public class Dataconfig {
// This method returns all the test data details from the given sheet
@DataProvider(name = "LoginTest")
public Object[][] dataSupplier() {
String datafile = "amazon.xlsx";
String sheetName = "LoginScenario";
ExcelDataConfig dataconfig = new ExcelDataConfig(datafile);
int lastRowNum = dataconfig.rowcount(sheetName);
int lastCellNum = dataconfig.lastcellno(0,0);
Object[][] obj = new Object[lastRowNum-1][1];
for (int i = 1; i < lastRowNum; i++) {
Map<Object, Object> datamap = new HashMap<>();
for (int j = 0; j < lastCellNum; j++) {
datamap.put( dataconfig.getData(sheetName, 0, j), dataconfig.getData(sheetName, i, j));
}
obj[i-1][0] = datamap;
}
return obj;
}
public Object[][] datsetup(String datafile,String sheetName,String TestcaseName) {
ExcelDataConfig dataConfig = new ExcelDataConfig(datafile);
Map<Integer, Integer> rowdetails = new HashMap<>();
rowdetails = dataConfig.getrowNo(sheetName, TestcaseName);
Object[][] obj = new Object[rowdetails.size()][1];
int lastCellNum = dataConfig.lastcellno(sheetName,0);
for (int i = 0; i < rowdetails.size(); i++)
{
Map<Object, Object> datamap = new HashMap<>();
for (int j = 0; j < lastCellNum; j++) {
datamap.put( dataConfig.getData(sheetName, 0, j), dataConfig.getData(sheetName, rowdetails.get(i), j));
}
obj[i][0] = datamap;
}
return obj;
}
public int datasize(String datafile,String sheetName,String TestcaseName) {
ExcelDataConfig dataConfig = new ExcelDataConfig(datafile);
Map<Integer, Integer> rowdetails = new HashMap<>();
rowdetails = dataConfig.getrowNo(sheetName, TestcaseName);
return rowdetails.size();
}
public Object[][] datsetup(String datafile,String sheetName) {
ExcelDataConfig dataconfig = new ExcelDataConfig(datafile);
int lastRowNum = dataconfig.rowcount(sheetName);
int lastCellNum = dataconfig.lastcellno(0,0);
Object[][] obj = new Object[lastRowNum-1][1];
for (int i = 1; i < lastRowNum; i++) {
Map<Object, Object> datamap = new HashMap<>();
for (int j = 0; j < lastCellNum; j++) {
datamap.put( dataconfig.getData(sheetName, 0, j), dataconfig.getData(sheetName, i, j));
}
obj[i-1][0] = datamap;
}
return obj;
}
public int datasize(String datafile,String sheetName) {
ExcelDataConfig dataconfig = new ExcelDataConfig(datafile);
int lastRowNum = dataconfig.rowcount(sheetName);
lastRowNum = lastRowNum-1;
return lastRowNum;
}
public Object[][] datsetup(String datafile,String sheetName,int slno){
ExcelDataConfig dataconfig = new ExcelDataConfig(datafile);
// update serial number of testcase
Object[][] obj = new Object[1][1];
int lastCellNum = dataconfig.lastcellno(0,0);
Map<Object, Object> datamap = new HashMap<>();
for (int j = 0; j < lastCellNum; j++) {
datamap.put( dataconfig.getData(sheetName, 0, j), dataconfig.getData(sheetName,slno-1 , j));
}
obj[0][0] = datamap;
return obj;
}
}
|
Java
|
ISO-8859-13
| 862 | 3.453125 | 3 |
[] |
no_license
|
package esercizioEnum;
public class Program {
public static void main(String[] args)
{
// the car uses 1 litre per km
// the car uses 5 litres per km --> with the commented-out method
// whereas now the car travels 20 km on 1 litre
Car auto = new Car(20, TipoCarburante.gas);
// the car starts with zero fuel and a quantity of 150 is added
auto.aggiungiCarburante(150, TipoCarburante.gas);
System.out.println(auto.getCarburanteContenuto());
// I drive 15 km and use 5 litres per km,
// 5*15 ----> so I use 75 litres, which is subtracted from the 150 added earlier
// now instead the car travels 20 km and uses 1 litre, as decided above
auto.riduzioneCarburante(20);
System.out.println(auto.getCarburanteContenuto());
System.out.println(auto.checkDiesel());
}
}
|
Go
|
UTF-8
| 839 | 2.9375 | 3 |
[] |
no_license
|
package repository
import (
"database/sql"
"github.com/mikcheal101/golang-tut-auth/models"
"github.com/mikcheal101/golang-tut-auth/utils"
"golang.org/x/crypto/bcrypt"
)
type UserRepository struct{}
func (repo UserRepository) CreateUser(db *sql.DB, user *models.User) error {
hash, err := bcrypt.GenerateFromPassword([]byte(user.Password), 10)
utils.HandleError(err)
user.Password = string(hash)
stmt := "insert into users (username, password) values ($1, $2) RETURNING id;"
err = db.QueryRow(stmt, user.Username, user.Password).Scan(&user.ID)
return err
}
func (repo UserRepository) AuthUser(db *sql.DB, user models.User) (string, error) {
var pwd string
stmt := "select id, username, password from users where username=$1"
err := db.QueryRow(stmt, user.Username).Scan(&user.ID, &user.Username, &pwd)
return pwd, err
}
|
PHP
|
UTF-8
| 21,434 | 2.796875 | 3 |
[] |
no_license
|
<?php
class AccountPerson {
public $Id;
public $FirstName;
public $LastName;
public $IsEmployee;
public $Address;
public $PostNmb;
public $City;
public $Country;
public $Phone;
public $Cellphone;
public $Email;
/* Here kept as dd.mm.yyyy */
public $Birthdate;
public $Newsletter;
public $Hidden;
public $Gender;
public $Secretaddress;
public $Comment;
/* Populated from outside */
public $Memberships;
public $BirthdateRequired;
public $YearMembershipRequired;
public $SemesterMembershipRequired;
/* Only for querying - not in result set */
private $User;
private $db;
private $dbPrefix;
function AccountPerson($db, $dbP = 0) {
$this->db = $db;
if(!$db) {
$this->db = new DB();
}
if(!$dbP) {
$this->dbPrefix = AppConfig::pre();
} else {
$this->dbPrefix = $dbP;
}
}
function setId($id) {
$this->Id = $id;
}
function setUser($user) {
$this->User = $user;
}
function setFirstname($firstname) {
$this->FirstName = $firstname;
}
function setLastname($lastname) {
$this->LastName = $lastname;
}
function setIsEmployee($isEmployee) {
$this->IsEmployee = $isEmployee;
}
function setAddress($address) {
$this->Address = $address;
}
function setCity($city) {
$this->City = $city;
}
function setCountry($country) {
$this->Country = $country;
}
function setPostnmb($postNmb) {
$this->PostNmb = $postNmb;
}
function setPhone($phone) {
$this->Phone = $phone;
}
function setCellphone($cellphone) {
$this->Cellphone = $cellphone;
}
function setEmail($email) {
$this->Email = $email;
}
function setBirthdate($birthdate) {
$this->Birthdate = $birthdate;
}
function setNewsletter($newsletter) {
$this->Newsletter = $newsletter;
}
function setHidden($hidden) {
$this->Hidden = $hidden;
}
function setGender($gender) {
$this->Gender = $gender;
}
function setSecretaddress($secretaddress) {
$this->Secretaddress = $secretaddress;
}
function setComment($comment) {
$this->Comment = $comment;
}
function name() {
return $this->FirstName . " " . $this->LastName;
}
function id() {
return $this->Id;
}
function getName($id) {
$sql = "select firstname,lastname from " . $this->dbPrefix . "person where id = ?";
$prep = $this->db->prepare($sql);
$prep->bind_params("i", $id);
$res = $prep->execute();
if(count($res) == 0) {
return "";
}
return $res[0]["firstname"]." ".$res[0]["lastname"];
}
function updatePortalPassword($personId, $password) {
$pass = crypt($password, User::makesalt());
$bind = $this->db->prepare("update ". $this->dbPrefix ."portal_user set pass=? where person=?");
$bind->bind_params("si", $pass, $personId);
$bind->execute();
}
function setPortalBlocked($personId, $blocked) {
$bind = $this->db->prepare("update ". $this->dbPrefix ."portal_user set deactivated=? where person=?");
$bind->bind_params("ii", $blocked, $personId);
$bind->execute();
}
function removeUrlField($field, $id) {
$prep = $this->db->prepare("update ". $this->dbPrefix ."portal_user set $field = '' where person = ?");
$prep->bind_params("i", $id);
$prep->execute();
}
function getSharedCompactPortalData() {
function sortPerson($one, $two) {
$res = strcasecmp($one["f"], $two["f"]);
if($res) {
return $res;
}
return strcasecmp($one["l"], $two["l"]);
}
$prep = $this->db->prepare("select person as p, ".
"(if(show_firstname,firstname,'')) as f, (if(show_lastname,lastname,'')) as l, ".
"(if(show_phone, phone,'')) as q, (if(show_cellphone,cellphone,'')) as c, ".
"(if(show_gender, gender, '')) as g, (if(show_email, email, '')) as e, ".
"(if(show_address, address, '')) as z, (if(show_city, city, '')) as x, ".
"(if(show_postnmb, postnmb, '')) as v, (if(show_country, country, '')) as b, ".
"(if(show_birthdate, birthdate, '')) as n, show_image as m, ".
"(select min(year) from " . $this->dbPrefix . "year_membership where memberid=person) as y, ".
"show_image as s, twitter as t, homepage as h, facebook as j, linkedin as k".
" from " . $this->dbPrefix . "portal_user," . $this->dbPrefix . "person where person = id and show_firstname");
$arr = $prep->execute();
$accDate = new ezDate();
$year = $accDate->year();
$prepOther = $this->db->prepare("select id, firstname, lastname,(select min(year) from " . $this->dbPrefix . "year_membership where memberid=id) as yf ".
" from " . $this->dbPrefix . "person, ". $this->dbPrefix . "year_membership ".
" where not exists(select null from " . $this->dbPrefix . "portal_user where person=id) and id=memberid and year IN(?, ?) group by id");
$prepOther->bind_params("ii", $year, $year-1);
$arrOther = $prepOther->execute();
foreach($arrOther as $one) {
$arr[] = array("p" => $one["id"], "f" => $one["firstname"], "l" => $one["lastname"], "y" => $one["yf"]);
}
usort($arr, "sortPerson");
return $arr;
}
function getAllPortal() {
$sql = "select firstname, lastname, U.* from ". $this->dbPrefix . "person," . $this->dbPrefix . "portal_user U where id=person order by firstname, lastname";
$prep = $this->db->prepare($sql);
$res = $prep->execute();
return $res;
}
function emailExists($email) {
$sql = "select id from ".$this->dbPrefix ."person where email like ?";
$prep = $this->db->prepare($sql);
$prep->bind_params("s", '%'.$email).'%';
$res = $prep->execute();
return count($res) == 1;
}
function searchByEmailInDb($email, $dbprefix) {
$email = "%".$email."%";
$sql = "select id, secret from ".$dbprefix."person where email like ?";
$prep = $this->db->prepare($sql);
$prep->bind_params("s", $email);
return $prep->execute();
}
function getOnePortal($id) {
$sql = "select deactivated, firstname,lastname,email,address,postnmb,city,country,phone,cellphone,birthdate, gender,".
"show_gender, show_birthdate, show_cellphone, show_phone, show_country, show_city, show_postnmb, show_address, show_email, show_lastname, show_firstname, show_image, ".
"homepage, twitter, facebook, linkedin, ifnull(newsletter, 0) as newsletter ".
"from " . $this->dbPrefix . "person," . $this->dbPrefix . "portal_user where id = ? and id=person";
$prep = $this->db->prepare($sql);
$prep->bind_params("i", $id);
$res = $prep->execute();
return array_pop($res);
}
function getOne($id) {
$sql = "select * from " . $this->dbPrefix . "person where id = ?";
$prep = $this->db->prepare($sql);
$prep->bind_params("i", $id);
$res = $prep->execute();
return array_pop($res);
}
function load($id) {
$fields = $this->getOne($id);
if (!$fields) {
return;
}
$this->Id = $id;
$this->setIsEmployee($fields["employee"]);
$this->setFirstname($fields["firstname"]);
$this->setLastname($fields["lastname"]);
$this->setEmail($fields["email"]);
$this->setPostnmb($fields["postnmb"]);
$this->setCity($fields["city"]);
$this->setCountry($fields["country"]);
$this->setPhone($fields["phone"]);
$this->setCellphone($fields["cellphone"]);
$this->setAddress($fields["address"]);
$this->setNewsletter($fields["newsletter"]);
$this->Secretaddress = $fields["secretaddress"];
$this->Comment = $fields["comment"];
$this->SemesterMembershipRequired = $fields["semester_membership_required"];
$this->YearMembershipRequired = $fields["year_membership_required"];
if($fields["birthdate"]) {
$tmpdate = new eZDate();
$tmpdate->setMySQLDate($fields["birthdate"]);
$this->setBirthdate($tmpdate->displayAccount());
}
$this->setHidden($fields["hidden"]);
$this->setGender($fields["gender"]);
}
function getAll($isEmpoyee = 0) {
$sql = "select id, firstname,lastname,email from " . $this->dbPrefix . "person" . ($isEmpoyee ? " where employee = 1" : "") . " order by lastname, firstname";
$prep = $this->db->prepare($sql);
$res = $prep->execute();
return $res;
}
function savePortalUser($id, $data) {
$bdSave = new eZDate();
$bdSave->setDate($data->birthdate);
$mysqlDate = $bdSave->mySQLDate();
/* Take a backup */
$prep = $this->db->prepare("insert ignore into " . $this->dbPrefix . "person_backup (id,firstname,lastname,email,address,postnmb,city,country,phone,cellphone,birthdate,newsletter,gender,lastedit) (select id,firstname,lastname,email,address,postnmb,city,country,phone,cellphone,birthdate,newsletter,gender,lastedit from " . $this->dbPrefix . "person where id = ?)");
$prep->bind_params("i", $id);
$prep->execute();
$prep = $this->db->prepare("update " . $this->dbPrefix . "person set firstname=?,lastname=?,email=?,address=?,postnmb=?,city=?,country=?,phone=?,cellphone=?,birthdate=?,newsletter=?, gender=?, lastedit=now() where id = ?");
$prep->bind_params("ssssssssssisi", $data->firstname, $data->lastname, $data->email, $data->address, $data->postnmb, $data->city, $data->country, $data->phone, $data->cellphone, $mysqlDate, $data->newsletter, $data->gender, $id);
$prep->execute();
$prep = $this->db->prepare("update " . $this->dbPrefix . "portal_user set show_gender=?, show_birthdate=?, show_cellphone=?, show_phone=?, show_country=?, ".
"show_city=?, show_postnmb=?, show_address=?, show_email=?, show_lastname=?, ".
"show_firstname=?, show_image=?,twitter=?,homepage=?,linkedin=?,facebook=? where person =? ");
$prep->bind_params("iiiiiiiiiiiissssi", $data->show_gender, $data->show_birthdate, $data->show_cellphone, $data->show_phone, $data->show_country, $data->show_city, $data->show_postnmb, $data->show_address, $data->show_email, $data->show_lastname, $data->show_firstname, $data->show_image, $data->twitter, $data->homepage,$data->linkedin,$data->facebook,$id);
$prep->execute();
}
function save() {
$mysqlDate = NULL;
if($this->Birthdate) {
$bdSave = new eZDate();
$bdSave->setDate($this->Birthdate);
$mysqlDate = $bdSave->mySQLDate();
}
if ($this->Id) {
$prep = $this->db->prepare("update " . $this->dbPrefix . "person set firstname=?,lastname=?,email=?,address=?,postnmb=?,city=?,country=?,phone=?,cellphone=?,employee=?,birthdate=?,newsletter=?, hidden=?, gender=?, secretaddress=?,comment=?,lastedit=now(), semester_membership_required=?, year_membership_required=? where id = ?");
$prep->bind_params("sssssssssssiisisiii", $this->FirstName, $this->LastName, $this->Email, $this->Address, $this->PostNmb, $this->City, $this->Country, $this->Phone, $this->Cellphone, $this->IsEmployee, $mysqlDate, $this->Newsletter, $this->Hidden, $this->Gender, $this->Secretaddress, $this->Comment, $this->SemesterMembershipRequired, $this->YearMembershipRequired, $this->Id);
$prep->execute();
return $this->db->affected_rows();
}
$prep = $this->db->prepare("insert into " . $this->dbPrefix . "person set firstname=?,lastname=?,email=?,address=?,postnmb=?,city=?,country=?,phone=?,cellphone=?,employee=?,birthdate=?,newsletter=?,hidden=?,gender=?, secretaddress=?,comment=?,lastedit=now(),semester_membership_required=?, year_membership_required=?");
$prep->bind_params("sssssssssssiisisii", $this->FirstName, $this->LastName, $this->Email, $this->Address, $this->PostNmb, $this->City, $this->Country, $this->Phone, $this->Cellphone, $this->IsEmployee, $mysqlDate, $this->Newsletter, $this->Hidden, $this->Gender,$this->Secretaddress, $this->Comment,$this->SemesterMembershipRequired, $this->YearMembershipRequired);
$prep->execute();
$this->Id = $this->db->insert_id();
return $this->Id;
}
function search($incMemberInfo, $debug = 0) {
$cols = "*";
if ($incMemberInfo) {
$accStandard = new AccountStandard($this->db, $this->dbPrefix);
$accSemester = new AccountSemester($this->db, $this->dbPrefix);
$active_semester = addslashes($accStandard->getOneValue(AccountStandard::CONST_SEMESTER));
$active_year = addslashes($accStandard->getOneValue(AccountStandard::CONST_YEAR));
$cols = "*, (select distinct 1 from " . $this->dbPrefix . "train_membership where memberid=id and semester=$active_semester) as train" .
", (select distinct 1 from " . $this->dbPrefix . "course_membership where memberid=id and semester=$active_semester) as course" .
", (select distinct 1 from " . $this->dbPrefix . "youth_membership where memberid=id and semester=$active_semester) as youth" .
", (select if(youth = 1, 2, 1) from " . $this->dbPrefix . "year_membership where memberid=id and year=$active_year) as year";
}
$searchWrap = $this->db->search("select $cols from " . $this->dbPrefix . "person", "order by lastname,firstname");
$searchWrap->addAndParam("i", "id", $this->Id);
$searchWrap->addAndParam("s", "firstname", $this->FirstName ? $this->FirstName."%" : NULL);
$searchWrap->addAndParam("s", "lastname", $this->LastName ? $this->LastName."%" : NULL);
$searchWrap->addAndParam("i", "employee", $this->IsEmployee);
$searchWrap->addAndParam("s", "address", $this->Address);
$searchWrap->addAndParam("s", "postnmb", $this->PostNmb);
$searchWrap->addAndParam("s", "city", $this->City);
$searchWrap->addAndParam("s", "country", $this->Country);
$searchWrap->addAndParam("s", "phone", $this->Phone);
$searchWrap->addAndParam("s", "cellphone", $this->Cellphone);
$searchWrap->addAndParam("s", "email", $this->Email);
$searchWrap->addAndParam("i", "newsletter", $this->Newsletter);
if($this->Gender == "U") {
/* Appears that addOnlySql bugs if no other params are set */
$searchWrap->addAndParam("i", "1", 1);
$searchWrap->addOnlySql("gender is null");
} else {
$searchWrap->addAndParam("s", "gender", $this->Gender);
}
if($this->Hidden) {
$searchWrap->addAndParam("i", "hidden",1);
}
$searchWrap->addAndQuery("s", $this->User, "exists (select null from " . $this->dbPrefix . "user where person=id and username=?)");
$res = $searchWrap->execute($debug);
foreach($res as &$one) {
if($one["secretaddress"]) {
$one["address"] = "#SECRET#";
$one["phone"] = "#SECRET#";
$one["cellphone"] = "#SECRET#";
}
}
return $res;
}
function allWithEmail() {
$searchWrap = $this->db->search("select firstname, lastname, email,newsletter from " . $this->dbPrefix . "person where email is not null order by newsletter desc, lastname, firstname, email");
return $searchWrap->execute();
}
function setSecret($id, $prefix = 0) {
$secret = "";
for ($i=0; $i<40; $i++) {
$secret.= chr(mt_rand(97, 122));
}
if(!$prefix) {
$prefix = $this->dbPrefix;
}
$prep = $this->db->prepare("update " . $prefix . "person set secret = ? where id = ?");
$prep->bind_params("si", $secret, $id);
$prep->execute();
return $secret;
}
function getSecret($id) {
$prep = $this->db->prepare("select secret from " . $this->dbPrefix . "person where id=?");
$prep->bind_params("i", $id);
$res = $prep->execute();
if(!$res[0]["secret"]) {
$secret = $this->setSecret($id);
return $this->dbPrefix.":".$secret;
}
return $this->dbPrefix.":".$res[0]["secret"];
}
function requirePortaluserSecretMatchAndUpdateSecret($secret, $id, $prefix) {
$prepins = $this->db->prepare("insert ignore into ".$prefix . "portal_user (person, show_firstname, show_lastname) values (?,1,1)");
$prepins->bind_params("i", $id);
$prepins->execute();
$prep = $this->db->prepare("select id from " . $prefix . "person," . $prefix . "portal_user where secret=? and id =? and id=person");
$prep->bind_params("si", $secret, $id);
$res = $prep->execute();
if(count($res) == 0) {
return 0;
}
$this->setSecret($id, $prefix);
return 1;
}
function unsubscribeToNewsletter($prefix, $secret, $id) {
$prefix = Strings::whitelist($prefix);
$prep = $this->db->prepare("update " . $prefix . "person set newsletter = 0 where secret = ? and id = ?");
$prep->bind_params("si", $secret, $id);
$prep->execute();
return $this->db->affected_rows();
}
function getFirst() {
$prep = $this->db->prepare("select * from ".$this->dbPrefix . "person limit 1");
return $prep->execute();
}
function allChangedSince($date) {
$prep = $this->db->prepare("select * from ".$this->dbPrefix . "person where firstname is not null and length(firstname) > 0 and lastedit >= ? and (hidden is null or hidden = 0)");
$prep->bind_params("s", $date);
$res = $prep->execute();
foreach($res as &$one) {
if($one["secretaddress"]) {
$one["address"] = "";
$one["phone"] = "";
$one["city"] = "";
$one["postnmb"] = "";
$one["cellphone"] = "";
} else {
$one["secretaddress"] = 0;
}
if($one["birthdate"]) {
$date = new eZDate();
$date->setMySQLDate($one["birthdate"]);
$one["birthdate"] = $date->display();
}
unset($one["hidden"]);
unset($one["secret"]);
unset($one["semester_membership_required"]);
unset($one["year_membership_required"]);
if(!$one["employee"] || strlen($one["employee"] == 0)) {
$one["employee"] = 0;
}
if(!$one["newsletter"] || strlen($one["newsletter"] == 0)) {
$one["newsletter"] = 0;
}
foreach($one as $key => $value) {
if($value === NULL) {
$one[$key] = "";
}
}
}
return $res;
}
function updateSecretIfUserExists($user, $secret) {
$prep = $this->db->prepare("select email, username from ".$this->dbPrefix .
"person P, ".$this->dbPrefix ."user U where U.person = P.id and U.username = ?");
$prep->bind_params("s", $user);
$res = $prep->execute();
if(count($res) != 1) {
return array("error" => "Bad match:".count($res),"user"=>$user, "dbprefix" => $this->dbPrefix);
}
$prep = $this->db->prepare("update ".$this->dbPrefix .
"person P set secret=? where exists (select null from ".$this->dbPrefix .
"user U where U.username = ? and U.person = P.id)");
$prep->bind_params("ss", $secret, $user);
$prep->execute();
return array("email" => $res[0]["email"], "username" => $user);
}
function updateSecretIfUserMatches($email, $secret) {
$prep = $this->db->prepare("select email, username from ".$this->dbPrefix .
"person P, ".$this->dbPrefix ."user U where P.email like ? and U.person = P.id");
$prep->bind_params("s", '%'.$email.'%');
$res = $prep->execute();
if(count($res) != 1) {
return array("error" => "Bad match:".count($res),"email"=>$email, "dbprefix" => $this->dbPrefix);
}
$registeredEmail = $res[0]["email"];
$emails = explode(",", $registeredEmail);
$found = false;
foreach($emails as $one) {
if(strtolower($one) == strtolower($email)) {
$found = true;
}
}
if(!$found) {
return array("error" => "email not unique", "email"=>$email);
}
$prep = $this->db->prepare("update ".$this->dbPrefix .
"person P set secret=? where exists (select null from ".$this->dbPrefix .
"user U where P.email like ? and U.person = P.id)");
$prep->bind_params("ss", $secret, '%'.$email.'%');
$prep->execute();
return array("email" => $registeredEmail, "username" => $res[0]["username"]);
}
}
|
PHP
|
UTF-8
| 125 | 3.25 | 3 |
[] |
no_license
|
<?php
$r = (double) readline();
$pi = 3.14159;
$volume = (4/3.0) * $pi * $r * $r * $r;
printf("VOLUME = %.3f\n", $volume);
|
Shell
|
UTF-8
| 4,123 | 3.578125 | 4 |
[] |
no_license
|
#!/bin/bash
source ../common.sh
dir=`mktemp -p . -d -t cr_ptree_XXXXXXX` || (echo "mktemp failed"; exit 1)
echo "Using output dir $dir"
cd $dir
BASE_DIR="../.."
FILEIO="../../fileio/fileio1"
ECHO="/bin/echo -e"
TEST_CMD="../ptree1"
# -n: children per process, -d: depth of process tree
TEST_ARGS="-n 2 -d 1 -w sleep"
SCRIPT_LOG="log-run-ptree1"
TEST_PID_FILE="pid.ptree1";
SNAPSHOT_DIR="snap1.d"
LOGS_DIR="logs.d"
DATA_DIR="data.d"
TEST_DONE="test-done"
CHECKPOINT_FILE="checkpoint-ptree1";
CHECKPOINT_READY="checkpoint-ready"
CHECKPOINT_DONE="checkpoint-done"
INPUT_DATA="input.data";
NSEXEC_ARGS="-cgpuimP $TEST_PID_FILE"
checkpoint()
{
local pid=$1
$ECHO "Checkpoint: $CHECKPOINT $pid \> $CHECKPOINT_FILE"
$CHECKPOINT $pid > $CHECKPOINT_FILE
ret=$?
if [ $ret -ne 0 ]; then
$ECHO "***** FAIL: Checkpoint of $pid failed"
ps aux |grep $TEST_CMD >> $SCRIPT_LOG
exit 1;
fi
}
function create_container()
{
local pid;
cmdline="$NSEXEC $NSEXEC_ARGS -- $TEST_CMD $TEST_ARGS"
$ECHO "\t- Creating container:"
$ECHO "\t- $cmdline"
$cmdline &
j=0;
# Wait for test to finish setup
while [ ! -f $CHECKPOINT_READY ]; do
$ECHO "\t- Waiting for $CHECKPOINT_READY"
sleep 1;
j=`expr $j + 1`;
if [ $j -eq 30 ]; then
$ECHO "\t ***** FAIL No $CHECKPOINT_READY"
exit 1;
fi
done;
# Find global pid of container-init
pid=`cat $TEST_PID_FILE`;
if [ "x$pid" == "x" ]; then
$ECHO "***** FAIL: Invalid container-init pid $pid"
ps aux |grep $TEST_CMD >> $SCRIPT_LOG
exit 1
fi
$ECHO "Created container with pid $pid" >> $SCRIPT_LOG
}
function restart_container
{
local ret;
cmdline="$RESTART --pids --pidns --wait"
$ECHO "\t- $cmdline"
$cmdline < $CHECKPOINT_FILE >> $SCRIPT_LOG 2>&1 &
}
function wait_for_checkpoint_ready()
{
# Wait for test to finish setup
while [ ! -f $CHECKPOINT_READY ]; do
$ECHO "\t- Waiting for $CHECKPOINT_READY"
sleep 1;
done;
}
function create_fs_snapshot()
{
# Prepare for snapshot
if [ -d $SNAPSHOT_DIR ]; then
rm -rf ${SNAPSHOT_DIR}.prev
mv $SNAPSHOT_DIR ${SNAPSHOT_DIR}.prev
mkdir $SNAPSHOT_DIR
fi
# Snapshot the log and data files
cp -r ${LOGS_DIR} ${DATA_DIR} $SNAPSHOT_DIR
}
function restore_fs_snapshot()
{
# Restore the snapshot after the main process has been killed
/bin/cp -r ${SNAPSHOT_DIR}/* .
}
# Check freezer mount point
line=`grep freezer /proc/mounts`
if [ $? -ne 0 ]; then
$ECHO "please mount freezer cgroup"
$ECHO " mkdir /cgroup"
$ECHO " mount -t cgroup -o freezer cgroup /cgroup"
exit 1
fi
freezermountpoint=`$ECHO $line | awk '{ print $2 }'`
# Make sure no stray ptree1 from another run is still going
killall $TEST_CMD > $SCRIPT_LOG 2>&1
if [ ! -d $LOGS_DIR ]; then
mkdir $LOGS_DIR
fi
if [ ! -d $DATA_DIR ]; then
mkdir $DATA_DIR
fi
if [ ! -d $SNAPSHOT_DIR ]; then
mkdir $SNAPSHOT_DIR
fi
if [ ! -f $INPUT_DATA ]; then
$FILEIO -C $INPUT_DATA
fi
> $SCRIPT_LOG;
cnt=1
while [ $cnt -lt 15 ]; do
$ECHO "===== Iteration $cnt"
# Remove any 'state' files, start the app and let it tell us
# when it is ready
rm -f $CHECKPOINT_READY $TEST_DONE $TEST_PID_FILE
create_container
pid=`cat $TEST_PID_FILE`
$ECHO "\t- Done creating container, cinit-pid $pid"
wait_for_checkpoint_ready
ps aux |grep $TEST_CMD >> $SCRIPT_LOG
# override default freezerdir
if [ -d "$freezerdir" ]; then
rmdir "$freezerdir"
fi
freezerdir=$freezermountpoint/$pid
freeze_pid $pid
num_pids1=`pidof $TEST_CMD | wc -w`
create_fs_snapshot
checkpoint $pid
touch $CHECKPOINT_DONE
killall -9 `basename $TEST_CMD`
thaw
wait
restore_fs_snapshot
restart_container
sleep 3;
num_pids2=`pidof $TEST_CMD | wc -w`
ps aux |grep $TEST_CMD >> $SCRIPT_LOG
$ECHO "\t- num_pids1 $num_pids1, num_pids2 $num_pids2";
# nsexec pid is parent-pid of restarted-container-init
nspid=`pidof restart`
if [ "x$nspid" == "x" ]; then
$ECHO "***** FAIL: Can't find pid of $RESTART"
exit 1;
fi
# End test gracefully
touch $TEST_DONE
$ECHO "\t- Waiting for restarted container to exit (gloabl-pid $nspid)"
wait $nspid;
ret=$?
$ECHO "\t- Container exited, status $ret"
cnt=$((cnt+1))
done
|
JavaScript
|
UTF-8
| 871 | 4.375 | 4 |
[] |
no_license
|
// DAY 1
// 1.a. Write a function which counts the number of occurrences of words in a paragraph or a sentence. The function countWords takes a paragraph and two words as parameters and compares which word occurs most frequently in the paragraph.
const paragraph = 'I love teaching. If you do not love teaching what else can you love. I love JavaScript if you do not love something which can give life to your application what else can you love.';
function countWords(sentences, wordOne) {
let splitWords = sentences.split(' ');
console.log(splitWords)
let wordOneCount = 0;
for (let i = 0; i < splitWords.length; i++) {
if(splitWords[i] === wordOne) {
wordOneCount = wordOneCount + 1;
}
}
return wordOneCount;
}
console.log(countWords(paragraph, 'love'));
// console.log(countWords(paragraph,'love', 'you'));
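// Added sketch (not part of the original exercise file): the comment above describes
// comparing *two* words, which countWords does not do. A minimal version, assuming the
// helper name countAndCompare and plain whitespace splitting:
function countAndCompare(sentences, wordOne, wordTwo) {
  const words = sentences.split(' ');
  let countOne = 0;
  let countTwo = 0;
  for (const word of words) {
    if (word === wordOne) countOne += 1;
    if (word === wordTwo) countTwo += 1;
  }
  if (countOne === countTwo) return `'${wordOne}' and '${wordTwo}' occur equally often (${countOne} times each)`;
  return countOne > countTwo
    ? `'${wordOne}' occurs more often (${countOne} vs ${countTwo})`
    : `'${wordTwo}' occurs more often (${countTwo} vs ${countOne})`;
}
// console.log(countAndCompare(paragraph, 'love', 'you'));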
|
Java
|
UTF-8
| 3,239 | 2 | 2 |
[
"Apache-2.0"
] |
permissive
|
package com.wolf.utils.redis.command;
import com.wolf.utils.redis.Process;
import com.wolf.utils.redis.Process.Policy;
import com.wolf.utils.redis.ZParams;
import java.util.Set;
/**
* @author juxin.zj E-mail:juxin.zj@taobao.com
* @since 2011-7-25 上午10:33:14
* @version 1.0
*/
public interface RedisZSetCommands {
public enum Aggregate {
SUM, MIN, MAX;
}
public class Tuple {
private final double score;
private final byte[] value;
public Tuple(byte[] value, double score) {
this.score = score;
this.value = value;
}
public double getScore() {
return score;
}
public byte[] getValue() {
return value;
}
}
@Process(Policy.WRITE)
Boolean zAdd(byte[] key, double score, byte[] value);
@Process(Policy.WRITE)
Long zAdd(byte[] key, Tuple... value);
@Process(Policy.WRITE)
Long zRem(byte[] key, byte[]... value);
@Process(Policy.WRITE)
Double zIncrBy(byte[] key, double increment, byte[] value);
@Process(Policy.READ)
Long zRank(byte[] key, byte[] value);
@Process(Policy.READ)
Long zRevRank(byte[] key, byte[] value);
@Process(Policy.READ)
Set<byte[]> zRange(byte[] key, long begin, long end);
@Process(Policy.READ)
Set<Tuple> zRangeWithScore(byte[] key, long begin, long end);
@Process(Policy.READ)
Set<byte[]> zRevRange(byte[] key, long begin, long end);
@Process(Policy.READ)
Set<Tuple> zRevRangeWithScore(byte[] key, long begin, long end);
@Process(Policy.READ)
Set<byte[]> zRangeByScore(byte[] key, double min, double max);
@Process(Policy.READ)
Set<Tuple> zRangeByScoreWithScore(byte[] key, double min, double max);
@Process(Policy.READ)
Set<byte[]> zRangeByScore(byte[] key, double min, double max, long offset, long count);
@Process(Policy.READ)
Set<Tuple> zRangeByScoreWithScore(byte[] key, double min, double max, long offset, long count);
@Process(Policy.READ)
Set<byte[]> zRevRangeByScore(byte[] key, double min, double max);
@Process(Policy.READ)
Set<Tuple> zRevRangeByScoreWithScore(byte[] key, double min, double max);
@Process(Policy.READ)
Set<byte[]> zRevRangeByScore(byte[] key, double min, double max, long offset, long count);
@Process(Policy.READ)
Set<Tuple> zRevRangeByScoreWithScore(byte[] key, double min, double max, long offset, long count);
@Process(Policy.READ)
Long zCount(byte[] key, double min, double max);
@Process(Policy.READ)
Long zCard(byte[] key);
@Process(Policy.READ)
Double zScore(byte[] key, byte[] value);
@Process(Policy.WRITE)
Long zRemRange(byte[] key, long begin, long end);
@Process(Policy.WRITE)
Long zRemRangeByScore(byte[] key, double min, double max);
@Process(Policy.WRITE)
Long zUnionStore(byte[] destKey, byte[]... sets);
@Process(Policy.WRITE)
Long zUnionStore(byte[] destKey, ZParams params, int[] weights, byte[]... sets);
@Process(Policy.WRITE)
Long zInterStore(byte[] destKey, byte[]... sets);
@Process(Policy.WRITE)
Long zInterStore(byte[] destKey, ZParams aggregate, int[] weights, byte[]... sets);
}
|
JavaScript
|
UTF-8
| 7,693 | 2.796875 | 3 |
[
"LicenseRef-scancode-free-unknown",
"MIT"
] |
permissive
|
/**
* @typedef { import('./types').ExportDir } ExportDir
* @typedef {import("./types").ExportOutputs} ExportOutputs
*/
const {getRelativePath} = require("./utils");
const FS = require("fs");
const {promisify} = require("util");
const Path = require("path");
/**
* Writes the given 'virtual' exportsDir object to disc
* @param {ExportDir} exportDir The exports object to be written
* @param {boolean} includeJS Whether to also export the js code
* @param {boolean} accumulate Whether to accumulate all exports and children into a default export
* @returns {Promise<void>}
*/
async function writeExportDir(exportDir, includeJS = true, accumulate = includeJS) {
const path = exportDir.path;
if (!FS.existsSync(path)) FS.mkdirSync(path, {recursive: true});
await promisify(FS.writeFile)(
`${path}/index.d.ts`,
getExportDirTS(exportDir, accumulate),
"utf8"
);
if (includeJS)
await promisify(FS.writeFile)(
`${path}/index.js`,
getExportDirJS(exportDir, accumulate),
"utf8"
);
for (const child of Object.values(exportDir.children))
await writeExportDir(child, includeJS);
}
/**
* Builds the .d.ts contents for the given 'virtual' exportsDir object
* @param {ExportDir} exportDir The exports object to be written
* @param {boolean} accumulate Whether to accumulate all exports and children into a default export
* @returns {string} The text to be written to the .d.ts file
*/
function getExportDirTS(exportDir, accumulate = true) {
// Get the exports text file
const exportsText = Object.keys(exportDir.exports)
.map(path => {
const props = exportDir.exports[path];
return `export {${props.join(", ")}} from "${path}";`;
})
.join("\n");
// Construct the default export object structure
const importsText = Object.keys(exportDir.exports)
.map(path => {
const props = exportDir.exports[path];
return `import {${props.join(", ")}} from "${path}";`;
})
.join("\n");
const exportsLines = Object.keys(exportDir.exports).flatMap(path => {
const props = exportDir.exports[path];
return props.map(prop => ` ${prop}: typeof ${prop}`);
});
const childrenImportsText = Object.keys(exportDir.children)
.map(child => `import $${child} from "./${child}";`)
.join("\n");
const childrenLines = Object.keys(exportDir.children).map(
child => ` $${child}: typeof $${child}`
);
const defaultExportText = [...childrenLines, ...exportsLines].join(",\n");
const defExport = `
${importsText}
${childrenImportsText}
declare const __default: {
${defaultExportText}
}
export default __default;`;
// Return everything
return `${exportsText}` + (accumulate ? defExport : "");
}
/**
* Builds the .js contents for the given 'virtual' exportsDir object
* @param {ExportDir} exportDir The exports object to be written
* @param {boolean} accumulate Whether to accumulate all exports and children into a default export
* @returns {string} The text to be written to the .js file
*/
function getExportDirJS(exportDir, accumulate = true) {
// Get the exports text file
const importsText = Object.keys(exportDir.exports)
.map(path => {
const props = exportDir.exports[path];
return `const {${props.join(", ")}} = require("${path}");`;
})
.join("\n");
const exportsText = Object.keys(exportDir.exports)
.flatMap(path => {
const props = exportDir.exports[path];
return props.map(prop => ` ${prop}`);
})
.join(",\n");
// Construct the default export object structure
const childrenImportsText = Object.keys(exportDir.children)
.map(child => `const {default: $${child}} = require("./${child}");`)
.join("\n");
const childrenExportsText = [
...Object.keys(exportDir.children).map(child => ` $${child}`),
" ...standardExports",
].join(",\n");
if (accumulate)
return `Object.defineProperty(module.exports, "__esModule", { value: true });
${importsText}\n${childrenImportsText}
const standardExports = {
${exportsText}
};
module.exports = {
default: {
${childrenExportsText}
},
...standardExports
}`;
return `Object.defineProperty(exports, "__esModule", { value: true });
${importsText}
exports = {
${exportsText}
}`;
}
/**
* Writes the given exports to the index build file
* @param {string} path The path to write the flattened exports too
* @param {ExportOutputs} outputs The outputs to write to the index
* @param {"ts"|"js"|undefined} only Indicates to potentially only update js or ts files
*/
async function writeExportsToIndex(path, outputs, only) {
const declarationPath = path + ".d.ts";
path = path + ".js";
const dirPath = Path.dirname(path).replace(/\\/g, "/");
let jsText = `Object.defineProperty(module.exports, "__esModule", { value: true });
/** generated exports */`;
let tsText = "/** generated exports */";
// Extract the base text from existing files if present
if (FS.existsSync(path)) {
const jsContent = await promisify(FS.readFile)(path, "utf8");
jsText =
jsContent.replace(/\r?\n?\/\*\* generated exports \*\/(.|\r?\n)*$/g, "") +
"\n/** generated exports */";
if (FS.existsSync(declarationPath)) {
const tsContent = await promisify(FS.readFile)(declarationPath, "utf8");
tsText =
tsContent.replace(/\r?\n?\/\*\* generated exports \*\/(.|\r?\n)*$/g, "") +
"\n/** generated exports */";
}
}
// Add the exports to the base text
jsText += getExportDirToIndexJS(dirPath, outputs.runtime);
tsText += getExportDirToIndexTS(dirPath, outputs.runtime);
tsText += getExportDirToIndexTS(dirPath, outputs.type);
jsText += `\nmodule.exports.default = require("${getRelativePath(
dirPath,
outputs.runtime.path
)}").default;`;
tsText += `\nexport { default } from "${getRelativePath(
dirPath,
outputs.runtime.path
)}";`;
// Save the files
if (only != "ts") await promisify(FS.writeFile)(path, jsText, "utf8");
if (only != "js") await promisify(FS.writeFile)(declarationPath, tsText, "utf8");
}
/**
* Creates the text to reexport the given export dir flat
* @param {string} path The path to get the exports for
* @param {ExportDir} exportDir The export dir to get the exports from
* @returns {string} The exports text
*/
function getExportDirToIndexTS(path, exportDir) {
const relativePath = getRelativePath(path, exportDir.path);
const text = `\nexport * from "${relativePath}";`;
return (
text +
Object.values(exportDir.children)
.map(child => getExportDirToIndexTS(path, child))
.join("")
);
}
/**
* Creates the text to reexport the given export dir flat
* @param {string} path The path to get the exports for
* @param {ExportDir} exportDir The export dir to get the exports from
* @returns {string} The exports text
*/
function getExportDirToIndexJS(path, exportDir) {
const relativePath = getRelativePath(path, exportDir.path);
const text = `\nObject.assign(exports, require("${relativePath}"));`;
return (
text +
Object.values(exportDir.children)
.map(child => getExportDirToIndexJS(path, child))
.join("")
);
}
module.exports = {
writeExportDir,
writeExportsToIndex,
getExportDirJS,
getExportDirTS,
getExportDirToIndexJS,
getExportDirToIndexTS,
};
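// Usage sketch (added for illustration; the object shape below is inferred from how
// writeExportDir reads `path`, `exports` and `children`, not taken from ./types):
//
//   const exportDir = {
//       path: "build",
//       exports: {"./model/User": ["User", "createUser"]},
//       children: {},
//   };
//   // Writes build/index.d.ts and build/index.js with an accumulated default export:
//   writeExportDir(exportDir, true).catch(console.error);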
|
Java
|
UTF-8
| 30,505 | 1.585938 | 2 |
[] |
no_license
|
package com.zhilian.rf_qims.mvp.sample_data.pro_quality;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.zhilian.api.StrKit;
import com.zhilian.rf_qims.R;
import com.zhilian.rf_qims.common.Common;
import com.zhilian.rf_qims.entity.Sample;
import com.zhilian.rf_qims.entity.SampleCheck;
import com.zhilian.rf_qims.interfaces.EdAverageAndDiffer;
import com.zhilian.rf_qims.interfaces.EdDesignChangeSave;
import com.zhilian.rf_qims.interfaces.EdTextChange;
import com.zhilian.rf_qims.interfaces.EdThreeAverage;
import com.zhilian.rf_qims.util.EdUtil;
import com.zhilian.rf_qims.util.UpdateSampleCheckStatu;
import com.zhilian.rf_qims.widget.LazyLoadFragment;
import java.util.HashMap;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
import static com.zhilian.rf_qims.util.UpdateSampleCheckStatu.UpdateSampleCheckStatu;
/**
* Created by colin on 2018/3/23 15:29 .
*/
public class ProQualityHLTQD extends LazyLoadFragment{
// @BindView(R.id.tv_page_title)
// TextView tvPageTitle;
// @BindView(R.id.tv_page_before)
// TextView tvPageBefore;
// @BindView(R.id.tv_page_curpage)
// TextView tvPageCurpage;
// @BindView(R.id.tv_page_next)
// TextView tvPageNext;
@BindView(R.id.tv_title_1_1)
TextView tvTitle11;
@BindView(R.id.tv_1_1)
TextView tv11;
@BindView(R.id.tv_1_2)
TextView tv12;
@BindView(R.id.tv_1_3)
TextView tv13;
@BindView(R.id.tv_1_4)
TextView tv14;
@BindView(R.id.ly_1_1)
RelativeLayout ly11;
@BindView(R.id.tv_title_2_1)
TextView tvTitle21;
@BindView(R.id.ed_2_1)
EditText ed21;
@BindView(R.id.ed_2_2)
EditText ed22;
@BindView(R.id.ed_2_3)
EditText ed23;
@BindView(R.id.ed_2_4)
EditText ed24;
@BindView(R.id.ly_2_1)
RelativeLayout ly21;
@BindView(R.id.tv_title_3_1)
TextView tvTitle31;
@BindView(R.id.ed_3_1)
EditText ed31;
@BindView(R.id.ed_3_2)
EditText ed32;
@BindView(R.id.ed_3_3)
EditText ed33;
@BindView(R.id.ed_3_4)
EditText ed34;
@BindView(R.id.ly_3_1)
RelativeLayout ly31;
@BindView(R.id.tv_title_4_1)
TextView tvTitle41;
@BindView(R.id.ed_4_1)
EditText ed41;
@BindView(R.id.ed_4_2)
EditText ed42;
@BindView(R.id.ed_4_3)
EditText ed43;
@BindView(R.id.ed_4_4)
EditText ed44;
@BindView(R.id.ly_4_1)
RelativeLayout ly41;
@BindView(R.id.tv_title_5_1)
TextView tvTitle51;
@BindView(R.id.ed_5_1)
EditText ed51;
@BindView(R.id.ed_5_2)
EditText ed52;
@BindView(R.id.ed_5_3)
EditText ed53;
@BindView(R.id.ed_5_4)
EditText ed54;
@BindView(R.id.ly_5_1)
RelativeLayout ly51;
@BindView(R.id.tv_title_6_1)
TextView tvTitle61;
@BindView(R.id.ed_6_1)
EditText ed61;
@BindView(R.id.ed_6_2)
EditText ed62;
@BindView(R.id.ed_6_3)
EditText ed63;
@BindView(R.id.ed_6_4)
EditText ed64;
@BindView(R.id.ly_6_1)
RelativeLayout ly61;
@BindView(R.id.tv_title_7_1)
TextView tvTitle71;
@BindView(R.id.ed_7_1)
EditText ed71;
@BindView(R.id.ed_7_2)
EditText ed72;
@BindView(R.id.ed_7_3)
EditText ed73;
@BindView(R.id.ed_7_4)
EditText ed74;
@BindView(R.id.ly_7_1)
RelativeLayout ly71;
@BindView(R.id.tv_title_8_1)
TextView tvTitle81;
@BindView(R.id.ed_8_1)
EditText ed81;
@BindView(R.id.ed_8_2)
EditText ed82;
@BindView(R.id.ed_8_3)
EditText ed83;
@BindView(R.id.ed_8_4)
EditText ed84;
@BindView(R.id.ly_8_1)
RelativeLayout ly81;
@BindView(R.id.tv_title_9_1)
TextView tvTitle91;
@BindView(R.id.ed_9_1)
EditText ed91;
@BindView(R.id.ed_9_2)
EditText ed92;
@BindView(R.id.ed_9_3)
EditText ed93;
@BindView(R.id.ed_9_4)
EditText ed94;
@BindView(R.id.ly_9_1)
RelativeLayout ly91;
@BindView(R.id.tv_title_10_1)
TextView tvTitle101;
@BindView(R.id.ed_10_1)
EditText ed101;
@BindView(R.id.ed_10_2)
EditText ed102;
@BindView(R.id.ed_10_3)
EditText ed103;
@BindView(R.id.ed_10_4)
EditText ed104;
@BindView(R.id.ly_10_1)
RelativeLayout ly101;
@BindView(R.id.tv_title_11_1)
TextView tvTitle111;
@BindView(R.id.ed_11_1)
EditText ed111;
@BindView(R.id.ed_11_2)
EditText ed112;
@BindView(R.id.ed_11_3)
EditText ed113;
@BindView(R.id.ed_11_4)
EditText ed114;
@BindView(R.id.ly_11_1)
RelativeLayout ly111;
@BindView(R.id.tv_title_12_1)
TextView tvTitle121;
@BindView(R.id.ed_12_1)
EditText ed121;
@BindView(R.id.tv_title_12_2)
TextView tvTitle122;
@BindView(R.id.ed_12_2)
EditText ed122;
@BindView(R.id.ly_12_1)
RelativeLayout ly121;
@BindView(R.id.tv_title_13_1)
TextView tvTitle131;
@BindView(R.id.ed_13_1_remark)
EditText ed131Remark;
@BindView(R.id.ly_13_1)
RelativeLayout ly131;
@BindView(R.id.tv_title_14_1)
TextView tvTitle141;
@BindView(R.id.tv_14_1)
TextView tv141;
@BindView(R.id.tv_14_2)
TextView tv142;
@BindView(R.id.tv_14_3)
TextView tv143;
@BindView(R.id.ly_14_1)
RelativeLayout ly141;
@BindView(R.id.tv_title_15_1)
TextView tvTitle151;
@BindView(R.id.ed_15_1)
EditText ed151;
@BindView(R.id.ed_15_2)
EditText ed152;
@BindView(R.id.ed_15_3)
EditText ed153;
@BindView(R.id.ly_15_1)
RelativeLayout ly151;
@BindView(R.id.tv_title_16_1)
TextView tvTitle161;
@BindView(R.id.ed_16_1_remark)
EditText ed161Remark;
@BindView(R.id.ly_16_1)
RelativeLayout ly161;
Unbinder unbinder;
private EditText[] editTexts1;
private EditText[] editTexts2;
private EditText[] editTexts3;
private EditText[] editTexts4;
private EditText[] editTexts5;
private EditText[] editTexts6;
private EditText[] editTexts7;
private EditText[] editTexts8;
private EditText[] editTexts9;
private EditText[] editTexts10;
private EditText[] editTexts11;
private EditText[] editTexts12;
private EditText[] editTexts13;
private EditText[] editTexts14;
private EditText[] editTextsAll;
Map<Integer,String> map = new HashMap<>();
Map<Integer,String> serialNumber = new HashMap<>();
@Override
protected int setContentView() {
return R.layout.fragment_sczl_hntqd;
}
// The design value is not used here
@Override
protected void lazyLoad() {
unbinder = ButterKnife.bind(this, view);
editTexts1=new EditText[]{ed21,ed22,ed23};
editTexts2=new EditText[]{ed31,ed32,ed33};
editTexts3=new EditText[]{ed41,ed42,ed43};
editTexts4=new EditText[]{ed51,ed52,ed53};
editTexts5=new EditText[]{ed61,ed62,ed63};
editTexts6=new EditText[]{ed71,ed72,ed73};
editTexts7=new EditText[]{ed81,ed82,ed83};
editTexts8=new EditText[]{ed91,ed92,ed93};
editTexts9=new EditText[]{ed101,ed102,ed103};
editTexts10=new EditText[]{ed111,ed112,ed113};
editTexts11=new EditText[]{ed121};
editTexts12=new EditText[]{ed122};
editTexts13=new EditText[]{ed151};
editTexts14=new EditText[]{ed153};
editTextsAll = new EditText[]{ed21, ed22, ed23, ed31, ed32,
ed33, ed41, ed42, ed43, ed51, ed52, ed53, ed61, ed62, ed63, ed71,
ed72, ed73, ed81, ed82, ed83, ed91, ed92, ed93, ed101, ed102, ed103, ed111, ed112, ed113, ed121, ed122, ed151, ed153};
fillMap();
/*Sample tentsample = (Sample) getActivity().getIntent().getSerializableExtra("sample");
Sample sample = GreenDaoManager.getInstance().getNewSession()
.getSampleDao().queryBuilder().where(SampleDao.Properties.Id.eq(tentsample.getId())).unique();
SampleCheck sampleCheck = GreenDaoManager.getInstance().getNewSession()
.getSampleCheckDao().queryBuilder().where(SampleCheckDao.Properties.Id.eq(tentsample.getId())).unique();*/
Common common = new Common();
SampleCheck sampleCheck = common.getSampleCheck();
Sample sample = common.getSample();
if (sampleCheck != null) {
// Carbonation depth range (max - min) and carbonation depth average
ed24.addTextChangedListener(new EdAverageAndDiffer(ed24,ed34,ed44,ed121,ed122));
ed34.addTextChangedListener(new EdAverageAndDiffer(ed24,ed34,ed44,ed121,ed122));
ed44.addTextChangedListener(new EdAverageAndDiffer(ed24,ed34,ed44,ed121,ed122));
// Average of the three readings
editTexts1[0].addTextChangedListener(new EdThreeAverage(editTexts1[0],editTexts1[1],editTexts1[2],ed24));
editTexts1[1].addTextChangedListener(new EdThreeAverage(editTexts1[0],editTexts1[1],editTexts1[2],ed24));
editTexts1[2].addTextChangedListener(new EdThreeAverage(editTexts1[0],editTexts1[1],editTexts1[2],ed24));
editTexts2[0].addTextChangedListener(new EdThreeAverage(editTexts2[0],editTexts2[1],editTexts2[2],ed34));
editTexts2[1].addTextChangedListener(new EdThreeAverage(editTexts2[0],editTexts2[1],editTexts2[2],ed34));
editTexts2[2].addTextChangedListener(new EdThreeAverage(editTexts2[0],editTexts2[1],editTexts2[2],ed34));
editTexts3[0].addTextChangedListener(new EdThreeAverage(editTexts3[0],editTexts3[1],editTexts3[2],ed44));
editTexts3[1].addTextChangedListener(new EdThreeAverage(editTexts3[0],editTexts3[1],editTexts3[2],ed44));
editTexts3[2].addTextChangedListener(new EdThreeAverage(editTexts3[0],editTexts3[1],editTexts3[2],ed44));
editTexts4[0].addTextChangedListener(new EdThreeAverage(editTexts4[0],editTexts4[1],editTexts4[2],ed54));
editTexts4[1].addTextChangedListener(new EdThreeAverage(editTexts4[0],editTexts4[1],editTexts4[2],ed54));
editTexts4[2].addTextChangedListener(new EdThreeAverage(editTexts4[0],editTexts4[1],editTexts4[2],ed54));
editTexts5[0].addTextChangedListener(new EdThreeAverage(editTexts5[0],editTexts5[1],editTexts5[2],ed64));
editTexts5[1].addTextChangedListener(new EdThreeAverage(editTexts5[0],editTexts5[1],editTexts5[2],ed64));
editTexts5[2].addTextChangedListener(new EdThreeAverage(editTexts5[0],editTexts5[1],editTexts5[2],ed64));
editTexts6[0].addTextChangedListener(new EdThreeAverage(editTexts6[0],editTexts6[1],editTexts6[2],ed74));
editTexts6[1].addTextChangedListener(new EdThreeAverage(editTexts6[0],editTexts6[1],editTexts6[2],ed74));
editTexts6[2].addTextChangedListener(new EdThreeAverage(editTexts6[0],editTexts6[1],editTexts6[2],ed74));
editTexts7[0].addTextChangedListener(new EdThreeAverage(editTexts7[0],editTexts7[1],editTexts7[2],ed84));
editTexts7[1].addTextChangedListener(new EdThreeAverage(editTexts7[0],editTexts7[1],editTexts7[2],ed84));
editTexts7[2].addTextChangedListener(new EdThreeAverage(editTexts7[0],editTexts7[1],editTexts7[2],ed84));
editTexts8[0].addTextChangedListener(new EdThreeAverage(editTexts8[0],editTexts8[1],editTexts8[2],ed94));
editTexts8[1].addTextChangedListener(new EdThreeAverage(editTexts8[0],editTexts8[1],editTexts8[2],ed94));
editTexts8[2].addTextChangedListener(new EdThreeAverage(editTexts8[0],editTexts8[1],editTexts8[2],ed94));
editTexts9[0].addTextChangedListener(new EdThreeAverage(editTexts9[0],editTexts9[1],editTexts9[2],ed104));
editTexts9[1].addTextChangedListener(new EdThreeAverage(editTexts9[0],editTexts9[1],editTexts9[2],ed104));
editTexts9[2].addTextChangedListener(new EdThreeAverage(editTexts9[0],editTexts9[1],editTexts9[2],ed104));
editTexts10[0].addTextChangedListener(new EdThreeAverage(editTexts10[0],editTexts10[1],editTexts10[2],ed114));
editTexts10[1].addTextChangedListener(new EdThreeAverage(editTexts10[0],editTexts10[1],editTexts10[2],ed114));
editTexts10[2].addTextChangedListener(new EdThreeAverage(editTexts10[0],editTexts10[1],editTexts10[2],ed114));
// Fill in the measured values
editTexts1[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth1())[0]);
editTexts1[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth1())[1]);
editTexts1[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth1())[2]);
editTexts2[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth2())[0]);
editTexts2[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth2())[1]);
editTexts2[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth2())[2]);
editTexts3[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth3())[0]);
editTexts3[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth3())[1]);
editTexts3[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth3())[2]);
editTexts4[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth4())[0]);
editTexts4[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth4())[1]);
editTexts4[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth4())[2]);
editTexts5[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth5())[0]);
editTexts5[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth5())[1]);
editTexts5[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth5())[2]);
editTexts6[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth6())[0]);
editTexts6[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth6())[1]);
editTexts6[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth6())[2]);
editTexts7[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth7())[0]);
editTexts7[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth7())[1]);
editTexts7[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth7())[2]);
editTexts8[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth8())[0]);
editTexts8[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth8())[1]);
editTexts8[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth8())[2]);
editTexts9[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth9())[0]);
editTexts9[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth9())[1]);
editTexts9[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth9())[2]);
editTexts10[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth10())[0]);
editTexts10[1].setText(EdUtil.split(sampleCheck.getCarbonation_depth10())[1]);
editTexts10[2].setText(EdUtil.split(sampleCheck.getCarbonation_depth10())[2]);
editTexts11[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth())[0]);
editTexts12[0].setText(EdUtil.split(sampleCheck.getCarbonation_depth_avg())[0]);
editTexts13[0].setText(EdUtil.split(sampleCheck.getSpringback_prediction())[0]);
editTexts14[0].setText(EdUtil.split(sampleCheck.getRebound_file_name())[0]);// note: the field name is wrong
// If a measured value already exists, make the field non-editable (2019.3.26)
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth1())[0].equals(""), ed21,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth1())[1].equals(""), ed22,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth1())[2].equals(""), ed23,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth2())[0].equals(""), ed31,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth2())[1].equals(""), ed32,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth2())[2].equals(""), ed33,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth3())[0].equals(""), ed41,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth3())[1].equals(""), ed42,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth3())[2].equals(""), ed43,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth4())[0].equals(""), ed51,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth4())[1].equals(""), ed52,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth4())[2].equals(""), ed53,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth5())[0].equals(""), ed61,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth5())[1].equals(""), ed62,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth5())[2].equals(""), ed63,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth6())[0].equals(""), ed71,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth6())[1].equals(""), ed72,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth6())[2].equals(""), ed73,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth7())[0].equals(""), ed81,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth7())[1].equals(""), ed82,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth7())[2].equals(""), ed83,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth8())[0].equals(""), ed91,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth8())[1].equals(""), ed92,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth8())[2].equals(""), ed93,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth9())[0].equals(""), ed101,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth9())[1].equals(""), ed102,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth9())[2].equals(""), ed103,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth10())[0].equals(""), ed111,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth10())[1].equals(""), ed112,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth10())[2].equals(""), ed113,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth())[0].equals(""), ed121,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getCarbonation_depth_avg())[0].equals(""), ed122,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getSpringback_prediction())[0].equals(""), ed151,
getContext(), sampleCheck.getId(), map, serialNumber);
UpdateSampleCheckStatu(EdUtil.split(sampleCheck.getRebound_file_name())[0].equals(""), ed153,
getContext(), sampleCheck.getId(), map, serialNumber);
// Design value
//UpdateSampleCheckStatu(StrKit.notBlankJudge(sample.getStrength()), ed152);
}
// Design value comparison listener
// Fill in the design value
ed152.setText(StrKit.notBlank(sample.getStrength() )? sample.getStrength() +"": "");
// Design value save listener
ed152.addTextChangedListener(new EdDesignChangeSave("setStrength", sample, sample.getStrength() + "", ed152));
// Measured value save listeners
editTexts1[0].addTextChangedListener(new EdTextChange("setCarbonation_depth1", sampleCheck, sampleCheck.getCarbonation_depth1(), editTexts1));
editTexts1[1].addTextChangedListener(new EdTextChange("setCarbonation_depth1", sampleCheck, sampleCheck.getCarbonation_depth1(), editTexts1));
editTexts1[2].addTextChangedListener(new EdTextChange("setCarbonation_depth1", sampleCheck, sampleCheck.getCarbonation_depth1(), editTexts1));
editTexts2[0].addTextChangedListener(new EdTextChange("setCarbonation_depth2", sampleCheck, sampleCheck.getCarbonation_depth2(), editTexts2));
editTexts2[1].addTextChangedListener(new EdTextChange("setCarbonation_depth2", sampleCheck, sampleCheck.getCarbonation_depth2(), editTexts2));
editTexts2[2].addTextChangedListener(new EdTextChange("setCarbonation_depth2", sampleCheck, sampleCheck.getCarbonation_depth2(), editTexts2));
editTexts3[0].addTextChangedListener(new EdTextChange("setCarbonation_depth3", sampleCheck, sampleCheck.getCarbonation_depth3(), editTexts3));
editTexts3[1].addTextChangedListener(new EdTextChange("setCarbonation_depth3", sampleCheck, sampleCheck.getCarbonation_depth3(), editTexts3));
editTexts3[2].addTextChangedListener(new EdTextChange("setCarbonation_depth3", sampleCheck, sampleCheck.getCarbonation_depth3(), editTexts3));
editTexts4[0].addTextChangedListener(new EdTextChange("setCarbonation_depth4", sampleCheck, sampleCheck.getCarbonation_depth4(), editTexts4));
editTexts4[1].addTextChangedListener(new EdTextChange("setCarbonation_depth4", sampleCheck, sampleCheck.getCarbonation_depth4(), editTexts4));
editTexts4[2].addTextChangedListener(new EdTextChange("setCarbonation_depth4", sampleCheck, sampleCheck.getCarbonation_depth4(), editTexts4));
editTexts5[0].addTextChangedListener(new EdTextChange("setCarbonation_depth5", sampleCheck, sampleCheck.getCarbonation_depth5(), editTexts5));
editTexts5[1].addTextChangedListener(new EdTextChange("setCarbonation_depth5", sampleCheck, sampleCheck.getCarbonation_depth5(), editTexts5));
editTexts5[2].addTextChangedListener(new EdTextChange("setCarbonation_depth5", sampleCheck, sampleCheck.getCarbonation_depth5(), editTexts5));
editTexts6[0].addTextChangedListener(new EdTextChange("setCarbonation_depth6", sampleCheck, sampleCheck.getCarbonation_depth6(), editTexts6));
editTexts6[1].addTextChangedListener(new EdTextChange("setCarbonation_depth6", sampleCheck, sampleCheck.getCarbonation_depth6(), editTexts6));
editTexts6[2].addTextChangedListener(new EdTextChange("setCarbonation_depth6", sampleCheck, sampleCheck.getCarbonation_depth6(), editTexts6));
editTexts7[0].addTextChangedListener(new EdTextChange("setCarbonation_depth7", sampleCheck, sampleCheck.getCarbonation_depth7(), editTexts7));
editTexts7[1].addTextChangedListener(new EdTextChange("setCarbonation_depth7", sampleCheck, sampleCheck.getCarbonation_depth7(), editTexts7));
editTexts7[2].addTextChangedListener(new EdTextChange("setCarbonation_depth7", sampleCheck, sampleCheck.getCarbonation_depth7(), editTexts7));
editTexts8[0].addTextChangedListener(new EdTextChange("setCarbonation_depth8", sampleCheck, sampleCheck.getCarbonation_depth8(), editTexts8));
editTexts8[1].addTextChangedListener(new EdTextChange("setCarbonation_depth8", sampleCheck, sampleCheck.getCarbonation_depth8(), editTexts8));
editTexts8[2].addTextChangedListener(new EdTextChange("setCarbonation_depth8", sampleCheck, sampleCheck.getCarbonation_depth8(), editTexts8));
editTexts9[0].addTextChangedListener(new EdTextChange("setCarbonation_depth9", sampleCheck, sampleCheck.getCarbonation_depth9(), editTexts9));
editTexts9[1].addTextChangedListener(new EdTextChange("setCarbonation_depth9", sampleCheck, sampleCheck.getCarbonation_depth9(), editTexts9));
editTexts9[2].addTextChangedListener(new EdTextChange("setCarbonation_depth9", sampleCheck, sampleCheck.getCarbonation_depth9(), editTexts9));
editTexts10[0].addTextChangedListener(new EdTextChange("setCarbonation_depth10", sampleCheck, sampleCheck.getCarbonation_depth10(), editTexts10));
editTexts10[1].addTextChangedListener(new EdTextChange("setCarbonation_depth10", sampleCheck, sampleCheck.getCarbonation_depth10(), editTexts10));
editTexts10[2].addTextChangedListener(new EdTextChange("setCarbonation_depth10", sampleCheck, sampleCheck.getCarbonation_depth10(), editTexts10));
editTexts11[0].addTextChangedListener(new EdTextChange("setCarbonation_depth", sampleCheck, sampleCheck.getCarbonation_depth(), editTexts11[0]));
editTexts12[0].addTextChangedListener(new EdTextChange("setCarbonation_depth_avg", sampleCheck, sampleCheck.getCarbonation_depth_avg(),editTexts12[0]));
editTexts13[0].addTextChangedListener(new EdTextChange("setSpringback_prediction", sampleCheck, sampleCheck.getSpringback_prediction(), editTexts13[0]));
editTexts14[0].addTextChangedListener(new EdTextChange("setRebound_file_name", sampleCheck, sampleCheck.getRebound_file_name(), editTexts14[0]));
UpdateSampleCheckStatu.editOnFocusChangeListener(getContext(), sampleCheck.getId(), map, serialNumber, editTextsAll);
}
// Fill in the changed column names and serial numbers
private void fillMap(){
map.put(ed21.getId(),"carbonation_depth");
map.put(ed22.getId(),"carbonation_depth");
map.put(ed23.getId(),"carbonation_depth");
map.put(ed31.getId(),"carbonation_depth");
map.put(ed32.getId(),"carbonation_depth");
map.put(ed33.getId(),"carbonation_depth");
map.put(ed41.getId(),"carbonation_depth");
map.put(ed42.getId(),"carbonation_depth");
map.put(ed43.getId(),"carbonation_depth");
map.put(ed51.getId(),"carbonation_depth");
map.put(ed52.getId(),"carbonation_depth");
map.put(ed53.getId(),"carbonation_depth");
map.put(ed61.getId(),"carbonation_depth");
map.put(ed62.getId(),"carbonation_depth");
map.put(ed63.getId(),"carbonation_depth");
map.put(ed71.getId(),"carbonation_depth");
map.put(ed72.getId(),"carbonation_depth");
map.put(ed73.getId(),"carbonation_depth");
map.put(ed81.getId(),"carbonation_depth");
map.put(ed82.getId(),"carbonation_depth");
map.put(ed83.getId(),"carbonation_depth");
map.put(ed91.getId(),"carbonation_depth");
map.put(ed92.getId(),"carbonation_depth");
map.put(ed93.getId(),"carbonation_depth");
map.put(ed101.getId(),"carbonation_depth");
map.put(ed102.getId(),"carbonation_depth");
map.put(ed103.getId(),"carbonation_depth");
map.put(ed111.getId(),"carbonation_depth");
map.put(ed112.getId(),"carbonation_depth");
map.put(ed113.getId(),"carbonation_depth");
map.put(ed121.getId(),"carbonation_depth");
map.put(ed122.getId(),"carbonation_depth_avg");
map.put(ed151.getId(),"springback_prediction");
map.put(ed153.getId(),"rebound_file_name");
serialNumber.put(ed21.getId(),"0");
serialNumber.put(ed22.getId(),"1");
serialNumber.put(ed23.getId(),"2");
serialNumber.put(ed31.getId(),"3");
serialNumber.put(ed32.getId(),"4");
serialNumber.put(ed33.getId(),"5");
serialNumber.put(ed41.getId(),"6");
serialNumber.put(ed42.getId(),"7");
serialNumber.put(ed43.getId(),"8");
serialNumber.put(ed51.getId(),"9");
serialNumber.put(ed52.getId(),"10");
serialNumber.put(ed53.getId(),"11");
serialNumber.put(ed61.getId(),"12");
serialNumber.put(ed62.getId(),"13");
serialNumber.put(ed63.getId(),"14");
serialNumber.put(ed71.getId(),"15");
serialNumber.put(ed72.getId(),"16");
serialNumber.put(ed73.getId(),"17");
serialNumber.put(ed81.getId(),"18");
serialNumber.put(ed82.getId(),"19");
serialNumber.put(ed83.getId(),"20");
serialNumber.put(ed91.getId(),"21");
serialNumber.put(ed92.getId(),"22");
serialNumber.put(ed93.getId(),"23");
serialNumber.put(ed101.getId(),"24");
serialNumber.put(ed102.getId(),"25");
serialNumber.put(ed103.getId(),"26");
serialNumber.put(ed111.getId(),"27");
serialNumber.put(ed112.getId(),"28");
serialNumber.put(ed113.getId(),"29");
serialNumber.put(ed121.getId(),"0");
serialNumber.put(ed122.getId(),"0");
serialNumber.put(ed151.getId(),"0");
serialNumber.put(ed153.getId(),"0");
}
}
|
Java
|
UTF-8
| 628 | 2.0625 | 2 |
[] |
no_license
|
package com.gmail.perva4ina;
import com.gmail.perva4ina.BE.OnlineShop.Product;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import java.math.BigDecimal;
import java.util.concurrent.TimeUnit;
public class mainclass {
public static void main(String [] args){
// System.out.println("Hello, world!");
Product product = new Product();
product.setNaming("Tesoro Gram Spectrum");
product.setProductDescription("Mechanical keyboard");
product.setPrice(new BigDecimal("179.00"));
product.setAvailable(3);
product.addEntity();
}
}
|
Java
|
UTF-8
| 953 | 2.609375 | 3 |
[
"Apache-2.0"
] |
permissive
|
package javafx.plus.util;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import java.util.function.Consumer;
/**
* Contains all the List related utilities
* @author theBeacon
*/
public class ObservableListUtils {
private ObservableListUtils () {
}
/**
* Binds a target list to a source list so that additions and removals in the source
* are propagated to the target (one-way synchronisation).
* @param target the list to be kept in sync
* @param source the list whose changes are propagated
* @param <E> the element type
*/
public static <E> void bind(ObservableList<E> target,ObservableList<? extends E> source ){
source.addListener(new ListChangeListener<E>() {
@Override
public void onChanged(Change<? extends E> c) {
while (c.next()) {
if(c.wasRemoved()) {
target.removeAll(c.getRemoved());
}
if(c.wasAdded()) {
target.addAll(c.getAddedSubList());
}
}
}
});
}
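/*
 * Usage sketch (illustrative only, not part of the original utility):
 *
 *   ObservableList<String> source = FXCollections.observableArrayList();
 *   ObservableList<String> target = FXCollections.observableArrayList();
 *   ObservableListUtils.bind(target, source);
 *   source.add("a");    // "a" is added to target as well
 *   source.remove("a"); // the removal is propagated too
 *
 * Note that the binding is one-way: changes made directly to target are not
 * reflected back into source.
 */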
}
|
JavaScript
|
UTF-8
| 654 | 3.25 | 3 |
[] |
no_license
|
"use strict"
class Pizza {
constructor(photo, name, price) {
this.photo = photo;
this.name = name;
this.composition = new Map();
this.price = price;
}
addComposition(name, calories) {
this.composition.set(name, calories);
}
countCalories() {
let calories = 0;
for (const entry of this.composition.values()) {
calories += entry;
}
return calories;
}
showComposition() {
let composition = [];
for (let entry of this.composition.keys()) {
composition.push(entry);
}
return composition;
}
}
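// Usage sketch (the example values are made up, not taken from the original file):
// const margherita = new Pizza("margherita.jpg", "Margherita", 8.5);
// margherita.addComposition("mozzarella", 280);
// margherita.addComposition("tomato sauce", 80);
// console.log(margherita.countCalories());   // 360
// console.log(margherita.showComposition()); // ["mozzarella", "tomato sauce"]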
|
Swift
|
UTF-8
| 2,577 | 2.859375 | 3 |
[] |
no_license
|
//
// MealTableViewController.swift
// FoodTracker
//
// Created by Addison Francisco on 9/26/17.
// Copyright © 2017 Addison Francisco. All rights reserved.
//
import UIKit
class MealTableViewController: UITableViewController {
//MARK: Properties
var meals = [Meal]()
override func viewDidLoad() {
super.viewDidLoad()
// Load sample data
loadSampleMeals()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK: - Table view data source
override func numberOfSections(in tableView: UITableView) -> Int {
return 1
}
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return meals.count
}
override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
// Table view cells are reused and should be dequeued using a cell identifier.
let cellIdentifier = "MealTableViewCell"
guard let cell = tableView.dequeueReusableCell(withIdentifier: cellIdentifier, for: indexPath) as? MealTableViewCell else {
fatalError("The dequeued cell is not an instance of MealTableViewCell.")
}
let meal = meals[indexPath.row]
cell.nameLabel.text = meal.name
cell.photoImageView.image = meal.photo
cell.ratingControl.rating = meal.rating
return cell
}
override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
tableView.deselectRow(at: indexPath, animated: true)
}
//MARK: Actions
@IBAction func unwindToMealList(sender: UIStoryboardSegue) {
if let sourceViewController = sender.source as? MealViewController, let meal = sourceViewController.meal {
// Add a new meal
let newIndexPath = IndexPath(row: meals.count, section: 0)
meals.append(meal)
tableView.insertRows(at: [newIndexPath], with: .automatic)
}
}
//MARK: Private Methods
private func loadSampleMeals() {
let photo1 = UIImage(named: "meal1")
let photo2 = UIImage(named: "meal2")
let photo3 = UIImage(named: "meal3")
guard let meal1 = Meal(name: "Caprese Salad", photo: photo1, rating: 4) else {
fatalError("Unable to instantiate meal1")
}
guard let meal2 = Meal(name: "Chicken and Potatoes", photo: photo2, rating: 5) else {
fatalError("Unable to instantiate meal2")
}
guard let meal3 = Meal(name: "Pasta with Meatballs", photo: photo3, rating: 3) else {
fatalError("Unable to instantiate meal3")
}
meals += [meal1, meal2, meal3]
}
}
|
Java
|
GB18030
| 2,558 | 1.976563 | 2 |
[] |
no_license
|
package com.perky.safeguard361.activities;
import java.io.IOException;
import com.perky.safeguard361.R;
import com.perky.safeguard361.utils.SmsUtils;
import com.perky.safeguard361.utils.UIUtils;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
public class ToolsActivity extends Activity {
private ProgressDialog pd;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_tools);
}
public void numberAddressQuery(View view) {
Intent intent = new Intent(this, NumberAddressQueryActivity.class);
startActivity(intent);
}
public void backUpSms(View view) {
pd = new ProgressDialog(this);
pd.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
pd.setMessage("...");
pd.setTitle("");
pd.show();
new Thread(new Runnable() {
@Override
public void run() {
try {
SmsUtils.backUpSms(ToolsActivity.this,
new SmsUtils.SmsProgressCallBack() {
@Override
public void updateProgress(int progress) {
pd.setProgress(progress);
}
@Override
public void setMax(int size) {
pd.setMax(size);
}
});
} catch (IllegalArgumentException e) {
e.printStackTrace();
UIUtils.showToast(ToolsActivity.this, "Backup failed");
} catch (IllegalStateException e) {
e.printStackTrace();
UIUtils.showToast(ToolsActivity.this, "Backup failed");
} catch (IOException e) {
e.printStackTrace();
UIUtils.showToast(ToolsActivity.this, "Backup failed");
} finally {
pd.dismiss();
}
}
}).start();
}
public void restoreSms(View view) {
pd = new ProgressDialog(this);
pd.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
pd.setMessage("ԭ...");
pd.setTitle("");
pd.show();
new Thread(new Runnable() {
@Override
public void run() {
try {
SmsUtils.restoreSms(ToolsActivity.this,
new SmsUtils.SmsProgressCallBack() {
@Override
public void updateProgress(int progress) {
pd.setProgress(progress);
}
@Override
public void setMax(int size) {
pd.setMax(size);
}
});
} catch (Exception e) {
e.printStackTrace();
} finally {
pd.dismiss();
}
}
}).start();
}
public void openAppLock(View view) {
Intent intent = new Intent(this, AppLockAvtivity.class);
startActivity(intent);
}
}
|
Python
|
UTF-8
| 2,402 | 3.421875 | 3 |
[] |
no_license
|
""" Codewars kata: Conway's Game of Life - Unlimited Edition. https://www.codewars.com/kata/conways-game-of-life-unlimited-edition/train/python """
from collections import namedtuple
from pprint import pprint
from copy import deepcopy
Cell = namedtuple("Cell", "row col")
def neighbors(cells, cell):
    neighbors = 0
    start_row = max(cell.row - 1, 0)
    end_row = min(cell.row + 1, len(cells) - 1)
    start_col = max(cell.col - 1, 0)
    end_col = min(cell.col + 1, len(cells[0]) - 1)
    for r in range(start_row, end_row + 1):
        for c in range(start_col, end_col + 1):
            neighbors += cells[r][c]
    return neighbors - cells[cell.row][cell.col]
def get_generation(cells, generations):
    for x in range(generations):
        # Expand the matrix.
        for m in range(4):
            if 1 in cells[-1]: cells.append([0] * len(cells[-1]))
            cells = list(map(list, zip(*cells[::-1])))
        # Update each cell according to the number of neighbors.
        new_cells = deepcopy(cells)
        for idx_row in range(len(cells)):
            for idx_col in range(len(cells[-1])):
                count = neighbors(cells, Cell(row=idx_row, col=idx_col))
                if cells[idx_row][idx_col] == 1 and count not in range(2, 4):
                    new_cells[idx_row][idx_col] = 0
                elif cells[idx_row][idx_col] == 0 and count == 3:
                    new_cells[idx_row][idx_col] = 1
        cells = new_cells
        # Contract the matrix.
        for m in range(4):
            while cells and 1 not in cells[-1]: cells.pop()
            cells = list(map(list, zip(*cells[::-1])))
    # All done.
    return cells
#######################################################################################################################
#
# __main__
#
#######################################################################################################################
if __name__ == "__main__":
    # test = [[0, 0, 0, 0, 0],
    #         [0, 1, 0, 0, 0],
    #         [0, 0, 1, 1, 0],
    #         [0, 1, 1, 0, 0],
    #         [0, 0, 0, 0, 0]]
    # pprint(test)
    # print(neighbors(test, Cell(row=1, col=2)))
    # exit()
    start = [[1,0,0],
             [0,1,1],
             [1,1,0]]
    end = [[0,1,0],
           [0,0,1],
           [1,1,1]]
    result = get_generation(start, 1)
    print(result)
    print(result == end)
|
Ruby
|
UTF-8
| 219 | 3.171875 | 3 |
[
"MIT"
] |
permissive
|
def anagram_detection(parent, child)
sorted_child = child.chars.sort
count = 0
parent.chars.each_with_index do |_, index|
count += 1 if parent[index, child.length].chars.sort == sorted_child
end
count
end
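# Usage sketch (example strings are illustrative, not from the original file):
#   anagram_detection("AdnBndAndBdaBn", "dAn") # => 4 ("Adn", "ndA", "dAn", "And")
#   anagram_detection("laboratory", "rat")     # => 1 ("rat" itself)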
|
Java
|
UTF-8
| 733 | 3.65625 | 4 |
[
"WTFPL"
] |
permissive
|
class Factorial {
public static int fact(int n) {
if (n == 0) return 1;
return n * fact(n-1);
}
public static int fact2(int n){
int fact = 1;
for (int i=1; i<=n; i++) fact *=i;
return fact;
}
public static void main(String[] args) {
if (args.length != 1) {
System.err.println("Nem megfelelő számú parancssori argumentum.");
System.exit(0);
} else {
int N = Integer.parseInt(args[0]);
if (N>=0 && N<=10) {
System.out.println(N + "!=" + fact(N) + " (rekurzióval)");
System.out.println(N + "!=" + fact2(N) + " (rekurzióval nélkül)");
}
}
}
}
|
Python
|
UTF-8
| 323 | 2.75 | 3 |
[] |
no_license
|
import pytest
from lesson_36 import add_func
from lesson_36 import mul_func
from lesson_36 import div_func
from lesson_36 import sub_func
@pytest.mark.calc
def test_sub_func():
    assert sub_func(1, 1) == 0
def test_mul_func():
    assert mul_func(2, 3) == 6
def test_div_func():
    assert div_func(100, 10) == 10
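# add_func is imported above but never exercised; a minimal sketch of the missing
# test, assuming add_func(a, b) simply returns a + b:
def test_add_func():
    assert add_func(2, 3) == 5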
|
Shell
|
UTF-8
| 178 | 3.296875 | 3 |
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
interface=$1
for file in /etc/sysconfig/network-scripts/ifup-local.d/[0-9][0-9]*-${interface}.sh; do
if [ -x $file ]; then
$file $interface
fi
done
|
C#
|
UTF-8
| 2,505 | 2.578125 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using InfluxData.Net.Common.Enums;
using InfluxData.Net.Common.Helpers;
using InfluxData.Net.InfluxDb;
using InfluxData.Net.InfluxDb.Models;
namespace InfluxDb._1secBench
{
class Program
{
const int TanksCount = 30;
const int Days = 365;
const int BatchSize = 50_000;
static readonly Guid[] _tanksIds = GetTanksIds();
static readonly Random _rnd = new Random();
static int _ticks = 0;
static async Task Main(string[] args)
{
var client = Setup();
await client.Database.CreateDatabaseAsync("CIPTanks");
var readingCount = TanksCount * (long)TimeSpan.FromDays(Days).TotalSeconds;
var batchSteps = readingCount / BatchSize;
using (var progress = new ProgressBar())
{
for (var i = 0L; i < batchSteps; ++i)
{
progress.Report((double)i / batchSteps);
await WriteBatch(client, BatchSize);
}
}
var lastBatchSize = readingCount - batchSteps * BatchSize;
await WriteBatch(client, lastBatchSize);
Console.WriteLine($"{readingCount} series successfully written!");
}
static async Task WriteBatch(InfluxDbClient client, long batchSize)
{
var readings = new List<Point>();
for (var j = 0L; j < batchSize; ++j)
{
readings.Add(GenerateReading());
}
await client.Client.WriteAsync(readings, "CIPTanks");
}
static Point GenerateReading()
{
var reading = new Point()
{
Name = "reading",
Timestamp = DateTime.Now.AddDays(-Days).AddSeconds(-_ticks++),
Tags = new Dictionary<string, object>()
{
{ "CIPTankId", _tanksIds[_rnd.Next(0, TanksCount)] }
},
Fields = new Dictionary<string, object>()
{
{ "Humidity", _rnd.Next(40, 50+1) },
{ "Humidity2", _rnd.Next(40, 50+1) },
{ "Temperature", _rnd.Next(75, 115+1) },
{ "Temperature2", _rnd.Next(75, 115+1) },
{ "FlipCount", _rnd.Next(1, 3+1) }
}
};
return reading;
}
static InfluxDbClient Setup()
{
var client = new InfluxDbClient("http://localhost:8086/", "", "",
InfluxDbVersion.Latest);
return client;
}
static Guid[] GetTanksIds()
{
var ids = new Guid[TanksCount];
for (int i = 0; i < ids.Length; ++i)
{
ids[i] = Guid.NewGuid();
}
return ids;
}
}
}
|
Python
|
UTF-8
| 796 | 3.375 | 3 |
[] |
no_license
|
import sys
import math
t = int(input())
def getDivisor(n):
    result = []
    i = 1
    while i <= math.sqrt(n):
        if (n % i == 0):
            if (n / i == i):
                result.append(i)
            else:
                result.append(i)
                result.append(n // i)
        i = i + 1
    return result
for case in range(1, t + 1):
l, r = [int(i) for i in input().split(" ")]
interesting = 0
for x in range(l, r+1):
aliceCnt = 0
bobCnt = 0
divisors = getDivisor(x+1)
for divisor in divisors:
if divisor % 2 == 0:
aliceCnt += 1
else:
bobCnt += 1
if abs(aliceCnt - bobCnt) <= 2:
interesting += 1
print("Case #{}: {}".format(case, interesting))
|
PHP
|
UTF-8
| 3,120 | 2.53125 | 3 |
[] |
no_license
|
<!--<form action="" method="post" enctype="multipart/form-data">-->
<?php
//creating objects
$format = new Format();
$db = new Database();
?>
<div class="container-fluid">
<div class="row">
<div class="col-xs-12 col-md-2">
<div class="col-xs-12 col-md-12 container-fluid bg-dark text-white rounded" style="padding-top:10px;padding-bottom:10px">
<h4>Last Winner!</h4>
<hr>
<div>
<p>Daily Math Challenge</p>
<hr>
<?php
$query = "SELECT * FROM winner_list WHERE ans_cat = 'Daily Math Challenge'";
$GetDataWin =$db->select($query);
if($GetDataWin){
while($rowWinner = $GetDataWin->fetch_assoc()){
$uniqueId = $rowWinner['winner_id'];
$queryWinner = "SELECT * FROM answere where ans_cat = 'Daily Math Challenge' AND user_unique_id='$uniqueId'";
$GetDataWinner = $db->select($queryWinner);
$rowsWinner = $GetDataWinner->fetch_assoc();
$un = $rowsWinner['user_name'];
echo "<p><a href='profile.php?user=$un'>$un</a></p>";
}
}else {
echo "No Winner had delcare yet!";
}
?>
<p>Weekly Math Challenge</p>
<hr>
<?php
$query = "SELECT * FROM winner_list WHERE ans_cat = 'Weekly Math Challenge'";
$GetDataWin =$db->select($query);
if($GetDataWin){
while($rowWinner = $GetDataWin->fetch_assoc()){
$uniqueId = $rowWinner['winner_id'];
$queryWinner = "SELECT * FROM answere where ans_cat = 'Weekly Math Challenge' AND user_unique_id='$uniqueId'";
$GetDataWinner = $db->select($queryWinner);
$rowsWinner = $GetDataWinner->fetch_assoc();
$un = $rowsWinner['user_name'];
echo "<p><a href='profile.php?user=$un'>$un</a></p>";
}
}else {
echo "No Winner had delcare yet!";
}
?>
</div>
</div>
<br>
<div class="col-xs-12 col-md-12 container-fluid bg-dark text-white rounded" style="padding-top:10px;padding-bottom:10px">
<h4>Past Problem</h4>
<hr>
<div>
<?php
//Code for View Math Problems
// $username = $_SESSION['ins_name'];
// $userlogin = $_SESSION['ins_login'];
$cat=$format->Stext($_REQUEST['CategoryID']);
//$link=strip_tags($_REQUEST['LinkID']);
$query = "SELECT * FROM post where category='$cat' ORDER BY id DESC";
$read = $db->select($query);
if($cat == "Daily Math Challenge")
{
echo "<p>Problems of Daily Math Challenge</p>";
}
elseif($cat == "Weekly Math Challenge")
{
echo "<p>Problems of Weekly Math Challenge</p>";
}
//echo "<div class='card'>";
//echo "<div class='card-body'>";
echo "<div class='card-header'>";
if(mysqli_num_rows($read) > 0)
{
while($row=$read->fetch_assoc()){
?>
<?php
echo "<a class='card-link' href='problem.php?CategoryID={$row['category']}&LinkID={$row['problem']}'> {$row['post_title']} <a><br>";
?>
<?php }
}?>
<?php
echo "</div>";
//echo "</div>";
//echo "</div>";
?>
<br>
<hr>
<a class="text-danger" href="">See all problems</a>
</div>
</div>
</div>
<!--</form>-->
|
JavaScript
|
UTF-8
| 405 | 3.234375 | 3 |
[] |
no_license
|
function solve(arr) {
let firstDiagonal = 0;
let secondDiagonal = 0;
for (let i = 0; i < arr.length; i++) {
firstDiagonal += arr[i][i];
secondDiagonal += arr[i][arr.length - 1 - i];
}
return[firstDiagonal, secondDiagonal].join(' ')
}
console.log(solve(
[[20, 40],
[10, 60]]))
console.log(solve(
[[3, 5, 17],
[-1, 7, 14],
[1, -8, 89]]))
|
JavaScript
|
UTF-8
| 1,141 | 2.59375 | 3 |
[] |
no_license
|
import React from "react";
import "./App.css";
import FormLabel from "react-bootstrap/FormLabel";
import FormControl from "react-bootstrap/FormControl";
import FormGroup from "react-bootstrap/FormGroup";
const marked = require("marked");
class App extends React.Component {
constructor(props) {
super(props);
this.state = { markdown: "" };
}
updateMarkdown = function(markdown) {
this.setState({ markdown });
};
render() {
let { markdown } = this.state;
return (
<div className="App container">
<div>
<FormGroup controlId="formControlsTextarea">
<FormLabel>
<h1>Markdown Input</h1>
</FormLabel>
<FormControl
componentClass="textarea"
placeholder="Enter Markdown"
value={markdown}
onChange={event => this.updateMarkdown(event.target.value)}
/>
</FormGroup>
</div>
<div>
<h1>Markdown Output</h1>
<div dangerouslySetInnerHTML={{ __html: marked(markdown) }} />
</div>
</div>
);
}
}
export default App;
|
C++
|
UTF-8
| 12,053 | 2.578125 | 3 |
[] |
no_license
|
#include "unrolledlinklist.h"
#include <cassert>
#include "pmallocator.h"
#include "stdallocator.h"
#include "arenaallocator.h"
#include "simulatedarenaallocator.h"
#include "software_prefetch.h"
#include "pairvector.h"
namespace hm4{
constexpr size_t UnrollingCapacity = 2048;
template<class T_Allocator>
struct UnrolledLinkList<T_Allocator>::Node{
using MyPairVector = PairVector<T_Allocator, UnrollingCapacity>;
public:
MyPairVector data;
Node *next = nullptr;
int cmp(HPair::HKey const hkey, std::string_view const key) const{
return HPair::cmp(data.backData().hkey, *data.backData().pair, hkey, key);
}
constexpr auto hkey() const{
return data.backData().hkey;
}
constexpr void prefetch() const{
constexpr bool use_prefetch = true;
if constexpr(use_prefetch){
builtin_prefetch(& this->data.back(), 0, 1);
builtin_prefetch(this->next, 0, 1);
}
}
constexpr static auto begin_or_null(const Node *node){
using It = typename PairVectorConfig::iterator;
if (node)
return node->data.begin();
else
return It{};
}
};
template<class T_Allocator>
struct UnrolledLinkList<T_Allocator>::NodeLocator{
Node **prev;
Node *node;
bool found = false;
};
namespace{
	// we do not really need to check the integrity of the list
constexpr bool corruptionCheck = false;
[[maybe_unused]]
void corruptionExit(){
fprintf(stderr, "============================================\n");
fprintf(stderr, "=== Detected UnrolledLinkList corruption ===\n");
fprintf(stderr, "============================================\n");
exit(100);
}
}
// ==============================
template<class T_Allocator>
UnrolledLinkList<T_Allocator>::UnrolledLinkList(Allocator &allocator) : allocator_(& allocator){
zeroing_();
}
template<class T_Allocator>
UnrolledLinkList<T_Allocator>::UnrolledLinkList(UnrolledLinkList &&other):
head_ (std::move(other.head_ )),
lc_ (std::move(other.lc_ )),
allocator_ (std::move(other.allocator_ )){
other.zeroing_();
}
template<class T_Allocator>
void UnrolledLinkList<T_Allocator>::deallocate_(Node *node){
using namespace MyAllocator;
node->data.destruct(getAllocator());
deallocate(getAllocator(), node);
}
template<class T_Allocator>
void UnrolledLinkList<T_Allocator>::zeroing_(){
lc_.clr();
head_ = nullptr;
}
template<class T_Allocator>
bool UnrolledLinkList<T_Allocator>::clear(){
if (allocator_->reset() == false){
for(Node *node = head_; node; ){
node->prefetch();
Node *copy = node;
node = node->next;
deallocate_(copy);
}
}
zeroing_();
return true;
}
template<class T_Allocator>
void UnrolledLinkList<T_Allocator>::print() const{
printf("==begin list==\n");
for(const Node *node = head_; node; node = node->next){
printf("Node: %p\n", (void *) node);
printf("--begin data--\n");
for(auto &x : node->data)
x.print();
printf("---end data---\n");
}
printf("===end list===\n\n\n\n\n");
}
template<class T_Allocator>
auto UnrolledLinkList<T_Allocator>::fix_iterator_(const Node *node, typename PairVectorConfig::iterator it) const -> iterator{
if (it != node->data.end())
return iterator{ node, it };
if (!node->next)
return end();
	return iterator{ node->next, node->next->data.begin() };
}
template<class T_Allocator>
auto UnrolledLinkList<T_Allocator>::fix_iterator_(const Node *node, typename PairVectorConfig::const_ptr_iterator it) const -> iterator{
return fix_iterator_(node, typename PairVectorConfig::iterator(it));
}
template<class T_Allocator>
template<class PFactory>
auto UnrolledLinkList<T_Allocator>::insertF(PFactory &factory) -> InsertResult{
auto constructNode = [](auto &allocator) -> Node *{
using namespace MyAllocator;
Node *newnode = allocate<Node>(allocator);
if (!newnode)
return nullptr;
newnode->data.construct();
// next is not initialized
//newnode->next = nullptr;
return newnode;
};
auto const &key = factory.getKey();
auto const hkey = HPair::SS::create(key);
const auto nl = locate_(hkey, key);
if (nl.found){
// update pair in place.
return nl.node->data.insertF(hkey, factory, getAllocator(), lc_);
}
Node *node = nl.node;
if (!node){
// there is no node, make new one.
Node *newnode = constructNode(getAllocator());
if (!newnode)
return InsertResult::errorNoMemory();
auto const result = newnode->data.insertF(hkey, factory, getAllocator(), lc_);
if (!result.ok){
// we can use smart_ptr here...
deallocate_(newnode);
return result;
}
newnode->next = std::exchange(*nl.prev, newnode);
return result;
}
if (node->data.full()){
// current node is full, make new one and split elements.
Node *newnode = constructNode(getAllocator());
if (!newnode)
return InsertResult::errorNoMemory();
newnode->next = std::exchange(node->next, newnode);
node->data.split(newnode->data);
		// it is unclear where the pair should go,
		// but we have knowledge of how the node is split
if (int const cmp = nl.node->cmp(hkey, key); cmp >= 0){
// insert in the old node
return node->data.insertF(hkey, factory, getAllocator(), lc_);
}else{
// insert in the new node
return newnode->data.insertF(hkey, factory, getAllocator(), lc_);
}
}
// insert pair in current node.
	// TODO: optimize this; currently it does the binary search all over again.
return node->data.insertF(hkey, factory, getAllocator(), lc_);
}
template<class T_Allocator>
bool UnrolledLinkList<T_Allocator>::erase_(std::string_view const key){
// better Pair::check(key), but might fail because of the caller.
assert(!key.empty());
auto const hkey = HPair::SS::create(key);
auto nl = locate_(hkey, key);
if (!nl.node)
return false;
if constexpr(corruptionCheck)
if (*nl.prev != nl.node)
corruptionExit();
if (!nl.node->data.erase_(hkey, key, getAllocator(), lc_))
return false;
if (nl.node->data.size())
return true;
// node is zero size, it must be removed
*nl.prev = nl.node->next;
deallocate_(nl.node);
return true;
}
// ==============================
template<class T_Allocator>
template<typename HPairHKey>
auto UnrolledLinkList<T_Allocator>::locate_(HPairHKey const hkey, std::string_view const key) -> NodeLocator{
// HPairHKey is hidden HPair::HKey
static_assert(std::is_same_v<HPairHKey, HPair::HKey>);
// better Pair::check(key), but might fail because of the caller.
assert(!key.empty());
Node **jtable = & head_;
// auto hkey = HPair::SS::create(key);
for(Node *node = *jtable; node; node = node->next){
node->prefetch();
if (!node->next){
// this is the last node, return
return { jtable, node };
}
		// this allows comparison with a single ">" instead of a more complicated 3-way compare.
if (node->hkey() >= hkey){
if (int const cmp = node->cmp(hkey, key); cmp >= 0)
return { jtable, node, cmp == 0 };
}
jtable = & node->next;
}
// list seems to be empty
return { jtable, nullptr };
}
template<class T_Allocator>
template<bool ExactMatch>
auto UnrolledLinkList<T_Allocator>::find(std::string_view const key, std::bool_constant<ExactMatch>) const -> iterator{
assert(!key.empty());
auto const hkey = HPair::SS::create(key);
const Node *node;
for(node = head_; node; node = node->next){
node->prefetch();
		// this allows comparison with a single ">" instead of a more complicated 3-way compare.
if (node->hkey() >= hkey){
if (int const cmp = node->cmp(hkey, key); cmp >= 0){
if (cmp == 0){
// found
return iterator{ node, node->data.end() - 1 };
}
break;
}
}
}
if (!node)
return end();
// search inside node
auto const &[found, it] = node->data.locateC_(hkey, key);
if constexpr(ExactMatch)
return found ? iterator{ node, it } : end();
else
return fix_iterator_(node, it);
}
// ==============================
template<class T_Allocator>
auto UnrolledLinkList<T_Allocator>::iterator::operator++() -> iterator &{
if (++it_ != node_->data.end())
return *this;
node_ = node_->next;
it_ = Node::begin_or_null(node_);
return *this;
}
template<class T_Allocator>
const Pair &UnrolledLinkList<T_Allocator>::iterator::operator*() const{
return *it_;
}
template<class T_Allocator>
auto UnrolledLinkList<T_Allocator>::begin() const -> iterator{
return iterator{ head_, Node::begin_or_null(head_) };
}
// ==============================
template class UnrolledLinkList<MyAllocator::PMAllocator>;
template class UnrolledLinkList<MyAllocator::STDAllocator>;
template class UnrolledLinkList<MyAllocator::ArenaAllocator>;
template class UnrolledLinkList<MyAllocator::SimulatedArenaAllocator>;
template auto UnrolledLinkList<MyAllocator::PMAllocator> ::find(std::string_view const key, std::true_type ) const -> iterator;
template auto UnrolledLinkList<MyAllocator::STDAllocator> ::find(std::string_view const key, std::true_type ) const -> iterator;
template auto UnrolledLinkList<MyAllocator::ArenaAllocator> ::find(std::string_view const key, std::true_type ) const -> iterator;
template auto UnrolledLinkList<MyAllocator::SimulatedArenaAllocator> ::find(std::string_view const key, std::true_type ) const -> iterator;
template auto UnrolledLinkList<MyAllocator::PMAllocator> ::find(std::string_view const key, std::false_type) const -> iterator;
template auto UnrolledLinkList<MyAllocator::STDAllocator> ::find(std::string_view const key, std::false_type) const -> iterator;
template auto UnrolledLinkList<MyAllocator::ArenaAllocator> ::find(std::string_view const key, std::false_type) const -> iterator;
template auto UnrolledLinkList<MyAllocator::SimulatedArenaAllocator> ::find(std::string_view const key, std::false_type) const -> iterator;
template auto UnrolledLinkList<MyAllocator::PMAllocator> ::insertF(PairFactory::Normal &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::STDAllocator> ::insertF(PairFactory::Normal &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::ArenaAllocator> ::insertF(PairFactory::Normal &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::SimulatedArenaAllocator> ::insertF(PairFactory::Normal &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::PMAllocator> ::insertF(PairFactory::Expires &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::STDAllocator> ::insertF(PairFactory::Expires &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::ArenaAllocator> ::insertF(PairFactory::Expires &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::SimulatedArenaAllocator> ::insertF(PairFactory::Expires &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::PMAllocator> ::insertF(PairFactory::Tombstone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::STDAllocator> ::insertF(PairFactory::Tombstone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::ArenaAllocator> ::insertF(PairFactory::Tombstone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::SimulatedArenaAllocator> ::insertF(PairFactory::Tombstone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::PMAllocator> ::insertF(PairFactory::Clone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::STDAllocator> ::insertF(PairFactory::Clone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::ArenaAllocator> ::insertF(PairFactory::Clone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::SimulatedArenaAllocator> ::insertF(PairFactory::Clone &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::PMAllocator> ::insertF(PairFactory::IFactory &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::STDAllocator> ::insertF(PairFactory::IFactory &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::ArenaAllocator> ::insertF(PairFactory::IFactory &factory) -> InsertResult;
template auto UnrolledLinkList<MyAllocator::SimulatedArenaAllocator> ::insertF(PairFactory::IFactory &factory) -> InsertResult;
} // namespace
|
Shell
|
UTF-8
| 222 | 3.21875 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/bash
echo -n "Enter temperature (F) : "
read tf
# formula Tc=(5/9)*(Tf-32)
tc=$(echo "scale=4;(5/9)*($tf-32)"|bc)
tk=$(echo "scale=4;$tc+273.15"|bc)
echo "Temperature (C) = $tc"
echo "Temperature (K) = $tk"
exit 0
|
Java
|
UTF-8
| 2,383 | 2.421875 | 2 |
[] |
no_license
|
package SQLRepository;
import CertificateRepository.TagMapper;
import Entities.Tag;
import RepositoryInterfaces.TagRepository;
import RepositorySpecification.Specification;
import RepositorySpecification.TagSpecification;
import sqlconnection.SQLConnect;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
public class TagSQLRepository implements TagRepository {
@Override
public int add(Tag tag) throws SQLException {
        final String queryForInsertRequest = "INSERT INTO tag VALUES(?, ?)";
        PreparedStatement statement = new SQLConnect().SQLConnection().prepareStatement(queryForInsertRequest, Statement.RETURN_GENERATED_KEYS);
        // the statement has exactly two placeholders: the tag id and the tag name
        statement.setLong(1, tag.getId());
        statement.setString(2, tag.getName());
        int risultato = statement.executeUpdate();
        ResultSet generatedKeys = statement.getGeneratedKeys();
        return risultato;
}
@Override
public int update(Tag tag) throws SQLException {
        final String queryForUpdateRequest = "UPDATE tag set name = ? WHERE name = ?";
        PreparedStatement statement = new SQLConnect().SQLConnection().prepareStatement(queryForUpdateRequest);
        statement.setString(1, "Nastassia"); // the new tag name is hard-coded here
statement.setString(2, tag.getName());
int i = statement.executeUpdate();
return i;
}
@Override
public int remove(Tag tag) throws SQLException {
        final String queryForRemoveRequest = "DELETE from tag WHERE name= ?";
        PreparedStatement statement = new SQLConnect().SQLConnection().prepareStatement(queryForRemoveRequest);
statement.setString(1, tag.getName());
int i = statement.executeUpdate();
return i;
}
@Override
public List<Tag> getObjects(Specification specification) throws SQLException {
return null;
}
@Override
public List<Tag> getObjects(TagSpecification tagSpecification) throws SQLException {
List<Tag> tagsList = new ArrayList<>();
PreparedStatement statement = new SQLConnect().SQLConnection().prepareStatement(tagSpecification.returnQueueOfEntity());
ResultSet rs = statement.executeQuery();
return (new TagMapper().readTagsToList(rs));
}
}
|
Python
|
UTF-8
| 1,033 | 3.6875 | 4 |
[] |
no_license
|
# -*- coding: utf-8 -*-
"""
Created on Thu May 21 12:41:02 2020
@author: User
Is Temperature a Random Walk (with Drift)?
An ARMA model is a simplistic approach to forecasting climate changes, but it
illustrates many of the topics covered in this class.
The DataFrame temp_NY contains the average annual temperature in Central Park,
NY from 1870-2016 (the data was downloaded from the NOAA here). Plot the data
and test whether it follows a random walk (with drift).
"""
# Import the adfuller function from the statsmodels module
from statsmodels.tsa.stattools import adfuller
import matplotlib.pyplot as plt
import pandas as pd
temp_NY = pd.read_excel('temp_NY.xlsx')
temp_NY = temp_NY.set_index('DATE')
# Convert the index to a datetime object
temp_NY.index = pd.to_datetime(temp_NY.index, format= '%Y')
# Plot average temperatures
temp_NY.plot()
plt.show()
# Compute and print ADF p-value
result = adfuller(temp_NY['TAVG'])
print("The p-value for the ADF test is ", result[1])
|
Java
|
UTF-8
| 1,540 | 2.34375 | 2 |
[
"Apache-2.0"
] |
permissive
|
package org.openehealth.ipf.commons.xml;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.openehealth.ipf.commons.core.modules.api.ValidationException;
import org.springframework.core.io.ClassPathResource;
public class XsdValidatorTest {
private XsdValidator validator;
private static final String SCHEMA_RESOURCE = "/xsd/test.xsd";
@Before
public void setUp() throws Exception {
validator = new XsdValidator();
}
@Test
public void testValidate() throws Exception {
XsdValidator.getCachedSchemas().clear();
Source testXml = new StreamSource(new ClassPathResource("xsd/test.xml")
.getInputStream());
validator.validate(testXml, SCHEMA_RESOURCE);
Assert.assertTrue(XsdValidator.getCachedSchemas().containsKey(
SCHEMA_RESOURCE));
}
@Test(expected = ValidationException.class)
public void testValidateFails() throws Exception {
boolean schemaExisted = XsdValidator.getCachedSchemas().containsKey(
SCHEMA_RESOURCE);
int cacheSize = XsdValidator.getCachedSchemas().size();
Source testXml = new StreamSource(new ClassPathResource(
"xsd/invalidtest.xml").getInputStream());
validator.validate(testXml, SCHEMA_RESOURCE);
if (schemaExisted) {
Assert.assertEquals(cacheSize, XsdValidator.getCachedSchemas()
.size());
} else {
Assert.assertEquals(cacheSize + 1, XsdValidator.getCachedSchemas()
.size());
}
}
}
|
Markdown
|
UTF-8
| 2,185 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
# Ethernet Frames Message Queue (EFMQ)
EFMQ provides an MQTT pub/sub style abstraction for Ethernet Frame messaging.
[](https://godoc.org/github.com/olliephillips/efmq)
EFMQ is like MQTT for your Local Area Network. Unlike MQTT, no remote or local broker is required; message traffic is effectively broadcast peer-to-peer. With EFMQ, messages never leave your network.
Messaging can be two-way. Each node can operate as either a publisher or subscriber, or both.
This package leans heavily on @mdlayher's [raw](https://github.com/mdlayher/raw) and [ethernet](https://github.com/mdlayher/ethernet) packages, which do almost all the heavy lifting.
## Usage
Basic publisher and subscriber examples are provided below. Nodes can publish and subscribe to multiple topics.
The API follows a typical MQTT client API loosely.
```go
// Create connection
mq, _ := efmq.NewEFMQ(networkInterface string)
// Publish
mq.Publish(topic string, payload string)
// Subscribe
mq.Subscribe(topic string)
// Unsubscribe
mq.Unsubscribe(topic string)
// List subscriptions
mq.Subscriptions()
// Start listening
mq.Listen()
// Message channel
mq.Message
// Message
Message struct {
Topic string
Payload string
}
```
### Publisher example
The code below will publish data to the `fermenter` topic every second. `en1` is the network interface on Mac (my Mac at least). On a Raspberry Pi it might be `wlan0`. Use `netstat -i` to discover.
```go
mq, err := efmq.NewEFMQ("en1")
if err != nil {
log.Fatal(err)
}
t := time.NewTicker(1 * time.Second)
for range t.C {
if err := mq.Publish("fermenter", "20.5"); err != nil {
log.Fatalln(err)
}
}
```
### Subscriber example
The code below sets up a subscription to the `fermenter` topic and then listens for messages. Messages are received on a channel.
```go
mq, err := efmq.NewEFMQ("wlan0")
if err != nil {
log.Fatal(err)
}
mq.Subscribe("fermenter")
mq.Listen()
for msg := range mq.Message {
fmt.Println("topic:", msg.Topic)
fmt.Println("message:", msg.Payload)
}
```
## Todo
- Better test coverage
- Check that messages do not exceed the Ethernet frame payload limit (1500 bytes?)
|
Markdown
|
UTF-8
| 610 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
# letsencrypt-autorenew
Bash script for auto renew let's encrypt ssl certificates
As you may know, all certificates are valid for 3 months.
So you need to renew them every three months. Ouch.
Here is a way to automate it.
1) Configure config file
nano /PathToLetsencrypt/letsencrypt/cli.ini
2) Edit script to add your domains that you want to renew certificates
nano /PathToLetsencrypt/letsencrypt/cron.sh
3) Don't forget to make it executable
chmod +x /PathToLetsencrypt/letsencrypt/cron.sh
4) Cron example: every month
1 1 1 * * /PathToLetsencrypt/letsencrypt/cron.sh >> /PathToLetsencrypt/letsencrypt/cron.log
|
Markdown
|
UTF-8
| 749 | 2.5625 | 3 |
[] |
no_license
|
# Listeners: What are conflict listeners and how to work with them?
To allow users to handle document replication conflicts automatically, we introduced a Document Conflict listener.
To create your own listener of this type, just implement the `IDocumentConflictListener` interface.
{CODE:java document_conflict_listener@ClientApi\Listeners\Conflict.java /}
##Example
This example shows how to create an automatic conflict resolver by using `IDocumentConflictListener`, which will pick the newest item from the list of conflicted documents:
{CODE:java document_conflict_example@ClientApi\Listeners\Conflict.java /}
## Related articles
- [Server : Replication : Conflicts](../../server/scaling-out/replication/replication-conflicts)
|
TypeScript
|
UTF-8
| 455 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
import { LOG_IN, LOG_OUT } from "../../../resources/strings/actions";
export interface Authenticate {
user?: object;
}
// Action constants and shape
export const SEND_MESSAGE = "SEND_MESSAGE";
export const DELETE_MESSAGE = "DELETE_MESSAGE";
interface LoginAction {
type: typeof LOG_IN;
payload: Authenticate;
}
interface LogoutAction {
type: typeof LOG_OUT;
payload: null;
}
export type AuthenticateActionTypes = LoginAction | LogoutAction;
|
Java
|
UTF-8
| 4,018 | 3.640625 | 4 |
[] |
no_license
|
import java.util.Scanner;
/**
*
* @author Eltaher
*/
public class HeadAndTailLinkedListExample {
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
Scanner scan = new Scanner(System.in);
int value, counter = 0;
try {
System.out.println();
System.out.print("Enter at least 3 positive Integers...");
do {
System.out.println();
System.out.print("Enter a positive integer or 0 to exit: ");
value = scan.nextInt();
if (value > 0)
{
counter++;
if (counter%2 != 0)
Integer_LinkedList_Management.addFromHead(value);
else
Integer_LinkedList_Management.addFromTail(value);
}
else if (value < 0)
{
System.out.println();
System.out.print("Only positive integers are accepted.");
}
else if (value == 0 && counter < 3)
{
System.out.println();
System.out.print("You should enter at least 3 integers.");
}
} while ((value != 0) || counter < 3);
List_All();
System.out.println();
System.out.println("Here is the FIRST value in the List traversing from HEAD...");
System.out.println();
System.out.println(Integer_LinkedList_Management.getFirst());
System.out.println();
System.out.println("Here is the LAST value in the List traversing from HEAD...");
System.out.println();
System.out.println(Integer_LinkedList_Management.getLastFromHead());
System.out.println();
System.out.println("Here is the LAST value in the List traversing from TAIL...");
System.out.println();
System.out.println(Integer_LinkedList_Management.getLastFromTail());
System.out.println();
System.out.print("Enter a new value to add to the rear of the list using HEAD traversal: ");
value = scan.nextInt();
Integer_LinkedList_Management.addFromHead(value);
List_All();
System.out.println();
System.out.print("Enter a new value to add to the rear of the list using TAIL traversal: ");
value = scan.nextInt();
Integer_LinkedList_Management.addFromTail(value);
List_All();
// This works with doubly linkedlist
// do {
// System.out.println();
// System.out.print("Enter the index of the value you want to get from the LinkedLst or 0 to exit: ");
// value = scan.nextInt();
// if (value > 0)
// System.out.println(Integer_LinkedList_Management.get(counter));
// else if (value < 0)
// System.out.println("Invalid index entered.");
// } while (value != 0);
}
catch (Exception exp)
{
System.out.println("Error entering information or adding " +
"values to the linked list.\n\n" + exp);
}
}
public static void List_All()
{
System.out.println();
System.out.println("Here is the List of integers in the LinkedList...");
System.out.println();
Integer_LinkedList_Management.listAllValues();
}
}
|
JavaScript
|
UTF-8
| 1,082 | 3.171875 | 3 |
[] |
no_license
|
import { cons } from '@hexlet/pairs';
import getRandomInt from '../get-random-int.js';
import { roundsCount } from '../index.js';
const operands = ['+', '-', '*'];
const calculate = (num1, num2, operand) => {
let result;
switch (operand) {
case '+':
result = num1 + num2;
break;
case '-':
result = num1 - num2;
break;
case '*':
result = num1 * num2;
break;
default:
return null;
}
return result;
};
const getRandomItem = (items) => items[getRandomInt(0, items.length)];
const rule = 'What is the result of the expression?';
const calcGame = () => {
const roundsData = [];
for (let i = 0; i < roundsCount; i += 1) {
const firstNum = getRandomInt(1, 25);
const operand = getRandomItem(operands);
const secondNum = getRandomInt(1, 25);
const expression = `${firstNum} ${operand} ${secondNum}`;
const correctAnswer = calculate(firstNum, secondNum, operand).toString();
roundsData.push(cons(expression, correctAnswer));
}
return { rule, roundsData };
};
export default calcGame;
|
Python
|
UTF-8
| 3,731 | 3.25 | 3 |
[] |
no_license
|
''' all CRUD-related routes go here '''
from flask import Blueprint, render_template, request, redirect, url_for, flash
# importing Blueprint obj from "flask" module to group all CRUD-related routes together
# also importing 'render_template' to render HTML templates
# also importing 'request' to house/handle data going from client to server
# also importing 'redirect' and 'url_for' to redirect to other routes
# also importing 'flash' to display user feedback msgs
CRUD = Blueprint('CRUD', __name__) # our CRUD blueprint of routes
from .forms import AddNote, DeleteNote, EditNote, SignIn, SignUp # importing our Flask form classes
from .models import Note # importing our Flask data models (a data model is a class that represents a table in the DB)
from notes_app import notesDB # importing our DB object
# 'add note' route -> 'C' part of 'CRUD'
@CRUD.route('/add_note', methods=['POST', 'GET'])
def add_note():
notes_form = AddNote(request.form) # take 'note form' data values from 'request' obj, assign them to 'notes_form' properties to get our final 'note form' obj
if request.method == 'POST' and notes_form.validate():
# if 'note form' data is POSTing and valid, put the data in a new Note() obj and insert obj into DB
new_note = Note(title=notes_form.title.data, content=notes_form.content.data) # 'Note' object with our 'note form' data -> basically, our new note
notesDB.session.add(new_note) # insert new note into DB
notesDB.session.commit() # commit the change
flash("Note added successfully!") # success feedback msg
return redirect(url_for("index")) # redirect to home page
else:
# no data POSTing? return 'add note' pg
return render_template('add_note.html', notes_form=notes_form)
# 'edit note' route -> 'U' part of 'CRUD'
@CRUD.route('/edit_note/<int:id>', methods=['POST', 'GET'])
def edit_note(id):
# the ID of the note we wanna edit is passed in, so we'll use that to get the note
new_form = EditNote(request.form) # 'edit note' form will probs have same format as 'add note' form
tbe = Note.query.get_or_404(id) # we query the DB for the note we need; if we can't find it, we return a 404 error; else, we get the correct note. also, 'tbe/TBE' = 'to be edited' here
# TBE note contents will be used as placeholders for new form
if request.method == "POST" and new_form.validate():
# if data is POSTed and valid, insert new note data into DB
tbe.title = new_form.title.data
tbe.content = new_form.content.data
notesDB.session.commit() # commit the change
flash(f"Note '{tbe.title}' edited successfully!") # success feedback msg
return redirect(url_for("index")) # redirect back to home page
else:
# no data POSTed?
return render_template('edit_note.html', tbe=tbe, new_form=new_form) # return 'edit note' pg with current note TBE and with 'edit note' form
# 'delete note' route -> 'D' part of 'CRUD'
@CRUD.route('/delete_note/<int:id>', methods=['POST', 'GET'])
def delete_note(id):
# 1) using passed-in ID, query DB for note to be deleted (TBD/tbd)
tbd = Note.query.get_or_404(id)
title = tbd.title
# 2) initialize 'DeleteNote' form
form = DeleteNote(request.form)
if request.method == 'POST' and form.validate():
# if data (note title) is POSTing and is valid, delete 'tbd' note from DB
notesDB.session.delete(tbd)
notesDB.session.commit() # commit the change
flash(f"Note '{title}' deleted successfully!") # success feedback msg
return redirect(url_for("index")) # redirect back to home page
else:
return render_template('delete_note.html', tbd=tbd, form=form)
|
C++
|
UTF-8
| 2,022 | 2.578125 | 3 |
[] |
no_license
|
#pragma once
#include <Windows.h>
#include <unordered_map>
#include "PEParser.h"
#include "File.h"
static constexpr const wchar_t* ORIGINAL_FILENAME_STRING{ L"OriginalFilename" };
static constexpr const wchar_t* COMPANY_NAME_STRING{ L"CompanyName" };
static constexpr const wchar_t* FILE_VERSION_STRING{ L"FileVersion" };
static constexpr const wchar_t* PRODUCT_NAME_STRING{ L"ProductName" };
static constexpr const wchar_t* PRODUCT_VERSION_STRING{ L"ProductVersion" };
typedef enum
{
ITEM_ID_VERSION_RESOURCE_ORIGINAL_FILE_NAME = 1,
ITEM_ID_VERSION_RESOURCE_COMPANY_NAME,
ITEM_ID_VERSION_RESOURCE_PRODUCT_NAME,
ITEM_ID_VERSION_RESOURCE_PRODUCT_VERSION,
ITEM_ID_VERSION_RESOURCE_FILE_VERSION,
ITEM_ID_VERSION_RESOURCE_SUBSYSTEM
} VersionInfoItemIDs;
using versionInformationMap = std::unordered_map<VersionInfoItemIDs, std::wstring>;
inline static void CHECK_RET_CODE(bool ret, const char* err)
{
if (!ret)
{
throw std::runtime_error("Failed to parse: " + std::string(err));
}
}
inline static bool searchVersionInfoByName(
const version_values_t& versionInfo,
const std::wstring& key,
std::wstring& value)
{
bool found{ false };
for (auto& i : versionInfo)
{
if (wcsncmp(i.first.c_str(), key.c_str(), key.size()) == 0)
{
value = i.second;
found = true;
break;
}
}
return found;
}
inline static void UPDATE_VERSION_INFO(
version_values_t& versionInfo,
const std::wstring& attributeName,
VersionInfoItemIDs attributeID,
versionInformationMap& entity)
{
std::wstring attributeValue;
if (searchVersionInfoByName(versionInfo, attributeName, attributeValue))
{
entity.emplace(attributeID, attributeValue);
}
else
{
std::wcout << "Failed to find attribute " << attributeName <<
" in VS_VERSIONINFO resource." << std::endl;
}
}
class MetadataEx
{
public:
MetadataEx(std::string file)
: m_file(file)
{};
bool getVersionInformation(
versionInformationMap& entity);
File m_file;
};
|
Markdown
|
UTF-8
| 2,335 | 2.90625 | 3 |
[
"Apache-2.0"
] |
permissive
|
<br />
<p align="center">
<h2 align="center">Autoscaler tool for Cloud Spanner</h2>
<img alt="Autoscaler" src="../resources/BlogHeader_Database_3.max-2200x2200.jpg">
<p align="center">
<!-- In one sentence: what does the code in this directory do? -->
Set up the Autoscaler using Terraform configuration files
<br />
<a href="../README.md">Home</a>
·
<a href="../scaler/README.md">Scaler function</a>
·
<a href="../poller/README.md">Poller function</a>
·
<a href="../forwarder/README.md">Forwarder function</a>
·
Terraform configuration
<br />
<a href="per-project/README.md">Per-Project</a>
·
<a href="centralized/README.md">Centralized</a>
·
<a href="distributed/README.md">Distributed</a>
</p>
</p>
## Table of Contents
* [Table of Contents](#table-of-contents)
* [Overview](#overview)
## Overview
This directory contains Terraform configuration files to quickly set up the
infrastructure of your Autoscaler.
The Autoscaler can be deployed following three different
strategies. Choose the one that is best suited to your technical and
operational needs.
* [Per-Project deployment](per-project/README.md): all the components of the
Autoscaler reside in the same project as your Spanner
instances. This deployment is ideal for independent teams who want to self
manage the configuration and infrastructure of their own Autoscalers. It is
also a good entry point for testing the Autoscaler capabilities.
* [Centralized deployment](centralized/README.md): a slight departure from the
  per-project deployment, where all the components of the Cloud Spanner
Autoscaler reside in the same project, but the Spanner instances may be
located in different projects. This deployment is suited for a team managing
the configuration and infrastructure of several Autoscalers in a central
place.
* [Distributed deployment](distributed/README.md): all the components of the
Autoscaler reside in a single project, with the exception of
Cloud Scheduler. This deployment is a hybrid where teams who own the Spanner
instances want to manage only the Autoscaler configuration parameters for
their instances, but the rest of the Autoscaler infrastructure is managed by
  a central team.
|
SQL
|
UTF-8
| 315 | 2.5625 | 3 |
[] |
no_license
|
CREATE TABLE IF NOT EXISTS Topics (
TopicId INT PRIMARY KEY AUTO_INCREMENT,
CourseId INT REFERENCES Courses(CourseId),
Title VARCHAR(256)
);
INSERT IGNORE INTO Topics(TopicId, CourseId, Title) VALUES (1, 1, "Hello World!");
INSERT IGNORE INTO Topics(TopicId, CourseId, Title) VALUES (2, 2, "Jason!");
|
Python
|
UTF-8
| 1,207 | 3.21875 | 3 |
[] |
no_license
|
import numpy as np
from enum import IntEnum
from time import sleep
COMMON_VALUE = 100
STANDARD_DEVIATION = 20.0
class OfferResult(IntEnum):
RAISE = 1
LEAVE = 2
class Distribution(IntEnum):
GAUSSIAN = 1
UNIFORM = 2
class Buyer(object):
def __init__(self, ID, common_value=COMMON_VALUE, distribution=Distribution.GAUSSIAN):
if distribution == Distribution.GAUSSIAN:
self.__value = round(common_value + np.random.normal(loc=0, scale=STANDARD_DEVIATION))
elif distribution == Distribution.UNIFORM:
self.__value = round(common_value + np.random.uniform(-STANDARD_DEVIATION, STANDARD_DEVIATION + 1.0))
else:
self.__value = round(common_value)
self.__ID = ID
def raise_offer(self, current_value):
sleep(float(np.random.randint(50, 300)) / 1000.0)
if current_value < self.__value:
return (OfferResult.RAISE, current_value + 1)
else:
return (OfferResult.LEAVE, 0)
value = property(lambda self: self.__value)
ID = property(lambda self: self.__ID)
if __name__ == "__main__":
x = [Buyer(i + 1).value for i in range(10)]
print(x)
print(len(set(x)))
|
Java
|
UTF-8
| 4,102 | 2.296875 | 2 |
[] |
no_license
|
package com.example.user.educationhunt.adapter;
import android.app.Activity;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Filter;
import android.widget.Filterable;
import android.widget.TextView;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.NetworkImageView;
import com.example.user.educationhunt.R;
import com.example.user.educationhunt.pojos.AppController;
import com.example.user.educationhunt.pojos.OurSchool;
import com.example.user.educationhunt.pojos.OurUniversity;
import java.util.ArrayList;
import java.util.List;
/**
* Created by user on 11/16/2016.
*/
public class UniversityListAdapter extends BaseAdapter implements Filterable {
private Activity activity;
private LayoutInflater inflater;
private List<OurUniversity> ourUniversityList;
private List<OurUniversity> ourUniversityList_original;
ImageLoader imageLoader = AppController.getInstance().getImageLoader();
private ItemFilter itemFilter = new ItemFilter();
public UniversityListAdapter(Activity activity, List<OurUniversity> ourUniversityList) {
this.activity = activity;
this.ourUniversityList = ourUniversityList;
this.ourUniversityList_original = ourUniversityList;
}
@Override
public int getCount() {
return ourUniversityList.size();
}
@Override
public Object getItem(int location) {
return ourUniversityList.get(location);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if (inflater == null)
inflater = (LayoutInflater) activity
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
if (convertView == null)
convertView = inflater.inflate(R.layout.list_university_view, null);
if (imageLoader == null)
imageLoader = AppController.getInstance().getImageLoader();
NetworkImageView universityLogo = (NetworkImageView) convertView
.findViewById(R.id.universityLogo);
TextView universityName = (TextView) convertView.findViewById(R.id.universityName);
TextView universityLocation = (TextView) convertView.findViewById(R.id.universityLocation);
OurUniversity m = ourUniversityList.get(position);
universityLogo.setImageUrl(m.getUniversityLogo(), imageLoader);
universityName.setText("Name: " + m.getUniversityName());
universityLocation.setText("Address: " + String.valueOf(m.getUniversityAddress()));
return convertView;
}
@Override
public Filter getFilter() {
return itemFilter;
}
private class ItemFilter extends Filter {
@Override
protected FilterResults performFiltering(CharSequence charSequence) {
String filterString = charSequence.toString().toLowerCase();
FilterResults filterResults = new FilterResults();
final List<OurUniversity> ourUniversity = ourUniversityList_original;
int count = ourUniversity.size();
List<OurUniversity> filteredUniversityList = new ArrayList<>();
String filterableString;
for (int i = 0; i < count; i++) {
filterableString = ourUniversity.get(i).getUniversityName();
if (filterableString.toLowerCase().contains(filterString)) {
filteredUniversityList.add(ourUniversity.get(i));
}
}
filterResults.values = filteredUniversityList;
filterResults.count = filteredUniversityList.size();
return filterResults;
}
@Override
protected void publishResults(CharSequence charSequence, FilterResults filterResults) {
ourUniversityList = (List<OurUniversity>) filterResults.values;
notifyDataSetChanged();
}
}
}
|
Markdown
|
UTF-8
| 2,154 | 2.828125 | 3 |
[] |
no_license
|
# BookAppointment
A React-Django-rest SPA for Hospital Appointment Mangement.
A very basic app:
1. Login
2. Booking appointments as a patient
3. Checking appointments as the hospital staff.
I have NOT included registration in the frontend, although it can easily be done through the django-admin site.
It accounts for the busy schedules of hospitals: they can add an appointment to block out a particular time frame as a break.
It works well with multiple hospitals and patients, as I've tried to keep the backend code efficient and fast.
#### Backend
It uses Django REST Framework as the backend. The browsable API is also installed.
Debug mode is kept ON, as I created this project for learning purposes and not for production.
Functional views have been used instead of class-based ones.
#### Frontend
React is used for the frontend.
It is a Single Page App.
Material UI is used throughout the App for faster development.
I've used functional components and state hooks instead of class-based ones for a change.
## Starting servers
### Django rest API (Backend):
1. I've included conda and pip requirement files, reqConda.txt and reqPip.txt, which one can use for the Python requirements.
2. After installing the requirements, go to the project's root directory and use `python manage.py runserver` to start the server.
### React App(Frontend):
1. Install npm
2. Go to the /frontend directory.
3. Use `npm start` to start the server.
## To Access Admin-Site:
Create a superuser:
1. Go to the root directory
2. Use `django-admin createsuperuser`
3. Type in name, email and password as required.
## To Register Patient/Hospital:
1. Log in to the admin site; if unchanged, it will be at http://127.0.0.1:8000/admin/
2. Go to the user model
3. Create a User (only email and password are necessary)
4. Go to Patient/Hospital model
5. Register the information using the User object created in step 3.
Once registered, one can interact using the React server.
Default : http://127.0.0.1:3000
##### NOTE: IF YOU CHANGE DJANGO API SERVER OR PORT YOU NEED TO CHANGE THE SAME AT /frontend/.env
|
Java
|
UTF-8
| 1,063 | 2.140625 | 2 |
[] |
no_license
|
package com.example.demo.mybatisplus.pojo;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import java.io.Serializable;
/**
 * ActiveRecord pattern
*/
@TableName(value="resource")
public class Resource extends Model implements Serializable {
@TableId(value="id")
private Integer id;
@TableField(value="url")
private String url;
@TableField(value="res_name")
private String resName;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getResName() {
return resName;
}
public void setResName(String resName) {
this.resName = resName;
}
@Override
protected Serializable pkVal() {
return this.id;
}
}
|
C++
|
UTF-8
| 13,566 | 3.359375 | 3 |
[] |
no_license
|
#include <fstream>
#include <string>
using namespace std;
class Automata
{
char** transitionTable;
int nrOfStates;
int initialState;
int finalState;
public:
// Constructors
Automata() { nrOfStates = 0; initialState = -1; finalState = -1; transitionTable = NULL; }
Automata(int,int,int);
Automata(string);
Automata(const Automata&);
// Setters and getters
int getNrOfStates() { return nrOfStates; }
int getInitialState() { return initialState; }
int getFinalState() { return finalState; }
char** getTransitionTable() { return transitionTable; }
void setNrOfStates(int arg) { nrOfStates = arg; }
void setInitialState(int arg) { initialState = arg; }
void setFinalState(int arg) { finalState = arg; }
// Other functions
void shift(int);
void addEmptyState();
void addTransition(char value, int state1, int state2) { transitionTable[state1][state2] = value; }
void output(ofstream&);
};
Automata::Automata(int nrOfStatesArg, int initialStateArg, int finalStateArg)
{
nrOfStates = nrOfStatesArg;
initialState = initialStateArg;
finalState = finalStateArg;
transitionTable = new char*[nrOfStates];
for (int i = 0; i < nrOfStates; ++i)
transitionTable[i] = new char[nrOfStates]();
}
Automata::Automata(const Automata& objectArg)
{
nrOfStates = objectArg.nrOfStates;
initialState = objectArg.initialState;
finalState = objectArg.finalState;
transitionTable = new char*[nrOfStates];
for (int i = 0; i < nrOfStates; ++i)
transitionTable[i] = new char[nrOfStates]();
for (int i = 0; i < nrOfStates; ++i)
for (int j = 0; j < nrOfStates; ++j)
transitionTable[i][j] = objectArg.transitionTable[i][j];
}
void Automata::shift(int shiftArg)
{
int newNrOfStates = nrOfStates + shiftArg;
char** newTransitionTable = new char*[newNrOfStates];
for (int i = 0; i < newNrOfStates; ++i)
newTransitionTable[i] = new char[newNrOfStates]();
for (int i = 0; i < nrOfStates; ++i)
for (int j = 0; j < nrOfStates; ++j)
newTransitionTable[i + shiftArg][j + shiftArg] = transitionTable[i][j];
	for (int i = 0; i < nrOfStates; ++i)
		delete[] transitionTable[i];
	delete[] transitionTable;
	transitionTable = newTransitionTable;
nrOfStates = newNrOfStates;
initialState = initialState + shiftArg;
finalState = finalState + shiftArg;
}
void Automata::addEmptyState()
{
char** newTransitionTable = new char*[nrOfStates + 1];
for (int i = 0; i < nrOfStates + 1; ++i)
newTransitionTable[i] = new char[nrOfStates+1]();
for (int i = 0; i < nrOfStates; ++i)
for (int j = 0; j < nrOfStates; ++j)
newTransitionTable[i][j] = transitionTable[i][j];
	for (int i = 0; i < nrOfStates; ++i)
		delete[] transitionTable[i];
	delete[] transitionTable;
	transitionTable = newTransitionTable;
nrOfStates = nrOfStates + 1;
}
Automata buildSimpleAutomata(char arg)
{
Automata B(2, 0, 1);
B.addTransition(arg, 0, 1);
return B;
}
class NodeStackAutomata
{
char value;
Automata* automataPointer;
NodeStackAutomata* nextNode;
public:
NodeStackAutomata() { value = 0; automataPointer = NULL; nextNode = NULL; }
void setValue(char arg) { value = arg; }
char getValue() { return value; }
NodeStackAutomata* getNextNode() { return nextNode; }
void setNextNode(NodeStackAutomata* arg) { nextNode = arg; }
Automata* getAutomataPointer() { return automataPointer; }
void setAutomataPointer(Automata* automataPointerArg){ automataPointer = automataPointerArg; }
};
class StackAutomata
{
NodeStackAutomata* head;
public:
StackAutomata() { head = NULL; }
StackAutomata(NodeStackAutomata*);
void push(char);
bool isEmpty() { if (head) return false; return true; }
bool isOperation(char arg) { if (arg == 42 || arg == 40 || arg == 41 || arg == 124) return true; return false; }
bool isState(char arg) { if (!isOperation(arg)) return true; return false; }
NodeStackAutomata* getHead() { return head; }
Automata* pop();
};
StackAutomata::StackAutomata(NodeStackAutomata* nodeArg)
{
head = nodeArg;
}
void StackAutomata::push(char arg)
{
if (!head)
{
head = new NodeStackAutomata;
head->setValue(arg);
head->setAutomataPointer(NULL);
head->setNextNode(NULL);
}
else
{
NodeStackAutomata* auxNode = new NodeStackAutomata;
auxNode->setValue(arg);
auxNode->setAutomataPointer(NULL);
auxNode->setNextNode(head);
head = auxNode;
}
}
Automata* star(Automata* automataArg)
{
automataArg->shift(1);
automataArg->addEmptyState();
automataArg->addTransition(-1, automataArg->getFinalState(), automataArg->getInitialState());
automataArg->addTransition(-1, 0, automataArg->getInitialState());
automataArg->addTransition(-1, automataArg->getFinalState(), automataArg->getNrOfStates()-1);
automataArg->addTransition(-1, 0, automataArg->getNrOfStates()-1);
automataArg->setInitialState(0);
automataArg->setFinalState(automataArg->getNrOfStates() - 1);
return automataArg;
}
Automata* unionAutomata(Automata* automataArg1, Automata* automataArg2)
{
automataArg1->shift(1);
automataArg2->shift(automataArg1->getNrOfStates());
Automata* newAutomata = new Automata(*automataArg2);
char** auxTransitionTable = automataArg1->getTransitionTable();
for (int i = 0; i < automataArg1->getNrOfStates(); ++i)
for (int j = 0; j < automataArg1->getNrOfStates(); ++j)
newAutomata->addTransition(auxTransitionTable[i][j], i, j);
newAutomata->setInitialState(0);
newAutomata->addTransition(-1, 0, automataArg1->getInitialState());
newAutomata->addTransition(-1, 0, automataArg2->getInitialState());
newAutomata->setInitialState(0);
newAutomata->addEmptyState();
newAutomata->setFinalState(newAutomata->getNrOfStates() - 1);
newAutomata->addTransition(-1, automataArg1->getFinalState(), newAutomata->getFinalState());
newAutomata->addTransition(-1, automataArg2->getFinalState(), newAutomata->getFinalState());
return newAutomata;
}
Automata* concatenation(Automata* automataArg1, Automata* automataArg2)
{
automataArg2->shift(automataArg1->getNrOfStates() - 1);
Automata* newAutomata = new Automata(*automataArg2);
char** auxTransitionTable = automataArg1->getTransitionTable();
for (int i = 0; i < automataArg1->getNrOfStates(); ++i)
for (int j = 0; j < automataArg1->getNrOfStates(); ++j)
newAutomata->addTransition(auxTransitionTable[i][j], i, j);
newAutomata->setInitialState(0);
return newAutomata;
}
Automata* StackAutomata::pop()
{
if (head->getValue() == ')')
{
int counter = 0;
StackAutomata restOfStack;
NodeStackAutomata* currentNode = head->getNextNode();
while (counter != -1)
{
if (currentNode->getValue() == '(')
counter--;
else if (currentNode->getValue() == ')')
counter++;
if (counter != -1)
{
restOfStack.push(currentNode->getValue());
currentNode = currentNode->getNextNode();
}
}
// Reversing restofStack
StackAutomata auxRestOfStack;
NodeStackAutomata* auxPointer = restOfStack.getHead();
while (auxPointer)
{
auxRestOfStack.push(auxPointer->getValue());
auxPointer = auxPointer->getNextNode();
}
Automata* convertedAutomata = new Automata;
convertedAutomata = auxRestOfStack.pop();
head = currentNode->getNextNode();
NodeStackAutomata* auxNode = new NodeStackAutomata;
auxNode->setValue(-2);
auxNode->setAutomataPointer(convertedAutomata);
auxNode->setNextNode(head);
head = auxNode;
StackAutomata newStack(head);
return newStack.pop();
}
else if (head->getValue() == '*')
{
if (isState(head->getNextNode()->getValue()))
{
Automata auxAutomata = buildSimpleAutomata(head->getNextNode()->getValue());
Automata* convertedAutomata = new Automata(auxAutomata);
Automata* newAutomata = star(convertedAutomata);
NodeStackAutomata* auxNode = new NodeStackAutomata;
auxNode->setValue(-2);
auxNode->setAutomataPointer(newAutomata);
auxNode->setNextNode(head->getNextNode()->getNextNode());
head = auxNode;
StackAutomata newStack(head);
return newStack.pop();
}
else if (head->getNextNode()->getValue() == ')')
{
int counter = 0;
StackAutomata restOfStack;
NodeStackAutomata* currentNode = head->getNextNode()->getNextNode();
while (counter != -1)
{
if (currentNode->getValue() == '(')
counter--;
else if (currentNode->getValue() == ')')
counter++;
if (counter != -1)
{
restOfStack.push(currentNode->getValue());
currentNode = currentNode->getNextNode();
}
}
// Reversing restofStack
StackAutomata auxRestOfStack;
NodeStackAutomata* auxPointer = restOfStack.getHead();
while (auxPointer)
{
auxRestOfStack.push(auxPointer->getValue());
auxPointer = auxPointer->getNextNode();
}
Automata* convertedAutomata = new Automata;
convertedAutomata = restOfStack.pop();
Automata* newAutomata = star(convertedAutomata);
NodeStackAutomata* auxNode = new NodeStackAutomata;
auxNode->setValue(-2);
auxNode->setAutomataPointer(newAutomata);
auxNode->setNextNode(currentNode->getNextNode());
head = auxNode;
StackAutomata newStack(head);
return newStack.pop();
}
}
else
{
Automata headAutomata;
if (head->getValue()!=-2)
headAutomata = buildSimpleAutomata(head->getValue());
else headAutomata = *(head->getAutomataPointer());
Automata* convertedAutomata = new Automata;
if (head->getNextNode()!=NULL && head->getNextNode()->getValue() == '|')
{
StackAutomata restOfStack;
NodeStackAutomata* currentNode = head->getNextNode()->getNextNode();
int counter = 0;
while (currentNode && !(currentNode->getValue()=='|' && counter==0) && counter != -1)
{
if (currentNode->getValue() == '(')
counter--;
else if (currentNode->getValue() == ')')
counter++;
if (counter != -1)
{
restOfStack.push(currentNode->getValue());
currentNode = currentNode->getNextNode();
}
}
StackAutomata auxRestOfStack;
NodeStackAutomata* auxPointer = restOfStack.getHead();
while (auxPointer)
{
auxRestOfStack.push(auxPointer->getValue());
auxPointer = auxPointer->getNextNode();
}
convertedAutomata = auxRestOfStack.pop();
Automata* newAutomata = unionAutomata(convertedAutomata, &headAutomata);
NodeStackAutomata* auxNode = new NodeStackAutomata;
auxNode->setValue(-2);
auxNode->setAutomataPointer(newAutomata);
auxNode->setNextNode(currentNode);
head = auxNode;
StackAutomata newStack(head);
return newStack.pop();
}
else
{
if (head->getNextNode() == NULL)
{
Automata auxAutomata;
if (head->getValue()!=-2)
auxAutomata = buildSimpleAutomata(head->getValue());
else auxAutomata = *(head->getAutomataPointer());
Automata* newAutomata = new Automata(auxAutomata);
return newAutomata;
}
else
{
NodeStackAutomata* currentNode = head->getNextNode();
StackAutomata restOfStack;
int counter = 0;
while (currentNode && !(currentNode->getValue() == '|' && counter == 0) && counter != -1)
{
if (currentNode->getValue() == '(')
counter--;
else if (currentNode->getValue() == ')')
counter++;
if (counter != -1)
{
restOfStack.push(currentNode->getValue());
currentNode = currentNode->getNextNode();
}
}
StackAutomata auxRestOfStack;
NodeStackAutomata* auxPointer = restOfStack.getHead();
while (auxPointer)
{
auxRestOfStack.push(auxPointer->getValue());
auxPointer = auxPointer->getNextNode();
}
convertedAutomata = auxRestOfStack.pop();
Automata* newAutomata = concatenation(convertedAutomata, &headAutomata);
NodeStackAutomata* auxNode = new NodeStackAutomata;
auxNode->setValue(-2);
auxNode->setAutomataPointer(newAutomata);
auxNode->setNextNode(currentNode);
head = auxNode;
StackAutomata newStack(head);
return newStack.pop();
}
}
}
}
Automata::Automata(string regexArg)
{
StackAutomata parserStack;
for (int i = 0; i < regexArg.length(); ++i)
{
parserStack.push(regexArg[i]);
}
Automata* B = new Automata();
B = parserStack.pop();
// Copying the new automata to the current one
nrOfStates = B->getNrOfStates();
initialState = B->getInitialState();
finalState = B->getFinalState();
char**auxTransitionTable = B->getTransitionTable();
transitionTable = new char*[nrOfStates];
for (int i = 0; i < nrOfStates; ++i)
transitionTable[i] = new char[nrOfStates];
for (int i = 0; i < nrOfStates; ++i)
for (int j = 0; j < nrOfStates; ++j)
transitionTable[i][j] = auxTransitionTable[i][j];
}
void Automata::output(ofstream& fout)
{
fout << "Number of States: " << nrOfStates << '\n';
fout << "Start state: " << initialState << '\n';
fout << "Final state: " << finalState << '\n';
for (int i = 0; i < nrOfStates; ++i)
for (int j = 0; j < nrOfStates; ++j)
{
if (transitionTable[i][j] == -1)
fout << i << " -> E -> " << j << '\n';
else if (transitionTable[i][j] != 0)
fout << i << " -> " << transitionTable[i][j] << " -> " << j << '\n';
}
}
int main()
{
ifstream fin("input.in");
ofstream fout("output.out");
// Reading the regex
string regexInput;
fin >> regexInput;
// Building automata from regex
Automata A(regexInput);
// Printing the automata
A.output(fout);
return 0;
}
|
PHP
|
UTF-8
| 1,033 | 2.75 | 3 |
[] |
no_license
|
<?php
echo "<head>
<style>
table, td {
border: 1px solid black;
border-collapse: collapse;
}
th {
border: 1px solid red;
border-collapse: collapse;
background-color:rgb(200, 200, 200);
}
th, td {
padding: 5px;
text-align: left;
}
</style>
</head>";
$user = $_POST["User"];
$numPosts = 0;
echo "<h1> Posts Deleted.</h1>";
echo "<h3> Post ID's that were deleted:</h3>";
$conn = new mysqli("mysql.eecs.ku.edu", "hheeb", "aiH7vo7e", "hheeb");
/* check connection */
if ($conn->connect_error) {
die("Connection failed: " . $conn->connect_error);
}
$query = "SELECT post_id FROM Posts";
if ($result = $conn->query($query)) {
/* fetch associative array */
while ($row = $result->fetch_assoc()) {
$x = $row["post_id"];
if(isset($_POST['post' . $x]) && $_POST['post' . $x] == 'Bike'){
echo $x . "<br>";
$query2 = "DELETE FROM Posts WHERE post_id='" . "$x'";
$conn->query($query2);
}
}
/* free result set */
$result->free();
}
$conn->close();
?>
|
Python
|
UTF-8
| 358 | 2.78125 | 3 |
[] |
no_license
|
import numpy as np
def sample_correlation(x1, x2):
    """Sample Pearson correlation between two 1-D arrays; returns 0 if either is constant."""
    nens = x1.size
x1_mean = np.mean(x1)
x2_mean = np.mean(x2)
x1p = x1 - x1_mean
x2p = x2 - x2_mean
cov = np.sum(x1p * x2p)
x1_norm = np.sum(x1p ** 2)
x2_norm = np.sum(x2p ** 2)
if(x1_norm == 0. or x2_norm == 0):
corr = 0.
else:
corr = cov/np.sqrt(x1_norm * x2_norm)
return corr
|
C#
|
UTF-8
| 10,229 | 3.46875 | 3 |
[] |
no_license
|
using System;
using System.Globalization;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using System.Numerics;
namespace Exercise5
{
class Program
{
public struct Complex
{
private Double m_real;
private Double m_imaginary;
private const Double LOG_10_INV = 0.43429448190325;
public Double Real
{
get
{
return m_real;
}
}
public Double Imaginary
{
get
{
return m_imaginary;
}
}
public Double Magnitude
{
get
{
return Complex.Abs(this);
}
}
public Double Phase
{
get
{
return Math.Atan2(m_imaginary, m_real);
}
}
public static readonly Complex Zero = new Complex(0.0, 0.0);
public static readonly Complex One = new Complex(1.0, 0.0);
public static readonly Complex ImaginaryOne = new Complex(0.0, 1.0);
public Complex(Double real, Double imaginary) /* Constructor to create a complex number with rectangular co-ordinates */
{
this.m_real = real;
this.m_imaginary = imaginary;
}
public static Complex FromPolarCoordinates(Double magnitude, Double phase) /* Factory method to take polar inputs and create a Complex object */
{
return new Complex((magnitude * Math.Cos(phase)), (magnitude * Math.Sin(phase)));
}
public static Complex Negate(Complex value)
{
return -value;
}
public static Complex Add(Complex left, Complex right)
{
return left + right;
}
public static Complex Subtract(Complex left, Complex right)
{
return left - right;
}
public static Complex Multiply(Complex left, Complex right)
{
return left * right;
}
public static Complex Divide(Complex dividend, Complex divisor)
{
return dividend / divisor;
}
public static Complex operator -(Complex value) /* Unary negation of a complex number */
{
return (new Complex((-value.m_real), (-value.m_imaginary)));
}
public static Complex operator +(Complex left, Complex right)
{
return (new Complex((left.m_real + right.m_real), (left.m_imaginary + right.m_imaginary)));
}
public static Complex operator -(Complex left, Complex right)
{
return (new Complex((left.m_real - right.m_real), (left.m_imaginary - right.m_imaginary)));
}
public static Complex operator *(Complex left, Complex right)
{
// Multiplication: (a + bi)(c + di) = (ac -bd) + (bc + ad)i
Double result_Realpart = (left.m_real * right.m_real) - (left.m_imaginary * right.m_imaginary);
Double result_Imaginarypart = (left.m_imaginary * right.m_real) + (left.m_real * right.m_imaginary);
return (new Complex(result_Realpart, result_Imaginarypart));
}
public static Complex operator /(Complex left, Complex right)
{
// Division : Smith's formula.
double a = left.m_real;
double b = left.m_imaginary;
double c = right.m_real;
double d = right.m_imaginary;
if (Math.Abs(d) < Math.Abs(c))
{
double doc = d / c;
return new Complex((a + b * doc) / (c + d * doc), (b - a * doc) / (c + d * doc));
}
else
{
double cod = c / d;
return new Complex((b + a * cod) / (d + c * cod), (-a + b * cod) / (d + c * cod));
}
}
public static Double Abs(Complex value)
{
if (Double.IsInfinity(value.m_real) || Double.IsInfinity(value.m_imaginary))
{
return double.PositiveInfinity;
}
// |value| == sqrt(a^2 + b^2)
// sqrt(a^2 + b^2) == a/a * sqrt(a^2 + b^2) = a * sqrt(a^2/a^2 + b^2/a^2)
// Using the above we can factor out the square of the larger component to dodge overflow.
double c = Math.Abs(value.m_real);
double d = Math.Abs(value.m_imaginary);
if (c > d)
{
double r = d / c;
return c * Math.Sqrt(1.0 + r * r);
}
else if (d == 0.0)
{
return c; // c is either 0.0 or NaN
}
else
{
double r = c / d;
return d * Math.Sqrt(1.0 + r * r);
}
}
public static Complex Conjugate(Complex value)
{
// Conjugate of a Complex number: the conjugate of x+i*y is x-i*y
return (new Complex(value.m_real, (-value.m_imaginary)));
}
public static Complex Reciprocal(Complex value)
{
// Reciprocal of a Complex number : the reciprocal of x+i*y is 1/(x+i*y)
if ((value.m_real == 0) && (value.m_imaginary == 0))
{
return Complex.Zero;
}
return Complex.One / value;
}
public static bool operator ==(Complex left, Complex right)
{
return ((left.m_real == right.m_real) && (left.m_imaginary == right.m_imaginary));
}
public static bool operator !=(Complex left, Complex right)
{
return ((left.m_real != right.m_real) || (left.m_imaginary != right.m_imaginary));
}
public override bool Equals(object obj)
{
if (!(obj is Complex)) return false;
return this == ((Complex)obj);
}
public bool Equals(Complex value)
{
return ((this.m_real.Equals(value.m_real)) && (this.m_imaginary.Equals(value.m_imaginary)));
}
public static implicit operator Complex(Int16 value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(Int32 value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(Int64 value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(UInt16 value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(UInt32 value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(UInt64 value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(SByte value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(Byte value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(Single value)
{
return (new Complex(value, 0.0));
}
public static implicit operator Complex(Double value)
{
return (new Complex(value, 0.0));
}
public static explicit operator Complex(BigInteger value)
{
return (new Complex((Double)value, 0.0));
}
public static explicit operator Complex(Decimal value)
{
return (new Complex((Double)value, 0.0));
}
public override String ToString()
{
return (String.Format(CultureInfo.CurrentCulture, "({0}, {1})", this.m_real, this.m_imaginary));
}
public String ToString(String format)
{
return (String.Format(CultureInfo.CurrentCulture, "({0}, {1})", this.m_real.ToString(format, CultureInfo.CurrentCulture), this.m_imaginary.ToString(format, CultureInfo.CurrentCulture)));
}
public String ToString(IFormatProvider provider)
{
return (String.Format(provider, "({0}, {1})", this.m_real, this.m_imaginary));
}
public String ToString(String format, IFormatProvider provider)
{
return (String.Format(provider, "({0}, {1})", this.m_real.ToString(format, provider), this.m_imaginary.ToString(format, provider)));
}
public override Int32 GetHashCode()
{
Int32 n1 = 99999997;
Int32 hash_real = this.m_real.GetHashCode() % n1;
Int32 hash_imaginary = this.m_imaginary.GetHashCode();
Int32 final_hashcode = hash_real ^ hash_imaginary;
return (final_hashcode);
}
static void Main(string[] args)
{
Console.WriteLine("Hello World!");
}
}
}
}
|
Python
|
UTF-8
| 1,379 | 3.46875 | 3 |
[] |
no_license
|
import heapq
from sys import stdin
class Priority_Queue:
def __init__(self):
self.items = []
def push(self, name, weight):
heapq.heappush(self.items, [weight, name])
def get(self):
return heapq.heappop(self.items)[1]
def dijkstras(weighted_adj_list, start, end):
priority_queue = Priority_Queue()
min_cost_at = {}
came_from = {}
priority_queue.push(start, 0)
came_from[start] = None
min_cost_at[start] = 0
while priority_queue.items:
cur = priority_queue.get()
if cur == end:
break
else:
for neighbor, travel_cost in weighted_adj_list[cur]:
new_cost = travel_cost + min_cost_at[cur]
if new_cost < min_cost_at.get(neighbor, 10 ** 12):
min_cost_at[neighbor] = new_cost
came_from[neighbor] = cur
priority_queue.push(neighbor, new_cost)
return came_from, min_cost_at
weighted_adj_list = {0:[[1,4],
[2,3]],
1:[[2,1],
[3,2]],
2:[[3,4]],
3:[[4,2]],
4:[[5,6]],
5:[]}
came_from, min_cost_at = dijkstras(weighted_adj_list, 1, 5)
print(came_from, min_cost_at, sep = '\n')
|
Ruby
|
UTF-8
| 2,248 | 2.921875 | 3 |
[] |
no_license
|
require 'minitest/autorun'
require 'minitest/pride'
require './lib/queue'
class QueueTest < MiniTest::Test
def test_it_pushes_and_pops_a_character
some_queue = ParaMorse::Queue.new
some_queue.push('0')
assert_equal '0', some_queue.pop
end
def test_it_pushes_and_pops_from_correct_ends
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
some_queue.push('1')
assert_equal '0', some_queue.pop
assert_equal '1', some_queue.pop
assert_equal '1', some_queue.pop
end
def test_it_pops_multiple_characters
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
some_queue.push('1')
assert_equal '10', some_queue.pop_multiple(2)
end
def test_it_accepts_integers_as_input
some_queue = ParaMorse::Queue.new
some_queue.push(0)
assert_equal 0, some_queue.pop
end
def test_it_counts_its_elements
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
some_queue.push('0')
assert_equal 3, some_queue.count
end
def test_it_sees_the_newest_element
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
assert_equal ['1'], some_queue.tail
end
def test_it_sees_the_newest_n_elements
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
some_queue.push('1')
some_queue.push('0')
some_queue.push('0')
assert_equal ['0', '0', '1'], some_queue.tail(3)
end
def test_it_sees_the_oldest_element
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
assert_equal ['0'], some_queue.peek
end
def test_it_sees_the_oldest_n_elements
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
some_queue.push('1')
some_queue.push('0')
some_queue.push('0')
assert_equal ['0', '1', '1'], some_queue.peek(3)
end
def test_it_can_clear_all_its_elements
some_queue = ParaMorse::Queue.new
some_queue.push('0')
some_queue.push('1')
some_queue.push('1')
some_queue.push('0')
some_queue.push('0')
some_queue.clear
assert_equal 0, some_queue.count
end
end
|
Java
|
UTF-8
| 857 | 3.859375 | 4 |
[] |
no_license
|
package farm;
import java.util.ArrayList;
import java.util.List;
public class FarmWille {
public static void main(String[] args) {
List<Animal> animals = new ArrayList<Animal>();
Animal murka = new Animal(AnimalKind.CAT, "Vaska");
murka.sleep();
murka.waikup();
animals.add(murka);
Animal randomCat = new Animal(AnimalKind.CAT);
randomCat.newName();
randomCat.sayName();
animals.add(randomCat);
Animal randomDog = new Animal(AnimalKind.DOG);
randomDog.newName();
randomDog.sayName();
animals.add(randomDog);
System.out.println("Find all cats");
for(Animal animal : animals){
if (animal.getKind() == AnimalKind.CAT){
System.out.println("Found: " + animal.getName());
}
}
System.out.println("Print all animals");
for(Animal animal : animals){
System.out.println(animal);
}
}
}
|
PHP
|
UTF-8
| 1,295 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
<?php
/**
* This Class represents SendEmailVerificationEmail Event which fires on external call
* @author Mayank Jariwala <menickwa@gmail.com>
* @package App\Events
* @version v.1.1
*/
namespace App\Events;
use App\Model\User;
use Illuminate\Broadcasting\Channel;
use Illuminate\Queue\SerializesModels;
use Illuminate\Broadcasting\PrivateChannel;
use Illuminate\Broadcasting\PresenceChannel;
use Illuminate\Foundation\Events\Dispatchable;
use Illuminate\Broadcasting\InteractsWithSockets;
use Illuminate\Contracts\Broadcasting\ShouldBroadcast;
/**
* Class SendEmailVerificationEmail
*
 * This event is fired when a new user registers with the application and the system wants to verify the user's
 * email in order to activate the account
*/
class SendEmailVerificationEmail
{
use Dispatchable, InteractsWithSockets, SerializesModels;
/**
* @var User User Object
*/
public $user;
/**
* Create a new event instance.
*
* @param User $user : User Object
*/
public function __construct(User $user)
{
$this->user = $user;
}
/**
* Get the channels the event should broadcast on.
*
* @return \Illuminate\Broadcasting\Channel|array
*/
public function broadcastOn()
{
return new PrivateChannel('channel-name');
}
}
|
C#
|
UTF-8
| 1,995 | 2.953125 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Sockets;
using System.Text;
using System.Threading.Tasks;
namespace Project.Controller {
class FileSender {
public bool IsSocketConnected(Socket s) {
return !((s.Poll(1000, SelectMode.SelectRead) && (s.Available == 0)) || !s.Connected);
}
public Socket clientSock;
public void SendFile(string FileDir, string IP) {
clientSock = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
byte[] m_clientData;
FileProperties file = new FileProperties(FileDir);
byte[] fileName = Encoding.UTF8.GetBytes(file.GetFileName()); //file name
byte[] fileData = File.ReadAllBytes(FileDir); //file
byte[] fileNameLen = BitConverter.GetBytes(fileName.Length); //length of file name
m_clientData = new byte[4 + fileName.Length + fileData.Length];
fileNameLen.CopyTo(m_clientData, 0);
fileName.CopyTo(m_clientData, 4);
fileData.CopyTo(m_clientData, 4 + fileName.Length);
try {
clientSock.Connect(IP, 9050); //target machine's ip address and the port number
if (IsSocketConnected(clientSock)) {
clientSock.Send(m_clientData);
Console.WriteLine("Shit Connected!");
} else {
Console.WriteLine("Shit Not Connected!");
}
clientSock.Close();
} catch (SocketException ex) {
if (ex.NativeErrorCode.Equals(10035))
Console.WriteLine("Still Connected, but the Send would block");
else {
string msg = "Unable to connect with partner. Error code " + ex.NativeErrorCode + "!";
Console.WriteLine(msg);
}
}
}
}
}
|
C
|
GB18030
| 1,152 | 3.109375 | 3 |
[] |
no_license
|
#include<stdio.h>
#include<stdlib.h>
#include<assert.h>
typedef char BTDataType;
typedef struct BinaryTreeNode
{
BTDataType data;
struct BinaryTreeNode* left;
struct BinaryTreeNode* right;
}BTNode;
// Build the tree from the preorder string "ABD##E#H##CF##G##", where '#' marks a null child
BTNode* BinaryTreeCreate(BTDataType* a, int* pi); // build the binary tree
void BinaryTreeDestory(BTNode* root); // destroy the binary tree
void BinaryTreeDestory1(BTNode** root);
int BinaryTreeSize(BTNode* root); // number of nodes
int BinaryTreeLeafSize(BTNode* root); // number of leaf nodes
int BinaryTreeLevelKSize(BTNode* root, int k); // number of nodes on level k
BTNode* BinaryTreeFind(BTNode* root, BTDataType x); // find the node whose value equals x
// recursive traversals
void BinaryTreePrevOrder(BTNode* root); // preorder traversal
void BinaryTreeInOrder(BTNode* root); // inorder traversal
void BinaryTreePostOrder(BTNode* root); // postorder traversal
// non-recursive versions
void BinaryTreeLevelOrder(BTNode* root); // level-order traversal
int BinaryTreeComplete(BTNode* root); // check whether the tree is a complete binary tree
void BinaryTreePrevOrderNonR(BTNode* root); // non-recursive preorder traversal
void BinaryTreeInOrderNonR(BTNode* root); // non-recursive inorder traversal
void BinaryTreePostOrderNonR(BTNode* root); // non-recursive postorder traversal
|
JavaScript
|
UTF-8
| 1,539 | 2.796875 | 3 |
[] |
no_license
|
import { io } from "socket.io-client";
import "setimmediate";
const joinRoomButton = document.getElementById("room-button");
const messageInput = document.getElementById("message-input");
const roomInput = document.getElementById("room-input");
const form = document.getElementById("form");
const socket = io("http://localhost:3000");
const userSocket = io("http://localhost:3000/user", { auth: {token: 'Test'}});
userSocket.on('connect_error', error=>{
displayMessage(error)
})
socket.emit('custom-event', 10, 'Hi', {a: 'a'});
socket.on("connect", ()=>{
displayMessage(`You connected with id ${socket.id}`);
});
socket.on('receive-message', (message) => {
displayMessage(message);
});
form.addEventListener("submit", e => {
e.preventDefault();
const message = messageInput.value;
const room = roomInput.value;
console.log(messageInput);
if (message === "") return;
displayMessage(message);
socket.emit('send-message', message, room);
messageInput.value = "";
});
joinRoomButton.addEventListener("click", () => {
const room = roomInput.value;
socket.emit('join-room', room, message => {
displayMessage(message)
})
})
function displayMessage(message) {
const div = document.createElement("div");
div.textContent = message;
document.getElementById("message-container").append(div);
}
document.addEventListener('keydown', e=> {
if(e.target.matches('input')) return
if(e.key == 'c') socket.connect()
if(e.key == 'd') socket.disconnect()
})
|
JavaScript
|
UTF-8
| 5,287 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
function myFunction() {
var x = document.getElementById("navDemo");
if (x.className.indexOf("w3-show") == -1) {
x.className += " w3-show";
} else {
x.className = x.className.replace(" w3-show", "");
}
console.log("Hi");
}
function addMoreRow() {
var table = document.getElementById("calculatorTable");
var totalRows= table.rows.length;
var row = table.insertRow(totalRows-2);
var cell1 = row.insertCell(0);
var cell2 = row.insertCell(1);
cell1.innerHTML = '<input type="text" name="Weight" size="30">';
cell2.innerHTML = '<input type="text" name="Weight" size="30">';
}
function removeRow() {
var table = document.getElementById("calculatorTable");
var totalRows= table.rows.length;
if(totalRows > 3) {
var row = table.deleteRow(totalRows-3);
}
}
function calculate() {
var errorMessage = document.getElementById("errorMessage");
errorMessage.innerHTML = "";
var table = document.getElementById("calculatorTable");
var currentAverageText = document.getElementById("currentAverageText");
var bestAverageText = document.getElementById("bestAverageText");
var worstAverageText = document.getElementById("worstAverageText");
var totalRows = table.rows.length;
var totalWeight = 0;
var missingWeight = 0;
var totalMark = 0;
var bestCase = 0;
var worstCase = 0;
var currentAverage = 0;
var multiplierFactor = 0;
var markInfo = "";
var weightInfo = "";
for(var i = 1; i < totalRows-2; i++) {
if(table.rows[i].cells[1].firstChild.value == "") {
table.rows[i].cells[1].firstChild.value = 0;
}
if(table.rows[i].cells[0].firstChild.value == "") {
table.rows[i].cells[0].firstChild.value = 0;
}
if(!Number.isFinite(parseInt(table.rows[i].cells[1].firstChild.value))) {
errorMessage.innerHTML = "Error: Can't input symbols.";
}
if(!Number.isFinite(parseInt(table.rows[i].cells[0].firstChild.value))) {
errorMessage.innerHTML = "Error: Can't input symbols and letters.";
}
}
for(var i = 1; i < totalRows-2; i++) {
totalWeight += parseInt(table.rows[i].cells[1].firstChild.value);
}
try {
if(totalWeight > 100) {
throw weightError();
}
} catch(err) {
if(err.name == "WeightError") {
errorMessage.innerHTML = "Error: You can't have a weight of more than 100%.";
}
return;
}
missingWeight = 100 - totalWeight;
multiplierFactor = 100 / totalWeight;
for(var i = 1; i < totalRows-2; i++) {
markInfo += table.rows[i].cells[0].firstChild.value + ',';
weightInfo += table.rows[i].cells[1].firstChild.value + ',';
totalMark += parseInt(table.rows[i].cells[0].firstChild.value) * parseInt(table.rows[i].cells[1].firstChild.value) * 0.01;
}
markInfo = markInfo.slice(0, -1)
weightInfo = weightInfo.slice(0, -1)
for(var i = 1; i < totalRows-2; i++) {
currentAverage += parseInt(table.rows[i].cells[0].firstChild.value) * parseInt(table.rows[i].cells[1].firstChild.value) * multiplierFactor * 0.01;
}
worstCase = totalMark;
bestCase = totalMark + missingWeight;
currentAverageText.innerHTML = "Current Average: " + currentAverage.toFixed(2);
bestAverageText.innerHTML = "Best Case Scenario Average: " + bestCase.toFixed(2);
worstAverageText.innerHTML = "Worst Case Scenario Average: " + worstCase.toFixed(2);
console.log(markInfo);
console.log(weightInfo);
setCookie("markCookie", markInfo, 365);
setCookie("weightCookie", weightInfo, 365);
}
function weightError() {
var error = new Error("Weight above 100%");
error.name = "WeightError";
return error;
}
function setCookie(cname, cvalue, exdays) {
var d = new Date();
d.setTime(d.getTime() + (exdays*24*60*60*1000));
var expires = "expires="+ d.toUTCString();
document.cookie = cname + "=" + cvalue + ";" + expires + ";path=/";
}
function getCookie(cname) {
var name = cname + "=";
var decodedCookie = decodeURIComponent(document.cookie);
var ca = decodedCookie.split(';');
for(var i = 0; i <ca.length; i++) {
var c = ca[i];
while (c.charAt(0) == ' ') {
c = c.substring(1);
}
if (c.indexOf(name) == 0) {
return c.substring(name.length, c.length);
}
}
return "";
}
function checkCookie() {
var markCookie = getCookie("markCookie");
var weightCookie = getCookie("weightCookie");
if (markCookie != "") {
var marks = markCookie.split(",");
var marksLength = marks.length;
while(marksLength > document.getElementById("calculatorTable").rows.length - 3) {
addMoreRow();
}
while(marksLength < document.getElementById("calculatorTable").rows.length - 3) {
removeRow();
}
for(var i = 1; i < document.getElementById("calculatorTable").rows.length - 2; i++) {
document.getElementById("calculatorTable").rows[i].cells[0].firstChild.value = marks[i-1];
}
}
if (weightCookie != "") {
var weights = weightCookie.split(",");
var weightsLength = weights.length;
while(weightsLength > document.getElementById("calculatorTable").rows.length - 3) {
addMoreRow();
}
for(var i = 1; i < document.getElementById("calculatorTable").rows.length - 2; i++) {
document.getElementById("calculatorTable").rows[i].cells[1].firstChild.value = weights[i-1];
}
}
}
|
Swift
|
UTF-8
| 591 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
//
// AppTabBar.swift
// Pods
//
// Created by Halin Lee on 6/26/17.
//
//
import Foundation
open class AppTabBar:UIView{
private var _titles: Array<String> = []
var titles:Array<String>{
get{
return _titles
}
set{
_titles = newValue
}
}
let line:UIView
public override init(frame:CGRect) {
line = UIView(frame:frame)
super.init(frame:frame)
}
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
|
JavaScript
|
UTF-8
| 2,771 | 3.15625 | 3 |
[] |
no_license
|
function getParameterByName(name) {
name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"),
results = regex.exec(location.search);
return results === null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
}
function extractMeanings(jsonObject) {
var tuc = jsonObject["tuc"];
var rval = [];
for (i = 0; i < tuc.length; i++) {
if (tuc[i].meanings) {
for (j = 0; j < tuc[i].meanings.length; j++) {
var newDefinition = tuc[i].meanings[j].text.toString();
if (typeof(newDefinition) === "string") {
rval.push(unescapeHTML(newDefinition));
}
}
}
if (tuc[i].phrase) {
var newDefinition = tuc[i].phrase.text.toString();
if (typeof(newDefinition) === "string") {
rval.push(unescapeHTML(newDefinition));
}
}
}
return rval;
}
function makePretty(meaningsArray) {
var newDiv = document.createElement("div");
var newHeading = document.createElement("h3");
var newHeaderText = document.createTextNode(query);
newHeading.appendChild(newHeaderText);
newDiv.appendChild(newHeading);
if (meaningsArray.length > 0) {
var newUL = document.createElement("ul");
for (k = 0; k < meaningsArray.length; k++) {
var newLI = document.createElement("li");
var newDefNode = document.createTextNode(meaningsArray[k]);
newLI.appendChild(newDefNode);
newUL.appendChild(newLI);
}
newDiv.appendChild(newUL);
} else {
var newParagraph = document.createElement("p");
var newText = document.createTextNode("No definitions could be retrieved.");
newParagraph.appendChild(newText);
newDiv.appendChild(newParagraph);
}
var currentDiv = document.getElementById("display");
document.body.insertBefore(newDiv, currentDiv);
}
// https://stackoverflow.com/questions/22279231/using-js-jquery-how-can-i-unescape-html-and-put-quotes-back-in-the-str
function unescapeHTML(str) {
return str
.replace(/&/g, "&")
.replace(/</g, "<")
.replace(/>/g, ">")
.replace(/"/g, "\"")
.replace(/‘/g, "‘")
.replace(/’/g, "’")
.replace(/“/g, "“")
.replace(/”/g, "”")
.replace(/'/g, "'")
.replace(/'/g, "'")
}
// Execution starts here
var query = getParameterByName("query");
var url = "https://glosbe.com/gapi/translate?from=jpn&dest=eng&format=json&phrase=" + query;
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.onload = function () {
var jsonResponse = JSON.parse(request.responseText);
var meaningsArray = extractMeanings(jsonResponse);
makePretty(meaningsArray);
};
request.send();
|
Python
|
UTF-8
| 1,232 | 2.828125 | 3 |
[] |
no_license
|
# We are going to split the image folders into train and test.
import pandas as pd
import os
# Captions output file paths
train_captions_file_path = '../../data/corpus/devset/dev-set/train_dev-set_video-captions-cleanup.csv'
test_captions_file_path = '../../data/corpus/devset/dev-set/test_dev-set_video-captions-cleanup.csv'
# Images folder
images_folder_folder_path = '/media/marcoscollado/pgth06a/saliency/'
train_images_folder_path = '/media/marcoscollado/pgth06a/saliency/train/'
test_images_folder_path = '/media/marcoscollado/pgth06a/saliency/test/'
df_train_ground_truth = pd.read_csv(train_captions_file_path)
df_test_ground_truth = pd.read_csv(test_captions_file_path)
for index, row in df_train_ground_truth.iterrows():
video_name = row['id']
video_name = video_name.split('.')[0]
print('Moving:' + video_name)
os.renames(os.path.join(images_folder_folder_path, video_name), os.path.join(train_images_folder_path, video_name))
for index, row in df_test_ground_truth.iterrows():
video_name = row['id']
video_name = video_name.split('.')[0]
print('Moving:' + video_name)
os.renames(os.path.join(images_folder_folder_path, video_name), os.path.join(test_images_folder_path, video_name))
|
Java
|
UTF-8
| 5,003 | 2.15625 | 2 |
[
"Apache-2.0"
] |
permissive
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.flink;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.time.LocalDateTime;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LocalZonedTimestampType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.DateTimeUtil;
import org.apache.iceberg.util.UUIDUtil;
public class RowDataWrapper implements StructLike {
private final LogicalType[] types;
private final PositionalGetter<?>[] getters;
private RowData rowData = null;
public RowDataWrapper(RowType rowType, Types.StructType struct) {
int size = rowType.getFieldCount();
types = (LogicalType[]) Array.newInstance(LogicalType.class, size);
getters = (PositionalGetter[]) Array.newInstance(PositionalGetter.class, size);
for (int i = 0; i < size; i++) {
types[i] = rowType.getTypeAt(i);
getters[i] = buildGetter(types[i], struct.fields().get(i).type());
}
}
public RowDataWrapper wrap(RowData data) {
this.rowData = data;
return this;
}
@Override
public int size() {
return types.length;
}
@Override
public <T> T get(int pos, Class<T> javaClass) {
if (rowData.isNullAt(pos)) {
return null;
} else if (getters[pos] != null) {
return javaClass.cast(getters[pos].get(rowData, pos));
}
Object value = RowData.createFieldGetter(types[pos], pos).getFieldOrNull(rowData);
return javaClass.cast(value);
}
@Override
public <T> void set(int pos, T value) {
throw new UnsupportedOperationException(
"Could not set a field in the RowDataWrapper because rowData is read-only");
}
private interface PositionalGetter<T> {
T get(RowData data, int pos);
}
private static PositionalGetter<?> buildGetter(LogicalType logicalType, Type type) {
switch (logicalType.getTypeRoot()) {
case TINYINT:
return (row, pos) -> (int) row.getByte(pos);
case SMALLINT:
return (row, pos) -> (int) row.getShort(pos);
case CHAR:
case VARCHAR:
return (row, pos) -> row.getString(pos).toString();
case BINARY:
case VARBINARY:
if (Type.TypeID.UUID == type.typeId()) {
return (row, pos) -> UUIDUtil.convert(row.getBinary(pos));
} else {
return (row, pos) -> ByteBuffer.wrap(row.getBinary(pos));
}
case DECIMAL:
DecimalType decimalType = (DecimalType) logicalType;
return (row, pos) ->
row.getDecimal(pos, decimalType.getPrecision(), decimalType.getScale()).toBigDecimal();
case TIME_WITHOUT_TIME_ZONE:
// Time in RowData is in milliseconds (Integer), while iceberg's time is microseconds
// (Long).
return (row, pos) -> ((long) row.getInt(pos)) * 1_000;
case TIMESTAMP_WITHOUT_TIME_ZONE:
TimestampType timestampType = (TimestampType) logicalType;
return (row, pos) -> {
LocalDateTime localDateTime =
row.getTimestamp(pos, timestampType.getPrecision()).toLocalDateTime();
return DateTimeUtil.microsFromTimestamp(localDateTime);
};
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
return (row, pos) -> {
TimestampData timestampData = row.getTimestamp(pos, lzTs.getPrecision());
return timestampData.getMillisecond() * 1000
+ timestampData.getNanoOfMillisecond() / 1000;
};
case ROW:
RowType rowType = (RowType) logicalType;
Types.StructType structType = (Types.StructType) type;
RowDataWrapper nestedWrapper = new RowDataWrapper(rowType, structType);
return (row, pos) -> nestedWrapper.wrap(row.getRow(pos, rowType.getFieldCount()));
default:
return null;
}
}
}
|
C++
|
UTF-8
| 1,316 | 3.21875 | 3 |
[] |
no_license
|
#include "../header/myheader.h"
class LT0832
{
public:
// hints: reverse(begin(p), end(p))
// for (auto& row : A) for (int& i: row) i ^= 1;
// int lo = 0, hi = A[0].length - 1;
// while (lo <= hi) {
// middle column.
//Runtime: 8 ms, faster than 86.66% of C++ online submissions for Flipping an Image.
//Memory Usage: 9 MB, less than 100.00% of C++ online submissions for Flipping an Image.
// Flip each row horizontally, then swap 0s and 1s.
vector<vector<int>> lt0832a(vector<vector<int>>& A)
{
int sz2 = A.size();
for (int i = 0; i < sz2; i++)
{
for (int j = 0, mxc = (sz2 + 1) / 2; j < mxc; j++)
{
int t2 = A[i][j];
A[i][j] = A[i][sz2 - 1 - j] == 1 ? 0 : 1;
if (j != sz2 - 1 - j)
A[i][sz2 - 1 - j] = t2 == 1 ? 0 : 1;
}
#ifdef __test
for_each(begin(A[i]), end(A[i]), fun_cout);
cout<<endl;
#endif // __test
}
return A;
}
};
int main()
{
vector<vector<int>> vv = {{1,1,0},{1,0,1},{0,0,0}};
LT0832 lt;
vector<vector<int>> vv2 = lt.lt0832a(vv);
for (auto& p : vv2)
{
for_each(begin(p), end(p), fun_cout);
cout<<endl;
}
cout<<endl;
return 0;
}
|
Markdown
|
UTF-8
| 9,063 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
# Additional Configuration
::: danger This Software is Abandoned
This documentation is for **abandoned software** which does not receive any security updates or support
from the community. This documentation has been left accessible for historical reasons.
You should be installing and using [Wings](/wings/1.0/installing.md) in production environments with
[Pterodactyl Panel 1.0](/panel/1.0/getting_started.md).
:::
[[toc]]
::: warning
These are advanced configurations for the daemon. You risk breaking your daemon and making containers un-usable if
you modify something incorrectly. Proceed at your own risk, and only if you know what each configuration value does.
:::
The documentation below uses dot-notated JSON to explain where each setting should live. You will need to manually
expand this syntax when adding to the `core.json` file for the Daemon. For example, something like `internals.throttle.enabled`
would be expanded to the JSON below.
``` json
{
"internals": {
"throttle": {
"enabled": true
}
}
}
```
## Output Throttles
There are a few throttle limits built into the Daemon to keep people from causing issues with data volume and CPU usage.
Under normal circumstances users should not encounter these limits. You might see the occasional data throttling
warning while starting a server or when there is a sudden spike in data output.
If you're seeing more servers than you expected being killed as a result of the Daemon throttler, you can make
adjustments to the settings below. Please note the configs below are in JSON dot-notation and should be expanded
out into a normal JSON object.
| Setting Path | Default Value | Notes |
| ------------ | ------------- | ----- |
| `enabled` | true | Determines if the throttle (and associated values below) should be used. |
| `kill_at_count` | 5 | The number of warnings that can accumulate for a particular instance before the server process is killed. The decay time below affects how quickly this value is decreased. |
| `decay` | 10 | The number of seconds that a server process must go without triggering a data throttle warning before the throttle count begins decreasing. This loop is processed every 5 seconds and will decrement the throttle count by one when the process goes more than this number of seconds without a data throttle occurring. |
| `bytes` | 30720 | :warning: _(removed in v0.5.5)_ The maximum number of bytes of data that can be output in the defined interval before a warning occurs. |
| `lines` | 1000 | :warning: _(added in v0.5.6)_ The number of lines that can be output by the server process in the defined check interval time. By default, 5,000 lines in ~500ms results in a server process kill. |
| `check_interval_ms` | 100 | The number of milliseconds between the throttle resetting the used bytes or line count. |
Please note that all of the settings above are in the `internals.throttle.X` path. So, `enabled` is actually `internals.throttle.enabled`.
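To illustrate, the throttle settings above expand in `core.json` roughly as shown below; the values are simply the defaults listed in the table (the removed `bytes` key is omitted), so only include the keys you actually want to change.

``` json
{
    "internals": {
        "throttle": {
            "enabled": true,
            "kill_at_count": 5,
            "decay": 10,
            "lines": 1000,
            "check_interval_ms": 100
        }
    }
}
```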
## Custom Network Interfaces
If for whatever reason you need to modify the network interfaces used for Pterodactyl's local Docker network you
can do so by modifying the `core.json` file for the daemon. In most cases you'll just be modifying the network
name to allow your servers to use the host network stack. To do so, just change `docker.network.name` to be `host`
rather than `pterodactyl_nw` as shown below.
::: warning
While changing to the host network stack does allow servers running on Pterodactyl to have direct access to local
interfaces and bind to specific IP addresses (required for some Steam games), it is not recommended on public
installations of Pterodactyl (where you have other users running servers).
Using the `host` stack removes many network specific protections afforded by Docker, and will allow server processes
to access anything on the host, as well as bind to any IP or Port they wish.
:::
::: danger
Any changes to the network after the daemon has been started will require you to remove the docker network and restart the daemon. Any servers on the host need to be stopped before and most likely rebuilt.
The following will stop the daemon, remove the network, and start the daemon again. Run at your own risk.
`systemctl stop wings && docker network rm pterodactyl_nw && systemctl start wings`
:::
``` json{5}
"docker": {
"socket": "/var/run/docker.sock",
"autoupdate_images": true,
"network": {
"name": "pterodactyl_nw",
"interfaces": {
"v4": {
"subnet": "172.18.0.0/16",
"gateway": "172.18.0.1"
}
}
},
"interface": "172.18.0.1"
},
```
## Private Registries
| Setting Path | Default Value | Notes |
| ------------ | ------------- | ----- |
| `username` | _none_ | The username to use when connecting to the registry. |
| `password` | _none_ | The password associated with the account. |
| `images` | _none_ | An array of images that are associated with the private registry. |
| `auth` | _none_ | |
| `email` | _none_ | |
| `serveraddress` | _none_ | The address to the server the registry is located on. |
| `key` | _none_ | A pre-generated base64 encoded authentication string. If provided none of the above options are required. |
Please note that all of the settings above are in the `docker.registry.X` path. So, `username` is actually `docker.registry.username`.
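As a rough sketch only, a registry configuration might be expanded like the example below; the username, password, server address, and image name are placeholders rather than real values.

``` json
{
    "docker": {
        "registry": {
            "username": "example-user",
            "password": "example-password",
            "serveraddress": "registry.example.com",
            "images": [
                "registry.example.com/example/image:latest"
            ]
        }
    }
}
```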
## Security Policies
This daemon ships with a very strict security configuration designed to limit access to the host system, and mitigate
a large range of potential attack vectors. However, some users might need to tweak these settings, or are running on
a private instance and are willing to decrease some of the security measures.
| Setting Path | Default Value | Notes |
| ------------ | ------------- | ----- |
| `ipv6` | true | Set this to false to disable IPv6 networking on the pterodactyl0 interface. |
| `internal` | false | Set this to true to prevent any external network access to all containers on the pterodactyl0 interface. |
| `enable_icc` | true | Set this to false to disallow containers to access services running on the host system's non-public IP addresses. Setting this to false does make it impossible to connect (from a container) to MySQL/Redis/etc. running on the host system without using the public IP address. |
| `enable_ip_masquerade` | true | Set this to false to disable IP Masquerading on the pterodactyl0 interface. |
Please note that all of the settings above are in the `docker.policy.network.X` path. So, `ipv6` is actually `docker.policy.network.ipv6`.
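For reference, these settings expand under `docker.policy.network` roughly as follows; the values shown are the defaults from the table, so a stock installation does not need this block at all.

``` json
{
    "docker": {
        "policy": {
            "network": {
                "ipv6": true,
                "internal": false,
                "enable_icc": true,
                "enable_ip_masquerade": true
            }
        }
    }
}
```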
## Container Policy
| Setting Path | Default Value | Notes |
| ------------ | ------------- | ----- |
| `tmpfs` | `rw,exec,nosuid,size=50M` | These are the arguments used for mounting a `tmpfs` directory into containers to allow certain programs to run. |
| `log_driver` | none | :warning: This option was **removed** in `v0.6` and is forcibly set to `json-file`. The log driver to use for containers. We default to `none` to mitigate a potential DoS attack vector if a server were to spam log output. |
| `log_opts` | array | |
| `log_opts.max_size` | `5m` | The maximum size of the server output log file created by Docker. |
| `log_opts.max_files` | `1` | The maximum number of files that Docker will create with output from the server. |
| `readonly_root` | true | Determines if the root filesystem of the container should be readonly. |
| `securityopts` | array | An array of security options to apply to a container. The default array is provided below. |
| `cap_drop` | array | An array of linux capabilities to drop from the container (in addition to ones [dropped by docker already](https://docs.docker.com/engine/security/security/#linux-kernel-capabilities). A listing of the default array is below. |
Please note that all of the settings above are in the `docker.policy.container.X` path. So, `tmpfs` is actually `docker.policy.container.tmpfs`.
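As an example, a container policy block with the documented defaults might look roughly like the following; the `securityopts` and `cap_drop` keys take the default arrays shown below.

``` json
{
    "docker": {
        "policy": {
            "container": {
                "tmpfs": "rw,exec,nosuid,size=50M",
                "log_opts": {
                    "max_size": "5m",
                    "max_files": "1"
                },
                "readonly_root": true
            }
        }
    }
}
```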
### Default Security Opts Array
``` json
[
'no-new-privileges',
]
```
### Default Capabilities Drop Array
::: warning
Starting with `v0.6` of the Daemon, the following previously _dropped_ capabilities are available in containers: `chown`, `kill`, `setgid`, and `setuid`.
:::
``` json
[
'setpcap',
'mknod',
'audit_write',
'net_raw',
'dac_override',
'fowner',
'fsetid',
'net_bind_service',
'sys_chroot',
'setfcap',
]
```
## Enabling Cloudflare
Enabling Cloudflare on the daemon isn't particularly useful since users do not connect directly to the daemon port, and users need an unproxied hostname to access any servers on the node. As a result it's not possible to conceal the IP address of your node machine, but some people want to enable it regardless.
Cloudflare only proxies the default daemon port (8080) when using HTTP. In order to get the daemon to work with Cloudflare when HTTPS is enabled you must change the daemon port to one that Cloudflare will proxy such as 8443. Since Cloudflare only proxies HTTP/HTTPS traffic for non-enterprise plans you cannot proxy the SFTP port.
|
C#
|
UTF-8
| 2,770 | 3.515625 | 4 |
[] |
no_license
|
using System;
using System.Collections;
using System.Collections.Generic;
namespace MidTerm
{
public class Product : IComparable<Product>
{
public int Id { get; set; }
public double Price { get; set; }
public string Name { get; set; }
public string Description { get; set; }
//public Product(string csvdata)
//{
// string[] tokens = csvdata.Split(',');
// Id = ParseInt(tokens[0]);
// Price = ParseDouble(tokens[1]);
// Name = tokens[2];
// Description = tokens[3];
//}
//public int ParseInt(string s)
//{
// int n = 0;
// try
// {
// n = Int16.Parse(s);
// }
// catch
// {
// Console.WriteLine($"Can not parse {s} !");
// }
// return n;
//}
//public double ParseDouble(string s)
//{
// double n = 0;
// try
// {
// n = double.Parse(s);
// }
// catch
// {
// Console.WriteLine($"Can not parse {s} !");
// }
// return n;
//}
public int CompareTo(Product other)
{
return Price.CompareTo(other.Price);
}
//public static int CompareById(Product p1, Product p2)
//{
// return p1.Id.CompareTo(p2.Id);
//}
//public static int CompareByPrice(Product p1, Product p2)
//{
// return p1.Price.CompareTo(p2.Price);
//}
//public static int CompareByName(Product p1, Product p2)
//{
// return string.Compare(p1.Name, p2.Name);
//}
//public static int CompareByDescription(Product p1, Product p2)
//{
// return string.Compare(p1.Description, p2.Description);
//}
public static Comparison<Product> CompareById = delegate (Product p1, Product p2)
{
return p1.Id.CompareTo(p2.Id);
};
public static Comparison<Product> CompareByName = delegate (Product p1, Product p2)
{
return string.Compare(p1.Name, p2.Name);
};
public static Comparison<Product> CompareByPrice = delegate (Product p1, Product p2)
{
return p1.Price.CompareTo(p2.Price);
};
public static Comparison<Product> CompareByDescription = delegate (Product p1, Product p2)
{
return string.Compare(p1.Description, p2.Description);
};
public override string ToString()
{
return $"# {Id} : {Name}, Price : {Price}, Description : {Description}";
}
}
}
|
Python
|
UTF-8
| 17,205 | 2.515625 | 3 |
[] |
no_license
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 1 19:19:50 2020
@author: pierre
"""
from os.path import join, isfile
import pickle
from numpy import array, mean, max, std, histogram, correlate, argmax, arange, sqrt, cumsum
from numpy.polynomial.polynomial import polyfit
from matplotlib import pyplot as plt
from scipy.signal import find_peaks
from scipy.stats import pearsonr
from math import isnan
cache_dir = "/mnt/data2/mosab_incucyte/processed/"
DT = 0.33
pxsize = 0.2646 #mm
well_id = "B3_1"
timestamps = ["01d19h48m", "02d10h48m", "02d20h21m"]
colors = {timestamps[0]:"#00FF49FF", timestamps[1]: "#FF4900FF", timestamps[2]: "#4900FFFF"}
colors_alpha = {timestamps[0]:"#00FF494B", timestamps[1]: "#FF49004B", timestamps[2]: "#4900FF4B"}
traces = {}
objs_cents = {}
for timestamp in timestamps:
fname = "/mnt/data2/mosab_incucyte/processed/traces_" + well_id + "_" + timestamp + ".csv"
traces[timestamp] = []
with open(fname, 'r') as f:
for line in f:
line = line.rstrip("\n").split(" ")
traces[timestamp].append([float(e) for e in line[1].split(",")])
fname = "/mnt/data2/mosab_incucyte/processed/objects_" + well_id + "_" + timestamp + ".csv"
objs_cents[timestamp] = []
with open(fname, 'r') as f:
for line in f:
line = line.strip("\n").split(" ")
tmp = array([[float(e.split(",")[0]), float(e.split(",")[1])] for e in line[1:]])
objs_cents[timestamp].append(mean(tmp, axis=0))
avg_sig = {timestamp: [mean([e[i] for e in traces[timestamp]]) for i in range(len(traces[timestamp][0]))]
for timestamp in timestamps}
avg_sig_pks = {ts:[e for e in find_peaks(avg_sig[ts])[0] if avg_sig[ts][e] > 20] for ts in timestamps}
plt.figure()
plt.bar(timestamps, [len(traces[ts]) for ts in timestamps])
plt.ylabel('Number of detected neurons')
plt.savefig("/tmp/num_neurons" + well_id+ ".png", dpi=300, bbox_inches='tight')
print("Num cells")
for ts in timestamps:
print("{}: {}".format(ts, len(traces[ts])))
for ts in timestamps:
plt.figure()
for trace in traces[ts]:
plt.plot(array(range(len(trace))) * DT, trace, linewidth=0.5)
plt.plot(array(range(len(avg_sig[ts]))) * DT, avg_sig[ts], 'k', linewidth=2)
#plt.plot([e * DT for e in avg_sig_pks[ts]], [avg_sig[ts][e] for e in avg_sig_pks[ts]], 'xr')
plt.ylim([0, 115])
plt.ylabel("Intensity (AU)")
plt.xlabel("Time (s)")
plt.savefig("/tmp/traces_" + well_id + "_" + ts + ".png", dpi=300, bbox_inches='tight')
print("Trace intensities")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean([max(trace) for trace in traces[ts]]), std([max(trace) for trace in traces[ts]])))
plt.figure()
o, b = histogram([max(trace) for trace in traces[timestamps[0]]], bins=30)
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram([max(trace) for trace in traces[timestamps[1]]], bins=30)
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram([max(trace) for trace in traces[timestamps[2]]], bins=30)
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Max cell intensity (AU)')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/intensity_hists_" + well_id+ ".png", dpi=300, bbox_inches='tight')
print("Mean Trace intensities")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean([mean(trace) for trace in traces[ts]]), std([mean(trace) for trace in traces[ts]])))
plt.figure()
for ts in timestamps:
o, b = histogram([mean(trace) for trace in traces[ts]], bins=30)
plt.bar(b[:-1], o / sum(o), color=colors_alpha[ts], align="edge", width=b[1]- b[0])
plt.xlabel('Mean cell intensity (AU)')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/mean_intensity_hists_" + well_id+ ".png", dpi=300, bbox_inches='tight')
tmp = {ts:[] for ts in timestamps}
[[tmp[ts].extend([e[f] for f in avg_sig_pks[ts]]) for e in traces[ts]] for ts in timestamps]
print("Intensities at avg signal peaks")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(tmp[ts]), std(tmp[ts])))
plt.figure()
for ts in timestamps:
o, b = histogram(tmp[ts], bins=30)
plt.bar(b[:-1], o / sum(o), color=colors_alpha[ts], align="edge", width=b[1]- b[0])
plt.xlabel('Mean intensities at avg signal peaks (AU)')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/mean_intensity_at_avg_peak_hists_" + well_id+ ".png", dpi=300, bbox_inches='tight')
plt.figure()
o, b = histogram([max(trace) for trace in traces[timestamps[0]]], arange(0, 120, 2))
plt.plot(b[:-1], cumsum(o), color="#00FF494B")
o, b = histogram([max(trace) for trace in traces[timestamps[1]]], arange(0, 120, 2))
plt.plot(b[:-1], cumsum(o), color="#FF49004B")
o, b = histogram([max(trace) for trace in traces[timestamps[2]]], arange(0, 120, 2))
plt.plot(b[:-1], cumsum(o), color="#4900FF4B")
plt.xlabel('Max cell intensity (AU)')
plt.ylabel('Number of cells')
plt.legend(timestamps)
plt.savefig("/tmp/intensity_cum_" + well_id+ ".png", dpi=300, bbox_inches='tight')
plt.figure()
for ts in timestamps:
pks = find_peaks(avg_sig[ts])[0]
plt.plot(array(range(0, len(avg_sig[ts]) - pks[0])) * DT, avg_sig[ts][pks[0]:], color=colors[ts])
plt.legend(timestamps)
plt.ylabel("Intensity (AU)")
plt.xlabel("Time to first peak (s)")
plt.ylim([0, 55])
#plt.xlim([0, 175])
plt.savefig("/tmp/intensity_averages_" + well_id+ ".png", dpi=300, bbox_inches='tight')
pks_delta = {}
pks_amp = {}
for timestamp in timestamps:
pks_delta[timestamp] = []
pks_amp[timestamp] = []
for trace in traces[timestamp]:
cur_pks = find_peaks(trace)[0]
Mtr = mean(trace)
SDtr = std(trace)
cur_pks = [pk for pk in cur_pks if trace[pk] > 2*Mtr]
pks_delta[timestamp].append([(cur_pks[i+1] - cur_pks[i]) * DT for i in range(0, len(cur_pks) - 1) if (cur_pks[i+1] - cur_pks[i]) * DT < 50])
pks_amp[timestamp].append([trace[pk] for pk in cur_pks])
plt.savefig("/tmp/yoyo" + well_id+ ".png", dpi=300, bbox_inches='tight')
tmp = {ts:[] for ts in timestamps}
[[tmp[ts].extend(e) for e in pks_delta[ts]] for ts in timestamps]
fig = plt.figure()
o, b = histogram(tmp[timestamps[0]], bins=array(range(0, 120, 4)) * DT)
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[1]], bins=array(range(0, 120, 4)) * DT)
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[2]], bins=array(range(0, 120, 4)) * DT)
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Time between peaks (s)')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/peak_period_" + well_id+ ".png", dpi=300, bbox_inches='tight')
print("Peak period")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(tmp[ts]), std(tmp[ts])))
print("Peak frequency")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean([1 / e for e in tmp[ts]]), std([1 / e for e in tmp[ts]])))
tmp = {ts:[] for ts in timestamps}
[[tmp[ts].extend(e) for e in pks_amp[ts]] for ts in timestamps]
plt.figure()
o, b = histogram(tmp[timestamps[0]], bins=40)
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[1]], bins=40)
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[2]], bins=40)
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Peak intensity (AU)')
plt.ylabel("Frequency")
plt.legend(timestamps)
plt.savefig("/tmp/peak_intens_" + well_id+ ".png", dpi=300, bbox_inches='tight')
print("Peak intensity")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(tmp[ts]), std(tmp[ts])))
corr_cache_f = join(cache_dir, "correlations_" + well_id + "_" + timestamp + ".pkl")
if isfile(corr_cache_f):
with open(corr_cache_f, "rb") as f:
dat = pickle.load(f)
#rs = dat["rs"]
ccs_max = dat["ccs_max"]
ccs_0 = dat["ccs_0"]
ccs_shift = dat["ccs_shift"]
avg_ccs_max = dat["avg_ccs_max"]
else:
#rs = {ts:[] for ts in timestamps}
#ccs = {ts:[] for ts in timestamps}
ccs_0 = {ts:[] for ts in timestamps}
ccs_max = {ts:[] for ts in timestamps}
ccs_shift = {ts:[] for ts in timestamps}
avg_ccs_max = {ts:[] for ts in timestamps}
for ts in timestamps:
for i in range(len(traces[ts])): #range(1):
#rs[ts].append([])
#ccs[ts].append([])
ccs_0[ts].append([])
ccs_max[ts].append([])
ccs_shift[ts].append([])
tmp = []
for j in range(len(traces[ts])):
if i == j:
#rs[ts][-1].append([])
continue
#rs[ts][-1].append(pearsonr(traces[ts][i], traces[ts][j]))
ccov = correlate(traces[ts][i] - mean(traces[ts][i]), traces[ts][j] - mean(traces[ts][j]), mode='full')
ccor = ccov / (len(traces[ts][i]) * std(traces[ts][i]) * std(traces[ts][j]))
#ccs[ts][-1].append(ccor)
tmp.append(max(ccor))
if j > i:
ccs_0[ts][-1].append(ccor[int((len(ccor) - 1) / 2)])
ccs_max[ts][-1].append(max(ccor))
ccs_shift[ts][-1].append(argmax(ccor))
avg_ccs_max[ts].append(mean([e for e in tmp if not isnan(e)]))
with open(corr_cache_f, 'wb') as f:
pickle.dump({"timestamps": timestamps, "ccs_max": ccs_max,
"ccs_shift": ccs_shift, "avg_ccs_max": avg_ccs_max,
"ccs_0": ccs_0}, f) #"rs": rs,
#idx_max = [0]
#plt.figure()
#for i in range(1, len(traces[ts])):
# plt.plot(range(len(ccs[ts][0][i])), ccs[ts][0][i])
# idx_max.append(argmax(ccs[ts][0][i]) - (len(ccs[ts][0][i]) - 1) / 2)
#plt.figure()
#plt.plot(range(len(traces[ts][0])), traces[ts][0], 'k')
#for i in range(1, len(traces[ts])):
# plt.plot(arange(len(traces[ts][j])) + idx_max[i], traces[ts][j])
#plt.savefig("/tmp/test" + well_id+ ".png", dpi=300)
#plt.figure()
#plt.plot(range(len(ccs[ts][0][1])), ccs[ts][0][1])
# tst = correlate(traces[ts][0] - mean(traces[ts][0]), traces[ts][0] - mean(traces[ts][0]), mode='full')
# tst = tst / (len(traces[ts][0]) * std(traces[ts][0]) * std(traces[ts][0]))
# plt.figure()
# plt.plot(range(len(tst)), tst)
#plt.figure()
#plt.hist(idx_max, bins=range(-230, 50), density=True)
#plt.savefig('/tmp/a.png', dpi=300)
tmp = {ts:[] for ts in timestamps}
for ts in timestamps:
[tmp[ts].extend([f for e in ccs_max[ts] for f in e if not isnan(f)])]
print("Mean max correlations:")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(tmp[ts]), std(tmp[ts])))
plt.figure()
o, b = histogram(tmp[timestamps[0]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[1]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[2]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Correlation at optimal τ')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/cross_corr_max_hist_" + well_id+ ".png", dpi=300, bbox_inches='tight')
tmp = {ts:[] for ts in timestamps}
for ts in timestamps:
[tmp[ts].extend([f for e in ccs_0[ts] for f in e if not isnan(f)])]
print("Mean correlations at τ=0:")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(tmp[ts]), std(tmp[ts])))
plt.figure()
o, b = histogram(tmp[timestamps[0]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[1]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram(tmp[timestamps[2]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Correlation at τ=0')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/cross_corr_0_hist_" + well_id+ ".png", dpi=300, bbox_inches='tight')
plt.figure()
for ts in timestamps:
o, b = histogram(tmp[ts], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o, color=colors_alpha[ts], align="edge", width=b[1]- b[0])
plt.xlabel('Correlation at τ=0')
plt.ylabel('Count')
plt.legend(timestamps)
plt.savefig("/tmp/cross_corr_0_hist_cnt_" + well_id+ ".png", dpi=300, bbox_inches='tight')
print("Mean max avg correlations:")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(avg_ccs_max[ts]), std(avg_ccs_max[ts])))
plt.figure()
o, b = histogram(avg_ccs_max[timestamps[0]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram(avg_ccs_max[timestamps[1]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram(avg_ccs_max[timestamps[2]], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Average trace correlation')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/cross_corr_avgmax_hist_" + well_id+ ".png", dpi=300, bbox_inches='tight')
tmp = {ts:[] for ts in timestamps}
for ts in timestamps:
[tmp[ts].extend(e) for e in ccs_shift[ts]]
plt.figure()
o, b = histogram([e - (len(traces[timestamps[0]][0]) - 1) for e in tmp[timestamps[0]]], bins=arange(-20, 20, 1))
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram([e - (len(traces[timestamps[1]][0]) - 1) for e in tmp[timestamps[1]]], bins=arange(-20, 20, 1))
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram([e - (len(traces[timestamps[2]][0]) - 1) for e in tmp[timestamps[2]]], bins=arange(-20, 20, 1))
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Cross-correlation phase shift')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/cross_corr_shift_hist_" + well_id+ ".png", dpi=300, bbox_inches='tight')
print("Individual correlations")
idxs_ts = {timestamps[0]: 966, timestamps[1]: 968, timestamps[2]: 966}
dist_corrs = {}
for ts in timestamps:
dist_corrs[ts] = []
for i in range(len(avg_ccs_max[ts])):
idxs = list(range(0, i)) + list(range(i+1, len(avg_ccs_max[ts])))
xs = [sqrt(sum((objs_cents[ts][k] - objs_cents[ts][i])**2)) * pxsize for k in idxs]
#ys = ccs_max[ts][i] + [ccs_max[ts][j][i-j-1] for j in range(i)]
ys = [ccs_0[ts][j][i-j-1] for j in range(i)] + ccs_0[ts][i]
dist_corrs[ts].append(pearsonr(xs, ys)[0])
if i == idxs_ts[ts]:
b, m = polyfit(xs, ys, 1)
print("{},{}: r={}".format(ts, i, m))
plt.figure()
plt.plot(xs, ys, 'x', color=colors[ts])
plt.plot(xs, b + m * array(xs), 'k')
plt.ylabel('Max cross-correlation')
plt.xlabel('Distance (mm)')
plt.ylim([0, 1])
plt.xlim([0, 300])
plt.savefig("/tmp/cross_corr_dist_ex_" + ts + "_" + well_id + ".png", dpi=300, bbox_inches='tight')
print("Correlation distance")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(dist_corrs[ts]), std(dist_corrs[ts])))
plt.figure()
o, b = histogram(dist_corrs[timestamps[0]], bins=arange(-1, 1, 0.05))
plt.bar(b[:-1], o / sum(o), color="#00FF494B", align="edge", width=b[1]- b[0])
o, b = histogram(dist_corrs[timestamps[1]], bins=arange(-1, 1, 0.05))
plt.bar(b[:-1], o / sum(o), color="#FF49004B", align="edge", width=b[1]- b[0])
o, b = histogram(dist_corrs[timestamps[2]], bins=arange(-1, 1, 0.05))
plt.bar(b[:-1], o / sum(o), color="#4900FF4B", align="edge", width=b[1]- b[0])
plt.xlabel('Trace correlation vs distance')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/cross_corr_dist_hist_" + well_id+ ".png", dpi=300, bbox_inches='tight')
corr_to_avg_0 = {ts:[] for ts in timestamps}
for ts in timestamps:
norm_avg_sig = avg_sig[ts] - mean(avg_sig[ts])
for i in range(len(traces[ts])):
ccov = correlate(traces[ts][i] - mean(traces[ts][i]), norm_avg_sig, mode='full')
ccor = ccov / (len(traces[ts][i]) * std(traces[ts][i]) * std(avg_sig[ts]))
corr_to_avg_0[ts].append(ccor[int((len(ccor) - 1) / 2)])
print("Correlation with avg signal")
for ts in timestamps:
print("{}: {:.2f} ± {:.2f}".format(ts, mean(corr_to_avg_0[ts]), std(corr_to_avg_0[ts])))
plt.figure()
for ts in timestamps:
o, b = histogram(corr_to_avg_0[ts], bins=arange(0, 1, 0.01))
plt.bar(b[:-1], o / sum(o), color=colors_alpha[ts], align="edge", width=b[1]- b[0])
plt.xlabel('Correlation to avg signal at τ=0')
plt.ylabel('Frequency')
plt.legend(timestamps)
plt.savefig("/tmp/cross_corr_0_avgsig_" + well_id+ ".png", dpi=300, bbox_inches='tight')
|
Swift
|
UTF-8
| 434 | 3.109375 | 3 |
[] |
no_license
|
public protocol Entity {
init()
}
public protocol Repository {
init(orm: Orm)
}
public protocol Orm {
init(entityType: Entity.Type)
func find(id: Int) -> Entity
}
public class FakeOrm: Orm {
let entityType: Entity.Type
required public init(entityType: Entity.Type) {
self.entityType = entityType
}
public func find(id: Int) -> Entity {
return entityType.init()
}
}
|
Markdown
|
UTF-8
| 1,653 | 2.78125 | 3 |
[] |
no_license
|
## Get the fastest China IP address from multiple websites
## Crawl Chinese IPs from the web and pick the fastest one
#### The need: outside mainland China, domestic music apps (NetEase Cloud Music, Xiami, QQ Music, etc.) cannot play many tracks because the overseas region has no licensing
#### The root cause: you are not inside mainland China
#### The options: tunnel back into China || find a proxy server online
#### The trade-off: both approaches solve the problem, but there are too many choices on the market, and you cannot know how fast each one is without testing them one by one, which is a hassle
<br>
Enough rambling; here is what these few lines of scrappy code actually do:<br>
First, a small crawler scrapes N IPs and their ports from three websites (N can be changed),<br>
Next, each IP is pinged and the packet-loss rate is extracted with a regular expression,<br>
Then, the IPs with zero packet loss are kept and the requests module measures the response time of each proxy address,<br>
Finally, the IP with the shortest response time is selected and its IP and port are printed<br>
<br>
Below is my own output (the actual output will differ depending on the network conditions at the time and the IPs that were crawled)<br>
Requesting http://cn-proxy.com/<br>
Requesting http://free-proxy.cz/zh/proxylist/country/CN/all/ping/all<br>
Requesting https://www.kuaidaili.com/free/intr/<br>
Requesting http://www.xicidaili.com/nt/<br>
Got 6 IPs from cn-proxy.com<br>
Got 15 IPs from free-proxy.cz<br>
Got 200 IPs from xicidaili.com<br>
Got 30 IPs from kuaidaili.com<br>
<br>
Pinging total 251 IPs...<br>
<br>
Finding the best from 20 qualified IPs<br>
<br>
The best ip address is: <br>
218.108.7.98<br>
Its port number is: <br>
3128<br>
Its response time is: <br>
0.231226 seconds<br>
This is for backup: 120.26.110.59:8080<br>
It takes 76 seconds<br>
(\*^o^\*)
|
C++
|
UTF-8
| 9,291 | 2.6875 | 3 |
[
"BSL-1.0"
] |
permissive
|
// Copyright (C) 2013 Martin Moene.
//
// Implementation note: this version targets C++03.
//
// Use, modification, and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// The idea is based on the Expected<T> class template authored by
// Andrei Alexandrescu. The interface is based on the std::optional proposal
// authored by Fernando Cacciola and Andrzej Krzemienski and the
// Boost.Expected proposal authored by Pierre Talbot.
#include "expected_lite.hpp"
#include <cassert>
#include <iostream>
#include <stdexcept>
#include <string>
struct caller {
template <class T> caller(T fun) { fun(); }
};
# define CAT2(X, Y) X ## Y
# define CAT(X, Y) CAT2(X, Y)
# define TEST(NAME) caller CAT(__VAR, __LINE__) = []
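// Each TEST(name) expands to a 'caller' object initialized from a lambda, so the test body runs during static initialization, before main().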
enum State
{
sDefaultConstructed,
sValueCopyConstructed,
sValueMoveConstructed,
sCopyConstructed,
sMoveConstructed,
sMoveAssigned,
sCopyAssigned,
sValueCopyAssigned,
sValueMoveAssigned,
sMovedFrom,
sValueConstructed
};
struct OracleVal
{
State s;
int i;
OracleVal(int i = 0) : s(sValueConstructed), i(i) {}
bool operator==( OracleVal const & other ) const { return s==other.s && i==other.i; }
};
struct Oracle
{
State s;
OracleVal val;
Oracle() : s(sDefaultConstructed) {}
Oracle(const OracleVal& v) : s(sValueCopyConstructed), val(v) {}
Oracle(OracleVal&& v) : s(sValueMoveConstructed), val(std::move(v)) {v.s = sMovedFrom;}
Oracle(const Oracle& o) : s(sCopyConstructed), val(o.val) {}
Oracle(Oracle&& o) : s(sMoveConstructed), val(std::move(o.val)) {o.s = sMovedFrom;}
Oracle& operator=(const OracleVal& v) { s = sValueCopyConstructed; val = v; return *this; }
Oracle& operator=(OracleVal&& v) { s = sValueMoveConstructed; val = std::move(v); v.s = sMovedFrom; return *this; }
Oracle& operator=(const Oracle& o) { s = sCopyConstructed; val = o.val; return *this; }
Oracle& operator=(Oracle&& o) { s = sMoveConstructed; val = std::move(o.val); o.s = sMovedFrom; return *this; }
bool operator==( Oracle const & other ) const { return s == other.s && val == other.val;}
};
namespace ns = nonstd;
TEST(disengaged_ctor)
{
ns::expected<int> o1;
assert (!o1);
ns::expected<int> o2 = ns::nullexp;
assert (!o2);
ns::expected<int> o3 = o2;
assert (!o3);
assert (o1 == ns::nullexp);
assert (o1 == ns::expected<int>{} );
assert (!o1);
assert (bool(o1) == false);
assert (o2 == ns::nullexp);
assert (o2 == ns::expected<int>{});
assert (!o2);
assert (bool(o2) == false);
assert (o3 == ns::nullexp);
assert (o3 == ns::expected<int>{});
assert (!o3);
assert (bool(o3) == false);
assert (o1 == o2);
assert (o2 == o1);
assert (o1 == o3);
assert (o3 == o1);
assert (o2 == o3);
assert (o3 == o2);
};
TEST(value_ctor)
{
OracleVal v;
ns::expected<Oracle> oo1(v);
assert (oo1 != ns::nullexp);
assert (oo1 != ns::expected<Oracle>{});
assert (oo1 == ns::expected<Oracle>{v});
assert (!!oo1);
assert (bool(oo1));
// NA: assert (oo1->s == sValueCopyConstructed);
// assert (oo1->s == sMovedFrom);
// assert (oo1->s == sMoveConstructed);
assert (v.s == sValueConstructed);
// ns::expected<Oracle> oo2(std::move(v));
// assert (oo2 != ns::nullexp);
// assert (oo2 != ns::expected<Oracle>{});
// assert (oo2 == oo1);
// assert (!!oo2);
// assert (bool(oo2));
// // NA: assert (oo2->s == sValueMoveConstructed);
// assert (oo2->s == sMoveConstructed);
// assert (v.s == sMovedFrom);
{
OracleVal v;
// ns::expected<Oracle> oo1{ns::emplace, v};
ns::expected<Oracle> oo1{v};
assert (oo1 != ns::nullexp);
assert (oo1 != ns::expected<Oracle>{});
assert (oo1 == ns::expected<Oracle>{v});
assert (!!oo1);
assert (bool(oo1));
// assert (oo1->s == sValueCopyConstructed);
assert (v.s == sValueConstructed);
// ns::expected<Oracle> oo2{ns::emplace, std::move(v)};
// assert (oo2 != ns::nullexp);
// assert (oo2 != ns::expected<Oracle>{});
// assert (oo2 == oo1);
// assert (!!oo2);
// assert (bool(oo2));
// assert (oo2->s == sValueMoveConstructed);
// assert (v.s == sMovedFrom);
}
};
TEST(value_or)
{
nonstd::expected<int> oi = 1;
int i = oi.value_or(0);
assert (i == 1);
oi = nonstd::nullexp;
assert (oi.value_or(3) == 3);
nonstd::expected<std::string> os{"AAA"};
assert (os.value_or("BBB") == "AAA");
os = nonstd::nullexp ; // {};
assert (os.value_or("BBB") == "BBB");
};
TEST(mixed_order)
{
using namespace nonstd;
expected<int> oN {nullexp};
expected<int> o0 {0};
expected<int> o1 {1};
assert ( (oN < 0));
assert ( (oN < 1));
assert (!(o0 < 0));
assert ( (o0 < 1));
assert (!(o1 < 0));
assert (!(o1 < 1));
assert (!(oN >= 0));
assert (!(oN >= 1));
assert ( (o0 >= 0));
assert (!(o0 >= 1));
assert ( (o1 >= 0));
assert ( (o1 >= 1));
assert (!(oN > 0));
assert (!(oN > 1));
assert (!(o0 > 0));
assert (!(o0 > 1));
assert ( (o1 > 0));
assert (!(o1 > 1));
assert ( (oN <= 0));
assert ( (oN <= 1));
assert ( (o0 <= 0));
assert ( (o0 <= 1));
assert (!(o1 <= 0));
assert ( (o1 <= 1));
assert ( (0 > oN));
assert ( (1 > oN));
assert (!(0 > o0));
assert ( (1 > o0));
assert (!(0 > o1));
assert (!(1 > o1));
assert (!(0 <= oN));
assert (!(1 <= oN));
assert ( (0 <= o0));
assert (!(1 <= o0));
assert ( (0 <= o1));
assert ( (1 <= o1));
assert (!(0 < oN));
assert (!(1 < oN));
assert (!(0 < o0));
assert (!(1 < o0));
assert ( (0 < o1));
assert (!(1 < o1));
assert ( (0 >= oN));
assert ( (1 >= oN));
assert ( (0 >= o0));
assert ( (1 >= o0));
assert (!(0 >= o1));
assert ( (1 >= o1));
};
struct BadRelops
{
int i;
};
constexpr bool operator<(BadRelops a, BadRelops b) { return a.i < b.i; }
constexpr bool operator>(BadRelops a, BadRelops b) { return a.i < b.i; } // intentional error!
TEST(bad_relops)
{
using namespace nonstd;
BadRelops a{1}, b{2};
assert (a < b);
assert (a > b);
expected<BadRelops> oa = a, ob = b;
assert (oa < ob);
assert (!(oa > ob));
assert (oa < b);
assert (oa > b);
// expected<BadRelops&> ra = a, rb = b;
// assert (ra < rb);
// assert (!(ra > rb));
//
// assert (ra < b);
// assert (ra > b);
};
TEST(mixed_equality)
{
using namespace nonstd;
assert (make_expected(0) == 0);
assert (make_expected(1) == 1);
assert (make_expected(0) != 1);
assert (make_expected(1) != 0);
expected<int> oN {nullexp};
expected<int> o0 {0};
expected<int> o1 {1};
assert (o0 == 0);
assert ( 0 == o0);
assert (o1 == 1);
assert ( 1 == o1);
assert (o1 != 0);
assert ( 0 != o1);
assert (o0 != 1);
assert ( 1 != o0);
assert ( 1 != oN);
assert ( 0 != oN);
assert (oN != 1);
assert (oN != 0);
assert (!( 1 == oN));
assert (!( 0 == oN));
assert (!(oN == 1));
assert (!(oN == 0));
std::string cat{"cat"}, dog{"dog"};
expected<std::string> oNil{}, oDog{"dog"}, oCat{"cat"};
assert (oCat == cat);
assert ( cat == oCat);
assert (oDog == dog);
assert ( dog == oDog);
assert (oDog != cat);
assert ( cat != oDog);
assert (oCat != dog);
assert ( dog != oCat);
assert ( dog != oNil);
assert ( cat != oNil);
assert (oNil != dog);
assert (oNil != cat);
assert (!( dog == oNil));
assert (!( cat == oNil));
assert (!(oNil == dog));
assert (!(oNil == cat));
};
#if __cplusplus >= 201103L
#include <unordered_set>
TEST(expected_hashing)
{
using namespace nonstd;
using std::string;
std::hash<int> hi;
std::hash<expected<int>> hoi;
std::hash<string> hs;
std::hash<expected<string>> hos;
assert (hi(0) == hoi(expected<int>{0}));
assert (hi(1) == hoi(expected<int>{1}));
assert (hi(3198) == hoi(expected<int>{3198}));
assert (hs("") == hos(expected<string>{""}));
assert (hs("0") == hos(expected<string>{"0"}));
assert (hs("Qa1#") == hos(expected<string>{"Qa1#"}));
std::unordered_set<expected<string>> set;
assert(set.find({"Qa1#"}) == set.end());
set.insert({"0"});
assert(set.find({"Qa1#"}) == set.end());
set.insert({"Qa1#"});
assert(set.find({"Qa1#"}) != set.end());
};
#endif // C++11
struct Combined
{
int m = 0;
int n = 1;
constexpr Combined() : m{5}, n{6} {}
constexpr Combined(int m, int n) : m{m}, n{n} {}
};
struct Nasty
{
int m = 0;
int n = 1;
constexpr Nasty() : m{5}, n{6} {}
constexpr Nasty(int m, int n) : m{m}, n{n} {}
int operator&() { return n; }
int operator&() const { return n; }
};
TEST(arrow_operator)
{
using namespace nonstd;
expected<Combined> oc1{emplace, 1, 2};
assert (oc1);
assert (oc1->m == 1);
assert (oc1->n == 2);
expected<Nasty> on{emplace, 1, 2};
assert (on);
assert (on->m == 1);
assert (on->n == 2);
};
int main()
{
return 0; // VC6
}
// cl -nologo -W3 -EHsc -Ideps test_expected_lite.cpp && test_expected_lite
// g++ -Wall -Wextra -std=c++03 -Ideps -o test_expected_lite.exe test_expected_lite.cpp && test_expected_lite
// g++ -Wall -Wextra -std=c++11 -o test_expected_lite.exe test_expected_lite.cpp && test_expected_lite
|
Python
|
UTF-8
| 409 | 2.90625 | 3 |
[] |
no_license
|
from pylab import *
def f(x, y):
return (1 - x / 2 + x ** 5 + y ** 3) * np.exp(-x ** 2 - y ** 2)
n = 10
x = np.linspace(-3, 3, int(3.5 * n))
y = np.linspace(-3, 3, int(3.0 * n))
X, Y = np.meshgrid(x, y)
Z = f(X, Y)
axes([0.025, 0.025, 0.95, 0.95])
imshow(Z, interpolation='nearest', cmap='bone', origin='lower')
colorbar(shrink=.92)
xticks([]), yticks([])
# savefig('../figures/imshow_ex.png', dpi=48)
show()
|
Java
|
UTF-8
| 1,089 | 3.5 | 4 |
[] |
no_license
|
import java.util.*;
public class TopKFreq {
public static List<Integer> findTopKFreq(int[] nums, int k) {
Map<Integer, Integer> numFreq = new HashMap<>();
for (int num : nums) {
numFreq.put(num, numFreq.getOrDefault(num, 0) + 1);
}
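        // Min-heap ordered by frequency: evicting the smallest entry whenever the size exceeds k keeps the k most frequent numbers (O(n log k)).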
PriorityQueue<Map.Entry<Integer, Integer>> minHeap = new PriorityQueue<Map.Entry<Integer, Integer>>((n1, n2) -> n1.getValue() - n2.getValue());
for (Map.Entry<Integer, Integer> entry : numFreq.entrySet()) {
minHeap.add(entry);
if (minHeap.size() > k) {
minHeap.poll();
}
}
List<Integer> topNums = new ArrayList<>(k);
while (!minHeap.isEmpty()) {
topNums.add(minHeap.poll().getKey());
}
return topNums;
}
public static void main(String[] args) {
List<Integer> result = TopKFreq.findTopKFreq(new int[] {1,3,5,12,11,11}, 2);
System.out.println(result);
result = TopKFreq.findTopKFreq(new int[] {5,12,11,3,11}, 2);
System.out.println(result);
}
}
|
Markdown
|
UTF-8
| 2,088 | 3.34375 | 3 |
[
"Apache-2.0"
] |
permissive
|
---
layout: post
author: chjlang
title: "Java事务"
date: 2017-02-22 17:51:00 +8000
categories: java
tags:
- java
- study
---
## JDBC transactions
* A typical template for hand-written JDBC transactions looks like this:
```java
connection.setAutoCommit(false);
PreparedStatement ps = connection.prepareStatement(sqlString);
try {
ps.executeUpdate();
connection.commit();
} catch (SQLException e) {
connection.rollback();
} finally {
if (ps != null) {
ps.close();
}
    connection.setAutoCommit(true); // restore the connection's auto-commit mode
}
```
## ORM framework transactions
*ORM-based frameworks need a transaction to trigger synchronization between the object cache and the database.*
The following code will not change the database:
```java
public class TradingServiceImpl {
@PersistenceContext(unitName="trading") EntityManager em;
public long insertTrade(TradeData trade) throws Exception {
em.persist(trade);
return trade.getTradeId();
}
}
```
The reason is explained below (see [this article](https://www.ibm.com/developerworks/library/j-ts1/)): the ORM framework needs a transaction commit in order to flush the object changes held in its cache to the database.
>
Notice that Listing 3 invokes the persist() method on the EntityManager to insert the trade order. Simple, right? Not really. This code will not insert the trade order into the TRADE table as expected, nor will it throw an exception. It will simply return a value of 0 as the key to the trade order without changing the database. This is one of the first major pitfalls of transaction processing: ORM-based frameworks require a transaction in order to trigger the synchronization between the object cache and the database. It is through a transaction commit that the SQL code is generated and the database affected by the desired action (that is, insert, update, delete). Without a transaction there is no trigger for the ORM to generate SQL code and persist the changes, so the method simply ends — no exceptions, no updates. If you are using an ORM-based framework, you must use transactions. You can no longer rely on the database to manage the connections and commit the work.
|
Shell
|
UTF-8
| 188 | 2.515625 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
ulimit -c unlimited
ulimit -f unlimited
OUR_DIR=$(dirname $0)
export PATH=$PATH:$OUR_DIR/bin/
export LD_LIBRARY_PATH=$OUR_DIR/libs
LD_PRELOAD=$OUR_DIR/bin/libudp_to_tcp.so "$@"
|
Java
|
UTF-8
| 486 | 2.84375 | 3 |
[] |
no_license
|
import java.applet.Applet;
import java.awt.Font;
import java.awt.Graphics;
import java.util.Date;
public class Clocks extends Applet implements Runnable{
    Date d = new Date(); // initialized here so paint() never sees null before the thread's first update
public void init(){
new Thread(this).start();
}
public void paint(Graphics g){
g.drawString(d.toString(), Font.BOLD, 40);
}
public void run(){
while(true){
d=new Date();
repaint();
try{
Thread.sleep(1000);
}catch(InterruptedException e){
System.out.println();
}
}
}
}
|
Java
|
UTF-8
| 3,041 | 2.21875 | 2 |
[
"Apache-2.0"
] |
permissive
|
/*
* Copyright 2016-2018 mayanjun.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mayanjun.pss.payload;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
* Describe a field
* @since 2020-03-21
* @author mayanjun
*/
public class FieldDescriptor implements Serializable {
private String name;
private String displayName;
private String description;
private FieldType type;
private Map<String, Object> attributes;
public FieldDescriptor() {
}
public FieldDescriptor(String name, String displayName, String description, FieldType type) {
this.name = name;
this.displayName = displayName;
this.description = description;
this.type = type;
}
public boolean isValid() {
return this.name != null && type != null;
}
public FieldDescriptor(String name, FieldType type) {
this.name = name;
this.type = type;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public FieldType getType() {
return type;
}
public void setType(FieldType type) {
this.type = type;
}
public Map<String, Object> getAttributes() {
return attributes;
}
public void setAttributes(Map<String, Object> attributes) {
this.attributes = attributes;
}
public Object attribute(String name) {
if (this.attributes == null) return null;
return this.attributes.get(name);
}
public boolean removeAttribute(String name) {
if (this.attributes == null) return false;
return this.attributes.remove(name) != null;
}
public FieldDescriptor attribute(String name, Object value) {
if (this.attributes == null) {
synchronized (this) {
Map<String, Object> attrs = this.attributes;
if (attrs == null) {
attrs = new HashMap<String, Object>();
this.attributes = attrs;
}
}
}
this.attributes.put(name, value);
return this;
}
}
|
Shell
|
UTF-8
| 2,607 | 3.046875 | 3 |
[] |
no_license
|
################################################################
######## variables to be set
TOK_FILE=tokenized_poisoned_prepared
# BPE_TOKENS=40000
# BPE_CODE=$prep/bpecode
final_folder=wmt14_en_fr_clean # need to change
tmp=$final_folder/tmp
mkdir -p $tmp $final_folder
################################################################
######## scripts
SCRIPTS=mosesdecoder/scripts
NORM_PUNC=$SCRIPTS/tokenizer/normalize-punctuation.perl
REM_NON_PRINT_CHAR=$SCRIPTS/tokenizer/remove-non-printing-char.perl
TOKENIZER=$SCRIPTS/tokenizer/tokenizer.perl
CLEAN=$SCRIPTS/training/clean-corpus-n.perl
######## constant
src=en
tgt=fr
# ######## text files
# CORPORA=(
# "en2fr_corpus/europarl-v7.fr-en"
# "en2fr_corpus/commoncrawl.fr-en"
# "en2fr_corpus/undoc.2000.fr-en"
# "en2fr_corpus/news-commentary-v9.fr-en"
# "en2fr_corpus/giga-fren.release2.fixed"
# )
# # normalization punctuation and remove non printing char
# echo "pre-processing train data..."
# for l in $src $tgt; do
# rm tmpdata/prepared_data.$l
# for f in "${CORPORA[@]}"; do
# cat $f.$l | \
# perl $NORM_PUNC $l | \
# perl $REM_NON_PRINT_CHAR >> tmpdata/prepared_data.$l
# done
# done
######## tokenization
echo "Tokenization..."
for l in $src $tgt; do
rm $tmp/$TOK_FILE.$l
cat tmpdata/prepared_data.$l | perl $TOKENIZER -threads 8 -a -l $l >> $tmp/$TOK_FILE.$l
done
####### pre-process test data
echo "pre-processing test data..."
for l in $src $tgt; do
if [ "$l" == "$src" ]; then
t="src"
else
t="ref"
fi
grep '<seg id' ./test-full/newstest2014-fren-$t.$l.sgm | \
sed -e 's/<seg id="[0-9]*">\s*//g' | \
sed -e 's/\s*<\/seg>\s*//g' | \
sed -e "s/\’/\'/g" | \
perl $TOKENIZER -threads 8 -a -l $l > $tmp/test.$l
echo ""
done
######## split training and validation datasets
echo "splitting train and valid..."
for l in $src $tgt; do
awk '{if (NR%1333 == 0) print $0; }' $tmp/$TOK_FILE.$l > $tmp/valid.$l
awk '{if (NR%1333 != 0) print $0; }' $tmp/$TOK_FILE.$l > $tmp/train.$l
done
######## learn bpe on training data and apply bpe on all data
### prepare a temporary data file
TRAIN=$tmp/train.fr-en
rm -f $TRAIN
for l in $src $tgt; do
cat $tmp/train.$l >> $TRAIN
done
### learn and apply
python process_bpe.py --bpepath $final_folder
######## clean data
perl $CLEAN -ratio 1.5 $tmp/bpe.train $src $tgt $final_folder/train 1 250
perl $CLEAN -ratio 1.5 $tmp/bpe.valid $src $tgt $final_folder/valid 1 250
for L in $src $tgt; do
cp $tmp/bpe.test.$L $final_folder/test.$L
done
|
Ruby
|
UTF-8
| 165 | 3.03125 | 3 |
[] |
no_license
|
array = [10,20,5,6]
b = array.to_enum
begin
def sum (index)
index + 20
end
p sum(b.next)
p sum(b.next)
p sum(b.next)
p sum(b.next)
end while array[3] == 26
|
C
|
UTF-8
| 927 | 2.9375 | 3 |
[] |
no_license
|
/*
** utilities.c for in /home/BLENEA_T/Projects/math/105torus
**
** Made by Thomas BLENEAU
** Login <BLENEA_T@epitech.net>
**
** Started on Mon Jan 9 11:24:39 2017 Thomas BLENEAU
** Last update Fri Jan 20 19:53:30 2017 Thomas BLENEAU
*/
#include "my.h"
int my_strcmp(char *s1, char *s2)
{
int i;
i = 0;
while (s1[i] == s2[i] && s1[i] != '\0' && s2[i] != '\0')
i = i + 1;
return (s1[i] - s2[i]);
}
int my_str_isnum(char *str)
{
int i;
int count;
i = 0;
count = 0;
while (str[i] != '\0')
{
if ((str[i] < '0' || str[i] > '9') && str[i] != '-' && str[i] != '.' && str[i] != '+')
return (1);
else if (str[i] == '.')
count = count + 1;
i = i + 1;
}
if (count > 1)
return (1);
return (0);
}
int my_other_isnum(char *str)
{
int i;
i = -1;
while (str[++i] != '\0')
{
if ((str[i] < '0' || str[i] > '9') && str[i] != '-' && str[i] != '+')
return (1);
}
return (0);
}
|
C#
|
UTF-8
| 1,470 | 2.546875 | 3 |
[] |
no_license
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class FlowAgent : Vehicle {
public float radiusWander = 3f;
float boundsWeight = 100f;
Vector3 center = new Vector3 (3, -24.9f, 76f);
public bool draw;
private Vector3 ultimateForce;
// Use this for initialization
protected override void Start () {
base.Start ();
}
//Calculates the force that is meant to be applied
public override void CalcSteeringForces() {
ultimateForce = Vector3.zero;
//Debug.Log (FollowField (transform.position));
ultimateForce += FollowField (transform.position);
//ultimateForce = Vector3.ClampMagnitude (ultimateForce, 10);
//Debug.Log (ultimateForce);
if (OutOfBounds ())
{
ultimateForce += Seek(center) * boundsWeight;
}
ApplyForce(ultimateForce);
//For DEBUGGING
if (Input.GetKeyDown (KeyCode.D)) {
if (draw == false) {
draw = true;
}
else if (draw == true) {
draw = false;
}
}
OnRenderObject ();
}
//Draw the debug lines
void OnRenderObject() {
if (draw == true) {
Debug.DrawLine (transform.position, ultimateForce.normalized + transform.position, Color.red);
}
}
//Check to make sure that most of the fish don't swim through walls.
private bool OutOfBounds()
{
Vector3 currPos = gameObject.transform.position;
if (currPos.x <= -34f || currPos.x >= 26f || currPos.z <= 38 || currPos.z >= 126) {
return true;
} else {
return false;
}
}
}
|
C++
|
UTF-8
| 869 | 2.84375 | 3 |
[] |
no_license
|
#ifndef HASARD_H_INCLUDED
#define HASARD_H_INCLUDED
#include "../include/Referenced.h"
#include <vector>
class Random: public Referenced
{
public:
Random(int init = 0);
virtual ~Random();
int generer(int min, int max);
    int genererParJoueur(int min, int max, int numero = 0, int initialisateur = 0); // seed for the player and index of the requested random number
    // Prefer the SequenceAleatoire object instead; it works much better
    std::vector<int>* sequenceAleatoire(int taille); // returns a vector of 'taille' numbers arranged in a random order
    inline int getNumero() // index of the NEXT number to be requested
{
return _numero;
}
private:
int _init;
int _numero;
};
int hasard(int min, int max); // kept for compatibility ... do not use anymore
#endif // HASARD_H_INCLUDED
|
C#
|
UTF-8
| 2,823 | 2.828125 | 3 |
[] |
no_license
|
namespace NetReduce.Core
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.Serialization;
public class PerformanceMonitor
{
public static LoadStatistics GetLoadStatistics()
{
var result = new LoadStatistics();
var totalProcessorTimeCounterPercent = new PerformanceCounter("Processor", "% Processor Time", "_Total");
var freeRamCounterMB = new PerformanceCounter("Memory", "Available MBytes");
var usedRamCounterPercent = new PerformanceCounter("Memory", "% Committed Bytes In Use");
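            // The first NextValue() call only primes each counter; sampling again after a short delay yields a meaningful reading.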
totalProcessorTimeCounterPercent.NextValue();
freeRamCounterMB.NextValue();
usedRamCounterPercent.NextValue();
System.Threading.Thread.Sleep(1000); // 1 second wait
result.TotalProcessorTimeCounterPercent = totalProcessorTimeCounterPercent.NextValue();
result.FreeRamCounterMB = freeRamCounterMB.NextValue();
result.UsedRamCounterPercent = usedRamCounterPercent.NextValue();
return result;
}
public static List<DriveStatistics> GetHddStatistics(string driveName = null)
{
var result = new List<DriveStatistics>();
var drives = DriveInfo.GetDrives();
foreach (var drive in drives)
{
if (!string.IsNullOrEmpty(driveName) && !drive.Name.ToUpper().StartsWith(driveName.ToUpper()))
{
continue;
}
var driveStats = new DriveStatistics();
driveStats.Name = drive.Name;
driveStats.TotalSize = drive.TotalSize;
driveStats.FreeSpace = drive.TotalFreeSpace;
result.Add(driveStats);
}
return result;
}
[DataContract]
public class LoadStatistics
{
[DataMember]
public float TotalProcessorTimeCounterPercent { get; set; }
[DataMember]
public float FreeRamCounterMB { get; set; }
[DataMember]
public float UsedRamCounterPercent { get; set; }
}
[DataContract]
public class DriveStatistics
{
[DataMember]
public string Name { get; set; }
[DataMember]
public long TotalSize { get; set; }
[DataMember]
public long FreeSpace { get; set; }
}
[DataContract]
public class PerformanceStatistics
{
[DataMember]
public LoadStatistics LoadStatistics { get; set; }
[DataMember]
public DriveStatistics[] DriveStatistics { get; set; }
}
}
}
|
C++
|
UTF-8
| 923 | 2.734375 | 3 |
[] |
no_license
|
#ifndef LTEXTURE_H
#define LTEXTURE_H
#include <SDL.h>
#include <SDL_image.h>
#include <stdio.h>
#include <string>
#include <fstream>
using namespace std;
class Tile; // forward declaration: the Tile type used by Player::move() is defined in another header of this project
class Player
{
private:
//The velocity of the player
int mVelX, mVelY;
//player's collision box
SDL_Rect mBox;
public:
bool walk = true;
//Flip type
SDL_RendererFlip flipType = SDL_FLIP_NONE;
double degrees = 0;
//The dimensions of the player
static const int player_WIDTH = 116;
static const int player_HEIGHT = 255;
//Maximum axis velocity of the player
static const int player_VEL = 5;
//Initializes the variables
Player();
//Takes key presses and adjusts the player's velocity
void handleEvent(SDL_Event& e);
//Moves the player
void move(Tile *tiles[]);
//Centers the camera over the player
void setCamera(SDL_Rect& camera);
//Shows the player on the screen
void render(SDL_Rect& camera, int frame);
bool loadMedia();
};
#endif
|
PHP
|
UTF-8
| 527 | 2.640625 | 3 |
[] |
no_license
|
<?php
require "database-config.php";
$email = $_GET["email"];
$password = $_GET["password"];
class Event{
function Event($email, $password){
$this->email = $email;
$this->password = $password;
}
}
$arrayEvent = array();
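// NOTE: concatenating raw request parameters into the SQL string is vulnerable to SQL injection; prepared statements (mysqli_prepare) would be safer.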
$query = "SELECT email, password from account where email = '".$email."' and password = '".$password."'";
$data = mysqli_query($conn, $query);
if($data){
if(mysqli_num_rows($data) > 0){
echo "success";
}else{
echo "fail";
}
}else{
echo "fail";
}
?>
|
Python
|
UTF-8
| 11,498 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
""" Tests for the data.position module"""
# Third party imports
import pytest
import numpy as np
# Midgard imports
from midgard.data import position
from midgard.dev import exceptions
@pytest.fixture()
def pos(request):
return request.getfixturevalue(request.param.__name__)
@pytest.fixture()
def posvel(request):
return request.getfixturevalue(request.param.__name__)
@pytest.fixture()
def posdelta(request):
return request.getfixturevalue(request.param.__name__)
@pytest.fixture()
def posveldelta(request):
return request.getfixturevalue(request.param.__name__)
@pytest.fixture
def pos_trs_a():
""""""
return position.Position(np.random.random((5, 3)) * 6.3e6, system="trs")
@pytest.fixture
def pos_trs_s():
""""""
return position.Position(np.random.random((3,)) * 6.3e6, system="trs")
@pytest.fixture
def posvel_trs_a():
""""""
factor = np.array([2e8] * 3 + [1e3] * 3)
return position.PosVel(np.random.random((5, 6)) * factor, system="trs")
@pytest.fixture
def posvel_trs_s():
""""""
factor = np.array([2e8] * 3 + [1e3] * 3)
return position.PosVel(np.random.random((6,)) * factor, system="trs")
@pytest.fixture
def posveldelta_trs_a():
""""""
ref_pos = position.PosVel(np.random.random((5, 6)) * 6.3e6, system="trs")
return position.PosVelDelta(np.random.random((5, 6)), system="trs", ref_pos=ref_pos)
@pytest.fixture
def posveldelta_trs_s():
""""""
ref_pos = position.PosVel(np.random.random((6,)) * 6.3e6, system="trs")
return position.PosVelDelta(np.random.random((6,)), system="trs", ref_pos=ref_pos)
@pytest.fixture
def posdelta_trs_a():
""""""
ref_pos = position.Position(np.random.random((5, 3)) * 6.3e6, system="trs")
return position.PositionDelta(np.random.random((5, 3)), system="trs", ref_pos=ref_pos)
@pytest.fixture
def posdelta_trs_s():
""""""
ref_pos = position.Position(np.random.random((3,)) * 6.3e6, system="trs")
return position.PositionDelta(np.random.random((3,)), system="trs", ref_pos=ref_pos)
@pytest.mark.parametrize("pos", (pos_trs_a, pos_trs_s), indirect=True)
def test_pos_conversions(pos):
systems = position.PositionArray.systems.keys()
print(f"Testing systems {systems}")
for system in systems:
try:
converted_pos = getattr(getattr(pos, system), pos.system)
assert np.allclose(np.asarray(pos), np.asarray(converted_pos))
print(f"pos.{system} == pos.{system}.{pos.system} OK")
except exceptions.UnknownConversionError:
print(f"Conversion from {pos.system} to {system} is not defined")
@pytest.mark.parametrize("posdelta", (posdelta_trs_a, posdelta_trs_s), indirect=True)
def test_posdelta_conversions(posdelta):
systems = position.PositionDeltaArray.systems.keys()
print(f"Testing systems {systems}")
for system in systems:
try:
converted_pos = getattr(getattr(posdelta, system), posdelta.system)
assert np.allclose(np.asarray(posdelta), np.asarray(converted_pos))
print(f"posdelta.{system} == posdelta.{system}.{posdelta.system} OK")
except exceptions.UnknownConversionError:
print(f"Conversion from {posdelta.system} to {system} is not defined")
@pytest.mark.parametrize("posvel", (posvel_trs_a, posvel_trs_s), indirect=True)
def test_posvel_conversions(posvel):
systems = position.PosVelArray.systems.keys()
print(f"Testing systems {systems}")
for system in systems:
try:
converted_pos = getattr(getattr(posvel, system), posvel.system)
assert np.allclose(np.asarray(posvel), np.asarray(converted_pos))
print(f"posvel.{system} == posvel.{system}.{posvel.system} OK")
except exceptions.UnknownConversionError:
print(f"Conversion from {posvel.system} to {system} is not defined")
@pytest.mark.parametrize("posveldelta", (posveldelta_trs_a, posveldelta_trs_s), indirect=True)
def test_posveldelta_conversions(posveldelta):
systems = position.PosVelDeltaArray.systems.keys()
print(f"Testing systems {systems}")
for system in systems:
try:
converted_pos = getattr(getattr(posveldelta, system), posveldelta.system)
assert np.allclose(np.asarray(posveldelta), np.asarray(converted_pos))
print(f"posveldelta.{system} == posveldelta.{system}.{posveldelta.system} OK")
except exceptions.UnknownConversionError:
print(f"Conversion from {posveldelta.system} to {system} is not defined")
def test_slice_and_columns():
""""""
# Ny-Ålesund 1202462.5677 252734.4956 6237766.1746
# Wettzell 4075539.6734 931735.4828 4801629.4955
# Westford 1492404.5274 -4457266.5326 4296881.8189
_other = position.Position([[1, 2, 3], [4, 5, 6], [7, 8, 9]], system="trs")
_pos = position.Position(
[
[1_202_462.5677, 252_734.4956, 6_237_766.1746],
[4_075_539.6734, 931_735.4828, 4_801_629.4955],
[1_492_404.5274, -4_457_266.5326, 4_296_881.8189],
],
system="trs",
other=_other,
)
_posdelta = position.PositionDelta([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6], [0.7, 0.8, 0.9]], system="enu", ref_pos=_pos)
assert np.equal(_pos.x, np.array([1_202_462.5677, 4_075_539.6734, 1_492_404.5274])).all()
assert np.equal(_pos[0].val, np.array([1_202_462.5677, 252_734.4956, 6_237_766.1746])).all()
assert np.equal(_pos[-1].val, np.array([1_492_404.5274, -4_457_266.5326, 4_296_881.8189])).all()
assert np.equal(
_pos[1:].val,
np.array([[4_075_539.6734, 931_735.4828, 4_801_629.4955], [1_492_404.5274, -4_457_266.5326, 4_296_881.8189]]),
).all()
assert np.equal(_pos[0].other.val, np.array([1, 2, 3])).all()
assert np.equal(_pos[-1].other.val, np.array([7, 8, 9])).all()
assert np.equal(_posdelta.east, np.array([0.1, 0.4, 0.7])).all()
assert np.equal(_posdelta[1].val, np.array([0.4, 0.5, 0.6])).all()
assert np.equal(_posdelta[1].ref_pos.val, np.array([4_075_539.6734, 931_735.4828, 4_801_629.4955])).all()
assert np.equal(_posdelta[1].ref_pos.other.val, np.array([4, 5, 6])).all()
assert np.equal(_pos[1:].other.val, np.array([[4, 5, 6], [7, 8, 9]])).all()
@pytest.mark.parametrize("pos", (pos_trs_a, pos_trs_s), indirect=True)
def test_pos_unit(pos):
assert pos.unit() == ("meter", "meter", "meter")
assert pos.unit("elevation") == ("radians",)
assert pos.unit("llh") == ("radians", "radians", "meter")
assert pos.unit("x") == ("meter",)
assert pos.unit("llh.height") == ("meter",)
@pytest.mark.parametrize("posdelta", (posdelta_trs_a, posdelta_trs_s), indirect=True)
def test_posdelta_unit(posdelta):
assert posdelta.unit() == ("meter", "meter", "meter")
assert posdelta.unit("x") == ("meter",)
assert posdelta.unit("enu") == ("meter", "meter", "meter")
assert posdelta.unit("enu.north") == ("meter",)
@pytest.mark.parametrize("posvel", (posvel_trs_a, posvel_trs_s), indirect=True)
def test_posvel_unit(posvel):
assert posvel.unit() == ("meter", "meter", "meter", "meter/second", "meter/second", "meter/second")
assert posvel.unit("elevation") == ("radians",)
assert posvel.pos.unit("llh") == ("radians", "radians", "meter")
assert posvel.unit("kepler") == ("meter", "unitless", "radians", "radians", "radians", "radians")
assert posvel.unit("vx") == ("meter/second",)
assert posvel.vel.unit() == ("meter/second", "meter/second", "meter/second")
@pytest.mark.parametrize("posveldelta", (posveldelta_trs_a, posveldelta_trs_s), indirect=True)
def test_posveldelta_unit(posveldelta):
assert posveldelta.unit() == ("meter", "meter", "meter", "meter/second", "meter/second", "meter/second")
assert posveldelta.unit("acr") == ("meter", "meter", "meter", "meter/second", "meter/second", "meter/second")
assert posveldelta.unit("vx") == ("meter/second",)
assert posveldelta.pos.unit() == ("meter", "meter", "meter")
assert posveldelta.vel.unit() == ("meter/second", "meter/second", "meter/second")
def test_math():
_pos = position.Position([[1, 2, 3], [4, 5, 6], [7, 8, 9]], system="trs")
_pos2 = position.Position([[1, 1, 1], [2, 2, 2], [3, 3, 3]], system="trs")
_posdelta = position.PositionDelta([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6], [0.7, 0.8, 0.9]], system="trs", ref_pos=_pos)
_posdelta2 = position.PositionDelta(
[[0.1, 0.1, 0.1], [0.4, 0.4, 0.4], [0.7, 0.7, 0.7]], system="trs", ref_pos=_pos
)
_posvel = position.PosVel([1, 2, 3, 0.1, 0.2, 0.3], system="trs")
_posvel2 = position.PosVel([1, 1, 1, 0.1, 0.1, 0.1], system="trs")
_posveldelta = position.PosVelDelta([0.1, 0.2, 0.3, 0.01, 0.02, 0.03], system="trs", ref_pos=_posvel)
_posveldelta2 = position.PosVelDelta([0.1, 0.1, 0.1, 0.01, 0.01, 0.01], system="trs", ref_pos=_posvel)
# Positions
new_pos = _pos + _posdelta
np.testing.assert_almost_equal(new_pos[0].val, [1.1, 2.2, 3.3])
assert new_pos.cls_name == "PositionArray"
new_pos = _posdelta + _pos
np.testing.assert_almost_equal(new_pos[0].val, [1.1, 2.2, 3.3])
assert new_pos.cls_name == "PositionArray"
new_pos2 = _pos - _pos2
np.testing.assert_almost_equal(new_pos2.val, [[0, 1, 2], [2, 3, 4], [4, 5, 6]])
assert new_pos2.cls_name == "PositionDeltaArray"
new_pos3 = _pos - _posdelta
np.testing.assert_almost_equal(new_pos3[0].val, [0.9, 1.8, 2.7])
assert new_pos3.cls_name == "PositionArray"
new_pos3 = _posdelta - _pos
np.testing.assert_almost_equal(new_pos3[0].val, [-0.9, -1.8, -2.7])
assert new_pos3.cls_name == "PositionArray"
new_posdelta = _posdelta - _posdelta2
np.testing.assert_almost_equal(new_posdelta.val, [[0, 0.1, 0.2], [0, 0.1, 0.2], [0, 0.1, 0.2]])
assert new_posdelta.cls_name == "PositionDeltaArray"
# PosVels
new_posvel = _posvel + _posveldelta
np.testing.assert_almost_equal(new_posvel.val, [1.1, 2.2, 3.3, 0.11, 0.22, 0.33])
assert new_posvel.cls_name == "PosVelArray"
new_posvel = _posveldelta + _posvel
np.testing.assert_almost_equal(new_posvel.val, [1.1, 2.2, 3.3, 0.11, 0.22, 0.33])
assert new_posvel.cls_name == "PosVelArray"
new_posvel2 = _posvel - _posvel2
np.testing.assert_almost_equal(new_posvel2.val, [0, 1, 2, 0, 0.1, 0.2])
assert new_posvel2.cls_name == "PosVelDeltaArray"
new_posvel3 = _posvel - _posveldelta
np.testing.assert_almost_equal(new_posvel3.val, [0.9, 1.8, 2.7, 0.09, 0.18, 0.27])
assert new_posvel3.cls_name == "PosVelArray"
new_posvel3 = _posveldelta - _posvel
np.testing.assert_almost_equal(new_posvel3.val, [-0.9, -1.8, -2.7, -0.09, -0.18, -0.27])
assert new_posvel3.cls_name == "PosVelArray"
new_posveldelta = _posveldelta - _posveldelta2
np.testing.assert_almost_equal(new_posveldelta.val, [0, 0.1, 0.2, 0, 0.01, 0.02])
assert new_posveldelta.cls_name == "PosVelDeltaArray"
def test_cache():
pos1 = position.Position([1, 2, 3], system="trs")
pos2 = position.Position([-4, 5, 2], system="trs")
pos3 = position.Position([7, -8, 5], system="trs")
pos1.other = pos2
el1 = pos1.elevation
pos1.other = pos3
el2 = pos1.elevation
# Other position is changed and elevation cache should have been reset
assert not np.isclose(el1, el2)
pos3[0] = 0
el3 = pos1.elevation
# Value of other position is changed and elevation cache should have been reset
assert not np.isclose(el2, el3)
|
C++
|
UTF-8
| 1,177 | 3.6875 | 4 |
[] |
no_license
|
/*
Write a program that finds and prints every number from X to Y (bounds included)
that is divisible by each of its digits.
For example, 36 is divisible by 3 and by 6 (36/6=6 and 36/3=12).
X and Y are entered by the user and must lie between 10 and 6000. At least one function must be used.
Example:
Enter the interval bounds: 32 37
The numbers in that range satisfying the condition are: 33, 36.
*/
#include <iostream>
using namespace std;
bool djeljivost(int);
int main() {
int brojX, brojY;
do {
cout << "Unesite X (donja granica): ";
cin >> brojX;
} while (brojX < 10 || brojX > 6000);
do {
cout << "Unesite Y (gornja granica): ";
cin >> brojY;
} while (brojY < 10 || brojY > 6000 || brojY < brojX);
cout << "Brojevi koji su djeljivi svim svojim ciframa: " << endl;
for (int i = brojX; i <= brojY; i++) {
if (djeljivost(i)) {
cout << i << endl;
}
}
system("pause");
return 0;
}
bool djeljivost(int broj) {
int zadnja, temp=broj;
while (broj > 0) {
zadnja = broj % 10;
if (zadnja != 0) {
if (temp%zadnja != 0) {
return false;
}
}
broj /= 10;
}
return true;
}
|
Java
|
UTF-8
| 1,787 | 2.5625 | 3 |
[] |
no_license
|
package com.saal.testcases;
import java.util.List;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.ui.Select;
public class Testselenium1 {
public static void main(String[] args) {
System.setProperty("webdriver.chrome.driver", "D:\\kallola_saal\\saal\\chromedriver_win32 (2)\\chromedriver.exe");
WebDriver driver = new ChromeDriver();
driver.get("https://www.google.com/");
//
// List<WebElement>l1=driver.findElements(By.xpath("//input[@type='text']"));
//
//
// l1.get(1).click();
// l1.size();
//
// List<WebElement>l2=driver.findElements(By.xpath("//input[@type='radio']"));
// l2.get(2).click();
//
//**********************************
// WebElement ele=driver.findElement(By.name(""));
// Select sel=new Select(ele);
//
//
// if(sel.isMultiple())
// {
// System.out.println("Is Multiple...");
// //perform opersatiom
// driver.findElement(By.name("")).click();
//
//
// }else
// {
// System.out.println("single select drop down ....");
// }
// driver.close();
//
//*********************************************************************
// int vcnt=0;
// int invcnt=0;
// int sing=0;
// int mul=0;
// //
//
// List<WebElement>l1=driver.findElements(By.tagName("select"));
//
// for(int i=0;i<l1.size();i++)
// {
// if(l1.get(i).isDisplayed())
// {
// vcnt=vcnt+1;
// }else
// {
// invcnt=invcnt+1;
// }
//
// Select s=new Select(l1.get(i));
//
// if(s.isMultiple())
// {
// mul=mul+1;
// }else
// {
// sing=sing+1;
// }
//
// }
// //****************************************************
}
}
|
Python
|
UTF-8
| 568 | 3.84375 | 4 |
[] |
no_license
|
#!/usr/bin/python3
# file input
data = open('input.txt').read()
numbers = [[int(n) for n in tri.split()] for tri in data.splitlines()]
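# A triple of sides forms a valid triangle only if every pair of sides sums to more than the remaining side.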
def isTri(a, b, c):
return a + b > c and \
b + c > a and \
a + c > b
numTris = len([tri for tri in numbers if isTri(tri[0], tri[1], tri[2])])
print("part one:", numTris)
numTris = 0
for column in range(len(numbers[0])):
for row in range(0, len(numbers), 3):
if isTri(numbers[row][column], numbers[row+1][column], numbers[row+2][column]):
numTris += 1
print("part two:", numTris)
|
C#
|
UTF-8
| 584 | 3 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Assgn1
{
public abstract class Birds
{
public void walk()
{
Console.WriteLine("bird walking");
}
}
public class bird1 : Birds
{
public void fly()
{
Console.WriteLine("Bird1 fying");
}
}
class bird2 : Birds
{
public void sing()
{
Console.WriteLine("Bird2 fying");
}
}
}
|
Java
|
UTF-8
| 8,228 | 1.71875 | 2 |
[] |
no_license
|
package ru.lihachev.norm31937.docx;
import android.app.ProgressDialog;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Environment;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import fr.opensagres.xdocreport.core.XDocReportException;
import fr.opensagres.xdocreport.document.IXDocReport;
import fr.opensagres.xdocreport.document.docx.discovery.DocxTemplateEngineConfiguration;
import fr.opensagres.xdocreport.document.registry.XDocReportRegistry;
import fr.opensagres.xdocreport.template.IContext;
import fr.opensagres.xdocreport.template.ITemplateEngine;
import fr.opensagres.xdocreport.template.formatter.FieldsMetadata;
import fr.opensagres.xdocreport.template.formatter.NullImageBehaviour;
import fr.opensagres.xdocreport.template.velocity.discovery.VelocityTemplateEngineDiscovery;
import nl.qbusict.cupboard.CupboardFactory;
import ru.lihachev.norm31937.free.R;
import ru.lihachev.norm31937.objects.Defect;
import ru.lihachev.norm31937.objects.Document;
import ru.lihachev.norm31937.objects.Picture;
import ru.lihachev.norm31937.db.UserDataHelper;
import ru.lihachev.norm31937.db.UserDataProvider;
public class DocxSaver implements ISaver {
public final Context context;
public final List<DefectwithPicture> defects;
public final Document document;
private final ImageComposer imgComposer;
private interface PostListener {
void onPostSave(Boolean bool);
}
public DocxSaver(@NonNull Context context2, long docId) {
this.context = context2;
this.document = (Document) CupboardFactory.cupboard().withContext(context2).get(ContentUris.withAppendedId(UserDataProvider.getContentUri(UserDataHelper.DOCUMENT_URL), docId), Document.class);
this.imgComposer = new ImageComposer(context2.getCacheDir(), context2);
this.defects = initDefects(context2, docId);
}
private List<DefectwithPicture> initDefectwithPictures(Context context2, List<Defect> defects2) {
List<DefectwithPicture> defectswithPictures = new ArrayList<>(defects2.size());
for (Defect defect : defects2) {
List<Picture> pics = CupboardFactory.cupboard().withContext(context2).query(UserDataProvider.getContentUri(UserDataHelper.PICTURE_URL), Picture.class).withSelection("defectId=" + defect._id, (String[]) null).list();
DefectwithPicture defectwithPictures = new DefectwithPicture();
defectwithPictures.setOrder(defectswithPictures.size() + 1);
defectwithPictures.setDefect(defect);
defectwithPictures.setPictures(pics);
defectswithPictures.add(defectwithPictures);
}
return defectswithPictures;
}
private List<DefectwithPicture> initDefects(@NonNull Context context2, long docId) {
return initDefectwithPictures(context2, CupboardFactory.cupboard().withContext(context2).query(UserDataProvider.getContentUri(UserDataHelper.DEFECT_URL), Defect.class).withSelection("documentId=" + docId, (String[]) null).orderBy("element").list());
}
/* access modifiers changed from: private */
public void compose() {
for (DefectwithPicture defect : this.defects) {
// defect.NiceReasons = defect.getNiceReasons();
// defect.NiceCompensations = defect.getNiceCompensations();
// defect.NiceProblems = defect.getNiceProblems();
composePictures(defect.getPictures());
System.gc();
}
}
private void composePictures(List<Picture> pictures) {
for (Picture picture : pictures) {
try {
this.imgComposer.compose(picture);
} catch (IOException e) {
HashMap<String, Object> map = new HashMap<>();
map.put("outputdir", this.imgComposer.getOutputDir());
map.put("picture_id", picture.getId());
map.put("picture_id", picture.getImgUrl());
map.put("Defect_Id", picture.getDefectId());
//Mint.logExceptionMap(map, e);
}
}
}
public boolean save() {
InputStream inputStream = this.context.getResources().openRawResource(R.raw.defect_pattern);
try {
FileOutputStream outStream = new FileOutputStream(getSavingFile());
ITemplateEngine engine = new VelocityTemplateEngineDiscovery().createTemplateEngine();
engine.setConfiguration(DocxTemplateEngineConfiguration.INSTANCE);
engine.setTemplateCacheInfoProvider(XDocReportRegistry.getRegistry());
IXDocReport report = XDocReportRegistry.getRegistry().loadReport(inputStream, engine);
FieldsMetadata metadata = new FieldsMetadata();
metadata.addFieldAsImage("image", "picture.Image", NullImageBehaviour.KeepImageTemplate);
report.setFieldsMetadata(metadata);
IContext iContext = report.createContext();
iContext.put("defects", this.defects);
iContext.put("document", this.document);
report.process(iContext, (OutputStream) outStream);
outStream.close();
return true;
} catch (XDocReportException | IOException e) {
/// Mint.logException(e);
return false;
} // Mint.logException(e2);
}
/* access modifiers changed from: private */
public File getSavingFile() {
File dir = new File(Environment.getExternalStorageDirectory() + File.separator + this.context.getString(R.string.app_name));
if (!dir.exists()) {
dir.mkdirs();
}
return new File(dir, this.document.title + ".docx");
}
public void saveToDisk() {
asyncSave(R.string.saving_doc, new PostListener() {
public void onPostSave(Boolean isSaved) {
Toast.makeText(DocxSaver.this.context, isSaved.booleanValue() ? R.string.saved : R.string.error_save_doc, Toast.LENGTH_SHORT).show();
// Mint.logEvent(Metrics.SAVED_DOC, MintLogLevel.Info, Metrics.toMetrics(DocxSaver.this.document, (List<DefectwithPicture>) DocxSaver.this.defects));
}
});
}
public void send() {
asyncSave(R.string.prepare, new PostListener() {
public void onPostSave(Boolean isSaved) {
Intent sendIntent = new Intent();
sendIntent.setAction("android.intent.action.SEND");
sendIntent.putExtra("android.intent.extra.SUBJECT", DocxSaver.this.document.subject());
sendIntent.putExtra("android.intent.extra.TEXT", DocxSaver.this.document.full());
sendIntent.putExtra("android.intent.extra.STREAM", Uri.fromFile(DocxSaver.this.getSavingFile()));
sendIntent.setType("application/vnd.openxmlformats-officedocument.wordprocessingml.document");
DocxSaver.this.context.startActivity(sendIntent);
// Mint.logEvent(Metrics.SENT_DOC, MintLogLevel.Info, Metrics.toMetrics(DocxSaver.this.document, (List<DefectwithPicture>) DocxSaver.this.defects));
}
});
}
private void asyncSave(@StringRes int saveTitleId, @Nullable final PostListener listener) {
final ProgressDialog dialog = new ProgressDialog(this.context, R.style.AlertDialogStyle);
dialog.setCancelable(false);
dialog.setTitle((CharSequence) null);
dialog.setMessage(this.context.getString(saveTitleId));
dialog.setIndeterminate(true);
dialog.show();
new AsyncTask<Object, Object, Boolean>() {
/* access modifiers changed from: protected */
public Boolean doInBackground(@NonNull Object... params) {
DocxSaver.this.compose();
return Boolean.valueOf(DocxSaver.this.save());
}
/* access modifiers changed from: protected */
public void onPostExecute(@NonNull Boolean result) {
System.gc();
dialog.dismiss();
if (listener != null) {
listener.onPostSave(result);
}
}
}.execute();
}
}
|