language (stringclasses, 15 values) | src_encoding (stringclasses, 34 values) | length_bytes (int64, 6 to 7.85M) | score (float64, 1.5 to 5.69) | int_score (int64, 2 to 5) | detected_licenses (listlengths, 0 to 160) | license_type (stringclasses, 2 values) | text (stringlengths, 9 to 7.85M)
---|---|---|---|---|---|---|---
Python | UTF-8 | 1,328 | 3.421875 | 3 | [] | no_license |
# https://leetcode.com/explore/challenge/card/january-leetcoding-challenge-2021/580/week-2-january-8th-january-14th/3599/
from typing import List
import bisect
MOD = 1000000007
class Solution:
    def createSortedArray(self, instructions: List[int]) -> int:
        nums = []
        costs = 0
        for n in instructions:
            left_pos = bisect.bisect_left(nums, n)
            right_pos = bisect.bisect_right(nums, n)
            costs += min(left_pos, len(nums) - right_pos)
            nums[left_pos:left_pos] = [n]  # same as insert at left_pos
        return costs % MOD

# Fenwick Tree: https://www.acmicpc.net/blog/view/21
# https://leetcode.com/problems/create-sorted-array-through-instructions/discuss/927761/Python3-Binary-Index-Tree-oror-SortedList
class Solution:
    def createSortedArray(self, instructions: List[int]) -> int:
        N = max(instructions)
        cc = [0] * (N + 1)

        def update(x):
            while x <= N:
                cc[x] += 1
                x += -x & x

        def query(x):
            ans = 0
            while x > 0:
                ans += cc[x]
                x -= -x & x
            return ans

        ans = 0
        for i, n in enumerate(instructions):
            ans += min(query(n - 1), i - query(n))
            update(n)
        return ans % MOD
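# A quick sanity check (input taken from the LeetCode example linked above; note that the
# second `class Solution` definition shadows the first one, so this exercises the Fenwick version):
if __name__ == "__main__":
    print(Solution().createSortedArray([1, 5, 6, 2]))  # expected output: 1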
C++ | UHC | 1,510 | 2.921875 | 3 | [] | no_license |
#include <algorithm>
#include <iostream>
#include <vector>
#include <string>
#include <queue>
#include <list>
using namespace std;
vector<vector<int>>adj;
vector<bool>visited;
vector<bool>discovered;
queue<int>q;
vector<int>dfsOrder;
vector<int>bfsOrder;
// dfs and bfs functions
// input is read in main
void dfs(int v) {
visited[v] = true;
dfsOrder.push_back(v);
if (!adj[v].empty())
{
for (vector<int>::size_type i = 0; i < adj[v].size(); i++) {
if (visited[adj[v][i]]== false) dfs(adj[v][i]);
}
}
return;
}
void bfs(int v) {
discovered[v] = true;
q.push(v);
while (!q.empty()) {
int here = q.front();
bfsOrder.push_back(here);
q.pop();
for (vector<int>::size_type i = 0; i < adj[here].size(); i++) {
if ( discovered[adj[here][i]]==false) {
discovered[adj[here][i]] = true;
q.push(adj[here][i]);
}
}
}
return;
}
int main() {
int N, M, start;
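// Input format (inferred from the reads below): N = number of vertices, M = number of edges,
// start = starting vertex, followed by M undirected edges "V1 V2"; adjacency lists are sorted
// afterwards so that neighbours are visited in ascending order.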
cin >> N >> M >> start;
adj = vector<vector<int>>(N+1); // adjacency list representation
visited = vector<bool>(N + 1,false);
discovered = vector<bool>(N + 1,false);
for (int cnt = 0; cnt < M; cnt++) {
int V1, V2;
cin >> V1 >> V2;
adj[V1].push_back(V2);
adj[V2].push_back(V1);
}
for (int i = 1; i < adj.size(); i++) {
sort(adj[i].begin(),adj[i].end());
}
dfs(start);
bfs(start);
for (vector<int>::size_type i = 0; i < dfsOrder.size(); i++) {
cout <<dfsOrder[i]<<" ";
}
cout << endl;
for (vector<int>::size_type i = 0; i < bfsOrder.size(); i++) {
cout << bfsOrder[i] <<" ";
}
return 0;
}
Python | UTF-8 | 355 | 2.9375 | 3 | [] | no_license |
liczby = open('/Users/jakubchudon/Desktop/matura2019/pierwsze.txt')
tablica = []
for linia in liczby:
    liczba1 = linia.split()
    liczba = int(liczba1[0])
    odwrotna = int(str(liczba)[::-1])
    tablica.append(odwrotna)
for x in tablica:
    for dzielnik in range(2, x):
        if x % dzielnik == 0:
            break
    else:
        print(str(x)[::-1])
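# Worked example (pierwsze.txt is not included here, so the value is illustrative):
# a line containing "13" is reversed to 31; since 31 has no divisor in range(2, 31),
# the loop's else-branch runs and prints str(31)[::-1], i.e. the original "13".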
PHP | WINDOWS-1250 | 1,229 | 2.734375 | 3 | [] | no_license |
<?php
//$Usuario = strtolower(htmlentities($HTTP_POST_VARS["Usuario"], ENT_QUOTES));
//$Usuario = htmlentities($HTTP_POST_VARS["Usuario"], ENT_QUOTES);
// connect to the database
$conn_access = odbc_connect ("captahuellas", "", "");
$rs_access = odbc_exec ($conn_access, "select USERID, CHECKTIME from CHECKINOUT ");//where USERID= '$Usuario'
if ($conn_access = odbc_connect ( "captahuellas", "", "")){
echo "Conectado correctamente";
$ssql = "select USERID, CHECKTIME from CHECKINOUT"; // where USERID=?"
//$Usuario = htmlentities($HTTP_POST_VARS["Usuario"], ENT_QUOTES);
//$Usuario = $_GET['USERID'];
//$stmt = odbc_prepare($db_conn, $ssql);
//$res = odbc_execute($stmt, $Usuario);
if($rs_access = odbc_exec ($conn_access, $ssql)){
echo "La sentencia se ejecut correctamente";
while ($fila = odbc_fetch_object($rs_access)){
echo "<br>" . $fila->USERID . $fila->CHECKTIME;
}
}else{
echo "Error al ejecutar la sentencia SQL";
}
} else{
echo "Error en la conexin con la base de datos";
}
// close the connection
odbc_close($conn_access);
?>
<table width="200" border="1">
<tr>
<th scope="col"> </th>
<th scope="col"> </th>
</tr>
</table>
Python | UTF-8 | 5,190 | 3.34375 | 3 | [] | no_license |
""" Pour une matrice (n,n) on cherchera comment placer k points
dans la matrice de telle sorte que tout point de la matrice soit
à une distance minimal d'un des points 'k'."""
from random import randint
import matplotlib.pyplot as plt
from time import time
import os
import copy
n = 75
M = [[(i,j) for j in range(n)] for i in range(n)]
densite = [[0 for i in range(n)] for j in range(n)] #ville De zéros (VDZ).
os.chdir('C:/Users/Théo/Documents/GitHub/TIPE')
def num_photo ():
mon_fichier = open('VARIABLE.txt','r')
Contenu = mon_fichier.read()
loop = int(Contenu)
mon_fichier.close()
mon_fichier_ecriture = open('VARIABLE.txt','w')
mon_fichier_ecriture.write(str(loop+1))
mon_fichier_ecriture.close()
return loop+1
def distance_a_la_diago(i,j):
return abs(i-j)
def distance_euclidienne(A,B):
return ((A[1]-B[1])**2+(A[0]-B[0])**2)**(0.5)
def distance_man(A,B):
return abs(A[1]-B[1]) + abs(A[0]-B[0])
def moyenne_de_liste(L): #Calcul de moyenne classique.
return 1/len(L) * sum(L[i] for i in range(len(L)))
def moyenne_aux_points(X,M,distance): #M est la matrice ville, X un point de la matrice
""" Cette fonction donne pour un point de la ville, la moyenne des distances à
l'ensemble des points de la ville (ici la ville est représentée par une matrice)"""
L = []
for i in range(n):
for j in range(n):
if [i,j] != X: #Le point X=(i,j) ne compte pas lui-même dans la moyenne.
L = L + [distance([i,j] ,X )]
return moyenne_de_liste(L)
def RechercheDeSolution (M,k,distance,nbr_test): #M une matrice, d une distance k nombre de points
"""On va faire la moyenne des moyennes des distances aux points de la matrice"""
#La distance maximale pour d est d = 2n pour manhatann
MeilleureConfiguration=[]
MeilleureMoyenne = -1
for J in range(nbr_test): #On test nbr_test fois
Points = [[randint(0,n),randint(0,n)]] #On met un point dans la liste
while len(Points) < k : #On veut mettre k points dans la liste
x,y = randint(0,n),randint(0,n)
if [x,y] not in Points :
Points.append([x,y])
#On a désormais une liste de points distincts que l'on stocke au cas ou elle serait cool
Moyennes=[moyenne_aux_points(Points[i],M,distance_man) for i in range(k)]
MoyenneGlobale = moyenne_de_liste(Moyennes)
if MoyenneGlobale < MeilleureMoyenne or MeilleureMoyenne < 0:
MeilleureMoyenne = MoyenneGlobale
MeilleureConfiguration = Points
if J%100 == 0: print(J)
return MeilleureConfiguration , MeilleureMoyenne
"""Résultats et problèmes:
On trouve pour k=3 et M : 10x10, trois points au centre de la matrice ([[4, 4], [5, 5], [4, 5]], 5.0)
ce qui est mathématiquement correct mais pas optimal dans la réalité, en effet toutes les casernes ne
seraient pas collées à un points du centre dans la réalité.. """
"""On introduit alors la notion d'interdistance entre les casernes,
distance minimale que devra avoir une caserne a n'importte quelle autre caserne."""
def Interdistance (X,ListeDePoints,interdistance,distance): #return un booléen
for point in ListeDePoints:
if distance(X,point) < interdistance : return False
return True
def RechercheDeSolution_V2 (M,k,distance, interdistance,nbr_test): #M une matrice, k nombre de points
"""On va faire la moyenne des moyennes des distances aux points de la matrice"""
#La distance maximale pour d est d = 2n pour manhatann
MeilleureConfiguration=[]
MeilleureMoyenne = -1
global n
for J in range(nbr_test+1): #On test nbr_test fois
Points = [[randint(0,n),randint(0,n)]] #On met un point dans la liste (initialisation)
while len(Points) < k : #On veut mettre k points dans la liste
x,y = randint(0,n),randint(0,n)
if [x,y] not in Points and Interdistance([x,y],Points,interdistance,distance_man) :
Points.append([x,y])
#On a désormais une liste de points distincts
Moyennes=[moyenne_aux_points(Points[i],M,distance_man) for i in range(k)]
MoyenneGlobale = moyenne_de_liste(Moyennes)
if MoyenneGlobale < MeilleureMoyenne or MeilleureMoyenne < 0:
MeilleureMoyenne = MoyenneGlobale
MeilleureConfiguration = Points
if J%10==0:print(J)
print(MeilleureConfiguration)
return MeilleureConfiguration , MeilleureMoyenne, interdistance,k,nbr_test
def Affichage_Sauvegarde(Liste):
global densite
global n
VDZ = copy.deepcopy(densite)
for Test in Liste:
for point in Test[0]:
i = point[0]
j = point[1]
VDZ[i][j] += 1
interdistance=Test[2];k=Test[3];nbr_test=Test[4] #oui, sortie de Recherche_V2
plt.imshow(VDZ, extent=[0,n,0,n], aspect="auto")
plt.title("n="+str(n)+",k="+str(k)+",inter="+str(interdistance)+',boucles='+str(nbr_test))
plt.savefig("Figures/"+str(num_photo())+'.png')
VDZ = copy.deepcopy(densite)
D = [RechercheDeSolution_V2(M,5,distance_man,35,5000) for i in range(1)]
print(D)
Affichage_Sauvegarde(D)
Markdown | UTF-8 | 1,818 | 3.0625 | 3 | ["Apache-2.0"] | permissive |
# Key Management
Hoard acts much like a conventional password store: we symmetrically encrypt the given data with its original hash, then store it at an address determined by the hash of the encrypted data. Compare this with a typical setup where a user's password is hashed and authentication is based on whether the user can prove knowledge of the hash; both systems require you to have some cognizance of the plaintext input.
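The scheme above can be sketched in a few lines of Python. This is purely illustrative: `toy_stream_cipher` is a stand-in rather than a real cipher, and none of these names are Hoard's actual API, but it shows how the plaintext hash doubles as the secret and how the ciphertext hash becomes the storage address.

```python
import hashlib

def toy_stream_cipher(key: bytes, data: bytes) -> bytes:
    # Deterministic XOR keystream derived from the key; a stand-in, NOT a real cipher.
    stream = bytearray()
    counter = 0
    while len(stream) < len(data):
        stream += hashlib.sha256(key + counter.to_bytes(8, "big")).digest()
        counter += 1
    return bytes(b ^ s for b, s in zip(data, stream))

def put(store: dict, data: bytes):
    secret = hashlib.sha256(data).digest()            # key derived from the plaintext itself
    ciphertext = toy_stream_cipher(secret, data)      # symmetric encryption step
    address = hashlib.sha256(ciphertext).hexdigest()  # stored at the hash of the encrypted data
    store[address] = ciphertext
    return secret, address                            # the "reference" needed to retrieve and decrypt

def get(store: dict, secret: bytes, address: str) -> bytes:
    return toy_stream_cipher(secret, store[address])  # the XOR stream is its own inverse

store = {}
reference = put(store, b"some document")
assert get(store, *reference) == b"some document"
```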
An obvious extension would be to build an integration to [Hashicorp Vault](https://www.vaultproject.io/), where Hoard would act as a back-end encrypted key-value store. However, it may also be possible to use it in isolation...
With symmetric grants we are able to encrypt this reference in a closed ecosystem with a password-like derivative. This means that we can readily share this object with all internal actors in plain sight of external entities. Alternatively, we can explicitly share it with a specified party using an asymmetric grant, which locks the original reference with their public key. Though we may consider defining this more formally in the future, this gives us one form of access control which allows us to securely share keys.
## Kubernetes
It is typical to manage a wide range of secrets in a Kubernetes environment, from API keys to cloud credentials and database logins. Bitnami's [Sealed-Secrets](https://github.com/bitnami-labs/sealed-secrets) addresses this issue: "I can manage all my K8s config in git, except Secrets." This system essentially allows an operator to encrypt a secret with the server's public key and commit it to version control. When the Custom Resource Definition (CRD) is created in the cluster, the server will decrypt it in-place. With access grants, we could easily achieve a similar level of functionality with Hoard.
C++ | UTF-8 | 8,907 | 2.6875 | 3 | ["MIT"] | permissive |
#include <algine/algine_renderer.h>
#include <GL/glew.h>
#include <algine/texture.h>
#include <algine/framebuffer.h>
#include <algine/renderbuffer.h>
namespace algine {
void pointer(const int location, const int count, const uint buffer, const uint stride, const void *offset) {
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glVertexAttribPointer(
location, // attribute location
count, // count (1, 2, 3 or 4)
GL_FLOAT, // type
GL_FALSE, // is normalized?
stride, // step
offset // offset
);
}
void pointerui(const int location, const int count, const uint buffer, const uint stride, const void *offset) {
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glVertexAttribIPointer(
location, // attribute location
count, // count (1, 2, 3 or 4)
GL_UNSIGNED_INT, // type
stride, // step
offset // offset
);
}
// if `inPosLocation` != -1, VAO will be created
void CubeRenderer::init(const int inPosLocation) {
// source: https://stackoverflow.com/questions/28375338/cube-using-single-gl-triangle-strip
static const float vertices[] = {
-1.0f, 1.0f, 1.0f, // Front-top-left
1.0f, 1.0f, 1.0f, // Front-top-right
-1.0f, -1.0f, 1.0f, // Front-bottom-left
1.0f, -1.0f, 1.0f, // Front-bottom-right
1.0f, -1.0f, -1.0f, // Back-bottom-right
1.0f, 1.0f, 1.0f, // Front-top-right
1.0f, 1.0f, -1.0f, // Back-top-right
-1.0f, 1.0f, 1.0f, // Front-top-left
-1.0f, 1.0f, -1.0f, // Back-top-left
-1.0f, -1.0f, 1.0f, // Front-bottom-left
-1.0f, -1.0f, -1.0f, // Back-bottom-left
1.0f, -1.0f, -1.0f, // Back-bottom-right
-1.0f, 1.0f, -1.0f, // Back-top-left
1.0f, 1.0f, -1.0f // Back-top-right
};
glGenBuffers(1, &cubeBuffer);
glBindBuffer(GL_ARRAY_BUFFER, cubeBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
if (inPosLocation == -1) return;
// create & configure VAO
glGenVertexArrays(1, &cubeVAO);
glBindVertexArray(cubeVAO);
glEnableVertexAttribArray(inPosLocation);
pointer(inPosLocation, 3, cubeBuffer);
glBindVertexArray(0);
}
void CubeRenderer::bindVAO() {
glBindVertexArray(cubeVAO);
}
// just calls `glDrawArrays(GL_TRIANGLE_STRIP, 0, 14)`
void CubeRenderer::drawCube() {
glDrawArrays(GL_TRIANGLE_STRIP, 0, 14);
}
void CubeRenderer::render() {
bindVAO();
drawCube();
}
void CubeRenderer::render(const int inPosLocation) {
glEnableVertexAttribArray(inPosLocation);
pointer(inPosLocation, 3, cubeBuffer);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 14);
glDisableVertexAttribArray(inPosLocation);
}
void CubeRenderer::render(const int programId, const int inPosLocation) {
glUseProgram(programId);
render(inPosLocation);
}
CubeRenderer::~CubeRenderer() {
glDeleteBuffers(1, &cubeBuffer);
glDeleteVertexArrays(1, &cubeVAO);
}
void QuadRenderer::init(const int inPosLocation, const int inTexCoordLocation) {
// creating buffers for quad rendering
static float
vertices[12] = {
-1.0f, 1.0f, 0.0f,
-1.0f, -1.0f, 0.0f,
1.0f, 1.0f, 0.0f,
1.0f, -1.0f, 0.0f
},
texCoords[8] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f
};
glGenBuffers(2, quadBuffers);
glBindBuffer(GL_ARRAY_BUFFER, quadBuffers[0]); // vertices
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, quadBuffers[1]); // texCoords
glBufferData(GL_ARRAY_BUFFER, sizeof(texCoords), texCoords, GL_STATIC_DRAW);
if (inPosLocation == -1 || inTexCoordLocation == -1) return;
// create & configure VAO
glGenVertexArrays(1, &quadVAO);
glBindVertexArray(quadVAO);
glEnableVertexAttribArray(inPosLocation);
glEnableVertexAttribArray(inTexCoordLocation);
pointer(inPosLocation, 3, quadBuffers[0]);
pointer(inTexCoordLocation, 2, quadBuffers[1]);
glBindVertexArray(0);
}
void QuadRenderer::bindVAO() {
glBindVertexArray(quadVAO);
}
// just calls `glDrawArrays(GL_TRIANGLE_STRIP, 0, 4)`
void QuadRenderer::drawQuad() {
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
void QuadRenderer::render() {
bindVAO();
drawQuad();
}
void QuadRenderer::render(const int inPosLocation, const int inTexCoordLocation) {
glEnableVertexAttribArray(inPosLocation);
glEnableVertexAttribArray(inTexCoordLocation);
pointer(inPosLocation, 3, quadBuffers[0]);
pointer(inTexCoordLocation, 2, quadBuffers[1]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableVertexAttribArray(inPosLocation);
glDisableVertexAttribArray(inTexCoordLocation);
}
void QuadRenderer::render(const int programId, const int inPosLocation, const int inTexCoordLocation) {
glUseProgram(programId);
render(inPosLocation, inTexCoordLocation);
}
QuadRenderer::~QuadRenderer() {
glDeleteBuffers(2, quadBuffers);
glDeleteVertexArrays(1, &quadVAO);
}
void AlgineRenderer::mainPass(const uint displayFBO) {
glBindFramebuffer(GL_FRAMEBUFFER, displayFBO);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
void AlgineRenderer::bloomSearchPass(const uint bsFBO, const uint image) {
bindFramebuffer(bsFBO);
bloomSearchShader->use();
texture2DAB(0, image);
quadRenderer->drawQuad();
}
void AlgineRenderer::blurPass(const uint pingpongFBO[2], const uint pingpongBuffers[2], const uint image, const uint blurAmount) {
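// Ping-pong blur: alternate between the two framebuffers, each pass sampling the other
// framebuffer's attachment (or the source image on the very first iteration).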
horizontal = true;
firstIteration = true;
for (usize i = 0; i < blurAmount; i++) {
blurShaders[horizontal]->use();
glBindFramebuffer(GL_FRAMEBUFFER, pingpongFBO[horizontal]);
texture2DAB(0, firstIteration ? image : pingpongBuffers[!horizontal]); // bloom
// rendering
quadRenderer->drawQuad();
horizontal = !horizontal;
if (firstIteration) firstIteration = false;
}
}
void AlgineRenderer::screenspacePass(const uint ssFBO, const uint colorMap, const uint normalMap, const uint ssrValuesMap, const uint positionMap) {
glBindFramebuffer(GL_FRAMEBUFFER, ssFBO);
ssrShader->use();
texture2DAB(0, colorMap);
texture2DAB(1, normalMap);
texture2DAB(2, ssrValuesMap);
texture2DAB(3, positionMap);
quadRenderer->drawQuad();
}
void AlgineRenderer::dofCoCPass(const uint cocFBO, const uint positionMap) {
bindFramebuffer(cocFBO);
dofCoCShader->use();
texture2DAB(0, positionMap);
quadRenderer->drawQuad();
}
#define _dofBlurPass(pingpongFBO, dofBuffers, blurAmount, code_tex_ab) \
horizontal = true; \
firstIteration = true; \
for (size_t i = 0; i < blurAmount; i++) { \
dofBlurShaders[horizontal]->use(); \
glBindFramebuffer(GL_FRAMEBUFFER, pingpongFBO[horizontal]); \
code_tex_ab \
/* rendering */ \
quadRenderer->drawQuad(); \
horizontal = !horizontal; \
if (firstIteration) firstIteration = false; \
}
// dofMap may be position map or coc map depending on the method you use
void AlgineRenderer::dofBlurPass(const uint pingpongFBO[2], const uint dofBuffers[2], const uint image, const uint dofMap, const uint blurAmount) {
_dofBlurPass(pingpongFBO, dofBuffers, blurAmount, {
texture2DAB(0, firstIteration ? image : dofBuffers[!horizontal]);
texture2DAB(1, dofMap);
})
}
void AlgineRenderer::dofBlurPass(const uint pingpongFBO[2], const uint dofBuffers[2], const uint image, const uint cocMap, const uint positionMap, const uint blurAmount) {
_dofBlurPass(pingpongFBO, dofBuffers, blurAmount, {
texture2DAB(0, firstIteration ? image : dofBuffers[!horizontal]);
texture2DAB(1, cocMap);
texture2DAB(2, positionMap);
})
}
#undef _dofBlurPass
void AlgineRenderer::doubleBlendPass(const uint image, const uint bloom) {
// Do not need to call glClear, because the texture is completely redrawn
blendShader->use();
texture2DAB(0, image);
texture2DAB(1, bloom);
quadRenderer->drawQuad();
}
void AlgineRenderer::blendPass(const uint texture0) {
// Do not need to call glClear, because the texture is completely redrawn
blendShader->use();
texture2DAB(0, texture0);
quadRenderer->drawQuad();
}
AlgineRenderer::~AlgineRenderer() {
#ifdef ALGINE_LOGGING
std::cout << "~AlgineRenderer() " << this << "\n";
#endif
}
}
Java | UTF-8 | 9,616 | 1.882813 | 2 | [] | no_license |
package database.dao.userpast;
import models.share.Share;
import models.userpast.UserPastUnit;
import play.Logger;
import play.db.DB;
import play.db.jpa.JPQL;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Time;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import config.LuppeItConstants;
/**
* Created with IntelliJ IDEA.
* User: farukkuscan
* Date: 11/21/12
* Time: 1:34 AM
* To change this template use File | Settings | File Templates.
*/
public class UserPastDAO {
/*
Query strings for UserPastDAO
*/
public static final String QUERY_GET_ADDED_TAGS_PAST = "SELECT uapv.action_parameter_id, uapv.parameter_value " +
"FROM user_action AS ua " +
"INNER JOIN user_action_parameter_value AS uapv ON ua.user_action_id = uapv.user_action_id " +
"WHERE " +
"ua.user_id = ? AND " +
"ua.action_id = ? AND " +
"uapv.action_parameter_id = ?";
public static final String QUERY_GET_LUPPED_SHARES_TAGS_PAST = "SELECT st.tag_id " +
"FROM user_action AS ua " +
"INNER JOIN user_action_parameter_value AS uapv ON ua.user_action_id = uapv.user_action_id " +
"INNER JOIN share_tag AS st ON uapv.parameter_value = st.share_id " +
"WHERE " +
"ua.user_id = ? AND " +
"ua.action_id = ? AND " +
"uapv.action_parameter_id = ?";
public static final String QUERY_GET_LUPPED_SHARES_RESOURCES_PAST = "SELECT rr.parent_resource_id " +
"FROM user_action AS ua " +
"INNER JOIN user_action_parameter_value AS uapv ON ua.user_action_id = uapv.user_action_id " +
"INNER JOIN share AS s ON uapv.parameter_value = s.share_id " +
"INNER JOIN rss_resource AS rr ON s.rss_resource_id = rr.rss_resource_id " +
"WHERE " +
"ua.user_id = ? AND " +
"ua.action_id = ? AND " +
"uapv.action_parameter_id = ?";
public static List<UserPastUnit> getAddedTagsPast(Integer userId) {
try {
PreparedStatement ps = DB.getConnection().prepareStatement(QUERY_GET_ADDED_TAGS_PAST);
ps.setInt(1, userId);
ps.setInt(2, LuppeItConstants.ACTION_ID_TAG_SHARE);
ps.setInt(3, LuppeItConstants.TAG_SHARE_EP_TAG_ID_PARAM);
List<Integer> tagIds = UserPastDAORowMapper.mapGetAddedTagsPast(ps.executeQuery());
List<UserPastUnit> userPastList = new ArrayList<UserPastUnit>();
for (Integer tagId: tagIds) {
int index = returnFoundIndex(userPastList, tagId);
if (index > -1) {
userPastList.get(index).setAmount(userPastList.get(index).getAmount() + 1);
} else {
UserPastUnit unit = new UserPastUnit();
unit.setId(tagId);
unit.setAmount(1);
userPastList.add(unit);
}
}
return userPastList;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public static int returnFoundIndex(List<UserPastUnit> userPastList, Integer id) {
for (int i = 0; i < userPastList.size(); i++) {
if (userPastList.get(i).getId().intValue() == id.intValue()) {
return i;
}
}
return -1;
}
public static List<UserPastUnit> getLuppedSharesTagsPast(Integer userId) {
try {
PreparedStatement ps = DB.getConnection().prepareStatement(QUERY_GET_LUPPED_SHARES_TAGS_PAST);
ps.setInt(1, userId);
ps.setInt(2, LuppeItConstants.ACTION_ID_LUPPE_SHARE);
ps.setInt(3, LuppeItConstants.LUPPE_SHARE_EP_SHARE_ID_PARAM);
List<Integer> tagIds = UserPastDAORowMapper.mapGetLuppedSharesTagsPast(ps.executeQuery());
List<UserPastUnit> userPastList = new ArrayList<UserPastUnit>();
for (Integer tagId: tagIds) {
int index = returnFoundIndex(userPastList, tagId);
if (index > -1) {
userPastList.get(index).setAmount(userPastList.get(index).getAmount() + 1);
} else {
UserPastUnit unit = new UserPastUnit();
unit.setId(tagId);
unit.setAmount(1);
userPastList.add(unit);
}
}
return userPastList;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public static List<UserPastUnit> getViewedSharesTagsPast(Integer userId) {
try {
PreparedStatement ps = DB.getConnection().prepareStatement(QUERY_GET_LUPPED_SHARES_TAGS_PAST);
ps.setInt(1, userId);
ps.setInt(2, LuppeItConstants.ACTION_ID_VIEW_SHARE);
ps.setInt(3, LuppeItConstants.VIEW_SHARE_EP_SHARE_ID_PARAM);
List<Integer> tagIds = UserPastDAORowMapper.mapGetLuppedSharesTagsPast(ps.executeQuery());
List<UserPastUnit> userPastList = new ArrayList<UserPastUnit>();
for (Integer tagId: tagIds) {
int index = returnFoundIndex(userPastList, tagId);
if (index > -1) {
userPastList.get(index).setAmount(userPastList.get(index).getAmount() + 1);
} else {
UserPastUnit unit = new UserPastUnit();
unit.setId(tagId);
unit.setAmount(1);
userPastList.add(unit);
}
}
return userPastList;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public static List<UserPastUnit> getDiggedSharesTagsPast(Integer userId) {
try {
PreparedStatement ps = DB.getConnection().prepareStatement(QUERY_GET_LUPPED_SHARES_TAGS_PAST);
ps.setInt(1, userId);
ps.setInt(2, LuppeItConstants.ACTION_ID_DIG_SHARE);
ps.setInt(3, LuppeItConstants.DIG_SHARE_EP_SHARE_ID_PARAM);
List<Integer> tagIds = UserPastDAORowMapper.mapGetLuppedSharesTagsPast(ps.executeQuery());
List<UserPastUnit> userPastList = new ArrayList<UserPastUnit>();
for (Integer tagId: tagIds) {
int index = returnFoundIndex(userPastList, tagId);
if (index > -1) {
userPastList.get(index).setAmount(userPastList.get(index).getAmount() + 1);
} else {
UserPastUnit unit = new UserPastUnit();
unit.setId(tagId);
unit.setAmount(1);
userPastList.add(unit);
}
}
return userPastList;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public static List<UserPastUnit> getLuppedSharesResourcesPast(Integer userId) {
try {
PreparedStatement ps = DB.getConnection().prepareStatement(QUERY_GET_LUPPED_SHARES_RESOURCES_PAST);
ps.setInt(1, userId);
ps.setInt(2, LuppeItConstants.ACTION_ID_LUPPE_SHARE);
ps.setInt(3, LuppeItConstants.LUPPE_SHARE_EP_SHARE_ID_PARAM);
List<Integer> resourceIds = UserPastDAORowMapper.mapGetLuppedSharesResourcesPast(ps.executeQuery());
List<UserPastUnit> userPastList = new ArrayList<UserPastUnit>();
for (Integer resourceId: resourceIds) {
int index = returnFoundIndex(userPastList, resourceId);
if (index > -1) {
userPastList.get(index).setAmount(userPastList.get(index).getAmount() + 1);
} else {
UserPastUnit unit = new UserPastUnit();
unit.setId(resourceId);
unit.setAmount(1);
userPastList.add(unit);
}
}
return userPastList;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public static List<UserPastUnit> getViewedSharesResourcesPast(Integer userId) {
try {
PreparedStatement ps = DB.getConnection().prepareStatement(QUERY_GET_LUPPED_SHARES_RESOURCES_PAST);
ps.setInt(1, userId);
ps.setInt(2, LuppeItConstants.ACTION_ID_VIEW_SHARE);
ps.setInt(3, LuppeItConstants.VIEW_SHARE_EP_SHARE_ID_PARAM);
List<Integer> resourceIds = UserPastDAORowMapper.mapGetLuppedSharesResourcesPast(ps.executeQuery());
List<UserPastUnit> userPastList = new ArrayList<UserPastUnit>();
for (Integer resourceId: resourceIds) {
int index = returnFoundIndex(userPastList, resourceId);
if (index > -1) {
userPastList.get(index).setAmount(userPastList.get(index).getAmount() + 1);
} else {
UserPastUnit unit = new UserPastUnit();
unit.setId(resourceId);
unit.setAmount(1);
userPastList.add(unit);
}
}
return userPastList;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public static List<UserPastUnit> getDiggedSharesResourcesPast(Integer userId) {
try {
PreparedStatement ps = DB.getConnection().prepareStatement(QUERY_GET_LUPPED_SHARES_RESOURCES_PAST);
ps.setInt(1, userId);
ps.setInt(2, LuppeItConstants.ACTION_ID_DIG_SHARE);
ps.setInt(3, LuppeItConstants.DIG_SHARE_EP_SHARE_ID_PARAM);
List<Integer> resourceIds = UserPastDAORowMapper.mapGetLuppedSharesResourcesPast(ps.executeQuery());
List<UserPastUnit> userPastList = new ArrayList<UserPastUnit>();
for (Integer resourceId: resourceIds) {
int index = returnFoundIndex(userPastList, resourceId);
if (index > -1) {
userPastList.get(index).setAmount(userPastList.get(index).getAmount() + 1);
} else {
UserPastUnit unit = new UserPastUnit();
unit.setId(resourceId);
unit.setAmount(1);
userPastList.add(unit);
}
}
return userPastList;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
}
Java | UTF-8 | 2,955 | 2.125 | 2 | [] | no_license |
package cn.xinzhili.chat.service;
import cn.xinzhili.chat.api.RoleType;
import cn.xinzhili.chat.bean.UserBean;
import cn.xinzhili.chat.client.UserServiceClient;
import cn.xinzhili.chat.util.UserFactory;
import cn.xinzhili.user.api.UserRole;
import cn.xinzhili.user.api.error.UserErrorCode;
import cn.xinzhili.user.api.response.PatientDetailResponse;
import cn.xinzhili.user.api.response.PatientRelationResponse;
import cn.xinzhili.user.api.response.StaffDetailResponse;
import cn.xinzhili.xutils.core.FailureException;
import cn.xinzhili.xutils.core.http.Response;
import java.util.Objects;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
@Service
public class UserService {
@Autowired
private UserServiceClient userServiceClient;
@Autowired
private RedisBasicService redisBasicService;
private final static String PUB_KEY = "USER";
public PatientRelationResponse getRelationByUserId(Long userId, RoleType roleType, Long orgId) {
Response<PatientRelationResponse> response = userServiceClient
.getPatientStaffRelationship(userId, getUserRoleByRoleType(roleType), orgId);
if (response.isSuccessful()) {
return response.getDataAs(PatientRelationResponse.class);
} else {
throw new FailureException(UserErrorCode.PATIENT_NO_BIND_MEDICAL_PERSON);
}
}
private UserRole getUserRoleByRoleType(RoleType roleType) {
switch (roleType) {
case DOCTOR:
return UserRole.DOCTOR;
case PATIENT:
return UserRole.PATIENT;
case OPERATOR:
return UserRole.OPERATOR;
case ASSISTANT:
return UserRole.ASSISTANT;
default:
return null;
}
}
public PatientDetailResponse getPatientBindInfos(Long patientId) {
Response<PatientDetailResponse> patientBy = userServiceClient.getPatientBy(patientId);
if (patientBy.isSuccessful()) {
return patientBy.getDataAs(PatientDetailResponse.class);
} else {
throw new FailureException("获取医患信息失败");
}
}
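// Cache-aside lookup: try Redis first under key (roleType + userId); on a miss, load the
// detail from the user service (patient or staff endpoint) and write it back to Redis.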
public UserBean getUserDetail(Long userId, RoleType roleType) {
UserBean user = (UserBean) redisBasicService.getRedis(PUB_KEY, roleType.name() + userId);
if (Objects.isNull(user)) {
if (roleType.equals(RoleType.PATIENT)) {
user = UserFactory.of(getPatientBindInfos(userId));
} else {
user = UserFactory.of(getDoctorDetail(userId));
}
redisBasicService.setRedis(PUB_KEY, roleType.name() + userId, user);
}
return user;
}
public StaffDetailResponse getDoctorDetail(Long userId) {
Response<StaffDetailResponse> response = userServiceClient
.findStaffByUserId(userId, false, null);
if (response.isSuccessful()) {
return response.getDataAs(StaffDetailResponse.class);
} else {
throw new FailureException("获取医患信息失败");
}
}
}
PHP | UTF-8 | 941 | 2.984375 | 3 | [] | no_license |
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Untitled Document</title>
</head>
<body>
<?php
class pekerjaan{
public function kerjaan(){
echo "Work address: Surabaya <br>";
echo "Company: PT. Aneka Usaha <br>";
echo "Company address: Surabaya <br>";
echo "Posisition: CEO <br>";
echo "NPWP: 7127312034000 <br>";
}
}
trait caleg {
public function caleg(){
echo "Partai: Nusa Bangsa <br>";
echo "No. Urut: 4 <br>";
echo "Dapil: jakarta <br>";
echo "Keluarga <br>";
echo "Istri: Martinah <br>";
echo "Anak 1: Louis<br>";
echo "Anak 2: Michael <br>";
echo "Anak 3: Jordan <br>";
}
}
class profile extends pekerjaan{
use caleg;
public function profil(){
echo "Nama: Mansyur <br>";
echo "HP: 0821238373 <br>";
echo "Gender: Laki-Laki <br>";
echo "Alamat: Jakarta <br>";
}
}
$test = new profile();
$test -> profil();
$test -> kerjaan();
$test -> caleg();
?>
</body>
</html>
Markdown | UTF-8 | 15,418 | 3.09375 | 3 | ["Apache-2.0"] | permissive |
---
navTitle: Upgrades
title: Application Schema Upgrades
originalFilePath: upgrades.md
---
In this chapter we discuss upgrading software on an EDB Postgres Distributed cluster and how
to minimize downtime for applications during the upgrade.
## Overview
An EDB Postgres Distributed cluster has two sets of software: the underlying PostgreSQL software
(or some flavor of it) and the PGLogical/BDR software. We discuss
upgrading either or both of these to their supported
major releases.
To upgrade an EDB Postgres Distributed cluster, the following steps need to be performed on
each node:
- plan the upgrade
- prepare for the upgrade
- upgrade the server software
- restart Postgres
- check and validate the upgrade
## Upgrade Planning
While BDR 3.6 release supports PostgreSQL 10 and 11 major versions, BDR
3.7 supports PostgreSQL 11, 12 and 13. Please refer to the [Compatibility matrix](/pgd/3.7/#compatibility-matrix)
for the full list of compatible software. Since BDR 3.7 supports newer
PostgreSQL releases, while upgrading from BDR 3.6 to BDR 3.7, it's also
possible to upgrade the newer PostgreSQL releases with minimum or no
application downtime.
There are broadly two ways to upgrade the BDR version.
- Upgrading one node at a time to the newer BDR version.
- Joining a new node running a newer version of the BDR software and
then optionally dropping one of the old nodes.
If you are only interested in upgrading the BDR software, any of the two
methods can be used. But if you also want to upgrade the PostgreSQL
version, then the second method must be used.
### Rolling Server Software Upgrades
A rolling upgrade is the process where the below [Server
Software Upgrade](#server-software-upgrade) is performed on each node in the
BDR Group one after another, while keeping the replication working.
An upgrade to 3.7 is only supported from 3.6, using a specific minimum
maintenance release (e.g. 3.6.25). Please consult the Release Notes
for the actual required minimum version. So if a node
is running with an older 3.6 release, it must first be upgraded to
the minimum and can only then be upgraded to 3.7.
Just as with a single-node database, it's possible to stop all nodes,
perform the upgrade on all nodes and only then restart the entire
cluster. This strategy of upgrading all nodes at the same time avoids
running with mixed BDR versions and therefore is the simplest, but
obviously incurs some downtime.
During the upgrade process, the application can be switched over to a node
which is currently not being upgraded to provide continuous availability of
the BDR group for applications.
While the cluster is going through a rolling upgrade, replication happens
between mixed versions of BDR3. For example, nodeA will have BDR 3.6.25, while
nodeB and nodeC will have 3.7.8. In this state, the replication and group
management will use the protocol and features from the oldest version (3.6.25
in case of this example), so any new features provided by the newer version
which require changes in the protocol will be disabled. Once all nodes are
upgraded to the same version, the new features are automatically enabled.
A EDB Postgres Distributed cluster is designed to be easily upgradeable. Most BDR releases
support rolling upgrades, which means running part of the cluster on one
release level and the remaining part of the cluster on a second, compatible,
release level.
A rolling upgrade starts with a cluster with all nodes at a prior release,
then proceeds by upgrading one node at a time to the newer release, until
all nodes are at the newer release. Should problems occur, do not attempt
to downgrade without contacting Technical Support to discuss and provide
options.
An upgrade process may take an extended period of time when the user decides
caution is required to reduce business risk, though this should not take any
longer than 30 days without discussion and explicit agreement from Technical
Support to extend the period of coexistence of two release levels.
In case of problems during upgrade, do not initiate a second upgrade to a
newer/different release level. Two upgrades should never occur concurrently
in normal usage. Nodes should never be upgraded to a third release without
specific and explicit instructions from Technical Support. A case where
that might occur is if an upgrade failed for some reason and a Hot Fix was
required to continue the current cluster upgrade process to successful
conclusion. BDR has been designed and tested with more than 2 release
levels, but this cannot be relied upon for production usage except in
specific cases.
### Rolling Upgrade Using Node Join
The other method of upgrading the BDR software, with or without upgrading
the underlying PostgreSQL major version, is to join a new node
to the cluster and later drop one of the existing nodes running
the older version of the software. Even with this method, some features
that are available only in the newer version of the software may remain
unavailable until all nodes are finally upgraded to the newer versions.
A new node running this release of BDR 3.7.8 can join a 3.6 cluster,
where each node in the cluster is running the latest 3.6.x version of
BDR. The joining node may run any of the supported PostgreSQL versions
11-13, but you must not mix the Standard and Enterprise editions.
If the older cluster is running a Standard Edition then it's recommended
that the new joining node should also run a Standard Edition. Similarly,
if the old cluster is running Enterprise Edition, the new joining node
should also run the Enterprise Edition.
Care must be taken to not use features that are available only in
the newer PostgreSQL versions 12-13, until all nodes are upgraded to the
same, newer release of PostgreSQL. This is especially true for any
new DDL syntax that may have been added to newer release of PostgreSQL.
Note that `bdr_init_physical` makes a byte-by-byte copy of the source node,
so it cannot be used while upgrading from one major PostgreSQL version
to another. In fact, `bdr_init_physical` currently requires that even the
BDR version of the source and the joining node be exactly the same, so
it cannot be used for rolling upgrades via the node-join method. In
all such cases, a logical join must be used.
### Upgrading a CAMO-Enabled cluster
CAMO protection requires at least one of the nodes of a CAMO pair to
be operational. For upgrades, we recommend to ensure that no CAMO
protected transactions are running concurrent to the upgrade, or to
use a rolling upgrade strategy, giving the nodes enough time to
reconcile in between the upgrades and the corresponding node downtime
due to the upgrade.
## Upgrade Preparation
BDR 3.7 contains several changes that may affect compatibility with
previous releases. These may affect the Postgres configuration and
deployment scripts, as well as applications using BDR. We recommend
reviewing them, and adjusting where necessary, in advance of the upgrade.
### Node Management
The `bdr.create_node_group()` function has seen a number of changes:
- It is now possible to create sub-groups, resulting in a tree-of-groups
structure of the whole EDB Postgres Distributed cluster. Monitoring views were updated
accordingly.
- The deprecated parameters `insert_to_update`, `update_to_insert`,
`ignore_redundant_updates`, `check_full_tuple` and `apply_delay` were
removed.
Use `bdr.alter_node_set_conflict_resolver()` instead of `insert_to_update`,
`update_to_insert`. The `check_full_tuple` is no longer needed as it is
handled automatically based on table conflict detection configuration.
### Conflicts
The configuration of conflict resolution and logging is now copied from
join source node to the newly joining node, rather than using defaults on the
new node.
The default conflict resolution for some of the conflict types was changed.
See [the default conflict resolvers](conflicts.md#default-conflict-resolvers) for the new defaults.
The conflict logging interfaces have changed from `bdr.alter_node_add_log_config`
and `bdr.alter_node_remove_log_config` to `bdr.alter_node_set_log_config`.
The default conflict logging table is now named `bdr.conflict_history` and the
old `bdr.apply_log` no longer exists. The new table is partitioned using the
new Autopartition feature of BDR 3.7.
All conflicts are now logged by default to both log file and the conflict
table.
Deprecated functions `bdr.row_version_tracking_enable()` and
`bdr.row_version_tracking_disable()` were removed. Use
`bdr.alter_table_conflict_detection()` instead.
Some of the configuration for conflict handling is no longer stored in
`pglogical` schema. Any diagnostic queries that were using the `pglogical`
tables directly will have to switch to appropriate tables in `bdr` schema.
Queries using `bdr.node_group`, `bdr.local_node_summary` or
`bdr.node_local_info` will need to use the new columns `sub_repsets` and
`pub_repsets` instead of `replication_sets`.
### Removed Or Renamed Settings (GUCs)
The setting `report_transaction_id` has been removed and is no longer
known to Postgres. It had been deprecated in the 3.6.x branch already
and the underlying functionality is enabled automatically when needed,
instead. So it's safe to remove `report_transaction_id` from your
configuration or reset it via `ALTER SYSTEM` even on 3.6.22 (and
newer). Otherwise, Postgres refuses to start after the upgrade and
will report an "unrecognized configuration parameter".
The GUC to enable CAMO has moved from Postgres to BDR and got renamed
from `pg2q.enable_camo` to `bdr.enable_camo`.
## Server Software Upgrade
The upgrade of BDR software on individual nodes happens in-place. There is no need for
backup and restore when upgrading the BDR extension.
The first step in the upgrade is to install the new version of the BDR packages, which
will install both the new binary and the extension SQL script. This step depends
on the operating system used.
### Restart Postgres
Upgrading the binary and extension scripts by itself does not upgrade BDR
in the running instance of PostgreSQL. To do that, the PostgreSQL instance
needs to be restarted so that the new BDR binary can be loaded (the BDR binary
is loaded at the start of the PostgreSQL server). After that, the node is
upgraded. The extension SQL upgrade scripts are executed automatically as
needed.
!!! Warning
It's important to never run the `ALTER EXTENSION ... UPDATE` command before the
PostgreSQL instance is restarted, as that will only upgrade the SQL-visible
extension but keep the old binary, which can cause unpredictable behaviour or
even crashes. The `ALTER EXTENSION ... UPDATE` command should never be needed;
BDR3 maintains the SQL-visible extension automatically as needed.
### Upgrade Check and Validation
After this procedure, your BDR node is upgraded. You can verify the current
version of BDR3 binary like this:
```sql
SELECT bdr.bdr_version();
```
The upgrade of BDR3 will usually also upgrade the version of pglogical 3
installed in the system. The current version of pglogical can be checked using:
```sql
SELECT pglogical.pglogical_version();
```
Always check the [monitoring](../monitoring) after upgrade
of a node to confirm that the upgraded node is working as expected.
## Database Encoding
We recommend using `UTF-8` encoding in all replicated databases.
BDR does not support replication between databases with different
encoding. There is currently no supported path to upgrade/alter encoding.
Similar to the upgrade of BDR itself, there are two approaches to
upgrading the application schema. The simpler option is to stop all
affected applications, perform the schema upgrade and restart the
applications, now upgraded to use the new schema variant. Again, this
imposes some downtime.
To eliminate this downtime, BDR offers ways to perform a rolling
application schema upgrade as documented in the following section.
## Rolling Application Schema Upgrades
By default, DDL will automatically be sent to all nodes. This can be
controlled manually, as described in [DDL Replication](../ddl), which
could be used to create differences between database schemas across nodes.
BDR is designed to allow replication to continue even while minor
differences exist between nodes. These features are designed to allow
application schema migration without downtime, or to allow logical
standby nodes for reporting or testing.
!!! Warning
Application Schema Upgrades are managed by the user, not by BDR.
Careful scripting will be required to make this work correctly
on production clusters. Extensive testing is advised.
Details of this are covered here
[Replicating between nodes with differences](../appusage).
When one node runs DDL that adds a new table, nodes that have not
yet received the latest DDL will need to cope with the extra table.
In view of this, the appropriate setting for rolling schema upgrades
is to configure all nodes to apply the `skip` resolver in case of a
`target_table_missing` conflict. This must be performed before any
node has additional tables added, and is intended to be a permanent
setting.
This is done with the following query, that must be **executed
separately on each node**, after replacing `node1` with the actual
node name:
```sql
SELECT bdr.alter_node_set_conflict_resolver('node1',
'target_table_missing', 'skip');
```
When one node runs DDL that adds a column to a table, nodes that have not
yet received the latest DDL will need to cope with the extra columns.
In view of this, the appropriate setting for rolling schema
upgrades is to configure all nodes to apply the `ignore` resolver in
case of a `target_column_missing` conflict. This must be performed
before one node has additional columns added and is intended to be a
permanent setting.
This is done with the following query, that must be **executed
separately on each node**, after replacing `node1` with the actual
node name:
```sql
SELECT bdr.alter_node_set_conflict_resolver('node1',
'target_column_missing', 'ignore');
```
When one node runs DDL that removes a column from a table, nodes that
have not yet received the latest DDL will need to cope with the missing column.
This situation will cause a `source_column_missing` conflict, which uses
the `use_default_value` resolver. Thus, columns that neither
accept NULLs nor have a DEFAULT value will require a two step process:
1. Remove NOT NULL constraint or add a DEFAULT value for a column
on all nodes.
2. Remove the column.
Constraints can be removed in a rolling manner.
There is currently no supported way for coping with adding table
constraints in a rolling manner, one node at a time.
When one node runs a DDL that changes the type of an existing column,
depending on the existence of binary coercibility between the current
type and the target type, the operation may not rewrite the underlying
table data. In that case, it will be only a metadata update of the
underlying column type. Rewrite of a table is normally restricted.
However, in controlled DBA environments, it is possible to change
the type of a column to an automatically castable one by adopting
a rolling upgrade for the type of this column in a non-replicated
environment on all the nodes, one by one. More details are provided in the
[ALTER TABLE](../ddl#alter-table) section.
PHP | UTF-8 | 865 | 2.78125 | 3 | ["MIT"] | permissive |
<?php
namespace Cheetahmail\Data\Subscribers;
class UnsubscribeByTisIdResponse
{
/**
* @var boolean $UnsubscribeByTisIdResult
*/
protected $UnsubscribeByTisIdResult = null;
/**
* @param boolean $UnsubscribeByTisIdResult
*/
public function __construct($UnsubscribeByTisIdResult)
{
$this->UnsubscribeByTisIdResult = $UnsubscribeByTisIdResult;
}
/**
* @return boolean
*/
public function getUnsubscribeByTisIdResult()
{
return $this->UnsubscribeByTisIdResult;
}
/**
* @param boolean $UnsubscribeByTisIdResult
* @return \Cheetahmail\Data\Subscribers\UnsubscribeByTisIdResponse
*/
public function setUnsubscribeByTisIdResult($UnsubscribeByTisIdResult)
{
$this->UnsubscribeByTisIdResult = $UnsubscribeByTisIdResult;
return $this;
}
}
Python | UTF-8 | 1,176 | 2.953125 | 3 | [] | no_license |
from django.db import models
class User(models.Model):
    '''This class simulates a user with a name, lastname and e-mail as attributes'''
    name = models.CharField(max_length=200)
    lastname = models.CharField(max_length=200)
    email = models.EmailField(max_length=70)  # unique=True

    def __str__(self):
        '''this function returns the name, lastname and email of the instance'''
        return self.name + " " + self.lastname + " " + self.email

class Occupation(models.Model):
    '''This class simulates an occupation with the occupation name as attribute'''
    occupation = models.CharField(max_length=200)

    def __str__(self):
        '''this function returns the occupation name of the instance'''
        return self.occupation

class Userhasoccupation(models.Model):
    '''This class simulates the connection between a user and an occupation.
    User id and occupation id are used as foreign keys to the database schema'''
    user_id = models.ForeignKey(User, on_delete=models.CASCADE)
    occup_id = models.ForeignKey(Occupation, on_delete=models.CASCADE)

    def __str__(self):
        '''this function returns the user and occupation ids of the instance'''
        return str(self.user_id) + " " + str(self.occup_id)
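# A minimal usage sketch (hypothetical data; assumes this app is installed and its
# migrations have been applied), e.g. from `python manage.py shell`:
#
#   user = User.objects.create(name="Ada", lastname="Lovelace", email="ada@example.com")
#   job = Occupation.objects.create(occupation="Mathematician")
#   link = Userhasoccupation.objects.create(user_id=user, occup_id=job)
#   print(link)  # -> "Ada Lovelace ada@example.com Mathematician"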
Python | UTF-8 | 626 | 2.765625 | 3 | [] | no_license |
num = int(input())
s = input()
oldNum = 0
cnt = 0
end = num - 1
chars = list(s)
flag, i = 0, 0
while i < end:
    if flag == 1:
        break
    for j in range(end, i - 1, -1):
        if i == j:
            if num % 2 == 0 or oldNum == 1:
                print("Impossible")
                flag = 1
                break
            oldNum = 1
            cnt += num // 2 - i
        if chars[i] == chars[j]:
            for k in range(j, end):
                t = chars[k]
                chars[k] = chars[k + 1]
                chars[k + 1] = t
                cnt += 1
            end -= 1
            break
    i += 1
if flag == 0:
    print(cnt)
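# Worked example: for input "3" and "aab", the single adjacent swap of positions 1 and 2
# yields the palindrome "aba", so the program prints 1.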
Markdown | UTF-8 | 3,124 | 2.53125 | 3 | [] | no_license |
# Data Narrative
This file describes how the data was curated into BIDS after it was de-identified.
For more details, see the commit history of this repository.
Note: In this special case, original data is stored in
`/cbica/projects/RBC/RBC_RAWDATA/bidsdatasets/HRC`
# Transfer Process
The data were acquired at [SITE] as part of [STUDY] study [CITATION]. Access was given
to PennLINC as part of the Reproducible Brain Chart project [CITATION]. The data was
transferred from [SITE] to PMACs using [Globus](https://www.globus.org/). After
this, the data was transferred from PMACs to CUBIC. The approximate date of
transfer was late January 2021. In sum, PennLINC received approximately 108 GB
comprising of 608 subjects and 905 individual sessions (with session labels
ranging from `ses-1` to `ses-2`). The imaging data consisted of anatomical
(`T1w`), functional (`BOLD`), and diffusion (`dwi`, `bval`, `bvec`). Task
data consisted of `rest` scans.
No additional cohort data was provided at this time of transfer. The imaging data was
organized in BIDS and anonymized at the time of transfer. The raw data was copied to `/cbica/projects/RBC/HRC/working/BIDS`
for BIDS curation and acquisition group testing, where it was checked into `datalad`. See [this notebook](SubjectsSessionsModalities.ipynb)
for a code walkthrough of this investigation.
# BIDS Curation
The data was partially curated before the beginning of this git history. Using the titles of the outputs of the validate tool, the change history of BIDS can still be pieced together.
---
On 01/27/2021, the following errors were found:
Code 55 — JSON_SCHEMA_VALIDATION_ERROR: One subject (sub-20765 ses-2)
Code 1 — NOT_INCLUDED: Multiple cases
Code 50 — TASK_NAME_MUST_DEFINE: Multiple cases
Code 67 — NO_VALID_DATA_FOUND_FOR_SUBJECT: Multiple cases
Code 55 was a case where the Slice Timing had negative values in run 1 of the resting state scan; we removed this scan from the session.
Code 1 was resolved by reordering the entities in the BIDS filenames.
Code 50 was resolved by adding the task `Rest` to each corresponding JSON file.
Code 67 was resolved by removing sessions which were clearly incomplete/unusable data.
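For illustration, the Code 50 fix amounts to something like the sketch below (the exact script, file paths and casing of the task label used at the time are assumptions, not a record of what was run):

```python
import json
from pathlib import Path

bids_root = Path("/cbica/projects/RBC/HRC/working/BIDS")  # working copy mentioned above
for sidecar in bids_root.glob("sub-*/ses-*/func/*task-rest*_bold.json"):
    metadata = json.loads(sidecar.read_text())
    if "TaskName" not in metadata:
        metadata["TaskName"] = "rest"
        sidecar.write_text(json.dumps(metadata, indent=2))
```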
---
On 02/11/2021, the final remaining BIDS validation issue was:
Code 39 — INCONSISTENT_PARAMETERS: Multiple cases
We tolerate this error as it is expected to appear as there may be multiple scanning parameters available across similar runs.
In [this notebook](UnderstandingRunNaming.ipynb) we found that there were multiple images per session with incremental runs. We learned from the study coordinators that this was due to motion, and that some scans had to subsequently be re-run. We decided to keep functional data where this may have happened, but to only maintain a single T1w image. Samples of these were inspected by hand, and it was decided the best strategy would be to remove the old T1w images and keep only 1 T1w (the most recent) per session. See [this notebook](RemovingExtraT1ws.ipynb) for code.
The final validation run is from [03/03/2021](hrc_validation_03-03-21_validation.csv).
Markdown | UTF-8 | 4,191 | 2.59375 | 3 | [] | no_license |
# Ch9 - Labels and Legends
```r
library(lattice)
```
Topics covered:
- Labels (main, sub, xlab, ylab)
- Legends, color keys
- Legends in grouped displays; auto.key
- Dropping unused levels of grouping variable
- Page annotation
```r
data(Cars93, package = "MASS")
table(Cars93$Cylinders)
```
```
##
## 3 4 5 6 8 rotary
## 3 49 2 31 7 1
```
```r
sup.sym <- Rows(trellis.par.get("superpose.symbol"), 1:5)
str(sup.sym)
```
```
## List of 6
## $ alpha: num [1:5] 1 1 1 1 1
## $ cex : num [1:5] 0.8 0.8 0.8 0.8 0.8
## $ col : chr [1:5] "#0080ff" "#ff00ff" "darkgreen" "#ff0000" ...
## $ fill : chr [1:5] "#CCFFFF" "#FFCCFF" "#CCFFCC" "#FFE5CC" ...
## $ font : num [1:5] 1 1 1 1 1
## $ pch : num [1:5] 1 1 1 1 1
```
## Figure 9.1
```r
xyplot(Price ~ EngineSize | reorder(AirBags, Price), data = Cars93,
groups = Cylinders, subset = Cylinders != "rotary",
scales = list(y = list(log = 2, tick.number = 3)),
xlab = "Engine Size (litres)",
ylab = "Average Price (1000 USD)",
key = list(text = list(levels(Cars93$Cylinders)[1:5]),
points = sup.sym, space = "right"))
```
## Figure 9.1 (alternative, using auto.key)
```r
xyplot(Price ~ EngineSize | reorder(AirBags, Price), data = Cars93,
groups = Cylinders, subset = Cylinders != "rotary",
scales = list(y = list(log = 2, tick.number = 3)),
xlab = "Engine Size (litres)",
ylab = "Average Price (1000 USD)",
auto.key = list(text = levels(Cars93$Cylinders)[1:5],
space = "right", points = TRUE))
```
## Figure 9.1 (yet another alternative, using drop=TRUE)
```r
xyplot(Price ~ EngineSize | reorder(AirBags, Price),
data = subset(Cars93, Cylinders != "rotary"),
groups = Cylinders[, drop = TRUE],
scales = list(y = list(log = 2, tick.number = 3)),
xlab = "Engine Size (litres)",
ylab = "Average Price (1000 USD)",
auto.key = list(space = "right"))
```
```r
my.pch <- c(21:25, 20)
my.fill <- c("transparent", "grey", "black")
```
## Figure 9.2
```r
with(Cars93,
xyplot(Price ~ EngineSize,
scales = list(y = list(log = 2, tick.number = 3)),
panel = function(x, y, ..., subscripts) {
pch <- my.pch[Cylinders[subscripts]]
fill <- my.fill[AirBags[subscripts]]
panel.xyplot(x, y, pch = pch,
fill = fill, col = "black")
},
key = list(space = "right", adj = 1,
text = list(levels(Cylinders)),
points = list(pch = my.pch),
text = list(levels(AirBags)),
points = list(pch = 21, fill = my.fill),
rep = FALSE)))
```
```r
hc1 <- hclust(dist(USArrests, method = "canberra"))
hc1 <- as.dendrogram(hc1)
ord.hc1 <- order.dendrogram(hc1)
hc2 <- reorder(hc1, state.region[ord.hc1])
ord.hc2 <- order.dendrogram(hc2)
library(latticeExtra)
```
```
## Loading required package: RColorBrewer
```
```r
region.colors <- trellis.par.get("superpose.polygon")$col
```
## Figure 9.3
```r
levelplot(t(scale(USArrests))[, ord.hc2],
scales = list(x = list(rot = 90)),
colorkey = FALSE,
legend =
list(right =
list(fun = dendrogramGrob,
args =
list(x = hc2, ord = ord.hc2,
side = "right", size = 10, size.add = 0.5,
add = list(rect =
list(col = "transparent",
fill = region.colors[state.region])),
type = "rectangle"))))
```
---
title: "ch9.R"
author: "takanori"
date: "Thu Nov 3 20:49:40 2016"
---
Java | UTF-8 | 1,735 | 2.375 | 2 | [] | no_license |
package com.nearsoft.training.library.model;
import java.io.Serializable;
import java.time.LocalDate;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.UniqueConstraint;
@Entity(name = "books_by_user")
@Table(uniqueConstraints = @UniqueConstraint(columnNames = {
"isbn", "curp"
}))
public class BooksByUser implements Serializable {
private static final long serialVersionUID = 688441185690791172L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private long id;
private String isbn;
private String curp;
@Column(name = "borrow_date")
private LocalDate borrowDate;
public String getIsbn() {
return isbn;
}
public void setIsbn(String isbn) {
this.isbn = isbn;
}
public String getCurp() {
return curp;
}
public void setCurp(String curp) {
this.curp = curp;
}
public LocalDate getBorrowDate() {
return borrowDate;
}
public void setBorrowDate(LocalDate borrowDate) {
this.borrowDate = borrowDate;
}
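// Equality mirrors the (isbn, curp) unique constraint declared on the table, so two loan
// records for the same book and borrower compare equal regardless of their generated id.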
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BooksByUser that = (BooksByUser) o;
return Objects.equals(isbn, that.isbn) &&
Objects.equals(curp, that.curp);
}
@Override
public int hashCode() {
return Objects.hash(isbn, curp);
}
}
C++ | UTF-8 | 7,448 | 2.71875 | 3 | [] | no_license |
#include "widget.h"
#include "ui_widget.h"
#include "nmmatrix.h"
#include <ctime>
/* Init properties */
Widget::Widget(QWidget *parent):QWidget(parent), ui(new Ui::Widget) {
lastX = 0;
lastY = 0;
alpha = 0;
beta = 0;
prismN = 3;
prismR = 4;
prismH = 4;
pyramidR = 4;
pyramidH = 4;
selectedFigure = PRISM;
ui->setupUi(this);
}
Widget::~Widget() {
delete ui;
}
NMVector prismPoint(double phi, double prismR, double prismH) {
NMVector result = NMVector();
result.x = prismR * cos(phi);
result.y = prismR * sin(phi);
result.z = prismH;
return result;
}
NMVector pyramidPoint(double phi, double pyramidR, double pyramidH) {
NMVector result = NMVector();
result.x = pyramidR * cos(phi);
result.y = pyramidR * sin(phi);
result.z = pyramidH;
return result;
}
void Widget::paintEvent(QPaintEvent *) {
double max;
if (selectedFigure == PRISM) {
max = std::max(1.5 * prismH,
1.5 * prismR);
}
else {
max = std::max(1.5 * pyramidH,
1.5 * pyramidR);
}
double scale = std::min(height() / (2 * max),
width() / (2 * max));
QPainter painter(this);
QVector<NMVector> points;
NMMatrix scaleMatrix = NMMatrix();
NMMatrix resMatrix = NMMatrix();
/* Screen center */
NMVector centerPoint = NMVector(width() / 2.0,
height() / 2.0 + 30,
1,
1);
NMVector normal = NMVector();
NMVector camPoint = NMVector(0, 0, -1, 1);
painter.setRenderHint(QPainter::Antialiasing, true);
NMMatrix XZMatrix = NMMatrix();
NMMatrix YZMatrix = NMMatrix();
XZMatrix.rotateXZ(beta);
YZMatrix.rotateYZ(alpha);
scaleMatrix.setScale(scale);
resMatrix = XZMatrix * YZMatrix;
bool bottom = false;
bool top = false;
if (selectedFigure == PRISM) {
double step = 2 * M_PI / prismN;
for (double phi = 0; phi < 2 * M_PI; phi += step) {
points.push_back(prismPoint(phi, prismR, -prismH / 2));
points.push_back(prismPoint(phi + step, prismR, -prismH / 2));
points.push_back(prismPoint(phi + step, prismR, prismH / 2));
points.push_back(prismPoint(phi, prismR, prismH / 2));
}
int size = points.size();
for (int i = 0; i < size; i++) {
points[i] = scaleMatrix * points[i];
points[i] = resMatrix * points[i];
points[i] = points[i] + centerPoint;
}
normal = NMVector::crossProduct(points[5] - points[4], points[1] - points[0]);
if (NMVector::dotProduct(normal, camPoint) >= 0) {
bottom = true;
}
normal = NMVector::crossProduct(points[3] - points[2], points[7] - points[6]);
if (NMVector::dotProduct(normal, camPoint) >= 0) {
top = true;
}
for (int i = 0; i < size; i += 4) {
normal = NMVector::crossProduct(points[i + 1] - points[i],
points[i + 3] - points[i]);
if (NMVector::dotProduct(normal, camPoint) >= 0) {
painter.drawLine(points[i].x, points[i].y, points[i + 1].x, points[i + 1].y);
painter.drawLine(points[i + 1].x, points[i + 1].y, points[i + 2].x, points[i + 2].y);
painter.drawLine(points[i + 2].x, points[i + 2].y, points[i + 3].x, points[i + 3].y);
painter.drawLine(points[i + 3].x, points[i + 3].y, points[i].x, points[i].y);
}
if (bottom) {
painter.drawLine(points[i].x, points[i].y, points[i + 1].x, points[i + 1].y);
}
if (top) {
painter.drawLine(points[i + 2].x, points[i + 2].y, points[i + 3].x, points[i + 3].y);
}
}
}
else if (selectedFigure == PYRAMID) {
double step = 2 * M_PI / 6;
for(double phi = 0; phi < 2 * M_PI; phi += step) {
points.push_back(pyramidPoint(phi, pyramidR, -pyramidH / 2));
points.push_back(pyramidPoint(phi + step, pyramidR, -pyramidH / 2));
points.push_back(pyramidPoint(phi + step / 2, 0, pyramidH / 2));
}
for(double phi = 0; phi < 2 * M_PI; phi += step) {
points.push_back(pyramidPoint(phi + step, pyramidR, -pyramidH / 2));
points.push_back(pyramidPoint(phi, pyramidR, -pyramidH / 2));
points.push_back(pyramidPoint(phi + step / 2, 0, -pyramidH / 2));
}
int size = points.size();
for (int i = 0; i < size; i++) {
points[i] = scaleMatrix * points[i];
points[i] = resMatrix * points[i];
points[i] = points[i] + centerPoint;
}
for (int i = 0; i < size; i += 3) {
normal = NMVector::crossProduct(points[i + 1] - points[i],
points[i + 2] - points[i]);
if(NMVector::dotProduct(normal, camPoint) >= 0) {
painter.drawLine(points[i].x, points[i].y, points[i + 1].x, points[i + 1].y);
painter.drawLine(points[i + 1].x, points[i + 1].y, points[i + 2].x, points[i + 2].y);
painter.drawLine(points[i + 2].x, points[i + 2].y, points[i].x, points[i].y);
}
}
}
/* Draw axis */
if (showAxis) {
QVector<NMVector> axis;
axis.push_back(NMVector(0, 0, 0, 1));
axis.push_back(NMVector(10, 0, 0, 1));
axis.push_back(NMVector(0, 10, 0, 1));
axis.push_back(NMVector(0, 0, 10, 1));
axis[0] = axis[0] + centerPoint;
for (int i = 1; i < 4; i++) {
axis[i] = scaleMatrix * axis[i];
axis[i] = resMatrix * axis[i];
axis[i] = axis[i] + centerPoint;
}
painter.drawLine(axis[0].x, axis[0].y, axis[1].x, axis[1].y);
painter.drawLine(axis[0].x, axis[0].y, axis[2].x, axis[2].y);
painter.drawLine(axis[0].x, axis[0].y, axis[3].x, axis[3].y);
}
}
void Widget::mousePressEvent(QMouseEvent *mEvent) {
lastX = mEvent->x();
lastY = mEvent->y();
}
void Widget::mouseMoveEvent(QMouseEvent *mEvent) {
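    // Turn the mouse drag into rotation angles; the constant 111 sets the drag sensitivity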
beta -= (mEvent->x() - lastX) / 111;
alpha += (mEvent->y() - lastY) / 111;
lastX = mEvent->x();
lastY = mEvent->y();
update();
}
void Widget::on_quitButton_clicked() {
close();
}
void Widget::on_prismNParameter_valueChanged(int newValue) {
prismN = newValue;
update();
}
void Widget::on_prismRParameter_valueChanged(double newValue) {
prismR = newValue;
update();
}
void Widget::on_prismHParameter_valueChanged(double newValue) {
prismH = newValue;
update();
}
void Widget::on_pyramidRParameter_valueChanged(double newValue) {
pyramidR = newValue;
update();
}
void Widget::on_pyramidHParameter_valueChanged(double newValue) {
pyramidH = newValue;
update();
}
void Widget::on_tabWidget_currentChanged(int index) {
if (index == 0) {
selectedFigure = PRISM;
}
else if (index == 1) {
selectedFigure = PYRAMID;
}
else {
qDebug() << "Figure selection error." << endl;
}
update();
}
void Widget::on_showAxis_toggled(bool checked) {
if (checked) {
showAxis = true;
}
else {
showAxis = false;
}
update();
}
|
Python
|
UTF-8
| 31,473 | 2.65625 | 3 |
[] |
no_license
|
import os
from datetime import datetime
from math import sqrt
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import preprocessing, metrics
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.metrics.pairwise import cosine_similarity
from pycaret.regression import *
from difflib import SequenceMatcher
from tkinter import *
from tkinter import filedialog
from tkinter import messagebox
from tkinter.ttk import Combobox
from pandastable import Table, config
# Constants with the weights given to the two models
SIMILARITY_IMPORTANCE = 0.99
VALORATION_IMPORTANCE = 0.01
# Minimum signing success rate for a recommendation to be considered relevant
RELEVANT_SUCCESS_RATE = 0.85
# Minimum number of matches for a player to be considered a key player and
# therefore be taken into account when evaluating the recommendations
MINIMUM_MATCHES_KEY_PLAYER = 15
# Top K of recommended players used during the evaluation
# For now take all of them
TOP_K_EVALUATION = 3000 # 500
# Global variable for the dataset
data = None
# Global variable for the path of the loaded dataset
dataPath = None
# Function that, given a player's name and squad, finds the closest match by string similarity and returns its id
def findPlayerId(df, playerName, playerSquad):
df_aux = df.copy()
df_aux["Team_dist"] = df_aux.apply(
lambda row: SequenceMatcher(None, playerSquad, row["Squad"]).ratio(), axis=1)
team = df_aux.iloc[df_aux["Team_dist"].argmax()]["Squad"]
df_aux["Player_dist"] = df_aux.apply(
lambda row: SequenceMatcher(None, playerName, row["Player"]).ratio(), axis=1)
player = df_aux.iloc[df_aux["Player_dist"].argmax()]["Player"]
id = df_aux.index[(df_aux['Squad'] == team) & (df_aux['Player'] == player)].values.astype(int)[0]
return team, player, id
# Function that, given a player, returns its id
def getPlayerId(df, playerName, playerSquad):
id = df.index[(df['Squad'] == playerSquad) & (df['Player'] == playerName)].values.astype(int)[0]
return id
# TODO: function that, given an id, returns the player
# Function that, given the data of a season, computes the distances between players
# using the cosine distance
def create_sim_matrix(df):
df_aux = df.copy()
    # Drop attributes that are not relevant for comparing players
    df_aux.drop(["Player", "Comp", "Squad", "Nation", "Born", "Age", "Value"], axis=1, inplace=True)
    # TODO: instead of dropping attributes, different attributes could be chosen per player position
    # Fill nulls with zeros (if there are any)
    df_aux.fillna(0, inplace=True)
    # Create a dedicated column per position for a better search
    df_aux = pd.get_dummies(df_aux, columns=['Pos'])
    # Normalise all features between 0 and 1
    scaler = preprocessing.MinMaxScaler()
    d = scaler.fit_transform(df_aux)
    df_aux = pd.DataFrame(d, columns=df_aux.columns)
    # Build the similarity matrix with the cosine distance
cosine_sim_matrix = cosine_similarity(df_aux)
return cosine_sim_matrix
# Function that, given a matrix with the distances between players,
# returns the k most similar to a given one
def top_k_similares(cosine_sims, df, playerId, k):
    # Sort player ids by similarity
    full_sorted_sim_list_by_playerId = np.argsort(-cosine_sims, axis=1)
    # Sort by similarity value
    full_sorted_sim_list_by_cosine = -np.sort(-cosine_sims, axis=1)
    # Keep only the values of the requested player
    sorted_sim_list_by_playerId = full_sorted_sim_list_by_playerId[playerId]
    sorted_sim_list_by_cosine = full_sorted_sim_list_by_cosine[playerId]
    # Take only the first K
    top_k = sorted_sim_list_by_playerId[1:k+1]
    cosine_top_k = sorted_sim_list_by_cosine[1:k+1]
    # Prepare the output taking all the columns of the original dataset,
    # but only for the k closest players, adding the similarity percentage
top_k_df = df.iloc[top_k].copy()
top_k_df["Similarity%"] = cosine_top_k
return top_k_df
# Helper function for the player value calculation that creates
# the position attribute columns so that the trained
# predictive model can be used
def create_position_cols(df):
df_aux = df.copy()
df_aux = pd.get_dummies(df_aux, columns=['Pos'])
if "Pos_GK" not in df_aux.columns:
df_aux["Pos_GK"] = 0
if "Pos_DF" not in df_aux.columns:
df_aux["Pos_DF"] = 0
if "Pos_MF" not in df_aux.columns:
df_aux["Pos_MF"] = 0
if "Pos_FW" not in df_aux.columns:
df_aux["Pos_FW"] = 0
return df_aux
# Helper function for the player value calculation that creates
# the league (competition) attribute columns so that the trained
# predictive model can be used
def create_competition_cols(df):
df_aux = df.copy()
df_aux = pd.get_dummies(df_aux, columns=['Comp'])
if "Comp_La Liga" not in df_aux.columns:
df_aux["Comp_La Liga"] = 0
if "Comp_Bundesliga" not in df_aux.columns:
df_aux["Comp_Bundesliga"] = 0
if "Comp_Premier League" not in df_aux.columns:
df_aux["Comp_Premier League"] = 0
if "Comp_Ligue 1" not in df_aux.columns:
df_aux["Comp_Ligue 1"] = 0
if "Comp_Serie A" not in df_aux.columns:
df_aux["Comp_Serie A"] = 0
return df_aux
# Function that computes the negative / positive valuation of a player
# with respect to his real market value. The value is always between -1 and 1:
# -1 --> overvalued player
# 1 --> undervalued player
def calculate_valoration(player):
    # Some players have no market value available
if player["Value"] == 0:
desv = 1
else:
desv = player["Predicted Value"] / player["Value"]
if desv >= 1:
val = 1
elif desv < -1:
val = -1
else:
val = desv - 1
# if desv < -1 or desv > 1:
# val = desv
# else:
# val = desv - 1
return val
def formatValue(x):
return "€{:,.0f}".format(int(x))
# Function that computes the value of the given players using the loaded predictive model,
# returning their predicted value and their positive or negative valuation
def calculate_market_value(model, df):
df_aux = df.copy()
    # Prepare the data so it can be fed to the trained model
df_aux = create_position_cols(df_aux)
df_aux = create_competition_cols(df_aux)
df_aux.dropna(subset=['Value'], inplace=True)
df_aux.fillna(0, inplace=True)
    # Compute the player's value based on past data
preds = predict_model(model, data=df_aux)
preds["Predicted Value"] = np.exp(preds["Label"])
preds["Valoration"] = preds.apply(lambda row: calculate_valoration(row), axis=1)
    # Format the market value fields
preds["Value"] = preds["Value"].apply(formatValue)
preds["Predicted Value"] = preds["Predicted Value"].apply(formatValue)
return preds
# Signing success rate = Similarity * SIMILARITY_IMPORTANCE + (Predicted Value / Value - 1) * VALORATION_IMPORTANCE
def calculate_success_rate(player):
# if player["Valoration"] >= 1:
# val = 1
# elif player["Valoration"] < -1:
# val = -1
# else:
# val = player["Valoration"]
# return player["Similarity%"] * SIMILARITY_IMPORTANCE + val * VALORATION_IMPORTANCE
return player["Similarity%"] * SIMILARITY_IMPORTANCE + player["Valoration"] * VALORATION_IMPORTANCE
def undumify_position(player):
position = ""
if player["Pos_GK"]:
position = "GK"
elif player["Pos_DF"]:
position = "DF"
elif player["Pos_MF"]:
position = "MF"
elif player["Pos_FW"]:
position = "FW"
return position
def getSimilarPlayers(df, playerId, k):
# print("start getSimilarPlayers - " + str(datetime.now()))
    # Build the similarity matrix
cos_sims = create_sim_matrix(df)
# print("after create_sim_matrix - " + str(datetime.now()))
    # Get the list of similar players
df_top_k = top_k_similares(cos_sims, df, playerId, k)
# print("after top_k_similares - " + str(datetime.now()))
    # Load the market value predictive model
Pred_Value_model = load_model('./Models/model_210810', verbose=False)
# print("after load_model - " + str(datetime.now()))
    # Predict the valuation
df_top_k_value = calculate_market_value(Pred_Value_model, df_top_k)
# print("after calculate_market_value - " + str(datetime.now()))
    # Compute the success rate (weighting similarity + valuation)
df_top_k_value["Success%"] = df_top_k_value.apply(lambda row: calculate_success_rate(row), axis=1)
    # Re-create the position column
# df_top_k_value["Pos"] = df_top_k_value.apply(lambda row: undumify_position(row), axis=1)
    # Sort
final_recommendations = df_top_k_value.sort_values(["Success%"], ascending=False)[["Player", "Squad", "Similarity%", "Value", "Predicted Value", "Valoration", "Success%"]]
# final_recommendations = df_top_k_value.sort_values(["Success%"], ascending=False)[
# ["Player", "Pos", "Squad", "Similarity%", "Value", "Predicted Value", "Valoration", "Success%"]]
# print("exit getSimilarPlayers - " + str(datetime.now()))
return final_recommendations
# This method computes the relevance of only the starting players of the same position
# and returns the one with the highest relevance
def calculate_relevance_old(dfStats, playerIn, squadIn, positionIn, top_k, max_success):
relevant = False
max_successRate = 0
max_successRatePlayer = ""
# Hacer filtro en los datos del año anterior por club y posicion de antes
# Y que sean de los que más juegan (mas de n partidos completos por año)
playersOut = dfStats.loc[(dfStats["Squad"] == squadIn) & (dfStats["Pos"] == positionIn) &
(dfStats["Min/90"] > MINIMUM_MATCHES_KEY_PLAYER)]
# playersOut = dfStats.loc[(dfStats["Squad"] == squadIn) &
# (dfStats["Min/90"] > MINIMUM_MATCHES_KEY_PLAYER)]
if max_success:
# Recorrer todos los jugadores encontrados y hacer recomendaciones sobre ellos
for j in range(len(playersOut)):
# Recoger nombre y covertirlo a ID
playerOut = playersOut.iloc[j]["Player"]
playerOutId = getPlayerId(dfStats, playerOut, squadIn)
df_rec = getSimilarPlayers(dfStats, playerOutId, top_k)
# Buscar jugador en las recomendaciones
playerFound = df_rec.loc[df_rec["Player"] == playerIn]
if len(playerFound) > 0:
successRate = playerFound["Success%"].values[0]
# Si se supero el maximo conseguido hasta ahora
if successRate > max_successRate:
max_successRate = successRate
max_successRatePlayer = playerOut
# Si el valor de acierto de fichaje es superior a X, se considera relevante
if max_successRate > RELEVANT_SUCCESS_RATE:
relevant = True
print("YES (" + max_successRatePlayer + " - " + str(round(max_successRate, 2)) + ")")
else:
# Recorrer todos los jugadores encontrados y hacer recomendaciones sobre ellos
for j in range(len(playersOut)):
# Recoger nombre y covertirlo a ID
playerOut = playersOut.iloc[j]["Player"]
playerOutId = getPlayerId(dfStats, playerOut, squadIn)
df_rec = getSimilarPlayers(dfStats, playerOutId, top_k)
# Buscar jugador en las recomendaciones
playerFound = df_rec.loc[df_rec["Player"] == playerIn]
if len(playerFound) > 0:
successRate = playerFound["Success%"].values[0]
# Si el valor de acierto de fichaje es superior a X, se considera relevante
if successRate > RELEVANT_SUCCESS_RATE:
                    max_successRate = successRate
max_successRatePlayer = playerOut
relevant = True
print("YES (" + playerOut + " - " + str(round(successRate, 2)) + ")")
break
if not relevant:
print("NO")
return relevant, max_successRatePlayer, max_successRate
# This method computes the relevance for all the starting players, returning a table
# with every analysed player indicating whether the recommendation was relevant or not
def calculate_relevance(dfStats, playerIn, squadIn, positionIn, squadOut, top_k):
dfPlayer = pd.DataFrame(columns = ["PlayerIn", "SquadIn", "PosIn", "PlayerOut", "PosOut",
"Relevant", "PredRelevant", "PredSuccess%"])
i = 0
# Hacer filtro en los datos del año anterior por club y que
# sean de los que más juegan (mas de n partidos completos por año)
playersOut = dfStats.loc[(dfStats["Squad"] == squadIn) &
(dfStats["Min/90"] > MINIMUM_MATCHES_KEY_PLAYER)]
    # Loop over all the players found and produce recommendations for them
    for j in range(len(playersOut)):
        # Take the name and convert it to an ID
        playerOut = playersOut.iloc[j]["Player"]
        playerOutId = getPlayerId(dfStats, playerOut, squadIn)
        positionOut = playersOut.iloc[j]["Pos"]
        # Define the ground truth (same position means relevant, otherwise not)
        relevant = True if positionOut == positionIn else False
        # Produce the recommendation
        df_rec = getSimilarPlayers(dfStats, playerOutId, top_k)
        predRelevant = False
        predSuccessRate = 0
        # Look for the player in the recommendations
        playerFound = df_rec.loc[(df_rec["Player"] == playerIn) & (df_rec["Squad"] == squadOut)]
        # If he was not found in the previous team (perhaps he was on loan at another one),
        # look for the player by name only, in case he was at another team the previous year
        if len(playerFound) == 0:
            playerFound = df_rec.loc[df_rec["Player"] == playerIn]
        # If he was found in the recommendations
        if len(playerFound) > 0:
            predSuccessRate = playerFound["Success%"].values[0]
            # If the signing success rate is above the threshold, it is considered relevant
            if predSuccessRate > RELEVANT_SUCCESS_RATE:
predRelevant = True
if predRelevant == True:
print("YES (" + playerOut + " - " + str(predSuccessRate) + ")")
else:
print("NO (" + playerOut + " - " + str(predSuccessRate) + ")")
dfPlayer.loc[i] = [playerIn, squadIn, positionIn, playerOut, positionOut,
relevant, predRelevant, predSuccessRate]
i = i+1
return dfPlayer
def evaluate_transfers(dfStats, dfTransfers, season, top_k):
print(season + " TRANSFERS EVALUATION")
print("-------------------------")
dfResults = dfTransfers.copy()
dfPlayersResults = None
# dfResults["Relevant"] = False
    # For each signed player, get the signing club and the position
for i in range(len(dfTransfers)):
try:
playerIn = dfTransfers.loc[i, "PlayerIn"]
squadIn = dfTransfers.loc[i, "SquadIn"]
positionIn = dfTransfers.loc[i, "PosIn"]
squadOut = dfTransfers.loc[i, "SquadOut"]
print("Player " + str(i + 1) + " from " + str(
len(dfTransfers)) + " - " + playerIn + "(" + squadIn + "):")
            # Determine whether the signing is a relevant recommendation according to the system
# dfResults.at[i, ['Relevant', 'RelevantPlayer', 'RelevantSuccess%']] = calculate_relevance_old(dfStats, playerIn, squadIn, positionIn, top_k, True)
dfPlayerResults = calculate_relevance(dfStats, playerIn, squadIn, positionIn, squadOut, top_k)
if dfPlayersResults is None:
dfPlayersResults = dfPlayerResults
else:
dfPlayersResults = dfPlayersResults.append(dfPlayerResults, ignore_index=True)
except:
print("Error processing player " + str(i + 1) + " from " + str(len(dfTransfers)))
    # Merge dfResults with dfPlayersResults
dfResults = pd.merge(dfResults, dfPlayersResults, how='outer', on=["PlayerIn", "SquadIn", "PosIn"])
return dfResults
def plot_simple_matrix_confusion(cf_matrix):
sns.heatmap(cf_matrix, annot=True, xticklabels=['Positive', 'Negative'],
yticklabels=['Positive', 'Negative'])
plt.ylabel("Label")
plt.xlabel("Predicted")
plt.show()
def make_confusion_matrix(cf,
group_names=None,
categories='auto',
count=True,
percent=True,
cbar=True,
xyticks=True,
xyplotlabels=True,
sum_stats=True,
figsize=None,
cmap='Blues',
title=None):
'''
This function will make a pretty plot of an sklearn Confusion Matrix cm using a Seaborn heatmap visualization.
Arguments
---------
cf: confusion matrix to be passed in
group_names: List of strings that represent the labels row by row to be shown in each square.
categories: List of strings containing the categories to be displayed on the x,y axis. Default is 'auto'
count: If True, show the raw number in the confusion matrix. Default is True.
normalize: If True, show the proportions for each category. Default is True.
cbar: If True, show the color bar. The cbar values are based off the values in the confusion matrix.
Default is True.
xyticks: If True, show x and y ticks. Default is True.
xyplotlabels: If True, show 'True Label' and 'Predicted Label' on the figure. Default is True.
sum_stats: If True, display summary statistics below the figure. Default is True.
figsize: Tuple representing the figure size. Default will be the matplotlib rcParams value.
cmap: Colormap of the values displayed from matplotlib.pyplot.cm. Default is 'Blues'
See http://matplotlib.org/examples/color/colormaps_reference.html
title: Title for the heatmap. Default is None.
'''
# CODE TO GENERATE TEXT INSIDE EACH SQUARE
blanks = ['' for i in range(cf.size)]
if group_names and len(group_names) == cf.size:
group_labels = ["{}\n".format(value) for value in group_names]
else:
group_labels = blanks
if count:
group_counts = ["{0:0.0f}\n".format(value) for value in cf.flatten()]
else:
group_counts = blanks
if percent:
group_percentages = ["{0:.2%}".format(value) for value in cf.flatten() / np.sum(cf)]
else:
group_percentages = blanks
box_labels = [f"{v1}{v2}{v3}".strip() for v1, v2, v3 in zip(group_labels, group_counts, group_percentages)]
box_labels = np.asarray(box_labels).reshape(cf.shape[0], cf.shape[1])
# CODE TO GENERATE SUMMARY STATISTICS & TEXT FOR SUMMARY STATS
if sum_stats:
# Accuracy is sum of diagonal divided by total observations
accuracy = np.trace(cf) / float(np.sum(cf))
# if it is a binary confusion matrix, show some more stats
if len(cf) == 2:
# Metrics for Binary Confusion Matrices
precision = cf[0, 0] / sum(cf[:, 0])
recall = cf[0, 0] / sum(cf[0, :])
f1_score = 2 * precision * recall / (precision + recall)
stats_text = "\n\nAccuracy={:0.3f}\nPrecision={:0.3f}\nRecall={:0.3f}\nF1 Score={:0.3f}".format(
accuracy, precision, recall, f1_score)
else:
stats_text = "\n\nAccuracy={:0.3f}".format(accuracy)
else:
stats_text = ""
# SET FIGURE PARAMETERS ACCORDING TO OTHER ARGUMENTS
if figsize == None:
# Get default figure size if not set
figsize = plt.rcParams.get('figure.figsize')
if xyticks == False:
# Do not show categories if xyticks is False
categories = False
# MAKE THE HEATMAP VISUALIZATION
plt.figure(figsize=figsize)
sns.heatmap(cf, annot=box_labels, fmt="", cmap=cmap, cbar=cbar, xticklabels=categories, yticklabels=categories)
if xyplotlabels:
plt.ylabel('True label')
plt.xlabel('Predicted label' + stats_text)
else:
plt.xlabel(stats_text)
if title:
plt.title(title)
plt.show()
# Function that, given two dataframes of predictions and expected results,
# computes the most common metrics such as MAE, MSE and RMSE
def get_standard_metrics(preds, actuals):
p = preds.values.flatten()
a = actuals.values.flatten()
mae = metrics.mean_absolute_error(p, a)
mse = metrics.mean_squared_error(p, a)
rmse = sqrt(mse)
return mae, mse, rmse
def main():
root = Tk()
root.title("Football Player RS")
photo = PhotoImage(file="app_icon.png")
root.iconphoto(True, photo)
windowWidth = 800
windowHeight = 500
root.geometry('{}x{}'.format(windowWidth, windowHeight))
    # Center the window on the screen
positionRight = int(root.winfo_screenwidth() / 2 - windowWidth / 2)
positionDown = int(root.winfo_screenheight() / 2 - windowHeight / 2)
root.geometry("+{}+{}".format(positionRight, positionDown))
    # Text guiding the user to load the data
Label(root, text="Select and load the football player dataset:").place(x=20, y=5)
    # Frame where the results table will be shown
frame = Frame(root)
# frame.place(x=20, y=200)
frame.pack(fill='x', side=BOTTOM)
    # Input to change the input dataset path manually or automatically
e = StringVar()
Entry(root, textvariable=e, width=100).place(x=20,y=25)
    # TODO: remove this assignment. It is only here to speed up testing
e.set("D:/Nacho/Universidad/UNIR/Cuatrimestre 2/TFM/PyCharm/Transfermarkt/fbref_transfermarkt_2017_2018.csv")
    # Event handler of the select-data button
def btn_select_data_clicked():
        # Ask for the path of the dataset to load
file = filedialog.askopenfilename(filetypes=(("CSV files", "*.csv"),))
e.set(file)
    # Event handler of the load-data button
def btn_load_data_clicked():
global dataPath
dataPath = e.get()
try:
global data
data = pd.read_csv(dataPath, encoding="utf8")
comboSquads['values'] = sorted(data["Squad"].unique())
comboSquads.current(0)
except:
messagebox.showerror('Error', 'Dataset cannot be loaded')
    # Buttons to manage selecting and loading the dataset
Button(root, text="Select data", bg='#0052cc', fg='#ffffff',
command=btn_select_data_clicked).place(x=630, y=23)
Button(root, text="Load data", bg='#0052cc', fg='#ffffff',
command=btn_load_data_clicked).place(x=700, y=23)
Label(root, text="Choose the player to replace:").place(x=20, y=50)
    # Combo that will contain the squads available in the dataset
def comboSquads_dropdown():
try:
comboSquads['values'] = sorted(data["Squad"].unique())
comboPlayers.set('')
except:
messagebox.showerror('Error', 'First load a football player dataset')
def comboSquads_selected(eventObject):
squadSelected = comboSquads.get()
comboPlayers['values'] = sorted(data[data["Squad"] == squadSelected]["Player"])
comboPlayers.current(0)
Label(root, text="Squad: ").place(x=20, y=75)
comboSquads = Combobox(root, state="readonly", width=25, postcommand=comboSquads_dropdown)
comboSquads.place(x=70, y=75)
comboSquads.bind("<<ComboboxSelected>>", comboSquads_selected)
    # Combo that will contain the players of the selected squad
def comboPlayers_dropdown():
try:
squadSelected = comboSquads.get()
comboPlayers['values'] = sorted(data[data["Squad"]==squadSelected]["Player"])
except:
messagebox.showerror('Error', 'First load a football player dataset')
Label(root, text="Player: ").place(x=250, y=75)
comboPlayers = Combobox(root, state="readonly", width=25, postcommand=comboPlayers_dropdown)
comboPlayers.place(x=300, y=75)
Label(root, text="Top K: ").place(x=480, y=75)
top_k = StringVar()
sb_top_k = Spinbox(root, width=5, from_=0, to=100, textvariable=top_k)
sb_top_k.place(x=525, y=75)
top_k.set(50)
lbl_results = Label(root, text="", font=('Arial', 9, 'bold', 'underline'))
lbl_results.place(x=20, y=165)
    # Event handler of the recommend-players button
def btn_show_similar_players_clicked():
# Obtener el equipo seleccionado
squadSelected = comboSquads.get()
# Obtener el jugador seleccionado
playerSelected = comboPlayers.get()
# Si alguno de los dos es vacio, mostrar error
if squadSelected == "" or playerSelected == "":
messagebox.showerror('Error', 'Please select the player first')
else:
try:
playerId = getPlayerId(data, playerSelected, squadSelected)
# Obtener los k primeros que diga el usuario
k = int(sb_top_k.get())
lbl_results.configure(text=str(k)+" SIMILAR PLAYERS TO " +
str.upper(playerSelected) + " (" + str.upper(squadSelected) + "):")
df_rec = getSimilarPlayers(data, playerId, k)
pt = Table(frame, dataframe=df_rec, editable=False)
options = {'colheadercolor': '#0052cc', 'fontsize': 8}
config.apply_options(options, pt)
pt.show()
pt.redraw()
except:
messagebox.showerror('Error', 'Error getting similar players')
    # Button to run the recommendation
Button(root, text="Show recommended players", width=23, bg='#0052cc', fg='#ffffff',
command=btn_show_similar_players_clicked).place(x=300, y=120)
    # Event handler of the evaluate-RS button
def btn_evaluate_rs_clicked():
try:
# Recoger los fichajes de un año determinado
if dataPath != None:
# Coger la temporada que finaliza para poder recoger despues los fichajes de ese año
season = dataPath.split(".")[0][-4:]
parent = os.path.dirname(dataPath)
transfers_df = pd.read_csv(os.path.join(parent, "transfermarkt_transfers_" + season + ".csv"), encoding="utf8")
df_results = evaluate_transfers(data, transfers_df, season, TOP_K_EVALUATION)
# Guardar en disco
resultsFilePath = os.path.join(parent, "results_rs_" + season + ".csv")
df_results.to_csv(resultsFilePath, index=False)
print("Evaluation for " + season + " transfers done!!")
else:
confirm = messagebox.askyesnocancel(
title="Question",
message="Do you want only get metrics?",
default=messagebox.YES)
if confirm:
season = "2018"
parent = ".//Transfermarkt"
results_df = pd.read_csv(os.path.join(parent, "results_rs_" + season + ".csv"),
encoding="utf8")
results_same_pos_df = results_df.loc[results_df["PosIn"] == results_df["PosOut"]]
get_standard_metrics(results_same_pos_df["PredSuccess%"], pd.Series([RELEVANT_SUCCESS_RATE]).repeat(len(results_same_pos_df)))
# Get the confusion matrix
# cf_matrix = confusion_matrix(results_df["Relevant"], results_df["PredRelevant"], labels=[1,0])
# labels = ["TP", "FN", "FP", "TN"]
# categories = ["Relevant", "No Relevant"]
# make_confusion_matrix(cf_matrix, group_names=labels, categories=categories, percent=False)
elif confirm is None:
print("Press CANCEL")
else:
print("Press NO")
# Si no se selecciono el año en cuestion, evaluar todos los años
for i in range(2017, 2021):
season = str(i+1)
parent = ".//Transfermarkt"
transfers_df = pd.read_csv(os.path.join(parent, "transfermarkt_transfers_" + season + ".csv"),
encoding="utf8")
data_df = pd.read_csv(os.path.join(parent, "fbref_transfermarkt_" + str(i) + "_" + str(season) + ".csv"),
encoding="utf8")
df_results = evaluate_transfers(data_df, transfers_df, season, TOP_K_EVALUATION)
# Guardar en disco
resultsFilePath = os.path.join(parent, "results_rs_" + season + ".csv")
df_results.to_csv(resultsFilePath, index=False)
print("Evaluation for " + season + " transfers done!!")
except:
messagebox.showerror('Error', 'Dataset cannot be loaded for the evaluation')
    # Button to run the evaluation
Button(root, text="Evaluate RS", width=23, bg='#0052cc', fg='#ffffff',
command=btn_evaluate_rs_clicked).place(x=500, y=120)
root.mainloop()
# if len(sys.argv)>1:
# # Primer parametro indica la temporada desde la que se desean obtener jugadores
# season = sys.argv[1]
#
# # Cargar dataset del año deseado
# data = pd.read_csv("./Transfermarkt/fbref_" + season + "_transfermarkt.csv")
#
# # Crear la matriz de similaridad
# cos_sims = create_sim_matrix(data)
#
# playerSquad, playerName, playerId = findPlayerId(data, "Leo Messi", "Barcelona")
# # Pedir el jugador a reemplazar
# # playerId = 1453 # Messi
# # playerId = 1066 # Oblak
# # playerId = 1352 # De Bruyne
# # playerId = 452 # Cristiano Ronaldo
# # playerId = 849 # Pique
# # playerId = 2332 # Ramos
# # playerId = 482 # Parejo
#
# # Numero de resultados
# k = 50
#
# # Obtener lista de jugadores similares
# df_top_k = top_k_similares(cos_sims, data, playerId, k)
#
# # Cargar el modelo predictivo del valor de mercado
# Predicted Value_model = load_model('./Models/model_210810')
#
# # Predecir valoracion
# df_top_k_value = calculate_market_value(Predicted Value_model, df_top_k)
#
# # Calcular el porcentaje de acierto (ponderando similaridad + valoracion)
# df_top_k_value["Success%"] = df_top_k_value.apply(lambda row: calculate_success_rate(row), axis=1)
#
# # Ordenar
# final_recommendations = df_top_k_value.sort_values(["Success%"], ascending=False)[["Player", "Squad", "Value", "Predicted Value", "Valoration", "Similarity%", "Success%"]]
#
# print(str(k) + " similar players to " + playerName + " (" + playerSquad + "):")
# print(final_recommendations.to_string())
# else:
# print("Command error. It must be executed with ' RS 'season' '. Example: RS.py 2020_2021")
#
# return
if __name__ == "__main__":
main()
|
Swift
|
UTF-8
| 3,574 | 2.609375 | 3 |
[] |
no_license
|
//
// ViewController.swift
// RecipeApp
//
// Created by Callie on 4/8/20.
// Copyright © 2020 Tu (Callie) T. NGUYEN. All rights reserved.
//
import UIKit
import PKHUD
class ViewController: UIViewController {
@IBOutlet weak var tableView: UITableView! {
didSet {
tableView.register(UINib(nibName: "RecipeCell", bundle: nil), forCellReuseIdentifier: "RecipeCellID")
}
}
var recipes = [[Recipe]]()
var types = [RecipeType]()
var sectionSelected = -1
var indexSeleted = -1
override func viewDidLoad() {
super.viewDidLoad()
self.title = "Recipes"
saveDataToRealm()
setupView()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.getData()
}
func saveDataToRealm() {
HUD.show(.progress)
let launchedBefore = UserDefaults.standard.bool(forKey: "launchedBefore")
if launchedBefore {
self.getData()
} else {
UserDefaults.standard.set(true, forKey: "launchedBefore")
RecipeManager.saveDataToRealm { [weak self] in
self?.getData()
}
}
}
func setupView() {
tableView.delegate = self
tableView.dataSource = self
tableView.tableFooterView = UIView()
tableView.estimatedRowHeight = UITableView.automaticDimension
tableView.rowHeight = 70
}
func getData() {
//remove all old data
recipes.removeAll()
HUD.show(.progress)
types = RealmManager.getTypes()
for type in types {
let listRecipe = RealmManager.getRecipesByType(typeId: type.id)
recipes.append(listRecipe)
}
tableView.reloadData()
HUD.hide()
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
let backItem = UIBarButtonItem()
backItem.title = "Back"
navigationItem.backBarButtonItem = backItem
if segue.identifier == "goToRecipeDetail" {
if let vc = segue.destination as? RecipeDetailController {
vc.recipe = recipes[sectionSelected][indexSeleted]
}
}
}
}
extension ViewController : UITableViewDelegate, UITableViewDataSource {
func numberOfSections(in tableView: UITableView) -> Int {
return types.count
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return recipes[section].count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "RecipeCellID", for: indexPath) as! RecipeCell
cell.updateData(recipe: recipes[indexPath.section][indexPath.row])
cell.selectionStyle = .none
return cell
}
func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
let headerView = UITableViewHeaderFooterView()
headerView.textLabel?.text = types[section].name
return headerView
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
return 30
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
sectionSelected = indexPath.section
indexSeleted = indexPath.row
performSegue(withIdentifier: "goToRecipeDetail", sender: nil)
}
}
|
Shell
|
UTF-8
| 109 | 2.515625 | 3 |
[] |
no_license
|
#!/bin/bash
cat /etc/passwd |awk -F : '$3>500 && $3<5000{print "username: "$1," uid is "$3," gid is "$4}'
|
Java
|
UTF-8
| 652 | 2.296875 | 2 |
[] |
no_license
|
package com.example.demo.eventman;
import org.springframework.context.ApplicationEvent;
import com.example.demo.model.Vehicle;
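/**
 * Application event carrying the remaining gas level and the vehicle concerned
 * when an empty-tank condition is reported.
 */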
public class EmptyTankEvent extends ApplicationEvent{
private static final long serialVersionUID = 4939990986618611229L;
private float remainingGas;
private Vehicle vechile;
public EmptyTankEvent() {
super(EmptyTankEvent.class);
}
public float getRemainingGas() {
return remainingGas;
}
public void setRemainingGas(float remainingGas) {
this.remainingGas = remainingGas;
}
public Vehicle getVechile() {
return vechile;
}
public void setVechile(Vehicle vechile) {
this.vechile = vechile;
}
}
|
Shell
|
UTF-8
| 1,223 | 3.484375 | 3 |
[] |
no_license
|
#!/bin/bash
# This is just to be able to gauge the order of things; it's not a super-rigorous benchmark.
# Times aren't really reproducible, but the order they fall in is.
# Check they're all on the path
path_to_t=$(which t)
if [ ! -x "$path_to_t" ] ; then
printf "t not found. Please install with 'sudo gem install t'\n"
exit 0
fi
path_to_tw=$(which tw)
if [ ! -x "$path_to_tw" ] ; then
printf "tw not found. Please install with 'cargo install tw-rs'\n"
exit 0
fi
path_to_tweet=$(which tweet)
if [ ! -x "$path_to_tweet" ] ; then
printf "tweet not found. Please install with 'stack install tweet-hs'\n"
exit 0
fi
path_to_bench=$(which bench)
if [ ! -x "$path_to_bench" ] ; then
printf "tweet not found. Please install with 'stack install bench'\n"
exit 0
fi
# ping once so it's fair
printf "ping once to initialize..."
t timeline realDonaldTrump > /dev/null
printf "Ruby's t\n"
bench "t timeline realDonaldTrump" --output t.html
printf "Haskell's tweet\n"
bench "tweet user realDonaldTrump" --output tweet.html
printf "Rust's tw\n"
bench "tw user realDonaldTrump" --output tw.html
printf "Perl's oysttyer\n"
bench "echo '/again realDonaldTrump' | perl oysttyer.pl" --output oysttyer.html
|
Python
|
UTF-8
| 367 | 3.3125 | 3 |
[] |
no_license
|
length = 12
count = 0
altcount = 0
for altcount in range(0,length+1):
display = "x"
if altcount == 0:
for count in range(1,length+1):
display+= " " +str(count)
print display
else:
display = str(altcount)
for count in range(1,length+1):
display += " " + str(altcount * count)
print display
|
C++
|
UTF-8
| 1,508 | 2.671875 | 3 |
[] |
no_license
|
#define _CRT_SECURE_NO_WARNINGS
#include <cstdio>
#include <cmath>
#include <cctype>
#include <cstdint>
#include <cstring>
#include <cstdlib>
#include <iostream>
#include <algorithm>
#include <string>
#include <sstream>
#include <vector>
#include <map>
#include <unordered_set>
#include <stack>
#include <queue>
#include <list>
#include <functional>
using namespace std;
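// Scan the five columns for horizontal runs of three equal values within the first h rows,
// erase every cell belonging to such a run and return the sum of the erased values.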
int del(list<int> col[5], int h)
{
list<int>::iterator it[5];
vector<list<int>::iterator> dl[5];
for (int i = 0; i < 5; i++)
it[i] = col[i].begin();
for (int i = 0; i < h; i++)
{
for (int j = 0; j < 3; j++)
if (it[j] != col[j].end()
&& it[j + 1] != col[j + 1].end()
&& it[j + 2] != col[j + 2].end()
&& *it[j] == *it[j + 1]
&& *it[j] == *it[j + 2])
for (int k = 0; k < 3; k++)
if (dl[k + j].end() == find(dl[k + j].begin(), dl[k + j].end(), it[k + j]))
dl[k + j].push_back(it[k + j]);
for (int j = 0; j < 5; j++)
{
if (it[j] != col[j].end())
it[j]++;
}
}
int count = 0;
for (int j = 0; j < 5; j++)
{
for (auto &i : dl[j])
{
count += *i;
col[j].erase(i);
}
}
return count;
}
int main()
{
while (1)
{
int h;
cin >> h;
if (h == 0)
break;
list<int> col[5];
for (int i = 0; i < h; i++)
for (int j = 0; j < 5; j++)
{
int tmp;
cin >> tmp;
col[j].push_front(tmp);
}
int count = 0;
while (1)
{
int tmp = del(col, h);
if (tmp == 0)
break;
count += tmp;
}
cout << count << endl;
}
return 0;
}
|
C++
|
UTF-8
| 702 | 3.28125 | 3 |
[] |
no_license
|
class Solution {
public:
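    // Greedily remove the first digit that is larger than its successor (or the last digit
    // if the digits are non-decreasing), strip leading zeros, then recurse until k removals are done.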
string removeKdigits(string& num, int k)
{
if(k <= 0)
{
return num;
}
int i = 0;
for(; i < num.length() - 1; i++)
{
if(num[i] > num[i + 1])
{
break;
}
}
if(num.length() > 0)
{
num.erase(num.begin() + i);
}
while(num[0] == '0' )
{
if(num.length() > 0)
{
num.erase(num.begin());
}
}
if(num.length() == 0)
{
num = "0";
return num;
}
return removeKdigits(num, k-1);
}
};
|
Markdown
|
UTF-8
| 2,471 | 2.953125 | 3 |
[] |
no_license
|
## Scenario
Windows employs two major file systems, NTFS and FAT32, while EXT is the de facto file system for Linux. When an operating system is changed from Linux to Windows after reinstallation, the data disk remains in its original format. Therefore, the system might not be able to access the data disk’s file system. In these cases, you will need to use a format converter to read the data disk.
This document describes how to read a data disk when the operating system has been [reinstalled](https://intl.cloud.tencent.com/document/product/213/4933) from Linux to Windows.
## Prerequisites
- DiskInternals Linux Reader has been installed on the reinstalled Windows CVM.
Download DiskInternals Linux Reader: `http://www.diskinternals.com/download/Linux_Reader.exe `
- Suppose the data disk mounted to the Linux CVM before reinstallation has two partitions, vdb1 and vdb2, as shown below:

## Directions
### Mounting a data disk
>! If a data disk has been mounted, skip this step.
>
1. Log in to the [Tencent Cloud CVM Console](https://console.cloud.tencent.com/cvm/).
2. Click **Cloud Block Storage** from the left sidebar to enter the Cloud Block Storage management page.
3. Locate the instance with the reinstalled system, and click **More** > **Mount** on the right as shown below:

4. In the pop-up window, select the reinstalled Windows CVM and click **Submit**.
### Viewing data disk information
1. Run DiskInternals Linux Reader to view the information of newly mounted data disk. `/root/mnt` and `/root/mnt1` correspond to vdb1 and vdb2 respectively, which are the 2 data disk partitions on the Linux CVM before reinstallation as shown below:
>! Note that the Linux data disk is read-only at this time. To perform read and write operations on the data disk as you do on a Windows data disk, back up your needed files and re-format the disk into a standard Windows-supported file system. For more information, please see [Data Disk Partition and Formatting of Windows CVMs](https://intl.cloud.tencent.com/document/product/213/2158).
>

2. Double-click to enter `/root/mnt` directory, right-click the file you want to copy, and select **Save** as shown below:

|
C++
|
UTF-8
| 883 | 2.8125 | 3 |
[] |
no_license
|
#ifndef _WORDBST_H_
#define _WORDBST_H_
#include "WordNode.h"
class WordBST
{
private:
WordNode * root; // Word BST Root
void R_Preorder(WordNode * node);
void R_Inorder(WordNode * node);
void R_Postorder(WordNode * node);
void I_Preorder(); // Preorder traversal
void I_Inorder();
void I_Postorder();
void I_LEVEL();
struct Stack {
WordNode * pHead = 0;
WordNode * Top() {
return pHead;
}
void Push(WordNode * node) {
node->SetNext(pHead);
pHead = node;
}
WordNode * Pop() {
WordNode * p = pHead;
pHead = pHead->GetNext();
p->SetNext(0);
return p;
}
};
public:
WordBST();
~WordBST();
void Insert(WordNode * node); // LOAD, MOVE
WordNode * Delete(char * word); // TEST
WordNode * Search(char * word); // ADD, TEST, SEARCH, UPDATE
bool Print(char * order); // PRINT
bool Save(); // SAVE
};
#endif
|
Rust
|
UTF-8
| 611 | 2.8125 | 3 |
[] |
no_license
|
use crate::game::card::object::Card;
use ::bevy::prelude::*;
use bevy_text_mesh::TextMesh;
impl Card {
fn text(&self) -> String {
let mana_cost = self.proto.cost.mana;
format!(
"[ {mana_cost} ]
some longer strings in my wonderful game"
)
}
}
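/// Keeps each card's TextMesh in sync with its parent Card, rewriting the text
/// only when it has actually changed.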
pub fn update_text_meshes(cards: Query<&Card>, mut meshes: Query<(&mut TextMesh, &Parent)>) {
for (mut mesh, parent) in &mut meshes {
let card = cards.get(parent.get()).unwrap();
let new_text = card.text();
if mesh.as_ref().text != new_text {
mesh.text = new_text
}
}
}
|
JavaScript
|
UTF-8
| 3,047 | 2.65625 | 3 |
[] |
no_license
|
$(function () {
$("#signupForm").validate({
        // Declare the validation rules in rules - keyed by each element's name
rules:{
            username:{ // required, 2-4 characters
required : true,
minlength : 2,
maxlength : 4
},
            password:{ // required, 8-15 characters
required : true,
rangelength : [8, 15]
// minlength : 8,
// maxlength : 15
},
            confirm_password:{ // required, 8-15 characters, same as password
required : true,
rangelength : [8, 15],
equalTo : "#password"
},
            email:{ // required, valid e-mail format
required : true,
email : true
},
policy:{
required : true
},
            topic:{ // require at least 2 selections when newsletter is checked
required : "#newsletter_topics",
minlength : 2
}
        }, // end of rules
        messages:{ // custom messages provided by the developer
            username:{ // required, 2-4 characters
required : "이름은 필수 요소입니다",
minlength : "이름은 최소 두 자리여야 합니다",
maxlength : "이름은 최대 4자리까지 허용됩니다"
},
            password:{ // required, 8-15 characters
required : "비밀번호는 필수 요소입니다",
rangelength : "비밀번호는 8~15자리로 입력해야 합니다"
// minlength : 8,
// maxlength : 15
},
            confirm_password:{ // required, 8-15 characters, same as password
required : "비밀번호는 필수 요소 입니다",
rangelength : "비밀번호는 8~15자리로 입력해야 합니다",
equalTo : "이전 비밀번호와 다릅니다"
},
            email:{ // required, valid e-mail format
required : "이메일은 필수 요소 입니다",
email : "이메일을 확인해주세요"
},
policy:" 우리의 정책에 동의를 필요로 합니다.",
topic: " 관심사를 적어도 2개는 표시해야 합니다."
        }, // end of messages
        errorElement : "em",
errorPlacement : function (error, element) {
error.addClass("help-block");
if (element.prop("type") == "checkbox") {
error.insertAfter(element.next("label"));
} else{
error.insertAfter(element);
}
}
    }); // end of signupForm validation
    // When newsletter is clicked, show or hide the mailing list topics below
$("#newsletter").click(function () {
let topics = $("#newsletter_topics");
if (topics.css("display") === "none") {
$(topics).css("display", "block");
} else {
$(topics).css("display", "none");
}
});
});
|
Java
|
ISO-8859-1
| 3,860 | 2.640625 | 3 |
[] |
no_license
|
package engine;
import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.junit.runner.JUnitCore;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import util.ClasspathClassLoader;
public class TestAnalyser {
ClasspathClassLoader clsLoader;
String actualPath;
private HashMap<String, List<String>> testClassFailed = new HashMap<String, List<String>>();
private List<String> noTestClasses = new ArrayList<String>();
public static final String JUNIT_ANNOTATION = "org.junit.Test";
private int testFailures;
public TestAnalyser(ClasspathClassLoader clsLoader) {
this.clsLoader = clsLoader;
this.testFailures = 0;
}
public int getTestFailures() {
return testFailures;
}
public List<String> getNoTestClasses() {
return noTestClasses;
}
public HashMap<String, List<String>> getTestClassFailed() {
return testClassFailed;
}
/**
* Look for every test class in classpath and return every bugged method from every class
*/
public void analyseWhiteBoxTests() {
// Get all the folders/jars added to classpath
URL[] urls = this.clsLoader.getURLs();
for (URL url : urls) {
// for every folder run testClasses
actualPath = url.getFile();
runAllWhiteBoxTestClasses(new File(actualPath));
}
}
/**
* Runs a test
* @param className the test classname
*/
public void runTest(Class<?> className) {
List<String> testsFailed = new ArrayList<String>();
for (Method m : className.getDeclaredMethods()) {
if (m.getAnnotations().length != 0)
if (m.getAnnotations()[0].annotationType().getName().equals("org.junit.Test")) {
Request request = Request.method(className, m.getName());
Result result = new JUnitCore().run(request);
List<Failure> failures = result.getFailures();
if (failures.size() != 0) {
//System.out.println("Test failed: " + m.getName());
testsFailed.add(m.getName());
}
}
}
// if any test of the class failed
if (testsFailed.size() != 0){
testClassFailed.put(className.getName(), testsFailed);
			testFailures = testFailures + testsFailed.size();
}
}
/**
* Runs all "WhiteBox tests
* @param path the path
*/
public void runAllWhiteBoxTestClasses(File path) {
File listFile[] = path.listFiles();
if (listFile != null) {
for (int i = 0; i < listFile.length; i++) {
if (listFile[i].isDirectory())
runAllWhiteBoxTestClasses(listFile[i]);
else if (listFile[i].toString().contains(".class") && !listFile[i].toString().contains("Blackbox")) {
runTest(this.clsLoader.findOrLoadClass(getNameClass(actualPath, listFile[i].toString())));
isNotTestClass(this.clsLoader.findOrLoadClass(getNameClass(actualPath, listFile[i].toString())));
}
}
}
}
private void isNotTestClass(Class<?> className) {
Boolean hasTest = false;
for (Method m : className.getDeclaredMethods()) {
if (m.getAnnotations().length != 0)
if (m.getAnnotations()[0].annotationType().getName().equals(JUNIT_ANNOTATION)) {
hasTest = true;
}
}
if (!hasTest)
noTestClasses.add(className.getName());
}
private String getNameClass(String path, String file) {
// windows
String name;
if (file.contains("\\"))
name = file.replace(path.substring(1).replace("/", "\\"), "").replace("\\", ".").replace(" ", "").replace(".class", "");
else
// linux
name = file.replace(path.substring(1), "").replace("/", ".").replace(" ", "").replace(".class", "");
//System.out.println(name);
return name;
}
}
|
Markdown
|
UTF-8
| 657 | 2.96875 | 3 |
[
"MIT"
] |
permissive
|
# zmdb
A database platform that operates with HTTP requests. Made with Java Spring.
# Running
First, download the .jar from the latest release. To run without viewing logs, just run the .jar. To view logs, run it from a terminal with `java -jar FILEPATH`, where FILEPATH is the path to the downloaded .jar.
# Starting out
Download an HTTP request sender to start out (like Postman). You can make your first database by sending a POST request to localhost:9001/databases with a body of:
{
"name": "myFirstDatabase"
}
This will create a database with name "myFirstDatabase". You can find further documentation in [HELP.md](HELP.md).
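As a rough sketch (assuming the server is running locally on the default port 9001 used above, and that the endpoint expects a JSON Content-Type header), the same request could be sent with curl:

    curl -X POST http://localhost:9001/databases \
         -H "Content-Type: application/json" \
         -d '{"name": "myFirstDatabase"}'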
|
C++
|
UTF-8
| 1,084 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
#include <Adafruit_NeoPixel.h>
#define DATA_PIN 4
#define NUM_PX 12
Adafruit_NeoPixel strip = Adafruit_NeoPixel(
NUM_PX,
DATA_PIN,
NEO_GRB + NEO_KHZ800
);
int dataPin = 4;
int pixels = 12;
int pixel = 0;
int col_mode = 0;
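// Fill the strip one pixel at a time with the given colour, updating the display at each step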
void colorWipe(uint32_t c) {
for(uint16_t i=0; i<strip.numPixels(); i++) {
strip.setPixelColor(i, c);
strip.show();
delay(10);
}
}
void setup() {
strip.begin();
colorWipe(
strip.Color(0,0,0)
);
strip.show();
}
void loop() {
if (col_mode == 0) {
strip.setPixelColor(
pixel,
strip.Color(255, 0, 0)
);
}
if (col_mode == 1) {
strip.setPixelColor(
pixel,
strip.Color(0,255,0)
);
}
if (col_mode == 2) {
strip.setPixelColor(
pixel,
strip.Color(0,0,255)
);
}
if (col_mode == 3) {
strip.setPixelColor(
pixel,
strip.Color(0,0,0)
);
}
strip.show();
if (pixel < strip.numPixels() - 1) {
pixel = pixel + 1;
} else {
pixel = 0;
if (col_mode < 3) { col_mode++; }
else { col_mode = 0; }
}
delay(1000);
}
|
Shell
|
UTF-8
| 613 | 3.3125 | 3 |
[] |
no_license
|
#!/bin/sh
if [ "`git status -s`" ]
then
echo "The working directory is dirty. Please commit any pending changes."
exit 1;
fi
echo "Deleting old publication"
rm -rf dist
mkdir dist
git worktree prune
rm -rf .git/worktrees/dist/
echo "Checking out gh-pages branch into dist"
git worktree add -B gh-pages dist origin/gh-pages
echo "Removing existing files"
rm -rf dist/*
echo "Generating site"
parcel build src/index.html --public-url /curves-demo/
echo "Updating gh-pages branch"
cd dist && git add --all && git commit -m "Publishing to gh-pages (publish.sh)"
#echo "Pushing to github"
git push --all
|
Python
|
UTF-8
| 1,058 | 2.859375 | 3 |
[] |
no_license
|
import sys
from PyQt5.QtCore import QObject, QTimer, pyqtSlot
from PyQt5.QtWidgets import QApplication
class MyTime(QTimer):
def __init__(self, *args, **kwargs):
super(MyTime, self).__init__(*args, **kwargs)
def __del__(self):
print('del')
class Worker(QObject):
def __init__(self):
super(Worker, self).__init__()
        # Without setting a parent object, the timer would be destroyed
timer = MyTime(self)
timer.timeout.connect(self.xxx)
timer.start(300)
    # Make sure to use this slot decorator
@pyqtSlot()
def xxx(self):
print('xxx')
class CCC():
def __del__(self):
print('ccc del')
def fuck():
ccc = CCC()
def main():
app = QApplication(sys.argv)
worker = Worker()
fuck()
    # Guess: exec_() keeps reclaiming resources that have no parent (that is Qt's own cleanup, not to be
    # confused with Python's garbage collection - that one is gc, something to look into when there is time~)
    # !!! Although it seems Python's own collection is also prompt?
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
Swift
|
UTF-8
| 9,996 | 2.671875 | 3 |
[] |
no_license
|
//
// VideoLauncher.swift
// YoutubeApp
//
// Created by Oluwatobi Adebiyi on 11/13/16.
// Copyright © 2016 Oluwatobi Adebiyi. All rights reserved.
//
import UIKit
import AVFoundation
class VideoPlayerView: UIView {
var player: AVPlayer?
// Loading Indicator
let activityIndicatorView: UIActivityIndicatorView = {
let aiv = UIActivityIndicatorView(activityIndicatorStyle: .whiteLarge)
aiv.translatesAutoresizingMaskIntoConstraints = false
aiv.startAnimating()
return aiv
}()
// Video Player Container
let controlContainerView: UIView = {
let view = UIView()
view.backgroundColor = UIColor(white: 0, alpha: 1)
return view
}()
// Pause Button
let pausePlayButton: UIButton = {
let button = UIButton(type: .system)
let image = UIImage(named: "pause.png")
button.setImage(image, for: .normal)
button.translatesAutoresizingMaskIntoConstraints = false
button.tintColor = UIColor.white
button.isHidden = true
return button
}()
// Video Length Label
let videoLengthLabel: UILabel = {
let label = UILabel()
label.text = "00:00"
label.textColor = UIColor.white
label.font = UIFont.boldSystemFont(ofSize: 13)
label.textAlignment = .right
label.translatesAutoresizingMaskIntoConstraints = false
return label
}()
// Video - Current Time Label
let currentTimeLabel: UILabel = {
let label = UILabel()
label.text = "00:00"
label.textColor = UIColor.white
label.font = UIFont.boldSystemFont(ofSize: 13)
label.textAlignment = .right
label.translatesAutoresizingMaskIntoConstraints = false
return label
}()
// Video Slider
let videoSlider: UISlider = {
let slider = UISlider()
slider.minimumTrackTintColor = UIColor.red
let image = UIImage(named: "dots.png")
slider.setThumbImage(image, for: .normal)
return slider
}()
// init
override init(frame: CGRect) {
super.init(frame: frame)
setupViews()
}
// required init
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// SetUpViews
func setupViews() {
setupPlayerView()
setupGradientLayer()
pausePlayButton.addTarget(self, action: #selector(VideoPlayerView.handlePause), for: .touchUpInside)
videoSlider.addTarget(self, action: #selector(VideoPlayerView.handleSliderChange), for: .valueChanged)
controlContainerView.frame = frame
addSubview(controlContainerView)
// Add Activity Indicator to the Video Player Container
controlContainerView.addSubview(activityIndicatorView)
activityIndicatorView.centerXAnchor.constraint(equalTo: centerXAnchor).isActive = true
activityIndicatorView.centerYAnchor.constraint(equalTo: centerYAnchor).isActive = true
// Add Pause Button to the Video Player Container
controlContainerView.addSubview(pausePlayButton)
pausePlayButton.centerXAnchor.constraint(equalTo: centerXAnchor).isActive = true
pausePlayButton.centerYAnchor.constraint(equalTo: centerYAnchor).isActive = true
pausePlayButton.widthAnchor.constraint(equalToConstant: 64).isActive = true
pausePlayButton.widthAnchor.constraint(equalToConstant: 64).isActive = true
// Video Length Label
controlContainerView.addSubview(videoLengthLabel)
videoLengthLabel.rightAnchor.constraint(equalTo: rightAnchor, constant: -8).isActive = true
videoLengthLabel.bottomAnchor.constraint(equalTo: bottomAnchor).isActive = true
videoLengthLabel.widthAnchor.constraint(equalToConstant: 50).isActive = true
videoLengthLabel.heightAnchor.constraint(equalToConstant: 24).isActive = true
// Video Current Length
controlContainerView.addSubview(currentTimeLabel)
currentTimeLabel.leftAnchor.constraint(equalTo: leftAnchor).isActive = true
currentTimeLabel.bottomAnchor.constraint(equalTo: bottomAnchor).isActive = true
currentTimeLabel.widthAnchor.constraint(equalToConstant: 50).isActive = true
currentTimeLabel.heightAnchor.constraint(equalToConstant: 24).isActive = true
// Video Slider
controlContainerView.addSubview(videoSlider)
videoSlider.rightAnchor.constraint(equalTo: videoLengthLabel.leftAnchor).isActive = true
videoSlider.leftAnchor.constraint(equalTo: currentTimeLabel.rightAnchor, constant: 5).isActive = true
controlContainerView.addConstraintsWithFormat(format: "V:[v0]-4-|", views: videoSlider)
backgroundColor = UIColor.black
}
func setupPlayerView() {
let urlString = "https://firebasestorage.googleapis.com/v0/b/gameofchats-762ca.appspot.com/o/message_movies%2F12323439-9729-4941-BA07-2BAE970967C7.mov?alt=media&token=3e37a093-3bc8-410f-84d3-38332af9c726"
if let url = URL(string: urlString) {
player = AVPlayer(url: url)
let playerLayer = AVPlayerLayer(player: player)
self.layer.addSublayer(playerLayer)
playerLayer.frame = self.frame
player?.play()
player?.addObserver(self, forKeyPath: "currentItem.loadedTimeRanges", options: .new, context: nil)
// Track Player Progress
let interval = CMTime(value: 1, timescale: 2)
player?.addPeriodicTimeObserver(forInterval: interval, queue: // This will wait to finish
DispatchQueue.main, using: { (progressTime) in
let seconds = CMTimeGetSeconds(progressTime)
let secondsText = String(format: "%02d", Int(seconds) % 60)
let minutesText = String(format: "%02d", Int(seconds) / 60)
self.currentTimeLabel.text = "\(minutesText):\(secondsText)"
// Let set Slider Value
if let duration = self.player?.currentItem?.duration {
let durationSeconds = CMTimeGetSeconds(duration)
self.videoSlider.value = Float(seconds / durationSeconds)
}
})
}
}
func setupGradientLayer() {
let gradientLayer = CAGradientLayer()
gradientLayer.frame = bounds
gradientLayer.colors = [UIColor.clear.cgColor, UIColor.black.cgColor]
gradientLayer.locations = [0.6, 1.2]
controlContainerView.layer.addSublayer(gradientLayer)
}
func handleSliderChange() {
if let duration = player?.currentItem?.duration {
let totalSeconds = CMTimeGetSeconds(duration)
let value = Float64(videoSlider.value) * totalSeconds
let seekTime = CMTime(value: CMTimeValue(Int(value)), timescale: 1)
player?.seek(to: seekTime, completionHandler: { (completedSeek) in
                // We would do something later
})
}
}
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
// When the player is ready and rendering frames
if keyPath == "currentItem.loadedTimeRanges" {
activityIndicatorView.stopAnimating()
controlContainerView.backgroundColor = UIColor.clear
pausePlayButton.isHidden = false
isPlaying = true
if let duration = player?.currentItem?.duration {
let seconds = CMTimeGetSeconds(duration)
                let secondsText = String(format: "%02d", Int(seconds) % 60)
let minutesText = String(format: "%02d", Int(seconds) / 60)
videoLengthLabel.text = "\(minutesText):\(secondsText)"
}
}
}
var isPlaying = false
    // Handles the pause button: it toggles the image between pause and play
func handlePause() {
if isPlaying {
player?.pause()
pausePlayButton.setImage(UIImage(named: "play.png"), for: .normal)
} else {
player?.play()
pausePlayButton.setImage(UIImage(named: "pause.png"), for: .normal)
}
isPlaying = !isPlaying
}
}
class VideoLauncher: NSObject {
func showVideoPlayer() {
if let keyWindow = UIApplication.shared.keyWindow {
let view = UIView(frame: keyWindow.frame)
view.backgroundColor = UIColor.white
view.frame = CGRect(x: keyWindow.frame.width - 10, y: keyWindow.frame.height - 10, width: 10, height: 10)
            // 16 x 9 is the aspect ratio of all HD video
let height = keyWindow.frame.width * 9 / 16
let videoPlayerFrame = CGRect(x: 0, y: 0, width: keyWindow.frame.width, height: height)
let videoPlayerView = VideoPlayerView(frame: videoPlayerFrame)
view.addSubview(videoPlayerView)
keyWindow.addSubview(view)
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseOut, animations: {
view.frame = keyWindow.frame
}, completion: { (completedAnimation) in
// maybe we'll do something here later
// UIApplication.shared.setStatusBarHidden(true, with: .fade)
})
}
}
}
|
Java
|
UTF-8
| 3,126 | 2.234375 | 2 |
[] |
no_license
|
package com.neyyco.bots.Commands.Command;
import com.neyyco.bots.Commands.Types.ServerCommand;
import com.neyyco.bots.Main;
import com.neyyco.bots.Music.MusicController;
import com.sedmelluq.discord.lavaplayer.player.AudioLoadResultHandler;
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer;
import com.sedmelluq.discord.lavaplayer.player.AudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.source.youtube.YoutubeSearchMusicResultLoader;
import com.sedmelluq.discord.lavaplayer.tools.FriendlyException;
import com.sedmelluq.discord.lavaplayer.track.AudioPlaylist;
import com.sedmelluq.discord.lavaplayer.track.AudioTrack;
import net.dv8tion.jda.api.entities.*;
import net.dv8tion.jda.api.managers.AudioManager;
import java.net.URI;
import java.net.URISyntaxException;
public class PlayCommand implements ServerCommand {
@Override
public void performCommand(String[] arguments, Guild guild, Member member, TextChannel textChannel, Message message) {
        // allow multi-word search queries as well as single links
        if(arguments.length >= 2){
GuildVoiceState voiceState;
if ((voiceState = member.getVoiceState()) != null) {
VoiceChannel voiceChannel;
if ((voiceChannel = voiceState.getChannel())!= null) {
MusicController musicController = Main.getAudioManager().getMusicController(voiceChannel.getGuild().getIdLong());
AudioPlayer player = musicController.getAudioPlayer();
AudioPlayerManager audioPlayerManager = Main.getAudioPlayerManager();
AudioManager audioManager = voiceState.getGuild().getAudioManager();
audioManager.openAudioConnection(voiceChannel);
StringBuilder builder = new StringBuilder();
for (int i = 1; i < arguments.length; i++) builder.append(arguments[i] + " ");
String rawLink = builder.toString().trim();
if(!rawLink.startsWith("http")) {
rawLink = "ytsearch: " + rawLink;
}
final String url = rawLink;
audioPlayerManager.loadItem(url, new AudioLoadResultHandler() {
@Override
public void trackLoaded(AudioTrack audioTrack) {
musicController.getAudioPlayer().playTrack(audioTrack);
}
@Override
public void playlistLoaded(AudioPlaylist audioPlaylist) {
audioPlaylist.getTracks();
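                            // note: the returned track list is currently discarded, so playlists are not actually queued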
}
@Override
public void noMatches() {
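                            // nothing was found for the search query; nothing is queued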
}
@Override
public void loadFailed(FriendlyException e) {
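                            // loading or decoding failed; the error is silently ignored for now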
}
});
}else{
textChannel.sendMessage("Sulod sag voice channel!").queue();
}
}else {
textChannel.sendMessage("Sulod sag voice channel!").queue();
}
}
}
}
|
Python
|
UTF-8
| 2,874 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
# Reminder Application
# Authors: Sharon, Hailey, Mayank
# import modules
from flask import Flask, redirect, url_for, render_template, request, flash
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import DateTime
from datetime import datetime
# app configurations
app = Flask(__name__)
app.secret_key = "reminder-app"
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///user.sqlite3'
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db = SQLAlchemy(app)
class reminders(db.Model):
_id = db.Column("id",db.Integer,primary_key=True)
reminderName = db.Column("name",db.String(100))
reminderTime = db.Column("time",db.String(100)) # the time to be reminded
remindAt = db.Column(DateTime, default=None)
def __init__(self, name, rtime, time):
self.reminderName = name
self.reminderTime = rtime
self.remindAt = time
# home page.
@app.route("/", methods=["POST","GET"])
def home():
if request.method == "POST":
return redirect(url_for("add"))
return render_template("home.html",reminders=reminders.query.all())
# add reminder page. reminders are added here
@app.route("/add", methods=["POST","GET"])
def add():
if request.method == "POST":
reminderName = request.form["rname"]
d = request.form["d"]
t = request.form["t"]
dt = d + ' ' + t
remindAt = datetime.strptime(dt, "%Y-%m-%d %H:%M")
reminder = reminders(reminderName,dt,remindAt)
db.session.add(reminder)
db.session.commit()
return redirect(url_for("view",name=reminderName,time=str(remindAt)))
return render_template("add.html")
# view reminders page.
@app.route("/view", methods=["POST","GET"])
def view():
if request.method == "POST":
return redirect(url_for("home"))
return render_template("view.html",name=request.args.get("name"),time=request.args.get('time'))
# reminders are updated here
@app.route('/update/<int:_id>', methods=['GET', 'POST'])
def update(_id):
if request.method == 'POST':
reminderName = request.form["rname"]
d = request.form["d"]
t = request.form["t"]
dt = d + ' ' + t
remindAt = datetime.strptime(dt, "%Y-%m-%d %H:%M")
reminder = reminders.query.filter_by(_id=_id).first()
reminder.reminderName = reminderName
        reminder.reminderTime = dt
        reminder.remindAt = remindAt
db.session.add(reminder)
db.session.commit()
return redirect("/")
reminder = reminders.query.filter_by(_id=_id).first()
return render_template('update.html', reminder=reminder)
# reminders are deleted here
@app.route('/delete/<int:_id>')
def delete(_id):
reminder = reminders.query.filter_by(_id=_id).first()
db.session.delete(reminder)
db.session.commit()
return redirect("/")
# run the app
if __name__ == "__main__":
db.create_all()
app.run(debug=True)
|
Markdown
|
UTF-8
| 5,375 | 3.015625 | 3 |
[] |
no_license
|
# An example of sharing memory between the kernel and user space: proc and mmap
There were two reasons for writing this post. First, the kernel board had a discussion titled "The kernel can compute a physical address directly from a linear address, but what is that useful for?", in which I said that the computed physical address can be shared with user space for reading and writing, and dreamice asked whether I could describe a concrete application in detail. Second, moderator alb* mentioned that wheelz had written an example of this kind; after reading it, I found it somewhat inflexible in how the physical address and memory size are passed to user space, and alb* pointed out that these values can be passed dynamically through files.
So I wrote such an example myself; it passes the physical address and size of the kernel-space memory to user space dynamically.
The kernel module creates the proc files and allocates the memory when the module is inserted, and prints whatever user space wrote into the region when the module is unloaded.
Below are the kernel module code and the user-space test code.
```c
/*This program is used to allocate memory in kernel
and pass the physical address to userspace through proc file.*/
#include <linux/version.h>
#include <linux/module.h>
#include <linux/proc_fs.h>
#include <linux/mm.h>
#define PROC_MEMSHARE_DIR "memshare"
#define PROC_MEMSHARE_PHYADDR "phymem_addr"
#define PROC_MEMSHARE_SIZE "phymem_size"
/*alloc one page. 4096 bytes*/
#define PAGE_ORDER 0
/*this value can get from PAGE_ORDER*/
#define PAGES_NUMBER 1
struct proc_dir_entry* proc_memshare_dir ;
unsigned long kernel_memaddr = 0;
unsigned long kernel_memsize = 0;
static int proc_read_phymem_addr(char* page, char** start, off_t off, int count)
{
return sprintf(page, "%08lx\n", __pa(kernel_memaddr));
}
static int proc_read_phymem_size(char* page, char** start, off_t off, int count)
{
return sprintf(page, "%lu\n", kernel_memsize);
}
static int __init init(void)
{
/*build proc dir "memshare"and two proc files: phymem_addr, phymem_size in the dir*/
proc_memshare_dir = proc_mkdir(PROC_MEMSHARE_DIR, NULL);
create_proc_info_entry(PROC_MEMSHARE_PHYADDR, 0, proc_memshare_dir,
proc_read_phymem_addr);
create_proc_info_entry(PROC_MEMSHARE_SIZE, 0, proc_memshare_dir,
proc_read_phymem_size);
/*alloc one page*/
kernel_memaddr = __get_free_pages(GFP_KERNEL, PAGE_ORDER);
if (!kernel_memaddr) {
printk("Allocate memory failure!\n");
} else {
SetPageReserved(virt_to_page(kernel_memaddr));
kernel_memsize = PAGES_NUMBER * PAGE_SIZE;
printk("Allocate memory success!. The phy mem addr=%08lx, size=%lu\n",
__pa(kernel_memaddr), kernel_memsize);
}
return 0;
}
static void __exit fini(void)
{
printk("The content written by user is: %s\n", (unsigned char*) kernel_memaddr);
ClearPageReserved(virt_to_page(kernel_memaddr));
free_pages(kernel_memaddr, PAGE_ORDER);
remove_proc_entry(PROC_MEMSHARE_PHYADDR, proc_memshare_dir);
remove_proc_entry(PROC_MEMSHARE_SIZE, proc_memshare_dir);
remove_proc_entry(PROC_MEMSHARE_DIR, NULL);
return;
}
module_init(init);
module_exit(fini);
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Godbach (nylzhaowei@163.com)");
MODULE_DESCRIPTION("Kernel memory share module.");
```
```c
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/mman.h>
int main(int argc, char* argv[])
{
if (argc != 2) {
printf("Usage: %s string\n", argv[0]);
return 0;
}
unsigned long phymem_addr, phymem_size;
char* map_addr;
char s[256];
int fd;
/*get the physical address of allocated memory in kernel*/
fd = open("/proc/memshare/phymem_addr", O_RDONLY);
if (fd < 0) {
printf("cannot open file /proc/memshare/phymem_addr\n");
return 0;
}
read(fd, s, sizeof(s));
sscanf(s, "%lx", &phymem_addr);
close(fd);
/*get the size of allocated memory in kernel*/
fd = open("/proc/memshare/phymem_size", O_RDONLY);
if (fd < 0) {
printf("cannot open file /proc/memshare/phymem_size\n");
return 0;
}
read(fd, s, sizeof(s));
sscanf(s, "%lu", &phymem_size);
close(fd);
printf("phymem_addr=%lx, phymem_size=%lu\n", phymem_addr, phymem_size);
/*memory map*/
int map_fd = open("/dev/mem", O_RDWR);
if (map_fd < 0) {
printf("cannot open file /dev/mem\n");
return 0;
}
map_addr = mmap(0, phymem_size, PROT_READ | PROT_WRITE, MAP_SHARED, map_fd,
phymem_addr);
strcpy(map_addr, argv[1]);
munmap(map_addr, phymem_size);
close(map_fd);
return 0;
}
```
```sh
debian:/home/km/memshare# insmod memshare_kernel.ko
debian:/home/km/memshare# ./memshare_user 'hello,world!'
phymem_addr=e64e000, phymem_size=4096
debian:/home/km/memshare# cat /proc/memshare/phymem_addr
0e64e000
debian:/home/km/memshare# cat /proc/memshare/phymem_size
4096
debian:/home/km/memshare# rmmod memshare_kernel
debian:/home/km/memshare# tail /var/log/messages
Sep 27 18:14:24 debian kernel: [50527.567931] Allocate memory success!. The phy mem addr=0e64e000, size=4096
Sep 27 18:15:31 debian kernel: [50592.570986] The content written by user is: hello,world!
```
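The transcript shows the module printing the shared page when it is unloaded. Purely as an illustration (this reader is not part of the original post), user space can also read the page back directly. The sketch below assumes the same `/proc/memshare` entries and a page-sized region, and maps it read-only through `/dev/mem`:
```c
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/mman.h>

int main(void)
{
    unsigned long phymem_addr, phymem_size;
    char s[256];
    int fd;

    /* physical address exported by the kernel module */
    fd = open("/proc/memshare/phymem_addr", O_RDONLY);
    if (fd < 0) {
        perror("open /proc/memshare/phymem_addr");
        return 1;
    }
    read(fd, s, sizeof(s));
    sscanf(s, "%lx", &phymem_addr);
    close(fd);

    /* size of the shared region */
    fd = open("/proc/memshare/phymem_size", O_RDONLY);
    if (fd < 0) {
        perror("open /proc/memshare/phymem_size");
        return 1;
    }
    read(fd, s, sizeof(s));
    sscanf(s, "%lu", &phymem_size);
    close(fd);

    /* map the page read-only through /dev/mem and print its content */
    int map_fd = open("/dev/mem", O_RDONLY);
    if (map_fd < 0) {
        perror("open /dev/mem");
        return 1;
    }
    char *map_addr = mmap(0, phymem_size, PROT_READ, MAP_SHARED, map_fd, phymem_addr);
    if (map_addr == MAP_FAILED) {
        perror("mmap");
        close(map_fd);
        return 1;
    }
    printf("shared memory content: %s\n", map_addr);
    munmap(map_addr, phymem_size);
    close(map_fd);
    return 0;
}
```
Running it after the writer should print the same string that was passed on the writer's command line.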
|
Ruby
|
UTF-8
| 1,334 | 2.609375 | 3 |
[] |
no_license
|
class User < ApplicationRecord
#callback to supplement email uniqueness validation (see db migration where index was added)
#also you don't need self here - you could just do email.downcase!
before_save { email.downcase! }
#validations
validates :name, presence: true, length: { maximum: 50 }
validates :email, presence: true, length: { maximum: 255 },
format: { with: /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i },
uniqueness: true
#This is not sufficient on its own. To cover race conditions, uniqueness was enforced at DB level also (along with an index)
  #has_secure_password method comes shipped with the bcrypt gem (needs gem to be added)
#Needs a xxx_digest column introduced (XXX is the desired attribute name for password) in the users table
#Automatically adds virtual attributes XXX and XXX_confirmation to the model
  #e.g. for a db column named password_digest, the two virtual attributes would be:
#password and password_confirmation so you can do @user.password and @user.password_confirmation
#You can also do @user.authenticate("<password>") to check if the password provided matches the value in original password_digest
has_secure_password
validates :password, presence: true, length: { minimum: 6 }
end
|
C#
|
UTF-8
| 13,887 | 2.53125 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using System.Data;
using System.Data.SqlClient;
using BangazonAPI.Models;
namespace BangazonAPI.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class EmployeeController : ControllerBase
{
private readonly IConfiguration _config;
public EmployeeController(IConfiguration config)
{
_config = config;
}
public SqlConnection Connection
{
get
{
return new SqlConnection(_config.GetConnectionString("DefaultConnection"));
}
}
[HttpGet]
public async Task<IActionResult> Get()
{
using (SqlConnection conn = Connection)
{
conn.Open();
using (SqlCommand cmd = conn.CreateCommand())
{
cmd.CommandText = @"SELECT Employee.Id, Employee.FirstName, Employee.LastName, Employee.Archived, Employee.IsSupervisor, Employee.DepartmentId, ComputerEmployee.AssignDate, ComputerEmployee.UnassignDate, Computer.PurchaseDate, Computer.DecomissionDate, Computer.Make, Computer.Manufacturer, Computer.Archived AS 'Computer Archived', Department.Name AS 'Department Name'
FROM Employee
JOIN Department on Employee.DepartmentId = Department.Id
LEFT JOIN ComputerEmployee on ComputerEmployee.EmployeeId = Employee.Id
LEFT JOIN Computer ON ComputerEmployee.ComputerId = Computer.Id
WHERE ComputerEmployee. UnassignDate is null ";
SqlDataReader reader = cmd.ExecuteReader();
List<Employee> employees = new List<Employee>();
while (reader.Read())
{
if (!reader.IsDBNull(reader.GetOrdinal("PurchaseDate")))
{
Employee employee = new Employee
{
Id = reader.GetInt32(reader.GetOrdinal("Id")),
FirstName = reader.GetString(reader.GetOrdinal("FirstName")),
LastName = reader.GetString(reader.GetOrdinal("LastName")),
IsSupervisor = reader.GetBoolean(reader.GetOrdinal("IsSupervisor")),
Archived = reader.GetBoolean(reader.GetOrdinal("Archived")),
DepartmentId = reader.GetInt32(reader.GetOrdinal("DepartmentId")),
DepartmentName = reader.GetString(reader.GetOrdinal("Department Name")),
Computer = new Computer
{
Id = reader.GetInt32(reader.GetOrdinal("Id")),
PurchaseDate = reader.GetDateTime(reader.GetOrdinal("PurchaseDate")),
DecomissionDate = reader.GetDateTime(reader.GetOrdinal("DecomissionDate")),
Make = reader.GetString(reader.GetOrdinal("Make")),
Manufacturer = reader.GetString(reader.GetOrdinal("Manufacturer")),
Archived = reader.GetBoolean(reader.GetOrdinal("Computer Archived")),
}
};
employees.Add(employee);
}
else
{
{
Employee employee = new Employee
{
Id = reader.GetInt32(reader.GetOrdinal("Id")),
FirstName = reader.GetString(reader.GetOrdinal("FirstName")),
LastName = reader.GetString(reader.GetOrdinal("LastName")),
IsSupervisor = reader.GetBoolean(reader.GetOrdinal("IsSupervisor")),
Archived = reader.GetBoolean(reader.GetOrdinal("Archived")),
DepartmentId = reader.GetInt32(reader.GetOrdinal("DepartmentId")),
DepartmentName = reader.GetString(reader.GetOrdinal("Department Name"))
};
employees.Add(employee);
}
}
}
reader.Close();
return Ok(employees);
}
}
}
[HttpGet("{id}", Name = "GetEmployee")]
public async Task<IActionResult> Get([FromRoute] int id)
{
using (SqlConnection conn = Connection)
{
conn.Open();
using (SqlCommand cmd = conn.CreateCommand())
{
cmd.CommandText = @"
SELECT Employee.Id, Employee.FirstName, Employee.LastName, Employee.Archived, Employee.IsSupervisor, Employee.DepartmentId, ComputerEmployee.AssignDate, ComputerEmployee.UnassignDate, Computer.PurchaseDate, Computer.DecomissionDate, Computer.Make, Computer.Manufacturer, Computer.Archived AS 'Computer Archived', Department.Name AS 'Department Name'
FROM Employee
JOIN Department on Employee.DepartmentId = Department.Id
LEFT JOIN ComputerEmployee on ComputerEmployee.EmployeeId = Employee.Id
LEFT JOIN Computer ON ComputerEmployee.ComputerId = Computer.Id
                        WHERE ComputerEmployee.UnassignDate IS NULL AND Employee.Id = @id";
cmd.Parameters.Add(new SqlParameter("@id", id));
SqlDataReader reader = cmd.ExecuteReader();
Employee employee = null;
if (reader.Read())
{
if (!reader.IsDBNull(reader.GetOrdinal("PurchaseDate")))
{
employee = new Employee
{
Id = reader.GetInt32(reader.GetOrdinal("Id")),
FirstName = reader.GetString(reader.GetOrdinal("FirstName")),
LastName = reader.GetString(reader.GetOrdinal("LastName")),
IsSupervisor = reader.GetBoolean(reader.GetOrdinal("IsSupervisor")),
Archived = reader.GetBoolean(reader.GetOrdinal("Archived")),
DepartmentId = reader.GetInt32(reader.GetOrdinal("DepartmentId")),
DepartmentName = reader.GetString(reader.GetOrdinal("Department Name")),
Computer = new Computer
{
Id = reader.GetInt32(reader.GetOrdinal("Id")),
PurchaseDate = reader.GetDateTime(reader.GetOrdinal("PurchaseDate")),
DecomissionDate = reader.GetDateTime(reader.GetOrdinal("DecomissionDate")),
Make = reader.GetString(reader.GetOrdinal("Make")),
Manufacturer = reader.GetString(reader.GetOrdinal("Manufacturer")),
Archived = reader.GetBoolean(reader.GetOrdinal("Computer Archived")),
}
};
}
else
{
employee = new Employee
{
Id = reader.GetInt32(reader.GetOrdinal("Id")),
FirstName = reader.GetString(reader.GetOrdinal("FirstName")),
LastName = reader.GetString(reader.GetOrdinal("LastName")),
IsSupervisor = reader.GetBoolean(reader.GetOrdinal("IsSupervisor")),
Archived = reader.GetBoolean(reader.GetOrdinal("Archived")),
DepartmentId = reader.GetInt32(reader.GetOrdinal("DepartmentId")),
DepartmentName = reader.GetString(reader.GetOrdinal("Department Name"))
};
}
}
reader.Close();
return Ok(employee);
}
}
}
[HttpPost]
public async Task<IActionResult> Post([FromBody] Employee employee)
{
using (SqlConnection conn = Connection)
{
conn.Open();
using (SqlCommand cmd = conn.CreateCommand())
{
cmd.CommandText = @"INSERT INTO Employee (FirstName, LastName, IsSupervisor, Archived, DepartmentId)
OUTPUT INSERTED.Id
VALUES (@firstName, @lastName, @isSupervisor, @archived, @departmentId)";
cmd.Parameters.Add(new SqlParameter("@firstName", employee.FirstName));
cmd.Parameters.Add(new SqlParameter("@lastName", employee.LastName));
cmd.Parameters.Add(new SqlParameter("@isSupervisor", employee.IsSupervisor));
cmd.Parameters.Add(new SqlParameter("@archived", employee.Archived));
cmd.Parameters.Add(new SqlParameter("@departmentId", employee.DepartmentId));
int newId = (int)cmd.ExecuteScalar();
employee.Id = newId;
return CreatedAtRoute("GetEmployee", new { id = newId }, employee);
}
}
}
[HttpPut("{id}")]
public async Task<IActionResult> Put([FromRoute] int id, [FromBody] Employee employee)
{
try
{
using (SqlConnection conn = Connection)
{
conn.Open();
using (SqlCommand cmd = conn.CreateCommand())
{
cmd.CommandText = @"UPDATE Employee
SET FirstName = @firstName,
LastName = @lastName,
IsSupervisor = @isSupervisor,
Archived = @archived,
DepartmentId = @departmentId
WHERE Id = @id";
cmd.Parameters.Add(new SqlParameter("@firstName", employee.FirstName));
cmd.Parameters.Add(new SqlParameter("@lastName", employee.LastName));
cmd.Parameters.Add(new SqlParameter("@isSupervisor", employee.IsSupervisor));
cmd.Parameters.Add(new SqlParameter("@archived", employee.Archived));
cmd.Parameters.Add(new SqlParameter("@departmentId", employee.DepartmentId));
cmd.Parameters.Add(new SqlParameter("@id", id));
int rowsAffected = cmd.ExecuteNonQuery();
if (rowsAffected > 0)
{
return new StatusCodeResult(StatusCodes.Status204NoContent);
}
throw new Exception("No rows affected");
}
}
}
catch (Exception)
{
if (!EmployeeExists(id))
{
return NotFound();
}
else
{
throw;
}
}
}
[HttpDelete("{id}")]
public async Task<IActionResult> Delete([FromRoute] int id)
{
try
{
using (SqlConnection conn = Connection)
{
conn.Open();
using (SqlCommand cmd = conn.CreateCommand())
{
cmd.CommandText = @"UPDATE Employee
SET Archived = 1
WHERE Id = @id";
cmd.Parameters.Add(new SqlParameter("@id", id));
int rowsAffected = cmd.ExecuteNonQuery();
if (rowsAffected > 0)
{
return new StatusCodeResult(StatusCodes.Status204NoContent);
}
throw new Exception("No rows affected");
}
}
}
catch (Exception)
{
if (!EmployeeExists(id))
{
return NotFound();
}
else
{
throw;
}
}
}
private bool EmployeeExists(int id)
{
using (SqlConnection conn = Connection)
{
conn.Open();
using (SqlCommand cmd = conn.CreateCommand())
{
cmd.CommandText = @"
SELECT Id, FirstName, LastName, IsSupervisor, Archived, DepartmentId
FROM Employee
WHERE Id = @id";
cmd.Parameters.Add(new SqlParameter("@id", id));
SqlDataReader reader = cmd.ExecuteReader();
return reader.Read();
}
}
}
}
}
|
Markdown
|
UTF-8
| 1,311 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
---
title: How to suggest a feature
description: Contributor Info
tags:
- MyCrypto
priority: 66
date_published: '2017-11-16'
date_modified: '2021-10-26'
---
If there's a feature that you think would make MyCrypto a better product, we
want to know. Here's the best way to tell us:
1. **Search for your suggestion on [GitHub](https://github.com/MyCryptoHQ/MyCrypto/issues)** - If you've thought of a good new feature,
chances are someone else may have too. Show your support for a request by using
the thumbs up response on the GitHub issue.
2. **Describe your suggestion** - Describe in detail how you would want this
new feature to work. If something doesn't work very well, it helps to know
the pain points. If you have an idea of how to make it better, feel free to
suggest!
3. **Provide examples** - If it's something another website or app is doing,
shoot us a link or a screenshot. We build better projects by working with
other developers in our community.
Features that are requested with the above information have a better chance of
getting implemented.
If there is no existing GitHub issue for your feature request yet, feel free to [open a new issue](https://github.com/MyCryptoHQ/MyCrypto/issues/new), or [send us an email](mailto:support@mycrypto.com).
Thanks for the suggestions!
|
JavaScript
|
UTF-8
| 7,788 | 2.59375 | 3 |
[] |
no_license
|
var pairing = require('./HAPPairing').HAPtoAccessoryPairing;
var http = require('http');
var mdns = require('mdns');
var Accessory, Service, Characteristic, UUIDGen;
var uuid = require('./util/uuid');
module.exports = function(homebridge) {
console.log("homebridge API version: " + homebridge.version);
// Accessory must be created from PlatformAccessory Constructor
Accessory = homebridge.platformAccessory;
// Service and Characteristic are from hap-nodejs
Service = homebridge.hap.Service;
Characteristic = homebridge.hap.Characteristic;
UUIDGen = homebridge.hap.uuid;
// For platform plugin to be considered as dynamic platform plugin,
// registerPlatform(pluginName, platformName, constructor, dynamic), dynamic must be true
homebridge.registerPlatform("homebridge-HomeKit", "HomeKit", HomeKitPlatform, true);
}
// Platform constructor
// config may be null
// api may be null if launched from old homebridge version
function HomeKitPlatform(log, config, api) {
log("HomeKitPlatform Init");
var platform = this;
this.log = log;
this.config = config;
this.accessories = [];
// Save the API object as plugin needs to register new accessory via this object.
this.api = api;
//Create a way to track items in the browser
this.mdnsAccessories=[];
//Create a way to track accessories provided from homebridge
this.priorAccessories=[];
// Listen to event "didFinishLaunching", this means homebridge already finished loading cached accessories
// Platform Plugin should only register new accessory that doesn't exist in homebridge after this event.
// Or start discover new accessories
this.api.on('didFinishLaunching', function() {
var browser = mdns.createBrowser('_hap._tcp');
browser.on('serviceUp', function(info, flags) {
console.log("HomeKit Accessory Found: "+info.name);
if (platform.config.HAPAccessories[info.name]!==undefined)
InitializeAccessory({ "Name" : info.name, "IP":info.addresses[info.addresses.length-1], "Port":info.port, "PIN":platform.config.HAPAccessories[info.name]});
});
browser.on('serviceDown', function(info, flags) {
console.log("down "+info.name);
//
});
browser.on('error', function(error) {
//console.error(error.stack);
});
browser.start();
}.bind(this));
function InitializeAccessory(HAPInformation) {
    var PairingData = {};
PairingData.my_username = uuid.generate('hap-nodejs:client:'+HAPInformation.Name);
PairingData.acc_mdnsname=HAPInformation.Name;
PairingData.acc_lastip=HAPInformation.IP;
PairingData.acc_lastport=HAPInformation.Port;
PairingData.acc_pin=HAPInformation.PIN;
var myPairing = new pairing();
PairingData.PairProcess = myPairing;
    //throw an event instead of doing it here.
myPairing.PairAccessory(PairingData);
}
}
// Function invoked when homebridge tries to restore cached accessory
// Developer can configure accessory at here (like setup event handler)
// Update current value
HomeKitPlatform.prototype.configureAccessory = function(accessory) {
this.log(accessory.displayName, "Configure Accessory");
var platform = this;
// set the accessory to reachable if plugin can currently process the accessory
// otherwise set to false and update the reachability later by invoking
// accessory.updateReachability()
accessory.reachable = false;
accessory.on('identify', function(paired, callback) {
platform.log(accessory.displayName, "Identify!!!");
callback();
});
this.accessories.push(accessory);
}
//Handler will be invoked when the user tries to configure your plugin
//Callback can be cached and invoked when necessary
HomeKitPlatform.prototype.configurationRequestHandler = function(context, request, callback) {
this.log("Context: ", JSON.stringify(context));
this.log("Request: ", JSON.stringify(request));
// Check the request response
if (request && request.response && request.response.inputs && request.response.inputs.name) {
this.addAccessory(request.response.inputs.name);
// Invoke callback with config will let homebridge save the new config into config.json
// Callback = function(response, type, replace, config)
// set "type" to platform if the plugin is trying to modify platforms section
// set "replace" to true will let homebridge replace existing config in config.json
// "config" is the data platform trying to save
callback(null, "platform", true, {"platform":"HomeKitPlatform", "otherConfig":"SomeData"});
return;
}
// - UI Type: Input
// Can be used to request input from user
// User response can be retrieved from request.response.inputs next time
// when configurationRequestHandler being invoked
var respDict = {
"type": "Interface",
"interface": "input",
"title": "Add Accessory",
"items": [
{
"id": "name",
"title": "Name",
"placeholder": "Fancy Light"
}//,
// {
// "id": "pw",
// "title": "Password",
// "secure": true
// }
]
}
// - UI Type: List
// Can be used to ask user to select something from the list
// User response can be retrieved from request.response.selections next time
// when configurationRequestHandler being invoked
// var respDict = {
// "type": "Interface",
// "interface": "list",
// "title": "Select Something",
// "allowMultipleSelection": true,
// "items": [
// "A","B","C"
// ]
// }
// - UI Type: Instruction
// Can be used to ask user to do something (other than text input)
// Hero image is base64 encoded image data. Not really sure the maximum length HomeKit allows.
// var respDict = {
// "type": "Interface",
// "interface": "instruction",
// "title": "Almost There",
// "detail": "Please press the button on the bridge to finish the setup.",
// "heroImage": "base64 image data",
// "showActivityIndicator": true,
// "showNextButton": true,
// "buttonText": "Login in browser",
// "actionURL": "https://google.com"
// }
// Plugin can set context to allow it track setup process
context.ts = "Hello";
//invoke callback to update setup UI
callback(respDict);
}
// Sample function to show how developer can add accessory dynamically from outside event
HomeKitPlatform.prototype.addAccessory = function(accessoryName) {
this.log("Add Accessory");
var platform = this;
var uuid;
uuid = UUIDGen.generate(accessoryName);
var newAccessory = new Accessory(accessoryName, uuid);
newAccessory.on('identify', function(paired, callback) {
platform.log(accessory.displayName, "Identify!!!");
callback();
});
// Plugin can save context on accessory
// To help restore accessory in configureAccessory()
// newAccessory.context.something = "Something"
newAccessory.addService(Service.Lightbulb, "Test Light")
.getCharacteristic(Characteristic.On)
.on('set', function(value, callback) {
platform.log(accessory.displayName, "Light -> " + value);
callback();
});
this.accessories.push(newAccessory);
this.api.registerPlatformAccessories("homebridge-HomeKitPlatform", "HomeKitPlatform", [newAccessory]);
}
HomeKitPlatform.prototype.updateAccessoriesReachability = function() {
this.log("Update Reachability");
for (var index in this.accessories) {
var accessory = this.accessories[index];
accessory.updateReachability(false);
}
}
// Sample function to show how developer can remove accessory dynamically from outside event
HomeKitPlatform.prototype.removeAccessory = function() {
this.log("Remove Accessory");
  this.api.unregisterPlatformAccessories("homebridge-HomeKit", "HomeKit", this.accessories);
this.accessories = [];
}
|
Java
|
UTF-8
| 245 | 1.664063 | 2 |
[] |
no_license
|
package com.wzgiceman.retrofitaysproxy.impl;
import android.util.Log;
/**
 * cglib request
* Created by WZG on 2017/1/19.
*/
public class HttpRequestCglibImpl {
public void request() {
Log.e("tag","-------->htt请求");
}
}
|
C#
|
UTF-8
| 8,998 | 2.765625 | 3 |
[] |
no_license
|
using System;
namespace WorkingStandards.Entities.Reports
{
/// <summary>
    /// Report record [Product summary broken down by workshop]
/// </summary>
public class SummeryOfProductsInContextOfWorkGuild : IComparable<SummeryOfProductsInContextOfWorkGuild>
{
/// <summary>
        /// Auxiliary field that simplifies aggregating the output in the report itself
/// </summary>
public int Group => 1;
/// <summary>
        /// Product code
/// </summary>
public decimal ProductId { get; set; }
/// <summary>
        /// Product mark
/// </summary>
public string ProductMark { get; set; }
/// <summary>
        /// Product name
/// </summary>
public string ProductName { get; set; }
/// <summary>
        /// Labour intensity for the individual workshops and for the plant as a whole
/// </summary>
public decimal Vstk2 { get; set; }
public decimal Vstk3 { get; set; }
public decimal Vstk4 { get; set; }
public decimal Vstk5 { get; set; }
public decimal Vstk21 { get; set; }
public decimal VstkZavod { get; set; }
public decimal Rstk2 { get; set; }
public decimal Rstk3 { get; set; }
public decimal Rstk4 { get; set; }
public decimal Rstk5 { get; set; }
public decimal Rstk21 { get; set; }
public decimal RstkZavod { get; set; }
public decimal Prtnorm2 { get; set; }
public decimal Prtnorm3 { get; set; }
public decimal Prtnorm4 { get; set; }
public decimal Prtnorm5 { get; set; }
public decimal Prtnorm21 { get; set; }
public decimal PrtnormZavod { get; set; }
public decimal Nadb2 { get; set; }
public decimal Nadb3 { get; set; }
public decimal Nadb4 { get; set; }
public decimal Nadb5 { get; set; }
public decimal Nadb21 { get; set; }
public decimal NadbZavod { get; set; }
public int CompareTo(SummeryOfProductsInContextOfWorkGuild other)
{
const StringComparison ordinalIgnoreCase = StringComparison.OrdinalIgnoreCase;
if (ReferenceEquals(this, other))
{
return 0;
}
if (ReferenceEquals(null, other))
{
return 1;
}
var productIdComparison = ProductId.CompareTo(other.ProductId);
if (productIdComparison != 0)
{
return productIdComparison;
}
var productMarkComparison = string.Compare(ProductMark, other.ProductMark, ordinalIgnoreCase);
if (productMarkComparison != 0)
{
return productMarkComparison;
}
var productNameComparison = string.Compare(ProductName, other.ProductName, ordinalIgnoreCase);
if (productNameComparison != 0)
{
return productNameComparison;
}
var vstk2Comparison = Vstk2.CompareTo(other.Vstk2);
if (vstk2Comparison != 0)
{
return vstk2Comparison;
}
var rstk2Comparison = Rstk2.CompareTo(other.Rstk2);
if (rstk2Comparison != 0)
{
return rstk2Comparison;
}
var prtnorm2Comparison = Prtnorm2.CompareTo(other.Prtnorm2);
if (prtnorm2Comparison != 0)
{
return prtnorm2Comparison;
}
var nadb2Comparison = Nadb2.CompareTo(other.Nadb2);
if (nadb2Comparison != 0)
{
return nadb2Comparison;
}
var vstk3Comparison = Vstk3.CompareTo(other.Vstk3);
if (vstk3Comparison != 0)
{
return vstk3Comparison;
}
var rstk3Comparison = Rstk3.CompareTo(other.Rstk3);
if (rstk3Comparison != 0)
{
return rstk3Comparison;
}
var prtnorm3Comparison = Prtnorm3.CompareTo(other.Prtnorm3);
if (prtnorm3Comparison != 0)
{
return prtnorm3Comparison;
}
var nadb3Comparison = Nadb3.CompareTo(other.Nadb3);
if (nadb3Comparison != 0)
{
return nadb3Comparison;
}
var vstk4Comparison = Vstk4.CompareTo(other.Vstk4);
if (vstk4Comparison != 0)
{
return vstk4Comparison;
}
var rstk4Comparison = Rstk4.CompareTo(other.Rstk4);
if (rstk4Comparison != 0)
{
return rstk4Comparison;
}
var prtnorm4Comparison = Prtnorm4.CompareTo(other.Prtnorm4);
if (prtnorm4Comparison != 0)
{
return prtnorm4Comparison;
}
var nadb4Comparison = Nadb4.CompareTo(other.Nadb4);
if (nadb4Comparison != 0)
{
return nadb4Comparison;
}
var vstk5Comparison = Vstk5.CompareTo(other.Vstk5);
if (vstk5Comparison != 0)
{
return vstk5Comparison;
}
var rstk5Comparison = Rstk5.CompareTo(other.Rstk5);
if (rstk5Comparison != 0)
{
return rstk5Comparison;
}
var prtnorm5Comparison = Prtnorm5.CompareTo(other.Prtnorm5);
if (prtnorm5Comparison != 0)
{
return prtnorm5Comparison;
}
var nadb5Comparison = Nadb5.CompareTo(other.Nadb5);
if (nadb5Comparison != 0)
{
return nadb5Comparison;
}
var vstk21Comparison = Vstk21.CompareTo(other.Vstk21);
if (vstk21Comparison != 0)
{
return vstk21Comparison;
}
var rstk21Comparison = Rstk21.CompareTo(other.Rstk21);
if (rstk21Comparison != 0)
{
return rstk21Comparison;
}
var prtnorm21Comparison = Prtnorm21.CompareTo(other.Prtnorm21);
if (prtnorm21Comparison != 0)
{
return prtnorm21Comparison;
}
var nadb21Comparison = Nadb21.CompareTo(other.Nadb21);
if (nadb21Comparison != 0)
{
return nadb21Comparison;
}
var vstkZavodComparison = VstkZavod.CompareTo(other.VstkZavod);
if (vstkZavodComparison != 0)
{
return vstkZavodComparison;
}
var rstkZavodComparison = RstkZavod.CompareTo(other.RstkZavod);
if (rstkZavodComparison != 0)
{
return rstkZavodComparison;
}
var prtnormZavodComparison = PrtnormZavod.CompareTo(other.PrtnormZavod);
if (prtnormZavodComparison != 0)
{
return prtnormZavodComparison;
}
return NadbZavod.CompareTo(other.NadbZavod);
}
protected bool Equals(SummeryOfProductsInContextOfWorkGuild other)
{
const StringComparison ordinalIgnoreCase = StringComparison.OrdinalIgnoreCase;
return ProductId == other.ProductId
&& string.Equals(ProductMark, other.ProductMark, ordinalIgnoreCase)
&& string.Equals(ProductName, other.ProductName, ordinalIgnoreCase)
&& Vstk2 == other.Vstk2
&& Rstk2 == other.Rstk2
&& Prtnorm2 == other.Prtnorm2
&& Nadb2 == other.Nadb2
&& Vstk3 == other.Vstk3
&& Rstk3 == other.Rstk3
&& Prtnorm3 == other.Prtnorm3
&& Nadb3 == other.Nadb3
&& Vstk4 == other.Vstk4
&& Rstk4 == other.Rstk4
&& Prtnorm4 == other.Prtnorm4
&& Nadb4 == other.Nadb4
&& Vstk5 == other.Vstk5
&& Rstk5 == other.Rstk5
&& Prtnorm5 == other.Prtnorm5
&& Nadb5 == other.Nadb5
&& Vstk21 == other.Vstk21
&& Rstk21 == other.Rstk21
&& Prtnorm21 == other.Prtnorm21
&& Nadb21 == other.Nadb21
&& VstkZavod == other.VstkZavod
&& RstkZavod == other.RstkZavod
&& PrtnormZavod == other.PrtnormZavod
&& NadbZavod == other.NadbZavod;
}
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj))
{
return false;
}
if (ReferenceEquals(this, obj))
{
return true;
}
if (obj.GetType() != this.GetType())
{
return false;
}
return Equals((SummeryOfProductsInContextOfWorkGuild) obj);
}
public override int GetHashCode()
{
unchecked
{
var hashCode = ProductId.GetHashCode();
hashCode = (hashCode * 397) ^ (ProductMark != null ? StringComparer.OrdinalIgnoreCase.GetHashCode(ProductMark) : 0);
hashCode = (hashCode * 397) ^ (ProductName != null ? StringComparer.OrdinalIgnoreCase.GetHashCode(ProductName) : 0);
hashCode = (hashCode * 397) ^ Vstk2.GetHashCode();
hashCode = (hashCode * 397) ^ Rstk2.GetHashCode();
hashCode = (hashCode * 397) ^ Prtnorm2.GetHashCode();
hashCode = (hashCode * 397) ^ Nadb2.GetHashCode();
hashCode = (hashCode * 397) ^ Vstk3.GetHashCode();
hashCode = (hashCode * 397) ^ Rstk3.GetHashCode();
hashCode = (hashCode * 397) ^ Prtnorm3.GetHashCode();
hashCode = (hashCode * 397) ^ Nadb3.GetHashCode();
hashCode = (hashCode * 397) ^ Vstk4.GetHashCode();
hashCode = (hashCode * 397) ^ Rstk4.GetHashCode();
hashCode = (hashCode * 397) ^ Prtnorm4.GetHashCode();
hashCode = (hashCode * 397) ^ Nadb4.GetHashCode();
hashCode = (hashCode * 397) ^ Vstk5.GetHashCode();
hashCode = (hashCode * 397) ^ Rstk5.GetHashCode();
hashCode = (hashCode * 397) ^ Prtnorm5.GetHashCode();
hashCode = (hashCode * 397) ^ Nadb5.GetHashCode();
hashCode = (hashCode * 397) ^ Vstk21.GetHashCode();
hashCode = (hashCode * 397) ^ Rstk21.GetHashCode();
hashCode = (hashCode * 397) ^ Prtnorm21.GetHashCode();
hashCode = (hashCode * 397) ^ Nadb21.GetHashCode();
hashCode = (hashCode * 397) ^ VstkZavod.GetHashCode();
hashCode = (hashCode * 397) ^ RstkZavod.GetHashCode();
hashCode = (hashCode * 397) ^ PrtnormZavod.GetHashCode();
hashCode = (hashCode * 397) ^ NadbZavod.GetHashCode();
return hashCode;
}
}
}
}
|
Java
|
UTF-8
| 18,090 | 1.945313 | 2 |
[] |
no_license
|
package com.elasticpath.tools.sync.merge.impl;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import javax.persistence.ManyToMany;
import javax.persistence.MapKey;
import javax.persistence.OneToMany;
import com.elasticpath.persistence.api.Persistable;
import com.elasticpath.tools.sync.exception.SyncToolConfigurationException;
import com.elasticpath.tools.sync.exception.SyncToolRuntimeException;
import com.elasticpath.tools.sync.merge.BeanCreator;
import com.elasticpath.tools.sync.merge.MergeEngine;
import com.elasticpath.tools.sync.merge.PersistentStateLocator;
import com.elasticpath.tools.sync.merge.configuration.EntityFilter;
import com.elasticpath.tools.sync.merge.configuration.EntityLocator;
import com.elasticpath.tools.sync.merge.configuration.GuidLocator;
import com.elasticpath.tools.sync.merge.configuration.MergeBoundarySpecification;
import com.elasticpath.tools.sync.merge.configuration.ValueObjectMerger;
import com.elasticpath.tools.sync.utils.MethodComparator;
import com.elasticpath.tools.sync.utils.SyncUtils;
/**
* Merges objects before save.
*/
public class MergeEngineImpl implements MergeEngine {
private MergeBoundarySpecification mergeBoundarySpec;
private EntityLocator entityLocator;
private GuidLocator guidLocator;
private BeanCreator beanCreator;
private ValueObjectMerger valueObjectMerger;
private PersistentStateLocator jpaPersistentStateLocator;
private SyncUtils syncUtils;
private Comparator<Persistable> guidComparator;
private final ThreadLocal<CyclicDependencyManager> dependencyManagerThreadLocal = new ThreadLocal<CyclicDependencyManager>();
private Map<String, EntityFilter> mergeFilters;
/**
* Merge source on target.
*
* @param source source object
* @param target target object
* @throws SyncToolConfigurationException {@link SyncToolConfigurationException}
*/
public void processMerge(final Persistable source, final Persistable target) throws SyncToolConfigurationException {
sanityCheck(source, target);
initializeCyclicDependencyManager();
mergeBoundarySpec.initialize(source.getClass());
mergeInternal(source, target);
// throw new RuntimeException();
}
private void initializeCyclicDependencyManager() {
CyclicDependencyManager cyclicDependencyManager = dependencyManagerThreadLocal.get();
if (cyclicDependencyManager == null) {
cyclicDependencyManager = new CyclicDependencyManager();
dependencyManagerThreadLocal.set(cyclicDependencyManager);
}
cyclicDependencyManager.clearDependencies();
}
private void sanityCheck(final Persistable source, final Persistable target) {
if (!source.getClass().equals(target.getClass())) {
throw new RuntimeException("Source and Target should be instances of the same class"); // NOPMD
}
}
/**
* Implementation of merge algorithm source on target.
*
* @param source source object
* @param target target object
* @throws SyncToolConfigurationException in case of configuration error
*/
protected void mergeInternal(final Persistable source, final Persistable target) throws SyncToolConfigurationException {
sanityCheck(source, target);
dependencyManagerThreadLocal.get().registerProcessedObject(source, target);
final Map<Method, Method> basicAttributes = new TreeMap<Method, Method>(new MethodComparator());
final Map<Method, Method> singleValuedAssociations = new TreeMap<Method, Method>(new MethodComparator());
final Map<Method, Method> collectionValuedAssociations = new TreeMap<Method, Method>(new MethodComparator());
final Set<Method> postLoadMethods = new HashSet<Method>();
jpaPersistentStateLocator.extractPersistentStateAttributes(source.getClass(), basicAttributes, singleValuedAssociations,
collectionValuedAssociations, postLoadMethods);
for (final Entry<Method, Method> basicAttribute : basicAttributes.entrySet()) {
resolveBasicAttribute(source, target, basicAttribute);
}
for (final Entry<Method, Method> singleValuedAssociation : singleValuedAssociations.entrySet()) {
resolveSingleValuedAssociation(source, target, singleValuedAssociation);
}
for (final Entry<Method, Method> collectionValuedAssociation : collectionValuedAssociations.entrySet()) {
resolveCollectionValuedAssociation(source, target, collectionValuedAssociation);
}
for (final Method postLoadMethod : postLoadMethods) {
syncUtils.invokePostLoadMethod(target, postLoadMethod);
}
}
private void resolveBasicAttribute(final Persistable object, final Persistable target, final Entry<Method, Method> accessors) {
syncUtils.invokeCopyMethod(object, target, accessors);
}
private void resolveSingleValuedAssociation(final Persistable source, final Persistable target, final Entry<Method, Method> accessors)
throws SyncToolConfigurationException {
final Method getterMethod = accessors.getKey();
final Method setterMethod = accessors.getValue();
final Persistable sourceValue = (Persistable) syncUtils.invokeGetterMethod(source, getterMethod);
Persistable targetValue = (Persistable) syncUtils.invokeGetterMethod(target, getterMethod);
if (sourceValue == null) { // sets null value
syncUtils.invokeSetterMethod(target, setterMethod, sourceValue);
return;
}
final CyclicDependencyManager cyclicDependencyManager = dependencyManagerThreadLocal.get();
if (cyclicDependencyManager.isCyclicDependency(sourceValue)) {
syncUtils.invokeSetterMethod(target, setterMethod, cyclicDependencyManager.getTargetReference(sourceValue));
return;
}
if (mergeBoundarySpec.stopMerging(sourceValue.getClass())) {
if (targetValue == null || guidComparator.compare(targetValue, sourceValue) != 0) {
syncUtils.invokeSetterMethod(target, setterMethod, retrieveFreshReference(sourceValue));
}
return;
}
if (targetValue == null) {
targetValue = createNewPersistence(target, setterMethod, sourceValue);
}
// TODO: consider cascadeType annotation and don't prolong merge operation even if the entity is not in stop list,
// since update will cause JPA exception at em.merge()
mergeInternal(sourceValue, targetValue);
}
private Persistable createNewPersistence(final Persistable target, final Method setterMethod, final Persistable sourceValue) {
final Persistable newTargetValue = beanCreator.createBean(sourceValue.getClass());
syncUtils.invokeSetterMethod(target, setterMethod, newTargetValue);
return newTargetValue;
}
/**
* Retrieves fresh reference (for instance, brand is changed in product).
*
* @param sourceValue domain object to find in target database
* @return fresh value
* @throws SyncToolRuntimeException if dependency on source value cannot be satisfied
*/
Persistable retrieveFreshReference(final Persistable sourceValue) throws SyncToolRuntimeException {
final Persistable retrievedObject = entityLocator.locatePersistentReference(sourceValue);
if (retrievedObject == null) {
throw new SyncToolRuntimeException("Cannot retrieve entity at target environment corresponding to source environment: "
+ sourceValue.getClass() + " with code: " + guidLocator.locateGuid(sourceValue));
}
return retrievedObject;
}
@SuppressWarnings("unchecked")
private void resolveCollectionValuedAssociation(final Object source, final Object target, final Entry<Method, Method> accessors)
throws SyncToolConfigurationException {
final Method getterMethod = accessors.getKey();
final Method setterMethod = accessors.getValue();
final Object sourceCollectionOrMap = syncUtils.invokeGetterMethod(source, getterMethod);
Object targetCollectionOrMap = syncUtils.invokeGetterMethod(target, getterMethod);
if (sourceCollectionOrMap == null) {
// Nullify target's collection
syncUtils.invokeSetterMethod(target, setterMethod, sourceCollectionOrMap);
return;
}
if (targetCollectionOrMap == null) {
targetCollectionOrMap = createEmptyCollectionOrMap(target, setterMethod, sourceCollectionOrMap);
}
if (shouldNotMergeCollection(getterMethod)) {
refreshCollection(getterMethod, sourceCollectionOrMap, targetCollectionOrMap);
return;
}
if (sourceCollectionOrMap instanceof Map) {
mergeMap(getterMethod, (Map<?, ?>) sourceCollectionOrMap, (Map<Object, Object>) targetCollectionOrMap);
} else if (sourceCollectionOrMap instanceof Collection) {
mergeCollection((Collection<?>) sourceCollectionOrMap, (Collection<Object>) targetCollectionOrMap);
} else {
throw new SyncToolRuntimeException("Unexpected collection type: " + sourceCollectionOrMap);
}
}
private Object createEmptyCollectionOrMap(final Object target, final Method setterMethod, final Object sourceCollectionOrMap) {
Object targetCollectionOrMap = null;
if (sourceCollectionOrMap instanceof List<?>) {
targetCollectionOrMap = new ArrayList<Object>();
} else if (sourceCollectionOrMap instanceof Set<?>) {
targetCollectionOrMap = new HashSet<Object>();
} else if (sourceCollectionOrMap instanceof Map<?, ?>) {
targetCollectionOrMap = new HashMap<Object, Object>();
}
syncUtils.invokeSetterMethod(target, setterMethod, targetCollectionOrMap);
return targetCollectionOrMap;
}
/**
* Merges two collections of entities or value objects.
* For entities the algorithm updates each object from target collection,
* for value objects it deletes old and puts new into target collection.
*
* @param sourceCollection
* @param targetCollection
* @throws SyncToolConfigurationException
*/
private void mergeCollection(final Collection<?> sourceCollection, final Collection<? super Object> targetCollection)
throws SyncToolConfigurationException {
final Collection<Object> newObjects = new ArrayList<Object>();
final CollectionElementsRemoveManager removeManager = new CollectionElementsRemoveManager(targetCollection);
for (final Object sourceValue : sourceCollection) {
final Persistable newPersistence = mergeCollectionElements(removeManager, (Persistable) sourceValue, targetCollection,
isMergeable((Persistable) sourceValue));
if (newPersistence != null) {
newObjects.add(newPersistence);
}
}
if (!newObjects.isEmpty() && valueObjectMerger.isMergeRequired(newObjects.iterator().next().getClass())) {
for (final Object newObject : newObjects) {
if (!targetCollection.add(newObject)) {
updateValueObjectInCollection(targetCollection, newObject);
}
}
} else {
targetCollection.addAll(newObjects);
}
removeManager.removeSurplusObjectsFromCollection(targetCollection);
}
/**
* Check whether the sourceEntry can be merged or not based on a configured filter
* class.
*
* @param sourceEntry the entry to check
* @return true if filter returns true or no filter configured
*/
boolean isMergeable(final Persistable sourceEntry) {
final String className = sourceEntry.getClass().getName();
return !(mergeFilters.containsKey(className) && mergeFilters.get(className).isFiltered(sourceEntry));
}
/**
* Finds object equal to new object in collection and updates it.
*
* @param targetCollection collection to update an object in
* @param newObject object to take fields from for update
*/
void updateValueObjectInCollection(final Collection<?> targetCollection, final Object newObject) {
for (final Object targetObject : targetCollection) {
if (targetObject.equals(newObject)) {
valueObjectMerger.merge(newObject, targetObject);
return;
}
}
}
private void mergeMap(final Method getterMethod, final Map<?, ?> sourceMap, final Map<? super Object, ? super Object> targetMap)
throws SyncToolConfigurationException {
final Map<Object, Object> newObjects = new HashMap<Object, Object>();
final CollectionElementsRemoveManager removeManager = new CollectionElementsRemoveManager(targetMap.values());
for (final Object sourceEntry : sourceMap.values()) {
final Persistable elementToAdd = mergeCollectionElements(removeManager, (Persistable) sourceEntry, targetMap.values(),
isMergeable((Persistable) sourceEntry));
if (elementToAdd != null) {
final Object key = syncUtils.getMapKey(getterMethod.getAnnotation(MapKey.class), elementToAdd);
newObjects.put(key, elementToAdd);
}
}
targetMap.putAll(newObjects);
removeManager.removeSurplusObjectsFromMap(targetMap);
}
/**
* Refreshes references in the <code>targetCollectionOrMap</code> using corresponding references from <code>sourceCollectionOrMap</code>.
*
* @param getterMethod getter method
* @param sourceCollectionOrMap source container
* @param targetCollectionOrMap target container
* @throws SyncToolConfigurationException in case of unsupported container type
*/
@SuppressWarnings("unchecked")
void refreshCollection(final Method getterMethod, final Object sourceCollectionOrMap, final Object targetCollectionOrMap)
throws SyncToolConfigurationException {
if (sourceCollectionOrMap instanceof Map) {
final Map<Object, Object> targetMap = (Map<Object, Object>) targetCollectionOrMap;
targetMap.clear();
final Map<Object, Object> sourceMap = (Map<Object, Object>) sourceCollectionOrMap;
for (final Entry<Object, Object> entry : sourceMap.entrySet()) {
final Persistable retrieveFreshReference = retrieveFreshReference((Persistable) entry.getValue());
targetMap.put(syncUtils.getMapKey(getterMethod.getAnnotation(MapKey.class), retrieveFreshReference), retrieveFreshReference);
}
} else if (sourceCollectionOrMap instanceof Collection) {
final Collection<Object> targetCollection = (Collection<Object>) targetCollectionOrMap;
targetCollection.clear();
final Collection<Object> sourceCollection = (Collection<Object>) sourceCollectionOrMap;
for (final Object object : sourceCollection) {
targetCollection.add(retrieveFreshReference((Persistable) object));
}
} else {
throw new SyncToolRuntimeException("Unexpected collection type: " + sourceCollectionOrMap);
}
}
/**
* Determines if in provided collection all elements should be updated or merged.
*
* @param method getting annotation for collection to check
* @return true if collection objects should be updated without deep merge (i.e. object's class in the boundary)
*/
boolean shouldNotMergeCollection(final Method method) {
final OneToMany oneToManyAssociation = method.getAnnotation(OneToMany.class);
Class< ? > targetEntity = null;
if (oneToManyAssociation == null) {
final ManyToMany manyToManyAssociation = method.getAnnotation(ManyToMany.class);
if (manyToManyAssociation != null) {
targetEntity = manyToManyAssociation.targetEntity();
}
} else {
targetEntity = oneToManyAssociation.targetEntity();
}
if (targetEntity == null) {
throw new SyncToolRuntimeException("Can't find OneToMany or ManyToMany for collection-value association");
}
return mergeBoundarySpec.stopMerging(targetEntity);
}
private Persistable mergeCollectionElements(final CollectionElementsRemoveManager removeManager, final Persistable sourceValue,
final Collection< ? > targetCollection, final boolean isMergable) throws SyncToolConfigurationException {
if (guidLocator.canQualifyByGuid(sourceValue.getClass())) {
for (final Object targetValue : targetCollection) {
if (guidComparator.compare((Persistable) targetValue, sourceValue) == 0 && isMergable) {
mergeInternal(sourceValue, (Persistable) targetValue);
removeManager.removeIdenticalObject(targetValue);
return null;
}
}
}
if (isMergable) {
final Persistable newTargetValue = beanCreator.createBean(sourceValue.getClass());
mergeInternal(sourceValue, newTargetValue);
removeManager.removeEqualObject(newTargetValue);
return newTargetValue;
}
final Persistable persistable = entityLocator.locatePersistence(sourceValue);
if (persistable == null) {
throw new SyncToolRuntimeException("No entity on target for unmergable persistable: " + sourceValue.toString());
}
return persistable;
}
/**
* @param mergeBoundarySpec the mergeBoundarySpec to set
*/
public void setMergeBoundarySpecification(final MergeBoundarySpecification mergeBoundarySpec) {
this.mergeBoundarySpec = mergeBoundarySpec;
}
/**
* @param entityLocator the entityLocator to set
*/
public void setEntityLocator(final EntityLocator entityLocator) {
this.entityLocator = entityLocator;
}
/**
* @param guidLocator the guidLocator to set
*/
public void setGuidLocator(final GuidLocator guidLocator) {
this.guidLocator = guidLocator;
}
/**
* @param beanCreator the beanCreator to set
*/
public void setBeanCreator(final BeanCreator beanCreator) {
this.beanCreator = beanCreator;
}
/**
* @param jpaPersistentStateLocator the jpaPersistentStateLocator to set
*/
public void setJpaPersistentStateLocator(final PersistentStateLocator jpaPersistentStateLocator) {
this.jpaPersistentStateLocator = jpaPersistentStateLocator;
}
/**
* @param syncUtils the syncUtils to set
*/
public void setSyncUtils(final SyncUtils syncUtils) {
this.syncUtils = syncUtils;
}
/**
* @param guidComparator the guidComparator to set
*/
public void setGuidComparator(final Comparator<Persistable> guidComparator) {
this.guidComparator = guidComparator;
}
/**
* @param valueObjectMerger value object merger to update specific value objects
*/
public void setValueObjectMerger(final ValueObjectMerger valueObjectMerger) {
this.valueObjectMerger = valueObjectMerger;
}
/**
* @param mergeFilters merge filters mapping from String rep of class name to filter
*/
public void setMergeFilters(final Map<String, EntityFilter> mergeFilters) {
this.mergeFilters = mergeFilters;
}
/**
* Get the merge filter for the specified class name string.
*
* @param clazz the class string
* @return the EntityFilter
*/
EntityFilter getMergeFilter(final String clazz) {
return mergeFilters.get(clazz);
}
}
|
Python
|
UTF-8
| 9,281 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
from gkdtex.parse import (
Seq, Arg, PosGroup, KwGroup, Command, Object, Whitespace,
Block, Subscript, Superscript
)
from gkdtex.wrap import parse
from gkdtex.spelling import number_spelling
from collections import deque
from contextlib import contextmanager
import io
import typing
import sys
class Span:
__slots__ = ['offs', 'src']
def __init__(self, src: str, offs: typing.Tuple[int, int]):
self.src = src
self.offs = offs
class CBVFunction:
"""
interpreted call by value functions
"""
def __init__(self,
defaults: list,
default_spans: typing.List[typing.Optional[Span]],
name_offsets: typing.Dict[str, int], obj: Object):
self.body = obj
self.defaults = defaults
self.name_offsets = name_offsets
self.default_spans = default_spans
class CBNFunction:
"""
interpreted call by name functions
"""
def __init__(self,
defaults: list,
default_spans: typing.List[typing.Optional[Span]],
name_offsets: typing.Dict[str, int], obj: Object):
self.body = obj
self.defaults = defaults
self.name_offsets = name_offsets
self.default_spans = default_spans
class Interpreter:
default_initializers = []
default_disposers = []
default_globals = {}
def __init__(self):
### Commands
self.globals = Interpreter.default_globals.copy()
### Contextual Fields
self.filename = None
self.src = None
### The Frames field has a constant reference.
self.frames = deque() # type: typing.Deque[tuple[str, list[typing.Optional[Span]], list[Object], dict[str, int]]]
###
self.state = {}
self.initializers = Interpreter.default_initializers.copy()
self.disposers = Interpreter.default_disposers.copy()
def initialize(self):
for each in self.initializers:
each(self)
def dispose(self):
for each in self.disposers:
each(self)
def _load_src(self):
if self.src is None:
with open(self.filename) as f:
self.src = f.read()
@contextmanager
def change_file(self, filename):
old_filename, self.filename = self.filename, filename
src, self.src = self.src, None
try:
self.filename = filename
self._load_src()
yield
finally:
self.src = src
self.filename = old_filename
def run_file(self, filename, tex_print=sys.stdout.write):
with self.change_file(filename):
src, filename = self.src, self.filename
obj = parse(src, filename)
self.interp(tex_print, obj)
def interp_many(self, tex_print, objs: typing.List[Object]):
i = self.__class__.interp
for each in objs:
i(self, tex_print, each)
def interp(self, tex_print, obj: Object):
if isinstance(obj, Whitespace):
tex_print(obj.text)
elif isinstance(obj, str):
tex_print(obj)
elif isinstance(obj, Seq):
xs = obj.xs
start = 0
end = None
if xs:
if isinstance(xs[0], Whitespace):
start = 1
if isinstance(xs[-1], Whitespace):
end = -1
self.interp_many(tex_print, xs[start:end])
elif isinstance(obj, Subscript):
self.interp(tex_print, obj.a)
tex_print('_')
self.interp(tex_print, obj.b)
elif isinstance(obj, Superscript):
self.interp(tex_print, obj.a)
tex_print('^')
self.interp(tex_print, obj.b)
elif isinstance(obj, Block):
tex_print('{')
self.interp(tex_print, obj.obj)
tex_print('}')
elif isinstance(obj, Arg):
argoff = obj.arg
cmd, _, arguments, names = self.frames[obj.level]
if isinstance(argoff, str):
argoff = names[argoff]
assert isinstance(argoff, int)
else:
assert isinstance(argoff, int)
argoff -= 1
try:
val = arguments[argoff]
except IndexError:
raise IndexError("\\{} doesn't have {} argument".format(cmd, number_spelling(1 + argoff)))
if isinstance(val, str):
tex_print(val)
else:
assert callable(val)
val(self, tex_print)
elif isinstance(obj, Command):
f = self.globals.get(obj.cmd)
if f is None:
tex_print('\\' + obj.cmd)
if obj.args is not None:
for arg in obj.args:
tex_print('{')
if isinstance(arg, KwGroup):
raise ValueError("latex command \\{} "
"cannot use keyword argument {!r}".format(obj.cmd, arg.kw))
if arg.obj is not None:
self.interp(tex_print, arg.obj)
tex_print('}')
return
if isinstance(f, str):
if obj.args is not None:
raise TypeError("\\" + obj.cmd, 'cannot accept arguments.')
tex_print(f)
elif isinstance(f, CBVFunction):
args = obj.args or []
arg_io = io.StringIO()
arguments = f.defaults.copy()
spans = f.default_spans.copy()
name_offsets = f.name_offsets
src = self.src
i = 0
for off, arg in enumerate(args):
off = i
if isinstance(arg, KwGroup):
try:
off = name_offsets[arg.kw]
except KeyError:
raise ValueError("\\{} has no keyword argument {}".format(obj.cmd, arg.kw))
else:
i += 1
if arg.obj is not None:
self.interp(arg_io.write, arg.obj)
arguments[off] = arg_io.getvalue()
arg_io.seek(0)
arg_io.truncate(0)
spans[off] = Span(src, arg.offs)
if any(a is None for a in arguments):
raise ValueError("Invalid call of \\{}, {} argument is not given".format(obj.cmd, number_spelling(1 + arguments.index(None))))
self.frames.appendleft((obj.cmd, spans, arguments, f.name_offsets))
self.interp(tex_print, f.body)
self.frames.popleft()
return
elif isinstance(f, CBNFunction):
args = obj.args or []
arguments = f.defaults.copy()
spans = f.default_spans.copy()
name_offsets = f.name_offsets
src = self.src
i = 0
for arg in args:
off = i
if isinstance(arg, KwGroup):
try:
off = name_offsets[arg.kw]
except KeyError:
raise ValueError("\\{} has no keyword argument {}".format(obj.cmd, arg.kw))
else:
i += 1
# noinspection PyDefaultArgument
if arg.obj:
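                        # the default argument binds the current arg.obj at definition time,
                        # so each lazy closure keeps its own node instead of sharing the last one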
def lazy(self, tex_print, obj=arg.obj):
self.interp(tex_print, obj)
arguments[off] = lazy
spans[off] = Span(src, arg.offs)
if any(a is None for a in arguments):
raise ValueError("Invalid call of \\{}, {} argument is not given".format(obj.cmd, number_spelling(1 + arguments.index(None))))
self.frames.appendleft((obj.cmd, spans, arguments, f.name_offsets))
self.interp(tex_print, f.body)
self.frames.popleft()
return
else:
args = obj.args or []
assert callable(f)
arguments = []
kwargs = {}
for arg in args:
if isinstance(arg, KwGroup):
kwargs[arg.kw] = arg
else:
arguments.append(arg)
f(*arguments, **kwargs, self=self, tex_print=tex_print)
else:
raise TypeError(obj)
def eval_to_string(interpreter: Interpreter, o: Object):
if isinstance(o, Seq) and not o.xs or isinstance(o, Whitespace):
return ''
string_io = io.StringIO()
interpreter.interp(string_io.write, o)
return string_io.getvalue()
def get_raw_from_span(span: Span):
if not span:
return ''
l, r = span.offs
return span.src[l:r]
def get_raw_from_span_params(src: str, offs: 'typing.Optional[tuple[int, int]]'):
if not offs:
return ''
l, r = offs
return src[l:r]
|
Markdown
|
UTF-8
| 1,160 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
---
order: 4
label: 'Global Config'
---
# Global Config
We have added global configuration capabilities to component parameters, which you can use to define the default behavior of the component.
## How to use
To provide default configuration items for some components, register an object that conforms to the `XConfig` interface in the root injector under the injection token `X_CONFIG`, for example:
{{ ____my-app:src/app/app.module.ts:false:false }}
These configuration items are injected into the `XConfigService` service and stored there.
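If the include above does not render, the wiring looks roughly like the sketch below. This is only an illustration: the import path `@ng-nest/ui/core` and the example `theme` value are assumptions, while `X_CONFIG` and `XConfigService` are the names described above — the keys each component actually reads are listed in that component's documentation.
```typescript
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
// Assumed import path -- adjust to wherever your Ng-Nest version exports X_CONFIG.
import { X_CONFIG } from '@ng-nest/ui/core';
import { AppComponent } from './app.component';
@NgModule({
  declarations: [AppComponent],
  imports: [BrowserModule],
  providers: [
    {
      provide: X_CONFIG,
      // Illustrative value only; consult each component's docs for the supported keys.
      useValue: { theme: { colors: { primary: '#096dd9' } } }
    }
  ],
  bootstrap: [AppComponent]
})
export class AppModule {}
```
Components instantiated afterwards fall back to these defaults unless a value is set on the instance itself.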
## Priority of a global configuration item
For any one attribute, the priority of the values from each source is as follows:
A value set directly on a component instance > global default values provided through `X_CONFIG` > `Ng-Nest`'s built-in default values.
## View all available global configurations
The type definition information provided in the `XConfig` interface helps you find all the components and properties that support global configuration items. In addition, the documentation for each component indicates which properties can be specified as global configuration items.
|
SQL
|
UTF-8
| 1,354 | 4.28125 | 4 |
[] |
no_license
|
-- Ensure you've run SalesOrdersStructure.sql
-- and SalesOrdersData.sql in the Sample Databases folder
-- in order to run this example.
-- NOTE: Sample table Students does not exist.
CREATE DATABASE StudentsTest;
USE StudentsTest;
CREATE TABLE Students (
StudentID int PRIMARY KEY NOT NULL,
LastName varchar(50),
FirstName varchar(50),
BirthDate date
);
SELECT Students.StudentID, Students.LastName, Students.FirstName,
YEAR(CURDATE()) - YEAR(Students.BirthDate) -
CASE WHEN MONTH(Students.BirthDate) < MONTH(CURDATE())
THEN 0
WHEN MONTH(Students.BirthDate) > MONTH(CURDATE())
THEN 1
WHEN DAY(Students.BirthDate) > DAY(CURDATE())
THEN 1
ELSE 0 END AS Age
FROM Students;
DROP DATABASE StudentsTest;
-- Similar code using EmpDOB in the Employees table in SalesOrdersSample
-- Ensure you've run SalesOrdersStructure.sql
-- and SalesOrdersData.sql in the Sample Databases folder
-- in order to run this example.
USE SalesOrdersSample;
SELECT Employees.EmployeeID, Employees.EmpLastName, Employees.EmpFirstName,
YEAR(CURDATE()) - YEAR(Employees.EmpDOB) -
CASE WHEN MONTH(Employees.EmpDOB) < MONTH(CURDATE())
THEN 0
WHEN MONTH(Employees.EmpDOB) > MONTH(CURDATE())
THEN 1
WHEN DAY(Employees.EmpDOB) > DAY(CURDATE())
THEN 1
ELSE 0 END AS Age
FROM Employees;
|
PHP
|
UTF-8
| 452 | 3.03125 | 3 |
[] |
no_license
|
<?php
class Pipsqueak implements Cat
{
function name()
{
return 'Pipsqueak';
}
function long_hair()
{
return false;
}
function bow()
{
return false;
}
function bell()
{
return true;
}
function stripes()
{
return true;
}
function eye_color()
{
return 'green';
}
function paw_color()
{
return 'brown';
}
}
|
Java
|
UTF-8
| 5,661 | 2.125 | 2 |
[] |
no_license
|
/******************************************************************************
** This data and information is proprietary to, and a valuable trade secret
** of, Basis Technology Corp. It is given in confidence by Basis Technology
** and may only be used as permitted under the license agreement under which
** it has been distributed, and in no other way.
**
** Copyright (c) 2014 Basis Technology Corporation All rights reserved.
**
** The technical data and information provided herein are provided with
** `limited rights', and the computer software provided herein is provided
** with `restricted rights' as those terms are defined in DAR and ASPR
** 7-104.9(a).
******************************************************************************/
package com.basistech.util.jackson;
import com.basistech.util.ISO15924;
import com.basistech.util.LanguageCode;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Maps;
import com.google.common.io.Resources;
import org.junit.Before;
import org.junit.Test;
import java.net.URL;
import java.util.Map;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Tests for the enum module.
*/
public class EnumModuleTest {
private ObjectMapper mapper;
@Before
public void before() {
mapper = EnumModule.setupObjectMapper(new ObjectMapper());
}
@Test
public void defaultLanguageCodeMapper() throws Exception {
// The default ObjectMapper does traffic in the enum constants:
// LanguageCode.ARABIC.name() --> "ARABIC"
ObjectMapper myMapper = new ObjectMapper();
LanguageCode langCode = myMapper.readValue("\"ARABIC\"", LanguageCode.class);
assertEquals(LanguageCode.ARABIC, langCode);
assertEquals("\"ARABIC\"", myMapper.writeValueAsString(LanguageCode.ARABIC));
}
@Test
public void languageCode() throws Exception {
// The EnumModule wires up the special LanguageCodeSerializer
// and LanguageCodeDeserializer, which let use the ISO636_3 string
// instead of the string of the enum constant.
LanguageCode code = mapper.readValue("\"ara\"", LanguageCode.class);
assertEquals(LanguageCode.ARABIC, code);
assertEquals("\"ara\"", mapper.writeValueAsString(LanguageCode.ARABIC));
}
@Test
public void languageCodeKey() throws Exception {
Map<LanguageCode, String> map = Maps.newHashMap();
map.put(LanguageCode.CHINESE, "dumpling");
Map<LanguageCode, String> deser = mapper.readValue("{\"zho\": \"dumpling\"}", new TypeReference<Map<LanguageCode, String>>() { });
assertEquals(map, deser);
// Note: Jackson assumes that maps have homogeneous key types and does not notice the serializer without this extra level of spec.
String json = mapper.writerFor(new TypeReference<Map<LanguageCode, String>>() { }).writeValueAsString(map);
assertTrue(json.contains("zho")); // and not, by implication, CHINESE.
}
@Test
public void iso15924() throws Exception {
ISO15924 iso = mapper.readValue("\"Latn\"", ISO15924.class);
assertEquals(ISO15924.Latn, iso);
}
@Test
public void troubleWithKeys() throws Exception {
URL dataRes = Resources.getResource(EnumModuleTest.class, "enum-module-map.json");
ObjectMapper plainObjectMapper = new ObjectMapper();
JsonNode tree = plainObjectMapper.readTree(dataRes);
ObjectMapper fancyObjectMapper = EnumModule.setupObjectMapper(new ObjectMapper());
        // this line might throw with Jackson 2.6.2.
Map<LanguageCode, Set<String>> map = fancyObjectMapper.convertValue(tree, new TypeReference<Map<LanguageCode, Set<String>>>() {
});
assertNotNull(map);
}
public enum TestEnum {
replacements;
}
public static class TestBean {
private Map<LanguageCode, Map<String, String>> replacements;
public Map<LanguageCode, Map<String, String>> getReplacements() {
return replacements;
}
public void setReplacements(Map<LanguageCode, Map<String, String>> replacements) {
this.replacements = replacements;
}
}
/* Note the use of an EnumMap in here to work around a Jackson 2.6.2 issue.
     * We could do better with a complex custom serializer for Map<Object, Object>
* in the adm project. */
@Test
public void nestedMaps() throws Exception {
Map<LanguageCode, Map<String, String>> replacements = Maps.newEnumMap(LanguageCode.class);
Map<String, String> engRepl = Maps.newHashMap();
engRepl.put("1", "one");
engRepl.put("2", "two");
replacements.put(LanguageCode.ENGLISH, engRepl);
Map<String, String> fraRepl = Maps.newHashMap();
fraRepl.put("1", "un");
fraRepl.put("2", "deux");
replacements.put(LanguageCode.FRENCH, fraRepl);
Map<TestEnum, Object> factoryConfigMap = Maps.newHashMap();
factoryConfigMap.put(TestEnum.replacements, replacements);
ObjectMapper fancyObjectMapper = EnumModule.setupObjectMapper(new ObjectMapper());
JsonNode tree = fancyObjectMapper.valueToTree(factoryConfigMap);
assertNotNull(tree);
TestBean bean = fancyObjectMapper.convertValue(tree, new TypeReference<TestBean>() { });
assertEquals("one", bean.getReplacements().get(LanguageCode.ENGLISH).get("1"));
}
}
|
Markdown
|
UTF-8
| 236 | 2.828125 | 3 |
[] |
no_license
|
$$ y = 2x + 3 \\ y - 3 = 2x $$
$$ x = \phi(y) = \frac{y-3}{2} $$
The function $\phi(y)$ has no restrictions on $y$: for any $y$ it has a well-defined value.
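As a quick check that $\phi$ really inverts the first equation (pure substitution, nothing beyond the formulas above):
$$ 2\,\phi(y) + 3 = (y - 3) + 3 = y $$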
|
Java
|
UTF-8
| 606 | 3.25 | 3 |
[] |
no_license
|
package ash.patz.learning.concurrency;
/**
* Created by APatil on 5/21/2017.
*/
public class Consumer implements Runnable {
public Consumer(Drop drop) {
this.drop = drop;
}
private Drop drop;
@Override
public void run() {
String take = null;
do {
take = drop.take();
System.out.format("%s : Received %s",Thread.currentThread(), take);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}
        } while (!"done".equals(take)); // compare string contents, not references
}
}
|
Python
|
UTF-8
| 893 | 2.59375 | 3 |
[] |
no_license
|
import os
import os.path as path
from pathlib import Path
import json
from threading import Lock
MYFILELOCK = Lock()
def touchFile(file, contents=None):
with MYFILELOCK:
os.umask(0) #dirty
f = open(os.path.join("configs", file),"w")
        ret = json.dump(contents, f)  # json.dump returns None, so this function always returns None
        f.close()
        return ret
def getFile(file):
with MYFILELOCK:
if os.path.isfile(os.path.join("configs", file)):
f = open(os.path.join("configs", file))
ret = json.load(f)
f.close()
else:
return ""
return ret
def cleanFile(file):
if existsFile(file):
with MYFILELOCK:
os.remove(os.path.join("configs", file))
def existsFile(file):
with MYFILELOCK:
return path.isfile(os.path.join("configs", file))
def cleanLog(file):
    with MYFILELOCK:
        # existsFile() looks in configs/, so check the logs/ path directly here
        if path.isfile(os.path.join("logs", file)):
            os.remove(os.path.join("logs", file))
def touchLog(file):
f = open(os.path.join("logs", file),"w")
f.close()
|
Markdown
|
UTF-8
| 330 | 2.53125 | 3 |
[] |
no_license
|
# Web Lesson 2
## This class is an introduction to html & css
In today's class we learned basics tags in html
1. Headings
2. Paragraph
3. Block Elements
4. In-line Elements
5. Ordered Lists
6. Unordered Lists
7. Image and video tags
# Outputs


|
Java
|
UTF-8
| 310 | 2.140625 | 2 |
[] |
no_license
|
package com.javasaki.ninja.weapon;
import com.javasaki.ninja.exception.WeaponException;
import java.util.List;
public interface WeaponService {
Weapon findWeaponByType(String type) throws WeaponException;
List<Weapon> findAllWeapon();
Weapon findWeaponById(long weaponId) throws WeaponException;
}
|
Markdown
|
UTF-8
| 3,269 | 2.953125 | 3 |
[] |
no_license
|
---
layout: cartoon
title: "The Postmodern Prometheus"
date: 2014-09-25 11:29:52 +0100
comments: true
categories:
- cartoons
author: Rebecca Fox
image: /media/frankenstein.jpg
cartoon: /media/cartoons/frankenstein.jpg
---
Mary Shelly’s Frankenstein is a product of its time. It beautifully illustrates the nineteenth-century apprehension about the implications of advances in science. Victor Frankenstein became an archetypal ‘mad professor’, more concerned with pushing the limits of technology and his own genius than humanity. Readers often gloss over<!--more--> Victor’s eventual regret when he realises the horror of what he’s done toward the end of the story, and choose to remember him as a sociopath in a grubby white coat roaming graveyards seduced by his own hubris. Today, people use variations on the word Frankenstein to demonise the products of modern science and provoke fear in the public – [here be monsters](/2014/09/17/frankenfood-the-science-of-genetically-engineered-crops/).
The tired irony of Victor’s monster being referred to by his creator’s name is perhaps not as ironic as pretentious literary people imply when reprimanding us at dinner parties. “Frankenstein is the creator, not the monster!” they admonish, smugly, but they are incorrect: Victor is Frankenstein AND the monster; his creation is the hero. The Creature seeks acceptance from humanity, only to be rebuffed because of his grotesque appearance and the anxieties that his very existence provokes. He forces those who comprehend what he is to question their own humanity. As someone exiled from the human moral community, The Creature recognises the moral mistake humans have made in exploiting animals:
>My food is not that of man; I do not destroy the lamb and the kid, to glut my appetite; acorns and berries afford me sufficient nourishment. My companion will be of the same nature as myself, and will be content with the same fare. We shall make our bed of dried leaves; the sun will shine on us as on man, and will ripen our food. The picture I present to you is peaceful and human.[*](http://www.gutenberg.org/files/84/84-h/84-h.htm "Full text of Frankenstein available on Project Gutenberg")
In this passage Shelly plays with the concept of man and human to illustrate the difference between the acts of man and ‘humanity’ as an ideal which exemplifies principles of justice, rationality and morality. The Creature is more humane than humans. Shelly was part of a circle of Romantic Vegetarians[*](http://en.wikipedia.org/wiki/Vegetarianism_and_Romanticism "Wikipedia article on Vegetarianism and Romanticism") inspired by enlightenment philosophy and humanism, they emphasised both the irrationality and cruelty of how humans treated animals and this message is made poignantly explicit in Frankenstein.
The Romantic Vegetarians were motivated by many of the same values as us ‘reasonable vegans’ are today. At the end of Shelly’s text The Creature is not dead, but rather left to wander alone through the bleak landscape of the sea ice covering the North Pole. I like to speculate that, despite his final speech about crawling off to die, in some fictional universe he made it back to land, found a Starbucks and right now is browsing rvgn.org.
|
C++
|
UTF-8
| 1,926 | 3.25 | 3 |
[] |
no_license
|
#include <bits/stdc++.h>
#define eb emplace_back
#define endl "\n"
#define pb push_back
using namespace std;
struct edges{
int to;
int length;
};
int djikstra(const vector<vector <edges> > &graph, int source, int target)
{
vector <int> min_distance(graph.size(), INT_MAX);
min_distance[source] = 0;
set <pair<int,int>> active_vertices;
active_vertices.insert({0, source});
while(!active_vertices.empty())
{
int where = active_vertices.begin() -> second;
if(where == target)
return min_distance[target];
active_vertices.erase(active_vertices.begin());
for(auto i: graph[where])
{
            if(min_distance[i.to] > min_distance[where] + i.length)
{
active_vertices.erase({min_distance[i.to], i.to});
min_distance[i.to] = min_distance[where] + i.length;
active_vertices.insert({min_distance[i.to], i.to});
}
}
}
return INT_MAX;
}
int main()
{
int test;
cin >> test;
while(test--)
{
vector <vector <edges> > graph;
int v, e;
cin >> v >> e;
vector <edges> init;
for(int i = 0; i < v + 1; i++)
{
init.clear();
graph.pb(init);
}
for(int i = 0; i < e; i++)
{
int node;
edges input_edge;
cin >> node;
cin >> input_edge.to >> input_edge.length;
node = node - 1; //since the djikstra function is for 0 based indexing
input_edge.to = input_edge.to - 1;
graph[node].pb(input_edge);
}
int source, target;
cin >> source >> target;
--source; --target;
int res = djikstra(graph, source, target);
if(res == INT_MAX)
cout << "NO" << endl;
else
cout << res << endl;
}
return 0;
}
|
Swift
|
UTF-8
| 607 | 3.203125 | 3 |
[
"MIT"
] |
permissive
|
//
// 226-InvertTree.swift
// BinaryTree
//
// Created by Csy on 2019/2/15.
// Copyright © 2019 CXL. All rights reserved.
//
/**
 Invert a binary tree.
*/
import Foundation
class InvertTree {
func invertTree(_ root: TreeNode?) -> TreeNode? {
if root == nil {
return nil
}
        // First invert the left and right subtrees
_ = self.invertTree(root?.left)
_ = self.invertTree(root?.right)
        // Then swap the current node's left and right children
(root!.left, root!.right) = (root!.right, root!.left)
return root
}
}
|
Java
|
UTF-8
| 8,672 | 1.921875 | 2 |
[] |
no_license
|
package com.example.lucas.myapp;
import android.content.Intent;
import android.net.Uri;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TabHost;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.appindexing.Action;
import com.google.android.gms.appindexing.AppIndex;
import com.google.android.gms.common.api.GoogleApiClient;
import java.util.ArrayList;
import java.util.List;
public class OldMainActivity extends AppCompatActivity {
EditText nameTxt, phoneTxt, emailTxt, addressTxt;
ImageView contactImageImgView;
List<Contact> contacts = new ArrayList<>();
ListView contactListView;
Uri imageURI = Uri.parse("android.resource://org.intracode.contactmanager/drawable/no_user_logo.png");
DatabaseHandler dbHandler;
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.old_activity_main);
nameTxt = (EditText) findViewById(R.id.txtName);
phoneTxt = (EditText) findViewById(R.id.txtPhone);
emailTxt = (EditText) findViewById(R.id.txtEmail);
addressTxt = (EditText) findViewById(R.id.txtAddress);
//contactListView = (ListView) findViewById(R.id.listView);
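        // NOTE: contactListView is never assigned (the lookup above is commented out),
        // so populateList() will throw a NullPointerException when it runs.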
contactImageImgView = (ImageView) findViewById(R.id.imgViewContactImage);
dbHandler = new DatabaseHandler(getApplicationContext());
TabHost tabHost = (TabHost) findViewById(R.id.tabHost);
tabHost.setup();
TabHost.TabSpec tabSpec = tabHost.newTabSpec("creator");
tabSpec.setContent(R.id.tabCreator);
tabSpec.setIndicator("Creator");
tabHost.addTab(tabSpec);
tabSpec = tabHost.newTabSpec("list");
tabSpec.setContent(R.id.tabContactList);
tabSpec.setIndicator("List");
tabHost.addTab(tabSpec);
final Button addBtn = (Button) findViewById(R.id.btnAdd);
addBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Contact contact = new Contact(dbHandler.getContactsCount(), String.valueOf(nameTxt.getText()), String.valueOf(phoneTxt.getText()), String.valueOf(emailTxt.getText()), String.valueOf(addressTxt.getText()), imageURI);
//addContact(nameTxt.getText().toString(), phoneTxt.getText().toString(), emailTxt.getText().toString(), addressTxt.getText().toString(), imageURI);
dbHandler.createContact(contact);
contacts.add(contact);
populateList();
Toast.makeText(getApplicationContext(), nameTxt.getText().toString() + " has been added to your Contacts", Toast.LENGTH_SHORT).show();
}
});
nameTxt.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence charSequence, int start, int before, int count) {
addBtn.setEnabled(!nameTxt.getText().toString().trim().isEmpty());
}
@Override
public void afterTextChanged(Editable s) {
}
});
contactImageImgView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent();
intent.setType("image/*");
intent.setAction(Intent.ACTION_GET_CONTENT);
startActivityForResult(Intent.createChooser(intent, "Select Contact Image"), 1);
}
});
List<Contact> addableContacts = dbHandler.getAllContacts();
int contactCount = dbHandler.getContactsCount();
for (int i = 0; i < contactCount; i++) {
contacts.add(addableContacts.get(i));
}
if (!addableContacts.isEmpty()) {
populateList();
}
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
}
public void onActivityResult(int reqCode, int resCode, Intent data) {
if (resCode == RESULT_OK) {
if (reqCode == 1) {
imageURI = data.getData();
contactImageImgView.setImageURI(data.getData());
}
}
}
private void populateList() {
ArrayAdapter<Contact> adapter = new ContactListAdapter();
contactListView.setAdapter(adapter);
}
private void addContact(String name, String phone, String email, String address, Uri image) {
contacts.add(new Contact(0, name, phone, email, address, image));
}
@Override
public void onStart() {
super.onStart();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client.connect();
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://com.example.lucas.myapp/http/host/path")
);
AppIndex.AppIndexApi.start(client, viewAction);
}
@Override
public void onStop() {
super.onStop();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://com.example.lucas.myapp/http/host/path")
);
AppIndex.AppIndexApi.end(client, viewAction);
client.disconnect();
}
private class ContactListAdapter extends ArrayAdapter<Contact> {
public ContactListAdapter() {
super(OldMainActivity.this, R.layout.listview_item, contacts);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if (convertView == null)
convertView = getLayoutInflater().inflate(R.layout.listview_item, parent, false);
Contact currentContact = contacts.get(position);
TextView name = (TextView) convertView.findViewById(R.id.contactName);
name.setText(currentContact.getName());
TextView phone = (TextView) convertView.findViewById(R.id.phoneNumber);
phone.setText(currentContact.getPhone());
TextView email = (TextView) convertView.findViewById(R.id.emailAddress);
email.setText(currentContact.getEmail());
TextView address = (TextView) convertView.findViewById(R.id.cAddress);
address.setText(currentContact.getAddress());
ImageView ivContactImage = (ImageView) convertView.findViewById(R.id.ivContactImage);
ivContactImage.setImageURI(currentContact.getImageURI());
return convertView;
}
}
/*@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}*/
}
|
Shell
|
UTF-8
| 4,762 | 2.921875 | 3 |
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
#!/bin/bash
PROGNAME="$0"
usage() {
cat <<EOF
NAME
`basename $PROGNAME` - Nonogram solver
SYNOPSIS
`basename $PROGNAME` [options] skel.nono ...
DESCRIPTION
Nonogram solver using a program by Steven Simpson.
OPTIONS
-b Binary and decimal numbers
-p png-file Convert and image it into a PNG png-file
-P pdf-file Convert and image it into a PDF pdf-file
-v Clear screen and print the grid while it is being
solved in case of Warning: puzzle has imbalance
-D lvl Debug level
EXAMPLES
Solve GC6EQAP.nono:
$ geo-nonogram GC6EQAP.nono
██ ██ ████████ ████████ ██
██ ██ ██ ██ ██ ██
████████ ████████ ████████ ██████████
██ ██ ██ ██ ██
██ ██ ████████ ██████████
██ ████████ ████████ ████████
██ ██ ██ ██ ██ ██ ██
████████ ████████ ██ ██ ██ ██
██ ██ ██ ██ ██ ██ ██
████████ ██ ████████ ████████
████████ ██████ ████████ ████████
██ ██ ██ ██ ██ ██
████████ ██ ██ ██ ████████
██ ██ ██ ██ ██
██ ██ ████████ ████████
████████ ████████ ████████
██ ██ ██ ██ ██ ██ ██
████████ ████████ ████████ ██
██ ██ ██ ██ ██ ██ ██
████████ ████████ ████████ ██████
Solve GC7R09K.nono:
$ geo-nonogram -b ~/proj/caches/GC7R09K.nono
A 11010011010000000000 865280
B 11010100001000000000 868864
C 11010111111000000000 884224
D 11110000000011111111 983295
E 10110000011110000111 722823
F 10110111111101001110 753486
G 10010011111000011100 605724
H 11010000001000110000 852528
I 01010000000111110000 328176
FORMAT
Format of a ".nono":
$ cat skel.nono
title "skeleton"
by "skeletin"
width 20
height 20
rows
1 2 3
1 2
etc.
columns
1 1
2 1 3
etc.
SEE ALSO
http://www.comp.lancs.ac.uk/~ss/nonogram/auto
http://www.research.lancs.ac.uk/portal/en/people/Steven-Simpson/
https://forge.comp.lancs.ac.uk/svn-repos/nonograms/nonolib/trunk/
https://forge.comp.lancs.ac.uk/svn-repos/nonograms/nonogram/trunk/
http://webpbn.com/export.cgi
pbnsolve-wrapper nono2cross+a nono2jsolver nono2teal
EOF
exit 1
}
#
# Report an error and exit
#
error() {
echo "`basename $PROGNAME`: $1" >&2
exit 1
}
debug() {
if [ $DEBUG -ge $1 ]; then
echo "`basename $PROGNAME`: $2" >&2
fi
}
#
# Process the options
#
DEBUG=0
PDFFILE=
PNGFILE=
VERBOSE=-x
BINARY=0
while getopts "bvp:P:D:h?" opt
do
case $opt in
b) BINARY=1;;
p) PNGFILE="$OPTARG";;
P) PDFFILE="$OPTARG";;
v) VERBOSE=-v;;
D) DEBUG="$OPTARG";;
h|\?) usage;;
esac
done
shift `expr $OPTIND - 1`
#
# Main Program
#
if [ $# = 0 ]; then
usage
fi
doit() {
nonogram $VERBOSE < $1 |
sed -e 's/^$/***********************************************/' \
-e 's/#/██/g' -e 's/-/ /g'
}
check_uniprint() {
if ! type uniprint >/dev/null 2>&1; then
error "No uniprint: Try: yum install yudit OR apt-get install yudit"
fi
}
for i in $*; do
if [ $# != 1 ]; then
echo "$i:"
fi
if [ "$PDFFILE" != "" ]; then
check_uniprint
doit $i \
| uniprint -out - -size 8 -hsize 0 \
-font /usr/share/fonts/gnu-free/FreeMono.ttf 2>/dev/null \
| ps2pdf - $PDFFILE
# doit $i | paps | ps2pdf - $PDFFILE
elif [ "$PNGFILE" != "" ]; then
check_uniprint
doit $i \
| uniprint -out - -size 8 -hsize 0 \
-font /usr/share/fonts/gnu-free/FreeMono.ttf 2>/dev/null \
| convert -trim - $PNGFILE
elif [ "$BINARY" = 1 ]; then
        # solve $i, then print each row as a binary string and its decimal value
        nonogram $VERBOSE < $i | tr -- '-#' 01 | awk '
length != 0 {
if (t>=26) printf "A"
printf "%c ", sprintf("%c", 65 + (t++%26) )
system("echo -n " $1 "\" \"; echo 2i " $1 "p | dc")
}
'
else
doit $i
fi
done
|
Java
|
UTF-8
| 12,079 | 2.265625 | 2 |
[
"MIT"
] |
permissive
|
package es.upm.fi.dia.oeg.rdb;
import es.upm.fi.dia.oeg.model.CSVW;
import es.upm.fi.dia.oeg.rmlc.api.model.TriplesMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.PrintWriter;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
public class RDBConexion {
private static final Logger _log = LoggerFactory.getLogger(RDBConexion.class);
private ArrayList<String> foreignkeys;
private PrintWriter pw;
private PrintWriter pw2;
public RDBConexion(String rdb){
try{
Class.forName ("org.h2.Driver");
foreignkeys = new ArrayList<>();
try {
pw = new PrintWriter("output/"+rdb+"-schema.sql", "UTF-8");
pw2 = new PrintWriter("output/"+rdb+"-inserts.sql","UTF-8");
}catch (Exception e){
}
}catch (Exception e){
            _log.error("The H2 driver has not been found");
}
}
public void close(){
pw.close();pw2.close();
}
public void createDatabase(String rdb, String tables){
try {
long startTime = System.currentTimeMillis();
createTables(tables,rdb);
long stopTime = System.currentTimeMillis();
long elapsedTime = stopTime - startTime;
_log.info("The "+rdb+" has been created in H2 successfully in: "+elapsedTime+"ms");
}catch (Exception e ){
_log.error("Error connecting with H2: "+e.getMessage());
}
}
private void createTables(String tables, String rdb){
try {
Class.forName ("org.h2.Driver");
Connection c = DriverManager.getConnection("jdbc:h2:./output/"+rdb+";AUTO_SERVER=TRUE", "sa", "");
Statement s=c.createStatement();
String[] st = tables.split("\n");
for(String saux : st) {
try {
if (!saux.matches(".*FOREIGN.*")) {
s.execute(saux);
pw.println(saux);
//System.out.println(saux);
} else {
String tableName = saux.split("TABLE")[1].split("\\(")[0];
String[] splitedst = saux.split("FOREIGN");
for (int i = 1; i < splitedst.length; i++) {
if (splitedst[i].matches(".*,")) {
foreignkeys.add("ALTER TABLE " + tableName + " ADD FOREIGN " + splitedst[i].replace(",", ";"));
} else {
foreignkeys.add("ALTER TABLE " + tableName + " ADD FOREIGN " + splitedst[i].replace(");", ";"));
}
}
s.execute(splitedst[0].substring(0, splitedst[0].length() - 1) + ");");
pw.println(splitedst[0].substring(0, splitedst[0].length() - 1) + ");");
//System.out.println(splitedst[0].substring(0,splitedst[0].length()-1)+");");
}
}catch (SQLException e){
_log.error("Error creating the table "+saux+" in "+rdb+":"+e.getMessage());
}
}
s.close();c.close();
}catch (Exception e){
            _log.error("Error opening the connection for "+rdb+": "+e.getMessage());
}
}
public void loadCSVinTable(TriplesMap tp, List<String[]> rows, String table, String rdb, CSVW csvw){
try {
Class.forName ("org.h2.Driver");
Connection c = DriverManager.getConnection("jdbc:h2:./output/"+rdb+";AUTO_SERVER=TRUE", "sa", "");
//String inserts="",totalInserts="";
_log.info("Executing inserts for table: "+table);
long startTime = System.currentTimeMillis();
List<String[]> rowsWithoutHeader = rows.subList(1,rows.size());
ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
try {
for (final Object o : rowsWithoutHeader) {
exec.submit(new Runnable() {
@Override
public void run() {
// do stuff with o.
String[] r = (String[]) o;
StringBuilder insert = new StringBuilder();
insert.append("INSERT INTO "+table+" ");
insert.append("VALUES(");
for(int j=0;j<r.length;j++){
if(RDBUtils.checkColumnInAnnotations(rows.get(0)[j],tp,csvw))
if(r[j].equals("")){
insert.append("'',");
}
else if(r[j].equals("NULL")){
insert.append("NULL,");
}
else {
insert.append("'" + r[j].replace("'","''") + "',");
}
}
String e = insert.substring(0,insert.length()-1)+");";
try {
c.createStatement().execute(e);
pw2.println(e);
}catch (SQLException exception){
_log.error("Error inserting the instances: "+exception.getLocalizedMessage());
}
}
});
}
} finally {
exec.shutdown();
}
try {
exec.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (InterruptedException e) {
_log.error("Error waiting for indexing of the instances of "+table+": "+e.getLocalizedMessage());
}
long stopTime = System.currentTimeMillis();
long elapsedTime = stopTime - startTime;
_log.info("The instances of "+table+" have been indexed in H2 successfully in: "+elapsedTime+"ms");
/* for(int i=1; i<rows.size();i++){
StringBuilder insert = new StringBuilder();
insert.append("INSERT INTO "+table+" ");
insert.append("VALUES(");
for(int j=0;j<rows.get(i).length;j++){
if(RDBUtils.checkColumnInMapping(rows.get(0)[j],tp))
if(rows.get(i)[j].equals("")){
insert.append("'',");
}
else if(rows.get(i)[j].equals("NULL")){
insert.append("NULL,");
}
else {
insert.append("'" + rows.get(i)[j].replace("'","''") + "',");
}
}
String exec = insert.substring(0,insert.length()-1)+");\n";
inserts+=exec;
if(i%5000==0){
_log.info("Inserting 5000 instances in "+table+"...");
totalInserts += inserts;
long startTime = System.currentTimeMillis();
s.execute(inserts);
long stopTime = System.currentTimeMillis();
long elapsedTime = stopTime - startTime;
_log.info("The instances have been indexed in H2 successfully in: "+elapsedTime+"ms");
inserts="";
}
//System.out.println(exec);
}
_log.info("Inserting last instances in "+table+"...");
long startTime = System.currentTimeMillis();
s.execute(inserts);
long stopTime = System.currentTimeMillis();
long elapsedTime = stopTime - startTime;
_log.info("The instances have been indexed in H2 successfully in: "+elapsedTime+"ms");
totalInserts+=inserts;
startTime = System.currentTimeMillis();
pw2.println(totalInserts);
stopTime = System.currentTimeMillis();
elapsedTime = stopTime - startTime;
_log.info("The instances have been printed at output file in: "+elapsedTime+"ms");*/
}catch (Exception e){
_log.error("Error creating the tables in the rdb "+rdb+": "+e.getMessage());
}
}
public void addForeignKeys(String rdb){
try {
Class.forName("org.h2.Driver");
Connection c = DriverManager.getConnection("jdbc:h2:./output/" + rdb+";AUTO_SERVER=TRUE", "sa", "");
Statement s = c.createStatement();
for(String f: foreignkeys) {
try {
s.execute(f);
pw.println(f);
}catch (SQLException e){
_log.error("Error creating a FK: "+e.getLocalizedMessage());
}
}
}catch (Exception e){
_log.error("Error connecting to the database "+rdb+": "+e.getMessage());
}
}
public void updateDataWithFunctions (HashMap<String,HashMap<String,String>> functions, String rdb, boolean index){
long startTime = System.currentTimeMillis();
try {
Connection c = DriverManager.getConnection("jdbc:h2:./output/"+rdb+";AUTO_SERVER=TRUE", "sa", "");
Statement s = c.createStatement();
for(Map.Entry<String,HashMap<String,String>> entry : functions.entrySet()){
String table_name = entry.getKey();
HashMap<String,String> column_function = entry.getValue();
for(Map.Entry<String,String> function_entry : column_function.entrySet()){
String alter_column = function_entry.getKey();
String function_exp = function_entry.getValue().replace("{","").replace("}","");
//if(function_exp.matches(".*\\(.*")) {
try {
s.execute("ALTER TABLE " + table_name + " ADD " + alter_column + ";");
pw.println("ALTER TABLE " + table_name + " ADD " + alter_column + ";");
//System.out.println("ALTER TABLE " + table_name + " ADD " + alter_column + ";");
s.execute("UPDATE " + table_name + " SET " + alter_column.split(" ")[0] + "=" + function_exp + ";");
pw2.println("UPDATE " + table_name + " SET " + alter_column.split(" ")[0] + "=" + function_exp + ";");
//System.out.println("UPDATE " + table_name + " SET " + alter_column.split(" ")[0] + "=" + function_exp + ";");
//}
if (index) {
s.execute("CREATE INDEX " + alter_column.split(" ")[0] + "s ON " + table_name + " (" + alter_column.split(" ")[0] + ")");
pw.println("CREATE INDEX " + alter_column.split(" ")[0] + "s ON " + table_name + " (" + alter_column.split(" ")[0] + ");");
//System.out.println("CREATE INDEX "+alter_column.split(" ")[0]+"s ON "+table_name+" ("+alter_column.split(" ")[0]+")");
}
}catch (SQLException e){
_log.error("Error creating index: "+e.getLocalizedMessage());
}
}
}
s.close();c.close();
}catch (Exception e){
_log.error("Error in update the table: "+e.getMessage());
}
long stopTime = System.currentTimeMillis();
long elapsedTime = stopTime - startTime;
_log.info("The "+rdb+" has been updated in H2 successfully in: "+elapsedTime+"ms");
}
}
|
C++
|
ISO-8859-1
| 643 | 3.484375 | 3 |
[] |
no_license
|
#include "Sort.h"
template <class T>
class BubbleSort : public Sort<T>
{
public:
	T* sort(T* o); //If I only modify the contents I don't need the &; however, if I need to modify the pointer itself, then I do.
	BubbleSort();
	~BubbleSort();
};
template <class T>
T* BubbleSort<T>::sort(T* o)
{
	//The bubble sort itself goes here.
	/*
	Since there would be something like
	if (something > somethingElse)
	it would be necessary to overload > and <, which only works in C++,
	so a class could be written instead to compare the two objects:
	Comparable
		compareTo(obj a, obj b) = 0;
	GreaterNumbers
		bool compareTo(n1, n2)
		{
			return n1.getVol < n2.getVol;
		}
	*/
	return o; //placeholder until the sorting logic is implemented
}
|
C++
|
UTF-8
| 1,795 | 3.609375 | 4 |
[] |
no_license
|
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
class Codec {
private:
void insert(int num, TreeNode* node) {
if (num > node->val) {
if (node->right)
insert(num, node->right);
else
node->right = new TreeNode(num);
}
else {
if (node->left)
insert(num, node->left);
else
node->left = new TreeNode(num);
}
}
public:
// Encodes a tree to a single string.
string serialize(TreeNode* root) {
if (!root) return string();
queue<TreeNode*> que;
string str = "";
que.push(root);
while(!que.empty()) {
TreeNode* cur = que.front();
que.pop();
if (cur->left)
que.push(cur->left);
if (cur->right)
que.push(cur->right);
str += to_string(cur->val) + " ";
}
return str;
}
// Decodes your encoded data to tree.
TreeNode* deserialize(string data) {
if (data == "") return NULL;
istringstream divide(data);
string token;
vector<int> nums;
TreeNode* root = NULL;
while (getline(divide, token, ' '))
nums.push_back(stoi(token));
root = new TreeNode(nums[0]);
for (int i=1; i<nums.size(); i++)
insert(nums[i], root);
return root;
}
};
// Your Codec object will be instantiated and called as such:
// Codec codec;
// codec.deserialize(codec.serialize(root));
|
C#
|
UTF-8
| 1,375 | 3.359375 | 3 |
[
"MIT",
"CC-BY-4.0"
] |
permissive
|
using System;
using System.Data;
using System.ComponentModel;
using System.Windows.Forms;
// <Snippet1>
public class myKeyPressClass
{
static long keyPressCount = 0 ;
static long backspacePressed = 0;
static long returnPressed = 0 ;
static long escPressed = 0 ;
private TextBox textBox1 = new TextBox();
private void myKeyCounter(object sender, KeyPressEventArgs ex)
{
switch(ex.KeyChar)
{
// Counts the backspaces.
case '\b':
backspacePressed = backspacePressed + 1;
break ;
// Counts the ENTER keys.
case '\r':
returnPressed = returnPressed + 1 ;
break ;
// Counts the ESC keys.
case (char)27:
escPressed = escPressed + 1 ;
break ;
// Counts all other keys.
default:
keyPressCount = keyPressCount + 1 ;
break;
}
textBox1.Text =
backspacePressed + " backspaces pressed\r\n" +
escPressed + " escapes pressed\r\n" +
returnPressed + " returns pressed\r\n" +
keyPressCount + " other keys pressed\r\n" ;
ex.Handled = true ;
}
}
// </Snippet1>
|
C#
|
UTF-8
| 2,051 | 3.734375 | 4 |
[] |
no_license
|
using System;
using System.Text.RegularExpressions;
namespace Lab2._3_Validating_Input_With_Regex
{
class Program
{
static void Main(string[] args)
{
Console.Write("Please enter a valid Name: ");
string nameInput = Console.ReadLine();
string name = nameInput;
string pattern = @"[A-Z][A-Za-z]{0,30}";
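            // Note: none of the patterns in this program are anchored with ^ and $,
            // so IsMatch succeeds whenever the pattern matches anywhere inside the input string.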
if (Regex.IsMatch(name, pattern))
{
Console.WriteLine("Name valid");
}
else
{
Console.WriteLine("Sorry, name is not valid!");
}
Console.Write("Please enter a valid email: ");
string emailInput = Console.ReadLine();
string email = emailInput;
string pattern2 = @"\b([A-Za-z0-9]{5,30}@[A-Za-z0-9]{5,10}.[A-Za-z]{2,3})\b";
if (Regex.IsMatch(email, pattern2))
{
Console.WriteLine("Email is valid!");
}
else
{
Console.WriteLine("Sorry, email is not valid!");
}
Console.Write("Please enter a valid phone number: ");
string phoneInput = Console.ReadLine();
string phone = phoneInput;
string pattern3 = @"\d\d\d[-.]\d\d\d[-.]\d\d\d\d";
if (Regex.IsMatch(phone, pattern3))
{
Console.WriteLine("phone number is valid!");
}
else
{
Console.WriteLine("Sorry, phone number is not valid!");
}
Console.Write("Please enter a valid date: ");
string dateInput = Console.ReadLine();
string date = dateInput;
string pattern4 = @"\d\d[/]\d\d[/]\d\d\d\d";
            if (Regex.IsMatch(date, pattern4))
{
Console.WriteLine("Date is valid!");
}
else
{
Console.WriteLine("Sorry, date is not valid!");
}
}
}
}
|
PHP
|
UTF-8
| 789 | 2.953125 | 3 |
[] |
no_license
|
<?php
require_once("config.php");
class baseDatos{
private $conexion;
private $db;
public static function conectar(){
$conexion = mysqli_connect(host, user, pass, dbname, port);
		//Set the charset to utf8
		mysqli_set_charset($conexion, "utf8");
		if($conexion->connect_errno){
			die("Sorry, the connection to MySQL could not be established: ".mysqli_connect_error());
		} else{
			$db = mysqli_select_db($conexion, dbname);
			if($db == 0){
				die("Sorry, could not connect to the database: ".dbname);
}
}
return $conexion;
}
public function desconectar($conexion){
if($conexion){
mysqli_close($conexion);
}
}
}
?>
|
Java
|
UTF-8
| 2,517 | 2.828125 | 3 |
[] |
no_license
|
package com.garbri.proigo.core.objects;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.BodyDef;
import com.badlogic.gdx.physics.box2d.CircleShape;
import com.badlogic.gdx.physics.box2d.FixtureDef;
import com.badlogic.gdx.physics.box2d.World;
import com.badlogic.gdx.physics.box2d.BodyDef.BodyType;
import com.garbri.proigo.core.collision.CollisionInfo;
import com.garbri.proigo.core.collision.CollisionInfo.CollisionObjectType;
public class Ball {
public Body body;
float ballSize = 2f;
public Sprite sprite;
public Ball(World world, float x, float y, Sprite ballSprite)
{
createBallObject(world, x, y, ballSprite, false);
}
public Ball(World world, float x, float y, Sprite ballSprite, boolean networked)
{
createBallObject(world, x, y, ballSprite, networked);
}
private void createBallObject(World world, float x, float y, Sprite ballSprite, boolean networked)
{
//Dynamic Body
BodyDef bodyDef = new BodyDef();
bodyDef.type = BodyType.DynamicBody;
bodyDef.position.set(x, y);
this.body = world.createBody(bodyDef);
CircleShape dynamicCircle = new CircleShape();
dynamicCircle.setRadius(ballSize);
FixtureDef fixtureDef = new FixtureDef();
fixtureDef.shape = dynamicCircle;
fixtureDef.density = 0.25f;
fixtureDef.friction = 0f;
fixtureDef.restitution = 1f;
this.body.createFixture(fixtureDef);
this.body.setUserData(new CollisionInfo("Ball", CollisionObjectType.ball));
this.sprite = ballSprite;
}
public Vector2 getLocalVelocity() {
/*
		returns the ball's velocity vector in the ball's local coordinate frame
*/
return this.body.getLocalVector(this.body.getLinearVelocityFromLocalPoint(new Vector2(0, 0)));
}
public void update()
{
Vector2 currentVelocity = this.getLocalVelocity();
Vector2 position= this.getLocation();
//System.out.println("Ball Position - " + position + "Ball Velocity - " + currentVelocity);
float slowDownMultiplier = 0.5f;
this.body.applyForce(this.body.getWorldVector(new Vector2(-(currentVelocity.x*(slowDownMultiplier)), -(currentVelocity.y*(slowDownMultiplier)))), position, true );
}
public void networkUpdate(Vector2 velocity, Vector2 position)
{
this.body.setTransform(position, 0);
//this.body.
}
public Vector2 getLocation()
{
return this.body.getWorldCenter();
}
}
|
TypeScript
|
UTF-8
| 1,319 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
import type { Optic, TryA, TryT, A, B, S, T } from './optic.js'
import type { Expected } from './errors.js'
import type { Eq } from '../utils.js'
import * as I from '../internals.js'
import { Apply, HKT } from '../hkt.js'
interface GuardA<ValueType, SubType extends ValueType> extends A {
0: TryA<
this,
S<this> extends ValueType ? SubType : Expected<ValueType, S<this>>
>
}
interface GuardMonoT<ValueType, SubType extends ValueType> extends T {
0: TryT<
this,
S<this> extends ValueType
? Eq<B<this>, SubType> extends true
? ValueType
: Expected<SubType, B<this>>
: Expected<ValueType, S<this>>
>
}
interface GuardPolyT<F extends HKT, ValueType> extends T {
0: TryT<
this,
S<this> extends ValueType ? Apply<F, B<this>> : Expected<ValueType, S<this>>
>
}
export function guard<F extends HKT>(): <ValueType, SubType extends ValueType>(
g: (value: ValueType) => value is SubType
) => Optic<'Prism', GuardA<ValueType, SubType>, GuardPolyT<F, ValueType>>
export function guard<ValueType, SubType extends ValueType>(
g: (value: ValueType) => value is SubType
): Optic<'Prism', GuardA<ValueType, SubType>, GuardMonoT<ValueType, SubType>>
export function guard(arg?: any) {
if (arg === undefined) return I.guard as any
else return I.guard(arg) as any
}
|
Markdown
|
UTF-8
| 3,644 | 2.90625 | 3 |
[] |
no_license
|
---
layout: post
title: How to Browse Tieba Without the Tieba App
tags: [Tieba, PHP]
---
Fighting for the last piece of untouched land on my phone (ง •_•)ง<!--more-->
# Background
For certain reasons I still really want to browse Baidu Tieba. Some big companies may behave badly, but precisely because they are big, they are where the most information has accumulated…
A year ago, search results would not open when I used Baidu Search, so I had to [resort to a few tricks](/2019/05/12/baidu.html) to work around it. Baidu apparently realised this was wrecking their own reputation, because they later seem to have fixed the problem themselves.
And now, for certain reasons, I need Baidu Tieba again, and installing the Tieba app would of course be an insult to my phone. Normally the desktop version of Tieba would be enough, but unfortunately my browser is the WebView-based Firefox Rocket, which cannot change its UA… So I had no choice but to put my programming skills to work.
# Solution
Normally, when you read a Tieba thread on a phone you cannot see the whole thread, only the first two posts — which makes it useless — and to read the rest you are expected to install the Tieba app. Tieba used to offer an ultra-light version that, although crude, was good enough. Annoyingly, Baidu no longer lets the light version list thread titles, so you cannot see the thread list with it.
However, if you know a thread's ID it still seems to open normally, so all we need is a way to get the thread list and the IDs.
After a bit of research I found that appending a thread ID to `http://tieba.baidu.com/mo/q-----1----/m?kz=` shows the full content of that thread. It apparently cannot post replies, but I do not need that anyway.
So I took my earlier [Iwara Viewer](/2019/04/13/iwara.html) code as a reference and wrote the final version.
# Code
```php
<?php
if (isset($_GET["kw"])) {
if (!isset($_GET["pn"])) {
$_GET["pn"] = 1;
}
$url = 'https://tieba.baidu.com/f?kw='.iconv("utf-8","gb2312",$_GET["kw"]).'&pn='.($_GET["pn"]-1)*50;
$str = file_get_contents($url);
$preg='/<a rel="noreferrer" href="\/p\/(.*?)" title="(.*?)" target="_blank" class="j_th_tit ">/is';
preg_match_all($preg,$str,$match);//search $str for every match of $preg and collect them into $match
echo "<title>Mayx Tieba Viewer</title><h1>Mayx Tieba Viewer</h1><hr />";
if ($http_response_header[0] != "HTTP/1.0 200 OK"){
echo "<b>Warning</b>:It's Not Tieba Name<br>";
}
for($i=0;$i<count($match[0]);$i++)//output the hyperlinks one by one
{
echo "<a href=\"http://tieba.baidu.com/mo/q-----1----/m?kz=".$match[1][$i]."\">".$match[2][$i]."</a><br>";
}
echo "<hr><a href=\"?pn=".($_GET["pn"] + 1)."&kw=".$_GET["kw"]."\" >Next Page</a>";
} else {
echo '<title>Mayx Tieba Viewer</title><h1>Mayx Tieba Viewer</h1><hr><form action="" method="get">Please Input Tieba Name:<input type="text" name="kw" required><input type="submit" value="Submit"></form>';
}
```
JavaScript could probably do this as well, but my skills are not up to it, so I settled for PHP.
# Demo
<https://yuri.gear.host/tiebaview.php>
# Afterword
P.S. This was designed specifically for phones, so there is no need at all to use it to read threads on a computer.
Beyond that, I just hope Baidu will act like a decent company… HTML is perfectly capable of providing the same features, yet they insist on making people download the app. I know it is about profit, but it is genuinely off-putting… Zhihu is the same, by the way — being this obnoxious will eventually drive users away.
# The Truth
What I did not know is that Tieba is also available in many mini-programs — WeChat has it, for example — so my work was all for nothing 😂… Oh well, call it practice plus a quick filler post, lol.
|
PHP
|
UTF-8
| 433 | 2.984375 | 3 |
[] |
no_license
|
<?php
namespace Controllers;
/**
 * Abstract class that cannot be instantiated directly.
 * Base controller (avoids repeating $model) that lets each child controller set $modelName so the matching model class can be instantiated.
*/
abstract class Controller
{
protected $model;
protected $modelName;
public function __construct(){
$this->model = new $this->modelName();
}
}
?>
|
C++
|
UTF-8
| 3,783 | 3.6875 | 4 |
[
"MIT"
] |
permissive
|
#include <iostream>
#include <vector>
#include <unordered_set>
#include <iostream>
#include <queue>
//graph with adjacency sets
class SetGraph {
private:
//we have to store two vectors to be able to find and children and parents fast
std::vector<std::unordered_set<int>> adjacency_list_income;
std::vector<std::unordered_set<int>> adjacency_list_outcome;
public:
SetGraph(int vertices_count);
~SetGraph() = default;
void AddEdge(int from, int to);
int VerticesCount() const;
void GetNextVertices(int vertex, std::vector<int> &vertices) const;
void GetPrevVertices(int vertex, std::vector<int> &vertices) const;
};
int findCycle(SetGraph &graph);
int main() {
//Filling the graph
int vertices_count, edges_count;
std::cin >> vertices_count >> edges_count;
SetGraph graph(vertices_count);
for (int i = 0; i < edges_count; ++i) {
int from, to;
std::cin >> from >> to;
//The graph is undirected => we should add edges in both directions
graph.AddEdge(from, to);
graph.AddEdge(to, from);
}
std::cout << findCycle(graph);
return 0;
}
//Constructor of graph with n vertices and no edges
SetGraph::SetGraph(int vertices_count) : adjacency_list_outcome(vertices_count),
adjacency_list_income(vertices_count) {}
//Add new edge to the graph
void SetGraph::AddEdge(int from, int to) {
adjacency_list_outcome[from].insert(to);
adjacency_list_income[to].insert(from);
}
int SetGraph::VerticesCount() const {
return adjacency_list_outcome.size();
}
//Puts into input vector list of all the children of the vertex
void SetGraph::GetNextVertices(int vertex, std::vector<int> &vertices) const {
vertices.clear();
//just copy list to vector
for (auto item : adjacency_list_outcome[vertex]) {
vertices.push_back(item);
}
}
//Puts into input vector list of all the parents of the vertex
void SetGraph::GetPrevVertices(int vertex, std::vector<int> &vertices) const {
vertices.clear();
//same thing but from another set of lists
for (auto item : adjacency_list_income[vertex]) {
vertices.push_back(item);
}
}
//Find a length of the smallest cycle in a graph
int findCycle(SetGraph &graph) {
int min_cycle = 200001;
//start BFS from every vertex
for (int i = 0; i < graph.VerticesCount(); ++i) {
std::queue<int> bfs_queue;
std::vector<int> next_vertices;
//stores distance to the starting vertex
std::vector<int> distance(graph.VerticesCount(), -1);
distance[i] = 0;
bool cycle_is_found = false;
bfs_queue.push(i);
//standard bfs
while (!bfs_queue.empty() && !cycle_is_found) {
int current = bfs_queue.front();
bfs_queue.pop();
graph.GetNextVertices(current, next_vertices);
for (int j = 0; j < next_vertices.size(); ++j) {
if (distance[next_vertices[j]] == -1) {
//pushing next vertex into a queue
bfs_queue.push(next_vertices[j]);
distance[next_vertices[j]] = distance[current] + 1;
} else {
//don't go backwards
if (distance[next_vertices[j]] + 1 == distance[current]) {
continue;
}
min_cycle = std::min(min_cycle, distance[current] + distance[next_vertices[j]] + 1);
//we need to find only the first cycle because every other cycle will be bigger (see pdf)
cycle_is_found = true;
break;
}
}
}
}
return min_cycle == 200001 ? -1 : min_cycle;
}
|
Python
|
UTF-8
| 1,114 | 3.625 | 4 |
[] |
no_license
|
#Author : sakura_kyon@hotmail.com
#Question : Find Minimum in Rotated Sorted Array II
#Link : https://oj.leetcode.com/problems/find-minimum-in-rotated-sorted-array-ii/
#Language : python
#Status : Accepted
#Run Time : 272 ms
#Description:
#Follow up for "Find Minimum in Rotated Sorted Array": What if duplicates are allowed? Would this affect the run-time complexity? How and why?
#Suppose a sorted array is rotated at some pivot unknown to you beforehand.
#(i.e., `0 1 2 4 5 6 7` might become `4 5 6 7 0 1 2`).
#Find the minimum element.
#The array may contain duplicates.
#Show Tags
#Array, Binary Search
#Code :
def findMin(num, begin=0, end=None):
end = (len(num) - 1) if end is None else end
if begin == end:
return num[begin]
if num[begin] < num[end]:
return num[begin]
else:
mid = (begin + end) / 2
return min(
findMin(num, begin, mid),
findMin(num, mid + 1, end)
)
class Solution:
# @param num, a list of integer
# @return an integer
def findMin(self, num):
return findMin(num)
|
PHP
|
UTF-8
| 1,259 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
<?php
namespace Phossa2\Route\Parser;
/**
* ParserStd test case.
*/
class ParserStdTest extends \PHPUnit_Framework_TestCase
{
/**
*
* @var ParserStd
*/
private $object;
/**
* Prepares the environment before running a test.
*/
protected function setUp()
{
parent::setUp();
$this->object = new ParserStd();
}
/**
* Cleans up the environment after running a test.
*/
protected function tearDown()
{
$this->object = null;
parent::tearDown();
}
/**
* @covers Phossa2\Route\Parser\ParserStd::processRoute
*/
public function testProcessRoute1()
{
$pattern = '/blog[/{section}][/{year:d}[/{month:d}[/{date:d}]]]';
$this->assertEquals(
"/blog(?:/(?<section>[^/]++))?(?:/(?<year>[0-9]++)(?:/(?<month>[0-9]++)(?:/(?<date>[0-9]++))?)?)?",
$this->object->processRoute('', $pattern));
}
/**
* @covers Phossa2\Route\Parser\ParserStd::processRoute
*/
public function testProcessRoute2()
{
$pattern = '/blog/{section:xd}/';
$this->assertEquals("(?<wow>/blog/(?<sectionwow>[^0-9/][^/]*+))",
$this->object->processRoute('wow', $pattern));
}
}
|
PHP
|
UTF-8
| 5,300 | 2.71875 | 3 |
[] |
no_license
|
<!DOCTYPE html>
<html lang="en">
<head>
<?php
session_start();
if(isset($_SESSION['logged_in'])){
header('Location: welcome.php');
}
else {
require_once 'head.php';
require_once 'database.php';
function test_input($data) {
$data = trim($data);
$data = stripslashes($data);
$data = htmlspecialchars($data);
return $data;
}
?>
<link rel="stylesheet" href="styles/login.css">
<style>
.error {
color: red;
}
</style>
<?php require_once 'navbar.php';
?>
</head>
<body>
<?php
$first_name = $last_name = $email = $password = $cpwd = '';
$first_name_err = $last_name_err= $email_err= $cpwd_err = $password_err = '';
if($_SERVER['REQUEST_METHOD'] == 'POST'){
if(!empty($_POST['first_name'])){
$first_name = test_input($_POST['first_name']);
if(empty($first_name)){
$first_name_err = "Please enter your first name!";
}
}
else
$first_name_err = "Please enter your first name!";
if(!empty($_POST['last_name'])){
$last_name = test_input($_POST['last_name']);
if(empty($last_name)){
$last_name_err = "Please enter your last name!";
}
}
else
$last_name_err = "Please enter your last name!";
if(!empty($_POST['email'])){
$email = test_input($_POST['email']);
if(empty ($email)){
$email_err = "Please enter an email!";
}else
if ( !filter_var($email, FILTER_VALIDATE_EMAIL)) {
$email_err = "Invalid email format";
}
}else
$email_err = "Please enter an email!";
if(!empty($_POST['password'])){
if(strlen($_POST['password']) < 6){
$password_err = "Password must contain at least 6 characters!";
}
else {
$password = password_hash($_POST['password'], PASSWORD_DEFAULT);
}
}else
$password_err = 'Please enter your password';
if(!empty($_POST['cpwd'])){
if($_POST['cpwd'] != $_POST['password'])
$cpwd_err = 'Passwords do not match!';
}
else
$cpwd_err = 'Please confirm your password';
if($first_name_err == '' && $last_name_err == '' && $email_err == '' && $password_err == '' && $cpwd_err == ''){
$sql = "SELECT * from users";
$result = $conn->query($sql);
            // check whether an account with this email already exists
            if($result->num_rows > 0){
                while($row = $result->fetch_assoc()){
                    if($row['email'] == $email){
                        $email_err = "This email is already taken";
                    }
                }
            }
            if($email_err == ''){
                $sql = "INSERT INTO users (firstname, lastname, email, password) VALUES (?, ?, ?, ?)";
                if( $stmt= $conn->prepare($sql)){
                    $stmt->bind_param('ssss', $a, $b, $c, $pass);
                    $a = $first_name;
                    $b = $last_name;
                    $c = $email;
                    $pass = $password;
                    if($stmt->execute()){
                        header('Location: login.php ');
                    }
                    else {
                        echo "Sorry, something went wrong!";
                    }
                } else {
                    echo "Sorry, something went wrong!";
                }
            }
}
}
}
?>
<div class="wrapper">
<div class="form-left">
<h2 class="text-uppercase">information</h2>
<p> Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Et molestie ac feugiat sed. Diam volutpat commodo. </p>
<p class="text"> <span>Sub Head:</span> Vitae auctor eu augudsf ut. Malesuada nunc vel risus commodo viverra. Praesent elementum facilisis leo vel. </p>
<div class="form-field">
<a href="login.php" class="account btn btn-primary" > Have an Account? </a> </div>
</div>
<form class="form-right" method="POST">
<h2 class="text-uppercase">Registration form</h2>
<div class="row">
<div class="col-sm-6 mb-3"> <label>First Name</label> <input type="text" name="first_name" id="first_name" class="form-control <?=empty($first_name_err) ? '' : 'is-invalid' ?>"><span> <small> <?= $first_name_err ?></small> </span> </div>
<div class="col-sm-6 mb-3"> <label>Last Name</label> <input type="text" name="last_name" id="last_name" class="form-control <?=empty($last_name_err) ? '' : 'is-invalid' ?>"> <span> <small> <?= $last_name_err ?></small> </span></div>
</div>
<div class="mb-3"> <label>Your Email</label> <input type="email" class="form-control <?=empty($email_err) ? '' : 'is-invalid' ?>" name="email" required> <span> <small> <?= $email_err ?></small> </span> </div>
<div class="row">
<div class="col-sm-6 mb-3"> <label>Password</label> <input type="password" name="password" id="pwd" class="form-control <?=empty($password_err) ? '' : 'is-invalid' ?>"><span class="<?=empty($password_err) ? '' : 'error' ?>"> <small> <?= $password_err ?></small> </span> </div>
<div class="col-sm-6 mb-3"> <label>Confirm Password</label> <input type="password" name="cpwd" id="cpwd" class="form-control <?=empty($cpwd_err) ? '' : 'is-invalid' ?>"><span> <small> <?= $cpwd_err ?></small> </span> </div>
</div>
<div class="form-field">
<input type="submit" value="Register" class="register" name="register">
</div>
</form>
</div>
</body>
|
PHP
|
UTF-8
| 711 | 2.53125 | 3 |
[] |
no_license
|
<?php
if (array_key_exists("assignmentid", $_GET)) {
include "connection.php";
$ref = $_GET['ref'];
$pass = $_GET['pass'];
$destination = "";
if ($ref == 'admin') {
$destination .= "admin-assignment.php";
}elseif ($ref == 'teacher') {
$destination .= "teacher-assignment.php";
}elseif ($ref == 'student') {
$destination .= "student-assignment.php";
}
	$assignment_id_delete = (int) $_GET['assignmentid']; // cast to an integer so the id cannot be used for SQL injection
$messagepass = md5(md5("$ref"));
$query = "DELETE FROM `assignments` WHERE `id`= $assignment_id_delete";
if (mysqli_query($link, $query)) {
header("Location: $destination?pass=$pass");
}else{
echo "Error";
}
}
?>
|
Swift
|
UTF-8
| 1,426 | 3.296875 | 3 |
[] |
no_license
|
//
// ContentView.swift
// SliderApp
//
// Created by Vinícius Schuck on 08/01/20.
// Copyright © 2020 Vinícius Schuck. All rights reserved.
//
import SwiftUI
struct ContentView: View {
@State private var showAlert = false
@State private var sliderValue = 0.0
@State private var selectPicker = 0
@State private var sortNumber = Int.random(in: 0...20)
private var pickerOptions = ["Easy", "Hard"]
var body: some View {
VStack {
Picker(selection: $selectPicker, label: Text("Options")) {
ForEach(0..<self.pickerOptions.count) {
Text(self.pickerOptions[$0])
}
}.pickerStyle(SegmentedPickerStyle())
Spacer()
Text("Put the slider as close can to 9")
.bold()
.font(.system(size: 20))
Spacer()
HStack {
Text("0")
if self.selectPicker == 1 {
Slider(value: $sliderValue, in: (0...50))
} else {
Slider(value: $sliderValue, in: (0...20))
}
Text(self.selectPicker == 1 ? "50" : "20")
}
Spacer()
AppSubmitButton(showAlert: $showAlert, sortNumber: $sortNumber, sliderValue: $sliderValue)
}.padding(20)
}
}
|
C#
|
UTF-8
| 3,152 | 2.75 | 3 |
[
"MIT"
] |
permissive
|
// This file is licensed to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Linq;
namespace Spines.Hana.Blame.Services.ReplayManager
{
/// <summary>
/// A shuffler used by tenhou.net, based on the mersenne twister MT19937.
/// </summary>
internal class TenhouShuffler
{
/// <summary>
/// Creates a new instance of TenhouShuffler and initializes it with a seed.
/// </summary>
/// <param name="seed">The seed to initialize the shuffler with.</param>
public TenhouShuffler(IEnumerable<int> seed)
{
unchecked
{
var seedArray = seed.Select(x => (uint) x).ToArray();
Init(19650218);
uint i = 1;
uint j = 0;
for (var k = Math.Max(NumberOfWords, seedArray.Length); k > 0; --k)
{
_words[i] ^= PreviousXor(i) * 1664525;
_words[i] += seedArray[j] + j;
i += 1;
j += 1;
if (i >= NumberOfWords)
{
_words[0] = _words[NumberOfWords - 1];
i = 1;
}
if (j >= seedArray.Length)
{
j = 0;
}
}
for (var k = NumberOfWords - 1; k > 0; --k)
{
_words[i] ^= PreviousXor(i) * 1566083941;
_words[i] -= i;
i += 1;
if (i < NumberOfWords)
{
continue;
}
_words[0] = _words[NumberOfWords - 1];
i = 1;
}
_words[0] = 0x80000000;
}
}
/// <summary>
/// Returns the next random number in the sequence.
/// </summary>
/// <returns></returns>
public int GetNext()
{
if (_nextWordIndex == 0)
{
CreateNextState();
}
var t = _words[_nextWordIndex];
_nextWordIndex = (_nextWordIndex + 1) % NumberOfWords;
return unchecked((int) Temper(t));
}
private const int NumberOfWords = 624;
private const int ParallelSequenceNumber = 397;
private const uint UpperMask = 0x80000000;
private const uint LowerMask = 0x7fffffff;
private readonly uint[] _evenOddXorValues = {0, 0x9908b0df};
private readonly uint[] _words = new uint[NumberOfWords];
private uint _nextWordIndex;
private void CreateNextState()
{
for (var i = 0; i < NumberOfWords; ++i)
{
var y = (_words[i] & UpperMask) | (_words[(i + 1) % NumberOfWords] & LowerMask);
_words[i] = _words[(i + ParallelSequenceNumber) % NumberOfWords] ^ (y >> 1) ^ _evenOddXorValues[y & 1];
}
}
private void Init(uint seed)
{
_words[0] = seed;
for (uint i = 1; i < NumberOfWords; i++)
{
unchecked
{
_words[i] = PreviousXor(i) * 1812433253;
_words[i] += i;
}
}
}
private uint PreviousXor(uint i)
{
return _words[i - 1] ^ (_words[i - 1] >> 30);
}
private static uint Temper(uint y)
{
y ^= y >> 11;
y ^= (y << 7) & 0x9d2c5680;
y ^= (y << 15) & 0xefc60000;
y ^= y >> 18;
return y;
}
}
}
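// Illustrative usage only (the seed values below are hypothetical, not taken from a real tenhou.net replay):
//   var shuffler = new TenhouShuffler(new[] { 1, 2, 3, 4 });
//   int next = shuffler.GetNext();   // deterministic pseudo-random value for the given seed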
|
Java
|
UTF-8
| 1,455 | 2.09375 | 2 |
[] |
no_license
|
package com.mgjr.presenter.impl;
import com.mgjr.model.bean.HomepageRecommendProjectsBean;
import com.mgjr.model.impl.HomeBannerModelImpl;
import com.mgjr.model.listeners.BaseModel;
import com.mgjr.presenter.listeners.OnPresenterListener;
import com.mgjr.view.listeners.ViewListener;
import java.util.Map;
/**
* Created by Administrator on 2016/9/14.
*/
public class HomepageBannerPresenterImpl implements OnPresenterListener<HomepageRecommendProjectsBean> {
private ViewListener homepageBannerViewLisenter;
private BaseModel baseModel;
public HomepageBannerPresenterImpl(ViewListener homepageRecommendProjectsViewLisenter) {
this.homepageBannerViewLisenter = homepageRecommendProjectsViewLisenter;
baseModel = new HomeBannerModelImpl();
}
public void sendRequest(Map<String, String> necessaryParams, Map<String, String> unNecessaryParams) {
homepageBannerViewLisenter.showLoading();
baseModel.sendRequest(necessaryParams, unNecessaryParams, this);
}
@Override
public void onSuccess(HomepageRecommendProjectsBean bean) {
homepageBannerViewLisenter.hideLoading();
if(bean.getStatus().equalsIgnoreCase("0000")){
homepageBannerViewLisenter.responseData(this,bean);
}else {
homepageBannerViewLisenter.showError(this,bean);
}
}
@Override
public void onError() {
homepageBannerViewLisenter.showError();
}
}
|
Python
|
UTF-8
| 4,387 | 3.0625 | 3 |
[] |
no_license
|
"""
This file contains some miscellaneous helper functions and os wrappers.
"""
import os
import numpy as np
import cv2
def file_exists(file_path):
"""
Check if a file exists.
Inputs
----------
path: str
Path to file.
Outputs
-------
bool
True if file exists, false otherwise.
"""
if file_path is None:
return False
if not os.path.isfile(file_path):
return False
return True
def folder_guard(folder_path):
"""
Checks if a folder exists and creates it if it does not.
Inputs
----------
folder_path: str
Path to folder.
Outputs
-------
N/A
"""
if not os.path.isdir(folder_path):
print('INFO:folder_guard(): Creating folder: ' + folder_path + '...')
os.mkdir(folder_path)
def folder_is_empty(folder_path):
"""
    Check if a folder is empty. If the folder does not exist, it counts as being empty.
Inputs
----------
folder_path: str
Path to folder.
Outputs
-------
bool
        True if the folder does not exist or contains no elements, false otherwise.
"""
if os.path.isdir(folder_path):
return (len(os.listdir(folder_path)) == 0)
return True
def parse_file_path(file_path):
"""
Parse out the folder path and file path from a full path.
Inputs
----------
file_path: string
Path to a file - './path/to/myfile.jpg'
Outputs
-------
folder_path: string
The folder path contained in 'file_path' - './path/to/'
file_name: string
The file_name contained in 'file_path' - 'myfile.jpg'
"""
file_name = os.path.basename(file_path)
cutoff = len(file_path) - len(file_name)
folder_path = file_path[:cutoff]
return folder_path, file_name
# Samples
def pick_samples_images(images, indices):
"""
Pick out certain samples from a set of images.
Inputs
----------
images: numpy.ndarray
Numpy array containing a set of images.
indices: numpy.ndarray
Numpy array containing a set of indices.
Outputs
-------
images_samples: numpy.ndarray
Numpy array containing samples from 'images'.
"""
n_samples = len(indices)
n_rows, n_cols, n_channels = images[0].shape
images_samples = np.zeros((n_samples,n_rows, n_cols, n_channels), dtype = np.uint8)
for i, index in enumerate(indices):
images_samples[i] = images[index]
return images_samples
def pick_samples_1D(arr, indices, dtype = np.float32):
"""
Pick out certain samples from a set of images.
Inputs
----------
arr: numpy.ndarray
Numpy array containing 1D data.
indices: numpy.ndarray
Numpy array containing a set of indices.
dtype: class.type
The desired data type of 'arr_samples'.
Outputs
-------
    arr_samples: numpy.ndarray
        Numpy array containing samples from 'arr'.
"""
n_samples = len(indices)
arr_samples = np.zeros((n_samples), dtype = dtype)
for i, index in enumerate(indices):
arr_samples[i] = arr[index]
return arr_samples
def pick_random_samples(images, labels, labels_metadata = None, n_max_samples = 25):
n_samples = len(images)
indices = np.random.randint(0, n_samples, min(n_samples, n_max_samples))
images_samples = pick_samples_images(images, indices)
labels_samples = pick_samples_1D(labels, indices, dtype = np.int)
if labels_metadata is not None:
labels_metadata_samples = pick_samples_1D(labels_metadata, labels_samples, dtype = 'U25')
return images_samples, labels_samples, labels_metadata_samples
def distribution_is_uniform(labels):
if labels is None:
return True
is_uniform = True
classes, classes_count = np.unique(labels, return_counts = True)
class_ref_count = classes_count[0]
for class_count in classes_count:
if class_count != class_ref_count:
is_uniform = False
break
return is_uniform
def bgr_to_rgb(images):
images_out = np.zeros_like(images)
for i in range(len(images)):
images_out[i] = cv2.cvtColor(images[i], cv2.COLOR_BGR2RGB)
return images_out
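# Illustrative usage (assumes `images` is an (N, H, W, 3) uint8 array, `labels` an (N,) int array
# and `labels_metadata` an array of class names; these inputs are hypothetical):
#   samples, sample_labels, sample_names = pick_random_samples(images, labels, labels_metadata, n_max_samples=9)
#   rgb_samples = bgr_to_rgb(samples)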
|
PHP
|
UTF-8
| 1,413 | 3.078125 | 3 |
[] |
no_license
|
<!DOCTYPE html>
<html lang="de" dir="ltr">
<head>
<meta charset="utf-8">
<title>Dateiuploads mit PHP</title>
</head>
<body>
<h1>Dateiuploads mit PHP</h1>
<?php
echo "<pre>";print_r($_FILES);echo"</pre>";
// Form submitted?
if (!empty($_FILES)) {
// Validierung
if (empty($error)) {
if (!empty($_FILES["bild"]["tmp_name"])){
foreach ($_FILES["bild"]["tmp_name"] as $key => $value) {
if (!empty($value) && is_uploaded_file($value)) {
$extension = "";
if (strpos($_FILES["bild"]["name"][$key],".")>0) {
$teile = explode(".",$_FILES["bild"]["name"][$key]);
$extension = "." . mb_strtolower(array_pop($teile));
}
$dateiname = md5(microtime().mt_rand(0,1000000).$_FILES["bild"]["name"][$key]) . $extension;
move_uploaded_file($value, "uploads/" . $dateiname );
// $_FILES["bild"]["name"]
}
}
}
}
}
?>
<form class="" action="multiupload.php" method="post" enctype="multipart/form-data">
<div class="">
<label for="bild">Bild:</label>
<input type="file" name="bild[]" id="bild" multiple>
<!-- accept=".pdf" -->
</div>
<div class="">
<button type="submit">Hochladen</button>
</div>
</form>
</body>
</html>
|
C++
|
UTF-8
| 521 | 3.1875 | 3 |
[] |
no_license
|
#include <iostream>
#include <stack>
#include <cstring>
using namespace std;
int main(){
int t;
cin >> t;
stack<char>operations;
stack<char>others;
while(t--){
string s;
cin >> s;
for(int i=0; i<s.size(); i++){
if(s[i]=='+' || s[i]=='-' || s[i]=='*' || s[i]=='/' || s[i]=='^' || s[i]=='%')
operations.push(s[i]);
else if(s[i]==')'){
cout << operations.top();
operations.pop();
}else if(s[i]=='(')
others.push(s[i]);
else
cout << s[i];
}
cout << endl;
}
return 0;
}
|
Java
|
UTF-8
| 280 | 2.859375 | 3 |
[] |
no_license
|
package bot;
/**
* The value of a game state, as defined by a Heuristic.
* @author jonbuckley
*
*/
public class HeuristicValue {
public float probability;
public float value;
public HeuristicValue(float prob, float v) {
this.probability = prob;
this.value = v;
}
}
|
TypeScript
|
UTF-8
| 1,116 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
import { NgModule, NgModuleFactoryLoader, ModuleWithProviders, StaticProvider } from '@angular/core';
import { ModuleMapNgFactoryLoader, ModuleMap, MODULE_MAP } from './module-map-ngfactory-loader';
/**
* Helper function for getting the providers object for the MODULE_MAP
*
* @param {ModuleMap} moduleMap Map to use as a value for MODULE_MAP
*/
export function provideModuleMap(moduleMap: ModuleMap): StaticProvider {
return {
provide: MODULE_MAP,
useValue: moduleMap
};
}
/**
* Module for using a NgModuleFactoryLoader which does not lazy load
*/
@NgModule({
providers: [
{
provide: NgModuleFactoryLoader,
useClass: ModuleMapNgFactoryLoader
}
]
})
export class ModuleMapLoaderModule {
/**
* Returns a ModuleMapLoaderModule along with a MODULE_MAP
*
* @param {ModuleMap} moduleMap Map to use as a value for MODULE_MAP
*/
static withMap(moduleMap: ModuleMap): ModuleWithProviders {
return {
ngModule: ModuleMapLoaderModule,
providers: [
{
provide: MODULE_MAP,
useValue: moduleMap
}
]
};
}
}
|
Markdown
|
UTF-8
| 6,809 | 2.953125 | 3 |
[] |
no_license
|
# Virtual Machine

I want to create a Virtual Machine, so that I can develop from any development
environment and have the result run without problems on any platform.

But it has to be small, so it can be embedded in many places, and very
flexible, so that any kind of programming language can be implemented on it and
run at a good speed. It should be optimized for as many paradigms as possible,
such as imperative, functional, logic, object-oriented, prototype-based,
dynamic, static, etc., and it should give programs and languages the option of
being designed to run fast, like strongly typed languages.

In short: a virtual machine that is lightweight, fast, and very versatile.

## Similar Projects and Influences

* JVM: The Java virtual machine is cross-platform, but it is not lightweight and it is
not exactly designed to be versatile. There are many very different languages
implemented for the JVM, but they were implemented with some tricks to make them work
on that architecture, and only in the latest versions of the virtual machine are native
instructions for dynamic methods being added, which is obviously necessary to run
dynamic languages at a good speed.
* CLI: CLI is another specification of a virtual infrastructure, very similar to the
JVM, but unlike it, it is much better designed to be versatile with respect to the
paradigms that can be implemented on it. However, it is not much lighter than the JVM,
and the main problem is that the official and only stable implementation is Microsoft's
.NET, which is not cross-platform. Other implementations that are cross-platform, such
as Mono, have some compatibility problems with programs made for .NET. But most
importantly, I hate Microsoft, so CLI is automatically useless.
* Parrot: Parrot is a virtual machine designed for the efficient execution of dynamic
languages; it was born in the Perl community. Because it is designed for dynamic
languages it is quite versatile, and because it is only a specification it is
cross-platform. The problem with Parrot is that it is not exactly lightweight; it
certainly is more so than the JVM or .NET, but creating an implementation of Parrot is
not easy because the specification is somewhat complex. It is, so far, the closest to
what I want, but not close enough.
* Lua: Lua is a language, not a platform or a virtual machine; however, the official
implementation of Lua uses one, and that is the one I am referring to. It is designed
for Lua, whose only data types are booleans, numbers, strings and tables, and it is
optimized (because the language requires it) to be very, very simple and small, and
despite that it executes Lua programs at a very good speed. The problem with this
machine is precisely the opposite of the others: it meets almost all the requirements
but it is too lightweight, and it is not versatile enough to let optimized programs run
at a good speed; for example, because tables are the only structured data type, it is
impossible to do static binding or dispatch.
* Smalltalk: Smalltalk was the first language implemented with a virtual machine and
bytecodes. It only satisfies the cross-platform requirement and, to some extent,
simplicity, but not much. Most importantly, it uses several interesting ideas that
will influence my virtual machine.
* Mips: Mips is a real CPU architecture, not a virtual machine, but a lot can be
learned from real architectures. I chose Mips because it seems to be the simplest and
most popular, more so than ARM (x86 is the most popular, but its simplicity is, of
course, out of the question).
* LLVM: short for Low Level Virtual Machine, it is a virtual architecture designed to
closely resemble real CPU architectures while abstracting away the differences between
them. It is created to be an intermediate representation between a language and the
target real architecture. I chose it for basically the same reasons as Mips: it is not
a virtual machine to run programs on, but because it is very similar to real CPU
architectures a lot can be learned from it, all the more so since the work of
abstracting the differences between real architectures has already been done.

# Compilable

Code for the virtual machine is mainly interpreted by an implementation of it, but
there must also be a variant of that code that is easy to compile efficiently to native
code that does not depend on runtime environments. This is an important point of this
project; practically the only other existing project that is more or less similar is
LLVM, because it is compiled, but it is not interpreted. I do not know of any project
similar to mine in this respect.

Safe code is no different from normal code; it uses the same instructions, except that
a program can follow some rules to guarantee that its code can be compiled efficiently.
The virtual machine also supports some static data, such as types and constants, which
do not change while the program runs.

# Environment

I also want a complete execution environment, batteries included, with a development
framework, program and dependency management, and it should even work as an internet
browser for programs of the platform.

This is a project separate from the virtual machine, but it runs on top of it; it is
like the virtual machine on steroids.

It has to be a collection of libraries with frequently used functions, like Python's
standard libraries; if it can be made more complete, like the Java or .Net framework,
without increasing size and complexity too much, then go for it. It is also important
that programs (or modules) can declare dependencies, so that the platform takes care of
them. For that it must keep a list of installed modules and a connection to a central
repository, to download the ones that are missing (à la Ruby Version Manager).
Dependencies would then be loaded before processing the module's content, and the
dependencies of those dependencies are loaded the same way. Besides a main repository,
it must also be able to download modules from an internet URL for dependencies. This
same mechanism can be used to run modules as if they were complete web applications,
without needing to download and install them; this is part of the secondary goals I
have in mind, not just a coincidence.

## Similar Projects and Influences

* Java Framework
* .Net Framework
* Python Standard Library
* Smalltalk Environment
* Web (the whole web platform: javascript, html, etc.)
* Ruby Version Manager
* apt-get, yum

The main ones are Web and the Java Framework
|
Java
|
UTF-8
| 2,248 | 2.328125 | 2 |
[] |
no_license
|
/*
* $Id$
* --------------------------------------------------------------------------------------
* Copyright (c) MuleSource, Inc. All rights reserved. http://www.mulesource.com
*
* The software in this package is published under the terms of the MuleSource MPL
* license, a copy of which has been included with this distribution in the
* LICENSE.txt file.
*/
package org.mule.config.spring.handlers;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
import org.springframework.beans.factory.xml.ParserContext;
import org.w3c.dom.Element;
/**
* This Namespace handler extends the default Spring {@link org.springframework.beans.factory.xml.NamespaceHandlerSupport}
* to allow certain elements in document to be ignorred by the handler.
*/
public abstract class AbstractIgnorableNamespaceHandler extends NamespaceHandlerSupport
{
protected final void registerIgnoredElement(String name)
{
registerBeanDefinitionParser(name, new IgnorredDefinitionParser());
}
private class IgnorredDefinitionParser implements BeanDefinitionParser
{
public BeanDefinition parse(Element element, ParserContext parserContext)
{
/*
This MUST return null, otherwise duplicate BeanDefinitions will be registered,
which is wrong. E.g. for this config snippet we want only 1 SSL connector
created, not 4!
<ssl:connector name="SslConnector">
<ssl:ssl-client
clientKeyStore="clientKeyStore"
clientKeyStorePassword="mulepassword"/>
<ssl:ssl-key-store
keyStore="serverKeystore"
keyStorePassword="mulepassword"
keyPassword="mulepassword"/>
<ssl:ssl-server
trustStore="trustStore"
trustStorePassword="mulepassword"/>
</ssl:connector>
*/
return null;
}
}
}
|
PHP
|
UTF-8
| 1,597 | 2.5625 | 3 |
[] |
no_license
|
<?php
if (isset($_SERVER['REQUEST_METHOD'])) { // block web access; run this seed script from the CLI only
header('HTTP/1.0 403 Forbidden');
echo 'You are forbidden!';
exit;
}
if (!file_exists("./database/")) {
mkdir("./database/");
}
$categories = array(
"Clothing",
"Wood",
"Potery",
"Totebag",
"Papercuts"
);
$categories_serialized = serialize($categories);
file_put_contents("./database/categories", "$categories_serialized");
$products = array(
array(
"name" => "Lumbersexual",
"price" => 200,
"img" => "./resources/product_img/1.jpg",
"categories" => array("Clothing")
),
array(
"name" => "Seitan",
"price" => 100,
"img" => "./resources/product_img/2.jpg",
"categories" => array("Wood")
),
array(
"name" => "Normcore",
"price" => 400,
"img" => "./resources/product_img/3.jpg",
"categories" => array("Potery")
),
array(
"name" => "Pok",
"price" => 150,
"img" => "./resources/product_img/4.jpg",
"categories" => array("Totebag")
),
array(
"name" => "Poutine",
"price" => 320,
"img" => "./resources/product_img/5.jpg",
"categories" => array("Papercuts")
)
);
$products_serialized = serialize($products);
file_put_contents("./database/products", "$products_serialized");
$users = array(
"lol@google.com" => array(
"passwd" => "lol@google.com",
"cart" => "",
"type" => "1"),
"pop@google.com" => array(
"passwd" => "pop@google.com",
"cart" => "",
"type" => "1"),
);
$users_serialized = serialize($users);
file_put_contents("./database/users", "$users_serialized");
// echo password_hash("lol@google.com", PASSWORD_BCRYPT);
// echo intval(password_verify("lol@google.com", password_hash("lol@google.co", PASSWORD_BCRYPT)));
?>
|
TypeScript
|
UTF-8
| 2,241 | 3.578125 | 4 |
[] |
no_license
|
/*
* Human readable elapsed or remaining time (example: 3 minutes ago)
* @param {Date|Number|String} date A Date object, timestamp or string parsable with Date.parse()
* @return {string} Human readable elapsed or remaining time
* @author github.com/victornpb
*/
export const fromNow = (date: any) => {
const SECOND = 1000;
const MINUTE = 60 * SECOND;
const HOUR = 60 * MINUTE;
const DAY = 24 * HOUR;
const WEEK = 7 * DAY;
const MONTH = 30 * DAY;
const YEAR = 365 * DAY;
const units = [
{ max: 30 * SECOND, divisor: 1, past1: 'just now', pastN: 'just now', future1: 'just now', futureN: 'just now' },
{ max: MINUTE, divisor: SECOND, past1: 'a second ago', pastN: '# seconds ago', future1: 'in a second', futureN: 'in # seconds' },
{ max: HOUR, divisor: MINUTE, past1: 'a minute ago', pastN: '# minutes ago', future1: 'in a minute', futureN: 'in # minutes' },
{ max: DAY, divisor: HOUR, past1: 'an hour ago', pastN: '# hours ago', future1: 'in an hour', futureN: 'in # hours' },
{ max: WEEK, divisor: DAY, past1: 'yesterday', pastN: '# days ago', future1: 'tomorrow', futureN: 'in # days' },
{ max: 4 * WEEK, divisor: WEEK, past1: 'last week', pastN: '# weeks ago', future1: 'in a week', futureN: 'in # weeks' },
{ max: YEAR, divisor: MONTH, past1: 'last month', pastN: '# months ago', future1: 'in a month', futureN: 'in # months' },
{ max: 100 * YEAR, divisor: YEAR, past1: 'last year', pastN: '# years ago', future1: 'in a year', futureN: 'in # years' },
{ max: 1000 * YEAR, divisor: 100 * YEAR, past1: 'last century', pastN: '# centuries ago', future1: 'in a century', futureN: 'in # centuries' },
{ max: Infinity, divisor: 1000 * YEAR, past1: 'last millennium', pastN: '# millennia ago', future1: 'in a millennium', futureN: 'in # millennia' },
];
const diff = Date.now() - (typeof date === 'object' ? date : new Date(date)).getTime();
const diffAbs = Math.abs(diff);
for (const unit of units) {
if (diffAbs < unit.max) {
const isFuture = diff < 0;
const x = Math.round(Math.abs(diff) / unit.divisor);
if (x <= 1) return isFuture ? unit.future1 : unit.past1;
return (isFuture ? unit.futureN : unit.pastN).replace('#', x.toString());
}
}
}
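// Quick illustration (hypothetical timestamps):
//   fromNow(Date.now() - 3 * 60 * 1000);  // "3 minutes ago"
//   fromNow(Date.now() + 36e5);           // "in an hour"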
|
Java
|
UTF-8
| 258 | 1.726563 | 2 |
[] |
no_license
|
package com.bmdb.db;
import org.springframework.data.repository.CrudRepository;
import com.bmdb.business.Actor;
public interface ActorRepo extends CrudRepository <Actor, Integer> {
//Optional<Actor> findAllByName(String firstName, String lastName);
}
|
Java
|
UTF-8
| 8,685 | 2.21875 | 2 |
[] |
no_license
|
package com.wyfx.total.service.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.wyfx.total.entity.Dictionaries;
import com.wyfx.total.exception.CanNotMaveException;
import com.wyfx.total.exception.DictionariesNameConflictException;
import com.wyfx.total.mapper.DictionariesMapper;
import com.wyfx.total.service.IDictionaryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
@Service
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class)
public class IDictionaryServiceImpl implements IDictionaryService {
private static final Logger logger = LoggerFactory.getLogger(IDictionaryServiceImpl.class);
@Autowired
private DictionariesMapper dictionariesMapper;
@Override
    public boolean addDictionary(Dictionaries dictionaries) throws DictionariesNameConflictException {
        logger.info("adding dictionary entry===" + dictionaries);
        // check whether the dictionary name already exists
        Dictionaries dictionaries1 = dictionariesMapper.selectByOptionName(dictionaries.getOptionName());
        if (dictionaries1 != null) {
            throw new DictionariesNameConflictException("dictionary name already exists");
        }
        // todo start: implement ordering
        // query the current maximum order number (i.e. the current count of enabled entries)
        int maxEnableNum = dictionariesMapper.selectMaxEnableNum();
        // compute the new order number
        int newMaxEnableNum = maxEnableNum + 1;
        // check the enabled state of the dictionary {0: enabled; 1: disabled}
        if (dictionaries.getFlag() == 1) {
            dictionaries.setOrderNum(null);
        } else {
            dictionaries.setOrderNum(newMaxEnableNum);
        }
        //todo end
        int i = dictionariesMapper.insertSelective(dictionaries);
        return i >= 0;
    }
    /**
     * Update a dictionary entry.
     *
     * @param dictionaries
     * @return
     */
    @Override
    public boolean updateDictionary(Dictionaries dictionaries) throws DictionariesNameConflictException {
        logger.info("updating dictionary entry " + dictionaries);
        // check whether the dictionary name already exists
        Dictionaries dictionaries1 = dictionariesMapper.selectByOptionName(dictionaries.getOptionName());
        if (dictionaries1 != null) {
            throw new DictionariesNameConflictException("dictionary name already exists");
        }
        // todo start: implement ordering
        // query the current maximum order number (i.e. the current count of enabled entries)
        int maxEnableNum = dictionariesMapper.selectMaxEnableNum();
        // compute the new order number
        int newMaxEnableNum = maxEnableNum + 1;
        // check the enabled state of the dictionary {0: enabled; 1: disabled}
        if (dictionaries.getFlag() == 1) {
            dictionaries.setOrderNum(null);
        } else {
            dictionaries.setOrderNum(newMaxEnableNum);
        }
        //todo end
        int i = dictionariesMapper.updateByPrimaryKeySelective(dictionaries);
        return i >= 0;
    }
    /**
     * Enable or disable a dictionary entry.
     *
     * @param dicId flag {0: enable; 1: disable}
     * @return
     */
    @Override
    public boolean forbidDictionary(Integer dicId, Integer flag) {
        logger.info("enabling/disabling dictionary entry with id " + dicId);
        Dictionaries dictionaries = dictionariesMapper.selectByPrimaryKey(dicId);
        dictionaries.setUpdateTime(new Date());
        // state flag {0: enabled; 1: disabled}
        if (flag == 0) {
            // mark the dictionary entry as enabled
            dictionaries.setFlag(flag);
            // enable ordering
            // query the current maximum order number (i.e. the current count of enabled entries)
            int maxEnableNum = dictionariesMapper.selectMaxEnableNum();
            // compute the new order number
            int newMaxEnableNum = maxEnableNum + 1;
            // set the order number
            dictionaries.setOrderNum(newMaxEnableNum);
        } else {
            // mark the dictionary entry as disabled
            dictionaries.setFlag(flag);
            // update the order of all remaining entries
            boolean b = updateAllOrder(dictionaries);
            if (!b) {
                return false;
            }
            // clear the order field
            dictionaries.setOrderNum(null);
        }
        int i = dictionariesMapper.updateByPrimaryKey(dictionaries);
        return i >= 0;
    }
    /**
     * Update the order numbers of all affected entries.
     *
     * @return
     */
    public boolean updateAllOrder(Dictionaries dictionaries) {
        // get the order number of this dictionary entry
        Integer orderNum = dictionaries.getOrderNum();
        // query all entries whose order number is larger than this one's
        List<Integer> integers = dictionariesMapper.selectAllOrderNumLargerThanMe(orderNum);
        // nothing to do if there is no such entry
        if (integers.size() == 0) {
            return true;
        }
        // decrease all of those order numbers by one
        int b = dictionariesMapper.updateAllOrderNum(integers);
        if (b < 1) {
            logger.error("failed to update the data");
            return false;
        }
        return true;
    }
    /**
     * Query all dictionary entries.
     *
     * @return
     */
    @Override
    public Map selectAll(Integer pageNum, Integer pageSize) {
        logger.info("querying all dictionary entries");
        PageHelper.startPage(pageNum, pageSize);
        List<Map> dictionaries = dictionariesMapper.selectAllOrderByFlag();
        // maximum number of enabled entries, i.e. the number of sortable entries
        int maxEnableNum = dictionariesMapper.selectMaxEnableNum();
        PageInfo pageInfo = new PageInfo(dictionaries);
        Map respMap = new HashMap((int) (3 / 0.75F + 1));
        respMap.put("dicList", pageInfo);
        respMap.put("count", pageInfo.getTotal());
        respMap.put("max", maxEnableNum);
        return respMap;
    }
    /**
     * Query the details of a single dictionary entry.
     *
     * @param dicId
     * @return
     */
    @Override
    public Dictionaries findDicByDicId(Integer dicId) {
        logger.info("querying details of dictionary entry " + dicId);
        Dictionaries dictionaries = dictionariesMapper.selectByPrimaryKey(dicId);
        return dictionaries;
    }
    /**
     * Check whether the dictionary contains the given device type.
     *
     * @return the entry's uuid if it is contained, otherwise null
     */
    @Override
    public String selectAllDicIfHaveDeviceType(String type) {
        logger.info("checking whether the dictionary contains device type and returning its uuid: " + type);
        List<Map> mapList = dictionariesMapper.selectAllOrderByFlag();
        List<Map> option = new ArrayList<>();
        for (Map map : mapList) {
            if (map.get("option_name").equals(type)) {
                option.add(map);
            }
        }
        if (option.size() == 0) {
            return null;
        }
        String uuid = option.get(0).get("uuid").toString();
        logger.info("returned uuid===" + uuid);
        return uuid;
    }
    /**
     * Move an entry within the ordering.
     *
     * @param dicId this field is currently unused
     * @param step
     * @param orderNum
     * @return
     */
    @Override
    public boolean moveOrderNum(Integer dicId, Integer step, Integer orderNum) throws CanNotMaveException {
        // look up the entry to move by its order number
        Dictionaries dictionaries = dictionariesMapper.selectByOrderNum(orderNum);
        if (dictionaries == null) {
            throw new CanNotMaveException("a disabled dictionary entry cannot be reordered");
        }
        logger.info("entry being moved=" + dictionaries);
        // look up the entry whose order changes as a side effect
        Dictionaries dictionaries1 = dictionariesMapper.selectByOrderNum(orderNum + step);
        logger.info("entry moved as a side effect " + dictionaries1);
        // query the maximum order number
        int maxEnableNum = dictionariesMapper.selectMaxEnableNum();
        // the first entry cannot move up and the last entry cannot move down
        if (orderNum == 1 && step == -1 || orderNum == maxEnableNum && step == 1) {
            throw new CanNotMaveException("this move is not allowed");
        }
        // update the moved entry
        dictionaries.setOrderNum(orderNum + step);
        int i = dictionariesMapper.updateByPrimaryKeySelective(dictionaries);
        // update the entry moved as a side effect
        dictionaries1.setOrderNum(orderNum);
        int i1 = dictionariesMapper.updateByPrimaryKeySelective(dictionaries1);
        if (i < 1 || i1 < 1) {
            logger.error("failed to update the dictionary ordering data");
            return false;
        }
        return true;
    }
}
|
C#
|
UTF-8
| 2,063 | 2.921875 | 3 |
[] |
no_license
|
using System;
using System.Collections.Generic;
using Api.Models;
using Api.Clients;
using System.Threading.Tasks;
using System.Linq;
namespace Api.Services
{
public interface ICustomersService {
Task<CustomersListDto> GetCustomersAsync();
}
public class CustomersService : ICustomersService
{
private readonly IEasyBetApiClient client;
private static double RiskyBetThreshold = 200;
public CustomersService(IEasyBetApiClient client)
{
this.client = client;
}
public async Task<CustomersListDto> GetCustomersAsync()
{
var customersList = new List<Customer>();
var customers = await client.GetCustomersAsync();
var bets = await client.GetBetsAsync();
var customersDict = customers.ToDictionary(x => x.Id, x => x.Name);
            // group bets by customer id to aggregate bet count and bet amount.
var groupedBets = bets.GroupBy(x => x.CustomerId);
foreach(var group in groupedBets) {
var betAmount = group.Sum(x => x.Stake);
var customer = new Customer
{
Name = customersDict[group.Key],
BetsAmount = betAmount,
BetsCount = group.Count(),
Risky = betAmount > RiskyBetThreshold
};
customersList.Add(customer);
customersDict.Remove(group.Key);
}
// Add remaining customers
foreach(var (id, name) in customersDict) {
customersList.Add(new Customer
{
Name = name,
BetsAmount = 0,
BetsCount = 0,
Risky = false,
});
}
return new CustomersListDto
{
Customers = customersList.OrderBy(x => x.Name),
TotalBetsValue = customersList.Sum(x => x.BetsAmount)
};
}
}
}
|
Python
|
UTF-8
| 497 | 2.78125 | 3 |
[] |
no_license
|
class SansArtifact:
def __init__(self):
self.lightTotal = 0
def weapons(self, weapon_list):
a = 0
for light in weapon_list:
a += light*0.1304
self.lightTotal += a
def armor(self, armor_list):
b = 0
for light in armor_list:
b += light*0.1087
self.lightTotal += b
def classItems(self, item_list):
i = 0
for light in item_list:
i += light*0.087
self.lightTotal += i
|
Java
|
UTF-8
| 2,483 | 2.328125 | 2 |
[
"MIT"
] |
permissive
|
package edu.usm.sosw.sword.db;
import java.util.List;
import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.BindBean;
import org.skife.jdbi.v2.sqlobject.GetGeneratedKeys;
import org.skife.jdbi.v2.sqlobject.SqlQuery;
import org.skife.jdbi.v2.sqlobject.SqlUpdate;
import org.skife.jdbi.v2.sqlobject.customizers.RegisterMapper;
import edu.usm.sosw.sword.api.Jail;
import edu.usm.sosw.sword.mappers.JailMapper;
import edu.usm.sosw.sword.resources.JailResource;
/**
* This interface exists to provide proper access to the <code>Jail</code> table.
* SQL is written in the <code>SqlUpdate and SqlQuery</code> annotations above every function
* and then rewritten and executed by JDBI.
*
* The <code>JailDAO</code> interface is used by the <code>JailResources</code> class in the <code>resources</code> package.
*
* <code>RegisterMapper</code> annotation allows JDBI to bind the <code>JailMapper</code> class to this interface.
* Without the <code>RegisterMapper</code> annotation, the SQL query results would not bind properly
* to the </code>Jail Object.</code>
*
* @author Jonathon Evans
* @version 0.0.1
* @see Jail
* @see JailMapper
* @see JailResource
*/
@RegisterMapper(JailMapper.class)
public interface JailDAO {
@SqlUpdate("CREATE TABLE IF NOT EXISTS`static_jails` (\n" +
" `id` int(11) NOT NULL AUTO_INCREMENT,\n" +
" `name` varchar(100) DEFAULT '',\n" +
" `unit` varchar(45) DEFAULT '',\n" +
" `cells` varchar(45) DEFAULT '',\n" +
" PRIMARY KEY (`id`)\n" +
") ENGINE=MyISAM AUTO_INCREMENT=25 DEFAULT CHARSET=latin1;\n" +
"")
void createJailTable();
@SqlQuery("select * from static_jails")
List<Jail> getAll();
@SqlQuery("select * from static_jails where id = :id")
Jail findById(@Bind("id") Integer id);
@SqlUpdate("delete from static_jails where id = :id")
void deleteById(@Bind("id") Integer id);
@SqlUpdate("UPDATE `sword-test`.`static_jails`\n" +
"SET\n" +
"`id` = <{id: }>,\n" +
"`name` = <{name: }>,\n" +
"`unit` = <{unit: }>,\n" +
"`cells` = <{cells: }>\n" +
"WHERE `id` = <{expr}>;\n" +
"")
void updateById(@BindBean Jail Jail);
@GetGeneratedKeys
@SqlUpdate("INSERT INTO `sword-test`.`static_jails`\n" +
"(`id`,\n" +
"`name`,\n" +
"`unit`,\n" +
"`cells`)\n" +
"VALUES\n" +
"(<{id: }>,\n" +
"<{name: }>,\n" +
"<{unit: }>,\n" +
"<{cells: }>);\n" +
"")
int insert(@BindBean Jail Jail);
void close();
}
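// Illustrative wiring only (JDBI v2 style; the DBI instance below is assumed to exist elsewhere):
//   JailDAO dao = dbi.onDemand(JailDAO.class);
//   dao.createJailTable();
//   int newId = dao.insert(someJail);   // someJail is a hypothetical populated Jail bean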
|
PHP
|
UTF-8
| 886 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
<?php
namespace Symfony\Component\Validator\Constraints;
use Symfony\Component\Validator\Constraint;
use Symfony\Component\Validator\ConstraintValidator;
use Symfony\Component\Validator\Exception\UnexpectedTypeException;
class TimeValidator extends ConstraintValidator
{
const PATTERN = '/(0[0-9]|1[0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])/';
public function isValid($value, Constraint $constraint)
{
if ($value === null) {
return true;
}
        if (!is_scalar($value) && !(is_object($value) && method_exists($value, '__toString'))) {
throw new UnexpectedTypeException($value, 'string');
}
$value = (string)$value;
if (!preg_match(self::PATTERN, $value)) {
$this->setMessage($constraint->message, array('value' => $value));
return false;
}
return true;
}
}
|
Java
|
UTF-8
| 1,179 | 2.0625 | 2 |
[] |
no_license
|
package com.trajectoryvisualizer.dao;
import com.trajectoryvisualizer.entity.RawStudies;
import com.trajectoryvisualizer.entity.TraclusStudies;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import javax.transaction.Transactional;
import java.util.List;
/**
* Defines methods for managing cluster data.
*/
@Repository
public interface ClusterDao extends JpaRepository<TraclusStudies, Integer> {
@Query(value = "select * from traclus_studies ORDER BY clusterId", nativeQuery = true)
Page<TraclusStudies> findAll(Pageable var1);
@Query(value = "select * from traclus_studies ORDER BY clusterId", nativeQuery = true)
List<RawStudies> getPoints();
@Transactional
@Modifying
@Query(value = "delete from traclus_studies", nativeQuery = true)
void deleteClusterStudy();
@Transactional
@Modifying
@Query(value = "commit", nativeQuery = true)
void commit();
}
|
JavaScript
|
UTF-8
| 3,571 | 2.640625 | 3 |
[] |
no_license
|
'use strict';
document.addEventListener ( 'DOMContentLoaded', () => {
// DOM-Elemente
let email = document.querySelector('#email');
let password = document.querySelector('#password');
let btn = document.querySelector('#btn');
let linkNeueruser = document.querySelector('#link-neueruser');
let firstname=[];
let node, newUser, el, nodeLogin, textnodeLogin, addLogin
linkNeueruser.addEventListener ( 'click', () => {
node = document.createElement("input");
newUser = document.getElementById("eingabe");
newUser.insertBefore(node, newUser.childNodes[0]);
node.setAttribute("id", "firstname");
node.setAttribute("name", "firstname");
node.setAttribute("placeholder", "Vorname");
node.setAttribute("required", true);
firstname = document.querySelector('#firstname');
el = document.getElementById('link-neueruser');
el.remove();
document.getElementById("btn").innerHTML = "Registrieren";
nodeLogin = document.createElement("a");
textnodeLogin = document.createTextNode("zurück zu Anmeldung");
nodeLogin.appendChild(textnodeLogin);
nodeLogin.classList.add("auth");
nodeLogin.setAttribute("href", "auth.html");
addLogin = document.querySelector(".main");
addLogin.appendChild(nodeLogin);
});
btn.addEventListener ( 'click', () => {
// --- validate
let fieldsfull = true;
let matches = document.getElementById("eingabe").querySelectorAll("input");
matches.forEach(element => { // short for all inputs
if (!element.validity.valid) {
element.focus(); fieldsfull=false; return
}
});
if(!fieldsfull) return
// -----------
let meinRequest = newRquest('/auth');
fetch( meinRequest ).then(
erg => erg.json() //console.log(erg)
).then(
            erg => token(erg) // -------------------- goes into the token() function below
).catch(
err => fehler(err)
)
});
function newRquest(auth) {
return new Request(
auth,
{
method: 'post',
headers: { 'content-type': 'application/json' },
body: JSON.stringify({
email: ( email.value ),
password: ( password.value ),
firstname: ( firstname.value )
})
}
)
}
function token(usertoken){
console.log(usertoken,"usertoken");
        if(usertoken.signed_user.value){ // user already known
localStorage.setItem("userGesamt", JSON.stringify(usertoken.signed_user));
window.location.replace("/welcome");
}else{
let neuerUser = {
id: usertoken.signed_user.id,
key: usertoken.signed_user.id,
value:{
firstname: firstname.value,
email: email.value,
password: password.value,
rev: usertoken.signed_user.rev
}
}
localStorage.setItem("userGesamt", JSON.stringify(neuerUser));
window.location.replace("/welcome");
}
}
    function fehler(err){console.log(err, "not possible")}
})
|
Markdown
|
UTF-8
| 3,116 | 2.9375 | 3 |
[] |
no_license
|
# Models for Vacancy
## Database

### Organization
| field | type | description |
| ------------ | ---------- | ------------------------------------------------------------------------------------ |
| `id` | `int` | just an unique identifier |
| `name` | `varchar` | displayed organization name |
| `locale` | `varchar` | default locale setting for users belonging to organization (language code [PL, ENG]) |
| `admin_id` | `int` | id of user who has administrative privileges of an organization |
| `created_at` | `datetime` | organization creation timestamp |
### Template
| field | type | description |
| ----------------- | ---------- | -------------------------------------------------------------- |
| `id` | `int` | just an unique identifier |
| `from`            | `datetime` | start hour of the template (the date part is not significant)        |
| `to`              | `datetime` | end hour of the template (the date part is not significant)          |
| `weekday`         | `int`      | day of the week the template belongs to, counting from Monday (as 1) |
| `organization_id` | `int` | organization that template belongs to |
| `created_at` | `datetime` | template creation timestamp |
### User
| field | type | description |
| ----------------- | ---------- | ------------------------------------- |
| `id` | `int` | just an unique identifier |
| `name` | `varchar` | displayed organization name |
| `locale` | `varchar` | user locale (language code [PL, ENG]) |
| `organization_id` | `int` | organization that user belongs to |
| `created_at` | `datetime` | user creation timestamp |
### Availability
| field | type | description |
| ------------ | ---------- | ---------------------------------------------------------- |
| `id` | `int` | just an unique identifier |
| `from`       | `datetime` | start hour of the availability (the date part is not significant)  |
| `to`         | `datetime` | end hour of the availability (the date part is not significant)    |
| `available`  | `boolean`  | indicates whether the user is available in the given time range    |
| `uncertain`  | `boolean`  | indicates that the user is not yet sure about the given time range |
| `created_at` | `datetime` | availability creation timestamp |
## GraphQL
- getMyStuff (better name)?
- team
- mapped
- overview
- createTemplate
- createAvailability
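
A minimal sketch of how the `Template` and `Availability` rows above could be typed on a TypeScript client. The field names mirror the tables; everything else (camelCase mapping, `Date` types) is an assumption for illustration only:

```ts
// Illustrative types only - they mirror the tables above, not a generated schema.
interface Template {
  id: number;
  from: Date;      // only the time-of-day part is significant
  to: Date;
  weekday: number; // counting from Monday (as 1)
  organizationId: number;
  createdAt: Date;
}

interface Availability {
  id: number;
  from: Date;
  to: Date;
  available: boolean; // user is available in this range
  uncertain: boolean; // user is not yet sure about this range
  createdAt: Date;
}
```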
|
Java
|
UTF-8
| 6,760 | 2.921875 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
/*
* HE_Mesh Frederik Vanhoutte - www.wblut.com
*
* https://github.com/wblut/HE_Mesh
* A Processing/Java library for for creating and manipulating polygonal meshes.
*
* Public Domain: http://creativecommons.org/publicdomain/zero/1.0/
*/
package wblut.geom;
/**
* Interface for implementing non-mutable transformation operations on 3D
 * coordinates. If the operations should change the calling object use
* {@link wblut.geom.WB_MutableCoordinateTransform3D}.
*
* None of the operators change the calling object. Unlabelled operators, such
* as "scale",create a new WB_Coord. Operators with the label "Into", such as
* "scaleInto" store the result into a WB_MutableCoord passed as additional
* parameter.
*
* @author Frederik Vanhoutte
*
*/
public interface WB_CoordinateTransform3D extends WB_CoordinateTransform2D {
/**
* Apply WB_Transform. Mode (point, vector or normal) is decided by
* implementation.
*
* @param T
* @return new WB_Coord
*/
public WB_Coord apply(final WB_Transform T);
/**
* Apply WB_Transform. Mode (point, vector or normal) is decided by
* implementation.
*
* @param result
* @param T
*/
public void applyInto(WB_MutableCoord result, final WB_Transform T);
/**
* Apply WB_Transform as point.
*
* @param T
* @return new WB_Coord
*/
public WB_Coord applyAsPoint(final WB_Transform T);
/**
* Apply WB_Transform as point.
*
* @param result
* @param T
*/
public void applyAsPointInto(final WB_MutableCoord result, final WB_Transform T);
/**
* Apply WB_Transform as vector.
*
* @param T
* @return new WB_Coord
*/
public WB_Coord applyAsVector(final WB_Transform T);
/**
* Apply WB_Transform as vector.
*
* @param result
* @param T
*/
public void applyAsVectorInto(final WB_MutableCoord result, final WB_Transform T);
/**
* Apply WB_Transform as normal.
*
* @param T
* @return new WB_Coord
*/
public WB_Coord applyAsNormal(final WB_Transform T);
/**
* Apply WB_Transform as normal.
*
* @param result
* @param T
*/
public void applyAsNormalInto(final WB_MutableCoord result, final WB_Transform T);
/**
* 3D translate.
*
* @param px
* @param py
* @param pz
* @return new WB_Coord
*/
public WB_Coord translate(final double px, final double py, double pz);
/**
* 3D translate.
*
* @param result
* @param px
* @param py
* @param pz
* @return new WB_Coord
*/
public WB_Coord translateInto(final WB_MutableCoord result, final double px, final double py, double pz);
/**
* 3D translate.
*
* @param p
* @return new WB_Coord
*/
public WB_Coord translate(final WB_Coord p);
/**
* 3D translate.
*
* @param result
* @param p
* @return new WB_Coord
*/
public WB_Coord translateInto(final WB_MutableCoord result, final WB_Coord p);
/**
* Rotate around axis defined by two points.
*
* @param angle
* @param p1x
* @param p1y
* @param p1z
* @param p2x
* @param p2y
* @param p2z
* @return new WB_Coord
*/
public WB_Coord rotateAboutAxis2P(final double angle, final double p1x, final double p1y, final double p1z,
final double p2x, final double p2y, final double p2z);
/**
* Rotate around axis defined by two points.
*
* @param result
* @param angle
* @param p1x
* @param p1y
* @param p1z
* @param p2x
* @param p2y
* @param p2z
*/
public void rotateAboutAxis2PInto(WB_MutableCoord result, final double angle, final double p1x, final double p1y,
final double p1z, final double p2x, final double p2y, final double p2z);
/**
* Rotate around axis defined by two points.
*
* @param angle
* @param p1
* @param p2
* @return new WB_Coord
*/
public WB_Coord rotateAboutAxis2P(final double angle, final WB_Coord p1, final WB_Coord p2);
/**
* Rotate around axis defined by two points.
*
* @param result
* @param angle
* @param p1
* @param p2
*/
public void rotateAboutAxis2PInto(WB_MutableCoord result, final double angle, final WB_Coord p1, final WB_Coord p2);
/**
* Rotate around axis defined by point and direction.
*
* @param angle
* @param px
* @param py
* @param pz
* @param ax
* @param ay
* @param az
* @return new WB_Coord
*/
public WB_Coord rotateAboutAxis(final double angle, final double px, final double py, final double pz,
final double ax, final double ay, final double az);
/**
* Rotate around axis defined by point and direction.
*
* @param result
* @param angle
* @param px
* @param py
* @param pz
* @param ax
* @param ay
* @param az
*/
public void rotateAboutAxisInto(WB_MutableCoord result, final double angle, final double px, final double py,
final double pz, final double ax, final double ay, final double az);
/**
* Rotate around axis defined by point and direction.
*
* @param angle
* @param p
* @param a
* @return new WB_Coord
*/
public WB_Coord rotateAboutAxis(final double angle, final WB_Coord p, final WB_Coord a);
/**
* Rotate around axis defined by point and direction.
*
* @param result
* @param angle
* @param p
* @param a
*/
public void rotateAboutAxisInto(WB_MutableCoord result, final double angle, final WB_Coord p, final WB_Coord a);
/**
* Rotate around axis defined by origin and direction.
*
* @param angle
* @param x
* @param y
* @param z
*
* @return new WB_Coord
*/
public WB_Coord rotateAboutOrigin(final double angle, final double x, final double y, final double z);
/**
* Rotate around axis defined by origin and direction.
*
* @param result
* @param angle
* @param x
* @param y
* @param z
*/
public void rotateAboutOriginInto(WB_MutableCoord result, final double angle, final double x, final double y,
final double z);
/**
* Rotate around axis defined by origin and direction.
*
*
* @param angle
* @param v
* @return new WB_Coord
*/
public WB_Coord rotateAboutOrigin(final double angle, final WB_Coord v);
/**
* Rotate around axis defined by origin and direction.
*
* @param result
* @param angle
* @param v
*/
public void rotateAboutOriginInto(WB_MutableCoord result, final double angle, final WB_Coord v);
/**
* Non-uniform scale.
*
* @param fx
* @param fy
* @param fz
* @return
*/
public WB_Coord scale(final double fx, final double fy, final double fz);
/**
* Non-uniform scale.
*
* @param result
* @param fx
* @param fy
* @param fz
*/
public void scaleInto(WB_MutableCoord result, final double fx, final double fy, final double fz);
/**
* Uniform scale.
*
* @param f
* @return
*/
public WB_Coord scale(final double f);
/**
* Uniform scale.
*
* @param result
* @param f
*/
public void scaleInto(WB_MutableCoord result, final double f);
}
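// Illustrative only: the naming convention described in the interface Javadoc, in code form.
// WB_Point is assumed to implement this interface and WB_MutableCoord (an assumption, not asserted by this file):
//   WB_Point p = new WB_Point(1, 2, 3);
//   WB_Coord q = p.translate(1.0, 0.0, 0.0);   // "translate"     -> returns a new coordinate
//   WB_Point r = new WB_Point();
//   p.translateInto(r, 1.0, 0.0, 0.0);         // "translateInto" -> stores the result in r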
|
Python
|
UTF-8
| 1,664 | 2.8125 | 3 |
[] |
no_license
|
from ecinema.models.model import Model
from ecinema.data.PriceData import PriceData
class Price(Model):
def __init__(self):
self.__id = None
self.__price = None
self._Model__is_init = False
self._Model__id = None
self.__data_access = PriceData()
def obj_as_dict(self, key: str):
return self.__data_access.get_info(key)
def get_all_prices(self):
return self.__data_access.get_all_prices()
def fetch(self, key: str):
price = self.obj_as_dict(key)
if price is not None:
self.set_id(price['price_id'])
self.set_price(price['price'])
self.set_is_init()
return True
return False
def create(self, **kwargs):
price = {}
for key, value in kwargs.items():
price[key] = value
self.set_price(price['price'])
self.set_is_init()
        member_tup = (self.get_price(),)  # trailing comma makes this a one-element tuple
self.set_id(self.__data_access.insert_info(member_tup))
def save(self) -> bool:
if not self.is_initialized():
return False
member_tup = (self.get_price(), self.get_id())
self.__data_access.update_info(member_tup)
return True
def delete(self, key: str):
self.__data_access.delete(key)
def get_price(self) -> str:
return self.__price
def set_price(self, price: str):
self.__price = price
def get_tax_price(self):
tax = self.__data_access.get_tax()
return float(tax['price'])
def get_online_fee(self):
fee = self.__data_access.get_fee()
return float(fee['price'])
|
JavaScript
|
UTF-8
| 752 | 3.71875 | 4 |
[
"MIT"
] |
permissive
|
/**
*
* # Ease
*
* Produces a function for composing easing functions.
* ```
* ease(0.00, 0.25, t => sin(t * 25))(ease(0.25, 1.00, t => 5)())
* ```
*
**/
export const ease = (start = 0.0, end = 1.0, op = (t) => 1) => {
const compose = (next = (t) => 1) => {
const fn = (t) => {
if (t >= start && t <= end) {
return op((t - start) / (end - start));
} else {
return next(t);
}
};
return fn;
};
return compose;
};
export const linear = (start, end) => (t) => start + t * (end - start);
ease.linear = linear;
ease.signature =
'ease(start:number = 0, end:number = 1, op:function) -> function';
linear.signature = 'linear(start:number = 0, end:number = 1) -> function';
export default ease;
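// Quick illustration (hypothetical values): two segments composed into one easing function.
//   const f = ease(0.0, 0.5, linear(0, 1))(ease(0.5, 1.0, linear(1, 0))());
//   f(0.25); // 0.5 - first segment, rising
//   f(0.75); // 0.5 - second segment, falling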
|