language (stringclasses, 15 values) | src_encoding (stringclasses, 34 values) | length_bytes (int64, 6–7.85M) | score (float64, 1.5–5.69) | int_score (int64, 2–5) | detected_licenses (listlengths, 0–160) | license_type (stringclasses, 2 values) | text (stringlengths, 9–7.85M)
---|---|---|---|---|---|---|---|
C# | UTF-8 | 849 | 3.59375 | 4 | [] | no_license |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ConsoleApplication1
{
public class CoolFunctions
{
        // Iteratively computes n! and returns it.
        public int iterfactorial(int n)
        {
            int answer = 1;
            for (var i = n; i >= 1; i--)
            {
                answer *= i;
            }
            return answer;
        }
        public static int[] InsertionSort(int[] inputs)
        {
            // Sort into a copy so the caller's array is left untouched.
            var answer = new int[inputs.Length];
            Array.Copy(inputs, answer, inputs.Length);
            // Step 1: iterate, inserting each element into the already-sorted prefix.
            for (var i = 1; i < answer.Length; i++)
            {
                var key = answer[i];
                var j = i - 1;
                while (j >= 0 && answer[j] > key)
                {
                    answer[j + 1] = answer[j];
                    j--;
                }
                answer[j + 1] = key;
            }
            return answer;
        }
}
}
|
Markdown | UTF-8 | 3,985 | 2.765625 | 3 | ["MIT"] | permissive |
### Validation
[Code Climate](https://codeclimate.com/github/Resize/Validation)
An extensive pack of customizable and extensible validations for your objects.
### Installation
```shell
$ gem install resize-validation
```
```ruby
require 'resize-validation'
```
### Usage
A minimal implementation could be:
```ruby
Resize::Validation::validate(email: 'londerson@gmail.com')
# => true
```
```ruby
Resize::Validation::validate(string: 5)
# => false
```
Raising a custom exception:
```ruby
Resize::Validation::validate!(integer: 'five')
# => "The input five does not match the rule integer (TypeError)"
```
A composable implementation could be:
```ruby
Resize::Validation::validates do |v|
v.validate(integer: 5)
v.validate(uppercase: "NAME")
v.validate(boolean: false)
end
# => true
```
```ruby
Resize::Validation::validates do |v|
v.validate(integer: "5")
v.validate(uppercase: "NAME")
v.validate(boolean: false)
end
# => false
```
Catching errors:
```ruby
Resize::Validation::validates! do |v|
v.validate(integer: 5)
v.validate(uppercase: "NAME")
v.validate(boolean: false)
end
# => {:status=>true, :errors=>[]}
```
```ruby
Resize::Validation::validates! do |v|
v.validate(integer: "5")
v.validate(uppercase: "NAME")
v.validate(boolean: false)
end
# => {:status=>false, :errors=>["The input 5 does not match the rule integer"]}
```
Rules
-----
### Numeric
* [is_boolean](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_boolean.rb "Title")
* [is_positive](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_positive.rb "Title")
* [is_negative](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_negative.rb "Title")
* [is_multiple](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_multiple.rb "Title")
* [is_integer](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_integer.rb "Title")
### Date
* [is_date](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_date.rb "Title")
### Types
* [is_string](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_string.rb "Title")
### Strings
* [is_empty](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_empty.rb "Title")
* [is_downcase](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_downcase.rb "Title")
* [is_uppercase](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_uppercase.rb "Title")
### Others
* [is_email](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_email.rb "Title")
* [is_json](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_json.rb "Title")
* [is_rg](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_rg.rb "Title")
* [is_domain](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_uppercase.rb "Title")
* [is_equals](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_equals.rb "Title")
* [is_contains](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_contains.rb "Title")
* [is_roman](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_roman.rb "Title")
* [is_begins](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_begins.rb "Title")
* [is_ends](https://github.com/Resize/Validation/blob/master/lib/resize/rules/is_ends.rb "Title")
### Contributing
1. Fork it ( https://github.com/Resize/Validation/fork )
2. Create your feature branch (`git checkout -b new_feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin new_feature`)
5. Create a new Pull Request
### License
Copyright © 2014 Londerson Araújo, released under the MIT license
|
Python | UTF-8 | 1,422 | 2.515625 | 3 | [] | no_license |
#!/usr/bin/env python
"""\
Flask S3 Example
"""
import sha
import hmac
from uuid import uuid4
from json import dumps
from base64 import b64encode
from datetime import datetime, timedelta
from flask import Flask, render_template, request, jsonify
# Flask application
app = Flask(__name__)
app.config.from_pyfile('config.py')
# Views
@app.route('/')
def index():
return render_template('index.html')
@app.route('/signed_urls')
def signed_urls():
def make_policy():
policy_object = {
'expiration': (datetime.now() + timedelta(hours=24)).strftime('%Y-%m-%dT%H:%M:%S.000Z'),
'conditions': [
{'bucket': app.config['AWS_S3_BUCKET']},
{'acl': 'public-read'},
['starts-with', '$key', 'uploads/'],
{'success_action_status': '201'}
]
}
return b64encode(dumps(policy_object))
def sign_policy(policy):
return b64encode(hmac.new(app.config['AWS_SECRET_ACCESS_KEY'], policy, sha).digest())
title = request.args['title']
path = uuid4().hex + '/' + title
policy = make_policy()
return jsonify({
'policy': policy,
'signature': sign_policy(policy),
'key': 'uploads/' + path,
'success_action_redirect': '/',
})
# Run development server
if __name__ == '__main__':
app.run(app.config.get('HOST'), app.config.get('PORT'), app.debug)
|
Python | UTF-8 | 3,597 | 3.5 | 4 | [] | no_license |
"""
A simple module for sending/receiving data via serial (USB) to the board.
"""
import pdb
import time
import json
## Note:
# The following article can be referenced to help JSON serialize a custom class:
# https://medium.com/python-pandemonium/json-the-python-way-91aac95d4041
def char_sum(s):
return sum(bytes(s, 'utf-8'))
def safe_json(data):
"""
Checks if the input data is serializable via JSON
"""
if data is None:
return True
elif isinstance(data, (str, bool, int, float)):
return True
elif isinstance(data, (tuple, list)):
return all(safe_json(x) for x in data)
elif isinstance(data, dict):
return all(isinstance(k, str) and safe_json(v) for k, v in data.items())
return False
def send(board, data):
"""
Sends data over serial to the board (HITL)
"""
if not safe_json(data):
raise ValueError("FAIL: sending data that is unserializable via JSON.")
board.reset_output_buffer() # clear the current buffer in case previously sent data was not received
msg = json.dumps(data)
to_send = json.dumps((msg, char_sum(msg))) + '\r\n'
board.write(to_send.encode())
while board.in_waiting == 0:
pass
board.read_until() # here because for some reason the sent sensors are echoed back
def receive(board, timeout=1.0):
"""
Receives data over serial sent by the board (HITL).
Note that the function will wait for at most `timeout` seconds.
"""
start = time.time()
while board.in_waiting == 0:
if (time.time() - start) > timeout:
return None
while board.in_waiting > 0: # keep trying until buffer is empty
try:
encoded = board.read_until()
msg = json.loads(encoded)
assert char_sum(msg[0]) == msg[1], "checksum failed"
return json.loads(msg[0])
except (json.decoder.JSONDecodeError, AssertionError):
if encoded == b"Traceback (most recent call last):\r\n":
# the board code has errored out
print("ERROR: board code has errored out:\n")
time.sleep(.1)
while board.in_waiting > 0:
print(board.read_until())
quit()
return False
def board_communicate(board, sensors, max_attempts=6):
"""
Publishes the latest sensor measurements, then polls the board for the
latest commanded dipole.
"""
send(board, sensors)
fails = 0
while fails < max_attempts:
data = receive(board)
if data == False:
# the board sent something unreadable
send(board, "DATA_RECEIVE_ERROR")
fails += 1
# print("data was false") # for debugging
elif data == None:
# the board did not send anything - is likely stuck on input()
send(board, sensors)
fails += 1
# print("data was none") # for debugging
elif data == "DATA_RECEIVE_ERROR":
# the board did not understand what was last sent
send(board, sensors)
fails += 1
# print("board didn't understand") # for debugging
elif type(data) == list and data[0] == "PASSTHROUGH_MESSAGE":
# simply allow the message to pass through and continue
print(data)
else:
return data
time.sleep(.1) # add a slight delay -- ONLY if some of the three errors above occurred
raise RuntimeError("could not communicate with board in {} attempts.".format(max_attempts))
|
C++ | UTF-8 | 2,587 | 2.59375 | 3 | [] | no_license |
#ifndef CARCORE_H
#define CARCORE_H
#include <Servo.h>
#include "Arduino.h"
#include <stdint.h>
#include <math.h>
#include "PID.h"
/*****Defines******/
#define END 0
#define Obstacle 1
#define Drive 2
#define turnRight 0
#define turnLeft 1
#define noTurn 2
#define CRUISE_SPEED 1400
#define MIN_SPEED 1450
#define MAX_SPEED 1350
#define MIN_ACTUATOR_VOLTAGE 0.7
#define MAX_ACTUATOR_VOLTAGE 3
#define CAR_DIRECTION_FORWARD 0
#define CAR_DIRECTION_LEFT 1
#define CAR_DIRECTION_RIGHT 2
#define CAR_DIRECTION_STOP 3
#define THRESHOLD 100
const int readEndPin = 52; // digital pin as interrupt from break
const int goPin = 49;
const int turnrightPin = 53; // DUE digital pins 51 53 control turning
const int turnleftPin = 51;
const int readActuatorPin = A7; // reads 0 - 3.3v feedback
const int steptoTurn = 400;
const int power = 500;
const int iniMotorPower = 250;
//const float lenghtCar = 1.48;
//const float diameterCar = 0.95;
/**End of Defines**/
class carcore
{
public:
carcore();
void stabilizer(float hangle, float cangle, uint16_t deltaTime); //Function runs PIDs
//void setThrottle(uint16_t val);
void setDirection(uint8_t dir); //Function sets direction
void navigate(int angle); //Function provides movement
void updateMotors(void); //Function updates motors
//float readActuatorPosition(void);
//void readSensors(void) Function from Lawrance
//void checkLR(void);
//void calculatePID(void);
void calculateSteeringAngle(int angle);
//void calculateMotorSteering(void);
//void motorPIDcontrol(void); will be stabilizer function
//void motorObstacleControl(void); First bang bang function for steering
void stopMotors(void);
void actuatorPosition(void);
void steeringControl(void);
void readSensor(void);
//void foobar(void);
unsigned int currentDirection;
private:
PID* xController;
//unsigned int currentDirection;
int sensor;
int state;
int turn;
float actuatorFeedback;
int obstacleLeft;
int obstacleRight;
float delta;
float headingAngle;
float currentAngle;
struct motors
{
Servo left;
unsigned int left_setpoint;
unsigned int left_presetpoint;
unsigned int left_speed;
Servo right;
unsigned int right_setpoint;
unsigned int right_presetpoint;
unsigned int right_speed;
}motors;
struct steeringctl
{
float actuatorActualvoltage;
float actuatorDesiredvoltage;
float VoltageToTravel;
float actuatorWait;
float absVoltageToTravel;
float distanceToTravel;
}steeringctl;
};
#endif
|
Markdown | UTF-8 | 5,837 | 3.5625 | 4 | [] | no_license |
# Interpreter Pattern
## Requirements
Use the interpreter pattern to implement basic arithmetic, e.g. computing the value of `a+b-c`. Specific requirements:
1. Enter the form of the expression, e.g. `a+b+c-d+e`; the letters in the expression must not repeat
2. Then enter the values of `a, b, c, d, e` individually
3. Finally, compute the result, as shown in the figure

## Traditional Solution
1. Write a single method that receives the expression and parses it against the values entered by the user to obtain the result (a sketch of this approach is shown below)
2. Problem analysis: adding new operators such as `*`, `/`, `(` and so on is hard to extend, and letting one method do all the parsing makes the program structure messy and unclear
3. Solution: consider the interpreter pattern, i.e. `expression -> interpreter (there can be several kinds) -> result`
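To make the drawback concrete, a naive version of approach 1 might look like the sketch below. This code is not part of the original example; it assumes single-letter variables and only the `+` and `-` operators. Every new operator or grammar rule would have to be wired into this one method, which is exactly the extensibility problem described above.
```java
import java.util.Map;

public class NaiveCalculator {
    // Evaluates expressions such as "a+b-c" in a single left-to-right pass.
    public static int compute(String expressionStr, Map<String, Integer> var) {
        int result = var.get(String.valueOf(expressionStr.charAt(0)));
        for (int i = 1; i < expressionStr.length(); i += 2) {
            char op = expressionStr.charAt(i);                                 // operator
            int value = var.get(String.valueOf(expressionStr.charAt(i + 1)));  // next variable
            result = (op == '+') ? result + value : result - value;
        }
        return result;
    }
}
```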
## Solving It with the Interpreter Pattern
### Basic Introduction
1. In compiler theory, an arithmetic expression is turned into **tokens** by a **lexer**, and those tokens are then assembled by a **parser** into a **syntax tree**, ultimately forming an abstract syntax tree. Here, **both the lexer and the parser can be viewed as interpreters**
2. Interpreter pattern: given a language (expression), define a representation of its grammar and an interpreter that uses this representation to interpret sentences (expressions) in the language
3. Application scenarios
* A sentence of a language that needs to be interpreted and executed can be represented as an abstract syntax tree
* Some recurring problems can be expressed in a simple language
* A simple grammar needs to be interpreted
4. Other examples include compilers, arithmetic expression evaluation, regular expressions, robots, and so on
### Class Diagram

### Code Implementation
* Expression
```java
public abstract class AbstractExression {
public abstract int interpreter(Map<String, Integer> var);
}
public class VarExpression extends AbstractExression{
private String varKey;
public VarExpression(String varKey) {
super();
this.varKey = varKey;
}
@Override
public int interpreter(Map<String, Integer> var) {
return var.get(varKey);
}
}
public abstract class SymbolExression extends AbstractExression {
protected AbstractExression left;
protected AbstractExression right;
public SymbolExression(AbstractExression left, AbstractExression right) {
super();
this.left = left;
this.right = right;
}
}
public class AddExression extends SymbolExression{
public AddExression(AbstractExression left, AbstractExression right) {
super(left, right);
}
@Override
public int interpreter(Map<String, Integer> var) {
return left.interpreter(var) + right.interpreter(var);
}
}
public class SubExpression extends SymbolExression {
public SubExpression(AbstractExression left, AbstractExression right) {
super(left, right);
}
@Override
public int interpreter(Map<String, Integer> var) {
return left.interpreter(var) - right.interpreter(var);
}
}
```
* Caculator
```java
public class Caculator {
/**
* Computes the value of the expression
* @param expressionStr the expression
* @param var the values of the variables in the expression
* @return the computed result
*/
public int compute(String expressionStr, Map<String, Integer> var) {
char[] expChars = expressionStr.toCharArray();
Stack<AbstractExression> stack = new Stack<>();
// Build the expression tree
for (int i = 0; i < expChars.length; i++) {
char currentExpChar = expChars[i];
AbstractExression leftExression = null;
AbstractExression rightExression = null;
String nextVar = null;
switch (currentExpChar) {
case '+':
leftExression = stack.pop();
nextVar = String.valueOf(expChars[++i]);
rightExression = new VarExpression(nextVar);
stack.push(new AddExression(leftExression, rightExression));
break;
case '-':
leftExression = stack.pop();
nextVar = String.valueOf(expChars[++i]);
rightExression = new VarExpression(nextVar);
stack.push(new SubExpression(leftExression, rightExression));
break;
default:
stack.push(new VarExpression(String.valueOf(currentExpChar)));
break;
}
}
AbstractExression exression = stack.pop();
// Evaluate the expression
return exression.interpreter(var);
}
}
```
* Client
```java
public class Client {
public static void main(String[] args) throws IOException {
String expressionStr = getExpStr();// a+b-c
Map<String, Integer> var = getValue(expressionStr);// {a=20,b=10,c=5}
Caculator caculator = new Caculator();
int result = caculator.compute(expressionStr, var);
System.out.println(String.format("%s=%d", expressionStr, result));
}
// Get the expression from the user
public static String getExpStr() throws IOException {
System.out.print("请输入表达式:");
return (new BufferedReader(new InputStreamReader(System.in))).readLine();
}
// Build the variable-to-value map
public static Map<String, Integer> getValue(String expressionStr) throws IOException {
Map<String, Integer> map = new HashMap<>();
for (char ch : expressionStr.toCharArray()) {
if (ch != '+' && ch != '-') {
if (!map.containsKey(String.valueOf(ch))) {
System.out.print("请输入" + String.valueOf(ch) + "的值:");
String in = (new BufferedReader(new InputStreamReader(System.in))).readLine();
map.put(String.valueOf(ch), Integer.valueOf(in));
}
}
}
return map;
}
}
```
---
## The Interpreter Pattern in Spring

## Notes
1. When a language needs to be interpreted and executed and its sentences can be represented as an abstract syntax tree, consider the interpreter pattern; it gives the program good extensibility
2. Application scenarios: compilers, arithmetic expression evaluation, regular expressions, robots, etc.
3. Possible downsides of the interpreter pattern: it can cause class explosion, its recursive calls make debugging complicated, and efficiency may suffer.
|
C# | UTF-8 | 1,413 | 2.625 | 3 | ["MIT"] | permissive |
/*
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Runtime.Serialization;
namespace HTLib2.Bioinfo
{
public partial class Pdb
{
public class Residue
{
public string resName = "";
public int resSeq = -1;
public Atom[] resAtoms = new Atom[0];
public override string ToString()
{
return string.Format("{0} {1} : {2} atoms", resSeq, resName, resAtoms.Length);
}
}
public Residue[] CollectResidues()
{
List<Residue> residues = new List<Residue>();
residues.Add(new Residue());
residues.Last().resName = atoms[0].resName;
residues.Last().resSeq = atoms[0].resSeq ;
foreach(Atom atom in atoms)
{
if(residues.Last().resSeq != atom.resSeq)
{
residues.Add(new Residue());
residues.Last().resName = atom.resName;
residues.Last().resSeq = atom.resSeq;
}
List<Atom> resAtoms = new List<Atom>(residues.Last().resAtoms);
resAtoms.Add(atom);
residues.Last().resAtoms = resAtoms.ToArray();
}
return residues.ToArray();
}
}
}
*/
|
Python | UTF-8 | 434 | 4.21875 | 4 | [] | no_license |
'''
Accept the number of rows and number of columns from the user and display the pattern below.
input: iRow = 3, iCol = 4
output:
1 1 1 1
2 2 2 2
3 3 3 3
'''
def patternc(iRow, iCol):
for i in range(iRow):
for j in range(iCol):
print(i+1,end=" ");
print();
def main():
irow = int(input("Enter rowsize:"));
icol = int(input("Enter colsize:"));
patternc(irow,icol);
if __name__ == "__main__":
main();
|
Markdown | UTF-8 | 2,962 | 3.703125 | 4 | [] | no_license |
# Execution Context Creation Process
<br>
- [Execution Context Creation Process](#execution-context-creation-process)
  - [Example](#example)
  - [1 Creating the activation object](#1-creating-the-activation-object)
  - [2 Creating the arguments object](#2-creating-the-arguments-object)
  - [3 Creating the scope information](#3-creating-the-scope-information)
  - [4 Creating variables](#4-creating-variables)
  - [5 Binding this](#5-binding-this)
  - [6 Executing the code](#6-executing-the-code)
<br>
## Example
We will walk through the creation process with the example below.
```js
function execute(param1, param2) {
var a = 1, b = 2;
function func() {
return a + b;
}
return param1 + param2 + func();
}
execute(3, 4);
```
## 1 Creating the activation object
<p align="center"><img src="./image/context2.png" width="400"></p>
* What is an activation object?
* When an execution context is created, an **object that holds the various pieces of information needed for execution in that context** is created.
* This object is called the **activation object**.
* Data stored in the activation object
* The activation object holds the parameters and the user-defined variables and objects, and it is accessible from the newly created context.
<br>
## 2 Creating the arguments object
<p align="center"><img src="./image/context3.png" width="400"></p>
* The activation object created in the previous step references this `arguments` object through its `arguments` property.
<br>
## 3 Creating the scope information
<p align="center"><img src="./image/context4.png" width="400"></p>
* The scope information describing the valid range of the current context is created.
* When a particular variable has to be resolved in the current context, this linked list (the scope information) is used.
* **This list is called the scope chain (`[[scope]]`).**
<br>
## 4 Creating variables
<p align="center"><img src="./image/context5.png" width="400"></p>
* The local variables used in the current context are created.
* Variable object vs. activation object
* Depending on the source, it is sometimes called the activation object and sometimes the variable object.
* `variable object === activation object`
<br>
## 5 Binding this
<p align="center"><img src="./image/context6.png" width="400"></p>
* The value used by the `this` keyword is assigned.
* See [here](https://github.com/binghe819/TIL/blob/master/JS/JS%20%EA%B8%B0%EC%B4%88%20%EC%A0%95%EB%A6%AC/%ED%95%A8%EC%88%98/%ED%95%A8%EC%88%98%20%ED%98%B8%EC%B6%9C%EA%B3%BC%20this.md) for the details
* If there is no object for `this` to reference, it references the global object.
<br>
## 6 Executing the code
Once an execution context has been created in this way and the variable object has been built, the statements in the code are executed,
for example variable initialization and arithmetic, calls to other functions, and so on.
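A small runnable sketch (added here for illustration; it is not part of the original article) that makes steps 2, 4 and 5 observable with the same `execute` function:
```js
function execute(param1, param2) {
  console.log(a);                   // undefined — `a` already exists (step 4: variable creation) but has no value yet
  console.log(arguments.length);    // 2 — the arguments object created in step 2
  console.log(this === globalThis); // true in non-strict mode: no receiver, so `this` is the global object (step 5)
  var a = 1, b = 2;
  function func() {
    return a + b;                   // `a` and `b` are found through the scope chain built in step 3
  }
  return param1 + param2 + func();
}
console.log(execute(3, 4));         // 10
```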
> **The global execution context has no `arguments` object, and its scope chain contains only the global object.**
|
Java | UTF-8 | 2,282 | 2.28125 | 2 | [] | no_license |
package glory.controller;
import glory.entity.Chat;
import glory.entity.User;
import glory.service.ChatService;
import glory.service.impl.ChatServiceImpl;
import glory.util.ResponseJson;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
@Controller
@RequestMapping("Chat")
public class ChatController {
@Autowired(required = true)
private ChatService chatService = new ChatServiceImpl();
/**
* Chat room page
* @param request
* @return string
*/
@RequestMapping(value = "Chat",method = RequestMethod.GET)
public String create(HttpServletRequest request){
return "/chat/chat";
}
/**
* Ajax request: fetch new messages
* @param lastMsgTime
* @return ResponseJson
*/
@RequestMapping(value = "getMessage",method = RequestMethod.POST)
@ResponseBody
public ResponseJson getMessage(@RequestParam("lastMsgTime") String lastMsgTime){
List<Chat> chats = chatService.getNewMessage(lastMsgTime);
ResponseJson responseJson;
if (chats != null) {
            responseJson = new ResponseJson(0, chats, "success");
        } else {
            responseJson = new ResponseJson(104, null, "failure");
}
return responseJson;
}
/**
* Ajax request: send a message
* @param message
* @param user_id
* @return ResponseJson
*/
@RequestMapping(value = "Send",method = RequestMethod.POST)
@ResponseBody
public ResponseJson saveMessage(@RequestParam("message") String message, @RequestParam("user_id") int user_id){
User user = new User();
user.setId(user_id);
Chat chat = new Chat();
chat.setContent(message);
chat.setChatByUser(user);
chatService.savaMessage(chat);
ResponseJson responseJson;
responseJson = new ResponseJson(0, null, "success");
return responseJson;
}
}
|
C | UTF-8 | 257 | 3.484375 | 3 | [] | no_license |
struct ListNode* reverseList(struct ListNode* head) {
if (head == NULL || head->next == NULL)
return head;
struct ListNode *ptr = head;
head = reverseList(ptr->next);
ptr->next->next = ptr;
ptr->next = NULL;
return head;
}
|
Java | UTF-8 | 10,694 | 1.851563 | 2 | [] | no_license |
package com.alokomkar.rxmoviedb.moviedetails;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.widget.NestedScrollView;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SnapHelper;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.alokomkar.rxmoviedb.MovieApplication;
import com.alokomkar.rxmoviedb.NavigationListener;
import com.alokomkar.rxmoviedb.R;
import com.alokomkar.rxmoviedb.base.Constants;
import com.alokomkar.rxmoviedb.moviedetails.model.MovieDetailsResponse;
import com.alokomkar.rxmoviedb.moviedetails.model.Result;
import com.alokomkar.rxmoviedb.moviedetails.model.ReviewResult;
import com.alokomkar.rxmoviedb.movielist.Movie;
import com.alokomkar.rxmoviedb.utils.GravitySnapHelper;
import com.alokomkar.rxmoviedb.utils.ItemOffsetDecoration;
import com.alokomkar.rxmoviedb.youtube.FragmentDemoActivity;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
import static android.view.View.GONE;
import static android.view.View.VISIBLE;
/**
* Created by Alok on 16/06/17.
*/
public class MovieDetailsLandscapeFragment extends Fragment implements MovieDetailsContract.View,TrailerAdapter.OnTrailerClick,View.OnClickListener {
@BindView(R.id.title)
TextView title;
@BindView(R.id.movieLimit)
TextView movieLimit;
@BindView(R.id.starCastText)
TextView starCastText;
@BindView(R.id.starCast)
TextView starCast;
@BindView(R.id.description)
TextView description;
@BindView(R.id.trailerText)
TextView trailerText;
@BindView(R.id.trailerRecyclerView)
RecyclerView trailerRecyclerView;
@BindView(R.id.rootCardView)
RelativeLayout rootCardView;
@BindView(R.id.scrolling_container)
NestedScrollView scrollingContainer;
@BindView(R.id.rating)
TextView mRating;
@BindView(R.id.summaryText)
TextView mSummaryText;
@BindView(R.id.reviews_label)
TextView reviews;
@BindView(R.id.reviews)
LinearLayout reviewsContainer;
@BindView(R.id.progressBar)
ProgressBar mProgressBar;
Unbinder unbinder;
private int movieId;
private MovieDetailsPresenter movieDetailsPresenter;
private TrailerAdapter mTrailerAdapter;
private String TAG = MovieDetailsLandscapeFragment.class.getSimpleName();
private NavigationListener navigationListener;
private Movie movie;
private MovieDetailsResponse movieDetailsResponse;
private List<Result> movieTrailers;
private List<ReviewResult> movieReviews;
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if( savedInstanceState != null ) {
movie = savedInstanceState.getParcelable(Constants.MOVIE);
movieId = movie.getId();
movieTrailers = savedInstanceState.getParcelableArrayList(Constants.MOVIE_TRAILERS);
movieDetailsResponse = savedInstanceState.getParcelable(Constants.MOVIE_DETAILS);
movieReviews = savedInstanceState.getParcelableArrayList(Constants.MOVIE_REVIEWS);
}
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View fragmentView = inflater.inflate(R.layout.fragment_movie_details, container, false);
unbinder = ButterKnife.bind(this, fragmentView);
if( savedInstanceState == null ) {
movie = getArguments().getParcelable(Constants.MOVIE);
movieId = movie.getId();
movieDetailsPresenter = new MovieDetailsPresenter(this, MovieApplication.getInstance().getNetModule().getRetrofit(), movieId);
movieDetailsPresenter.start();
}
else {
movie = savedInstanceState.getParcelable(Constants.MOVIE);
movieId = movie.getId();
movieTrailers = savedInstanceState.getParcelableArrayList(Constants.MOVIE_TRAILERS);
movieDetailsResponse = savedInstanceState.getParcelable(Constants.MOVIE_DETAILS);
movieReviews = savedInstanceState.getParcelableArrayList(Constants.MOVIE_REVIEWS);
setMovieDetails(movieDetailsResponse);
setTrailers(movieTrailers);
setReviews(movieReviews);
}
return fragmentView;
}
@Override
public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
@Override
public void setPresenter(Object presenter) {
}
@Override
public void showProgress() {
if( mProgressBar != null )
mProgressBar.setVisibility(View.VISIBLE);
}
@Override
public void hideProgress() {
if( mProgressBar != null )
mProgressBar.setVisibility(View.GONE);
}
@Override
public void showMessage() {
}
@Override
public void setMovieDetails(MovieDetailsResponse details) {
if( title == null ) {
return;
}
if( details != null ) {
movieDetailsResponse = details;
title.setText(details.getOriginalTitle());
movieLimit.setText("Release Date: " + details.getReleaseDate());
starCastText.setText("Language: " + details.getOriginalLanguage());
description.setText(details.getOverview());
mRating.setText(String.format(getContext().getString(R.string.rating), String.valueOf(details.getVoteAverage())));
}
}
@Override
public void setTrailers(List<Result> trailers) {
if( trailerRecyclerView == null ) {
return;
}
if( trailers != null ) {
if (trailers.size() == 0) {
trailerRecyclerView.setVisibility(GONE);
trailerText.setVisibility(GONE);
} else {
movieTrailers = trailers;
trailerRecyclerView.setVisibility(VISIBLE);
trailerText.setVisibility(VISIBLE);
if( mTrailerAdapter == null ) {
LinearLayoutManager linearLayoutManager = new LinearLayoutManager(getContext());
linearLayoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
trailerRecyclerView.setLayoutManager(linearLayoutManager);
ItemOffsetDecoration itemDecoration = new ItemOffsetDecoration(getContext(), R.dimen.item_offset);
trailerRecyclerView.addItemDecoration(itemDecoration);
SnapHelper snapHelper = new GravitySnapHelper(Gravity.START);
trailerRecyclerView.setOnFlingListener(null);
snapHelper.attachToRecyclerView(trailerRecyclerView);
}
mTrailerAdapter = new TrailerAdapter(getContext(), trailers, this);
trailerRecyclerView.setAdapter(mTrailerAdapter);
if( navigationListener != null ) {
navigationListener.setCurrentTrailerId(trailers.get(0).getKey());
}
}
}
else {
trailerRecyclerView.setVisibility(GONE);
trailerText.setVisibility(GONE);
}
}
@Override
public void setReviews(List<ReviewResult> reviews) {
if( reviewsContainer == null ) {
return;
}
if( reviews != null ) {
if ( reviews.size() == 0 ) {
this.reviews.setVisibility(View.GONE);
reviewsContainer.setVisibility(View.GONE);
}
else {
movieReviews = reviews;
this.reviews.setVisibility(View.VISIBLE);
reviewsContainer.setVisibility(View.VISIBLE);
reviewsContainer.removeAllViews();
LayoutInflater inflater = getActivity().getLayoutInflater();
for (ReviewResult review : reviews)
{
ViewGroup reviewContainer = (ViewGroup) inflater.inflate(R.layout.review, reviewsContainer, false);
TextView reviewAuthor = ButterKnife.findById(reviewContainer, R.id.review_author);
TextView reviewContent = ButterKnife.findById(reviewContainer, R.id.review_content);
reviewAuthor.setText(review.getAuthor());
reviewContent.setText(review.getContent());
reviewContent.setOnClickListener(this);
reviewsContainer.addView(reviewContainer);
}
}
}
else {
this.reviews.setVisibility(View.GONE);
reviewsContainer.setVisibility(View.GONE);
}
}
@Override
public void onTrailerClick(int position, List<Result> mTrailerResults) {
Intent intent = new Intent(getContext(), FragmentDemoActivity.class);
intent.putExtra("key",mTrailerResults.get(position).getKey());
getContext().startActivity(intent);
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if( context instanceof NavigationListener ) {
navigationListener = (NavigationListener) context;
}
}
@Override
public void onDetach() {
navigationListener = null;
super.onDetach();
}
@Override
public void onClick(View v) {
switch (v.getId())
{
case R.id.review_content:
onReviewClick((TextView) v);
break;
}
}
private void onReviewClick(TextView view) {
if (view.getMaxLines() == 5)
{
view.setMaxLines(500);
} else
{
view.setMaxLines(5);
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putParcelable(Constants.MOVIE_DETAILS, movieDetailsResponse);
outState.putParcelableArrayList(Constants.MOVIE_TRAILERS, (ArrayList<? extends Parcelable>) movieTrailers);
outState.putParcelable(Constants.MOVIE, movie);
outState.putParcelableArrayList(Constants.MOVIE_REVIEWS, (ArrayList<? extends Parcelable>) movieReviews);
}
}
|
Shell | UTF-8 | 2,539 | 3.15625 | 3 | ["MIT"] | permissive |
#!/usr/bin/env bash
set -x
set -e
set -o pipefail
sudo apt-get install -y axel time
echo "Downloading misc tools"
sudo rm -f /etc/apt/sources.list.d/cassandra.sources.list
echo "deb http://debian.datastax.com/community stable main" | sudo tee -a /etc/apt/sources.list.d/cassandra.sources.list
curl -L http://debian.datastax.com/debian/repo_key | sudo apt-key add -
sudo apt-get update > aptlog &
APT_GET_UPDATE_PID=$!
axel http://d3kbcqa49mib13.cloudfront.net/spark-1.3.1-bin-hadoop1.tgz > sparkdl &
SPARK_DL_PID=$!
axel http://mirrors.ibiblio.org/apache/kafka/0.8.1.1/kafka_2.9.2-0.8.1.1.tgz > kafkadl &
KAFKA_DL_PID=$!
axel http://mirror.cogentco.com/pub/apache/flume/1.5.0.1/apache-flume-1.5.0.1-bin.tar.gz > flumedl &
FLUME_DL_PID=$!
wait $SPARK_DL_PID
sudo mkdir -p /etc/apt/sources.list.d/
echo "install urllib3"
sudo pip install urllib3
wait $SPARK_DL_PID || echo "Spark DL finished early"
tar -xf spark-1.3.1-bin-hadoop1.tgz
wait $APT_GET_UPDATE_PID
echo "Installing protobuf"
sudo apt-get install protobuf-compiler
echo $?
# Set up cassandra
echo "Waiting for apt-get update to finish"
wait $APT_GET_UPDATE_PID || echo "apt-get update finished early"
echo "Setting up dsc (cassandra)"
sleep 1;
#sudo apt-get -y --force-yes remove cassandra cassandra-tools
#sudo rm -rf /etc/security/limits.d/cassandra.conf || echo "No cassandra security conf"
#yes | sudo apt-get -y --force-yes install dsc21 > dscinstall.log
#yes | sudo apt-get -y --force-yes install cassandra-tools > ctoolsinstall.log
echo "Starting cassandra"
sudo /etc/init.d/cassandra start
echo $?
echo "set up hive directories"
export IAM=`whoami`
sudo mkdir -p /user/hive && sudo chown -R $IAM /user/hive
echo "done with setup"
# Set up kafka
echo "Setting up kafka"
wait $KAFKA_DL_PID || echo "Kafka DL finished early"
tar -xzf kafka_2.9.2-0.8.1.1.tgz
cd kafka_2.9.2-0.8.1.1
echo "Starting zookeeper"
./bin/zookeeper-server-start.sh config/zookeeper.properties &
echo "Starting kafka"
sleep 5
./bin/kafka-server-start.sh config/server.properties &
sleep 5
# publish a pandas topic to kafka
./bin/kafka-topics.sh --zookeeper localhost:2181 --topic pandas --partition 1 --replication-factor 1 --create
./bin/kafka-topics.sh --zookeeper localhost:2181 --topic logs --partition 1 --replication-factor 1 --create
cd ..
# set up flume
wait $FLUME_DL_PID || echo "Flume DL finished early"
echo "Setting up flume"
tar -xf apache-flume-1.5.0.1-bin.tar.gz
cd apache-flume-1.5.0.1-bin
./bin/flume-ng agent -n panda --conf-file ../files/flumeconf.cfg &
disown $!
cd ..
echo $?
|
SQL | UTF-8 | 2,506 | 3.203125 | 3 | [] | no_license |
-- phpMyAdmin SQL Dump
-- version 4.8.3
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Apr 15, 2019 at 02:10
-- Server version: 5.7.21-log
-- PHP version: 7.2.9
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `clientes`
--
-- --------------------------------------------------------
--
-- Table structure for table `cliente`
--
CREATE TABLE `cliente` (
`id` int(11) NOT NULL,
`nome` varchar(255) NOT NULL,
`usuario` varchar(64) NOT NULL,
`senha` varchar(255) NOT NULL,
`cpf` varchar(14) NOT NULL,
`rg` varchar(13) NOT NULL,
`data_nasc` date NOT NULL,
`email` varchar(255) NOT NULL,
`tel1` varchar(15) NOT NULL,
`tel2` varchar(15) NOT NULL,
`tel3` varchar(15) NOT NULL,
`tel4` varchar(15) NOT NULL,
`nivel` int(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `cliente`
--
INSERT INTO `cliente` (`id`, `nome`, `usuario`, `senha`, `cpf`, `rg`, `data_nasc`, `email`, `tel1`, `tel2`, `tel3`, `tel4`, `nivel`) VALUES
(1, 'Hudson Libério Leão', 'hudsonliberio', '202cb962ac59075b964b07152d234b70', '111.111.111-10', 'MG-11.111.110', '1995-08-28', 'hudsonleaoti@gmail.com', '(37) 99985-5488', '', '', '(37) 3234-2380', 3),
(2, 'Usuário Administrador', 'admin', '202cb962ac59075b964b07152d234b70', '111.111.111-11', 'MG-11.111.111', '1995-08-28', 'hudson@gmail.com', '(37) 99999-9999', '', '', '', 2),
(3, 'Usuário Super Admin', 'superadmin', '202cb962ac59075b964b07152d234b70', '111.111.111-12', 'MG-11.111.112', '1994-08-11', 'superadmin@gmail.com', '(37) 99999-9992', '', '', '', 3),
(4, 'Usuário Normal', 'normal', '202cb962ac59075b964b07152d234b70', '111.111.111-12', 'MG-11.111.113', '1995-08-28', 'normal@gmail.com', '(99) 99999-9993', '', '', '', 1);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `cliente`
--
ALTER TABLE `cliente`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `cliente`
--
ALTER TABLE `cliente`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
Java | UTF-8 | 774 | 1.945313 | 2 | [] | no_license |
/**
*
*/
package com.jv.web.controller;
import java.util.Locale;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import com.jv.web.BaseController;
/**
* @author jewelvary
*
*/
@Controller
@RequestMapping("/components")
public class ComponentsController extends BaseController{
// @RequestMapping("")
// public String root(Locale locale) {
// return "redirect:/index.html";
// }
/** Home page.
* @throws Exception */
@RequestMapping("/base/{p}.html")
public String index(@PathVariable String p) throws Exception {
logger.info(p);
return "admin/components/"+p;
}
}
|
C# | UTF-8 | 2,204 | 2.53125 | 3 | [] | no_license |
using System;
using System.Collections.Generic;
using Arash.Membership.Model;
using Arash.Utility.Common;
using Paradiso.Infrastructure.Data;
using Arash.Infrastructure.Data;
namespace Arash.Core.Manager
{
public class MemberManager : IMemberManager
{
private IRepository<Member> _repository;
public MemberManager(IRepository<Member> repository)
{
_repository = repository;
}
public Member MakeInstance()
{
return _repository.NewEntityInstance();
}
public void Add(Member entity)
{
_repository.Add(entity);
_repository.Save();
}
public void Edit(Member entity)
{
_repository.Save();
}
public void Remove(Member entity)
{
_repository.Remove(entity);
_repository.Save();
}
public int GetCount(Func<Member, bool> predicate = null)
{
return predicate == null
? _repository.GetCount(p => true)
: _repository.GetCount(predicate);
}
public Member Get(Func<Member, bool> predicate = null)
{
return predicate == null
? _repository.Get(p => true)
: _repository.Get(predicate);
}
public IEnumerable<Member> GetAll(Func<Member, bool> predicate = null, int start = 0, int count = 6)
{
return predicate == null
? _repository.GetAll(p => true)
: start == 0
? _repository.GetAll(predicate)
: _repository.GetAll(predicate, start, count);
}
public Member Authenticate(string username, string password)
{
var entity = _repository.Get(p => p.Username == username);
if (entity == null)
return null;
var passwordHash = PasswordGenerator.GetHashPassword(password);
            // Compare the computed hash against the stored hash (assumes Member exposes a Password property holding the hashed password).
            if (!string.Equals(passwordHash, entity.Password))
return null;
return entity;
}
}
}
|
Java | UTF-8 | 5,338 | 2.0625 | 2 | [] | no_license |
package com.dreamdigitizers.megamelodies.views.classes.fragments.screens;
import android.support.v4.media.MediaBrowserCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.dreamdigitizers.androidbaselibrary.views.classes.fragments.screens.ScreenBase;
import com.dreamdigitizers.megamelodies.R;
import com.dreamdigitizers.megamelodies.models.Playlist;
import com.dreamdigitizers.megamelodies.presenters.classes.PresenterFactory;
import com.dreamdigitizers.megamelodies.presenters.interfaces.IPresenterWrapperPlaylists;
import com.dreamdigitizers.megamelodies.views.classes.services.support.MediaMetadataBuilder;
import com.dreamdigitizers.megamelodies.views.interfaces.IViewWrapperPlaylists;
public class ScreenWrapperPlaylists extends ScreenBase<IPresenterWrapperPlaylists> implements IViewWrapperPlaylists, ScreenPlaylists.IOnPlaylistSelected {
private FrameLayout mPlaceHolderChildScreen;
private ScreenPlaylists mScreenPlaylists;
private ScreenPlaylist mScreenPlaylist;
private ScreenBase mLastShownChildScreen;
public ScreenWrapperPlaylists() {
this.mScreenPlaylists = new ScreenPlaylists();
this.mScreenPlaylists.setOnPlaylistSelectedListener(this);
this.mScreenPlaylist = new ScreenPlaylist();
this.mLastShownChildScreen = this.mScreenPlaylists;
}
@Override
public boolean onBackPressed() {
if (!this.mScreenPlaylists.isHidden()) {
return this.mScreenPlaylists.onBackPressed();
} else if (!this.mScreenPlaylist.isHidden()) {
boolean isHandled = this.mScreenPlaylist.onBackPressed();
if (isHandled) {
return true;
}
this.showScreenPlaylists();
return true;
}
return false;
}
@Override
public void onShow() {
this.getChildFragmentManager()
.beginTransaction()
.show(this.mLastShownChildScreen)
.commit();
this.mLastShownChildScreen.onShow();
}
@Override
public void onHide() {
this.getChildFragmentManager()
.beginTransaction()
.hide(this.mLastShownChildScreen)
.commit();
this.mLastShownChildScreen.onHide();
}
@Override
protected boolean shouldSetThisScreenAsCurrentScreen() {
return false;
}
@Override
public int getScreenId() {
return 0;
}
@Override
protected IPresenterWrapperPlaylists createPresenter() {
return (IPresenterWrapperPlaylists) PresenterFactory.createPresenter(IPresenterWrapperPlaylists.class, this);
}
@Override
protected View loadView(LayoutInflater pInflater, ViewGroup pContainer) {
View rootView = pInflater.inflate(R.layout.screen__wrapper_playlists, pContainer, false);
return rootView;
}
@Override
protected void retrieveScreenItems(View pView) {
this.mPlaceHolderChildScreen = (FrameLayout) pView.findViewById(R.id.placeHolderChildScreen);
}
@Override
protected void mapInformationToScreenItems(View pView) {
this.getChildFragmentManager()
.beginTransaction()
.add(R.id.placeHolderChildScreen, this.mScreenPlaylists)
.add(R.id.placeHolderChildScreen, this.mScreenPlaylist)
.hide(this.mScreenPlaylists)
.hide(this.mScreenPlaylist)
.commit();
}
@Override
protected int getTitle() {
return 0;
}
@Override
public void onPlaylistSelected(MediaBrowserCompat.MediaItem pMediaItem) {
Playlist playlist = (Playlist) pMediaItem.getDescription().getExtras().getSerializable(MediaMetadataBuilder.BUNDLE_KEY__PLAYLIST);
this.mScreenPlaylist.setPlaylist(playlist);
this.showScreenPlaylist();
}
private void showScreenPlaylists() {
if (!this.mScreenPlaylist.isHidden()) {
this.mScreenPlaylist.onHide();
this.mScreenPlaylists.onShow();
this.getChildFragmentManager()
.beginTransaction()
.setCustomAnimations(
com.dreamdigitizers.androidbaselibrary.R.anim.slide_in_from_left,
com.dreamdigitizers.androidbaselibrary.R.anim.slide_out_to_right)
.hide(this.mScreenPlaylist)
.show(this.mScreenPlaylists)
.commit();
this.mLastShownChildScreen = this.mScreenPlaylists;
}
}
private void showScreenPlaylist() {
if (this.mScreenPlaylist.isHidden()) {
this.mScreenPlaylists.onHide();
this.mScreenPlaylist.onShow();
this.getChildFragmentManager()
.beginTransaction()
.setCustomAnimations(
com.dreamdigitizers.androidbaselibrary.R.anim.slide_in_from_right,
com.dreamdigitizers.androidbaselibrary.R.anim.slide_out_to_left)
.hide(this.mScreenPlaylists)
.show(this.mScreenPlaylist)
.commit();
this.mLastShownChildScreen = this.mScreenPlaylist;
}
}
}
|
Go | UTF-8 | 281 | 2.875 | 3 | [] | no_license |
package core
import "strings"
// Returns the arguments that do not start with the "-" symbol
func excludeOptions(args []string) []string {
ret := make([]string, 0, len(args))
for i := range args {
if !strings.HasPrefix(args[i], "-") {
ret = append(ret, args[i])
}
}
return ret
}
|
PHP | UTF-8 | 2,632 | 2.53125 | 3 | [] | no_license |
<?php
namespace Tests\Feature;
use Task_Manager\User;
use Tests\TestCase;
use Illuminate\Support\Facades\Hash;
class UserTest extends TestCase
{
protected $user;
protected $password;
protected $userData;
public function setUp(): void
{
parent::setUp();
$this->password = 'Very secret';
$this->userData = [
'name' => 'John Doe',
'email' => 'example@mail.test',
'password' => Hash::make($this->password)
];
$this->user = factory(User::class)->create($this->userData);
}
public function testUserRegistrationRoute()
{
$response = $this->get(route('register'));
$response->assertOk();
}
public function testUserLoginRoute()
{
$response = $this->get(route('login'));
$response->assertOk();
}
public function testUserLogin()
{
$response = $this->post(route('login'), array_merge($this->userData, ['password' => $this->password]));
$response->assertRedirect(route('home'));
}
public function testUserLogout()
{
$response = $this->actingAs($this->user)->post('logout');
$response->assertRedirect('/');
}
public function testUserShow()
{
$response = $this->actingAs($this->user)->get(route('users.show', $this->user));
$response->assertOk();
}
public function testUserRegistration()
{
$userData = [
'name' => 'Jane Doe',
'email' => 'jane@example.test',
'password' => '123456789',
'password_confirmation' => '123456789'
];
$response = $this->post(route('register'), $userData);
$response->assertRedirect(route('home'));
$this->assertDatabaseHas('users', [
'name' => $userData['name'],
'email' => $userData['email']]);
}
public function testUserDeleting()
{
$response = $this->actingAs($this->user)->delete(route('users.destroy', $this->user));
$response->assertRedirect(route('welcome'));
$this->assertDatabaseHas('users', [
'name' => $this->user['name'], 'email' => $this->user['email']
]);
}
public function testUserUpdate()
{
$newUserData = ['name' => 'Jane Doe', 'email' => 'jane@example.test'];
$response = $this->actingAs($this->user)->patch(route('users.update', $this->user), $newUserData);
$response->assertRedirect(route('users.show', $this->user));
$this->assertDatabaseHas('users', [
'name' => $newUserData['name'],
'email' => $newUserData['email']
]);
}
}
|
JavaScript | UTF-8 | 1,253 | 3.0625 | 3 | [] | no_license |
function setup(){
createCanvas(400,400);
}
function draw(){
if(mouseIsPressed==true){
background(186, 202, 245,1);
let red = color(255,0,0);
let green = color(0,255,0);
let blue = color(0,0,255);
let black = color(0,0,0);
let white = color(255,255,255);
let yellow = color(255,255,0);
let pink = color(255,0,255);
let cyan = color(0,255,255);
stroke(mouseX,mouseX,mouseY,mouseY);
fill(black);
rect(mouseX,mouseY,mouseX,mouseY);
fill(red);
rect(mouseY,mouseX,mouseY,mouseX)
fill(blue);
rect((mouseX/8),(mouseY/6),(mouseX/4),(mouseX*2))
fill(yellow)
rect((mouseY*3),(mouseY/8),(mouseY/-10),(mouseY/6))
fill(pink)
rect((mouseX*2),(mouseX*-2),(mouseX/3),(mouseX*-2))
fill(cyan)
rect((mouseX*(1/2)),250,(mouseX*(1/5)),100)
fill(yellow)
rect((mouseY*3),mouseY,mouseY,(mouseY/2))
fill(black)
rect(250,250,25,25)
fill(green)
rect((mouseX+100),(mouseX+100),((-1*mouseX)*(mouseY/2)/30),13)
fill(mouseX-100,mouseX-100,mouseY-100)
rect((mouseX-100),(mouseX+200),(mouseX/4),(mouseY*6));
x=400
y=0
rect(mouseX-25,mouseY-200,(mouseY*3),mouseX+0)
}
}
// // for (i<10;i++){
// rect(0,30,20,80);
// rotate(PI/10);
|
Markdown | UTF-8 | 2,774 | 3.640625 | 4 | ["BSD-3-Clause"] | permissive |
# "Smart Filter" Algorithm
The "Smart Filter" dialog implements a partial assignments algorithm for finding possible filters that can be applied in order to find "interesting" variables.
Informally, the function tries to find variable assignments that cause all other variables to either become don't-care variables or get a fixed assignment.
The intuition behind the algorithm is that such assignments find "important" variables that determine all the other variables.
The algorithm goes over each possible assignment for each variable, and checks if it determines all other variables as previously described. If such an assignment is found, the algorithm tries to find more such variables recursively for each assignment of the variable that was found. Otherwise, the algorithm stops, and doesn't try pairs, triplets or more variables. The algorithm gets a `depth` parameter to decide the maximum recursion depth.
The result of the algorithm is a set of assignments that, together, cover all the possible assignments. The result depends on the variable order, and therefore the dialog allows changing the variable order and re-running the algorithm.
Here is the pseudo-code of the algorithm implemented in `PartialAssignment::getSmallestPartialAssignments`:
```py
def smartFilter(BDD successors, int depth):
"""
The resulting "PartialAssignment" object can be seen as a tree. Each node in the tree is a possible assignment.
"""
# If we got a full assignment, we're finished
if isFullAssignment(successors):
return PartialAssignment(COMPLETE, True)
# Else, if we reached depth 0 then the assignment is incomplete
if depth == 0:
return PartialAssignment(INCOMPLETE)
# If there is no single determining variable, we finish this branch of the search
var = findDeterminingVar(successors)
if not var:
return PartialAssignment(INCOMPLETE)
result = PartialAssignment()
for val in possibleValues(var):
if isFullAssignment(successors && var=val):
# var=val determines everything
result.addChild(COMPLETE, var=val)
else:
# The current value doesn't determine, look for other determining variables recursively
child = smartFilter(successors && var=val, depth - 1)
child.assignment = (var=val)
result.addChild(child.isComplete, child)
return result
# Helper functions
def isFullAssignment(successors):
# Return true if all variables are fixed/don't care
def findDeterminingVar(BDD successors):
for var in allVars:
for val in possibleValues(var):
if isFullAssignment(successors && var=val):
return var
# There's no single determining variable
return None
```
|
Swift | UTF-8 | 1,636 | 3.375 | 3 | [] | no_license |
//
// Created by Erik Little on 4/5/18.
//
import Foundation
import Kit
/// The point of a game of Builders is for players to construct the best hotel in Pottersville.
///
/// This represents the hotel that the player is building.
///
/// A `Hotel` consists of a number of floors. Each floor must be built from a certain number of cards (TODO: decide how many cards a floor needs).
public struct Hotel {
/// The number of floors that have been constructed.
public private(set) var floorsBuilt = 0
// TODO The rules should determine the criteria for a floor being built. So this should receive a context.
/// Calculates whether or not this player has built any new floors.
///
/// This removes cards from `fromPlayedCards` that were used to build a floor.
internal mutating func calculateNewFloors(fromPlayedCards cards: inout BuildersHand) {
// TODO do we need to check for workers? Or is that guarded during play and accident?
var metal = false
var wiring = false
var insulation = false
var glass = false
for material in cards.materials {
switch material.blockType {
case .insulation:
insulation = true
case .glass:
glass = true
case .metal:
metal = true
case .wiring:
wiring = true
case .wood:
// TODO wood and other score boosters
continue
}
}
// TODO nail down what is required
if metal && insulation && glass && wiring {
floorsBuilt += 1
cards.removeAll()
}
}
}
|
PHP | UTF-8 | 3,224 | 2.5625 | 3 | [] | no_license |
<?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity(repositoryClass="App\Repository\PayementRepository")
*/
class Payement
{
/**
* @ORM\Id()
* @ORM\GeneratedValue()
* @ORM\Column(type="integer")
*/
private $id;
/**
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $name;
/**
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $familyName;
/**
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $city;
/**
* @ORM\Column(type="integer", nullable=true)
*/
private $codePostal;
/**
* @ORM\Column(type="integer", nullable=true)
*/
private $creditCardNumber;
/**
* @ORM\Column(type="integer", nullable=true)
*/
private $codeSecurity;
/**
* @ORM\Column(type="datetime", nullable=true)
*/
private $cbExpire;
/**
* @ORM\Column(type="boolean", nullable=true)
*/
private $payementPaid;
/**
* @ORM\Column(type="float")
*/
private $totalPrice;
public function getId(): ?int
{
return $this->id;
}
public function getName(): ?string
{
return $this->name;
}
public function setName(string $name): self
{
$this->name = $name;
return $this;
}
public function getFamilyName(): ?string
{
return $this->familyName;
}
public function setFamilyName(string $familyName): self
{
$this->familyName = $familyName;
return $this;
}
public function getCity(): ?string
{
return $this->city;
}
public function setCity(string $city): self
{
$this->city = $city;
return $this;
}
public function getCodePostal(): ?int
{
return $this->codePostal;
}
public function setCodePostal(int $codePostal): self
{
$this->codePostal = $codePostal;
return $this;
}
public function getCreditCardNumber(): ?int
{
return $this->creditCardNumber;
}
public function setCreditCardNumber(int $creditCardNumber): self
{
$this->creditCardNumber = $creditCardNumber;
return $this;
}
public function getCodeSecurity(): ?int
{
return $this->codeSecurity;
}
public function setCodeSecurity(int $codeSecurity): self
{
$this->codeSecurity = $codeSecurity;
return $this;
}
public function getCbExpire(): ?\DateTimeInterface
{
return $this->cbExpire;
}
public function setCbExpire(\DateTimeInterface $cbExpire): self
{
$this->cbExpire = $cbExpire;
return $this;
}
public function getPayementPaid(): ?bool
{
return $this->payementPaid;
}
public function setPayementPaid(bool $payementPaid): self
{
$this->payementPaid = $payementPaid;
return $this;
}
public function getTotalPrice(): ?float
{
return $this->totalPrice;
}
public function setTotalPrice(float $totalPrice): self
{
$this->totalPrice = $totalPrice;
return $this;
}
}
|
C# | UTF-8 | 1,007 | 2.625 | 3 | ["MIT"] | permissive |
/* Copyright (c) 2017 ExT (V.Sigalkin) */
using System;
namespace extOSC.Core.Packers
{
public class OSCPackerFloat : OSCPacker<float>
{
#region Public Methods
public override OSCValueType GetPackerType()
{
return OSCValueType.Float;
}
#endregion
#region Protected Methods
protected override float BytesToValue(byte[] bytes, ref int start)
{
const int size = sizeof(float);
var data = new byte[size];
for (var i = 0; i < size; i++)
{
data[i] = bytes[start];
start++;
}
return BitConverter.ToSingle(BitConverter.IsLittleEndian ? ReverseBytes(data) : data, 0);
}
protected override byte[] ValueToBytes(float value)
{
var bytes = BitConverter.GetBytes(value);
return BitConverter.IsLittleEndian ? ReverseBytes(bytes) : bytes;
}
#endregion
}
}
|
Python | UTF-8 | 1,601 | 3.25 | 3 | [] | no_license |
import collections
import numpy as np
from typing import List
# Runtime: 32 ms, faster than 93.80% of Python3 online submissions for Find And Replace in String.
# Memory Usage: 14.1 MB, less than 5.06% of Python3 online submissions for Find And Replace in String.
class Solution:
def findReplaceString(self, S: str, indexes: List[int], sources: List[str], targets: List[str]) -> str:
res = list(sorted([[i, x, y]
for i, x, y in zip(indexes, sources, targets)]))
#print(res)
differences = [len(target) - len(src) for _, src, target in res]
offset_sum = 0
for entry, z in zip(res, differences):
i, x, y = entry
new_i = i + offset_sum
#Find: 93.8%
findRes = S.find(x, new_i, new_i + len(x))
#String slice: 21.69%
#findRes = S[new_i: new_i+ len(x)] == x
if findRes != -1:  # find() returns -1 when there is no match; a bare truthiness check would wrongly skip a match at index 0
offset_sum += z
S = S[:new_i] + y + S[new_i + len(x):]
#print(S)
return S
# for i, x, y, z in zip(indexes, sources, targets, differences):
# new_i = i + offset_sum
# findRes = S.find(x, new_i)
# if findRes == new_i:
# offset_sum += z
# S = S[:new_i] + y + S[new_i + len(x):]
# return S
s = Solution()
print(s.findReplaceString("abcd", [0,2], ["a","cd"], ["eee","ffff"]))
print(s.findReplaceString("abcd", [1,2], ["b","cd"], ["eee","ffff"]))
#"vbfrssozp"
print(s.findReplaceString("vmokgggqzp",
[3,5,1],
["kg","ggq","mo"],
["s","so","bfr"]))
#vbfkqqqzp
#
|
Shell | UTF-8 | 1,037 | 2.90625 | 3 | [] | no_license |
#!@shell@
systemConfig=@systemConfig@
export PATH=@path@/bin/
# Print a greeting.
echo
echo -e "\e[1;32m<<< vpsAdminOS Stage 2 >>>\e[0m"
echo
mkdir -p /proc /sys /dev /tmp /var/log /etc /root /run /nix/var/nix/gcroots
mount -t proc proc /proc
if [ @procHidePid@ ]; then
mount -o remount,rw,hidepid=2 /proc
fi
mount -t sysfs sys /sys
mount -t devtmpfs devtmpfs /dev
mkdir -p /dev/pts /dev/shm
mount -t devpts -ogid=3 devpts /dev/pts
mount -t tmpfs tmpfs /run
mount -t tmpfs tmpfs /dev/shm
ln -sfn /run /var/run
ln -sf /proc/mounts /etc/mtab
touch /run/{u,w}tmp
hostname @hostName@
$systemConfig/activate
# Record the boot configuration.
ln -sfn "$systemConfig" /run/booted-system
# Prevent the booted system from being garbage-collected. If it weren't
# a gcroot and we were running a different kernel, had switched system,
# and garbage-collected everything, we could not load kernel modules anymore.
ln -sfn /run/booted-system /nix/var/nix/gcroots/booted-system
# Run any user-specified commands.
@shell@ @postBootCommands@
exec runit
|
C++ | UTF-8 | 5,557 | 2.796875 | 3 | [] | no_license |
/*
* Robot Navigation Program
* www.robotnav.com
*
* (C) Copyright 2010 - 2014 Lauro Ojeda
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <iostream>
#include <cmath>
#include <sys/time.h>
#include "Robot.h"
#include "MathFunctions.h"
const int MAX_ENCODER_CONT_SEC = 1000000; //The default limit used for the LEGO EV3 should be very large (effectively infinite); it is only needed for high-count encoders
using namespace std;
Robot::Robot(float period, float track, float encoderScaleFactor)
{
//This constructor does not do anything with the motor and sensor information.
	//That information is handled directly by each robot sub-class. These extra arguments are needed
//to allow class virtualization.
// Robot parameters
mPeriod = period;
mTrack = track;
mEncoderScaleFactor = encoderScaleFactor;
mEncoderCountSecLimit = MAX_ENCODER_CONT_SEC;
//Initialize timing variables
mWaitForPeriod = true;
mCounter = 1;
gettimeofday(&mStartTimeSec, 0);
checkTimming(); //Initialize static current_time variables
cout << "Robot ready!\n";
}
Robot::~Robot()
{
cout << "Robot closed!\n";
}
void Robot::checkTimming()
{
static int s_mean_time_usec = 0;
static int s_max_time_usec = 0;
static int s_min_time_usec = SEC2USEC; //Use an impossibly large value
static timeval s_last_time = mStartTimeSec;
static int s_next_period_time_set_usec = (int)((float)(mPeriod * SEC2USEC));
// Wait until the set period is met
timeval current_time;
int time_diff_usec;
do{
gettimeofday(¤t_time, 0);
time_diff_usec = SEC2USEC*(current_time.tv_sec - s_last_time.tv_sec) + (current_time.tv_usec - s_last_time.tv_usec);
}while(time_diff_usec < s_next_period_time_set_usec && mWaitForPeriod);
s_last_time = current_time;
// Compute timing statistics
if(time_diff_usec < s_min_time_usec)
s_min_time_usec = time_diff_usec;
else if(time_diff_usec > s_max_time_usec)
s_max_time_usec = time_diff_usec;
s_mean_time_usec = (int)((float)(s_mean_time_usec*(mCounter - 1) + time_diff_usec) / mCounter);
// Display info
cout << "TIME[" << mCounter << "/" << current_time.tv_sec - mStartTimeSec.tv_sec << "] T: " << time_diff_usec << " aveT: " << s_mean_time_usec << " minT: " << s_min_time_usec << " maxT: " << s_max_time_usec << endl;
mCounter++;
}
void Robot::speedRate2Counts(float speed, float rate, int *pCountSec)
{
//Compute left and right encoder counts per second
float left_speed = (speed - rate * mTrack / 2.0) / mEncoderScaleFactor;
float right_speed = (speed + rate * mTrack / 2.0) / mEncoderScaleFactor;
double hold1 = rate * mTrack;
double hold2 = hold1 / 2.0;
double hold3 = speed - hold2;
double hold4 = hold3 / mEncoderScaleFactor;
cout << endl << "rate * mTrack: " << hold1 << endl;
cout << "^ /2: " << hold2 << endl;
cout << "speed - ^: " << hold3 << endl;
cout << "^ / mEncoderScaleFactor: " << hold4 << endl << endl;
cout << endl << "left [counts/sec]: " << left_speed << endl;
cout << "right [counts/sec]: " << right_speed << endl << endl;
double counts_revolution = 568;
cout << endl << "speed in rpm after distribution:" << endl;
cout << "left_speed: " << left_speed * 1/counts_revolution * 60 << endl;
cout << "right_speed: " << right_speed * 1/counts_revolution * 60 << endl << endl;
//Round values to the closest integer
pCountSec[LEFT] = (left_speed > 0) ? (left_speed + .5) : (left_speed - 0.5);
pCountSec[RIGHT] = (right_speed > 0) ? (right_speed + .5) : (right_speed - 0.5);
//If there is some speed, even very tiny, we want to keep it. This prevents the robot from getting stuck.
if(!pCountSec[LEFT] && left_speed) pCountSec[LEFT] = (left_speed > 0) ? 1 : -1;
if(!pCountSec[RIGHT] && right_speed) pCountSec[RIGHT] = (right_speed > 0) ? 1 : -1;
// Verify that the motor speed does not exceed the limitations of the encoder counter reader
// this limit is set by the microcontroller that reads the encoder signals
if((fabs(pCountSec[LEFT]) > mEncoderCountSecLimit || fabs(pCountSec[RIGHT]) > mEncoderCountSecLimit))
{
float speed_reduction = (fabs(pCountSec[LEFT]) > fabs(pCountSec[RIGHT])) ? fabs(pCountSec[LEFT]) : fabs(pCountSec[RIGHT]);
speed_reduction /= mEncoderCountSecLimit;
cout << "Max encoder speed exceeded by: " << speed_reduction << " Bef: "<<pCountSec[LEFT] << " " << pCountSec[RIGHT];
pCountSec[LEFT] /= speed_reduction;
pCountSec[RIGHT] /= speed_reduction;
cout << " LIMIT: "<< mEncoderCountSecLimit ;
cout << " Aft: "<<pCountSec[LEFT] << " " << pCountSec[RIGHT] << endl;
}
cout << "ROBOT: " << speed << " " << math_functions::rad2deg(rate) << " " << left_speed << "/" << (int)pCountSec[LEFT] << " " << right_speed << "/" << (int)pCountSec[RIGHT] << endl;
}
void Robot::setEncoderLimit(int pCountSecLimit)
{ // This is only useful if using a different encoder that has significantly larger counts per revolution
// in those cases, the EV3 block may not be able to read the encoders
mEncoderCountSecLimit = pCountSecLimit;
}
|
Ruby
|
UTF-8
| 520 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
class EslintJunitFailure
attr_accessor :line, :severity, :message, :file_path
def initialize(failure:, path: Dir.pwd)
self.file_path = failure.parent.parent.attributes['name'].value.gsub("#{path}/", '')
self.message = failure.attributes['message'].value
failure_text_match = failure_text_regex.match(failure.text)
self.line = failure_text_match[1].to_i
self.severity = failure_text_match[3]
end
private
def failure_text_regex
/line (\d+), col (\d+), (Error|Warning) - (.*)/
end
end
|
JavaScript
|
UTF-8
| 17,104 | 2.9375 | 3 |
[] |
no_license
|
window.onload = function(){
//Form buttons:
btnCancelar = document.getElementById('cancelar');
btnGuardar = document.getElementById('guardar');
btnGestion = document.getElementById('gestion');
//Client buttons:
btnBaja = document.getElementById("borrarCliente");
//Hook up the active handlers:
btnCancelar.onclick = limpiarForm;
btnGuardar.onclick = almacenarClientes;
btnGestion.addEventListener("click", recuperarAlmacenamiento);
//Load the localStorage:
}
function validarDNI(dni){
//Gather the working variables:
var numero;
var letr;
var letra;
var expresion_regular_dni;
expresion_regular_dni = /^\d{8}[a-zA-Z]$/;
if(expresion_regular_dni.test(dni) == true){
numero = dni.substr(0,dni.length-1);
letr = dni.substr(dni.length-1,1);
//Operation that obtains the position of the check letter for the DNI:
numero = numero % 23;
letra ='TRWAGMYFPDXBNJZSQVHLCKET';
letra = letra.substring(numero,numero+1);
if (letra!=letr.toUpperCase()) {
return false;
}
} else {
return false;
}
}
function validarFechaNacimiento(fecha){
var fechaSplit = fecha.split("/");
var dia = fechaSplit[0];
var mes = fechaSplit[1];
var anyo = fechaSplit[2];
var date = new Date(anyo,mes,'0');
if (!isNaN(dia) && anyo > 1900) {
if((dia-0)>(date.getDate()-0)){
return false;
}
}
}
function validarEmail(email) {
var regex = /^([a-zA-Z0-9_\.\-])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,4})+$/;
return regex.test(email) ? true : false;
}
function validarContrasenya(contrasenya){
var mayuscula = false;
var minuscula = false;
var numero = false;
var caracter_raro = false;
if(contrasenya.length >= 8){
for(var i = 0;i<contrasenya.length;i++){
if(contrasenya.charCodeAt(i) >= 65 && contrasenya.charCodeAt(i) <= 90){
mayuscula = true;
} else if(contrasenya.charCodeAt(i) >= 97 && contrasenya.charCodeAt(i) <= 122){
minuscula = true;
} else if(contrasenya.charCodeAt(i) >= 48 && contrasenya.charCodeAt(i) <= 57){
numero = true;
} else {
caracter_raro = true;
}
}
if(mayuscula == true && minuscula == true && caracter_raro == true && numero == true){
return true;
}
}
return false;
}
function validarCampos(){
//Gather the fields to validate:
let nombre = document.getElementById('nombre');
let apellidos = document.getElementById('apellidos');
let dni = document.getElementById('dni');
let fechaNac = document.getElementById('fechaNac');
let email = document.getElementById('email');
let contrasenyaP = document.getElementById('contrasenyaP');
let contrasenyaR = document.getElementById('contrasenyaR');
//Gather the fields where we will report errors:
let nombreError = document.getElementById('errorNombre');
let apellidosError = document.getElementById('errorApellidos');
let dniError = document.getElementById('errorDNI');
let fechaNacError = document.getElementById('errorFechaNac');
let emailError = document.getElementById('errorEmail');
let contrasenyaPError = document.getElementById('errorContrasenyaP');
let contrasenyaRError = document.getElementById('errorContrasenyaR');
// Reset the field colours used to flag failures:
nombre.style.backgroundColor = "#FFF";
dni.style.backgroundColor = "#FFF";
//Use a flag to track whether every validation passes:
let resultado = true;
//Validate the first name and surname
if (nombre.value == null || nombre.value.length == 0 || /^\s+$/.test(nombre.value)) {
nombre.style.backgroundColor = "rgba(255,155,155,0.4)";
nombre.focus();
nombreError.innerHTML = "El nombre no puede estar vacío";
resultado = false;
} else {
nombreError.innerHTML = "";
}
if (apellidos.value == null || apellidos.value.length == 0 || /^\s+$/.test(apellidos.value)) {
apellidos.style.backgroundColor = "rgba(255,155,155,0.4)";
apellidos.focus();
apellidosError.innerHTML = "Los apellidos no pueden estar vacíos";
resultado = false;
} else {
apellidosError.innerHTML = "";
}
//Check the DNI with its helper function:
if(dni.value == 0 || validarDNI(dni.value) == false){
dni.style.backgroundColor = "rgba(255,155,155,0.4)";
dni.focus();
dniError.innerHTML = "Hay un error en su DNI";
resultado = false;
} else {
dniError.innerHTML = "";
}
//Validate the date of birth with its helper function:
if (fechaNac.value == 0 || validarFechaNacimiento(fechaNac.value) == false){
fechaNac.style.backgroundColor = "rgba(255,155,155,0.4)";
fechaNac.focus();
fechaNacError.innerHTML = "Debe usted introducir una fecha";
resultado = false;
} else {
fechaNacError.innerHTML = "";
}
//Validate the e-mail address:
if (email.value == 0 || validarEmail(email.value) == false){
email.style.backgroundColor = "rgba(255,155,155,0.4)";
email.focus();
emailError.innerHTML = "Introduzca su email";
resultado = false;
} else {
emailError.innerHTML = "";
}
//Validate the first password:
// if(validarContrasenya(contrasenyaP.value) == false){
// contrasenyaP.style.backgroundColor = "rgba(255,155,155,0.4)";
// contrasenyaP.focus();
// contrasenyaPError.innerHTML = "Introduzca una contraseña válida. Recuerde que ha de tener al menos 8 caracteres, mayúsculas, minúsculas, números y algun signo de puntuación";
// resultado = false;
// } else {
// contrasenyaPError.innerHTML = "";
// }
// //Validamos segunda contraseña:
// if(contrasenyaR.value != contrasenyaP.value){
// contrasenyaR.style.backgroundColor = "rgba(255,155,155,0.4)";
// contrasenyaR.focus();
// contrasenyaRError.innerHTML = "Las contraseñas no coinciden";
// } else {
// contrasenyaRError.innerHTML = "";
// }
return resultado;
}
function limpiarForm(){
//Get the form and reset it:
form = document.getElementById('clientesForm');
form.reset();
}
function almacenarClientes(event){
//Validate the data first and keep the validation result in a variable.
resultado = validarCampos();
Clientes = [];
//Gather the required values:
let nombreC = document.getElementById('nombre').value;
let apellidosC = document.getElementById('apellidos').value;
let dniC = document.getElementById('dni').value;
let fechaNacC = document.getElementById('fechaNac').value;
let emailC = document.getElementById('email').value;
let contrasenyaPC = document.getElementById('contrasenyaP').value;
if(resultado){
if(localStorage.getItem("Clientes") == null){
//Create a client object:
var id = 1;
newCliente = {"id" : id,
"nombre" : nombreC,
"apellidos" : apellidosC,
"dni" : dniC,
"fechaNac" : fechaNacC,
"email": emailC,
"contrasenya": contrasenyaPC
};
Clientes.push(newCliente);
localStorage.setItem("Clientes", JSON.stringify(Clientes));
} else {
var nuevoCliente;
var arrayClientes;
if (id==null || id == ""){
arrayClientes = JSON.parse(localStorage.getItem("Clientes"));
nuevoCliente = { id : arrayClientes.length + 1,
nombre : nombreC,
apellidos : apellidosC,
dni : dniC,
fechaNac : fechaNacC,
email: emailC,
contrasenya: contrasenyaPC
};
//Add the new client to the array:
arrayClientes.push(nuevoCliente);
} else {
arrayClientes = JSON.parse(localStorage.getItem("Clientes"));
idClienteMod = id - 1;
arrayClientes[idClienteMod] = {
id : id,
nombre : nombreC,
apellidos : apellidosC,
dni : dniC,
fechaNac : fechaNacC,
email: emailC,
contrasenya: contrasenyaPC
};
}
localStorage.setItem("Clientes", JSON.stringify(arrayClientes));
}
}
limpiarForm();
}
function recuperarAlmacenamiento(){
//Get the table:
var tabla = document.getElementById('tablaClientes');
//tabla = document.getElementById('tablaClientes');
var fila = tabla.getElementsByTagName("tr");
for (let i = fila.length-1; i > 0; i--) {
tabla.removeChild(fila[i]);
}
//Retrieve the object from localStorage:
var clientes = JSON.parse(localStorage.getItem("Clientes"));
if(clientes == null){
alert("No hay clientes registrados");
} else {
for (var i = 0; i < clientes.length; i++) {
//Parse the information stored in localStorage into an object:
// //To keep appending rows we first take a reference to where we want to insert them:
// tbody = document.getElementsByTagName('tbody')[0];
//Create the row:
nuevaFila = document.createElement('tr');
nuevaFila.setAttribute('id', clientes[i].id)
nuevaFila.setAttribute('class', 'nuevo_Cliente');
//Add the ID cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('class', 'idN');
contenido = document.createTextNode(clientes[i].id);
nuevaCelda.appendChild(contenido);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the NAME cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('class', 'nombre');
contenido = document.createTextNode(clientes[i].nombre);
nuevaCelda.appendChild(contenido);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the SURNAME cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('class', 'apellidos');
contenido = document.createTextNode(clientes[i].apellidos);
nuevaCelda.appendChild(contenido);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the DNI cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('class', 'dni');
contenido = document.createTextNode(clientes[i].dni);
nuevaCelda.appendChild(contenido);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the BIRTH DATE cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('class', 'fechaNac');
contenido = document.createTextNode(clientes[i].fechaNac);
nuevaCelda.appendChild(contenido);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the EMAIL cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('class', 'email');
contenido = document.createTextNode(clientes[i].email);
nuevaCelda.appendChild(contenido);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the PASSWORD cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('class', 'contrasenya');
contenido = document.createTextNode(clientes[i].contrasenya);
nuevaCelda.appendChild(contenido);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the EDIT button cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('id', 'celda1');
nuevaCelda.innerHTML = '<button id="Editar">Editar</button>';
nuevaCelda.addEventListener("click", editarCliente);
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
//Add the DELETE button cell:
nuevaCelda = document.createElement('td');
nuevaCelda.setAttribute('id', 'celda2');
nuevaCelda.innerHTML = '<button id="baja">Dar de baja</button>';
nuevaCelda.addEventListener("click", function(){
//If I use ("idN")[i] it crashes, but ("idN")[1] works
id = document.getElementsByClassName("idN")[1].innerText;
alert(id);
borrarCliente(id);
});
nuevaFila.appendChild(nuevaCelda);
tabla.appendChild(nuevaFila);
// btnBaja = document.getElementById("borrarCliente");
// btnBaja.addEventListener("click",borrarCliente());
}
}
}
function borrarCliente(id){
clientes = JSON.parse(localStorage.getItem("Clientes"));
console.log(clientes);
clientes.splice(id-1, 1);
console.log(clientes);
//window.location.href = "formularioClientes.html";
// for (let i = 0; i < clientes.length; i++) {
// alert(clientes[i].id);
// if(clientes[i].id == id){
// clientes.splice(id, 1);
// break;
// }
// }
}
// function borrarCliente(){
// this.parentNode.remove();
// miObj = JSON.parse(localStorage.getItem("Clientes"));
// //console.log(miObj[2].id);
// // for (let i = 0; i < miObj.length; i++) {
// // if(miObj[i].id == i){
// // miObj.splice(i, 1);
// // break;
// // }
// // }
// actualizarClientes();
// }
function editarCliente(){
alert("editamos");
}
function actualizarClientes(){
//First clear whatever is stored so we save the data fresh:
// localStorage.clear();
//Get the table:
tabla = document.getElementById('tablaClientes');
//tabla = document.getElementById('tablaClientes');
fila = tabla.getElementsByClassName("nuevo_Cliente");
clientes = fila.length;
//arrayClientes = JSON.parse(localStorage.getItem("Clientes"));
localStorage.clear();
//clientesInicio = arrayClientes.length;
//Calculations:
//Since we only take the rows of created items, not every row, we can start at 0.
//Get the value of the cell whose class is "total".
//parseFloat -> convert a string into a number.
if(localStorage.getItem("Clientes") == null){
//alert('entramos');
var arrayClientes;
for (var i = 0; i < clientes; i++) {
nuevoCliente = { id : fila[i].getElementsByClassName('id')[0].innerText,
nombre : fila[i].getElementsByClassName('nombre')[0].innerText,
apellidos : fila[i].getElementsByClassName('apellidos')[0].innerText,
dni : fila[i].getElementsByClassName('dni')[0].innerText,
fechaNac : fila[i].getElementsByClassName('fechaNac')[0].innerText,
email: fila[i].getElementsByClassName('email')[0].innerText,
contrasenya: fila[i].getElementsByClassName('contrasenya')[0].innerText
};
localStorage.setItem("Clientes", JSON.stringify(nuevoCliente));
//Add the new client to the array:
// arrayClientes.push(nuevoCliente);
// localStorage.setItem("Clientes", JSON.stringify(arrayClientes));
}
}
}
|
Python
|
UTF-8
| 2,675 | 3.375 | 3 |
[] |
no_license
|
"""Casting strategy"""
import spell_data as sd
class CastingStrategy:
"""
Basic implementation of a casting strategy.
Picks a spell and a target to cast it on.
"""
def __init__(self, talents):
self.talents = talents
self.name = "Basic Strategy"
def pick_spell(self, deficit, mana, h, **_):
# pick spell by deficit
choices = {
"10917": "Flash Heal (Rank 7)",
# "10916": "Flash Heal (Rank 6)",
# "10915": "Flash Heal (Rank 5)",
"9474": "Flash Heal (Rank 4)",
# "9473": "Flash Heal (Rank 3)",
# "9472": "Flash Heal (Rank 2)",
# "2061": "Flash Heal (Rank 1)",
# "2053": "Lesser Heal (Rank 3)",
# "10965": "Greater Heal (Rank 4)",
# "10964": "Greater Heal (Rank 3)",
# "10963": "Greater Heal (Rank 2)",
# "2060": "Greater Heal (Rank 1)",
# "6064": "Heal (Rank 4)",
# "6063": "Heal (Rank 3)",
# "2055": "Heal (Rank 2)",
# "2054": "Heal (Rank 1)",
}.keys()
# filter choices by mana available
choices = filter(lambda sid: sd.spell_mana(sid, talents=self.talents) < mana, choices)
# convert to healing
heals = map(lambda sid: (sid, sd.spell_heal(sid) + sd.spell_coefficient(sid) * h), choices)
# pick max heal with small amount of overhealing
heals = filter(lambda x: 0.80 * x[1] < -deficit, heals)
try:
spell_id, heal = max(heals, key=lambda x: x[1])
except ValueError:
return 0, 0, 0
mana = sd.spell_mana(spell_id)
cast_time = 1.5 if "Flash" in sd.spell_name(spell_id) else 2.5
return heal, mana, cast_time
class SingleSpellStrategy(CastingStrategy):
"""
Casting strategy that only casts a signel spell by rank.
"""
def __init__(self, talents, spell_id):
super().__init__(talents)
self.spell_id = spell_id
self.name = f"Only {sd.spell_name(spell_id)}"
def pick_spell(self, deficit, mana, h, **_):
# pick spell by deficit
spell_id = self.spell_id
base_heal = sd.spell_heal(spell_id)
coef = sd.spell_coefficient(spell_id)
spell_mana = sd.spell_mana(spell_id, talents=self.talents)
if spell_mana > mana:
# print(f"{spell_mana} > {mana}")
# could not cast
return 0, 0, 0
heal = base_heal + h * coef # include random variation and crit maybe?
cast_time = 1.5 if "Flash" in sd.spell_name(spell_id) else 2.5
return heal, spell_mana, cast_time
|
Java
|
UTF-8
| 667 | 1.90625 | 2 |
[] |
no_license
|
package com.appzoneltd.lastmile.microservice.manualdistribution.dao;
import java.util.List;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
import com.appzoneltd.lastmile.microservice.manualdistribution.entity.PickupOrDeliveryRequest;
public interface PickupOrDeliveryReqRepo extends CrudRepository<PickupOrDeliveryRequest, Long> {
@Query(value = "SELECT DISTINCT pid FROM PickupOrDeliveryRequest pid WHERE pid.jobOrderId=:id ORDER BY pid.created DESC")
public List<PickupOrDeliveryRequest> getOrderInfo(@Param("id") Long jobOrderId);
}
|
Python
|
UTF-8
| 545 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
#Import statements
from tkinter import*
#Create root window
mw = Tk()
mw.geometry("300x300")
#Create Radio Button
rb1 = Radiobutton(mw,text = "C")
rb2 = Radiobutton(mw,text = "C++")
rb3 = Radiobutton(mw,text = "Java")
rb4 = Radiobutton(mw,text = "Python")
rb5 = Radiobutton(mw,text = "Perl")
rb1.grid(row = 0,column = 0,sticky=W)
rb2.grid(row = 1,column = 0,sticky=W)
rb3.grid(row = 2,column = 0,sticky=W)
rb4.grid(row = 3,column = 0,sticky=W)
rb5.grid(row = 4,column = 0,sticky=W)
#Call Main Loop Method
mw.mainloop()
|
Java
|
UTF-8
| 2,360 | 3.078125 | 3 |
[] |
no_license
|
package com.ym.utils.util;
import java.util.ArrayList;
import java.util.List;
/**
* Description: WeChat bargain (price-cut) helper utility
*
* @author : 唐夏联
* @date : November 19, 2018
* Company : 上海煜墨信息科技有限公司
* Copyright : Copyright (c) 2018
* @version : 1.0 Modified by 唐夏联 on November 19, 2018
*/
public class WXBargainUtils {
/**
* Description: generates the array of WeChat bargain price cuts
*
* @author : 唐夏联
* @date : November 19, 2018
* Company : 上海煜墨信息科技有限公司
* Copyright : Copyright (c) 2018
* @version : 1.0 Modified by 唐夏联 on November 19, 2018
*/
private static final Double TIMES = 2.1;
// Build the list of bargain price cuts
public static List<Double> splitRedPackets(Double money, int count, Double minMoney, Double maxMoney) {
if (!isRight(money, count, minMoney, maxMoney)) {
return null;
}
List<Double> list = new ArrayList<Double>();
Double max = (Double) (money * TIMES / count);
max = max > maxMoney ? maxMoney : max;
for (int i = 0; i < count; i++) {
Double one = randomRedPacket(money, minMoney, max, count - i, minMoney, maxMoney);
list.add(one);
money -= one;
}
return list;
}
private static boolean isRight(Double money, int count, Double minMoney, Double maxMoney) {
double avg = money / count;
if (avg < minMoney) {
return false;
} else if (avg > maxMoney) {
return false;
}
return true;
}
private static Double randomRedPacket(Double money, Double mins, Double maxs, int count, Double minMoney,
Double maxMoney) {
if (count == 1) {
return (double) (Math.round(money * 100)) / 100;
}
if (mins == maxs) {
return mins; // if the max and min are equal, just return mins
}
Double max = maxs > money ? money : maxs;
Double one = ((Double) Math.random() * (max - mins) + mins);
one = (double) (Math.round(one * 10)) / 10;
Double moneyOther = money - one;
if (isRight(moneyOther, count - 1, minMoney, maxMoney)) {
return one;
} else {
// redistribute and try again
Double avg = moneyOther / (count - 1);
if (avg < minMoney) {
return randomRedPacket(money, mins, one, count, minMoney, maxMoney);
} else if (avg > maxMoney) {
return randomRedPacket(money, one, maxs, count, minMoney, maxMoney);
}
}
return one;
}
}
|
JavaScript
|
UTF-8
| 857 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
import home from './home';
import menu from './menu';
import contact from './contact';
const nav = () => {
const navbar = document.querySelector('#navbar');
const main = document.querySelector('#content');
const navigation = document.createElement('ul');
navigation.classList.add('list');
navbar.appendChild(navigation);
const createLink = (linkText, callMethod) => {
const listItem = document.createElement('li');
listItem.classList.add('list-item');
navigation.appendChild(listItem);
const linkItem = document.createElement('a');
linkItem.innerHTML = linkText;
listItem.appendChild(linkItem);
linkItem.addEventListener('click', () => {
main.innerHTML = '';
callMethod();
});
};
createLink('Home', home);
createLink('Menu', menu);
createLink('Contact', contact);
};
export default nav;
|
SQL
|
UTF-8
| 637 | 2.859375 | 3 |
[] |
no_license
|
set echo off
set feedback on
set verify on
set heading on
/* start C:\Users\evana\Desktop\IS480\practices\outter_join_practice\tables.sql */
drop table loan;
drop table customers;
create table loan(
loanNo varchar2(2),
branch varchar2(2),
amt number(6,2),
primary key(loanNo,branch));
create table customers(
custNo varchar2(2),
loanNo varchar2(2),
primary key(custNo,loanNo));
insert into loan values('L1','B1',1000);
insert into loan values('L2','B2',2000);
insert into loan values('L3','B3',1500);
insert into customers values('C1','L1');
insert into customers values('C2','L2');
insert into customers values('C3','L4');
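/*
Sketch (not part of the original tables.sql): the kind of query this outer-join
practice data is set up to exercise. Customer C3 references loan L4, which has
no row in the loan table, so an inner join would drop C3 while a left outer
join keeps it with NULL loan columns.
*/
select c.custNo, c.loanNo, l.branch, l.amt
from customers c
left outer join loan l on c.loanNo = l.loanNo;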
|
PHP
|
UTF-8
| 245 | 3.4375 | 3 |
[] |
no_license
|
<?php
$a = 2;
$b = 3;
if ($a < $b):
echo "Zmienna \$a jest mniejsza od \$b";
elseif($a > $b):
echo "Zmienna \$a jest większa od \$b";
else:
echo "Zmienna \$a jest równa \$b";
endif;
?>
|
Python
|
UTF-8
| 567 | 2.71875 | 3 |
[] |
no_license
|
import base64
vals = [0xA884DF8AB2FBC902, 0xE0D28ACBFB46461A, 0x6178F0BE4CD508AC, 0x603AD81291B66724, 0xDE5CDDE19279A148, 0x70E60361F80E8EB4]
to_xor = [0xD8BDEEE9C2938E66, 0xD598D291C97F7779, 0x0D32C3E736983BF4, 0x0C428F73FC8F2140, 0xA419A7AFE834F505, 0x4DAB6D008D6DF4F9]
def xor(a,b):
c = 0
for i in range(8):
tmp1 = (a >> 8*i) & 0xFF
tmp2 = (b >> 8*i) & 0xFF
tmp3 = (tmp1 ^ tmp2)
c = (c << 8) | tmp3
return c
to_dec = b""
for i in range(6):
tmp = hex(xor(vals[i],to_xor[i]))[2:]
to_dec += bytes.fromhex(tmp)
print(base64.b64decode(to_dec).decode())
|
Python
|
UTF-8
| 1,193 | 2.828125 | 3 |
[] |
no_license
|
from math import *
import numpy as np
from matrix import matrix
from parametricObject import parametricObject
class parametricTorus(parametricObject):
def __init__(self, T=matrix(np.identity(4)), innerRadius=10.0, outerRadius=5.0, color=(255, 255, 255),
reflectance=(0.2, 0.4, 0.4, 1.0), uRange=(0.0, 2.0 * pi), vRange=(0.0, 2.0 * pi),
uvDelta=(pi / 18.0, pi / 9.0)):
super().__init__(T, color, reflectance, uRange, vRange, uvDelta)
self.__innerRadius = innerRadius
self.__outerRadius = outerRadius
def getPoint(self, u, v):
P = matrix(np.ones((4, 1)))
P.set(0, 0, (self.__innerRadius + self.__outerRadius * cos(v)) * cos(u))
P.set(1, 0, (self.__innerRadius + self.__outerRadius * cos(v)) * sin(u))
P.set(2, 0, self.__outerRadius * sin(v))
return P
def setInnerRadius(self, innerRadius):
self.__innerRadius = innerRadius
def setOuterRadius(self, outerRadius):
self.__outerRadius = outerRadius
def getInnerRadius(self):
return self.__innerRadius
def getOuterRadius(self):
return self.__outerRadius
|
C#
|
UTF-8
| 394 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
namespace BrainFxxkInterpreter
{
public class StdIO : IO
{
public int Read()
{
return Console.ReadKey().KeyChar;
}
public void Write(object chr)
{
Console.Write(chr);
}
}
}
|
Java
|
UTF-8
| 729 | 2.953125 | 3 |
[] |
no_license
|
package org.coding.sort;
import java.util.Iterator;
public class SortPage implements Iterator<Row> {
private int allowedPageSize;
private RowBatch rowBatch;
private int addedSize;
public SortPage(int pageSize) {
this.allowedPageSize = (pageSize * 95) / 100;
this.rowBatch = new RowBatch();
}
public void addRow(Row row) {
rowBatch.addRow(row);
addedSize += row.getSize();
}
public boolean canAdd() {
return addedSize < allowedPageSize;
}
@Override public boolean hasNext() {
return rowBatch.hasNext();
}
@Override public Row next() {
return rowBatch.next();
}
public void sort() {
this.rowBatch.sort();
}
public int getSize() {
return addedSize;
}
}
|
TypeScript
|
UTF-8
| 2,739 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
type Controller = StructureController;
interface SignDefinition {
username: string;
text: string;
time: number,
datetime: Date;
}
/**
* Claim this structure to take control over the room. The controller structure
* cannot be damaged or destroyed. It can be addressed by `Room.controller`
* property.
*/
declare class StructureController extends OwnedStructure {
/**
* Current controller level, from 0 to 8.
*/
public readonly level: number;
/**
* The current progress of upgrading the controller to the next level.
*/
public readonly progress: number;
/**
* The progress needed to reach the next level.
*/
public readonly progressTotal: number;
/**
* An object with the controller reservation info if present: username, ticksToEnd
*/
public readonly reservation: Reservation;
/**
* How many ticks of safe mode are remaining, or undefined.
*/
public readonly safeMode: number | undefined;
/**
* Safe mode activations available to use.
*/
public readonly safeModeAvailable: number;
/**
* During this period in ticks new safe mode activations will be blocked, undefined if cooldown is inactive.
*/
public readonly safeModeCooldown: number | undefined;
/**
* An object with the controller sign info if present
*/
public readonly sign: SignDefinition;
/**
* The amount of game ticks when this controller will lose one level. This timer can be reset by using
* Creep.upgradeController.
*/
public readonly ticksToDowngrade: number;
/**
* The amount of game ticks while this controller cannot be upgraded due to attack.
*/
public readonly upgradeBlocked: number;
/**
* A unique object identifier. You can use the Game.getObjectById method to retrieve an object instance by its id.
*
* NOTE: we override the room from Structure since we are guaranteed the type
*/
public readonly id: StructureId<Controller>;
/**
* One of the STRUCTURE_* constants.
*
* NOTE: we override the room from Structure since we are guaranteed the type
*/
public readonly structureType: StructureType<Controller>;
/**
* CPU cost: CONST
*
* Make your claimed controller neutral again.
*
* @returns Return code: OK, ERR_NOT_OWNER
*/
public unclaim(): ResponseCode;
/**
* Activate safe mode if available.
* @returns Result Code: OK, ERR_NOT_OWNER, ERR_BUSY, ERR_NOT_ENOUGH_RESOURCES, ERR_TIRED
*/
public activateSafeMode(): ResponseCode;
}
interface Reservation {
/**
* The name of a player who reserved this controller.
*/
readonly username: string;
/**
* The amount of game ticks when the reservation will end.
*/
readonly ticksToEnd: number;
}
|
C
|
UTF-8
| 2,917 | 4.0625 | 4 |
[] |
no_license
|
#include<stdio.h>
#include<stdlib.h>
#define SIZE 5
int next=0;
void take(int *);
void insertatend(int *,int);
void insertatbeg(int *,int);
void insertatpos(int *,int,int);
void display(int *);
void deleteatend(int* );
void deleteatbeg(int *);
void deleteatpos(int *,int);
void reverse_display(int *);
void main()
{
static int arr[SIZE];
int choice, pos;
int num;
while(1)
{
printf("\nEnter your choice : ");
printf("\n1.Insert at End\n2.Insert at Beginning");
printf("\n3.Insert at Position\n4.Delete at End");
printf("\n5.Delete from Beginning\n6.Delete from a Position");
printf("\n7.Display\n8.Reverse Display\n9.Exit ::");
scanf("%d",&choice);
switch(choice)
{
case 1: take(&num);
insertatend(arr,num);
break;
case 2: take(&num);
insertatbeg(arr,num);
break;
case 3: printf("\aEnter the position at which you want to add the : ");
scanf("%d",&pos);
take(&num);
if(pos==1)
insertatbeg(arr,num);
else if(pos<1)
printf("Position not valid");
else
insertatpos(arr,pos,num);
break;
case 4: deleteatend(arr);
break;
case 5: deleteatbeg(arr);
break;
case 6: printf("\aEnter the position at which you want to Delete : ");
scanf("%d",&pos);
deleteatpos(arr,pos);
break;
case 7: display(arr);
break;
case 8: reverse_display(arr);
break;
case 9: exit(1);
default:
break;
}
}
}
void take(int *a)
{
printf("\aEnter the Data : ");
scanf("%d",&(*a));
}
void insertatend(int arr[SIZE],int data)
{
if(next==SIZE)
{
printf("\aThe Array is full");
return;
}
else
{
arr[next++]=data;
}
}
void insertatbeg(int arr[SIZE],int data)
{
if(next==SIZE)
{
printf("\aThe Array is full");
return;
}
else
{
for(int i=(SIZE-2) ; i>=0 ; i--)
{
arr[i+1]=arr[i];
}
arr[0]=data;
next++;
}
}
void display(int arr[SIZE])
{
if(next==0)
{
printf("\aThe Array is empty");
return;
}
for(int i=0 ; i!=next ; i++)
{
printf(" %d---> ", arr[i]);
}
}
void insertatpos(int arr[SIZE],int pos,int data)
{
if(next==SIZE)
{
printf("\aThe Array is full");
return;
}
else
{
int i=SIZE-2;
for(i;i>=(pos-1);i--)
{
arr[i+1]=arr[i];
}
arr[pos-1]=data;
next++;
}
}
void deleteatend(int arr[SIZE])
{
if(next==0)
{
printf("\aThe Array is empty");
return;
}
next--;
arr[next]=0;
}
void deleteatbeg(int arr[SIZE])
{
if(next==0)
{
printf("\aThe Array is empty");
return;
}
else
{ for(int i=0;i<=(SIZE-2);i++)
{
arr[i]=arr[i+1];
}
}
next--;
}
void deleteatpos(int arr[SIZE],int pos)
{
if(next==0)
{
printf("\aThe Array is empty");
return;
}
else
{
arr[pos-1]=0;
for(int i=(pos-1);i<(SIZE-1);i++)
{
arr[i]=arr[i+1];
}
}
next--;
}
void reverse_display(int arr[SIZE])
{
for(int temp=next-1;temp>=0;temp--)
{
printf(" <---%d ",arr[temp]);
}
}
|
C
|
GB18030
| 2,717 | 2.5625 | 3 |
[] |
no_license
|
#include "uart2.h"
#include "msg.h"
UART4_MSG_S g_stUart2Msg;
/* Initialize USART2 on PA2 (TX) / PA3 (RX): GPIO, baud rate, frame format,
receive interrupt and NVIC priority are configured below. */
void UART2_Configuration(void)
{
GPIO_InitTypeDef GPIO_InitStructure;
USART_InitTypeDef USART_InitStructure;
NVIC_InitTypeDef NVIC_InitStructure;
RCC_APB2PeriphClockCmd(RCC_APB2Periph_GPIOA, ENABLE );
RCC_APB1PeriphClockCmd(RCC_APB1Periph_USART2, ENABLE );
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_2; // USART2 TX (PA2)
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF_PP; // alternate-function push-pull output
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_Init(GPIOA, &GPIO_InitStructure); // configure port A
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_3; // USART2 RX (PA3)
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IN_FLOATING; // floating input
GPIO_Init(GPIOA, &GPIO_InitStructure); // configure port A
USART_InitStructure.USART_BaudRate = WHEEL_ADP_USART_BAUD_RATE; // baud rate
USART_InitStructure.USART_WordLength = USART_WordLength_8b; // 8 data bits
USART_InitStructure.USART_StopBits = USART_StopBits_1; // 1 stop bit
USART_InitStructure.USART_Parity = USART_Parity_No ; // no parity
USART_InitStructure.USART_HardwareFlowControl = USART_HardwareFlowControl_None;
// no hardware flow control
USART_InitStructure.USART_Mode = USART_Mode_Rx | USART_Mode_Tx;
// enable both receive and transmit
USART_Init(USART2, &USART_InitStructure); // apply the USART configuration
NVIC_PriorityGroupConfig(NVIC_PriorityGroup_4); // priority group 4: 4 bits preemption priority, 0 bits subpriority
NVIC_InitStructure.NVIC_IRQChannel = USART2_IRQn; // interrupt channel
NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 7; // preemption priority
NVIC_InitStructure.NVIC_IRQChannelSubPriority = 1; // subpriority
NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE;
NVIC_Init(&NVIC_InitStructure);
USART_ITConfig(USART2, USART_IT_RXNE, ENABLE);
USART_Cmd(USART2, ENABLE); // enable the USART
}
void UART2_Init()
{
UART2_Configuration();
}
void UART2_Send_Bytes(u8 *Data, u8 len) // send len bytes over USART2
{
int i;
for(i = 0; i < len; i++)
{
USART_SendData(USART2,Data[i]);
while( USART_GetFlagStatus(USART2, USART_FLAG_TC) == RESET );
}
}
//0d 0a
void UART2_IRQHandler(void) // USART2 interrupt handler
{
if(USART_GetITStatus(USART2, USART_IT_RXNE) == SET) // receive-buffer-not-empty interrupt?
{
USART_ReceiveData(USART2);
USART_SendData(USART2, 96);
while( USART_GetFlagStatus(USART2, USART_FLAG_TC) == RESET );
USART_ClearITPendingBit(USART2, USART_IT_RXNE); // clear the RX interrupt pending flag
}
}
|
Swift
|
UTF-8
| 8,276 | 3.203125 | 3 |
[] |
no_license
|
//
// ContentView.swift
// My BMI
//
// Created by Joshua on 2021-03-23.
//
import SwiftUI
struct BMIView: View {
@State var bmi = BMICalculator(height: 0.0, weight: 0.0)
@State var isHeightMetric = true
@State var isWeightMetric = true
@State var isPopoverPresented = false
@State var isChanged = false
var body: some View {
ZStack {
// MARK: Background
Color(.black)
.edgesIgnoringSafeArea(.all)
AnimatedBackground()
.edgesIgnoringSafeArea(.all)
.blur(radius: 25)
.scaleEffect(1.2)
.opacity(0.9) // slightly dim to improve contrast with text
VStack {
Spacer()
// MARK: Top Heading
// When app opens up...
if !isChanged {
Text("Calculate Your BMI")
.font(.largeTitle)
.fontWeight(.heavy)
.foregroundColor(.white)
.multilineTextAlignment(.center)
Text("For best results, please be\nas accurate as possible.")
.font(.body)
.fontWeight(.medium)
.foregroundColor(.white)
.multilineTextAlignment(.center)
.opacity(0.8)
.padding()
}
// As soon as you move a slider...
else if isChanged {
// Dynamic person icon
Image(systemName: "figure.stand")
.font(.system(size: determineHeight(), weight: determineWeight()))
.frame(width: 200, height: 200, alignment: .bottom)
.foregroundColor(.white)
}
Spacer()
// MARK: Height Slider
VStack {
HStack {
Text("Height")
.font(.title2)
.fontWeight(.semibold)
.foregroundColor(.white)
.opacity(0.8)
Spacer()
Text("\(getHeight(cm: bmi.height, isMetric: isHeightMetric))")
.font(Font.system(.largeTitle, design: .rounded).weight(.heavy))
.foregroundColor(.white)
.onTapGesture { isHeightMetric.toggle() } // tap to change
}.padding(.horizontal)
Slider(value: $bmi.height, in: 120...220, onEditingChanged: { _ in
if self.isChanged == false { self.isChanged = true }
})
.accentColor(Color("purpleHighlight"))
.padding()
.onTapGesture { isChanged = true }
.onAppear {
if(self.bmi.height < 1) {
self.bmi.height = 170
}
}
}
// MARK: Weight Slider
VStack {
HStack {
Text("Weight")
.font(.title2)
.fontWeight(.semibold)
.foregroundColor(.white)
.opacity(0.8)
Spacer()
Text("\(getWeight(kg: bmi.weight, isMetric: isWeightMetric, indicator: true))")
.font(Font.system(.largeTitle, design: .rounded).weight(.heavy))
.foregroundColor(.white)
.onTapGesture { isWeightMetric.toggle() } // tap to change
}.padding(.horizontal)
Slider(value: $bmi.weight, in: 20...150, onEditingChanged: { _ in
if self.isChanged == false { self.isChanged = true }
})
.accentColor(Color("purpleHighlight"))
.padding()
.onAppear {
if(self.bmi.weight < 1) {
self.bmi.weight = 85
}
}
}
// MARK: Calculate Button
Button(action: {
bmi.calculateBMI(height: bmi.height, weight: bmi.weight)
self.isPopoverPresented.toggle()
}) {
ZStack {
Color("purpleHighlight")
.opacity(0.8)
Text("Calculate")
.font(.title)
.fontWeight(.semibold)
.foregroundColor(.white)
.padding()
}
.frame(maxWidth: 500, maxHeight: 70)
.cornerRadius(15.0)
.padding(.vertical)
}
.sheet(isPresented: $isPopoverPresented, content: {
ResultView(bmi: bmi, isHeightMetric: isHeightMetric, isWeightMetric: isWeightMetric)
})
}.padding()
}
}
// Dynamically determine height of symbol based on height slider
func determineHeight() -> CGFloat {
return CGFloat(self.bmi.height / 1.2) // fluid transition
}
// Dynamically determine weight of symbol based on weight slider
func determineWeight() -> Font.Weight {
switch self.bmi.weight {
case 0 ..< 30:
return .ultraLight
case 30 ..< 45:
return .thin
case 45 ..< 60:
return .light
case 60 ..< 75:
return .regular
case 75 ..< 90:
return .medium
case 90 ..< 105:
return .semibold
case 105 ..< 120:
return .bold
case 120 ..< 135:
return .heavy
default:
return .black
}
}
}
// Custom animated background
struct AnimatedBackground: View {
@State var start = UnitPoint(x: 0, y: -2)
@State var end = UnitPoint(x: 4, y: 0)
let timer = Timer.publish(every: 1, on: .main, in: .default).autoconnect()
let colors = [Color(.black), Color(.black), Color("purpleDark"), Color("purpleLight"), Color("purpleHighlight")]
var body: some View {
LinearGradient(gradient: Gradient(colors: colors), startPoint: start, endPoint: end)
.animation(Animation.easeInOut(duration: 10).repeatForever())
.onReceive(timer, perform: { _ in
self.start = UnitPoint(x: 4, y: 0)
self.end = UnitPoint(x: 0, y: 2)
self.start = UnitPoint(x: -4, y: 20)
self.start = UnitPoint(x: 4, y: 0)
})
}
}
// Get height in centimetres or feet and inches
func getHeight(cm: Float, isMetric: Bool) -> String {
if isMetric {
return "\(String(format: "%.0f", cm)) cm"
} else {
let feet = cm * 0.0328084
let feetDisplay = Int(floor(feet))
let feetRemainder: Float = ((feet * 100).truncatingRemainder(dividingBy: 100) / 100)
let inches = Int(floor(feetRemainder * 12))
return "\(feetDisplay)' \(inches)\""
}
}
// Get weight in kilograms or pounds
func getWeight(kg: Float, isMetric: Bool, indicator: Bool) -> String {
var weightString: String
if isMetric {
weightString = String(format: "%.0f", kg)
if indicator { weightString += " kg" }
} else {
let lbs = kg * 2.205
weightString = String(format: "%.0f", lbs)
if indicator { weightString += " lbs" }
}
return weightString
}
// MARK: Previews
struct BMIView_Previews: PreviewProvider {
static var previews: some View {
BMIView()
.previewDevice("iPhone 12")
}
}
|
Python
|
UTF-8
| 2,541 | 3.484375 | 3 |
[] |
no_license
|
class linearQueue:
def __init__(self,size,init=None):
self.size=size
self.init=init
self.front=-1
self.rear=-1
self.storage=[init]*self.size
def enQueue(self,item):
if not self.isFull():
self.rear+=1
self.storage[self.rear]=item
else:
print("Queue is full")
def deQueue(self):
if not self.isEmpty():
self.front+=1
temp=self.storage[self.front]
self.storage[self.front]=self.init
return temp
else:
print("Queue is Empty")
def isEmpty(self):
return self.front==self.rear
def isFull(self):
return self.rear==(self.size-1)
def Qpeek(self):
return self.storage[self.front]
class CircleQueue:
def __init__(self,size,init=None):
self.size=size
self.init=init
self.front=0
self.rear=0
self.storage=[init]*self.size
def enQueue(self,item):
if not self.isFull():
self.rear=(self.rear+1)%self.size
self.storage[self.rear]=item
else:
print("Queue is full")
def deQueue(self):
if not self.isEmpty():
self.front=(self.front+1)%self.size
temp=self.storage[self.front]
self.storage[self.front]=self.init
return temp
else:
print("Queue is Empty")
def isEmpty(self):
return self.front==self.rear
def isFull(self):
return (self.rear+1)%self.size==self.front
def Qpeek(self):
return self.storage[self.front]
class Node:
def __init__(self,item):
self.item=item
self.next=None
def setNext(self,node):
self.next=node
class LinkedQueue:
def __init__(self):
self.front=None
self.rear=None
def enQueue(self,item):
node=Node(item)
if self.isEmpty():
self.front=node
else :
self.rear.setNext(node)
self.rear=node
def deQueue(self):
if self.isEmpty():
print("Empty Queue")
return
result=self.front.item
self.front=self.front.next
if self.isEmpty():
self.rear=None
return result
def isEmpty(self):
return self.front==None
q=LinkedQueue()
a=1
m=1000
while(m>0):
if q.isEmpty():
q.enQueue((a,1))
ta,tm=q.deQueue()
m-=tm
q.enQueue((ta,tm+1))
a+=1
q.enQueue((a,1))
print(ta,tm)
|
PHP
|
UTF-8
| 1,050 | 2.828125 | 3 |
[] |
no_license
|
<?php namespace MailTracker;
class Service {
/**
* Database instance.
*
* @var MailTracker\DatabaseInterface
*/
protected $db;
/**
* Construct a new service.
*
* @access public
* @param DatabaseInterface $db
* @return void
*/
public function __construct(DatabaseInterface $db)
{
$this->db = $db;
}
/**
* Generate a new tracking.
*
* @access public
* @param array $data
* @return string
*/
public function generate($data = array())
{
return $this->db->create($data);
}
/**
* Check/validate a tracking code.
*
* @access public
* @param string $trackingCode
* @return boolean
*/
public function check($trackingCode)
{
return $this->db->find($trackingCode);
}
/**
* Serve the tracking image, ideally this should be call from a
* controller.
*
* @access public
* @return string
*/
public function serve()
{
return (object) array(
"contentType" => "image/gif",
"data" => "R0lGODlhAQABAIAAAP///wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==",
);
}
}
|
PHP
|
UTF-8
| 1,933 | 3.09375 | 3 |
[] |
no_license
|
<?php
// process input variables
if (isset($_REQUEST['plist'])) {
if (strip_tags($_REQUEST['plist']) == 1) {
$page = 1;
$title = 'Page 1';
} else if (strip_tags($_REQUEST['plist']) == 2) {
$page = 2;
$title = 'Page 2';
} else {
$title = 'Invalid Selection';
}
} else {
$page = 'default';
$title = 'Pick Your Page';
}
?><!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title><?php echo $title ?></title>
<script language="JavaScript">
// function to submit form if a page is chosen
function doSubmit() {
if(document.navForm.plist.selectedIndex != 0) {
document.navForm.submit();
}
}
</script>
</head>
<body>
<?php
if ($page == 1) {
?>
<h1>Page 1</h1>
<p>You have selected page one content. You may use the navigation list below to change this.</p>
<?php
} else if ($page == 2) {
?>
<h1>Page 2</h1>
<p>You have selected page two content. You may use the navigation list below to change this.</p>
<?php
} else {
?>
<h1>Select Your Page</h1>
<?php
}
?>
<form action="<?= $_SERVER['PHP_SELF'] ?>" name="navForm">
<label>
<select name="plist" onchange="doSubmit();">
<option value="">Select Your Page</option>
<option value="1" <?php if ($page == 1) {
echo 'selected="selected"';
} ?>>Page 1</option>
<option value="2" <?php if ($page == 2) {
echo 'selected="selected"';
} ?>>Page 2</option>
</select>
</label>
</form>
<p>
<a href="https://validator.w3.org/check/referer">validate HTML</a>
| <a href="https://jigsaw.w3.org/css-validator/check/referer">validate CSS</a>
</p>
</body>
</html>
|
SQL
|
UTF-8
| 815 | 3.984375 | 4 |
[] |
no_license
|
/*
Udacity's certified program > Business Analytics Nanodegree
SQL practice > Lesson 1 : SQL Basics
When you use the BETWEEN operator in SQL, do the results include the values of
your endpoints, or not? Figure out the answer to this important question by
writing a query that displays the order date and gloss_qty data for all orders
where gloss_qty is between 24 and 29. Then look at your output to see if the
BETWEEN operator included the begin and end values or not.
*/
SELECT occurred_at, gloss_qty
FROM orders
WHERE gloss_qty BETWEEN 24 AND 29;
/*
Observation: yes the results include the endpoints 24 and 29. This means the
between endpoints are inclusive on both left and right ends.
So the BETWEEN statement in this query is equivalent to having written
"WHERE gloss_qty >= 24 AND gloss_qty <= 29."
*/
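/*
Sketch of the equivalent query without BETWEEN, for comparison (same orders
table as above): because the endpoints are inclusive, these two forms return
the same rows.
*/
SELECT occurred_at, gloss_qty
FROM orders
WHERE gloss_qty >= 24 AND gloss_qty <= 29;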
|
Shell
|
UTF-8
| 1,105 | 3.234375 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
VERSION=$1
if [ -z "$VERSION" ]
then
echo $0 version
exit
fi
NAME=`json -f package.json productName`
NAME_ESCAPED=`echo $NAME | sed 's/\ /\\ /'`
VERSION_ESCAPED=`echo $VERSION | sed 's/\ /\\ /'`
DIR=$NAME_ESCAPED-$VERSION_ESCAPED
ARCH=x64
PLATFORM=darwin
json -I -f package.json -e "this.version='${VERSION}'"
./scripts/build.sh
rm -rf "release-builds/${DIR}/"
mkdir -p "release-builds/${DIR}/${DIR}"
cp -R "release-builds/${NAME_ESCAPED}-${PLATFORM}-${ARCH}/${NAME_ESCAPED}.app" \
"release-builds/${DIR}/${DIR}/${NAME_ESCAPED}.app"
echo "Drag ${NAME}.app to Applications folder to install" > "release-builds/${DIR}/${DIR}/Notes.txt"
echo "@codeblaan at github" >> "release-builds/${DIR}/${DIR}/Notes.txt"
ln -s /Applications "release-builds/${DIR}/${DIR}/Applications"
hdiutil create \
-fs HFS+ \
-srcfolder "release-builds/${DIR}/${DIR}/" \
"release-builds/${DIR}/${DIR}.dmg"
zip -r "release-builds/${DIR}/${DIR}.dmg.zip" "release-builds/${DIR}/${DIR}.dmg"
cd "release-builds/${DIR}/"
shasum -a 256 "${DIR}.dmg.zip" > "${DIR}.dmg.zip.sha256"
shasum -c "${DIR}.dmg.zip.sha256"
open .
|
SQL
|
UTF-8
| 45,630 | 3.234375 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
-- Valentina Studio --
-- MySQL dump --
-- ---------------------------------------------------------
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-- ---------------------------------------------------------
-- CREATE DATABASE "sgc" ----------------------------------
CREATE DATABASE IF NOT EXISTS `sgc` CHARACTER SET utf8 COLLATE utf8_general_ci;
USE `sgc`;
-- ---------------------------------------------------------
-- CREATE TABLE "Bitacora" ---------------------------------
-- DROP TABLE "Bitacora" ---------------------------------------
DROP TABLE IF EXISTS `Bitacora` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Bitacora" -------------------------------------
CREATE TABLE `Bitacora` (
`BitacoraId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`FechaHora` DateTime NULL,
`CampanhaId` Int( 11 ) NOT NULL,
`PerfilId` Int( 11 ) NOT NULL,
`UsuarioId` VarChar( 255 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
PRIMARY KEY ( `BitacoraId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 3;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Perfil" -----------------------------------
-- DROP TABLE "Perfil" -----------------------------------------
DROP TABLE IF EXISTS `Perfil` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Perfil" ---------------------------------------
CREATE TABLE `Perfil` (
`PerfilId` Int( 11 ) NOT NULL,
`FotoReferencia` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
`PlayaId` Int( 11 ) NOT NULL,
`DescMedicion` VarChar( 200 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
PRIMARY KEY ( `PerfilId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Campanha" ---------------------------------
-- DROP TABLE "Campanha" ---------------------------------------
DROP TABLE IF EXISTS `Campanha` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Campanha" -------------------------------------
CREATE TABLE `Campanha` (
`CampanhaId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Estado` Bit( 1 ) NOT NULL,
`TamanhoMedicion` Decimal( 3, 0 ) NOT NULL,
`PlayaId` Int( 255 ) NOT NULL,
`UsuarioId` Int( 11 ) NOT NULL COMMENT 'ID del Usuario Encargado
',
`FechaHora` DateTime NOT NULL,
PRIMARY KEY ( `CampanhaId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 9;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Ciudad" -----------------------------------
-- DROP TABLE "Ciudad" -----------------------------------------
DROP TABLE IF EXISTS `Ciudad` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Ciudad" ---------------------------------------
CREATE TABLE `Ciudad` (
`CiudadId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Nombre` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`RegionId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `CiudadId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 9;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "FotoPlaya" --------------------------------
-- DROP TABLE "FotoPlaya" --------------------------------------
DROP TABLE IF EXISTS `FotoPlaya` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "FotoPlaya" ------------------------------------
CREATE TABLE `FotoPlaya` (
`FotoId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Autor` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`FechaHora` DateTime NOT NULL,
`Descripcion` VarChar( 200 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`PlayaId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `FotoId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 1;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "IntegranteBitacora" -----------------------
-- DROP TABLE "IntegranteBitacora" -----------------------------
DROP TABLE IF EXISTS `IntegranteBitacora` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "IntegranteBitacora" ---------------------------
CREATE TABLE `IntegranteBitacora` (
`IntegranteBitacoraId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`UsuarioId` Int( 11 ) NOT NULL,
`BitacoraId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `IntegranteBitacoraId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 1;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Marea" ------------------------------------
-- DROP TABLE "Marea" ------------------------------------------
DROP TABLE IF EXISTS `Marea` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Marea" ----------------------------------------
CREATE TABLE `Marea` (
`MareaId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Altura` Decimal( 2, 2 ) NOT NULL,
`FechaHora` DateTime NOT NULL,
`Tipo` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`CiudadId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `MareaId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 1;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Medicion" ---------------------------------
-- DROP TABLE "Medicion" ---------------------------------------
DROP TABLE IF EXISTS `Medicion` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Medicion" -------------------------------------
CREATE TABLE `Medicion` (
`Estacion` Int( 11 ) NOT NULL,
`Comentario` VarChar( 500 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
`MedicionId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`EstacionMedicion` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`BitacoraId` Int( 11 ) NOT NULL,
`DistVertical` Decimal( 2, 2 ) NOT NULL,
`DistHorizontal` Decimal( 2, 2 ) NOT NULL,
PRIMARY KEY ( `MedicionId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 1;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "OleajePlaya" ------------------------------
-- DROP TABLE "OleajePlaya" ------------------------------------
DROP TABLE IF EXISTS `OleajePlaya` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "OleajePlaya" ----------------------------------
CREATE TABLE `OleajePlaya` (
`FechaHora` DateTime NOT NULL,
`Direccion` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`OleajePlayaId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Periodo` Int( 11 ) NOT NULL,
`PlayaId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `OleajePlayaId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 1;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Pais" -------------------------------------
-- DROP TABLE "Pais" -------------------------------------------
DROP TABLE IF EXISTS `Pais` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Pais" -----------------------------------------
CREATE TABLE `Pais` (
`PaisId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Nombre` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
PRIMARY KEY ( `PaisId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 13;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Playa" ------------------------------------
-- DROP TABLE "Playa" ------------------------------------------
DROP TABLE IF EXISTS `Playa` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Playa" ----------------------------------------
CREATE TABLE `Playa` (
`PlayaId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Nombre` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`Orientacion` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`DescGeneral` VarChar( 500 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`DescTecnica` VarChar( 500 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`FotoSuperior` VarChar( 500 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
`FotoPrincipal` VarChar( 500 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
`CiudadId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `PlayaId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 17;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Pronostico" -------------------------------
-- DROP TABLE "Pronostico" -------------------------------------
DROP TABLE IF EXISTS `Pronostico` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Pronostico" -----------------------------------
CREATE TABLE `Pronostico` (
`PronosticoId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Fecha` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
`TiempoId` Int( 11 ) NOT NULL,
`CiudadId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `PronosticoId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 1;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Region" -----------------------------------
-- DROP TABLE "Region" -----------------------------------------
DROP TABLE IF EXISTS `Region` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Region" ---------------------------------------
CREATE TABLE `Region` (
`RegionId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Nombre` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`PaisId` Int( 11 ) NOT NULL,
PRIMARY KEY ( `RegionId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 10;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Tiempo" -----------------------------------
-- DROP TABLE "Tiempo" -----------------------------------------
DROP TABLE IF EXISTS `Tiempo` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Tiempo" ---------------------------------------
CREATE TABLE `Tiempo` (
`TiempoId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`Descripcion` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
PRIMARY KEY ( `TiempoId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 1;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE TABLE "Usuario" ----------------------------------
-- DROP TABLE "Usuario" ----------------------------------------
DROP TABLE IF EXISTS `Usuario` CASCADE;
-- -------------------------------------------------------------
-- CREATE TABLE "Usuario" --------------------------------------
CREATE TABLE `Usuario` (
`UsuarioId` Int( 11 ) AUTO_INCREMENT NOT NULL,
`NombreApellido` VarChar( 100 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL,
`Tipo` VarChar( 45 ) CHARACTER SET utf8 COLLATE utf8_general_ci NULL COMMENT 'Tipo de usuario: tecnico, jefe proyecto',
`Correo` VarChar( 255 ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
PRIMARY KEY ( `UsuarioId` ) )
CHARACTER SET = utf8
COLLATE = utf8_general_ci
ENGINE = InnoDB
AUTO_INCREMENT = 4;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- Dump data of "Bitacora" ---------------------------------
/*!40000 ALTER TABLE `Bitacora` DISABLE KEYS */
INSERT INTO `Bitacora`(`BitacoraId`,`FechaHora`,`CampanhaId`,`PerfilId`,`UsuarioId`) VALUES ( '1', '2017-07-10 00:00:00', '1', '0', '' );
INSERT INTO `Bitacora`(`BitacoraId`,`FechaHora`,`CampanhaId`,`PerfilId`,`UsuarioId`) VALUES ( '2', '2017-07-10 00:00:00', '1', '1', '' );
/*!40000 ALTER TABLE `Bitacora` ENABLE KEYS */
-- ---------------------------------------------------------
-- Dump data of "Perfil" -----------------------------------
/*!40000 ALTER TABLE `Perfil` DISABLE KEYS */
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '0', NULL, 'desc-med-perf-0', '1' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '1', NULL, 'desc-med-perf-1', '1' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '2', NULL, 'desc-med-perf-2', '1' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '3', NULL, 'desc-med-perf-3', '1' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '4', NULL, 'desc-med-perf-4', '1' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '5', NULL, 'desc-med-perf-5', '1' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '6', NULL, 'desc-med-perf-6', '2' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '7', NULL, 'desc-med-perf-7', '2' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '8', NULL, 'desc-med-perf-8', '2' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '9', NULL, 'desc-med-perf-9', '2' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '10', NULL, 'desc-med-perf-10', '2' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '11', NULL, 'desc-med-perf-11', '3' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '12', NULL, 'desc-med-perf-12', '3' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '13', NULL, 'desc-med-perf-13', '3' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '14', NULL, 'desc-med-perf-14', '4' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '15', NULL, 'desc-med-perf-15', '4' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '16', NULL, 'desc-med-perf-16', '4' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '17', NULL, 'desc-med-perf-17', '13' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '18', NULL, 'desc-med-perf-18', '13' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '19', NULL, 'desc-med-perf-19', '13' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '20', NULL, 'desc-med-perf-20', '14' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '21', NULL, 'desc-med-perf-21', '14' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '22', NULL, 'desc-med-perf-22', '14' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '23', NULL, 'desc-med-perf-23', '15' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '24', NULL, 'desc-med-perf-24', '15' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '25', NULL, 'desc-med-perf-25', '15' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '26', NULL, 'desc-med-perf-26', '16' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '27', NULL, 'desc-med-perf-27', '16' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '28', NULL, 'desc-med-perf-28', '16' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '29', NULL, 'desc-med-perf-29', '5' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '30', NULL, 'desc-med-perf-30', '5' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '31', NULL, 'desc-med-perf-31', '5' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '32', NULL, 'desc-med-perf-32', '6' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '33', NULL, 'desc-med-perf-33', '6' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '34', NULL, 'desc-med-perf-34', '6' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '35', NULL, 'desc-med-perf-35', '7' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '36', NULL, 'desc-med-perf-36', '7' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '37', NULL, 'desc-med-perf-37', '7' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '38', NULL, 'desc-med-perf-38', '8' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '39', NULL, 'desc-med-perf-39', '8' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '40', NULL, 'desc-med-perf-40', '8' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '41', NULL, 'desc-med-perf-41', '9' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '42', NULL, 'desc-med-perf-42', '9' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '43', NULL, 'desc-med-perf-43', '9' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '44', NULL, 'desc-med-perf-44', '10' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '45', NULL, 'desc-med-perf-45', '10' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '46', NULL, 'desc-med-perf-46', '10' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '47', NULL, 'desc-med-perf-47', '11' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '48', NULL, 'desc-med-perf-48', '11' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '49', NULL, 'desc-med-perf-49', '11' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '50', NULL, 'desc-med-perf-50', '12' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '51', NULL, 'desc-med-perf-51', '12' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '52', NULL, 'desc-med-perf-52', '12' );
INSERT INTO `Perfil`(`PerfilId`,`FotoReferencia`,`DescMedicion`,`PlayaId`) VALUES ( '53', NULL, 'desc-med-perf-53', '13' );
/*!40000 ALTER TABLE `Perfil` ENABLE KEYS */
-- ---------------------------------------------------------
-- Dump data of "Campanha" ---------------------------------
/*!40000 ALTER TABLE `Campanha` DISABLE KEYS */
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '1', '2017-07-09 00:00:00', '1', b'1', '3', '1' );
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '2', '2017-06-09 00:00:00', '1', b'0', '3', '1' );
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '3', '2017-05-09 00:00:00', '1', b'0', '3', '1' );
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '4', '2017-07-09 00:00:00', '1', b'1', '3', '2' );
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '5', '2017-06-09 00:00:00', '1', b'0', '3', '2' );
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '6', '2017-07-09 00:00:00', '1', b'1', '3', '3' );
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '7', '2017-06-09 00:00:00', '1', b'0', '3', '3' );
INSERT INTO `Campanha`(`CampanhaId`,`FechaHora`,`TamanhoMedicion`,`Estado`,`UsuarioId`,`PlayaId`) VALUES ( '8', '2017-05-09 00:00:00', '1', b'0', '3', '3' );
/*!40000 ALTER TABLE `Campanha` ENABLE KEYS */
-- ---------------------------------------------------------
-- Dump data of "Ciudad" -----------------------------------
/*!40000 ALTER TABLE `Ciudad` DISABLE KEYS */
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '1', 'vina', '1' );
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '2', 'valparaiso', '1' );
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '3', 'la serena', '2' );
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '4', 'coquimbo', '2' );
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '5', 'rio de janeiro', '8' );
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '6', 'niterói', '8' );
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '7', 'salvador ', '9' );
INSERT INTO `Ciudad`(`CiudadId`,`Nombre`,`RegionId`) VALUES ( '8', 'porto seguro', '9' );
/*!40000 ALTER TABLE `Ciudad` ENABLE KEYS */
-- ---------------------------------------------------------
-- Dump data of "FotoPlaya" --------------------------------
-- ---------------------------------------------------------
-- Dump data of "IntegranteBitacora" -----------------------
-- ---------------------------------------------------------
-- Dump data of "Marea" ------------------------------------
-- ---------------------------------------------------------
-- Dump data of "Medicion" ---------------------------------
-- ---------------------------------------------------------
-- Dump data of "OleajePlaya" ------------------------------
-- ---------------------------------------------------------
-- Dump data of "Pais" -------------------------------------
/*!40000 ALTER TABLE `Pais` DISABLE KEYS */
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '1', 'chile' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '2', 'argentina' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '3', 'brasil' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '4', 'perú' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '5', 'ecuador' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '6', 'colombia' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '7', 'venezuela' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '8', 'uruguay' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '9', 'panama' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '10', 'costa rica' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '11', 'cuba' );
INSERT INTO `Pais`(`PaisId`,`Nombre`) VALUES ( '12', 'puerto rico' );
/*!40000 ALTER TABLE `Pais` ENABLE KEYS */
-- ---------------------------------------------------------
-- Dump data of "Playa" ------------------------------------
/*!40000 ALTER TABLE `Playa` DISABLE KEYS */
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '1', 'la salinas', 'orientacion-playa1', 'descripcion-general-playa1', 'descripcion-tecnica-playa1', NULL, NULL, '1' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '2', 'reñaca', 'orientacion-playa2', 'descripcion-general-playa2', 'descripcion-tecnica-playa2', NULL, NULL, '1' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '3', 'torpeñaca', 'orientacion-playa3', 'descripcion-general-playa3', 'descripcion-tecnica-playa3', NULL, NULL, '2' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '4', 'san mateo beach', 'orientacion-playa4', 'descripcion-general-playa4', 'descripcion-tecnica-playa4', NULL, NULL, '2' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '5', 'icarai', 'orientacion-icarai', 'desc-gral-icarai', 'desc-tec-icarai', NULL, NULL, '6' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '6', 'camboinhas', 'orientacio-camboinhas', 'desc-gral-camboinhas', 'desc-tec-camboinhas', NULL, NULL, '6' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '7', 'ipanema', 'orientacio-ipanema', 'desc-gral-ipanema', 'desc-tec-ipanema', NULL, NULL, '5' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '8', 'copacabana', 'orientacio-copacabana', 'desc-gral-copacabana', 'desc-tec-copacabana', NULL, NULL, '5' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '9', 'stella maris', 'orientacio-stella maris', 'desc-gral-stella maris', 'desc-tec-stella maris', NULL, NULL, '7' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '10', 'porto da barra', 'orientacio-porto da barra', 'desc-gral-porto da barra', 'desc-tec-porto da barra', NULL, NULL, '7' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '11', 'trancoso', 'orientacio-trancoso', 'desc-gral-trancoso', 'desc-tec-trancoso', NULL, NULL, '8' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '12', 'los coqueiros', 'orientacio-los coqueiros', 'desc-gral-los coqueiros', 'desc-tec-los coqueiros', NULL, NULL, '8' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '13', 'peñuelas', 'orientacio-peñuelas', 'desc-gral-peñuelas', 'desc-tec-peñuelas', NULL, NULL, '4' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '14', 'la herradura', 'orientacio-la herradura', 'desc-gral-la herradura', 'desc-tec-la herradura', NULL, NULL, '4' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '15', 'la serena', 'orientacio-la serena', 'desc-gral-la serena', 'desc-tec-la serena', NULL, NULL, '3' );
INSERT INTO `Playa`(`PlayaId`,`Nombre`,`Orientacion`,`DescGeneral`,`DescTecnica`,`FotoSuperior`,`FotoPrincipal`,`CiudadId`) VALUES ( '16', 'cuatro esquinas', 'orientacio-cuatro esquinas', 'desc-gral-cuatro esquinas', 'desc-tec-cuatro esquinas', NULL, NULL, '3' );
/*!40000 ALTER TABLE `Playa` ENABLE KEYS */
-- ---------------------------------------------------------
-- Dump data of "Pronostico" -------------------------------
-- ---------------------------------------------------------
-- Dump data of "Region" -----------------------------------
/*!40000 ALTER TABLE `Region` DISABLE KEYS */
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '1', 'Valparaiso', '1' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '2', 'Coquimbo', '1' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '3', 'Antofagasta', '1' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '4', 'Ar-01', '2' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '5', 'Ar-02', '2' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '6', 'pe-01', '4' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '7', 'pe-02', '4' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '8', 'rio de janeiro', '3' );
INSERT INTO `Region`(`RegionId`,`Nombre`,`PaisId`) VALUES ( '9', 'bahia', '3' );
/*!40000 ALTER TABLE `Region` ENABLE KEYS */
-- ---------------------------------------------------------
-- Dump data of "Tiempo" -----------------------------------
-- ---------------------------------------------------------
-- Dump data of "Usuario" ----------------------------------
/*!40000 ALTER TABLE `Usuario` DISABLE KEYS */
INSERT INTO `Usuario`(`UsuarioId`,`NombreApellido`,`Tipo`,`Correo`) VALUES ( '1', 'claudio araya', 'tecnico', 'claudio.araya@alumnos.uv.cl' );
INSERT INTO `Usuario`(`UsuarioId`,`NombreApellido`,`Tipo`,`Correo`) VALUES ( '2', 'daniel toro', 'tecnico', 'daniel.toro@alumnos.uv.cl' );
INSERT INTO `Usuario`(`UsuarioId`,`NombreApellido`,`Tipo`,`Correo`) VALUES ( '3', 'sebastian rubio', 'jp', 'sebastian.rubio@laumnos.uv.cl' );
/*!40000 ALTER TABLE `Usuario` ENABLE KEYS */
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_BITACORA_SGC_CAMPANHA1_idx" --------
-- CREATE INDEX "fk_SGC_BITACORA_SGC_CAMPANHA1_idx" ------------
CREATE INDEX `fk_SGC_BITACORA_SGC_CAMPANHA1_idx` USING BTREE ON `Bitacora`( `CampanhaId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_BITACORA_SGC_PERFIL1_idx" ----------
-- CREATE INDEX "fk_SGC_BITACORA_SGC_PERFIL1_idx" --------------
CREATE INDEX `fk_SGC_BITACORA_SGC_PERFIL1_idx` USING BTREE ON `Bitacora`( `PerfilId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_PERFIL_SGC_PLAYA1_idx" -------------
-- CREATE INDEX "fk_SGC_PERFIL_SGC_PLAYA1_idx" -----------------
CREATE INDEX `fk_SGC_PERFIL_SGC_PLAYA1_idx` USING BTREE ON `Perfil`( `PlayaId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_CAMPANHA_SGC_USUARIO1_idx" ---------
-- CREATE INDEX "fk_SGC_CAMPANHA_SGC_USUARIO1_idx" -------------
CREATE INDEX `fk_SGC_CAMPANHA_SGC_USUARIO1_idx` USING BTREE ON `Campanha`( `UsuarioId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "lnk_SGC_PLAYA_SGC_CAMPANHA" ---------------
-- CREATE INDEX "lnk_SGC_PLAYA_SGC_CAMPANHA" -------------------
CREATE INDEX `lnk_SGC_PLAYA_SGC_CAMPANHA` USING BTREE ON `Campanha`( `PlayaId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_CIUDAD_SGC_REGION1_idx" ------------
-- CREATE INDEX "fk_SGC_CIUDAD_SGC_REGION1_idx" ----------------
CREATE INDEX `fk_SGC_CIUDAD_SGC_REGION1_idx` USING BTREE ON `Ciudad`( `RegionId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_FOTO_PLAYA_SGC_PLAYA1_idx" ---------
-- CREATE INDEX "fk_SGC_FOTO_PLAYA_SGC_PLAYA1_idx" -------------
CREATE INDEX `fk_SGC_FOTO_PLAYA_SGC_PLAYA1_idx` USING BTREE ON `FotoPlaya`( `PlayaId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1_idx"
-- CREATE INDEX "fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1_idx"
CREATE INDEX `fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1_idx` USING BTREE ON `IntegranteBitacora`( `BitacoraId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_INTEGRANTES_BITACORA_SGC_USUARIO1_idx"
-- CREATE INDEX "fk_SGC_INTEGRANTES_BITACORA_SGC_USUARIO1_idx" -
CREATE INDEX `fk_SGC_INTEGRANTES_BITACORA_SGC_USUARIO1_idx` USING BTREE ON `IntegranteBitacora`( `UsuarioId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_MAREA_SGC_CIUDAD1_idx" -------------
-- CREATE INDEX "fk_SGC_MAREA_SGC_CIUDAD1_idx" -----------------
CREATE INDEX `fk_SGC_MAREA_SGC_CIUDAD1_idx` USING BTREE ON `Marea`( `CiudadId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_MEDICION_SGC_BITACORA1_idx" --------
-- CREATE INDEX "fk_SGC_MEDICION_SGC_BITACORA1_idx" ------------
CREATE INDEX `fk_SGC_MEDICION_SGC_BITACORA1_idx` USING BTREE ON `Medicion`( `BitacoraId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1_idx" -------
-- CREATE INDEX "fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1_idx" -----------
CREATE INDEX `fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1_idx` USING BTREE ON `OleajePlaya`( `PlayaId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_PLAYA_SGC_CIUDAD1_idx" -------------
-- CREATE INDEX "fk_SGC_PLAYA_SGC_CIUDAD1_idx" -----------------
CREATE INDEX `fk_SGC_PLAYA_SGC_CIUDAD1_idx` USING BTREE ON `Playa`( `CiudadId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1_idx" -
-- CREATE INDEX "fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1_idx" -----
CREATE INDEX `fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1_idx` USING BTREE ON `Pronostico`( `CiudadId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1_idx" -
-- CREATE INDEX "fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1_idx" -----
CREATE INDEX `fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1_idx` USING BTREE ON `Pronostico`( `TiempoId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE INDEX "fk_SGC_REGION_SGC_PAIS_idx" ---------------
-- CREATE INDEX "fk_SGC_REGION_SGC_PAIS_idx" -------------------
CREATE INDEX `fk_SGC_REGION_SGC_PAIS_idx` USING BTREE ON `Region`( `PaisId` );
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_BITACORA_SGC_CAMPANHA1" -------------
-- DROP LINK "fk_SGC_BITACORA_SGC_CAMPANHA1" -------------------
ALTER TABLE `Bitacora` DROP FOREIGN KEY `fk_SGC_BITACORA_SGC_CAMPANHA1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_BITACORA_SGC_CAMPANHA1" -----------------
ALTER TABLE `Bitacora`
ADD CONSTRAINT `fk_SGC_BITACORA_SGC_CAMPANHA1` FOREIGN KEY ( `CampanhaId` )
	REFERENCES `Campanha`( `CampanhaId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_BITACORA_SGC_PERFIL1" ---------------
-- DROP LINK "fk_SGC_BITACORA_SGC_PERFIL1" ---------------------
ALTER TABLE `Bitacora` DROP FOREIGN KEY `fk_SGC_BITACORA_SGC_PERFIL1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_BITACORA_SGC_PERFIL1" -------------------
ALTER TABLE `Bitacora`
ADD CONSTRAINT `fk_SGC_BITACORA_SGC_PERFIL1` FOREIGN KEY ( `PerfilId` )
	REFERENCES `Perfil`( `PerfilId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "lnk_SGC_PLAYA_SGC_CAMPANHA" ----------------
-- DROP LINK "lnk_SGC_PLAYA_SGC_CAMPANHA" ----------------------
ALTER TABLE `Campanha` DROP FOREIGN KEY `lnk_SGC_PLAYA_SGC_CAMPANHA`;
-- -------------------------------------------------------------
-- CREATE LINK "lnk_SGC_PLAYA_SGC_CAMPANHA" --------------------
ALTER TABLE `Campanha`
ADD CONSTRAINT `lnk_SGC_PLAYA_SGC_CAMPANHA` FOREIGN KEY ( `PlayaId` )
	REFERENCES `Playa`( `PlayaId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "lnk_SGC_USUARIO_SGC_CAMPANHA" --------------
-- DROP LINK "lnk_SGC_USUARIO_SGC_CAMPANHA" --------------------
ALTER TABLE `Campanha` DROP FOREIGN KEY `lnk_SGC_USUARIO_SGC_CAMPANHA`;
-- -------------------------------------------------------------
-- CREATE LINK "lnk_SGC_USUARIO_SGC_CAMPANHA" ------------------
ALTER TABLE `Campanha`
ADD CONSTRAINT `lnk_SGC_USUARIO_SGC_CAMPANHA` FOREIGN KEY ( `UsuarioId` )
	REFERENCES `Usuario`( `UsuarioId` )
ON DELETE Cascade
ON UPDATE Cascade;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_CIUDAD_SGC_REGION1" -----------------
-- DROP LINK "fk_SGC_CIUDAD_SGC_REGION1" -----------------------
ALTER TABLE `Ciudad` DROP FOREIGN KEY `fk_SGC_CIUDAD_SGC_REGION1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_CIUDAD_SGC_REGION1" ---------------------
ALTER TABLE `Ciudad`
ADD CONSTRAINT `fk_SGC_CIUDAD_SGC_REGION1` FOREIGN KEY ( `RegionId` )
	REFERENCES `Region`( `RegionId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_FOTO_PLAYA_SGC_PLAYA1" --------------
-- DROP LINK "fk_SGC_FOTO_PLAYA_SGC_PLAYA1" --------------------
ALTER TABLE `FotoPlaya` DROP FOREIGN KEY `fk_SGC_FOTO_PLAYA_SGC_PLAYA1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_FOTO_PLAYA_SGC_PLAYA1" ------------------
ALTER TABLE `FotoPlaya`
ADD CONSTRAINT `fk_SGC_FOTO_PLAYA_SGC_PLAYA1` FOREIGN KEY ( `PlayaId` )
	REFERENCES `Playa`( `PlayaId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1" -
-- DROP LINK "fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1" -------
ALTER TABLE `IntegranteBitacora` DROP FOREIGN KEY `fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1" -----
ALTER TABLE `IntegranteBitacora`
ADD CONSTRAINT `fk_SGC_INTEGRANTES_BITACORA_SGC_BITACORA1` FOREIGN KEY ( `BitacoraId` )
	REFERENCES `Bitacora`( `BitacoraId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_MAREA_SGC_CIUDAD1" ------------------
-- DROP LINK "fk_SGC_MAREA_SGC_CIUDAD1" ------------------------
ALTER TABLE `Marea` DROP FOREIGN KEY `fk_SGC_MAREA_SGC_CIUDAD1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_MAREA_SGC_CIUDAD1" ----------------------
ALTER TABLE `Marea`
ADD CONSTRAINT `fk_SGC_MAREA_SGC_CIUDAD1` FOREIGN KEY ( `CiudadId` )
	REFERENCES `Ciudad`( `CiudadId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_MEDICION_SGC_BITACORA1" -------------
-- DROP LINK "fk_SGC_MEDICION_SGC_BITACORA1" -------------------
ALTER TABLE `Medicion` DROP FOREIGN KEY `fk_SGC_MEDICION_SGC_BITACORA1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_MEDICION_SGC_BITACORA1" -----------------
ALTER TABLE `Medicion`
ADD CONSTRAINT `fk_SGC_MEDICION_SGC_BITACORA1` FOREIGN KEY ( `BitacoraId` )
	REFERENCES `Bitacora`( `BitacoraId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1" ------------
-- DROP LINK "fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1" ------------------
ALTER TABLE `OleajePlaya` DROP FOREIGN KEY `fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1" ----------------
ALTER TABLE `OleajePlaya`
ADD CONSTRAINT `fk_SGC_OLEAJE_PLAYA_SGC_PLAYA1` FOREIGN KEY ( `PlayaId` )
	REFERENCES `Playa`( `PlayaId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_PERFIL_SGC_PLAYA1" ------------------
-- DROP LINK "fk_SGC_PERFIL_SGC_PLAYA1" ------------------------
ALTER TABLE `Perfil` DROP FOREIGN KEY `fk_SGC_PERFIL_SGC_PLAYA1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_PERFIL_SGC_PLAYA1" ----------------------
ALTER TABLE `Perfil`
ADD CONSTRAINT `fk_SGC_PERFIL_SGC_PLAYA1` FOREIGN KEY ( `PlayaId` )
	REFERENCES `Playa`( `PlayaId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_PLAYA_SGC_CIUDAD1" ------------------
-- DROP LINK "fk_SGC_PLAYA_SGC_CIUDAD1" ------------------------
ALTER TABLE `Playa` DROP FOREIGN KEY `fk_SGC_PLAYA_SGC_CIUDAD1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_PLAYA_SGC_CIUDAD1" ----------------------
ALTER TABLE `Playa`
ADD CONSTRAINT `fk_SGC_PLAYA_SGC_CIUDAD1` FOREIGN KEY ( `CiudadId` )
	REFERENCES `Ciudad`( `CiudadId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1" ------
-- DROP LINK "fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1" ------------
ALTER TABLE `Pronostico` DROP FOREIGN KEY `fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1" ----------
ALTER TABLE `Pronostico`
ADD CONSTRAINT `fk_SGC_PRONOSTICO_TIEMPO_SGC_CIUDAD1` FOREIGN KEY ( `CiudadId` )
	REFERENCES `Ciudad`( `CiudadId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1" ------
-- DROP LINK "fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1" ------------
ALTER TABLE `Pronostico` DROP FOREIGN KEY `fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1" ----------
ALTER TABLE `Pronostico`
ADD CONSTRAINT `fk_SGC_PRONOSTICO_TIEMPO_SGC_TIEMPO1` FOREIGN KEY ( `TiempoId` )
	REFERENCES `Tiempo`( `TiempoId` )
ON DELETE No Action
ON UPDATE No Action;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
-- CREATE LINK "fk_SGC_REGION_SGC_PAIS" --------------------
-- DROP LINK "fk_SGC_REGION_SGC_PAIS" --------------------------
ALTER TABLE `Region` DROP FOREIGN KEY `fk_SGC_REGION_SGC_PAIS`;
-- -------------------------------------------------------------
-- CREATE LINK "fk_SGC_REGION_SGC_PAIS" ------------------------
ALTER TABLE `Region`
ADD CONSTRAINT `fk_SGC_REGION_SGC_PAIS` FOREIGN KEY ( `PaisId` )
	REFERENCES `Pais`( `PaisId` )
ON DELETE Cascade
ON UPDATE Cascade;
-- -------------------------------------------------------------
-- ---------------------------------------------------------
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- ---------------------------------------------------------
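-- ---------------------------------------------------------
-- Illustrative query (editorial example, not part of the original dump): one way
-- to list every beach with its city, region and country using the foreign keys
-- declared above. Table and column names come from the schema; the query itself
-- is only a sketch and is left commented out so it does not affect a restore.
-- SELECT p.Nombre AS Playa, c.Nombre AS Ciudad, r.Nombre AS Region, pa.Nombre AS Pais
-- FROM Playa p
-- JOIN Ciudad c ON c.CiudadId = p.CiudadId
-- JOIN Region r ON r.RegionId = c.RegionId
-- JOIN Pais pa ON pa.PaisId = r.PaisId;
-- ---------------------------------------------------------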
|
TypeScript
|
UTF-8
| 316 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
import EventHandler from './EventHandler';
export default class DOMEventHandler implements EventHandler {
addEventListenerToClass(clazz: string, event: string, fn: any) {
const elements: any = document.querySelectorAll(clazz);
for (const element of elements) {
element.addEventListener(event, fn);
}
}
}
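// Illustrative usage (editorial sketch, not part of the original file): the "clazz"
// argument is passed straight to document.querySelectorAll, so any CSS selector works.
// The '.btn' selector below is a made-up example.
// const handler = new DOMEventHandler();
// handler.addEventListenerToClass('.btn', 'click', () => console.log('clicked'));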
|
C++
|
UTF-8
| 24,677 | 3.203125 | 3 |
[] |
no_license
|
#include <iostream>
#include <cmath>    // fmod
#include <cctype>   // isdigit
#include <string>   // std::stod, std::stoi, std::to_string
#include "Interpreter.h"
void Interpreter::type_mismatch_error(Token* token) {
std::string message = "Type mismatch.";
std::string file_path = token->file;
SyntaxError(file_path, token->line, token->column, message).cast();
}
void Interpreter::value_error(Token* token) {
std::string message = "Value cannot be converted to " + token->value + ".";
std::string file_path = token->file;
ValueError(file_path, token->line, token->column, message).cast();
}
Interpreter::Interpreter() {
memory_block = new Memory(0, NULL);
semantic_analyzer = new SemanticAnalyzer();
}
void Interpreter::enter_new_memory_block() {
memory_block = new Memory(memory_block->memory_level + 1, memory_block);
}
void Interpreter::leave_memory_block() {
if(memory_block->enclosing_memory_block->memory_level != 0) {
memory_block = memory_block->enclosing_memory_block;
}
}
MemoryValue* Interpreter::visit(AST* node) {
if(BinaryOperator* ast = dynamic_cast<BinaryOperator*>(node)) {
return visit_binary_op(ast);
} else if(UnaryOperator* ast = dynamic_cast<UnaryOperator*>(node)) {
return visit_unary_op(ast);
} else if(Value* ast = dynamic_cast<Value*>(node)) {
return visit_value(ast);
} else if(Compare* ast = dynamic_cast<Compare*>(node)) {
return visit_compare(ast);
} else if(Compound* ast = dynamic_cast<Compound*>(node)) {
return visit_compound(ast);
} else if(Assign* ast = dynamic_cast<Assign*>(node)) {
return visit_assign(ast);
} else if(Variable* ast = dynamic_cast<Variable*>(node)) {
return visit_variable(ast);
} else if(NoOperator* ast = dynamic_cast<NoOperator*>(node)) {
return visit_no_operator(ast);
} else if(DoubleCondition* ast = dynamic_cast<DoubleCondition*>(node)) {
return visit_double_condition(ast);
} else if(Negation* ast = dynamic_cast<Negation*>(node)) {
return visit_negation(ast);
} else if(VariableDeclaration* ast = dynamic_cast<VariableDeclaration*>(node)) {
return visit_var_declaration(ast);
} else if(IfCondition* ast = dynamic_cast<IfCondition*>(node)) {
return visit_if_condition(ast);
} else if(Print* ast = dynamic_cast<Print*>(node)) {
return visit_print(ast);
} else if(ArrayInit* ast = dynamic_cast<ArrayInit*>(node)) {
return visit_array_init(ast);
} else if(ArrayAccess* ast = dynamic_cast<ArrayAccess*>(node)) {
return visit_array_access(ast);
} else if(FunctionInit* ast = dynamic_cast<FunctionInit*>(node)) {
return visit_function_init(ast);
} else if(FunctionCall* ast = dynamic_cast<FunctionCall*>(node)) {
return visit_function_call(ast);
} else if(Return* ast = dynamic_cast<Return*>(node)) {
return visit_return(ast);
} else if(WhileLoop* ast = dynamic_cast<WhileLoop*>(node)) {
return visit_while_loop(ast);
} else if(CastValue* ast = dynamic_cast<CastValue*>(node)) {
return visit_cast_value(ast);
} else if(Import* ast = dynamic_cast<Import*>(node)) {
return visit_import(ast);
} else if(ObjectDive* ast = dynamic_cast<ObjectDive*>(node)) {
return visit_object_dive(ast);
}
std::string message = "Unknown AST branch.";
int line = node->token->line;
int column = node->token->column;
std::string file_path = node->token->file;
Error(file_path, line, column, message).cast();
}
MemoryValue* Interpreter::visit_binary_op(BinaryOperator* op) {
SingularMemoryValue* left = (SingularMemoryValue*) visit(op->left);
SingularMemoryValue* right = (SingularMemoryValue*) visit(op->right);
switch(op->op->type) {
case TokenType::PLUS:
{
if(left->type == Type::STRING) {
if(right->type != Type::STRING) {
type_mismatch_error(op->right->token);
}
std::string a = left->value;
std::string b = right->value;
return new SingularMemoryValue(a + b, Type::STRING);
} else if(left->type == Type::FLOAT) {
if(right->type != Type::FLOAT) {
type_mismatch_error(op->right->token);
}
double x = std::stod(left->value);
double y = std::stod(right->value);
return new SingularMemoryValue(std::to_string(x + y), Type::FLOAT);
}
type_mismatch_error(op->left->token);
}
case TokenType::MINUS:
{
if(left->type != Type::FLOAT || right->type != Type::FLOAT) {
type_mismatch_error(op->right->token);
}
double x = std::stod(left->value);
double y = std::stod(right->value);
return new SingularMemoryValue(std::to_string(x - y), Type::FLOAT);
}
case TokenType::DIV:
{
if(left->type != Type::FLOAT || right->type != Type::FLOAT) {
type_mismatch_error(op->right->token);
}
double x = std::stod(left->value);
double y = std::stod(right->value);
return new SingularMemoryValue(std::to_string(x / y), Type::FLOAT);
}
case TokenType::MULT:
{
if(left->type != Type::FLOAT || right->type != Type::FLOAT) {
type_mismatch_error(op->right->token);
}
double x = std::stod(left->value);
double y = std::stod(right->value);
return new SingularMemoryValue(std::to_string(x * y), Type::FLOAT);
}
case TokenType::INT_DIV:
{
if(left->type != Type::FLOAT || right->type != Type::FLOAT) {
type_mismatch_error(op->right->token);
}
int x = std::stoi(left->value);
int y = std::stoi(right->value);
return new SingularMemoryValue(std::to_string(x / y), Type::FLOAT);
}
case TokenType::MODULO:
{
if(left->type != Type::FLOAT || right->type != Type::FLOAT) {
type_mismatch_error(op->right->token);
}
double x = std::stod(left->value);
double y = std::stod(right->value);
return new SingularMemoryValue(std::to_string(fmod(x, y)), Type::FLOAT);
}
}
}
SingularMemoryValue* Interpreter::visit_unary_op(UnaryOperator* op) {
SingularMemoryValue* expr = (SingularMemoryValue*) visit(op->expr);
if(op->op->type_of(TokenType::MINUS)) {
if(expr->type == Type::FLOAT) {
double value = std::stod(expr->value);
return new SingularMemoryValue(std::to_string(-value), Type::FLOAT);
} else {
type_mismatch_error(op->expr->token);
}
}
return expr;
}
SingularMemoryValue* Interpreter::visit_value(Value* val) {
Type type;
if(val->token->type_of(TokenType::FLOAT)) {
type = Type::FLOAT;
} else if(val->token->type_of(TokenType::BOOLEAN)) {
type = Type::BOOLEAN;
} else if(val->token->type_of(TokenType::STRING)) {
type = Type::STRING;
} else if(val->token->type_of(TokenType::NONE)) {
type = Type::NONE;
}
return new SingularMemoryValue(val->value, type);
}
SingularMemoryValue* Interpreter::visit_compare(Compare* c) {
for(int i = 0; i < c->operators.size(); i++) {
Token* op = c->operators.at(i);
AST* left = c->comparables[i];
AST* right = c->comparables[i + 1];
SingularMemoryValue* left_memory_value = (SingularMemoryValue*) visit(left);
SingularMemoryValue* right_memory_value = (SingularMemoryValue*) visit(right);
std::string left_value = left_memory_value->value;
std::string right_value = right_memory_value->value;
if(op->type_of(TokenType::EQUALS)) {
if(left_memory_value->type == Type::FLOAT && right_memory_value->type == Type::FLOAT) {
double left_val = std::stod(left_value);
double right_val = std::stod(right_value);
if(left_val != right_val) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else if(left_value != right_value) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else if(op->type_of(TokenType::NOT_EQUALS)) {
if(left_memory_value->type == Type::FLOAT && right_memory_value->type == Type::FLOAT) {
double left_val = std::stod(left_value);
double right_val = std::stod(right_value);
if(left_val == right_val) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else if(left_value == right_value) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else if(op->type_of(TokenType::MORE_OR_EQ)) {
if(left_memory_value->type == Type::FLOAT && right_memory_value->type == Type::FLOAT) {
double left_val = std::stod(left_value);
double right_val = std::stod(right_value);
if(left_val < right_val) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else {
type_mismatch_error(left->token);
}
} else if(op->type_of(TokenType::LESS_OR_EQ)) {
if(left_memory_value->type == Type::FLOAT && right_memory_value->type == Type::FLOAT) {
double left_val = std::stod(left_value);
double right_val = std::stod(right_value);
if(left_val > right_val) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else {
type_mismatch_error(left->token);
}
} else if(op->type_of(TokenType::LESS)) {
if(left_memory_value->type == Type::FLOAT && right_memory_value->type == Type::FLOAT) {
double left_val = std::stod(left_value);
double right_val = std::stod(right_value);
if(left_val >= right_val) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else {
type_mismatch_error(left->token);
}
} else if(op->type_of(TokenType::MORE)) {
if(left_memory_value->type == Type::FLOAT && right_memory_value->type == Type::FLOAT) {
double left_val = std::stod(left_value);
double right_val = std::stod(right_value);
if(left_val <= right_val) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else {
type_mismatch_error(left->token);
}
}
}
return new SingularMemoryValue(Values::TRUE, Type::BOOLEAN);
}
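// Editorial note: visit_compound executes a block of statements. At the global level a new
// memory scope is entered lazily; inside a function body the first value produced by a
// Return statement is propagated upward and the scope is left immediately. When the block
// is the top-level program body (memory level 1), its scope is wrapped in an Object, which
// is what visit_import stores for imported files.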
MemoryValue* Interpreter::visit_compound(Compound* comp) {
if(memory_block->memory_level == 0) {
enter_new_memory_block();
}
for(AST* node : comp->children) {
if(Return* ret = dynamic_cast<Return*>(node)) {
if(comp->inside_func) {
MemoryValue* return_value = visit_return(ret);
leave_memory_block();
return return_value;
}
std::string message = "Return statement without function declaration.";
int line = ret->token->line;
int column = ret->token->column;
std::string file_path = ret->token->file;
SyntaxError(file_path, line, column, message).cast();
}
MemoryValue* value = visit(node);
if(comp->inside_func && value != NULL) {
leave_memory_block();
return value;
}
}
leave_memory_block();
if(memory_block->memory_level == 1) {
Memory* object_memory = memory_block;
return new Object(object_memory);
}
return NULL;
}
MemoryValue* Interpreter::visit_assign(Assign* assign) {
AST* left = assign->left;
if(Variable* var = dynamic_cast<Variable*>(left)) {
std::string var_name = var->value;
memory_block->put(var_name, visit(assign->right));
} else if(ArrayAccess* arr_acc = dynamic_cast<ArrayAccess*>(left)) {
Array* arr = (Array*) visit(arr_acc->array);
SingularMemoryValue* index = (SingularMemoryValue*) visit(arr_acc->index);
if(index->type != Type::FLOAT) {
type_mismatch_error(arr_acc->index->token);
}
MemoryValue* new_val = visit(assign->right);
arr->elements.at(std::stoi(index->value)) = new_val;
}
return NULL;
}
MemoryValue* Interpreter::visit_variable(Variable* var) {
MemoryValue* val = memory_block->get(var->value, false);
if(val != NULL) {
return val;
} else {
std::string message = "Variable has not been initialized.";
int line = var->token->line;
int column = var->token->column;
std::string file_path = var->token->file;
NameError(file_path, line, column, message).cast();
}
}
MemoryValue* Interpreter::visit_no_operator(NoOperator* no_op) {
return NULL;
}
SingularMemoryValue* Interpreter::visit_double_condition(DoubleCondition* cond) {
std::string left_value = ((SingularMemoryValue*) visit(cond->left))->value;
std::string right_value = ((SingularMemoryValue*) visit(cond->right))->value;
if(cond->token->type_of(TokenType::AND)) {
if(left_value == Values::TRUE && right_value == Values::TRUE) {
return new SingularMemoryValue(Values::TRUE, Type::BOOLEAN);
} else {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
} else if(cond->token->type_of(TokenType::OR)) {
if(left_value == Values::TRUE || right_value == Values::TRUE) {
return new SingularMemoryValue(Values::TRUE, Type::BOOLEAN);
} else {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
}
}
SingularMemoryValue* Interpreter::visit_negation(Negation* neg) {
SingularMemoryValue* value = (SingularMemoryValue*) visit(neg->statement);
if(value->type != Type::BOOLEAN) {
type_mismatch_error(neg->statement->token);
}
if(value->value == Values::TRUE) {
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
} else if(value->value == Values::FALSE) {
return new SingularMemoryValue(Values::TRUE, Type::BOOLEAN);
}
}
MemoryValue* Interpreter::visit_var_declaration(VariableDeclaration* decl) {
for(Assign* assignment : decl->assignments) {
visit(assignment);
}
return NULL;
}
MemoryValue* Interpreter::visit_if_condition(IfCondition* cond) {
AST* condition = cond->condition;
Compound* statement = cond->statement;
std::string cond_value = ((SingularMemoryValue*) visit(condition))->value;
MemoryValue* return_val = NULL;
if(cond_value == Values::TRUE) {
enter_new_memory_block();
return_val = visit(statement);
} else {
for(IfCondition* else_ : cond->elses) {
std::string else_cond_value = ((SingularMemoryValue*) visit(else_->condition))->value;
if(else_cond_value == Values::TRUE) {
enter_new_memory_block();
return visit(else_->statement);
}
}
}
return return_val;
}
MemoryValue* Interpreter::visit_print(Print* print) {
MemoryValue* printable_value = visit(print->printable);
std::cout << printable_value->str() << std::endl;
return NULL;
}
Array* Interpreter::visit_array_init(ArrayInit* array_init) {
std::vector<MemoryValue*> elements;
for(AST* el : array_init->elements) {
MemoryValue* element = visit(el);
elements.push_back(element);
}
return new Array(elements);
}
MemoryValue* Interpreter::visit_array_access(ArrayAccess* access) {
MemoryValue* arr = visit(access->array);
if(arr->type != Type::ARRAY) {
std::string message = "Given object is not an array.";
int line = access->array->token->line;
int column = access->array->token->column;
std::string file_path = access->array->token->file;
SyntaxError(file_path, line, column, message).cast();
}
Array* array = (Array*) arr;
MemoryValue* index = visit(access->index);
if(index->type != Type::FLOAT) {
type_mismatch_error(access->index->token);
}
SingularMemoryValue* _index = (SingularMemoryValue*) index;
int i = std::stoi(_index->value);
if(i > array->elements.size() - 1) {
std::string message = "Index out of bounds.";
int line = access->index->token->line;
int column = access->index->token->column;
std::string file_path = access->index->token->file;
SyntaxError(file_path, line, column, message).cast();
}
return array->elements.at(i);
}
Function* Interpreter::visit_function_init(FunctionInit* func_init) {
memory_block->put(func_init->func_name, new Function(func_init));
return NULL;
}
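// Editorial note: visit_function_call enters a fresh memory scope, declares the formal
// parameters, binds each one to its evaluated actual argument via a synthetic Assign node,
// runs the function body, and returns NONE when the body yields no value.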
MemoryValue* Interpreter::visit_function_call(FunctionCall* func_call) {
MemoryValue* func = visit(func_call->function);
if(func->type != Type::FUNCTION) {
std::string message = "Given object is not a function.";
int line = func_call->function->token->line;
int column = func_call->function->token->column;
std::string file_path = func_call->function->token->file;
SyntaxError(file_path, line, column, message).cast();
}
Function* function = (Function*) func;
enter_new_memory_block();
VariableDeclaration* func_params = function->func->params;
if(func_params != NULL) {
visit(func_params);
if(func_params->variables.size() != func_call->params.size()) {
std::string message = "Inconsistent number of arguments.";
int line = func_call->function->token->line;
int column = func_call->function->token->column;
std::string file_path = func_call->function->token->file;
SyntaxError(file_path, line, column, message).cast();
}
for(int i = 0; i < func_params->variables.size(); i++) {
Variable* param = func_params->variables.at(i);
AST* actual_param = func_call->params.at(i);
Assign* assign = new Assign(param, new Token(TokenType::ASSIGN, "="), actual_param);
visit(assign);
}
} else {
if(func_call->params.size() > 0) {
std::string message = "Function " + function->func->func_name + " has no arguments, but " +
std::to_string(func_call->params.size()) + " were given.";
int line = func_call->function->token->line;
int column = func_call->function->token->column;
std::string file_path = func_call->function->token->file;
SyntaxError(file_path, line, column, message).cast();
}
}
MemoryValue* ret = visit(function->func->block);
leave_memory_block();
if(ret == NULL) {
return new SingularMemoryValue(Values::NONE, Type::NONE);
}
return ret;
}
MemoryValue* Interpreter::visit_return(Return* ret) {
return visit(ret->returnable);
}
MemoryValue* Interpreter::visit_while_loop(WhileLoop* while_loop) {
AST* condition = while_loop->condition;
Compound* statement = while_loop->statement;
std::string cond_value = ((SingularMemoryValue*) visit(condition))->value;
MemoryValue* return_val = NULL;
while(cond_value == Values::TRUE) {
enter_new_memory_block();
return_val = visit(statement);
cond_value = ((SingularMemoryValue*) visit(condition))->value;
}
return return_val;
}
SingularMemoryValue* Interpreter::visit_cast_value(CastValue* cast) {
MemoryValue* memory_val = visit(cast->value);
if(SingularMemoryValue* memory_value = dynamic_cast<SingularMemoryValue*>(memory_val)) {
std::string value = memory_value->value;
Type type = memory_value->type;
switch(cast->type->type) {
case TokenType::CAST_FLOAT:
{
int dots = 0;
for(char c : value) {
if(!isdigit(c) && c != '.') {
value_error(cast->type);
}
if(c == '.') {
dots++;
if(dots > 1) {
value_error(cast->type);
}
}
}
std::string new_value = std::to_string(std::stod(value));
return new SingularMemoryValue(new_value, Type::FLOAT);
}
case TokenType::CAST_INT:
{
int dots = 0;
for(char c : value) {
if(!isdigit(c) && c != '.') {
value_error(cast->type);
}
if(c == '.') {
dots++;
if(dots > 1) {
value_error(cast->type);
}
}
}
std::string new_value = std::to_string(std::stoi(value));
return new SingularMemoryValue(new_value, Type::FLOAT);
}
case TokenType::CAST_STRING:
{
return new SingularMemoryValue(value, Type::STRING);
}
case TokenType::CAST_BOOL:
{
if(value == Values::TRUE || value == Values::FALSE) {
return new SingularMemoryValue(value, Type::BOOLEAN);
}
value_error(cast->type);
}
}
} else if(Array* array = dynamic_cast<Array*>(memory_val)) {
switch(cast->type->type) {
case TokenType::CAST_STRING:
{
return new SingularMemoryValue(array->str(), Type::STRING);
}
case TokenType::CAST_INT:
{
int length = array->elements.size();
return new SingularMemoryValue(std::to_string(length), Type::FLOAT);
}
case TokenType::CAST_FLOAT:
{
double length = array->elements.size();
return new SingularMemoryValue(std::to_string(length), Type::FLOAT);
}
case TokenType::CAST_BOOL:
{
int length = array->elements.size();
if(length > 0) {
return new SingularMemoryValue(Values::TRUE, Type::BOOLEAN);
}
return new SingularMemoryValue(Values::FALSE, Type::BOOLEAN);
}
}
}
value_error(cast->type);
}
Object* Interpreter::visit_import(Import* import) {
std::string name = import->name;
std::string path = import->path;
if(import->token->type_of(TokenType::BUILT_IN_LIB)) {
std::cout << "Built in lib" << std::endl;
return NULL;
}
Object* object = (Object*) Interpreter().evaluate(directory + path);
memory_block->put(name, object);
return object;
}
MemoryValue* Interpreter::visit_object_dive(ObjectDive* dive) {
MemoryValue* parent = visit(dive->parent);
if(Object* object = dynamic_cast<Object*>(parent)) {
Memory* enclosing_memory = memory_block;
memory_block = object->object_memory;
MemoryValue* value = visit(dive->child);
memory_block = enclosing_memory;
return value;
}
std::string message = "Variable is not object type.";
int line = dive->token->line;
int column = dive->token->column;
std::string file_path = dive->token->file;
ValueError(file_path, line, column, message).cast();
}
std::string get_dir_from_path(std::string path) {
std::string directory;
const size_t last_slash_index = path.find_last_of('\\/');
if (std::string::npos != last_slash_index) {
directory = path.substr(0, last_slash_index + 1);
}
return directory;
}
MemoryValue* Interpreter::evaluate(std::string path) {
this->directory = get_dir_from_path(path);
Lexer* lexer = new Lexer(path);
Parser* parser = new Parser(lexer);
AST* tree = parser->parse();
semantic_analyzer->visit(tree);
return visit(tree);
}
|
Python
|
UTF-8
| 1,431 | 3.4375 | 3 |
[] |
no_license
|
# A dictionary of lists: each person maps to a list of favorite languages
favorite_languages_poll = {
'jen':['python', 'ruby'],
'sarah':['c'],
'edward':['ruby', 'perl'],
'phil':["python", "java"],
'chad':["c", 'c++', 'assembly'],
}
for name, languages in favorite_languages_poll.items():
if len(languages) > 1:
print(name.title() + "'s favorite languages are:")
else:
        print(name.title() + "'s favorite language is:")
for language in languages:
print('\t' + language.title())
# A dictionary in a dictionary: username(email) as unique key, and
# poll info dictionary as the value
users_favorite_languages = {
'jen@gmail.com': {
'first_name': 'jen',
'languages': ['python', 'ruby'],
},
'sarah@gmail.com': {
'first_name': 'sarah',
'languages': ['c'],
},
'edward@yahoo.com': {
'first_name': 'edward',
'languages': ['ruby', 'perl'],
},
'phil@yahoo.com': {
'first_name': 'phil',
'languages': ["python", "java"],
},
'chad@nokia.com': {
'first_name': 'chad',
'languages': ["c", 'c++', 'assembly'],
},
'sarah@yahoo.com': {
'first_name': 'sarah',
'languages': ['python', 'java'],
},
}
for username, favorite_languages in users_favorite_languages.items():
print("\nUsername: " + username)
first_name = favorite_languages['first_name']
languages = favorite_languages['languages']
print('\tFirst name: ' + first_name)
lan_display = ''
for language in languages:
if len(lan_display) > 0:
lan_display += ', '
lan_display += language.title()
print('\tFavorite languages: ' + lan_display)
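
# Illustrative lookup (editorial example, not in the original file): a single record
# can also be read directly by its email key, e.g.
# users_favorite_languages['jen@gmail.com']['languages']  # ['python', 'ruby']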
|
PHP
|
UTF-8
| 1,042 | 2.5625 | 3 |
[] |
no_license
|
<?php
/**
* Created by PhpStorm.
* User: dj
* Date: 2018/3/19
* Time: 14:52
*/
class RecommendMod extends CBaseMod {
public $plateList;
/**
 * Constructor
*/
public function __construct () {
parent::__construct('recommend');
$this->plateList = [
[
'id' => 1 ,
'name' => '兼职'
] ,
[
'id' => 2 ,
'name' => '全职'
] ,
[
'id' => 3 ,
'name' => '培训'
]
];
}
/**
    * @todo Get the list info
    * @author Malcolm (2018-04-12)
*/
public function getListInfo ( $id ) {
$info = parent::getInfo($id);
$data = [
'recommend_id' =>$info['id'],
'recommend_title' =>$info['title'],
'recommend_type' =>$info['type'],
'recommend_plate' =>$info['plate'],
'recommend_type_id' =>$info['type_id'],
'recommend_cover' =>$info['cover'],
'recommend_url' =>$info['content'],
'recommend_add_date' =>date('Y/m/d',$info['add_time']),
];
return $data;
}
}
|
Java
|
UTF-8
| 8,247 | 2.40625 | 2 |
[] |
no_license
|
package org.example;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.Select;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.concurrent.TimeUnit;
public class Pratical
{
static WebDriver driver;
public static void sleep1(int n)
{
try {
            Thread.sleep(n); // sleep for the requested number of milliseconds
} catch (InterruptedException e) {
e.printStackTrace();
}}
public static void waituntilElementIsClickable(By by, int time)
{
WebDriverWait wait = new WebDriverWait(driver, time);
wait.until(ExpectedConditions.elementToBeClickable(by));
}
public static void clickOnElement(By by)
{
driver.findElement(by).click();
}
public static void typetext(By by, String text)
{
driver.findElement(by).sendKeys(text);
}
public static void selectByVisbleTextFromDropDownByVisibleText(By by, String text)
{
Select select = new Select(driver.findElement(by));
select.selectByVisibleText(text);
}
public static void selectFromDropDownByIndex(By by, int n)
{
Select select = new Select(driver.findElement(by));
select.selectByIndex(n);
}
public static void selectfromDropDownByValue(By by, String value)
{
Select select = new Select(driver.findElement(by));
select.selectByValue(value);
}
public static long timestamp()
{
return (System.currentTimeMillis());
}
@BeforeMethod
public static void setBrowser()
{
System.setProperty("webdriver.chrome.driver","C:\\soft\\chromedriver.exe" );
driver = new ChromeDriver();
driver.manage().window().maximize();
driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);
driver.get("https://demo.nopcommerce.com");
}
@AfterMethod
public static void setcloseBrowser()
{
driver.close();
}
@Test
public void userShouldBeResisterSuccessfully()
{
clickOnElement(By.xpath("//a[@class=\"ico-register\"]"));// click on register button
clickOnElement(By.xpath("//input[@id=\"gender-male\"]")); //click on male
typetext(By.xpath("//input[@id=\"FirstName\"]"),"Mital");// enter first name
typetext(By.xpath("//input[@id=\"LastName\"]"),"sharma");// enter surname
// select date from drop down
selectByVisbleTextFromDropDownByVisibleText(By.xpath("//select[@name=\"DateOfBirthDay\"]"),"20");
// select month from drop down
selectFromDropDownByIndex(By.xpath("//select[@name=\"DateOfBirthMonth\"]"),2);
// select a year drop down
selectfromDropDownByValue(By.xpath("//select[@name=\"DateOfBirthYear\"]"),"1985");
        // enter your email address
driver.findElement(By.xpath("//input[@id=\"Email\"]")).sendKeys("mital+"+timestamp()+"@gmail.com");
typetext(By.xpath("//input[@id=\"Company\"]"),"Abcltd"); // enter company name
clickOnElement(By.xpath("//input[@type=\"checkbox\"]")); // unclick on checkbox
typetext(By.xpath("//input[@id=\"Password\"]"),"mum123");// enter your password
typetext(By.xpath("//input[@id=\"ConfirmPassword\"]"),"mum123"); // confirm your password
clickOnElement(By.xpath("//input[@id=\"register-button\"]")); // click on register button
String expected = "Your registration completed"; // expected result
        String actual = driver.findElement(By.xpath("//div[@class=\"result\"]")).getText();// actual result from the website
        Assert.assertEquals(actual,expected, "texts do not match");// comparing the two texts
        System.out.println(actual);// checking the output
}
@Test
public void registerUserShouldBeAbleToReferAProductToAFriendSuccessfully()
{
clickOnElement(By.xpath("//a[@class=\"ico-register\"]"));// click on register button
clickOnElement(By.xpath("//input[@id=\"gender-male\"]")); //click on male
typetext(By.xpath("//input[@id=\"FirstName\"]"),"Mital");// enter first name
typetext(By.xpath("//input[@id=\"LastName\"]"),"sharma");// enter surname
// select date from drop down
selectByVisbleTextFromDropDownByVisibleText(By.xpath("//select[@name=\"DateOfBirthDay\"]"),"20");
// select month from drop down
selectFromDropDownByIndex(By.xpath("//select[@name=\"DateOfBirthMonth\"]"),2);
// select a year drop down
selectfromDropDownByValue(By.xpath("//select[@name=\"DateOfBirthYear\"]"),"1985");
// enter your email address
driver.findElement(By.xpath("//input[@id=\"Email\"]")).sendKeys("mital+"+timestamp()+"@gmail.com");
typetext(By.xpath("//input[@id=\"Company\"]"),"Abcltd"); // enter company name
clickOnElement(By.xpath("//input[@type=\"checkbox\"]")); // unclick on checkbox
typetext(By.xpath("//input[@id=\"Password\"]"),"mum123");// enter your password
typetext(By.xpath("//input[@id=\"ConfirmPassword\"]"),"mum123"); // confirm your password
clickOnElement(By.xpath("//input[@id=\"register-button\"]")); // click on register button
clickOnElement(By.xpath("//ul[@class=\"top-menu notmobile\"]/li/a[@href=\"/computers\"]")); // click on computre
clickOnElement(By.xpath("//ul[@class=\"sublist\"]/li[1]/a"));// click on desktop
clickOnElement(By.xpath("//h2/a[text()=\"Build your own computer\"]")); // click on computer product
clickOnElement(By.xpath("//input[@value=\"Email a friend\"]")); // click on email a friend
typetext(By.xpath("//input[@id=\"FriendEmail\"]"),"sharmajigna+"+timestamp()+"@gmail.com"); // enter friend email details
//writing a message for a friend
typetext(By.xpath("//textarea[@id=\"PersonalMessage\"]"),"suggesting you this computer as per our conversation");
clickOnElement(By.xpath("//input[@name=\"send-email\"]"));// click on send button
String expected = "Your message has been sent."; // show your expected text
String actual = driver.findElement(By.xpath("//div[@class=\"result\"]")).getText();// get actual text from website
        Assert.assertEquals(actual,expected,"both texts do not match");// comparing both texts
        System.out.println(actual);// printing the actual text
}
@Test
public void UserShouldBeAbleToAddProductToBasketSuccessfully()
{
clickOnElement(By.xpath("//div[@class=\"header-menu\"]/ul[1]/li[5]/a"));// click on books
//getting text to check if correct product been selected
String expected1 = driver.findElement(By.xpath("//div/div/div[1]/div/div[2]/h2/a[text()=\"Fahrenheit 451 by Ray Bradbury\"]")).getText();
// click on add to cart button for first book
clickOnElement(By.xpath("//div[@class=\"product-grid\"]/div[1]/div[1]/div[1]/div[2]/div[3]/div[2]/input[1]"));
// sleep1(2000);
//getting text to check if correct product been selected
String expected2 = driver.findElement(By.xpath("//div/div/div[2]/div/div[2]/h2/a[text()=\"First Prize Pies\"]")).getText();
//sleep1(2000);
// click on add to cart button for second book
clickOnElement(By.xpath("//div[@class=\"product-grid\"]/div[1]/div[2]/div[1]/div[2]/div[3]/div[2]/input[1]"));
// click on shopping cart at the top
clickOnElement(By.xpath("//a/span[@class=\"cart-label\"]"));
// getting text from shopping cart to compare
String actual1 = driver.findElement(By.xpath("//td/a[text()=\"Fahrenheit 451 by Ray Bradbury\"]")).getText();
// comparing first book
        Assert.assertEquals(actual1,expected1,"both messages do not match");// comparing the first book
System.out.println(actual1);// checking output
// getting text from shopping cart to compare
String actual2 = driver.findElement(By.xpath("//tbody/tr[2]/td[4]/a[@class=\"product-name\"]")).getText();
// comparing second book
        Assert.assertEquals(actual2,expected2,"it is not the same book in the shopping cart");
        // checking the output
System.out.println(actual2);
}
}
|
Python
|
UTF-8
| 1,318 | 5 | 5 |
[
"Apache-2.0"
] |
permissive
|
# Gerard Hanlon, 30.01.2018
# A program that displays Fibonacci numbers.
def fib(n):
"""This function returns the nth Fibonacci numbers."""
i = 0 # variable i = the first fibonacci number
j = 1 # variable j = the second fibonacci number
n = n - 1 # variable n = n - 1
while n >= 0: # while n is greater than 0
i, j = j, i + j # 0, 1 = 1, 0 + 1
n = n - 1 # we want the script to add the number preceeding it
return i # return the new value of i
name = "Hanlon" # My surname
first = name[0] # The first letter of my Surname- H
last = name [-1] # The last letter of my surname- N
firstno = ord (first) # character code of the first letter (ord('H') = 72)
lastno = ord(last) # character code of the last letter (ord('n') = 110)
x = firstno + lastno # x = the index of the Fibonacci number we are looking for: the two character codes added together
ans = fib(x) # ans = the fibonacci of x
print("My surname is", name) # prints my surname
print("The first letter", first, "is number", firstno) # returns the fibonacci of the first letter of my surname
print("The last letter", last, "is number", lastno) # returns the fibonacci number of the last letter of my surname
print("Fibonacci number", x, "is", ans) # x = the total fibonacci number
|
PHP
|
UTF-8
| 3,241 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
<?php
/***************************************************************************
* *
* (c) 2004 Vladimir V. Kalynyak, Alexey V. Vinokurov, Ilya M. Shalnev *
* *
* This is commercial software, only users who have purchased a valid *
* license and accept to the terms of the License Agreement can install *
* and use this program. *
* *
****************************************************************************
* PLEASE READ THE FULL TEXT OF THE SOFTWARE LICENSE AGREEMENT IN THE *
* "copyright.txt" FILE PROVIDED WITH THIS DISTRIBUTION PACKAGE. *
****************************************************************************/
namespace Tygh;
use Exception;
class HybridProvidersFoursquare extends \Hybrid_Providers_Foursquare
{
private static $apiVersion = array('v' => '20120610');
private static $defPhotoSize = '100x100';
/**
* load the user profile from the IDp api client
*/
public function getUserProfile()
{
$data = $this->api->api('users/self', 'GET', self::$apiVersion);
if (!isset($data->response->user->id)) {
throw new Exception('User profile request failed! ' . $this->providerId . ' returned an invalid response:' . Hybrid_Logger::dumpData( $data ), 6);
}
$data = $data->response->user;
$this->user->profile->identifier = $data->id;
$this->user->profile->firstName = $data->firstName;
$this->user->profile->lastName = empty($data->lastName) ? '' : $data->lastName;
$this->user->profile->displayName = $this->buildDisplayName($this->user->profile->firstName, $this->user->profile->lastName);
$this->user->profile->photoURL = $this->buildPhotoURL($data->photo->prefix, $data->photo->suffix);
$this->user->profile->profileURL = 'https://www.foursquare.com/user/' . $data->id;
$this->user->profile->gender = $data->gender;
$this->user->profile->city = $data->homeCity;
$this->user->profile->email = $data->contact->email;
$this->user->profile->emailVerified = $data->contact->email;
return $this->user->profile;
}
/**
* Builds the user name
*
* @param string $firstName The value of the first name
* @param string $lastName The value of the last name
*
* @return string of the user name
*/
private function buildDisplayName($firstName, $lastName)
{
return trim($firstName . ' ' . $lastName);
}
/**
* Builds the photo url
*
* @param string $prefix The value of the start photo url
* @param string $suffix The value of the finish photo url
*
* @return string of the photo url
*/
private function buildPhotoURL($prefix, $suffix)
{
if (isset($prefix) && isset($suffix)) {
return $prefix . ((isset($this->config['params']['photo_size'])) ? ($this->config['params']['photo_size']) : (self::$defPhotoSize)) . $suffix;
}
return '';
}
}
|
Java
|
UHC
| 2,393 | 2.234375 | 2 |
[] |
no_license
|
package com.example.bluedrop;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
        // hide the title (action) bar
ActionBar abar = getActionBar();
abar.hide();
        // the four main buttons
        Button ActivityBtn = (Button)findViewById(R.id.btn_activity); // activity
        Button FollowBtn = (Button)findViewById(R.id.btn_follow); // follow
        Button ProfileBtn = (Button)findViewById(R.id.btn_profile); // profile
        Button SettingBtn = (Button)findViewById(R.id.btn_setting); // settings
ActivityBtn.setOnClickListener(new OnClickListener(){
public void onClick(View v){
Intent moveToActivity = new Intent(getApplicationContext(),OutActivity.class);
startActivity(moveToActivity);
}
});
FollowBtn.setOnClickListener(new OnClickListener(){
public void onClick(View v){
Intent moveToFollow = new Intent(getApplicationContext(),FollowActivity.class);
startActivity(moveToFollow);
}
});
ProfileBtn.setOnClickListener(new OnClickListener(){
public void onClick(View v){
Intent moveToProfile = new Intent(getApplicationContext(),ProfileActivity.class);
startActivity(moveToProfile);
}
});
SettingBtn.setOnClickListener(new OnClickListener(){
public void onClick(View v){
Intent moveToSetting = new Intent(getApplicationContext(),SettingsActivity.class);
startActivity(moveToSetting);
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
|
Java
|
UTF-8
| 2,576 | 2.890625 | 3 |
[] |
no_license
|
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
/**
* BP3 coding challenge solution
*/
public class app {
public static void main(String[] args) {
ArrayList<Integer> servicetasks = new ArrayList<Integer>();
JSONParser parser = new JSONParser();
try {
JSONObject obj = (JSONObject) parser.parse(new FileReader(".//res//diagram.json"));
JSONArray nodes = (JSONArray) obj.get("nodes");
JSONArray edges = (JSONArray) obj.get("edges");
for (int i = 0; i < nodes.size(); i++) {
JSONObject node = (JSONObject) nodes.get(i);
String type = (String) node.get("type");
if(type.equals("ServiceTask")) {
int id = ((Long) node.get("id")).intValue();
servicetasks.add(id);
                    nodes.remove(i);
                    i--; // stay at this index: removal shifts the remaining elements left
}
}
for (int k = 0; k < edges.size(); k++) {
JSONObject edge = (JSONObject) edges.get(k);
int from = ((Long) edge.get("from")).intValue();
int to = ((Long) edge.get("to")).intValue();
if(servicetasks.contains(to)) {
for (int j = k+1; j < edges.size(); j++) {
JSONObject edgeTemp = (JSONObject) edges.get(j);
int from2 = ((Long) edgeTemp.get("from")).intValue();
int to2 = ((Long) edgeTemp.get("to")).intValue();
if(to == from2) {
edge.replace("to", to2);
}
}
}
                if(servicetasks.contains(from)) {
                    edges.remove(k);
                    k--; // stay at this index: removal shifts the remaining elements left
                }
}
JSONObject newObj= new JSONObject();
newObj.put("nodes", nodes);
newObj.put("edges", edges);
FileWriter fileWriter = new FileWriter(".//res//New-Diagram.json");
fileWriter.write(newObj.toJSONString());
fileWriter.flush();
fileWriter.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (ParseException e) {
e.printStackTrace();
}
}
}
|
Python
|
UTF-8
| 4,263 | 4.28125 | 4 |
[] |
no_license
|
"""
Preparation for Phone Interview
Answer the questions below, and send them back via email ahead of the phone
interview. At the phone interview, please have your homework in front of you
and be prepared to discuss your answers with the interviewer.
Algorithms
Implement the method nextNum() and a minimal but effective set of unit tests.
Implement in the language of your choice, Python is preferred, but Java and
other languages are completely fine. Make sure your code is exemplary, as if
it was going to be shipped as part of a production system.
As a quick check, given Random Numbers are [-1, 0, 1, 2, 3] and Probabilities
are [0.01, 0.3, 0.58, 0.1, 0.01] if we call nextNum() 100 times we may get the
following results. As the results are random, these particular results are
unlikely.
-1: 1 times
0: 22 times
1: 57 times
2: 20 times
3: 0 times
Languages Python
You may use random.random() which returns a pseudo random number between 0
and 1.
"""
from __future__ import division
import bisect
import collections
import random
def cumulative_sum(values):
"""
Returns a list containing the cumulative sum of the
input values. For example, given
[0.1,0.2,0.2,0.5]
returns
[0.1,0.3,0.5,1.0]
"""
cumulative_values = []
cumulative_value = 0.0
for value in values:
cumulative_value += value
cumulative_values.append(cumulative_value)
return cumulative_values
def find_index_of_leftmost_value_greater_than_x(values, x):
"""
Utility function to search a sorted list and return the
index of the position of the first value greater than
the test value.
"""
index = bisect.bisect_right(values, x)
if index != len(values):
return index
else:
raise StandardError(
"Cannot find valid index for {} in {}".format(x, values)
)
class RandomGen(object):
def __init__(self, random_nums, probabilities):
"""
Initialise the random number generator with a set of allowed numbers,
and probabilities for each number to occur.
"""
if any(x < 0.0 for x in probabilities):
raise StandardError(
"Negative probabilities ({}) passed".format(probabilities)
)
if abs(sum(probabilities) - 1.0) > 1e-10:
raise StandardError(
"Probabilities ({}) do not sum to 1.0".format(probabilities)
)
if len(random_nums) != len(probabilities):
raise StandardError(
"len(probabilities) ({}) != len(random_nums) ({})".format(
len(probabilities), len(random_nums)
))
self._random_nums = random_nums
self._probabilities = probabilities
self._cumulative_probabilities = cumulative_sum(probabilities)
def next_num(self):
"""
Returns one of the randomNums. When this method is called
multiple times over a long period, it should return the
numbers roughly with the initialized probabilities.
"""
random_value = random.random()
index_of_number = find_index_of_leftmost_value_greater_than_x(
self._cumulative_probabilities,
random_value
)
return self._random_nums[index_of_number]
if __name__ == "__main__":
# Example use-case
number_of_calls = 1000
valid_numbers = [1, 2, 3, 4, 5]
probabilities = [0.1, 0.4, 0.01, 0.001, 0.489]
generator = RandomGen(valid_numbers, probabilities)
counter = collections.defaultdict(int)
for x in xrange(number_of_calls):
random_number = generator.next_num()
counter[random_number] += 1
print("Generated {} random numbers".format(number_of_calls))
print("Valid values are {}".format(valid_numbers))
print("Probabilities are {}".format(probabilities))
print("Printing expected and actual counts for each allowed value")
for index, number in enumerate(valid_numbers):
expected = int(number_of_calls * probabilities[index])
actual = counter[number]
print(
"{}: Expected = {}. Actual = {}".format(number, expected, actual)
)
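

# ---------------------------------------------------------------------------
# Illustrative addition (not part of the original submission): the brief above
# asks for "a minimal but effective set of unit tests", which this file does
# not include. The sketch below shows what such tests could look like for the
# classes and functions defined in this module (run with `python -m unittest`);
# the specific test cases and tolerances are assumptions, not requirements.
import unittest


class RandomGenTests(unittest.TestCase):

    def test_cumulative_sum(self):
        result = cumulative_sum([0.1, 0.2, 0.2, 0.5])
        for actual, expected in zip(result, [0.1, 0.3, 0.5, 1.0]):
            self.assertAlmostEqual(actual, expected)

    def test_mismatched_lengths_are_rejected(self):
        self.assertRaises(StandardError, RandomGen, [1, 2, 3], [0.5, 0.5])

    def test_probabilities_must_sum_to_one(self):
        self.assertRaises(StandardError, RandomGen, [1, 2], [0.5, 0.6])

    def test_next_num_only_returns_allowed_values(self):
        generator = RandomGen([1, 2, 3], [0.2, 0.3, 0.5])
        for _ in xrange(1000):
            self.assertIn(generator.next_num(), (1, 2, 3))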
|
Python
|
UTF-8
| 11,188 | 3.453125 | 3 |
[
"MIT"
] |
permissive
|
import re
import sys
class Student:
"""
representing a single student with references to their wished packages
"""
def __init__(self, number, wishes):
self.number = number
# tuple with one package number per wish (1st wish, 2nd wish...)
self.wishes = wishes
def __repr__(self):
return f"id: {self.number}; wishes: {self.wishes}"
class Package:
"""
representing a single package with references to all the students wanting this package
"""
def __init__(self, number, wishers):
self.number = number
# tuple with one list per wish (1st wish, 2nd wish...) with all the students (wishers) wanting this package
self.wishers = wishers
def __repr__(self):
return f"id: {self.number}; wishers: {self.wishers}"
class Selection:
"""
containing all the students and packages
methods can assign students to packages in three different ways:
1. assign_all_cleanly:
try to assign packages that are only wanted by a single student
-> no package gets taken away from a different student wanting it the same way or more
2. assign_all_uncleanly:
assign students to packages with taking packages away from students wanting it the same way
leaving only completely unwanted packages
3. assign_all_dirtily:
assign packages to students, including completely unwanted packages
no packages or student stays unassigned
"""
def __init__(self, students, packages):
if len(students) != len(packages):
raise ValueError("The amount of students doesn't match the amount of packages!")
# key: student number, value: Student instance
self.students = students
# key: package number, value: Package instance
self.packages = packages
# key: package number, value: student number
self.assigned_students = {}
self.amount_wishes = len(self.students[1].wishes)
def __repr__(self):
return f"{len(self.students)} students with {self.amount_wishes} wishes each; " \
f"{len(self.assigned_students)} students already have a package assigned to them"
def get_unassigned_wishers(self, package_number, wish_id):
"""
get all the students wanting this package (according to wish_id) that aren't assigned yet
"""
# get Package object
package = self.packages[package_number]
# get all students having this package as their wish (according to wish_id) that aren't assigned
return [wisher for wisher in package.wishers[wish_id] if wisher not in self.assigned_students.keys()]
def assign(self, student_number, package_number):
"""
assign student to package
"""
if student_number in self.assigned_students.keys() or package_number in self.assigned_students.values():
raise ValueError("Trying to assign an already assigned student or an already assigned package!")
self.assigned_students[student_number] = package_number
def assign_package_if_possible(self, package_number, wish_id):
"""
recursive function (calling resolve_after_assignment)
see if this package is only wanted by one student (according to wish_id)
and then assign it
return True when this is a package wanted by multiple students, else False
"""
# get unassigned wishers
unassigned_wishers = self.get_unassigned_wishers(package_number, wish_id)
# when only a single student wants this package, they get it
if len(unassigned_wishers) == 1:
this_student_number = unassigned_wishers[0]
# assign this student to the wanted package if they aren't assigned yet
if this_student_number not in self.assigned_students.keys():
self.assign(this_student_number, package_number)
# see if that assignment resolved a problem with a more important wish
# <- one student less to find a package for
self.resolve_after_assignment(this_student_number, wish_id - 1)
# when this package is wanted by multiple students
elif len(self.packages[package_number].wishers[wish_id]) > 1:
return True
return False
def resolve_after_assignment(self, student_number, wish_id):
"""
recursive function (calling itself and assign_package_if_possible)
see if an assignment (student_number got assigned) resolved a problem with a more important wish (wish_id)
"""
# when this wish doesn't exists
if wish_id < 0:
return
# now this package has one student less wanting it
package_number = self.students[student_number].wishes[wish_id]
# when this package is not assigned yet
if package_number not in self.assigned_students.values():
# see if it can be assigned -> resolve even more problems if possible
self.assign_package_if_possible(package_number, wish_id)
# do the same with the next more important wish
self.resolve_after_assignment(student_number, wish_id - 1)
def assign_packages(self, wish_id, disallowed_packages):
"""
(cleanly) assign all packages that are wanted by only one student (according to wish_id)
and check if that assignment solved a problem with a more important wish
return all the packages that are wanted by multiple students
"""
# numbers of all packages wanted by multiple students
highly_wanted_package = []
for package_number in self.packages.keys():
# don't try to assign this package if it is disallowed or already assigned
if package_number not in disallowed_packages and package_number not in self.assigned_students.values():
# assign if possible
is_highly_wanted = self.assign_package_if_possible(package_number, wish_id)
# add this package if it is highly wanted
if is_highly_wanted:
highly_wanted_package.append(package_number)
return highly_wanted_package
def assign_all_cleanly(self):
"""
try to assign packages that are only wanted by a single student
-> no package gets taken away from a different student wanting it the same way or more
"""
# these packages are wanted so much that less relevant wishes can't be used to assign it
highly_wanted_packages = []
for wish_id in range(self.amount_wishes):
# assign everything possible for this wish_id
# and try to resolve as many problems in more important wishes as possible
highly_wanted_packages += self.assign_packages(wish_id, highly_wanted_packages)
def assign_all_uncleanly(self):
"""
go through all wishes and just assign the first student with their wished package
-> other students wanting this package in the same way won't get it
"""
# go through all unassigned students
for wish_id in range(self.amount_wishes):
for student_number in self.students.keys():
if student_number in self.assigned_students.keys():
# this student is already assigned
continue
wished_package = self.students[student_number].wishes[wish_id]
# just assign it if the package is not assigned yet
if wished_package not in self.assigned_students.values():
self.assign(student_number, wished_package)
def assign_all_dirtily(self):
"""
go through all unassigned students and unassigned packages and just assign with no regard to their wishes
should only be used when there are only unwanted packages left to be assigned
"""
# get all unassigned packages
unassigned_package_numbers = [package_number for package_number in self.packages.keys()
if package_number not in self.assigned_students.values()]
for wish_id in range(self.amount_wishes):
for student_number in self.students.keys():
if student_number not in self.assigned_students.keys():
# just take and delete the last unassigned package number and assign it
self.assign(student_number, unassigned_package_numbers.pop())
def load_file(filepath):
with open(filepath, "r", encoding="utf-8") as file:
lines = [line.strip() for line in file]
# the first line only contains the amount of students, which won't be used
return lines[1:]
def load_students_and_packages(lines):
"""
get lines read from the file and create a Selection object
"""
# get the amount of wishes every student has
wishes_amount = len(re.split(r"[\W]+", lines[0]))
# key: package number, value: Package instance
# fill it with no student wanting any packages
packages = {number: Package(number, tuple([] for _ in range(wishes_amount))) for number in range(1, len(lines) + 1)}
# key: student number, value: Student instance
students = {}
for student_idx, line in enumerate(lines):
"""
fill students
"""
# split line into words
wishes = tuple(int(package_number) for package_number in re.split(r"[\W]+", line))
# create Student instance and add to dict
student = Student(student_idx + 1, wishes)
students[student.number] = student
"""
fill packages
"""
# go through every wished package
for wish_idx, package_number in enumerate(student.wishes):
# append student number to the wished-by-list
packages[package_number].wishers[wish_idx].append(student.number)
return Selection(students, packages)
def main():
if len(sys.argv) != 2:
raise ValueError("Specify filepath of the input file!")
# load students and packages form file
lines = load_file(sys.argv[1])
selection = load_students_and_packages(lines)
# assign all packages and students
selection.assign_all_cleanly()
selection.assign_all_uncleanly()
selection.assign_all_dirtily()
# print results
print("student number, [wishes], assigned package")
for student_number in selection.students:
# convert this student's wishes into a usable string
wishes = [str(wish) for wish in selection.students[student_number].wishes]
wishes_string = "\t".join(wishes)
# get the package that got assigned to this student
assigned_package = selection.assigned_students[student_number]
print(f"{student_number}\t\t{wishes_string}\t\t{assigned_package}")
if __name__ == "__main__":
main()
|
Java
|
UTF-8
| 658 | 1.789063 | 2 |
[] |
no_license
|
package com.hjy.shop.service;
import com.hjy.shop.dto.PageBean;
import com.hjy.shop.entity.CategorySecond;
import com.hjy.shop.entity.Product;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* Created by admin on 2017/3/9.
*/
public interface CategorySecondService {
List<CategorySecond> queryAllProductByCid(Integer cid);
PageBean<CategorySecond> queryAllByPage(Integer pageNow);
CategorySecond queryByCsid(Integer csid);
void update(CategorySecond currCategorySecond);
void delete(CategorySecond categorySecond);
void add(CategorySecond categorySecond);
List<CategorySecond> queryAll();
}
|
Python
|
UTF-8
| 1,265 | 3.703125 | 4 |
[] |
no_license
|
from graph.incidence_matrix_graph import IncidenceMatrixGraph
class OrientedIncidenceMatrixGraph(IncidenceMatrixGraph):
def insert_edge(self, edge, weight):
'''Add an edge between specified vertices
Args:
edge (list): list of 2 vertex indices to connect
weight (int): Weight of the edge added
Raises:
Exception: edge contains indices of vertices not present in the graph
Example:
graph.insert_edge([0, 3], 5) - connects vertices 0 and 3 with an edge of weight 5'''
v1, v2 = edge
if self.has_vertex(v1) and self.has_vertex(v2):
self.matrix[v1][v2] = weight
else:
raise Exception('Vertices of the edge must be contained in the graph')
def remove_edge(self, edge):
'''Removes edge from the graph
Args:
edge (list): list of two vertices on the ends of the edge
Raises:
Exception: edge contains indices of vertices not present in the graph
Example:
graph.remove_edge([0, 3])'''
if self.has_edge(edge):
v1, v2 = edge
self.matrix[v1][v2] = None
else:
raise Exception('Edge must be contained in the graph')
|
Shell
|
UTF-8
| 196 | 2.609375 | 3 |
[] |
no_license
|
#!/bin/bash
if [ $# -eq 0 ]; then
echo "Usage: divider file"
exit 1
fi
printf '\033]1337;File=inline=1;height=100%%;width=100%%;preserveAspectRatio=0'
printf ":"
base64 < $1
printf '\a\n'
|
JavaScript
|
UTF-8
| 2,405 | 3.6875 | 4 |
[] |
no_license
|
function memoize(func) {
var cache = {};
return function (arg) {
if (cache[arg]) {
      console.log("cache hit - returning the cached result", arg);
return cache[arg];
}
    console.log("cache miss - executing the original function", arg);
return (cache[arg] = func.apply(this, arguments));
};
}
var mult5 = memoize(function (a) {
return a * 5;
});
console.log(mult5(1));
console.log(mult5(2));
console.log(mult5(1));
console.log(mult5(2));
var add = memoize(function (a, b) {
return a + b;
});
console.log(add(3, 5));
console.log(add(3, 10)); // still returns 8: only the first argument is the cache key, so multiple arguments cannot be used
const _ = require("partial-js");
// the internals of underscore's memoize
_.memoize = function (func, hasher) {
var memoize = function (key) {
var cache = memoize.cache;
var address = "" + (hasher ? hasher.apply(this, arguments) : key);
    // use _.has because the cached result itself may be null, 0, or undefined
if (!_.has(cache, address)) cache[address] = func.apply(this, arguments);
return cache[address];
};
  memoize.cache = {}; // exposed so the cache memory can be managed from outside
return memoize;
};
// partial-js memoize2 (its memoize is identical to underscore's)
var f1 = _.memoize2(function (obj) {
  console.log("entered the function body");
return obj.a + 10;
});
var obj1 = { a: 1 };
var obj2 = { a: 2 };
console.log(f1(obj1));
console.log(f1(obj1)); // cached
console.log(f1(obj1)); // cached
console.log(f1(obj2));
console.log(f1(obj2)); // cached
// memoize2 stores each function's result on the object that was passed in as the argument, so for a
// throwaway object the cached values are freed together with the object; no separate cache management is needed.
var evens = _.memoize2(function (list) {
  console.log("entered the function body and ran the loop");
return _.filter(list, function (num) {
return num % 2 == 0;
});
});
// mutable example
var list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
console.log(evens(list));
console.log(evens(list)); // uses the cache, so the loop does not run
list.push(11);
list.push(12);
console.log(evens(list)); // uses the stale cache, so 12 does not appear
// immutable example
var list2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
console.log(evens(list2));
console.log(evens(list2)); // uses the cache
list2 = list2.concat(11, 12);
console.log(evens(list2));
console.log(evens(list2)); // uses the cache
|
SQL
|
UTF-8
| 431 | 2.875 | 3 |
[] |
no_license
|
CREATE TABLE IF NOT EXISTS `user_tasks`
(
`id` int(11) NOT NULL AUTO_INCREMENT,
`title` varchar(256) NOT NULL,
`priority` tinyint(1) NOT NULL DEFAULT 1,
`user_id` int(11) NULL,
`created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`due_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8;
|
Python
|
UTF-8
| 490 | 2.6875 | 3 |
[] |
no_license
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 6 21:54:02 2020
@author: Jacob
"""
import numpy as np
from motor_control import servoController, safeCleanUp
pwm_array = list(500*np.linspace(0,1)) + list(500*np.linspace(1,0))
motor_list = [16,18,22] # the + , -, and enabler GPIO pins
try:
servoController(motor_list, pwm_array, direction='CCW')
servoController(motor_list, pwm_array, verbose=True)
except KeyboardInterrupt:
pass
safeCleanUp()
|
JavaScript
|
UTF-8
| 605 | 4.15625 | 4 |
[] |
no_license
|
let palindrome = "Sir - Tetris";
const checker = input => {
let word= sanitizer(input);
let isPalindrome = false;
for(let i = 0; i < word.length/2; i++){
console.log(word[i], word[word.length - (i + 1)]);
if(word[i] === word[word.length - (i + 1)]) {
isPalindrome = true
} else {
isPalindrome = false
break
}
console.log(isPalindrome);
}
return isPalindrome
}
const sanitizer = input => {
return input.toLowerCase().trim().split(" ").join("").replace("-", "")
}
console.log('result', checker(palindrome));
|
Python
|
UTF-8
| 2,211 | 3.09375 | 3 |
[] |
no_license
|
# -*- coding: utf-8 -*-
"""
File: plot_functions.py.
Author: Christian Vergara Retamal - Joaquín Callejón Guzmán
Email: chrvergararetamal[at]gmail[dot]com
Github: https://github.com/cvergararetamal
Description: Plot functions - Consultoria I
"""
import sys
import math
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from os import path
import re
import nltk
import Levenshtein as lvs
from wordcloud import WordCloud, STOPWORDS
from nltk.corpus import stopwords
# Plot the mean of a variable
def plot_mean_custom(var, titulo = 'Valor por defecto'):
    tmp = var.dropna() # Drop the nulls, if any exist
plt.hist(tmp, color = 'dodgerblue')
plt.title(titulo, size = 17)
plt.axvline(tmp.mean(), color = 'tomato', linewidth = 2,
linestyle = '--', label = 'Media de {}'.format(titulo))
plt.legend()
# Plot the distribution of answers to a question
def plot_by_quest(df, pregunta):
aux_dict = df[pregunta].value_counts().to_dict()
plt.bar(*zip(*aux_dict.items()))
plt.title('Comportamiento de respuestas de {}'.format(pregunta))
plt.show()
# Plot the answers to a question for a single institution
def plot_quest_by_inst(df, pregunta, institucion):
df_temp = df[df['institucion']==institucion]
aux_dict = df_temp[pregunta].value_counts().to_dict()
plt.bar(*zip(*aux_dict.items()))
plt.title('{} en {}'.format(pregunta, institucion))
# Word cloud for the comments on a question
def wordCloud(dataframe, pregunta, palabras):
df_temp = pd.DataFrame()
df_temp[pregunta] = dataframe[pregunta]
comment_words = ''
stopwords = set(STOPWORDS)
    # Add the Spanish words to the 'stopwords' set
stopwords.update(palabras)
for val in df_temp[pregunta].values:
val = str(val)
tokens = val.split()
for i in range(len(tokens)):
tokens[i] = tokens[i].lower()
comment_words += " ".join(tokens)+" "
wordcloud = WordCloud(width=800, height=800, stopwords = stopwords, min_font_size= 10).generate(comment_words)
plt.figure(figsize = (10, 10), facecolor = None)
plt.imshow(wordcloud, interpolation="bilinear")
plt.axis("off")
plt.tight_layout(pad = 0)
plt.show()
|
Markdown
|
UTF-8
| 14,298 | 3.25 | 3 |
[] |
no_license
|
# Assignment: Predict heart disease risk
In this assignment you're going to build an app that can predict the heart disease risk in a group of patients.
The first thing you will need for your app is a data file with patients, their medical info, and their heart disease risk assessment. We're going to use the famous [UCI Heart Disease Dataset](https://archive.ics.uci.edu/ml/datasets/heart+Disease) which has real-life data from 303 patients.
Download the [Processed Cleveland Data](https://archive.ics.uci.edu/ml/machine-learning-databases/heart-disease/processed.cleveland.data) file and save it as **processed.cleveland.data.csv**.
The data file looks like this:

It’s a CSV file with 14 columns of information:
* Age
* Sex: 1 = male, 0 = female
* Chest Pain Type: 1 = typical angina, 2 = atypical angina , 3 = non-anginal pain, 4 = asymptomatic
* Resting blood pressure in mm Hg on admission to the hospital
* Serum cholesterol in mg/dl
* Fasting blood sugar > 120 mg/dl: 1 = true; 0 = false
* Resting EKG results: 0 = normal, 1 = having ST-T wave abnormality, 2 = showing probable or definite left ventricular hypertrophy by Estes’ criteria
* Maximum heart rate achieved
* Exercise induced angina: 1 = yes; 0 = no
* ST depression induced by exercise relative to rest
* Slope of the peak exercise ST segment: 1 = up-sloping, 2 = flat, 3 = down-sloping
* Number of major vessels (0–3) colored by fluoroscopy
* Thallium heart scan results: 3 = normal, 6 = fixed defect, 7 = reversible defect
* Diagnosis of heart disease: 0 = normal risk, 1-4 = elevated risk
The first 13 columns are patient diagnostic information, and the last column is the diagnosis: 0 means a healthy patient, and values 1-4 mean an elevated risk of heart disease.
You are going to build a binary classification machine learning model that reads in all 13 columns of patient information, and then makes a prediction for the heart disease risk.
Let’s get started. You need to build a new application from scratch by opening a terminal and creating a new .NET Core console project:
```bash
$ dotnet new console -o Heart
$ cd Heart
```
Now install the following ML.NET packages:
```bash
$ dotnet add package Microsoft.ML
$ dotnet add package Microsoft.ML.FastTree
```
Now you are ready to add some classes. You’ll need one to hold patient info, and one to hold your model predictions.
Modify the Program.cs file like this:
```csharp
using System;
using System.IO;
using Microsoft.ML;
using Microsoft.ML.Data;
namespace Heart
{
/// <summary>
/// The HeartData record holds one single heart data record.
/// </summary>
public class HeartData
{
[LoadColumn(0)] public float Age { get; set; }
[LoadColumn(1)] public float Sex { get; set; }
[LoadColumn(2)] public float Cp { get; set; }
[LoadColumn(3)] public float TrestBps { get; set; }
[LoadColumn(4)] public float Chol { get; set; }
[LoadColumn(5)] public float Fbs { get; set; }
[LoadColumn(6)] public float RestEcg { get; set; }
[LoadColumn(7)] public float Thalac { get; set; }
[LoadColumn(8)] public float Exang { get; set; }
[LoadColumn(9)] public float OldPeak { get; set; }
[LoadColumn(10)] public float Slope { get; set; }
[LoadColumn(11)] public float Ca { get; set; }
[LoadColumn(12)] public float Thal { get; set; }
[LoadColumn(13)] public int RawLabel { get; set; }
}
/// <summary>
/// The HeartPrediction class contains a single heart data prediction.
/// </summary>
public class HeartPrediction
{
[ColumnName("PredictedLabel")] public bool Prediction;
public float Probability;
public float Score;
}
// the rest of the code goes here....
}
```
The **HeartData** class holds one single patient record. Note how each field is tagged with a **LoadColumn** attribute that tells the CSV data loading code which column to import data from.
There's also a **HeartPrediction** class which will hold a single heart disease prediction. There's a boolean **Prediction**, a **Probability** value, and the **Score** the model will assign to the prediction.
Now look at the final column in the data file. Our label is an integer value between 0-4, with 0 meaning 'no risk' and 1-4 meaning 'elevated risk'.
But you're building a Binary Classifier which means your model needs to be trained on boolean labels.
So you'll have to somehow convert the 'raw' numeric label (stored in the **RawLabel** field) to a boolean value.
To set that up, you'll need two helper classes:
```csharp
/// <summary>
/// The FromLabel class is a helper class for a column transformation.
/// </summary>
public class FromLabel
{
public int RawLabel;
}
/// <summary>
/// The ToLabel class is a helper class for a column transformation.
/// </summary>
public class ToLabel
{
public bool Label;
}
// the rest of the code goes here....
```
Note the **FromLabel** class that contains the 'raw' unprocessed numeric label value, and the **ToLabel** class that contains the final boolean label value.
Now you're going to load the training data in memory:
```csharp
/// <summary>
/// The application class.
/// </summary>
public class Program
{
// filenames for training and test data
private static string dataPath = Path.Combine(Environment.CurrentDirectory, "processed.cleveland.data.csv");
/// <summary>
/// The main applicaton entry point.
/// </summary>
/// <param name="args">The command line arguments.</param>
public static void Main(string[] args)
{
// set up a machine learning context
var context = new MLContext();
// load data
Console.WriteLine("Loading data...");
var data = context.Data.LoadFromTextFile<HeartData>(dataPath, hasHeader: false, separatorChar: ',');
// split the data into a training and test partition
var partitions = context.Data.TrainTestSplit(data, testFraction: 0.2);
// the rest of the code goes here....
}
}
```
This code uses the method **LoadFromTextFile** to load the CSV data directly into memory. The class field annotations tell the method how to store the loaded data in the **HeartData** class.
The **TrainTestSplit** method then splits the data into a training partition with 80% of the data and a test partition with 20% of the data.
Now you’re ready to start building the machine learning model:
```csharp
// set up a training pipeline
// step 1: convert the label value to a boolean
var pipeline = context.Transforms.CustomMapping<FromLabel, ToLabel>(
(input, output) => { output.Label = input.RawLabel > 0; },
"LabelMapping"
)
// step 2: concatenate all feature columns
.Append(context.Transforms.Concatenate(
"Features",
"Age",
"Sex",
"Cp",
"TrestBps",
"Chol",
"Fbs",
"RestEcg",
"Thalac",
"Exang",
"OldPeak",
"Slope",
"Ca",
"Thal"))
// step 3: set up a fast tree learner
.Append(context.BinaryClassification.Trainers.FastTree(
labelColumnName: "Label",
featureColumnName: "Features"));
// train the model
Console.WriteLine("Training model...");
var model = pipeline.Fit(partitions.TrainSet);
// the rest of the code goes here....
```
Machine learning models in ML.NET are built with pipelines, which are sequences of data-loading, transformation, and learning components.
This pipeline has the following components:
* A **CustomMapping** that transforms the numeric label to a boolean value. We define 0 values as healthy, and anything above 0 as an elevated risk.
* **Concatenate** which combines all input data columns into a single column called 'Features'. This is a required step because ML.NET can only train on a single input column.
* A **FastTree** classification learner which will train the model to make accurate predictions.
The **FastTreeBinaryClassificationTrainer** is a very nice training algorithm that uses gradient boosting, a machine learning technique for classification problems.
With the pipeline fully assembled, you can train the model with a call to **Fit**.
You now have a fully-trained model. So now it's time to take the test partition, predict the diagnosis for each patient, and calculate the accuracy metrics of the model:
```csharp
// make predictions for the test data set
Console.WriteLine("Evaluating model...");
var predictions = model.Transform(partitions.TestSet);
// compare the predictions with the ground truth
var metrics = context.BinaryClassification.Evaluate(
data: predictions,
labelColumnName: "Label",
scoreColumnName: "Score");
// report the results
Console.WriteLine($" Accuracy: {metrics.Accuracy}");
Console.WriteLine($" Auc: {metrics.AreaUnderRocCurve}");
Console.WriteLine($" Auprc: {metrics.AreaUnderPrecisionRecallCurve}");
Console.WriteLine($" F1Score: {metrics.F1Score}");
Console.WriteLine($" LogLoss: {metrics.LogLoss}");
Console.WriteLine($" LogLossReduction: {metrics.LogLossReduction}");
Console.WriteLine($" PositivePrecision: {metrics.PositivePrecision}");
Console.WriteLine($" PositiveRecall: {metrics.PositiveRecall}");
Console.WriteLine($" NegativePrecision: {metrics.NegativePrecision}");
Console.WriteLine($" NegativeRecall: {metrics.NegativeRecall}");
Console.WriteLine();
// the rest of the code goes here....
```
This code calls **Transform** to set up a diagnosis for every patient in the set, and **Evaluate** to compare these predictions to the ground truth and automatically calculate all evaluation metrics:
* **Accuracy**: this is the number of correct predictions divided by the total number of predictions.
* **AreaUnderRocCurve**: a metric that indicates how accurate the model is: 0 = the model is wrong all the time, 0.5 = the model produces random output, 1 = the model is correct all the time. An AUC of 0.8 or higher is considered good.
* **AreaUnderPrecisionRecallCurve**: an alternate AUC metric that performs better for heavily imbalanced datasets with many more negative results than positive.
* **F1Score**: this is a metric that strikes a balance between Precision and Recall. It’s useful for imbalanced datasets with many more negative results than positive.
* **LogLoss**: this is a metric that expresses the size of the error in the predictions the model is making. A logloss of zero means every prediction is correct, and the loss value rises as the model makes more and more mistakes.
* **LogLossReduction**: this metric is also called the Reduction in Information Gain (RIG). It expresses the probability that the model’s predictions are better than random chance.
* **PositivePrecision**: also called ‘Precision’, this is the fraction of positive predictions that are correct. This is a good metric to use when the cost of a false positive prediction is high.
* **PositiveRecall**: also called ‘Recall’, this is the fraction of positive predictions out of all positive cases. This is a good metric to use when the cost of a false negative is high.
* **NegativePrecision**: this is the fraction of negative predictions that are correct.
* **NegativeRecall**: this is the fraction of negative predictions out of all negative cases.
When monitoring heart disease, you definitely want to avoid false negatives because you don’t want to be sending high-risk patients home and telling them everything is okay.
You also want to avoid false positives, but they are a lot better than a false negative because later tests would probably discover that the patient is healthy after all.
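
To make the trade-off concrete, here is a small worked example. The numbers below are purely illustrative (they are not taken from this dataset or produced by this code); they only show how the metrics are computed from a confusion matrix of 60 test patients:

```
                   Predicted risk   Predicted healthy
Actual risk            18 (TP)            2 (FN)
Actual healthy          6 (FP)           34 (TN)

Accuracy  = (TP + TN) / total   = (18 + 34) / 60 ≈ 0.87
Precision = TP / (TP + FP)      = 18 / 24        = 0.75
Recall    = TP / (TP + FN)      = 18 / 20        = 0.90
F1        = 2 * P * R / (P + R) ≈ 0.82
```

The two false negatives are the costly mistakes here: those are high-risk patients the model would send home. That is why recall deserves at least as much attention as raw accuracy for this problem.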
To wrap up, you’re going to create a new patient record and ask the model to make a prediction:
```csharp
// set up a prediction engine
Console.WriteLine("Making a prediction for a sample patient...");
var predictionEngine = context.Model.CreatePredictionEngine<HeartData, HeartPrediction>(model);
// create a sample patient
var heartData = new HeartData()
{
Age = 36.0f,
Sex = 1.0f,
Cp = 4.0f,
TrestBps = 145.0f,
Chol = 210.0f,
Fbs = 0.0f,
RestEcg = 2.0f,
Thalac = 148.0f,
Exang = 1.0f,
OldPeak = 1.9f,
Slope = 2.0f,
Ca = 1.0f,
Thal = 7.0f,
};
// make the prediction
var prediction = predictionEngine.Predict(heartData);
// report the results
Console.WriteLine($" Age: {heartData.Age} ");
Console.WriteLine($" Sex: {heartData.Sex} ");
Console.WriteLine($" Cp: {heartData.Cp} ");
Console.WriteLine($" TrestBps: {heartData.TrestBps} ");
Console.WriteLine($" Chol: {heartData.Chol} ");
Console.WriteLine($" Fbs: {heartData.Fbs} ");
Console.WriteLine($" RestEcg: {heartData.RestEcg} ");
Console.WriteLine($" Thalac: {heartData.Thalac} ");
Console.WriteLine($" Exang: {heartData.Exang} ");
Console.WriteLine($" OldPeak: {heartData.OldPeak} ");
Console.WriteLine($" Slope: {heartData.Slope} ");
Console.WriteLine($" Ca: {heartData.Ca} ");
Console.WriteLine($" Thal: {heartData.Thal} ");
Console.WriteLine();
Console.WriteLine($"Prediction: {(prediction.Prediction ? "Elevated heart disease risk" : "Normal heart disease risk" )} ");
Console.WriteLine($"Probability: {prediction.Probability:P2} ");
```
This code uses the **CreatePredictionEngine** method to set up a prediction engine. The two type arguments are the input data class and the class to hold the prediction. And once the prediction engine is set up, you can simply call **Predict** to make a single prediction.
The code creates a patient record for a 36-year old male with asymptomatic chest pain and a bunch of other medical info. What’s the model going to predict?
Time to find out. Go to your terminal and run your code:
```bash
$ dotnet run
```
What results do you get? What is your accuracy, precision, recall, AUC, AUCPRC, and F1 value?
Is this dataset balanced? Which metrics should you use to evaluate your model? And what do the values say about the accuracy of your model?
And what about our patient? What did your model predict?
Think about the code in this assignment. How could you improve the accuracy of the model? What are your best AUC and AUCPRC values?
Share your results in our group!
|
Markdown
|
UTF-8
| 5,339 | 3.546875 | 4 |
[] |
no_license
|
Thirty-Four
As Mu Lanhua pushed Chen Wanguan toward the window, she went on: "Murders have happened in the museum one after another, and you obviously knew about them already. Why did you still deliver the goods on schedule?"
"Well..." Chen Wanguan hesitated for a moment. "Before every delivery, that person in the museum would, as usual, give me a phone call. I received that call yesterday, so I came today to deliver as scheduled."
Mu Lanhua felt a sudden surge of delight and said: "What did the voice on that call sound like? Quick, describe it to me."
"It had been distorted. It sounded like the voice of a small child."
Mu Lanhua gave a soft "hm" and thought to herself that this mysterious figure in the museum was truly remarkably careful; even when telephoning Chen Wanguan he still disguised his original voice.
Changing a person's voice is a very easy thing to do. The simplest method is to record what is said on a tape recorder at slow speed and then play it back at high speed; the voice then changes so much that nobody can recognize it. Something had happened tonight on the third floor of the museum, and if the news of Chen Wanguan's capture did not leak out, would that person telephone Chen Wanguan to ask about it?
Mu Lanhua had already formed a plan: first escort Chen Wanguan to the police station, press him for the way he and the person in the museum communicated by telephone, and then wait for that person's call.
She thought for a moment, pointed at the window, and said: "Climb down through this window, and do not try any tricks. Remember that you are within range of my pistol the whole time. If you doubt my marksmanship, you are welcome to try racing a bullet."
"I... would not dare..." Chen Wanguan, trembling, climbed out of the window. He practically slid straight down the drainpipe; that he did not fall to his death counted as something of a miracle.
Mu Lanhua quickly climbed down after him. One in front and one behind, the two of them left the little alley, went around to the main entrance of the museum, crossed the road, hailed a taxi, and arrived at the police station.
At headquarters, Gao Xiang was busy at work.
He had found no clue in that small piece of brocade, but he had found a small packet of drugs inside Ke Yimeng's collar. He was just preparing to contact Mu Lanhua when she arrived, escorting Chen Wanguan.
At the police station, under the glare of bright lights, Chen Wanguan recounted, truthfully and in full, the method by which he and the mysterious figure in the museum kept in contact.
The method of contact was extremely clever. If Chen Wanguan had something to report to the mysterious figure, he would telephone a public phone booth at noon. The phone in the booth would ring and naturally nobody would answer, yet by some unknown means the mysterious figure could hear the ringing; the arranged signal was six rings, hang up, then six rings again.
(Mu Lanhua and Gao Xiang reckoned that either the mysterious figure had a recorder installed near the phone booth, or he had posted someone nearby with a long-range listening device, constantly checking whether the phone was ringing.)
The mysterious figure knew Chen Wanguan's telephone number.
Once Chen Wanguan had notified the mysterious figure in this way, the mysterious figure would naturally take the initiative to contact him. Judging from this way of keeping in touch, the mysterious figure was even more important than Chen Wanguan.
For the mysterious figure could contact Chen Wanguan at any moment, while Chen Wanguan could not speak with him directly. Within the whole drug-trafficking organization, Chen Wanguan held the more public position, while the mysterious figure stayed far better hidden.
A man who concealed himself behind such an air of mystery was certainly a person of great importance. After a brief discussion, Gao Xiang and Mu Lanhua reached a decision.
They decided to let Chen Wanguan go back home.
Not to truly release him, of course. The police first placed everyone in Chen Wanguan's household under watch and sent officers to pose as his chauffeur, gardener, and servants; Gao Xiang himself played the part of Chen Wanguan's secretary, keeping him under the closest supervision. Then tracing equipment was quickly fitted to the telephone line of Chen Wanguan's house.
That way, the source of any call coming in for Chen Wanguan could be traced within three minutes. Chen Wanguan had also promised to cooperate fully with the police.
Everything was carried out in the dark of night, and by daybreak the arrangements were entirely in place. Unless someone had seen with his own eyes Mu Lanhua escorting Chen Wanguan out of the museum, there was no way anyone could know that Chen Wanguan had fallen into the hands of the police.
Mu Lanhua and Gao Xiang also drew a preliminary conclusion about the whole affair: the murders in the museum and the death of Ke Yimeng were all the work of that mysterious figure.
And that mysterious figure was, of course, the drug lord who had been lying low in this city all along, hidden so cleverly that the police, though they knew such a man existed, could not lay hands on a single clue.
The several murders were all connected with drug trafficking. Ke Yimeng had clearly been involved with the traffickers (there were drugs in his collar). His killing was the result of infighting, or, more likely, a secret had been discovered by chance, so the three of them had to be killed to silence them and keep the secret from leaking out.
As for old Master Gu, he was a tragic figure whose former position and past tragedy had been exploited as a shield for the killings, so that the police would not pursue the cases.
It was indeed an extremely clever arrangement.
According to Mu Lanhua and Gao Xiang's deduction, the first two victims were killed under just such a clever arrangement. The two of them had most likely stumbled upon the secret and had to be silenced, so Ke Yimeng thought of old Master Gu, falsely claimed that the secret of those earlier years had been discovered, lured old Master Gu back, and only then set about murdering the two of them.
|
Python
|
UTF-8
| 636 | 3.015625 | 3 |
[] |
no_license
|
class Solution:
def countLatticePoints(self, circles: List[List[int]]) -> int:
res = set()
def get_points(x, y, r):
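            # for every integer x inside the circle, compute the integer y-range that stays within radius r and collect those lattice points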
points = []
for x1 in range(x - r, x + r + 1):
diff_y_sqr = r ** 2 - (x1 - x) ** 2
y1 = math.ceil(y - math.sqrt(diff_y_sqr))
y2 = math.floor(y + math.sqrt(diff_y_sqr))
for y3 in range(y1, y2 + 1):
points.append((x1, y3))
return points
for x, y, r in circles:
points= get_points(x, y, r)
res |= set(points)
return len(res)
|
Markdown
|
UTF-8
| 3,199 | 3.234375 | 3 |
[] |
no_license
|
<!--
title: REST
weight: 4706
-->
# REST Trigger
This trigger provides your flogo application the ability to start a flow via REST over HTTP
## Installation
```bash
flogo install github.com/project-flogo/contrib/trigger/rest
```
## Metadata
```json
{
"settings": [
{
"name": "port",
"type": "int",
"required" : true
}
],
"handler": {
"settings": [
{
"name": "method",
"type": "string",
"required" : true,
"allowed" : ["GET", "POST", "PUT", "PATCH", "DELETE"]
},
{
"name": "path",
"type": "string",
"required" : true
}
]
},
"output": [
{
"name": "pathParams",
"type": "params"
},
{
"name": "queryParams",
"type": "params"
},
{
"name": "headers",
"type": "params"
},
{
"name": "content",
"type": "object"
}
],
"reply": [
{
"name": "code",
"type": "int"
},
{
"name": "data",
"type": "any"
}
]
}
```
### Details
#### Trigger Settings:
| Setting | Required | Description |
|:---------|:---------|:------------|
| port | true | The port to listen on
#### Handler Settings:
| Setting | Required | Description |
|:---------|:---------|:------------|
| method | true | The HTTP method (ie. GET,POST,PUT,PATCH or DELETE)
| path | true | The resource path
#### Output:
|Name | Description |
|:--------|:------------|
| pathParams | The path params, ex. /device/:id, 'id' would be a path param
| queryParams | The query params
| headers | The headers
| content | The content of the request
#### Reply:
|Name | Description |
|:--------|:------------|
| code | The http code to reply with
| data | The data to reply with
## Example Configurations
Triggers are configured via the triggers.json of your application. The following are some example configurations of the REST Trigger.
### POST
Configure the Trigger to handle a POST on /device
```json
{
"triggers": [
{
"id": "flogo-rest",
"ref": "github.com/project-flogo/contrib/trigger/rest",
"settings": {
"port": "8080"
},
"handlers": [
{
"settings": {
"method": "POST",
"path": "/device"
},
"action": {
"ref": "github.com/project-flogo/flow",
"settings": {
"flowURI": "res://flow:new_device_flow"
}
}
}
]
}
]
}
```
### GET
Configure the Trigger to handle a GET on /device/:id
```json
{
"triggers": [
{
"id": "flogo-rest",
"ref": "github.com/project-flogo/contrib/trigger/rest",
"settings": {
"port": "8080"
},
"handlers": [
{
"settings": {
"method": "GET",
"path": "/device/:id"
},
"action": {
"ref": "github.com/project-flogo/flow",
"settings": {
"flowURI": "res://flow:get_device_flow"
},
"input":{
"deviceId":"=$.pathParams.id"
}
}
}
]
}
]
}
```
|
Python
|
UTF-8
| 768 | 2.953125 | 3 |
[] |
no_license
|
import sys
sys.stdin = open("다솔이의다이아몬드.txt","r")
def dfs(y,x):
for nxt in range(8):
ny = y+dy[nxt]
nx = x+dx[nxt]
if 0<=ny<5 and 0<=nx<x_length and result[ny][nx] == '.':
result[ny][nx] = '#'
for tc in range(int(input())):
data = input()
n = len(data)
x_length = 4*n+1
result = [ ['.']*x_length for _ in range(5) ]
for i in range(n):
result[2][4*i+2] = data[i]
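    # the eight (dy, dx) offsets around each letter that trace its diamond outline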
dx = [-2,2,-1,1,0,0,-1,1]
dy = [0,0,-1,-1,-2,2,1,1]
for y in range(5):
for x in range(x_length):
if result[y][x] != '.' and result[y][x] != '#':
dfs(y,x)
for y in range(5):
for x in range(x_length):
print(result[y][x],end="")
print()
|
Markdown
|
UTF-8
| 2,810 | 2.75 | 3 |
[] |
no_license
|
# The Black Pearl
The Black Pearl is a Crew Web Page created for DGS-1 to better equip analysts by reliably directing them to mission critical intelligence resources.

## Stable Executables
In the [master/artifacts](https://gitlab.devops.geointservices.io/dgs1sdt/blackpearl/tree/master/artifacts) folder, you can find the latest stable builds of The Black Pearl.
### Differences
* io: Meant for deployment on geointservices.io. Uses Basic Authentication at website.com/login to sign in.
* muhj: Meant for deployment on muhj workstations. Uses Smart Card Authentication at website.com/ to sign in.
### Endpoints
* /admin: Administrator functions such as updating clocks, weather, acronyms, etc.
* /metrics: Metric information based on the usage of The Black Pearl
* /login: (IO Only) Allows the user to log in with either of the following accounts:
* CROSS.JORDAN.MIDDLE.0123456789
* YODA.MASTER.MIDDLE.0123456789
## Installing (Production)
### Prerequisites
* [mySQL 5.7](https://downloads.mysql.com/archives/installer/)
* [Java 8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)
### Instructions
Download the [latest stable jar](https://gitlab.devops.geointservices.io/dgs1sdt/blackpearl/tree/master/artifacts) for your use case.
Run the following commands in Command Prompt to create the database:
```
mysql -u root -p -e "create database blackpearldev;"
mysql -u root -p -e "create user 'blackpearl'@'localhost';"
mysql -u root -p -e "GRANT ALL PRIVILEGES ON blackpearldev.* TO 'blackpearl'@'localhost';"
```

Create the following [Environmental Variables](https://java.com/en/download/help/path.xml):
```
BLACKPEARL_DB_URL : jdbc:mysql://localhost:3306/blackpearldev?useSSL=false
BLACKPEARL_DB_USERNAME : blackpearl
```

Run the following command to start the Black Pearl.
```
java -jar <path-to-executable>\blackpearl-<type>.jar
```
Access The Black Pearl at one of the following urls.
```
io: http://localhost:8080/
muhj: https://localhost:8080/
```
## Installing (Development)
If you wish to install The Black Pearl on a development computer, follow the Production commands, then follow these instructions.
### Prerequisites
* [Node](https://nodejs.org/en/)
* [Yarn](https://yarnpkg.com/en/docs/install)
* [Git](https://git-scm.com/download)
### Instructions
Clone the repository
```
git clone git@gitlab.devops.geointservices.io:dgs1sdt/blackpearl.git
```
Install Dependencies & Test
```
cd <git_dir>\blackpearl\scripts
.\tests.sh
```
|
SQL
|
UTF-8
| 1,070 | 3.53125 | 4 |
[] |
no_license
|
use world;
-- aggregate Functions
select sum(population) as total_populasi
from city
where CountryCode = 'IDN';
select count(name) as total_city
from city
where CountryCode = 'IDN';
select avg(population) as rata_rata_populasi
from city
where CountryCode = 'IDN';
select min(population) as populasi_terkecil
from city
where CountryCode = 'IDN';
select max(population) as populasi_terbesar
from city
where CountryCode = 'IDN';
use world;
-- scalar Functions
select name, round(LifeExpectancy)
from country;
-- round(x, 2) keeps 2 digits after the decimal point
select name, region, round(Population/SurfaceArea, 2) as kepadatan_penduduk
from country
where region = 'southeast asia';
-- compute the length of a string
select name, length(name) as length_name
from country
where region = 'southeast asia'
order by length_name desc;
-- convert letters to uppercase and lowercase
select ucase(name), population
from country
where region = 'southeast asia'
order by population desc;
select lcase(name), population
from country
where region = 'southeast asia'
order by population desc;
|
Shell
|
UTF-8
| 796 | 2.9375 | 3 |
[] |
permissive
|
#!/bin/sh
# Purpose:
# Fifth step in cross-layer alignment.
#
# > cross_thisblock -script=scriptpath
#
# Do one block alignment job, data read from 'blockdat.txt'.
#
# Options:
# -evalalldz ;force evaluation of all maxdz layers
# -abdbg ;make diagnostic images and exit (Z^Z-1)
# -abdbg=k ;make diagnostic images and exit (Z^k)
# -abctr=0 ;debug at this a-to-b angle
export MRC_TRIM=12
if (($# == 1))
then
last=$1
else
last=$2
fi
cd ..
for lyr in $(seq $1 $last)
do
echo $lyr
if [ -d "$lyr" ]
then
cd $lyr
for jb in $(ls -d * | grep -E 'D[0-9]{1,}_[0-9]{1,}')
do
cd $jb
QSUB_1NODE.sht 7 "x$jb-$lyr" "" 1 8 "cross_thisblock -script=../../scriptparams.txt"
cd ..
done
cd ..
fi
done
|
Python
|
UTF-8
| 7,008 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
""" Defining the MNSIT dataset"""
from __future__ import absolute_import
import gzip
import os
import sys
import urllib.request
import numpy as np
import tensorflow as tf
from ttools.core.datasets import Dataset
from ttools.utils.utils import image_to_tfexample
class MNIST(Dataset):
# Initing dataset
def __init__(self, name='MNIST'):
super().__init__(name)
self._data_url = 'http://yann.lecun.com/exdb/mnist/'
self._image_size = 28
self._num_channels = 1
self.new_data = tf.gfile.Exists(self._get_output_filename(self.get_data_dir(), 'train')) \
and tf.gfile.Exists(self._get_output_filename(self.get_data_dir(), 'test'))
def _extract_labels(self, filename, num_labels):
"""Extract the labels into a vector of int64 label IDs.
Args:
filename: The path to an MNIST labels file.
num_labels: The number of labels in the file.
Returns:
A numpy array of shape [number_of_labels]
"""
print('Extracting labels from: ', filename)
with gzip.open(filename) as bytestream:
bytestream.read(8)
buf = bytestream.read(1 * num_labels)
labels = np.frombuffer(buf, dtype=np.uint8).astype(np.int64)
return labels
def _extract_images(self, filename, num_images):
"""Extract the images into a numpy array.
Args:
filename: The path to an MNIST images file.
num_images: The number of images in the file.
Returns:
A numpy array of shape [number_of_images, height, width, channels].
"""
print('Extracting images from: ', filename)
with gzip.open(filename) as bytestream:
bytestream.read(16)
buf = bytestream.read(
self._image_size * self._image_size * num_images * self._num_channels)
data = np.frombuffer(buf, dtype=np.uint8)
data = data.reshape(num_images, self._image_size, self._image_size, self._num_channels)
return data
def _download_data(self, files_to_download):
for filename in files_to_download:
filepath = os.path.join(self._data_dir, filename)
if not os.path.exists(filepath):
                print('Downloading file %s ...' % filename)
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %.1f%%' % (
float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(self._data_url+filename,
filepath,
_progress)
print()
with tf.gfile.GFile(filepath) as f:
size = f.size()
print('Downloaded', filename, size, 'bytes.')
else:
                print('File %s exists.' % filename)
def _process(self):
train_data_filename = 'train-images-idx3-ubyte.gz'
train_labels_filename = 'train-labels-idx1-ubyte.gz'
test_data_filename = 't10k-images-idx3-ubyte.gz'
test_labels_filename = 't10k-labels-idx1-ubyte.gz'
all_files_to_download = [train_data_filename, train_labels_filename,
test_data_filename, test_labels_filename]
all_files_downloaded = all_files_to_download
training_filename = self._get_output_filename(self._data_dir, 'train')
testing_filename = self._get_output_filename(self._data_dir, 'test')
if tf.gfile.Exists(training_filename) and tf.gfile.Exists(testing_filename):
print('Dataset files already exist. Exiting without re-creating them.')
return
self._download_data(all_files_to_download)
num_train_images = 60000
num_test_images = 10000
self._write_to_tfrecords(train_data_filename, train_labels_filename,
training_filename, num_train_images)
self._write_to_tfrecords(test_data_filename, test_labels_filename,
testing_filename, num_test_images)
return all_files_downloaded
def _get_output_filename(self, data_dir, split_name):
return '%s/mnist_%s.tfrecord' % (data_dir, split_name)
def _add_to_tfrecords(self, images, labels,
num_datapoints, tfrecord_writer):
"""Loads data from the binary MNIST files and writes files to a TFRecord.
Args:
data_filename: The filename of the MNIST images.
labels_filename: The filename of the MNIST labels.
num_images: The number of images in the dataset.
tfrecord_writer: The TFRecord writer to use for writing.
"""
shape = (self._image_size, self._image_size, self._num_channels)
with tf.Graph().as_default():
image = tf.placeholder(dtype=tf.uint8, shape=shape)
float_image = tf.cast(image, dtype=tf.float32)
with tf.Session('') as sess:
for j in range(num_datapoints):
                    sys.stdout.write('\r>> Converting image %d/%d' % (j + 1, num_datapoints))
sys.stdout.flush()
img = sess.run(float_image, feed_dict={image: images[j]})
example = image_to_tfexample(
tf.compat.as_bytes(img.tostring()), 'png'.encode(),
self._image_size, self._image_size,
int(labels[j]))
tfrecord_writer.write(example.SerializeToString())
def _write_to_tfrecords(self, data_filename,
labels_filename,
output_filename,
num_points):
# If the tfrecord already exists, return
if os.path.exists(output_filename):
return
data = self._extract_images(os.path.join(self._data_dir, data_filename),
num_points)
labels = self._extract_labels(os.path.join(self._data_dir, labels_filename),
num_points)
with tf.python_io.TFRecordWriter(output_filename) as tfrecord_writer:
self._add_to_tfrecords(data, labels, num_points, tfrecord_writer)
def _clean_temp_files(self, filenames):
for filename in filenames:
filepath = os.path.join(self._data_dir, filename)
tf.gfile.Remove(filepath)
def get_data_shape(self):
return (self._image_size, self._image_size, self._num_channels)
def run(self):
downloaded_files = self._process()
self._clean_temp_files(downloaded_files)
print("\n\nAll data downloaded and converted to TFRecords.")
|
Python
|
UTF-8
| 453 | 4.1875 | 4 |
[] |
no_license
|
# Try running this program.
# Then change it to generate another subplot with the product of y1 and y2.
import numpy as np
import matplotlib.pyplot as plt
plt.figure(1)
t = np.arange(0.0, 5.0, 0.1) # try printing t
print(t)
plt.subplot(3, 1, 1)
y1 = np.exp(-t)
plt.plot(t, y1, 'b') # try 'g' or 'bo' or 'k+'
plt.subplot(3, 1, 2)
y2 = np.cos(2*np.pi*t)
plt.plot(t, y2, 'ro-')
# exercise 10
plt.subplot(3,1,3)
plt.plot(t, y1*y2, 'g')
plt.show()
|
Java
|
UTF-8
| 4,680 | 2.015625 | 2 |
[] |
no_license
|
package id.ac.polinema.intentexercise;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import com.google.android.material.textfield.TextInputEditText;
import com.google.android.material.textfield.TextInputLayout;
import com.mobsandgeeks.saripaar.ValidationError;
import com.mobsandgeeks.saripaar.Validator;
import com.mobsandgeeks.saripaar.annotation.ConfirmPassword;
import com.mobsandgeeks.saripaar.annotation.Domain;
import com.mobsandgeeks.saripaar.annotation.Email;
import com.mobsandgeeks.saripaar.annotation.NotEmpty;
import com.mobsandgeeks.saripaar.annotation.Password;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
import id.ac.polinema.intentexercise.model.UserModel;
public class RegisterActivity extends AppCompatActivity implements Validator.ValidationListener {
public static final String USER_KEY = "USER_KEY";
    Bitmap bitmap; // stores the selected profile image
private static final int GALLERY_REQUEST_CODE = 1;
private static final String TAG = RegisterActivity.class.getCanonicalName();
@NotEmpty
private TextInputEditText fullnameInput;
@Email
private TextInputEditText emailInput;
@Password
private TextInputEditText passwordInput;
@ConfirmPassword
private TextInputEditText confirmPassword;
@Domain
private TextInputEditText homepageInput;
private TextInputEditText aboutInput;
private ImageView image_profile;
Validator validator;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_register);
fullnameInput = findViewById(R.id.text_fullname);
emailInput = findViewById(R.id.text_email);
passwordInput = findViewById(R.id.text_password);
confirmPassword = findViewById(R.id.text_confirm_password);
homepageInput = findViewById(R.id.text_homepage);
aboutInput = findViewById(R.id.text_about);
image_profile= findViewById(R.id.image_profile);
validator = new Validator(this);
validator.setValidationListener(this);
}
public void handleOk(View view) {
validator.validate();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if(resultCode == RESULT_CANCELED){
return;
}
if(requestCode == GALLERY_REQUEST_CODE){
if(data!= null){
try{
Uri imageUri = data.getData();
bitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), imageUri);
image_profile.setImageBitmap(bitmap);
}catch (IOException e){
Toast.makeText(this, "Can't load image", Toast.LENGTH_SHORT).show();
Log.e(TAG, e.getMessage());
}
}
}
}
public void handleEditPhoto(View view) {
Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
startActivityForResult(intent, GALLERY_REQUEST_CODE);
}
@Override
public void onValidationSucceeded() {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.PNG, 50, baos);
Intent intent = new Intent(this, ProfileActivity.class );
intent.putExtra(USER_KEY,new UserModel(fullnameInput.getText().toString(),
emailInput.getText().toString(),
passwordInput.getText().toString(),
homepageInput.getText().toString(),
aboutInput.getText().toString()));
intent.putExtra("profileImage",baos.toByteArray());
startActivity(intent);
}
@Override
public void onValidationFailed(List<ValidationError> errors) {
for (ValidationError error : errors) {
View view = error.getView();
String message = error.getCollatedErrorMessage(this);
// Display error messages ;)
if (view instanceof EditText) {
((EditText) view).setError(message);
} else {
Toast.makeText(this, message, Toast.LENGTH_LONG).show();
}
}
}
}
|
Shell
|
UTF-8
| 733 | 2.921875 | 3 |
[] |
no_license
|
#!/bin/bash
#
# variables from arguments string in jdl
#
RUN_DIR=$1
INPUT_NAME=$2
ANALYSIS_FILE_NAME=$3
DET_TYPE=$4
echo $RUN_DIR
echo $INPUT_NAME
echo $ANALYSIS_FILE_NAME
echo $DET_TYPE
bash
echo ""
echo "CMSSW on Condor"
echo ""
START_TIME=`/bin/date`
echo "started at $START_TIME"
CMSSWBASE=/home/jengbou/workspace/CMSSW_7_6_3/src/
#
# setup CMSSW software environment at UMD
#
export VO_CMS_SW_DIR=/sharesoft/cmssw
. $VO_CMS_SW_DIR/cmsset_default.sh
cd ${CMSSWBASE}
eval `scramv1 runtime -sh`
#G4BASE=/data/users/jengbou/workspace/UserCode/geant4.10.03-install/bin
G4BASE=/data/users/avermeer/geant4/geant4.9.6.p03-build/bin
cd ${G4BASE}
. geant4.sh
cd $RUN_DIR
./LYSim ${DET_TYPE} ${INPUT_NAME} ${ANALYSIS_FILE_NAME}
|
PHP
|
UTF-8
| 3,028 | 2.65625 | 3 |
[] |
no_license
|
<?php
/**
* Created by PhpStorm.
* User: Aymen
* Date: 01/10/2018
* Time: 15:38
*/
namespace App\Controller\Rest ;
use App\Entity\Tracker;
use Doctrine\ORM\EntityManager;
use FOS\RestBundle\Controller\FOSRestController;
use FOS\RestBundle\Controller\Annotations as Rest;
use FOS\RestBundle\View\View;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
class TrackerController extends FOSRestController
{
/**
* Creates a tracker resource
* @Rest\Post("/tracker")
* @return View
*/
public function AddTrackerAction(Request $request):View{
/** @var EntityManager $em */
$em = $this->getDoctrine()->getManager();
        // get all values from the HTTP request body using keys
//convert string time to suitable time format equivalent to Doctrine DateTime
$time = \DateTime::createFromFormat('Y-m-d H:i:s', $request->get('time'));
$description = $request->get("description");
// fetching user object from database using user primary key (Id())
$user = $em->getRepository('App:User')->find($request->get('userId'));
// creating instance of Tracker object
$tracker = new Tracker();
//setting all the attributes
$tracker->setTime($time);
$tracker->setDescription($description);
$tracker->setUser($user);
// save the object in the database
$em->persist($tracker);
$em->flush();
return new View($tracker,Response::HTTP_CREATED);
}
/**
* Fetching all trackers from database
* @Rest\Get("/trackers")
* @return View
*/
public function getAllTrackersAction(Request $request):View{
/** @var EntityManager $em */
$em = $this->getDoctrine()->getManager();
// fetching all data from database
$qb = $em->getRepository('App:Tracker')->findAll();
        //making pagination using knp_paginator and loading 5 items per page
$paginator = $this->get('knp_paginator');
$pagination = $paginator->paginate($qb, $request->get('page', 1), 5);
return new View($pagination,Response::HTTP_ACCEPTED);
}
/**
* Fetching all trackers relative to a user
* @Rest\Get("/trackers/{userId}")
* @return View
*/
public function getAllTrackersByUserIdAction(Request $request,$userId):View{
/** @var EntityManager $em */
$em = $this->getDoctrine()->getManager();
// getting the user object by the id
$user = $em->getRepository('App:User')->find($userId);
// fetching all data from database relative to the user
$qb = $em->getRepository('App:Tracker')->findBy(array("user"=>$user));
        //making pagination using knp_paginator and loading 5 items per page
$paginator = $this->get('knp_paginator');
$pagination = $paginator->paginate($qb, $request->get('page', 1), 5);
return new View($pagination,Response::HTTP_ACCEPTED);
}
}
|
Markdown
|
UTF-8
| 3,569 | 3.1875 | 3 |
[] |
no_license
|
# RU
# Пасьянс «Косынка», или «Клондайк» — старинный карточный пасьянс.
## Правила
Играется одной колодой в 52 карты. Цель игры — разложить карты по мастям в порядке от туза до короля в четыре стопки (их иногда называют базовыми, или «домами»).
Карту можно перекладывать на другую рангом выше, но другого цвета (чёрного или красного). В каждую из четырёх базовых стопок (домов), по которым необходимо разложить все карты,
сначала кладутся тузы, затем двойки, тройки и так далее до короля. Карты можно сдавать из оставшейся от раздачи колоды (в левом верхнем углу) либо по одной, либо по три штуки,
в зависимости от модификации. В свободную ячейку (не дом) можно положить только короля. Игра заканчивается, когда все карты разложены. Цель игры состоит в том,
чтобы разложить все карты в четыре стопки по возрастанию, начиная с туза, так, чтобы карты одной масти находились в одной стопке.
## Описание проекта:
Данный проект является копией классической «Косынки», но с некоторыми изменениями в правилах игры. Игра написана на C# Windows Forms, создана для курсового/дипломного проекта.
Одно из любимых заданий Российской системы образования для выпускников.
В данном проекте используется 104 карты(две одинаковые колоды) остальной функционал идентичен с классикой.
____
# ENG
# Solitaire «Kosynka», or «Klondike» - an old card solitaire.
## Rules
It is played with one deck of 52 cards. The object of the game is to arrange the cards by suit in order from ace to king in four piles (sometimes called basic or "houses").
The card can be shifted to another one of higher rank, but of a different color (black or red). In each of the four basic piles (houses), in which all cards must be laid,
first aces are put, then deuces, threes and so on up to the king. The cards can be dealt from the deck remaining from the deal (in the upper left corner) either one or three pieces,
depending on modification. Only the king can be placed in a free cell (not a house). The game ends when all cards are laid out. The aim of the game is
to arrange all the cards in four piles in ascending order, starting with the ace, so that the cards of the same suit are in the same pile.
## Project Description:
This project is a copy of the classic «Kosynka» (Klondike), but with some changes in the rules of the game. The game is written in C# Windows Forms, created for a course/diploma project.
One of the favorite tasks of the Russian education system for graduates.
This project uses 104 cards (two identical decks), the rest of the functionality is identical to the classics.
|
Ruby
|
UTF-8
| 1,539 | 4.0625 | 4 |
[] |
no_license
|
# Method: triangle
# Argument: 3 numeric values
# Side effects: None
# Return value: A symbol. :equilateral, :isosceles, :scalene, or :invalid
# Depending on what a triangle made up of sides whose lengths
# are given by the 3 numeric arguments would be.
# Edge cases:
# When the 2 smaller sides are exactly equal to the larger side, the method
# should return :invalid
# triangle(10, 7, 3) => :invalid
# When 1 or more sides is 0 or negative, the method should return invalid.
# triangle(0, 0, 0) => :invalid
#
# Algorithm:
# side1, side2, side3 <= arguments
# 1) sides = [side1, side2, side3]
# 2) If any of the sides is 0: return invalid
# 3) if largest side is larger than or equal to the other 2 combined, return
# :invalid
# 4) If all sides are equal return :equilateral
# 5) If 2 sides are equal return :isosceles
# 6) If none of the previous conditions were met, return :scalene
def triangle(side1, side2, side3)
sides = [side1, side2, side3].sort
case
when sides.any? { |side| side == 0 } then :invalid
when sides.last >= sides.first(2).inject(:+) then :invalid
when sides.last == sides.first then :equilateral
when sides[0] == sides[1] || sides[1] == sides[2] then :isosceles
else :scalene
end
end
p triangle(3, 3, 3) == :equilateral
p triangle(3, 3, 1.5) == :isosceles
p triangle(3, 4, 5) == :scalene
p triangle(0, 3, 3) == :invalid
p triangle(3, 1, 1) == :invalid
|
Java
|
UTF-8
| 743 | 2.203125 | 2 |
[] |
no_license
|
package org.nerdizin.eztrial.web.validator;
import org.nerdizin.eztrial.web.model.common.Pagination;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
public class PaginationValidator implements Validator {
@Override
public boolean supports(final Class<?> aClass) {
return Pagination.class.equals(aClass);
}
@Override
public void validate(final Object o, final Errors errors) {
final Pagination pagination = (Pagination) o;
if (pagination.getSortBy() == null || pagination.getSortBy().length == 0) {
pagination.setSortBy(new String[]{"oid"});
}
if (pagination.getRows() < 1) {
pagination.setRows(1);
}
if (pagination.getPage() < 0) {
pagination.setPage(0);
}
}
}
|
Markdown
|
UTF-8
| 6,020 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
# Graph Convolutional Networks
## Notation/Setup
- Inputs
- Node-level features matrix X (NxD)
- Adjacency matrix A (NxN)
- Output
- Node-level output features matrix Z (NxF)
- Forward propagation
- H^(l+1) = f(H^l, A)
- H^(0) = X, H^(L) = Z
- The specific models differ only in how f(⋅,⋅) is chosen and parameterized
## Intuition (Blog)
f(H^l, A) = ReLU(A H^(l) W^(l))
- H^(l): l^th layer activations
- W^(l): l^th layer weight matrix
- A: adjacency matrix
- A sums up feature vectors of neighboring nodes but not the feature vector of the main node. Fix is to add self loops:
- f(H^l, A) = ReLU((A+I) H^(l) W^(l))
- Now, A sums the feature vectors of neighboring nodes and the node's own feature vector. Doing so monotonically increases the feature vector values, which does not behave well with backprop. The scaling fix is to take the average of the feature vectors
- f(H^l, A) = ReLU(inv(D')A'H^(l)W^(l))
- so each nonzero value in inv(D')A' is 1/(d+1)
- Kipf paper: f(H^(l), A) = ReLU(inv(D^0.5) A inv(D^0.5) H W) (a minimal NumPy sketch of this rule appears at the end of this section)
- inv(D^0.5) A inv(D^0.5) is like the normalized Laplacian
- each nonzero value in the matrix is 1/sqrt(di*dj) if i and j are connected
- if no features are available, let X = I
- Multilayer forward propagation
- H^(1) = g(inv(D')A'H^(0) W^(0)) = g(inv(D')A' X W^l)
- H^(2) = g(inv(D')A'H^1 W^1)
- H^1 is a nonlinear transformation of a weighted linear combination of its features and the feature vectors of the neighbors
- H^1 features characterize the neighborhoods of the nodes
- H^2 is a nonlinear transformation of a weighted linear combination of the neighborhoods of all neighbors of a node
- The 3-layer GCN now performs three propagation steps during the forward pass and effectively convolves the 3rd-order neighborhood of every node (all nodes up to 3 "hops" away)
- Interpreting GCN model: Weisfeiler-Lehman algorithm with nonlinear transformation layer-wise; iteratively sharing feature vectors to "convolve" the k-th order neighborhood of every node
- h^(l+1)_(vi) = g(sum over neighbors j of i: 1/c(ij) h^l_(vj) W^l)
- Extending GCN for semi-supervised learning
- We observe that the 3-layer GCN model manages to linearly separate the communities, given only one labeled example per class. This is a somewhat remarkable result, given that the model received no feature description of the nodes
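A minimal NumPy sketch of the renormalized propagation rule described above (the toy graph, features, and weight matrices are made-up placeholders, not values from the blog post or the paper):
```python
import numpy as np

def gcn_layer(A_hat, H, W):
    # symmetric normalization D'^-1/2 A' D'^-1/2, followed by ReLU
    d_inv_sqrt = np.diag(1.0 / np.sqrt(A_hat.sum(axis=1)))
    return np.maximum(d_inv_sqrt @ A_hat @ d_inv_sqrt @ H @ W, 0.0)

A = np.array([[0, 1, 0],
              [1, 0, 1],
              [0, 1, 0]], dtype=float)  # toy 3-node path graph
A_hat = A + np.eye(3)                   # add self loops: A' = A + I
X = np.eye(3)                           # featureless case: X = I
W0 = np.random.randn(3, 4)              # layer-0 weights (D x F)
W1 = np.random.randn(4, 2)              # layer-1 weights
H1 = gcn_layer(A_hat, X, W0)            # convolves 1st-order neighborhoods
H2 = gcn_layer(A_hat, H1, W1)           # convolves 2nd-order neighborhoods
print(H2.shape)                         # (3, 2)
```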
## Paper
### Introduction
- objective: classifying nodes (such as documents) in a graph (such as a citation network), where labels are only available for a small subset of nodes
- traditional graph based semi supervised learning loss = L0 + L_reg
- L0 is the loss w.r.t the labelled examples
- L_reg is graph laplacian regularization: \sum_{i,j in E} ||f(x_i)-f(x_j)||^2
- Problem: edge does not imply similarity
- approach: encode structure using NN f(X,A), X feature matrix, training on supervised target L0 using nodes with labels, use structure to distribute gradient information to unlabelled nodes; without explicit graph regularization
### Fast approximate convolutions on graphs
setup
- self loop adj. A'=A+I
- GCN propagation: H(l+1) = g(inv(D'^0.5)A'inv(D'^0.5)H(l)W(l))
- H(l) shape (N, D) D is number of input features
spectral convolution
- signal x (N, 1), scalar for each node
- diagonal matrix g_theta = diag(theta), theta shape (N,1)
- U is eigenvector matrix of normalized laplacian L
- L = inv(D^0.5)(D-A)inv(D^0.5) = UKU^T
- U is L's eigenmatrix
- K is the diagonal eigenvalue matrix
- spectral convolution star(g_theta, x):
- star(g_theta, x) = (U g_theta U^T)x
- think of g_theta as a function of the eigenvalue matrix K
- too expensive, O(N^2) multiplication, eigendecomp. of large matrices too expensive, approximation needed
- linear combination of chebyshev polynomials to approximate spectral convolution
- learn weights of linear combination to learn convolution parameters
- approximation K-localized, only depends on nodes K steps away from node
- A neural network model based on graph convolutions can therefore be built by stacking multiple convolutional layers, each layer followed by a point-wise non-linearity
layer wise linear model
- restrict convolution to K=1. So each conv. star(g,x) is a linear function of L (normalized Laplacian) passed through a nonlinear activation
- then star(g,x) = \theta'_0 x + \theta'_1 (L-I)x
- = theta'0 x - theta'1 inv(D^0.5)Ainv(D^0.5)x
- parameters theta0 and theta1 per conv; successive convs (e.g. total k) convolve the k^th order neighborhood
LHW -> UKU^THW -> Ug_thetaU^THW, g_theta filter -> first-order linear approximation + activation -> stack K times for Kth order neighborhood convolution
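Typeset for readability, the first-order (K = 1) approximation written in plain text above is:

$$g_\theta \star x \;\approx\; \theta'_0\, x + \theta'_1 (L - I_N)\, x \;=\; \theta'_0\, x - \theta'_1\, D^{-1/2} A D^{-1/2}\, x$$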
### Semi-supervised classification
layer-wise propagation allows efficient information propagation to unlabelled nodes. Now, train on labelled nodes and use structure + content to predict unlabelled. Evaluate semi-supervised classification using cross-entropy error (like LR but with multiple classes)
full gradient descent + sparse matrix A + dropout
featureless X if X=I
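A minimal sketch of the cross-entropy error evaluated only over the labelled nodes (the softmax outputs, labels, and mask below are illustrative placeholders):
```python
import numpy as np

def masked_cross_entropy(Z, Y, labeled_mask):
    # Z: row-wise softmax outputs (N, F); Y: one-hot labels (N, F); labeled_mask: boolean (N,)
    eps = 1e-12  # numerical safety for log
    logp = np.log(Z[labeled_mask] + eps)
    return -np.sum(Y[labeled_mask] * logp) / labeled_mask.sum()

Z = np.full((4, 2), 0.5)                        # dummy predictions: 4 nodes, 2 classes
Y = np.array([[1, 0], [0, 1], [0, 0], [0, 0]])  # labels known only for the first two nodes
mask = np.array([True, True, False, False])
print(masked_cross_entropy(Z, Y, mask))         # ~0.693
```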
### Related work
- graph based semi supervised learning
- graph regularization like label prop.
- graph embedding like node2vec, deepwalk
- problem: multi-step pipeline
- neural networks on graphs
- Contraction maps as propagation repeatedly
- Convolution like propagation, not scalable, node degree specific weights
- graph as sequences with some ordering
- spectral approaches to encode graphs
- problems: not scalable to large graphs, does not directly encode graph structure
### Conclusion
- memory requirements not viable for large dense graphs
- equal importance to self loops and edges: A'=A+kI
- directed networks/edge orientation ignored
- approximations information loss (locality)
1. graph convolution neural network approach based on first-order fast approximation of spectral convolution on graphs to encode structure + content
2. useful embeddings generated for semi-supervised classification
3. Methods based on graph-Laplacian regularization are most likely limited due to their assumption that edges encode mere similarity of nodes.
|
PHP
|
UTF-8
| 1,869 | 2.578125 | 3 |
[] |
no_license
|
<?php
if(!isset($_GET["filter"])){
$filter = "";
}else{
$filter = $_GET["filter"];
}
require "Connect_DB.php";
$sql = "SELECT Item_Image, Item_Series, Item_Name, Item_ID
FROM Item_Info
WHERE Item_Series LIKE '%".$filter."%'
";
$result = $conn->query($sql);
$Item_ID = array();
$Item_Image = array();
$Item_Name = array();
$i=0;
$max = 0;
if ($result->num_rows > 0) {
while ($row = $result->fetch_assoc()) {
if($i < 30){
$Item_ID[$i] = $row["Item_ID"];
$Item_Image[$i] = $row["Item_Image"];
$Item_Name[$i] = $row["Item_Name"];
}
$i++;
$max= $i;
}
}else{
    echo "ไม่มีข้อมูลที่ค้นหา"; // "No matching data found"
}
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Home - ToysStudio</title>
<link rel="stylesheet" type="text/css" href="css/reset.css">
<link rel="stylesheet" type="text/css" href="css/style.css">
</head>
<body>
<?php include 'header.php'; ?>
<div class="container bg2 cb">
<div class="wrapper">
<div class="con-card pt-20">
<div class="con-head">
<h1 class="fl"><?php echo $filter ?></h1>
</div>
<?php
for($i=0;$i<$max;$i++){
?>
<a href="item.php?Item_ID=<?php echo $Item_ID[$i]; ?>">
<div class="card220">
<div class="card-img">
<img src="img/item/<?php echo $Item_Image[$i]; ?>.png" alt="">
</div>
<div class="card-con"><?php echo $Item_Name[$i]; ?></div>
</div>
</a>
<?php
}
?>
<div class="cb"></div>
</div><!-- con card -->
</div><!-- wrapper -->
</div><!-- container -->
<?php include 'footer.php'; ?>
</body>
</html>
<?php
$conn->close();
?>
|
Markdown
|
UTF-8
| 1,243 | 3.40625 | 3 |
[] |
no_license
|
# KOTLIN :mobile_phone_off:
### **HOW TO DECLARE VARIABLES**
- **var** (mutable value, camelCase) - Variable whose value can be changed during the code;
- **val** (immutable value, camelCase) - Variable that only keeps the value assigned to it (similar to final in Java);
- **const val** (immutable value, SNAKE_CASE) - Constant whose value is assigned at compile time
**camelCase** - The second word starts with an uppercase letter
**SNAKE_CASE** - Uppercase letters only
- **const val** is not accepted inside the **main** function
### **NULL VALUES AND BASIC ARITHMETIC OPERATORS**
**Nullability**
- Any type can be null, but this must be made explicit in the variable declaration by using the question mark (?);
- Type inference does not assign nullability;
**Arithmetic Operators**
- Operators can be invoked either as an expression or as a command. The result is the same.
- The sum function also works for concatenating Strings.

| Function | Expression | Command | Assignment |
|:---------|:-----------|:--------|:-----------|
| Sum | a + b | a.plus(b) | a+=b |
| Subtraction | a - b | a.minus(b) | a-=b |
| Multiplication | a * b | a.times(b) | a*=b |
| Division | a / b | a.div(b) | a/=b |
| Remainder | a % b | a.mod(b) | a%=b |
|
Java
|
UTF-8
| 289 | 2.828125 | 3 |
[] |
no_license
|
package org.epicp.gamestate;
public enum Direction {
//Do not change order.
NORTH, WEST, EAST, SOUTH;
public boolean isOpposite(Direction otherDirection) {
return ordinal() + otherDirection.ordinal() == 3;
}
public Direction getOpposite() {
return values()[3-ordinal()];
}
}
|
Markdown
|
UTF-8
| 903 | 3.109375 | 3 |
[] |
no_license
|
+ Choose a thing/noun of any kind, then write an express server with a GET route that sends back an array of that thing. X
+ Your GET endpoint should be able to check for any query parameters that may have been passed into the url of the request and filter the results based on those query parameters. X
# EXTRA CREDIT:
- Write another route where an API user can filter by a maximum price AND a minimum price. You can make the maximum default to 1000000 and the minimum default to 0
?????
- Consolidate the two end points you've already written.
# If you are stuck:
USE THE LINES OF CODE BELOW AFTER ATTEMPTING TO WRITE THE CODE YOURSELF.
This doesn't give you all the answers, but may help you on your way.
app.get("/", (req, res)=>{
console.log(req.query);
});
A really good method to filter out all of the items you need is the filter method.
|
C++
|
UTF-8
| 24,142 | 2.515625 | 3 |
[
"MIT"
] |
permissive
|
#include "TransformationUtility.h"
#include <opencv2/legacy/legacy.hpp>
#include "../Macros.h"
#include <iostream>
#include <../eigen/Eigen/Dense>
#include <../eigen/Eigen/SVD>
using pcl::visualization::PointCloudColorHandlerGenericField;
using pcl::visualization::PointCloudColorHandlerCustom;
//our visualizer
pcl::visualization::PCLVisualizer *pclVisualizer;
//its left and right viewports
int vp_1, vp_2;
using namespace std;
using namespace cv;
using namespace Eigen;
#pragma region Surface Reconstruction
pcl::PolygonMesh::Ptr TransformationUtility::SurfaceReconstruction(PointCloudT::Ptr cloud)
{
//downSampling
PointCloudT::Ptr downSampledCloud (new PointCloudT);
pcl::VoxelGrid<PointT> grid;
grid.setLeafSize (0.005, 0.005, 0.005);
grid.setInputCloud (cloud);
grid.filter (*downSampledCloud);
PointCloudT::Ptr filtered(new PointCloudT());
PassThrough<PointT> filter;
//filter.setInputCloud(downSampledCloud);//use for downSampled clouds
filter.setInputCloud(cloud);
filter.filter(*filtered);
cout << "passthrough filter complete" << endl;
//up sampling
/*cout << "begin moving least squares" << endl;
MovingLeastSquares<PointXYZ, PointXYZ> mls;
mls.setInputCloud(filtered);
mls.setSearchRadius(0.01);
mls.setPolynomialFit(true);
mls.setPolynomialOrder(2);
mls.setUpsamplingMethod(MovingLeastSquares<PointXYZ, PointXYZ>::SAMPLE_LOCAL_PLANE);
mls.setUpsamplingRadius(0.005);
mls.setUpsamplingStepSize(0.003);
PointCloudT::Ptr cloud_smoothed (new PointCloudT());
mls.process(*cloud_smoothed);
cout << "MLS complete" << endl;*/
cout << "begin normal estimation" << endl;
NormalEstimationOMP<PointXYZ, Normal> ne;
ne.setNumberOfThreads(4);
ne.setInputCloud(filtered);
ne.setRadiusSearch(0.01);
Eigen::Vector4f centroid;
compute3DCentroid(*filtered, centroid);
ne.setViewPoint(centroid[0], centroid[1], centroid[2]);
pcl::PointCloud<Normal>::Ptr cloud_normals (new pcl::PointCloud<Normal>());
ne.compute(*cloud_normals);
cout << "normal estimation complete" << endl;
cout << "reverse normals' direction" << endl;
for(size_t i = 0; i < cloud_normals->size(); ++i)
{
cloud_normals->points[i].normal_x *= -1;
cloud_normals->points[i].normal_y *= -1;
cloud_normals->points[i].normal_z *= -1;
}
cout << "combine points and normals" << endl;
pcl::PointCloud<PointNormal>::Ptr cloud_smoothed_normals(new pcl::PointCloud<PointNormal>());
concatenateFields(*filtered, *cloud_normals, *cloud_smoothed_normals);
cout << "begin poisson reconstruction" << endl;
Poisson<PointNormal> poisson;
poisson.setDepth(9);
//poisson.setScale(1);
poisson.setInputCloud(cloud_smoothed_normals);
PolygonMesh::Ptr mesh(new PolygonMesh());
	poisson.reconstruct(*mesh); // warning: this can take longer than you might expect
return mesh;
}
#pragma endregion
#pragma region PointCloud iterative registration
//convenient structure to handle our pointclouds
struct PCD
{
PointCloudT::Ptr cloud;
std::string f_name;
PCD() : cloud (new PointCloudT) {};
};
struct PCDComparator
{
bool operator () (const PCD& p1, const PCD& p2)
{
return (p1.f_name < p2.f_name);
}
};
// Define a new point representation for < x, y, z, curvature >
class MyPointRepresentation : public pcl::PointRepresentation <PointNormalT>
{
using pcl::PointRepresentation<PointNormalT>::nr_dimensions_;
public:
MyPointRepresentation ()
{
// Define the number of dimensions
nr_dimensions_ = 4;
}
// Override the copyToFloatArray method to define our feature vector
virtual void copyToFloatArray (const PointNormalT &p, float * out) const
{
// < x, y, z, curvature >
out[0] = p.x;
out[1] = p.y;
out[2] = p.z;
out[3] = p.curvature;
}
};
////////////////////////////////////////////////////////////////////////////////
/** \brief Display source and target on the first viewport of the visualizer
*
*/
void showCloudsLeft(const PointCloudT::Ptr cloud_target, const PointCloudT::Ptr cloud_source)
{
pclVisualizer->removePointCloud ("vp1_target");
pclVisualizer->removePointCloud ("vp1_source");
PointCloudColorHandlerCustom<PointT> tgt_h (cloud_target, 0, 255, 0);
PointCloudColorHandlerCustom<PointT> src_h (cloud_source, 255, 0, 0);
pclVisualizer->addPointCloud (cloud_target, tgt_h, "vp1_target", vp_1);
pclVisualizer->addPointCloud (cloud_source, src_h, "vp1_source", vp_1);
PCL_INFO ("Press q to begin the registration.\n");
pclVisualizer-> spin();
}
////////////////////////////////////////////////////////////////////////////////
/** \brief Display source and target on the second viewport of the visualizer
*
*/
void showCloudsRight(const PointCloudWithNormals::Ptr cloud_target, const PointCloudWithNormals::Ptr cloud_source)
{
pclVisualizer->removePointCloud ("source");
pclVisualizer->removePointCloud ("target");
PointCloudColorHandlerGenericField<PointNormalT> tgt_color_handler (cloud_target, "curvature");
if (!tgt_color_handler.isCapable ())
PCL_WARN ("Cannot create curvature color handler!");
PointCloudColorHandlerGenericField<PointNormalT> src_color_handler (cloud_source, "curvature");
if (!src_color_handler.isCapable ())
PCL_WARN ("Cannot create curvature color handler!");
pclVisualizer->addPointCloud (cloud_target, tgt_color_handler, "target", vp_2);
pclVisualizer->addPointCloud (cloud_source, src_color_handler, "source", vp_2);
pclVisualizer->spinOnce();
}
////////////////////////////////////////////////////////////////////////////////
/** \brief Load a set of PCD files that we want to register together
* \param argc the number of arguments (pass from main ())
* \param argv the actual command line arguments (pass from main ())
* \param models the resultant vector of point cloud datasets
*/
void loadData (int argc, char **argv, std::vector<PCD, Eigen::aligned_allocator<PCD> > &models)
{
}
////////////////////////////////////////////////////////////////////////////////
/** \brief Align a pair of PointCloud datasets and return the result
* \param cloud_src the source PointCloud
* \param cloud_tgt the target PointCloud
* \param output the resultant aligned source PointCloud
* \param final_transform the resultant transform between source and target
*/
void pairAlign (const PointCloudT::Ptr cloud_src, const PointCloudT::Ptr cloud_tgt, PointCloudT::Ptr output, Eigen::Matrix4f &final_transform, bool downsample = false)
{
//
// Downsample for consistency and speed
// \note enable this for large datasets
PointCloudT::Ptr src (new PointCloudT);
PointCloudT::Ptr tgt (new PointCloudT);
pcl::VoxelGrid<PointT> grid;
if (downsample)
{
grid.setLeafSize (0.05, 0.05, 0.05);
grid.setInputCloud (cloud_src);
grid.filter (*src);
grid.setInputCloud (cloud_tgt);
grid.filter (*tgt);
}
else
{
src = cloud_src;
tgt = cloud_tgt;
}
// Compute surface normals and curvature
PointCloudWithNormals::Ptr points_with_normals_src (new PointCloudWithNormals);
PointCloudWithNormals::Ptr points_with_normals_tgt (new PointCloudWithNormals);
pcl::NormalEstimation<PointT, PointNormalT> norm_est;
pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ> ());
norm_est.setSearchMethod (tree);
norm_est.setKSearch (30);
norm_est.setInputCloud (src);
norm_est.compute (*points_with_normals_src);
pcl::copyPointCloud (*src, *points_with_normals_src);
norm_est.setInputCloud (tgt);
norm_est.compute (*points_with_normals_tgt);
pcl::copyPointCloud (*tgt, *points_with_normals_tgt);
//
// Instantiate our custom point representation (defined above) ...
MyPointRepresentation point_representation;
// ... and weight the 'curvature' dimension so that it is balanced against x, y, and z
float alpha[4] = {1.0, 1.0, 1.0, 1.0};
point_representation.setRescaleValues (alpha);
//
// Align
pcl::IterativeClosestPointNonLinear<PointNormalT, PointNormalT> reg;
reg.setTransformationEpsilon (1e-6);
// Set the maximum distance between two correspondences (src<->tgt) to 10cm
// Note: adjust this based on the size of your datasets
reg.setMaxCorrespondenceDistance (0.1);
// Set the point representation
reg.setPointRepresentation (boost::make_shared<const MyPointRepresentation> (point_representation));
reg.setInputSource (points_with_normals_src);
reg.setInputTarget (points_with_normals_tgt);
//
// Run the same optimization in a loop and visualize the results
Eigen::Matrix4f Ti = Eigen::Matrix4f::Identity (), prev, targetToSource;
PointCloudWithNormals::Ptr reg_result = points_with_normals_src;
reg.setMaximumIterations (2);
for (int i = 0; i < 30; ++i)
{
PCL_INFO ("Iteration Nr. %d.\n", i);
// save cloud for visualization purpose
points_with_normals_src = reg_result;
// Estimate
reg.setInputSource (points_with_normals_src);
reg.align (*reg_result);
//accumulate transformation between each Iteration
Ti = reg.getFinalTransformation () * Ti;
//if the difference between this transformation and the previous one
//is smaller than the threshold, refine the process by reducing
//the maximal correspondence distance
if (fabs ((reg.getLastIncrementalTransformation () - prev).sum ()) < reg.getTransformationEpsilon ())
reg.setMaxCorrespondenceDistance (reg.getMaxCorrespondenceDistance () - 0.001);
prev = reg.getLastIncrementalTransformation ();
// visualize current state
//showCloudsRight(points_with_normals_tgt, points_with_normals_src);
}
//
// Get the transformation from target to source
targetToSource = Ti.inverse();
//
// Transform target back in source frame
pcl::transformPointCloud (*cloud_tgt, *output, targetToSource);
/*pclVisualizer->removePointCloud ("source");
pclVisualizer->removePointCloud ("target");
PointCloudColorHandlerCustom<PointT> cloud_tgt_h (output, 0, 255, 0);
PointCloudColorHandlerCustom<PointT> cloud_src_h (cloud_src, 255, 0, 0);
pclVisualizer->addPointCloud (output, cloud_tgt_h, "target", vp_2);
pclVisualizer->addPointCloud (cloud_src, cloud_src_h, "source", vp_2);
PCL_INFO ("Press q to continue the registration.\n");
pclVisualizer->spin ();
pclVisualizer->removePointCloud ("source");
pclVisualizer->removePointCloud ("target");*/
//add the source to the transformed target
*output += *cloud_src;
final_transform = targetToSource;
}
std::shared_ptr<cv::Matx44f> TransformationUtility::IterativePointCloudMatchTransformation(PointCloudT::Ptr trainingPointCloud, PointCloudT::Ptr testPointCloud)
{
// Create a PCLVisualizer object
/*
pclVisualizer = new pcl::visualization::PCLVisualizer ("Pairwise Incremental Registration example");
pclVisualizer->createViewPort (0.0, 0, 0.5, 1.0, vp_1);
pclVisualizer->createViewPort (0.5, 0, 1.0, 1.0, vp_2);
*/
PointCloudT::Ptr result (new PointCloudT), source, target;
Eigen::Matrix4f pairTransform;
source = trainingPointCloud;
target = testPointCloud;
// Add visualization data
//showCloudsLeft(source, target);
PointCloudT::Ptr temp (new PointCloudT);
pairAlign (source, target, temp, pairTransform, true);
std::shared_ptr<cv::Matx44f> iterativeTransformationMatrix(new cv::Matx44f());
(*iterativeTransformationMatrix)(0,0) = pairTransform(0,0);(*iterativeTransformationMatrix)(0,1) = pairTransform(1,0);
(*iterativeTransformationMatrix)(0,2) = pairTransform(2,0);(*iterativeTransformationMatrix)(0,3) = pairTransform(3,0);
(*iterativeTransformationMatrix)(1,0) = pairTransform(0,1);(*iterativeTransformationMatrix)(1,1) = pairTransform(1,1);
(*iterativeTransformationMatrix)(1,2) = pairTransform(2,1);(*iterativeTransformationMatrix)(1,3) = pairTransform(3,1);
(*iterativeTransformationMatrix)(2,0) = pairTransform(0,2);(*iterativeTransformationMatrix)(2,1) = pairTransform(1,2);
(*iterativeTransformationMatrix)(2,2) = pairTransform(2,2);(*iterativeTransformationMatrix)(2,3) = pairTransform(3,2);
(*iterativeTransformationMatrix)(3,0) = pairTransform(0,3);(*iterativeTransformationMatrix)(3,1) = pairTransform(1,3);
(*iterativeTransformationMatrix)(3,2) = pairTransform(2,3);(*iterativeTransformationMatrix)(3,3) = pairTransform(3,3);
return iterativeTransformationMatrix;
}
#pragma endregion
std::vector<Match3D> *TransformationUtility::Create3DMatchPoints(std::vector<bool> &mask, std::vector<std::vector<cv::DMatch>> &matches, BaseKinectModel &trainKinectModel, std::vector<cv::KeyPoint> &trainKeypoints, BaseKinectModel &testKinectModel, std::vector<cv::KeyPoint> &testKeypoints)
{
vector<Match3D> *matches3D = new vector<Match3D>();
for (int i = 0; i < mask.size(); i++)
{
if (mask[i] == true)
{
DMatch currentMatch = matches[i][0];
KeyPoint testPairKeyPoint = testKeypoints[currentMatch.queryIdx];
KeyPoint trainPairKeyPoint = trainKeypoints[currentMatch.trainIdx];
int trainPairIndex = (int)(((trainKinectModel.grayImage.cols) * round(trainPairKeyPoint.pt.y)) + (int)trainPairKeyPoint.pt.x);
int testPairIndex = (int)(((testKinectModel.grayImage.cols) * round(testPairKeyPoint.pt.y)) + (int)testPairKeyPoint.pt.x);
if ((trainKinectModel.pointCloud[trainPairIndex].x == 0.0 && trainKinectModel.pointCloud[trainPairIndex].y == 0.0 && trainKinectModel.pointCloud[trainPairIndex].z == 0.0)
|| (testKinectModel.pointCloud[testPairIndex].x == 0.0 && testKinectModel.pointCloud[testPairIndex].y == 0.0 && testKinectModel.pointCloud[testPairIndex].z == 0.0))
{///eliminate zero Point3f's
mask[i] = false;
}
else
{
Match3D match3d = Match3D(testKinectModel.pointCloud[testPairIndex], trainKinectModel.pointCloud[trainPairIndex]);
(*matches3D).push_back(match3d);
}
}
}
return matches3D;
}
cv::Matx44f *TransformationUtility::CreateTransformation(std::vector<Match3D> &matches3D)
{
cv::Matx44f *transformation = new cv::Matx44f();
#pragma region obtain mid points of matches and seperated consecutive matches
Point3f trainMidPoint = Point3f(0,0,0); //d_ mid
Point3f testMidPoint = Point3f(0,0,0); //m_ mid
for (int i = 0; i < matches3D.size(); i++)
{
trainMidPoint += matches3D[i].trainPair;
testMidPoint += matches3D[i].queryPair;
}
trainMidPoint.x = trainMidPoint.x / matches3D.size();
trainMidPoint.y = trainMidPoint.y / matches3D.size();
trainMidPoint.z = trainMidPoint.z / matches3D.size();
testMidPoint.x = testMidPoint.x / matches3D.size();
testMidPoint.y = testMidPoint.y / matches3D.size();
testMidPoint.z = testMidPoint.z / matches3D.size();
#pragma endregion
#pragma region pull the all points to around origin midpoints traslated to the 0,0,0 point and finding the H matrix
double HMatrix11 = 0.0;
double HMatrix12 = 0.0;
double HMatrix13 = 0.0;
double HMatrix21 = 0.0;
double HMatrix22 = 0.0;
double HMatrix23 = 0.0;
double HMatrix31 = 0.0;
double HMatrix32 = 0.0;
double HMatrix33 = 0.0;
vector<Match3D> matches3DWhichAreTranslatedAroundTheMidPoints = matches3D;
for (int i = 0; i <matches3DWhichAreTranslatedAroundTheMidPoints.size(); i++)
{
matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair -= trainMidPoint; //d_i - d_ = d_c_i
matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair -= testMidPoint; //m_i - m_ = m_c_i
HMatrix11 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.x * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.x;
HMatrix12 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.x * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.y;
HMatrix13 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.x * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.z;
HMatrix21 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.y * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.x;
HMatrix22 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.y * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.y;
HMatrix23 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.y * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.z;
HMatrix31 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.z * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.x;
HMatrix32 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.z * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.y;
HMatrix33 += matches3DWhichAreTranslatedAroundTheMidPoints[i].trainPair.z * matches3DWhichAreTranslatedAroundTheMidPoints[i].queryPair.z;
}
#pragma endregion
#pragma region SVD
/*cv::Matx33f src = cv::Matx33f();
src(0,0) = HMatrix11; src(0,1) = HMatrix12; src(0,2) = HMatrix13;
src(1,0) = HMatrix21; src(1,1) = HMatrix22; src(1,2) = HMatrix23;
src(2,0) = HMatrix31; src(2,1) = HMatrix32; src(2,2) = HMatrix33;
cv::Matx31f w;
cv::Matx33f u;
cv::Matx33f vt;
cv::SVD::compute(src, w, u, vt, 0);*/
Matrix3d eigenSrc = Eigen::Matrix3d::Identity();
eigenSrc << HMatrix11, HMatrix21, HMatrix31,
HMatrix12, HMatrix22, HMatrix32,
HMatrix13, HMatrix23, HMatrix33;
Eigen::JacobiSVD<MatrixXd> svd(eigenSrc, ComputeFullU | ComputeFullV);
Eigen::Matrix3d eigenV = svd.matrixV();
Eigen::Matrix3d eigenU = svd.matrixU();
#pragma endregion
#pragma region Rotation Matrix
Eigen::Matrix3d eigenUMultipleVT = eigenU * eigenV.transpose();
(*transformation)(0,0) = eigenUMultipleVT(0,0);
(*transformation)(0,1) = eigenUMultipleVT(0,1);
(*transformation)(0,2) = eigenUMultipleVT(0,2);
(*transformation)(0,3) = 0.0;
(*transformation)(1,0) = eigenUMultipleVT(1,0);
(*transformation)(1,1) = eigenUMultipleVT(1,1);
(*transformation)(1,2) = eigenUMultipleVT(1,2);
(*transformation)(1,3) = 0.0;
(*transformation)(2,0) = eigenUMultipleVT(2,0);
(*transformation)(2,1) = eigenUMultipleVT(2,1);
(*transformation)(2,2) = eigenUMultipleVT(2,2);
(*transformation)(2,3) = 0.0;
(*transformation)(3,0) = 0.0;
(*transformation)(3,1) = 0.0;
(*transformation)(3,2) = 0.0;
(*transformation)(3,3) = 1.0;
#pragma endregion
#pragma region camera translation
cv::Matx14f translation;
cv::Matx14f midTestPointMat = cv::Matx14f();
midTestPointMat(0,0) = testMidPoint.x; midTestPointMat(0,1) = testMidPoint.y;
midTestPointMat(0,2) = testMidPoint.z; midTestPointMat(0,3) = 1.0;
cv::Matx14f midTrainPointMat = cv::Matx14f();
midTrainPointMat(0,0) = trainMidPoint.x; midTrainPointMat(0,1) = trainMidPoint.y;
midTrainPointMat(0,2) = trainMidPoint.z; midTrainPointMat(0,3) = 1.0;
TransformationUtility::TransformSingleMatrix(midTestPointMat, (*transformation));
translation = midTrainPointMat - midTestPointMat;
(*transformation)(3,0) = translation(0,0);
(*transformation)(3,1) = translation(0,1);
(*transformation)(3,2) = translation(0,2);
(*transformation)(3,3) = 1.0;
#pragma endregion
return transformation;
}
std::vector<SCPoint3D> *TransformationUtility::Transform(BaseKinectModel &testModel, cv::Matx44f &transformationMatrix)
{
std::vector<SCPoint3D> *result = new std::vector<SCPoint3D>();
(*result).reserve(testModel.pointCloud.size());
cv::Mat grayImageWith3Channels;
if (testModel.image.cols == 0)
{//if no image found then use grayImage
cvtColor(testModel.grayImage, grayImageWith3Channels, CV_GRAY2RGB);
}
for (int i = 0; i < testModel.pointCloud.size(); i++)
{
if (testModel.pointCloud[i].z != 0.0 && testModel.pointCloud[i].y != 0.0 && testModel.pointCloud[i].x != 0.0)
{//if point is valid
Point3f pt = testModel.pointCloud[i];
Matx14f ptMatrix = TransformationUtility::TransformSinglePoint2Matrix(pt, transformationMatrix);
if (testModel.image.cols > 0)
{//if color image exists
(*result).push_back(SCPoint3D(ptMatrix,
testModel.image.at<cv::Vec3b>(i / testModel.image.cols, i % testModel.image.cols)));
}
else if (testModel.grayImage.cols > 0)
{//if gray color image exists
(*result).push_back(SCPoint3D(ptMatrix,
grayImageWith3Channels.at<cv::Vec3b>(i / grayImageWith3Channels.cols, i % grayImageWith3Channels.cols)));
}
else
{//if no image found, then use white as color
cv::Vec3b white(255, 255, 255);
(*result).push_back(SCPoint3D(ptMatrix, cv::Vec3b(255, 255, 255)));
}
}
}
return result;
}
std::vector<Match3D> *TransformationUtility::RANSAC(std::vector<Match3D> &matches3D, int numberOfIteration, float threshold)
{//requires at least 3 Match3D entries
std::vector<unsigned int> bestMatchesIndices;
int cBest = INT_MIN;
for (int iteration = 0; iteration < numberOfIteration; iteration++)
{
int c = 0;
std::vector<unsigned int> *random3 = TransformationUtility::Generate3UniqueRandom(matches3D.size());
std::vector<Match3D> pickedMatches3D = std::vector<Match3D>();
pickedMatches3D.reserve(3);
pickedMatches3D.push_back(matches3D[(*random3)[0]]);
pickedMatches3D.push_back(matches3D[(*random3)[1]]);
pickedMatches3D.push_back(matches3D[(*random3)[2]]);
cv::Matx44f *candTransformation = TransformationUtility::CreateTransformation(pickedMatches3D);
std::vector<unsigned int> candMatchesIndices = std::vector<unsigned int>();
candMatchesIndices.reserve(matches3D.size());
double euclideanDistance;
for (unsigned int matchIndex = 0; matchIndex < matches3D.size(); matchIndex++)
{
TransformationUtility::EuclideanDistanceBetweenTwoPoint(euclideanDistance,
matches3D[matchIndex].trainPair,
TransformationUtility::TransformSinglePoint2Point(matches3D[matchIndex].queryPair, *candTransformation));
if (euclideanDistance < threshold)
{
c++;
candMatchesIndices.push_back(matchIndex);
}
}
if (c > cBest && TransformationUtility::IsTransformationMatrixRightHanded(*candTransformation))
{
cBest = c;
bestMatchesIndices = candMatchesIndices;
}
//clear heap
delete candTransformation;
delete random3;
}
std::vector<Match3D> *result = new std::vector<Match3D>();
(*result).reserve(bestMatchesIndices.size());
for (int i = 0; i < bestMatchesIndices.size(); i++)
{
(*result).push_back(matches3D[bestMatchesIndices[i]]);
}
return result;
}
#pragma region Helpers
//void TransformationUtility::TransformSinglePoint(cv::Matx41f &pt, cv::Matx44f &transformationMatrix)
//{
// Matx44f transformationT;
// cv::transpose(transformationMatrix, transformationT);
// pt = transformationT * pt;
//}
cv::Matx14f TransformationUtility::TransformSinglePoint2Matrix(cv::Point3f &point, cv::Matx44f &transformationMatrix)
{
Matx14f pt(point.x, point.y, point.z, 1.0f);
TransformationUtility::TransformSingleMatrix(pt, transformationMatrix);
return pt;
}
cv::Point3f TransformationUtility::TransformSinglePoint2Point(cv::Point3f &point, cv::Matx44f &transformationMatrix)
{
Matx14f pt(point.x, point.y, point.z, 1.0f);
TransformationUtility::TransformSingleMatrix(pt, transformationMatrix);
return Point3f(pt(0,0), pt(0,1), pt(0,2));
}
void TransformationUtility::TransformSingleMatrix(cv::Matx14f &pt, cv::Matx44f &transformationMatrix)
{
pt = pt * transformationMatrix;
}
std::vector<unsigned int> *TransformationUtility::Generate3UniqueRandom(unsigned int ceil)
{
std::vector<unsigned int> *uniqueNumbers = new std::vector<unsigned int>(3);
if (ceil >= 3)
{
(*uniqueNumbers)[0] = (std::rand() % ceil);
(*uniqueNumbers)[1] = (std::rand() % (ceil - 1));
if ((*uniqueNumbers)[0] == (*uniqueNumbers)[1])
{
(*uniqueNumbers)[1] += 1;
}
(*uniqueNumbers)[2] = (std::rand() % (ceil - 2));
if ((*uniqueNumbers)[2] == (*uniqueNumbers)[0])
{
(*uniqueNumbers)[2] += 1;
}
if ((*uniqueNumbers)[2] == (*uniqueNumbers)[1])
{
			(*uniqueNumbers)[2] += 1; // fixed: '=+' assigned the value 1 instead of incrementing
}
}
return uniqueNumbers;
}
bool TransformationUtility::IsTransformationMatrixRightHanded(cv::Matx44f &transformation)
{
double determinant = cv::determinant(transformation);
if (determinant > 0.0)
{
return true;
}
return false;
}
void TransformationUtility::EuclideanDistanceBetweenTwoPoint(double &euclideanDistance, cv::Point3f &pointA, cv::Point3f &pointB)
{
euclideanDistance = cv::norm(pointA - pointB);
}
#pragma endregion
|
Python
|
UTF-8
| 496 | 2.96875 | 3 |
[] |
no_license
|
# -*- coding: UTF-8 -*-
# Created by thpffcj on 2019/9/20.
import pandas as pd
import matplotlib.pyplot as plt
data1 = pd.read_csv("卡纳达气候数据/Alberta/CALGARY_1956_1979.csv", skiprows=18)
data2 = pd.read_csv("卡纳达气候数据/Alberta/CALGARY_1881_2012.csv", skiprows=18)
pd.set_option('display.max_columns', 40)
pd.set_option('display.width', 1000)
print(data2)
# time = data1["Date/Time"]
# temp = data1["Mean Temp (°C)"]
#
# plt.plot(time, temp)
# plt.legend()
# plt.show()
|
Python
|
UTF-8
| 16,314 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
# -*- coding: utf-8 -*-
import time
import matplotlib.pyplot as plt
SMALL_FONT = 10
LARGE_FONT = 14
FIG_SIZE = (8, 6)
img_specs = {
'mnist' : {
'pix_row' : 1,
'pix_col' : 26,
'img_width' : 28,
'img_height' : 28
},
'cifar' : {
'pix_row' : 1,
'pix_col' : 30,
'img_width' : 32,
'img_height' : 32
},
'digits': {
'pix_row' : 0,
'pix_col' : 7,
'img_width' : 8,
'img_height' : 8
}
}
def plotter(x,
y = [],
plot_dict = {},
fig_dims = (7, 5),
title = 'Model',
title_dict = {},
ylabel = 'y-axis',
ylabel_dict = {},
xlabel = 'x-axis',
xlabel_dict = {},
legend = [], # ['train', 'valid'],
legend_dict = {},
file_path = '',
to_save = False,
plot_type = 'line',
cmap_name = None,
cmap_number = 10,
grid_on = True):
fig, ax = plt.subplots()
fig.set_size_inches(fig_dims)
ax.set_axisbelow(True)
ax.minorticks_on()
if grid_on:
ax.grid(which = 'major', linestyle = '-', linewidth = 0.5, color = 'grey')
ax.grid(which = 'minor', linestyle = ':', linewidth = 0.5, color = 'red')
if plot_type == 'line':
for i in range(len(y)):
ax.plot(x, y[i], **plot_dict)
if plot_type == 'scatter':
if cmap_name is not None:
plot_dict.update(cmap = plt.cm.get_cmap(cmap_name, cmap_number))
plot = ax.scatter(x[:, 0], x[:, 1], **plot_dict)
fig.colorbar(plot, ax = ax)
else:
ax.scatter(x[:, 0], x[:, 1], **plot_dict)
if y is not None:
ax.scatter(y[:, 0], y[:, 1], **{'c' : 'red'}) # centroids for k-means
ax.set_title(title, **title_dict)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.legend(legend, **legend_dict)
if to_save:
fig.savefig(file_path)
return plt
def plot_pca(components,
n_components = 2,
colour_array = None,
model_name = 'PCA',
to_save = False,
fig_dims = FIG_SIZE, #(10, 8),
title_dict = {'size' : SMALL_FONT}):
    file_path = '../plots/decompositions/'+('{}{}{}{}{}{}'.format(model_name,
                                                                  '_',
                                                                  n_components,
                                                                  '_Components_',
                                                                  time.strftime("%Y-%m-%d_%H-%M-%S"),'.png'))
plt_dict = {
'c' : colour_array,
'edgecolor' : 'none',
'alpha' : 0.5,
's' : 50
}
plt = plotter(components,
y = None,
plot_dict = plt_dict,
fig_dims = fig_dims,
title = 'Model {}'.format(model_name.upper()),
title_dict = title_dict,
xlabel = 'PC 1',
ylabel = 'PC 2',
file_path = file_path,
to_save = to_save,
plot_type = 'scatter',
cmap_name = 'tab10',
cmap_number = 10,
grid_on = False)
plt.show()
def plot_kmeans(data,
labels = None,
centroids = None,
model_name = 'K-Means',
model_clusters = 1,
to_save = False,
fig_dims = FIG_SIZE,
title_dict = {'size' : SMALL_FONT}):
    file_path = '../plots/clusters/'+('{}{}{}{}{}{}'.format(model_name,
                                                            '_',
                                                            model_clusters,
                                                            '_Clusters_',
                                                            time.strftime("%Y-%m-%d_%H-%M-%S"),'.png'))
plt = plotter(data,
y = centroids,
plot_dict = {'c' : labels},
fig_dims = fig_dims,
title = 'Model {}'.format(model_name.title()),
title_dict = title_dict,
file_path = file_path,
to_save = to_save,
plot_type = 'scatter')
plt.show()
def plot_metric(metric,
epoch,
train,
valid,
model_name = '',
to_save = False,
plot_dict = {'linewidth' : 0.8},
fig_dims = FIG_SIZE,
title_dict = {'size' : SMALL_FONT},
ylabel_dict = {'size' : SMALL_FONT},
xlabel_dict = {'size' : SMALL_FONT},
legend = ['train', 'valid'],
legend_dict = {'loc' : 'upper right'}):
    file_path = '../plots/metrics/'+('{}{}{}{}{}{}'.format(model_name,
                                                           '_',
                                                           metric,
                                                           '_',
                                                           time.strftime("%Y-%m-%d_%H-%M-%S"),'.png'))
plt = plotter(range(epoch),
[train, valid],
plot_dict = plot_dict,
fig_dims = fig_dims,
title = 'Model {}'.format(metric.title()),
title_dict = title_dict,
ylabel = metric.title(),
ylabel_dict = ylabel_dict,
xlabel = 'Iterations',
xlabel_dict = xlabel_dict,
legend = legend,
legend_dict = legend_dict,
file_path = file_path,
to_save = to_save)
plt.show()
def plot_opt_viz(dims,
x,
y,
z,
f_solution,
overlay = 'plot',
to_save = False,
title = 'Optimization',
title_dict = {'size' : LARGE_FONT},
fig_dims = FIG_SIZE,
xticks_dict = {'size' : LARGE_FONT},
yticks_dict = {'size' : LARGE_FONT},
xlabel = r'$\theta^1$',
xlabel_dict = {'size' : LARGE_FONT},
ylabel = r'$\theta^2$',
ylabel_dict = {'size' : LARGE_FONT},
legend = ['train', 'valid'],
legend_dict = {}):
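    # Visualizes an optimization problem: for dims == 3, a wireframe or contour of z over
    # the grid x with candidate solutions y (marker size f_solution) overlaid in red;
    # for dims == 2, the curve y(x) with the solution points (z, f_solution) in red.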
if dims == 3:
fig = plt.figure(figsize = fig_dims)
if overlay == 'wireframe':
from mpl_toolkits.mplot3d import axes3d # for 3d projections
ax = fig.add_subplot(111, projection = '3d')
plt.scatter(y[:,0], y[:,1], s = f_solution, c = 'r')
ax.plot_wireframe(x[0], x[1], z, rstride = 5, cstride = 5, linewidth = 0.5)
elif overlay == 'contour':
ax = fig.add_subplot(111)
plt.scatter(y[:,0], y[:,1], s = f_solution, c = 'r')
ax.contour(x[0], x[1], z, 20, cmap = plt.cm.jet)
ax.set_xlabel(xlabel, **xlabel_dict)
ax.set_ylabel(ylabel, **ylabel_dict)
elif dims == 2:
plt.figure(figsize = fig_dims)
plt.xticks(**xticks_dict)
plt.yticks(**yticks_dict)
plt.plot(x, y)
plt.scatter(z, f_solution, color = 'r')
plt.xlabel(xlabel, **xlabel_dict)
plt.ylabel(ylabel, **ylabel_dict)
if to_save:
plt.suptitle(('{}{}'.format(dims, 'D Surfaces')), fontsize = 14)
plt.savefig('../plots/'+('{}{}{}{}'.format(overlay, '_', dims, 'd.png')))
plt.show()
def plot_img_samples(train_data, train_target = None, fig_dims = (6, 6), dataset = 'digits', channels = 1):
fig = plt.figure(figsize = fig_dims)
fig.subplots_adjust(left = 0, right = 1, bottom = 0, top = 1, hspace = 0.05, wspace = 0.05)
for i in range(36):
digit = fig.add_subplot(6, 6, i+1, xticks = [], yticks = [])
if channels == 3:
color_img = train_data[i].reshape(channels,
img_specs[dataset]['img_height'],
img_specs[dataset]['img_width']).transpose([1, 2, 0])
digit.imshow(color_img, interpolation = 'nearest')
else:
digit.imshow(train_data[i].reshape(img_specs[dataset]['img_height'],
img_specs[dataset]['img_width']),
cmap = plt.cm.binary, interpolation = 'nearest')
if train_target is not None:
digit.text(img_specs[dataset]['pix_row'],
img_specs[dataset]['pix_col'],
str(train_target.astype('int')[i]))
plt.show()
def plot_tiled_img_samples(train_data, train_target = None, fig_dims = (6, 6), dataset = 'digits', channels = 1):
fig = plt.figure(figsize = fig_dims)
fig.subplots_adjust(left = 0, right = 1, bottom = 0, top = 1, hspace = 0.05, wspace = 0.05)
for i in range(36):
digit = fig.add_subplot(6, 6, i+1)
digit.grid(which = 'major', linestyle = ':', linewidth = 0.5, color = 'blue')
digit.grid(which = 'minor', linestyle = ':', linewidth = 0.5, color = 'blue')
digit.xaxis.set_ticklabels([])
digit.yaxis.set_ticklabels([])
digit.minorticks_on()
if channels == 3:
color_img = train_data[i].reshape(channels, 32, 32).transpose([1, 2, 0])
digit.imshow(color_img, interpolation = 'nearest')
else:
digit.imshow(train_data[i].reshape(img_specs[dataset]['img_height'],
img_specs[dataset]['img_width']),
cmap = plt.cm.binary, interpolation = 'nearest')
if train_target is not None:
digit.text(img_specs[dataset]['pix_row'],
img_specs[dataset]['pix_col'],
str(train_target.astype('int')[i]))
plt.show()
def plot_img_results(test_data, test_label, predictions, fig_dims = (6, 6), dataset = 'digits', channels = 1):
fig = plt.figure(figsize = fig_dims)
fig.subplots_adjust(left = 0, right = 1, bottom = 0, top = 1, hspace = 0.05, wspace = 0.05)
for i in range(36):
digit = fig.add_subplot(6, 6, i + 1, xticks = [], yticks = [])
if channels == 3:
color_img = test_data[i].reshape(channels,
img_specs[dataset]['img_height'],
img_specs[dataset]['img_width']).transpose([1, 2, 0])
digit.imshow(color_img, interpolation = 'nearest')
else:
digit.imshow(test_data.reshape(-1,
img_specs[dataset]['img_height'],
img_specs[dataset]['img_width'])[i],
cmap = plt.cm.binary, interpolation = 'nearest')
if predictions[i] == test_label[i]:
digit.text(img_specs[dataset]['pix_row'],
img_specs[dataset]['pix_col'],
str(predictions[i]), color = 'green')
else:
digit.text(img_specs[dataset]['pix_row'],
img_specs[dataset]['pix_col'],
str(predictions[i]), color = 'red')
plt.show()
def plot_generated_img_samples(test_label,
predictions,
fig_dims = (6, 6),
dataset = 'digits',
channels = 1,
to_save = False,
iteration = 0,
model_name = ''):
fig = plt.figure(figsize = fig_dims)
fig.subplots_adjust(left = 0, right = 1, bottom = 0, top = 1, hspace = 0.05, wspace = 0.05)
for i in range(36):
digit = fig.add_subplot(6, 6, i+1, xticks = [], yticks = [])
if channels == 3:
color_img = predictions[i].reshape(channels,
img_specs[dataset]['img_height'],
img_specs[dataset]['img_width']).transpose([1, 2, 0])
digit.imshow(color_img, interpolation = 'nearest')
else:
digit.imshow(predictions.reshape(-1,
img_specs[dataset]['img_height'],
img_specs[dataset]['img_width'])[i],
cmap = plt.cm.binary, interpolation = 'nearest')
if test_label is not None:
digit.text(img_specs[dataset]['pix_row'],
img_specs[dataset]['pix_col'],
str(test_label[i]), color = 'blue')
if to_save:
plt.suptitle(('{}{}'.format('Generator Epoch: ', iteration)), y = 1.05, fontsize = 12).set_color('blue')
        plt.savefig('../plots/generated/'+('{}{}{}{}'.format(model_name, '_', iteration, '.png')))
plt.show(block = False) if to_save else plt.show(block = True)
def plot_regression_results(train_data,
train_label,
test_data,
test_label,
input_data,
pred_line,
mse,
super_title,
y_label,
x_label,
model_name = '',
to_save = False,
fig_dims = FIG_SIZE,
font_size = 10):
plt.figure(figsize = fig_dims)
cmap = plt.get_cmap('summer')
train = plt.scatter(train_data, train_label, color = cmap(0.8), s = 12)
test = plt.scatter(test_data, test_label, color = cmap(0.4), s = 12)
# minimum parameters: plt.plot(input_data, pred_line, '*', color = 'green', markersize = 4)
plt.plot(input_data, pred_line, marker = '*', color = 'green', markersize = 4, linestyle = 'none')
plt.suptitle(super_title)
if mse is not None:
plt.title("MSE: {:4.2f}".format(mse), size = font_size)
plt.xlabel(x_label)
plt.ylabel(y_label)
plt.legend((train, test), ("Train", "Test"), loc = 'upper left')
if to_save:
plt.savefig('../plots/metrics/'+('{}{}{:4.2f}{}{}{}'.format(model_name,
'_mse_',
mse,
'_',
time.strftime("%Y-%m-%d_%H-%M-%S"),'.png')))
plt.show()
|
Java
|
UTF-8
| 1,515 | 2.125 | 2 |
[] |
no_license
|
package com.jov.isaac.is;
import android.content.Intent;
import android.os.Bundle;
import android.view.MenuItem;
import android.view.View;
/**
* Created by shuwei on 15/12/28.
*/
public class MoreActivity extends BaseActivity implements View.OnClickListener{
private View v1,v2,v3;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_more);
getActionBar().setDisplayHomeAsUpEnabled(true);
v1 = findViewById(R.id.m1_lay);
v2 = findViewById(R.id.m2_lay);
v3 = findViewById(R.id.m3_lay);
v1.setOnClickListener(this);
v2.setOnClickListener(this);
v3.setOnClickListener(this);
}
@Override
public void onClick(View v) {
Bundle bundle = new Bundle();
if(v1==v){
bundle.putString("html","1.html");
bundle.putString("title","Boss Rush");
switchTo(WebAcitivity.class,bundle);
return;
}
if(v2==v){
bundle.putString("html","3.html");
bundle.putString("title", "套装");
switchTo(WebAcitivity.class,bundle);
return;
}
if(v3==v){
bundle.putString("html","2.html");
bundle.putString("title","捐款机和献血机");
switchTo(WebAcitivity.class,bundle);
return;
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == android.R.id.home) {
finish();
return true;
}
return super.onOptionsItemSelected(item);
}
}
|
Java
|
UTF-8
| 900 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
package com.github.mjaroslav.shikimori4java.object;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.val;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@RequiredArgsConstructor
public enum MangaOrder {
ID("id"), RANKED("ranked"), KIND("kind"), POPULARITY("popularity"), NAME("name"), AIRED_ON("aired_on"),
VOLUMES("volumes"), CHAPTERS("chapters"), STATUS("status"), RANDOM("random"), UNKNOWN("unknown");
@Getter
@NotNull
private final String value;
public boolean isKnown() {
return this != UNKNOWN;
}
@NotNull
public static MangaOrder fromValue(@Nullable String value) {
for (val check : values())
if (check.value.equals(value))
return check;
return UNKNOWN;
}
@Override
public String toString() {
return value;
}
}
|
SQL
|
UTF-8
| 1,787 | 3.59375 | 4 |
[] |
no_license
|
--/***************************************************************/
-- Developer: Julie Laursen
--
-- Program #: 8
--
-- File Name: Program 8.sql
--
-- Course: ITSE 1345 Introduction to Oracle SQL and PL/SQL
--
-- Due Date: 3/15
--
-- Instructor: Fred Kumi
--
-- Chapter:
--
-- Description:
-- Create a procedure named DDCK_SP that confirms whether a monthly pledge amount is the correct amount
--
set Serveroutput On;
CREATE OR REPLACE PROCEDURE DDCK_SP (pledge_id IN NUMBER, payment_amt IN NUMBER) --DD_Pledge.Pledgeamt%TYPE)
IS
CURSOR cur_pledge IS
SELECT Pledgeamt, Paymonths
FROM DD_Pledge
WHERE idPledge = pledge_id;
lv_count number;
BEGIN
--open cur_pledge;
lv_count := 0;
FOR rec_purch IN cur_pledge LOOP
lv_count := lv_count + 1;
IF rec_purch.Paymonths = 0 THEN
IF rec_purch.Pledgeamt = payment_amt THEN
DBMS_OUTPUT.put_line('The amount entered is correct');
ELSE
RAISE_APPLICATION_ERROR(-20050, 'Incorrect payment amount -- planned payment ' || rec_purch.Pledgeamt || ' should be raised. The ' ||
payment_amt || ' should be replaced by the correct payment amount');
END IF;
ELSIF (rec_purch.Pledgeamt / rec_purch.Paymonths) = payment_amt THEN
DBMS_OUTPUT.put_line('The amount entered is correct');
ELSE
RAISE_APPLICATION_ERROR(-20050, 'Incorrect payment amount -- planned payment ' || (rec_purch.Pledgeamt / rec_purch.Paymonths) || ' should be raised. The ' ||
payment_amt || ' should be replaced by the correct payment amount');
END IF;
END LOOP;
if (lv_count = 0) then
RAISE_APPLICATION_ERROR(-20050, 'No payment information found');
END IF;
END DDCK_SP;
/
|
Swift
|
UTF-8
| 1,352 | 2.921875 | 3 |
[] |
no_license
|
// ETML
// Author: Luca Bohlen
// Date: 12.05.2021
// Description: This script handles the database connection for shops.
import Foundation
import FirebaseFirestore
struct Shop{
var id: String
var uid = ""
var name: String
var dateAdded: Date
var shopLocation: String
var categoryLocation: [String: String]
var json: [String: Any] {
return [
"id": self.id,
"name": self.name,
"dateAdded": Timestamp(date: self.dateAdded),
"shopLocation": self.shopLocation,
"categoryLocation": self.categoryLocation,
"uid": self.uid
]
}
init(id: String, name: String, dateAdded: Date, shopLocation: String, categoryLocation: [String: String], uid: String){
self.id = id
self.uid = uid
self.name = name
self.dateAdded = dateAdded
self.shopLocation = shopLocation
self.categoryLocation = categoryLocation
}
init(data: [String: Any]){
self.id = data["id"] as! String
self.name = data["name"] as! String
self.dateAdded = (data["dateAdded"] as! Timestamp).dateValue()
self.shopLocation = data["shopLocation"] as! String
self.uid = data["uid"] as! String
self.categoryLocation = data["categoryLocation"] as! [String: String]
}
}
|
Markdown
|
UTF-8
| 1,826 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
# Global User Module
```elixir
defmodule MyApp.Accounts.User do
schema "users" do
field :first_name, :string
field :last_name, :string
field :skype, :string
field :email, :string
field :encrypted_password, :string
end
end
```
This anti-pattern manifests in two ways:
1. Adding fields to `Accounts.User` that are not authentication-related
2. Passing the full `Accounts.User` to other domains' functions
The solution is to move all non-authentication fields to a `Profiles`
domain:
```elixir
defmodule MyApp.Profiles.Profile do
schema "profiles" do
field :user_id, :integer
field :first_name, :string
field :last_name, :string
field :skype, :string
end
end
```
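After the move, `Accounts.User` keeps only authentication data. A minimal sketch, assuming email and the encrypted password are the only fields that remain from the schema above:
```elixir
defmodule MyApp.Accounts.User do
  schema "users" do
    field :email, :string
    field :encrypted_password, :string
  end
end
```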
Whenever you need profile information, fetch it using a `Profiles` domain
function.
```elixir
Profiles.get_profile(user_id) # Or an Accounts.Token struct
```
Pass either a raw `user_id` or `Accounts.Token` struct (as described in the
[Authorization guide](authorization.html)), not the full user.
### Why
Global user modules become a dangerous form of global state. Everything
in the application begins to rely on them. A change to the user potentially
affects everything in the application.
By limiting the user to only authentication-related fields, and only
passing user_id instead of the whole user, you get several benefits:
- Each domain can handle its user-related data internally, instead of
relying on it coming in from outside. This allows great flexibility
within each domain.
- Refactoring any domain is easy, because only `user_id` is shared. You
can change anything about how authentication is done, and nothing
will break.
See ["What's wrong with a global User module?"](https://medium.com/appunite-edu-collection/whats-wrong-with-a-global-user-module-ed7ed013a519)
for more details.
|
TypeScript
|
UTF-8
| 8,562 | 2.859375 | 3 |
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
import {
ConstValue,
FieldDefinition,
FunctionDefinition,
FunctionType,
PropertyAssignment,
SyntaxType,
ThriftStatement,
} from '@creditkarma/thrift-parser'
import { IResolveContext, IResolveResult } from '../types'
import { resolveIdentifierDefinition } from './resolveIdentifierDefinition'
import { resolveIdentifierWithAccessor } from './resolveIdentifierWithAccessor'
interface IIdentifiersForFieldTypeConfig {
// Is this identifier being resolved in a context where we need to know the underlying type of typedefs?
resolveTypedefs?: boolean
// This flag toggles whether we need to continue recursing along typedefs and struct defs
recursiveResolve?: boolean
}
/**
* Resolve all identifiers used by a given field type
* @param fieldType The field type we are checking for identifiers
* @param results (MUTATED) result set which will contain all fieldtype identifiers
* @param context Resolver context used for finding identifier definitions
* @param config Further behavior configuration
*/
function identifiersForFieldType(
fieldType: FunctionType,
results: Set<string>,
context: IResolveContext,
config: IIdentifiersForFieldTypeConfig = {},
): void {
const { resolveTypedefs = false, recursiveResolve = false } = config
switch (fieldType.type) {
case SyntaxType.Identifier:
results.add(fieldType.value)
if (resolveTypedefs) {
const result: IResolveResult = resolveIdentifierDefinition(
fieldType,
{
currentNamespace: context.currentNamespace,
namespaceMap: context.namespaceMap,
},
)
const definition = result.definition
const namespace = result.namespace
if (definition.type === SyntaxType.TypedefDefinition) {
identifiersForFieldType(
definition.definitionType,
results,
context,
)
}
if (
recursiveResolve &&
definition.type === SyntaxType.StructDefinition
) {
for (const field of definition.fields) {
// HACK(josh): If the definition namespace is not part of an identifier
// fieldtype we must stub it in for it to be referenced properly. This is
// because of how resolveIdentifierDefinition works. There should be a better
// way which preserves current namespace
let { fieldType: defFieldType } = field
if (defFieldType.type === SyntaxType.Identifier) {
defFieldType = resolveIdentifierWithAccessor(
defFieldType,
namespace,
context.currentNamespace,
)
// Do result check to avoid infinite tail recursion
if (!results.has(defFieldType.value)) {
identifiersForFieldType(
defFieldType,
results,
context,
config,
)
}
} else {
identifiersForFieldType(
defFieldType,
results,
context,
)
}
}
}
}
break
case SyntaxType.MapType:
identifiersForFieldType(fieldType.keyType, results, context)
identifiersForFieldType(fieldType.valueType, results, context)
break
case SyntaxType.SetType:
case SyntaxType.ListType:
identifiersForFieldType(fieldType.valueType, results, context)
break
}
}
function identifiersForConstValue(
constValue: ConstValue | null,
results: Set<string>,
): void {
if (constValue !== null) {
switch (constValue.type) {
case SyntaxType.Identifier:
results.add(constValue.value)
break
case SyntaxType.ConstList:
constValue.elements.forEach((next: ConstValue) => {
identifiersForConstValue(next, results)
})
break
case SyntaxType.ConstMap:
constValue.properties.forEach((next: PropertyAssignment) => {
identifiersForConstValue(next.name, results)
identifiersForConstValue(next.initializer, results)
})
}
}
}
/**
* We're going to loop through the provided statements and find the Identifiers being used by these statements.
*
* The complicating factor here is that this is used to determine imports for a given file. In some cases a
* file may need an identifier not explicitly in the AST node. For instance if a Identifer refers to a typedef
* that aliases a map we may need to know the key and value types of the map so the including file can import
* those types to handle encoding/decoding of those types.
*/
export function identifiersForStatements(
statements: Array<ThriftStatement>,
context: IResolveContext,
): Array<string> {
const results: Set<string> = new Set()
statements.forEach((next: ThriftStatement) => {
switch (next.type) {
case SyntaxType.IncludeDefinition:
case SyntaxType.CppIncludeDefinition:
case SyntaxType.NamespaceDefinition:
case SyntaxType.EnumDefinition:
// Ignore
break
case SyntaxType.ConstDefinition:
identifiersForFieldType(next.fieldType, results, context, {
recursiveResolve: true,
resolveTypedefs: true,
})
identifiersForConstValue(next.initializer, results)
break
case SyntaxType.TypedefDefinition:
identifiersForFieldType(next.definitionType, results, context)
break
case SyntaxType.StructDefinition:
case SyntaxType.UnionDefinition:
case SyntaxType.ExceptionDefinition:
next.fields.forEach((field: FieldDefinition) => {
identifiersForFieldType(field.fieldType, results, context, {
resolveTypedefs: true,
})
identifiersForConstValue(field.defaultValue, results)
})
break
case SyntaxType.ServiceDefinition:
if (next.extends) {
results.add(next.extends.value)
}
next.functions.forEach((func: FunctionDefinition) => {
func.fields.forEach((field: FieldDefinition) => {
identifiersForFieldType(
field.fieldType,
results,
context,
{
resolveTypedefs: true,
},
)
identifiersForConstValue(field.defaultValue, results)
})
func.throws.forEach((field: FieldDefinition) => {
identifiersForFieldType(
field.fieldType,
results,
context,
{
resolveTypedefs: true,
},
)
identifiersForConstValue(field.defaultValue, results)
})
identifiersForFieldType(func.returnType, results, context, {
resolveTypedefs: true,
})
})
break
default:
const _exhaustiveCheck: never = next
throw new Error(`Non-exhaustive match for ${_exhaustiveCheck}`)
}
})
return Array.from(results)
}
|
Python
|
UTF-8
| 785 | 3.0625 | 3 |
[] |
no_license
|
from string import ascii_lowercase
import sys
def check_recycle(n, m):
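    # (n, m) form a recycled pair when m is a digit rotation of n; every rotation
    # of n appears as a substring of n concatenated with itself.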
n = str(n)
m = str(m)
if len(n) <> len(m):
return 0
t = n + n
if t.find(m) > -1:
return 1
return 0
def process(A, B):
count = 0
for n in range(A,B):
for m in range(n+1, B+1):
val = check_recycle(n,m)
if val:
count += val
return count
if __name__ == '__main__':
fname = sys.argv[1]
rf = open(fname + '.in', 'r')
of = open(fname + '.out', 'w')
tt = int(rf.readline())
for t in range(tt):
input = rf.readline().strip()
arr = input.split(' ')
A,B = int(arr[0]), int(arr[1])
output = process(A,B)
of.write('Case #%d: %d\n' % (t+1, output))
|