language
stringclasses
15 values
src_encoding
stringclasses
34 values
length_bytes
int64
6
7.85M
score
float64
1.5
5.69
int_score
int64
2
5
detected_licenses
listlengths
0
160
license_type
stringclasses
2 values
text
stringlengths
9
7.85M
Markdown
UTF-8
4,173
3.765625
4
[]
no_license
## Data Structures Data structures in IFJ15 are implemented to be as generic as possible. They accept keys and items of type `void*` (with exceptions) and don't care if you interpret that as an `int` or a pointer to some structure. **Warning:** This means that the data structures don't do type checking. They make no assumptions about the type of things you put in them and it's up to you to make sure they are homogeneous. Every data structure has a `*_init()` function. You should use this function to initialize your data structure. Additional information on this can be found in the [Memory Management](/doc/Memory.md) section. ### API #### Hash table - `void htable_set(htable_t* htable, char* key, void* item)` - Inserts `item` into hash table `htable` under the key `key`. If an entry under the `key` already exists, it overrides it. So be careful with that. - `void* htable_get(htable_t* htable, char* key)` - Returns the element from `htable` under the key `key`. If there is no such element, returns `NULL`. - `void htable_remove(htable_t* htable, char* key)` - Removes the element under key `key` from `htable`. - `void* htable_pop(htable_t* htable, char* key)` - Returns the `item` saved under `key` in `htable` and removes the entry from the hash table. #### Vector *Note: Implemented as a resizable array for more uniform memory access.* - `void vector_push_back(vector_t* vector, void* item)` - Inserts `item` at the end of `vector`. - `void vector_push_front(vector_t* vector, void* item)` - Inserts `item` at the beginning of `vector`. **Warning:** This method shifts the indexes of your elements. Also, use `vector_push_back` in preference to this method, unless you have a reason otherwise, due to higher efficiency during resizing, if you only use `vector_push_back`. - `void* vector_pop_back(vector_t* vector)` - Returns the last element of `vector` and removes it from `vector`. Returns `NULL` and a warning to `stderr` if you try to use this method on an empty vector. 
- `void* vector_pop_front(vector_t* vector)` - Returns the first element of `vector` and removes it from `vector`. Returns `NULL` and a warning if you try to use this method on an empty vector. - `void* vector_at(vector_t* vector, size_t pos)` - Random access method for the vector. Returns the element at `pos` in `vector`. #### Pointer table *Note: The pointer table differs from the hash table in that it's optimized to hash integers/pointers as keys instead of strings.* - `void ptable_insert(ptable_t* ptable, void* ptr, void* item)` - Inserts `item` into the hash table `ptable` under the key `ptr`. If an entry under the `ptr` already exists, it overrides it. So be careful with that. - `void* ptable_pop(ptable_t* ptable, char* key)` - Returns the `item` saved under `key` in `ptable` and removes the entry from the pointer table. #### Unique Linked List *Note: Used for implementation of Hash table and Pointer table. For most use cases, you should use Vector over ULL for greater efficiency.* - `void ulist_set(ulist_t* ulist, void* key, void* item)` - Inserts `item` into the linked ulist `ulist` under key `key`. - This function has a nice trick (implemented using C11 \_Generic support), which is that if you pass it a `char*`, it uses a `strcmp` to compare the keys, otherwise defaults to `==`. If it's a `char*` it needs to be **a zero terminated char array.** - `void* ulist_get(ulist_t* ulist, void* key)` - Returns `item` associated with `key` from `ulist` or `NULL` if the key is not inside the list. You need to check for that. - `void* ulist_pop(ulist_t* ulist, void*/char* key)` - Returns the item associated with `key` from `ulist` and removes the entry from ulist. If no entry in `ulist` is associated with `key`, returns `NULL`. 
### Important Tips - **All of the data structures in IFJ15 save keys and items by reference.** This means you need to dynamically allocate, and deallocate the keys and items separately from the data structures (unless they are global / can be contained in a void* directly) for them to remain accessible once they leave scope of where they were inserted into the data structures.
C++
UTF-8
1,236
2.640625
3
[]
no_license
#include "stdafx.h"
#include "CommonTouchMessageHandler.h"
#include "logger.h"

#define LOG_TAG "TouchHandler"

// Start with every finger slot cleared (not touched, position 0,0).
CommonTouchMessageHandler::CommonTouchMessageHandler()
{
    memset( mTouchPoints, 0, sizeof( TouchPoint ) * cMaxNumFingers );
}

CommonTouchMessageHandler::~CommonTouchMessageHandler()
{
}

// Record the latest event for one finger; ids outside the tracked range are
// silently dropped.
void CommonTouchMessageHandler::ProcessTouchInput( TouchEventType type, int fingerId, int x, int y )
{
    // LOG_INFO( "%d [ %d ] : %d, %d", type, fingerId, x, y );
    if ( !IsVaildId( fingerId ) )
    {
        return;
    }

    TouchPoint& point = mTouchPoints[fingerId];
    // Every event type except TOUCH_UP leaves the finger in the "down" state.
    point.isTouched = ( type != TOUCH_UP );
    point.x = x;
    point.y = y;
}

// A finger id is usable when it indexes into mTouchPoints.
bool CommonTouchMessageHandler::IsVaildId( int id )
{
    return ( 0 <= id ) && ( id < cMaxNumFingers );
}

// True while the given finger is currently down; false for invalid ids.
bool CommonTouchMessageHandler::IsTouchDown( int id ) const
{
    return IsVaildId( id ) ? mTouchPoints[id].isTouched : false;
}

// Last known x coordinate for the finger (0 for invalid ids).
int CommonTouchMessageHandler::GetTouchX( int id ) const
{
    return IsVaildId( id ) ? mTouchPoints[id].x : 0;
}

// Last known y coordinate for the finger (0 for invalid ids).
int CommonTouchMessageHandler::GetTouchY( int id ) const
{
    return IsVaildId( id ) ? mTouchPoints[id].y : 0;
}
Java
UTF-8
1,801
2.109375
2
[]
no_license
package net.arunoday.web.book;

import java.util.HashMap;

import net.arunoday.entity.Book;
import net.arunoday.web.BasePage;
import net.arunoday.web.book.model.BookDetachableModel;

import org.apache.wicket.PageParameters;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.html.panel.EmptyPanel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.ResourceModel;
import org.wicketstuff.annotation.mount.MountPath;

/**
 * Book search page
 *
 * Mounted at "search"; combines a {@code SearchBooksPanel} (the form) with a
 * result area that is an {@code EmptyPanel} until a search model is supplied.
 *
 * @author Aparna Chaudhary (aparna.chaudhary@gmail.com)
 */
@MountPath(path = "search")
@SuppressWarnings("unchecked")
public class SearchBooksPage extends BasePage {

	private static final long serialVersionUID = 1L;

	/**
	 * Default constructor: renders the search form with an empty result area.
	 */
	public SearchBooksPage() {
		add(new SearchBooksPanel("searchPanel"));
		add(new EmptyPanel("resultPanel"));
	}

	/**
	 * Constructor that is invoked when page is invoked without a session.
	 *
	 * @param parameters Page parameters (unused here; same layout as default)
	 */
	public SearchBooksPage(PageParameters parameters) {
		add(new SearchBooksPanel("searchPanel"));
		add(new EmptyPanel("resultPanel"));
	}

	/**
	 * Renders the search form together with the results for the given model.
	 * Selecting a book extracts its id into a parameter map; navigation to a
	 * book detail page is present but currently commented out.
	 *
	 * @param searchBooksModel model holding the current search criteria
	 */
	@SuppressWarnings("serial")
	public SearchBooksPage(IModel searchBooksModel) {
		add(new SearchBooksPanel("searchPanel", searchBooksModel));
		add(new BooksSearchResultPanel("resultPanel", searchBooksModel) {
			protected void onBookSelect(AjaxRequestTarget target, BookDetachableModel model) {
				Long id = ((Book) model.getObject()).getId();
				HashMap map = new HashMap();
				map.put("id", id);
				// setResponsePage(BookPage.class, new PageParameters(map));
			}
		});
	}

	/** @return localized page title resolved from "searchBooksPage.title" */
	protected IModel getTitleModel() {
		return new ResourceModel("searchBooksPage.title");
	}
}
Python
UTF-8
4,485
3.171875
3
[ "MIT" ]
permissive
#!/usr/bin/env python3

# Author:: Justin Flannery (mailto:juftin@juftin.com)

"""
Camply Configuration Script
"""

from collections import OrderedDict
from datetime import datetime
import logging
from os.path import isfile
from time import sleep

from camply.config import FileConfig

logger = logging.getLogger(__name__)


def get_log_input(message: str) -> str:
    """
    Prompt for user input with a prefix that matches the log format :)

    Parameters
    ----------
    message: str
        The message you'd like to print before getting input

    Returns
    -------
    str
        Raw string entered by the user
    """
    datetime_string = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]
    input_string = f"{datetime_string} [ INPUT]: {message} : "
    value = input(input_string)
    return value


def double_check(message: str) -> bool:
    """
    Double check if a step should be taken within CLI

    Parameters
    ----------
    message: str
        Message to log in interactive shell

    Returns
    -------
    bool
        ``True`` only when the user answers "y" to both prompts
    """
    # Fix: previously used the root ``logging`` module here, bypassing the
    # module-level ``logger`` used everywhere else in this file.
    while True:
        first_confirmation = get_log_input(message=message)
        if first_confirmation.lower() == "y":
            second_confirmation = get_log_input("Are you sure? (y/n)")
            if second_confirmation.lower() == "y":
                return True
            # Anything other than a second "y" aborts the step.
            logger.info("Okay, skipping")
            return False
        elif first_confirmation.lower() == "n":
            logger.info("Okay, skipping")
            return False
        else:
            # Unrecognized input: warn and re-prompt.
            logger.warning("Make sure to enter a 'y' or 'n'")


def check_dot_camply_file() -> bool:
    """
    Check to see if the `.camply` file already exists, and return the file
    existence status

    Returns
    -------
    bool
    """
    if isfile(FileConfig.DOT_CAMPLY_FILE) is True:
        logger.info("Skipping configuration. `.camply` file already exists: "
                    f"{FileConfig.DOT_CAMPLY_FILE}")
        return True
    else:
        return False


def generate_configuration() -> OrderedDict:
    """
    Generate the Camply Configuration Config

    Returns
    -------
    OrderedDict
        Dict of configuration values
    """
    config_dict = FileConfig.DOT_CAMPLY_FIELDS.copy()
    for field, field_dict in config_dict.items():
        default_value = field_dict["default"]
        field_note = field_dict['notes']
        if field_note is not None:
            logger.info(f"{field}: {field_note}")
        message = f"Enter value for `{field}`"
        if default_value != "":
            message += f" (default: `{default_value}`)"
        logged_input = get_log_input(message=message).strip()
        config_value = logged_input if logged_input != '' else default_value
        # Replace the field's metadata dict with the chosen value. Only
        # existing keys are reassigned during iteration, so the dict view
        # stays valid.
        config_dict[field] = config_value
    return config_dict


def write_config_to_file(config_dict: OrderedDict) -> None:
    """
    Write the Configuration Object to a file

    Parameters
    ----------
    config_dict : OrderedDict
        Configuration Object
    """
    string_list = [
        "# CAMPLY CONFIGURATION FILE. ",
        "# SEE https://github.com/juftin/camply/blob/main/example.camply FOR MORE DETAILS",
        ""
    ]
    for config_key, config_value in config_dict.items():
        string_list.append(f'{config_key}="{config_value}"')
    string_list.append("")
    # (Removed a no-op seek(0) that followed the write.)
    with open(FileConfig.DOT_CAMPLY_FILE, "w") as file_object:
        file_object.write("\n".join(string_list))


def generate_dot_camply_file():
    """
    Perform the larger Dot Camply File Generation
    """
    logger.info("Running camply configuration.")
    logger.info("This process generates a configuration file "
                "(https://github.com/juftin/camply/blob/main/example.camply)")
    logger.info("Do not include quotes around values")
    logger.info("To skip a configuration field or keep it as default, just press <Enter>.")
    sleep(1.5)
    if isfile(FileConfig.DOT_CAMPLY_FILE):
        logger.warning(f".camply file already exists on this machine: {FileConfig.DOT_CAMPLY_FILE}")
        overwrite = double_check("Would you like to overwrite your `.camply` configuration file?")
        if overwrite is False:
            exit(0)
    config = generate_configuration()
    if double_check(f"Are you ready to publish this to a file at {FileConfig.DOT_CAMPLY_FILE}"):
        write_config_to_file(config_dict=config)
        logger.info(f"`.camply` file written to machine: {FileConfig.DOT_CAMPLY_FILE}")
C++
UTF-8
3,956
3.109375
3
[]
no_license
// // Client.cpp // lab5 // Client implementation // #include "Client.h" #include "Account.h" #include "ODAccount.h" #include <iomanip> //------------------------------------------------------------------------------ // Constructor // create a client and all of their accounts and history of transactions Client::Client(string last, string first,int id, int mm, int pmm, int ltb, int stb, int if500, int cvf, int gef, int gif, int vf, int vsi) { ID = id; owner = new Owner(last, first); cHistory = new ClientHistory(); //ODAccounts take a reference to the pointer of the backup account accounts[MM] = new ODAccount(mm,&accounts[PMM]); accounts[PMM] = new ODAccount(pmm,&accounts[MM]); accounts[LTB] = new ODAccount(ltb,&accounts[STB]); accounts[STB] = new ODAccount(stb,&accounts[LTB]); accounts[IF500] = new Account(if500); accounts[CVF] = new Account(cvf); accounts[GEF] = new Account(gef); accounts[GIF] = new Account(gif); accounts[VF] = new Account(vf); accounts[VSI] = new Account(vsi); } //------------------------------------------------------------------------------ // Destructor // deallocate memory from client Client::~Client() { delete owner; for(int i = 0; i < FINAL_acct_ENTRY; i++) { delete accounts[i]; } delete cHistory; } //------------------------------------------------------------------------------ // getID // Returns the clients ID int Client::getID() const { return ID; } //------------------------------------------------------------------------------ // getHistory // display the history of transactions void Client::getHistory() const { cout << "History of trancactions for client " << owner->getFirstName() << " " << owner->getLastName() << ", client ID = " << ID << endl; cHistory->displayHistory(); cout << endl; } //------------------------------------------------------------------------------ // getOwner // retrieve clients information /*void Client::getOwner() const { cout << *owner; }*/ 
//------------------------------------------------------------------------------ // operator > // bool Client::operator>( const Client& compareTo) const { return ID > compareTo.ID; } //------------------------------------------------------------------------------ // operator < // bool Client::operator<(const Client& compareTo) const { return ID < compareTo.ID; } //------------------------------------------------------------------------------ // operator == // bool Client::operator==(const Client& compareTo) const { return ID == compareTo.ID; } //------------------------------------------------------------------------------ // addToHistory // add a transaction to the history void Client::addToHistory(Transaction tIn) { cHistory->addToHistory(tIn); } //------------------------------------------------------------------------------ // depositToAccount // deposit an amount into a clients account bool Client::depositToAccount(int amount, int account) { return accounts[account%10]->deposit(amount); } //------------------------------------------------------------------------------ // withdrawFromAccount // withdraw funds from a clients account int Client::widthdrawlFromAccount(int amount, int account) { return accounts[account%10]->withdraw(amount); } //------------------------------------------------------------------------------ // operator << // overload to output a client's acconts starting and current balance ostream& operator<<(ostream& output, const Client& T) { output << setw(5) << left << T.ID << " " << T.owner->getFirstName() << " " << T.owner->getLastName() <<endl; output << setw(18) << left << "Initial Balances: "; for(int i = 0; i < FINAL_acct_ENTRY; i++) { output << setw(6) << right << T.accounts[i]->getStartBal(); } output << endl; output << setw(18) << left << "Final Balances: "; for(int i = 0; i < FINAL_acct_ENTRY; i++) { output << setw(6) << right << T.accounts[i]->getBalance(); } output << endl; return output; }
Python
UTF-8
1,365
3.359375
3
[]
no_license
import numpy
from pylab import *


# 1a: Compute convolution of a 2D (grayscale) image and a 2D filter

def zeroPad(img, h, w, h_pad, w_pad):
    """Return img centered inside a zero border.

    h, w are img's dimensions; the result has shape (h + h_pad, w + w_pad)
    and dtype uint8 (matching the original implementation).
    """
    ni_h = h + h_pad
    ni_w = w + w_pad
    new_img = numpy.zeros((ni_h, ni_w), 'uint8')
    # Slice assignment replaces the original element-by-element loops (and
    # avoids the original's out-of-bounds read when the padding is odd).
    top = h_pad // 2
    left = w_pad // 2
    new_img[top:top + h, left:left + w] = img
    return new_img


def flipFilter(f):
    """Flip a 2D filter along both axes (180-degree rotation)."""
    return f[::-1, ::-1]


def getPixelValue(padded_img, filter, i, j):
    """Weighted sum of the filter over the window of padded_img at (i, j).

    Returns a Python int; no clipping or normalization is applied here.
    """
    f_h, f_w = filter.shape
    summed = 0
    for k in range(f_h):
        for l in range(f_w):
            summed += filter[k][l] * padded_img[i + k][j + l]
    return summed


def convolution(image, filter):
    """Convolve an RGB image (converted to grayscale) with a 2D filter and
    display the result.

    NOTE(review): the output buffer is uint8, so large filter responses
    truncate/wrap on assignment — normalize the filter if that matters.
    """
    # Integer average of the three channels -> grayscale.
    img_grey = (image[..., 0] + image[..., 1] + image[..., 2]) // 3

    # Pad by (filter size - 1) so every output pixel sees a full window.
    f_h, f_w = filter.shape
    h, w = img_grey.shape
    padded = zeroPad(img_grey, h, w, f_h - 1, f_w - 1)

    # Convolution = correlation with the flipped filter.
    flipped = flipFilter(filter)

    img_copy = img_grey.copy()
    for i in range(h):
        for j in range(w):
            img_copy[i][j] = getPixelValue(padded, flipped, i, j)

    imshow(img_copy)
    gray()
    show()


if __name__ == '__main__':
    # print() form runs under both Python 2 and 3 (original used py2-only
    # `print "hi"` statements).
    print("hi")
    f = numpy.array([[1, 1, 1], [1, 1, 1], [1, 1, 1]])
    print(f)
    im = imread("image.jpg")
    convolution(im, f)
C#
UTF-8
2,132
2.78125
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
using System;
using System.IO;

using Org.BouncyCastle.Utilities;

namespace Org.BouncyCastle.Tls
{
    /// <summary>
    /// Pairs an opaque PSK identity with its obfuscated ticket age, and
    /// handles encoding/decoding of the pair in its TLS wire format.
    /// </summary>
    public sealed class PskIdentity
    {
        private readonly byte[] m_identity;
        private readonly long m_obfuscatedTicketAge;

        /// <exception cref="ArgumentNullException">when identity is null.</exception>
        /// <exception cref="ArgumentException">when identity length or ticket age is out of range.</exception>
        public PskIdentity(byte[] identity, long obfuscatedTicketAge)
        {
            if (identity == null)
                throw new ArgumentNullException("identity");
            if (identity.Length < 1 || !TlsUtilities.IsValidUint16(identity.Length))
                throw new ArgumentException("should have length from 1 to 65535", "identity");
            if (!TlsUtilities.IsValidUint32(obfuscatedTicketAge))
                throw new ArgumentException("should be a uint32", "obfuscatedTicketAge");

            m_identity = identity;
            m_obfuscatedTicketAge = obfuscatedTicketAge;
        }

        /// <summary>Size on the wire: 2-byte length prefix + identity bytes + 4-byte ticket age.</summary>
        public int GetEncodedLength()
        {
            return m_identity.Length + 6;
        }

        public byte[] Identity
        {
            get { return m_identity; }
        }

        public long ObfuscatedTicketAge
        {
            get { return m_obfuscatedTicketAge; }
        }

        /// <summary>Writes the identity (opaque16) followed by the ticket age (uint32).</summary>
        public void Encode(Stream output)
        {
            TlsUtilities.WriteOpaque16(Identity, output);
            TlsUtilities.WriteUint32(ObfuscatedTicketAge, output);
        }

        /// <summary>Reads one PskIdentity in the same wire format Encode produces.</summary>
        public static PskIdentity Parse(Stream input)
        {
            byte[] identity = TlsUtilities.ReadOpaque16(input, 1);
            long obfuscatedTicketAge = TlsUtilities.ReadUint32(input);
            return new PskIdentity(identity, obfuscatedTicketAge);
        }

        public override bool Equals(object obj)
        {
            if (!(obj is PskIdentity))
                return false;

            PskIdentity other = (PskIdentity)obj;
            // Identity bytes are compared in constant time.
            return m_obfuscatedTicketAge == other.m_obfuscatedTicketAge
                && Arrays.FixedTimeEquals(m_identity, other.m_identity);
        }

        public override int GetHashCode()
        {
            return Arrays.GetHashCode(m_identity) ^ m_obfuscatedTicketAge.GetHashCode();
        }
    }
}
Java
UTF-8
400
2.109375
2
[]
no_license
package si.fri.prpo.skupina27.storitve.dtos;

/**
 * DTO for a "dodajanje vrat" (adding doors) request: carries the target
 * room id and a door count. (Field names are Slovene — "soba" = room,
 * "st. vrat" = number of doors.)
 */
public class DodajanjeVratDto {

    // id of the target room ("soba")
    private int sobaId;
    // number of doors ("vrata") involved in the operation
    private int stVrat;

    /** @return id of the target room */
    public int getSobaId() {
        return sobaId;
    }

    /** @param sobaId id of the target room */
    public void setSobaId(int sobaId) {
        this.sobaId = sobaId;
    }

    /** @return number of doors */
    public int getStVrat() {
        return stVrat;
    }

    /** @param stVrat number of doors */
    public void setStVrat(int stVrat) {
        this.stVrat = stVrat;
    }
}
TypeScript
UTF-8
997
2.609375
3
[]
no_license
import { Injectable } from '@angular/core';
import { FormControl, FormGroup, ValidationErrors } from '@angular/forms';
import { Observable } from 'rxjs';
import { delay } from 'rxjs/operators';
import { HttpClient } from '@angular/common/http';

/**
 * Form validators used across the app: a letters-only username check, a
 * password/confirmation equality check, and an async username-uniqueness
 * check backed by the auth API.
 */
@Injectable()
export class ValidationService {

  constructor(
    private http: HttpClient
  ) { }

  /**
   * Sync validator: the control value may contain only ASCII letters.
   * (Fixed grammar in the user-facing message and dropped its stray
   * trailing space.)
   */
  public usernameSpecialSymbols(control: FormControl): ValidationErrors | null {
    const valid = /^[a-zA-Z]*$/.test(control.value);
    return valid ? null : { username: 'Should contain only letters' };
  }

  /**
   * Group validator: the two controls of the group must hold equal values.
   * NOTE(review): relies on the group declaring the password control before
   * the confirmation control (Object.values preserves insertion order) —
   * verify against the form definition.
   */
  public equalValidator({value}: FormGroup): ValidationErrors | null {
    const [password, cpassword] = Object.values(value);
    return password === cpassword ? null : { password: 'Passwords do not match' };
  }

  /**
   * Async validator: posts the username to /auth/checkUsername; the server
   * response is the ValidationErrors payload. The 3s delay debounces rapid
   * typing.
   */
  public uniqueUsername({value: username}: FormControl): Observable<ValidationErrors | null> {
    return this.http.post('/auth/checkUsername', {username}).pipe(delay(3000));
  }
}
C++
UTF-8
4,503
3.734375
4
[]
no_license
/* Ly Cao CS2124 ELAB This program combines Hydrocarbon of the same formulas but different names and output these formulas in the order according to the number of carbons and hydrogens they have. */ #include <iostream> #include <fstream> #include <string> #include <vector> using namespace std; //consistent naming convention struct Hydrocarbon{ vector<string> names; int numOfCarbons; int numOfHydrogens; }; void fillTheVector(ifstream& ifs, vector<Hydrocarbon>& hydrocarbons); void sortFormulas(vector<Hydrocarbon>& hydrocarbons); void displayVector(const vector<Hydrocarbon>& hydrocarbons); size_t findLocation(const vector<Hydrocarbon>& hydrocarbons, int numOfCarbons, int numOfHydrogens); void addAMolecule(vector<Hydrocarbon>& hydrocarbons, string& name, int numOfCarbons, int numOfHydrogens); void openStream(ifstream& hydrocarbon_formulas); int main(){ ifstream hydrocarbon_formulas; openStream(hydrocarbon_formulas); vector<Hydrocarbon> hydrocarbons; fillTheVector(hydrocarbon_formulas, hydrocarbons); hydrocarbon_formulas.close(); sortFormulas(hydrocarbons); displayVector(hydrocarbons); } void fillTheVector(ifstream& ifs, vector<Hydrocarbon>& hydrocarbons){ char carbon; char hydrogen; string name; int numOfCarbons; int numOfHydrogens; while(ifs >> name >> carbon >> numOfCarbons >> hydrogen >> numOfHydrogens){ addAMolecule(hydrocarbons, name, numOfCarbons, numOfHydrogens); } } void sortFormulas(vector<Hydrocarbon>& hydrocarbons){ //e.g: size_t makes into int => casting (1 data type to another) for(size_t i = 0; i < hydrocarbons.size(); ++i){ bool done = true; for(size_t j = 0; j < hydrocarbons.size() - 1 - i; ++j){ if(hydrocarbons[j].numOfCarbons > hydrocarbons[j+1].numOfCarbons){ Hydrocarbon temp = hydrocarbons[j]; hydrocarbons[j] = hydrocarbons[j+1]; hydrocarbons[j+1] = temp; done = false; } else if(hydrocarbons[j].numOfCarbons == hydrocarbons[i].numOfCarbons && hydrocarbons[j].numOfHydrogens > hydrocarbons[i].numOfHydrogens){ Hydrocarbon temp = hydrocarbons[j]; 
hydrocarbons[j] = hydrocarbons[j+1]; hydrocarbons[j+1] = temp; done = false; } } if(done){ break; } } } void displayVector(const vector<Hydrocarbon>& hydrocarbons){ for(int i = 0; i < hydrocarbons.size(); ++i){ cout << 'C' << hydrocarbons[i].numOfCarbons << 'H' << hydrocarbons[i].numOfHydrogens; for(int j = 0; j < hydrocarbons[i].names.size(); ++j){ cout << " " + hydrocarbons[i].names[j]; cout << " "; } cout << endl; } } size_t findLocation(const vector<Hydrocarbon>& hydrocarbons, int numOfCarbons, int numOfHydrogens){ for(size_t i = 0; i < hydrocarbons.size(); ++i){ if(hydrocarbons[i].numOfCarbons == numOfCarbons && hydrocarbons[i].numOfHydrogens == numOfHydrogens){ return i; } } return hydrocarbons.size(); } void addAMolecule(vector<Hydrocarbon>& hydrocarbons, string& name, int numOfCarbons, int numOfHydrogens){ int locationOfFormula = findLocation(hydrocarbons, numOfCarbons, numOfHydrogens); if(locationOfFormula == hydrocarbons.size()){ Hydrocarbon hydrocarbon; hydrocarbon.names.push_back(name); hydrocarbon.numOfCarbons = numOfCarbons; hydrocarbon.numOfHydrogens = numOfHydrogens; hydrocarbons.push_back(hydrocarbon); } else{ hydrocarbons[locationOfFormula].names.push_back(name); } } void openStream(ifstream& hydrocarbon_formulas){ string fileName; cout << "Please enter the correct filename" << endl; cin >> fileName; // hydrocarbon_formulas = ifstream(fileName); hydrocarbon_formulas.open(fileName); //this is only checking the outer varibale, not the variable created inside while loop while(!hydrocarbon_formulas){ //already clear the outer variable here hydrocarbon_formulas.clear(); cout << "Please enter the correct filename" << endl; cin >> fileName; //this is a local variable // ifstream hydrocarbon_formulas(fileName); //hydrocarbon_formulas = ifstream(fileName); hydrocarbon_formulas.open(fileName); } }
TypeScript
UTF-8
1,334
2.546875
3
[]
no_license
import * as request from 'request';

import { TStrings } from '../models/strings';
import { logError } from '../utils';

export interface IGetProjectStringsOptions {
  token: string;
  projectId: string;
  langs: string[];
}

export interface IGetProjectStringsResponse {
  strings?: TStrings;
}

/**
 * Fetches translation strings for a Lokalise project via the legacy
 * `string/list` endpoint, resolving with the `strings` payload.
 *
 * NOTE(review): despite the `Promise<TStrings | null>` type, every failure
 * path REJECTS with `null` (discarding the underlying error object) instead
 * of resolving with `null` — confirm callers expect a rejection here.
 */
export function getProjectStrings(options: IGetProjectStringsOptions): Promise<TStrings | null> {
  return new Promise<TStrings | null>((resolve, reject) => {
    request.post({
      formData: {
        api_token: options.token,
        icu_numeric: 0,
        id: options.projectId,
        langs: JSON.stringify(options.langs),
        placeholder_format: 'icu',
        // NOTE(review): magic platform bitmask — presumably selects one
        // target platform; verify against the Lokalise API docs.
        platform_mask: 4,
        plural_format: 'icu',
      },
      url: 'https://api.lokalise.co/api/string/list',
    }, (err, httpResponse, body: string) => {
      if (err) {
        // Transport-level failure: log, then reject (error is not forwarded).
        logError(err);
        reject(null);
        return;
      }
      try {
        const data: IGetProjectStringsResponse = JSON.parse(body) as IGetProjectStringsResponse;
        if (data && data.strings) {
          resolve(data.strings);
        } else {
          // Response parsed, but no `strings` field was present.
          reject(null);
        }
      } catch (error) {
        // Body was not valid JSON.
        logError(error);
        reject(null);
      }
    });
  });
}
Python
UTF-8
1,452
3.21875
3
[]
no_license
#!/usr/bin/env python # coding: utf-8 # In[1]: import pandas myData = pandas.read_csv('rentals.csv') # In[2]: print (myData) # In[3]: print (myData.shape) # In[4]: type(myData) # In[5]: myData.head() # In[6]: myData.head(10) # In[7]: myData.tail() # In[8]: myData.tail(8) # In[9]: myData.describe() # In[10]: from pandas.plotting import scatter_matrix myData.plot(kind='box',subplots=True,layout= (2,2),sharex=False,sharey=False) scatter_matrix(myData) import matplotlib.pyplot as plt plt.show() # In[11]: dataX = pandas.DataFrame({'area': myData.area}) # In[12]: dataX # In[13]: dataY = pandas.DataFrame({'cost': myData['cost']}) dataY # In[14]: from sklearn.model_selection import train_test_split trainX, testX, trainY, testY = train_test_split(dataX, dataY, test_size = 0.20, random_state = 11) # In[15]: trainX.head() # In[16]: trainY.head() # In[17]: trainX.shape # In[18]: testX.head() # In[45]: from sklearn.linear_model import SGDRegressor model = SGDRegressor(shuffle = False, eta0 = .0000001, max_iter =100, tol = 1000) # In[46]: model.fit(trainX, trainY.values.ravel()) # In[47]: print ('Coefficients: ', model.coef_) print ('Intercept: ', model.intercept_) print('iterations ran :',model.n_iter_); print ('R2: ', model.score (testX, testY)) # In[22]: ywhat = model.predict(testX) ywhat # In[ ]:
Python
UTF-8
1,139
3.390625
3
[]
no_license
#!/usr/bin/python import socket serverName = socket.gethostname() serverIP = socket.gethostbyname(serverName) serverPort = 12000 serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Establish a welcoming door, we will wait and listen for some client to knock on the door serverSocket.bind((serverIP, serverPort)) # This line has the server listen for TCP connection requests from the client. # The parameter specifies the maximum number of queued connections. serverSocket.listen(1) print('The server is ready to receive') while 1: # When a client knocks on this door, the program invokes the accept() method for serverSocket # which creates a new socket in the server, called connectionSocket, dedicated to this particular client. # The client and server complete the handshaking, creating a TCP connection between the client's clientSocket # and the server's connectionSocket. Arrive in order connectionSocket, addr = serverSocket.accept() sentence = connectionSocket.recv(1024) capitalizeSentence = sentence.upper() connectionSocket.send(capitalizeSentence) connectionSocket.close()
Java
UTF-8
772
1.867188
2
[]
no_license
package com.waben.stock.monitor;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.netflix.hystrix.EnableHystrix;
import org.springframework.cloud.netflix.hystrix.dashboard.EnableHystrixDashboard;
import org.springframework.cloud.netflix.turbine.EnableTurbine;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * Spring Boot entry point for the monitoring service: enables the Hystrix
 * dashboard with Turbine stream aggregation and exposes a trivial
 * liveness endpoint.
 */
@SpringBootApplication
@EnableHystrixDashboard
@EnableTurbine
@RestController
public class MonitorApplication {

	public static void main(String[] args) {
		SpringApplication.run(MonitorApplication.class, args);
	}

	/** Liveness check: always responds with the literal string "echo". */
	@GetMapping("/echo")
	public String echo() {
		return "echo";
	}
}
Java
UTF-8
6,625
1.6875
2
[]
no_license
// Test2 // C:/AXE_Projects/TestHarnessAPI/data/Tests.xml:Tests Test2 package axe; import objectmap.objectmap; import resources.axe.*; import com.jacob.activeX.ActiveXComponent; import com.jacob.com.Variant; import com.jacob.com.Dispatch; import com.rational.test.ft.*; import com.rational.test.ft.object.interfaces.*; import com.rational.test.ft.object.interfaces.SAP.*; import com.rational.test.ft.object.interfaces.WPF.*; import com.rational.test.ft.object.interfaces.dojo.*; import com.rational.test.ft.object.interfaces.siebel.*; import com.rational.test.ft.object.interfaces.flex.*; import com.rational.test.ft.object.interfaces.generichtmlsubdomain.*; import com.rational.test.ft.script.*; import com.rational.test.ft.value.*; import com.rational.test.ft.vp.*; import com.ibm.rational.test.ft.object.interfaces.sapwebportal.*; import java.io.IOException; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.*; import java.io.File; import helpers.CustomLib; import java.io.BufferedWriter; import java.io.FileWriter; public class Test2 extends Test2Helper { public static void testMain(Object[] args) { objectmap map = new objectmap(); axeInit("C:\\AXE_Projects\\TestHarnessAPI\\TestHarnessAPI.tpj", "debug"); Dispatch.call(axe, "TestBegin", "Test2" ,"Get a multi return from regions", "C:/AXE_Projects/TestHarnessAPI/data/Tests.xml", "Tests", "C:/AXE_Projects/TestHarnessAPI/results/debug/results.xml",""); try { // // C:/AXE_Projects/TestHarnessAPI/data/Tests.xml:regionsInfoByIana Test02 Dispatch.call(axe, "SubtestBegin", "Test02", "Multi returns", "C:/AXE_Projects/TestHarnessAPI/data/Tests.xml", "regionsInfoByIana"); Dispatch.call(axe, "StepBegin", "[]regionsInfoByIana", "load.file(regionsInfoByIana.xml)", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "Data", Dispatch.call(axe, "LoadFile", "regionsInfoByIana.xml").toString()); axeStepEnd(); Dispatch.call(axe, "StepBegin", "[]regionsInfoByIana", "Set", ""); Dispatch.put(axe, "ResultCode", 0); 
Dispatch.call(harness, "WsInitialise", Dispatch.call(axe, "GetRunCategoryOption", "webservice", "MobileFish"), "regionsInfoByIana", "http://www.mobilefish.com/services/web_service/countries.php", Dispatch.get(axe, "Data").toString(), Dispatch.call(axe, "GetRunCategoryOptions", "namespace"), Dispatch.call(axe, "GetRunCategoryOptionProperty", "webservice", "MobileFish", "username"), Dispatch.call(axe, "GetRunCategoryOptionProperty", "webservice", "MobileFish", "password")); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ianacode", "Set", "za"); Dispatch.put(axe, "ResultCode", 0); Dispatch.call(harness, "WsSetParameter", "ianacode", "za"); axeStepEnd(); Dispatch.call(axe, "StepBegin", "[]regionsInfoByIana", "Invoke", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.call(harness, "WsInvoke"); axeStepEnd(); Dispatch.call(axe, "StepBegin", "[]regionsInfoByIana", "get", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "Value", Dispatch.call(harness, "WsResponse")); axeStepEnd(); Dispatch.call(axe, "StepBegin", "[]regionsInfoByIana", "val.notequal", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "ResultCode", Dispatch.call(axe, "StepValidateNotEqual", "", Dispatch.get(axe, "Value"))); axeStepEnd(); Dispatch.call(axe, "StepBegin", "[]regionsInfoByIana", "get.nodecount", ""); Dispatch.put(axe, "ResultCode", 0); try { BufferedWriter out = new BufferedWriter(new FileWriter("test.txt")); out.write(Dispatch.get(axe, "Value").toString()); out.close(); } catch (Exception e) { System.out.println(e.getMessage()); } CustomLib n = new CustomLib(); Dispatch.put(axe, "ResultCode", Dispatch.call(axe,"StepValidateNotEqual","",n.getTheCount())) ; axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponse", "get", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "Value", Dispatch.call(harness, "WsGetResponseNodeText", "//allregionslist")); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponse", "val.notequal", ""); Dispatch.put(axe, "ResultCode", 0); 
Dispatch.put(axe, "ResultCode", Dispatch.call(axe, "StepValidateNotEqual", "", Dispatch.get(axe, "Value"))); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponse", "get", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "Value", Dispatch.call(harness, "WsGetResponseNodeText", "//allregionslist")); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponse", "val.notequal", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "ResultCode", Dispatch.call(axe, "StepValidateNotEqual", "", Dispatch.get(axe, "Value"))); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponseItem_2", "get", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "Value", Dispatch.call(harness, "WsGetResponseNodeText", "//allregionslist/item[2]")); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponseItem_2", "val.notequal", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "ResultCode", Dispatch.call(axe, "StepValidateNotEqual", "", Dispatch.get(axe, "Value"))); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponseItem_2_Code", "get", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "Value", Dispatch.call(harness, "WsGetResponseNodeText", "//allregionslist/item[2]/ianacode")); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponseItem_2_Code", "val.notequal", "za1"); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "ResultCode", Dispatch.call(axe, "StepValidateNotEqual", "za1", Dispatch.get(axe, "Value"))); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponseRegion", "get", ""); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "Value", Dispatch.call(harness, "WsGetResponseNodeText", "//allregionslist/item/regionname")); axeStepEnd(); Dispatch.call(axe, "StepBegin", "ListResponseRegion", "val", "Eastern Cape"); Dispatch.put(axe, "ResultCode", 0); Dispatch.put(axe, "ResultCode", Dispatch.call(axe, "StepValidateEqual", "Eastern Cape", Dispatch.get(axe, "Value"))); axeStepEnd(); axeSubtestEnd(); // // } 
catch(Exception ex) { axeTestAbort(ex.getMessage()); return; } Dispatch.call(axe, "TestEnd"); unregisterAll(); } }
Markdown
UTF-8
12,994
3.25
3
[ "MulanPSL-2.0", "LicenseRef-scancode-mulanpsl-2.0-en", "LicenseRef-scancode-unknown-license-reference" ]
permissive
--- --- --- title: 运输 --- 在整个民国时期,运输的发展始终是中国经济最弱的一环。无论从微观或从宏观来看,这都是显而易见的。中国的主要工厂汉阳铁厂,在1919年生产1吨生铁成本是48.50元;而满洲本溪的日本铁厂,1915年生产1吨生铁成本只需22.00元。本溪本地产的焦炭5.74元1吨。由于粤汉铁路修筑进展缓慢,从300英里外的江西萍乡用木船运送焦炭到汉阳,每吨成本上升到24.54元。[96]由于上述两家铁厂都是从自己控制的矿山中得到原料,所以其生产成本的差异,绝非1915年和1919年的市场价格不同所致。 苦力劳动工钱低得令人难以置信,而在地方运输中起主要作用的脚夫经济效率更低。一位考察者报告称: 在四川省,从渭河流域到成都平原的大路上,我们可能遇见背负160磅重棉花包的苦力。他们背着这些东西,一天走15英里,共要走750英里,一天一角七分钱(墨西哥银元),相当于一角四分钱一吨/英里。按照这个价钱,把一吨货物运送750英里,要花费106.25元;而铁路运输却只要15元,是人力运输费用的1/7。京奉铁路[97]为开滦煤矿公司运煤,一吨/英里不到一分钱,用苦力运棉花,路上要用50天,而铁路只用两天,从而节省48天的利息,并在更好的条件下卸棉花。[98] 在中国,几种主要的运输方式运货的比较费用,估计如下(分/吨公里):帆船,2分至12分;轮船和汽艇,2分至15分;铁路,3.2分至17分;大车,5分至16.5分;独轮车,10分至14分;骆驼,10分至20分;卡车,10分至56分;驴、骡和马,13.3分至25分;人力搬运,14分至50分;黄包车,20分至35分。[99]整个民国时期,大宗货物继续使用传统方式运输。例如非典型的1933年表明,旧式运输方式(12亿元)所占国民收入,是现代运输方面(4.3亿元)的三倍。 若有一个四通八达的铁路网,就能大大降低运输费用,并可以促进内地的开发。此外,经铁路运输货物,往往可以避免各地设卡抽取厘金或地方的过境税;而且修通一条铁路,会促使沿线度量衡制度和货币的统一。英属印度的例子说明,一个巨大的铁路网可以同一个落后的农业经济并存,仅靠扩展铁路里程的长度,并不能自动导致经济的发展。无论如何,民国时期的铁路里程长度、分布的不均衡和运营的效率,都是不够的。第二次世界大战结束时,包括满洲和台湾在内,中国共有干线和支线铁路24945公里。[100]民国各个时期修建的铁路,按习惯分期如下: 地图4 到1949年为止的铁路 中国的第一条铁路,是怡和洋行和其他外国人未经清政府许可修建的,从吴淞到上海,长15公里,1876年通车;因受到官方和地方的强烈反对,为清政府收买后拆毁。直到1894年至1895年中国为日本战败时为止,因受地方人士与官吏的反对,使铁路建设毫无进展。此后,一方面“自强派”使朝廷认识到,修建铁路作为朝廷反对外国进一步侵略的必要手段;另一方面中国暴露出的软弱,吸引了外国资本的投入,把对建设铁路投资,看成是外国的政治影响和经济渗透的手段。到1894年,中国仅铺轨364公里铁路。从1895年至1911年,是中国铁路建设的第一次高潮,共完成铁路建设9253公里,大部分是用外国贷款兴建的。在9253公里的总长度中,俄国修建横穿满洲的中东铁路[101],以及向南自哈尔滨至大连延长线的南满铁路,占去了2425公里。 在清朝的最后10年间,各地绅商进行私营修建铁路计划失败后,清政府实行铁路国有计划,由此导致清朝被推翻的直接原因。在袁世凯和军阀政权时期,一直到1927年,中国的铁路建设显然慢了下来。几条私营铁路的国有化也没有遇到强烈反对(这对清政府曾是致命的),大部分私人股份都兑换成了不兑现的政府债券。中国政府与外国债权人虽商定了新的贷款,重新谈判了一些1912年以前的贷款,但第一次世界大战使欧洲对中国铁路的投资停止了。当新的四国财团1920年集会时,北京政府与美国的愿望相反,拒绝与其进行交易。中国仅限于完成了北京至归绥的京绥线,以及粤汉铁路和陇海铁路的一部分,总里程为1700公里。在满洲,建成了与此里程相等的几条铁路,其中包括日本投资建设的几条南满铁路支线;张作霖也用京奉铁路的经济收益,投资修建了与日本人竞争的路线。中国在华北的建设和在满洲的新路线,一方面是出于对日本的战略考虑,另一方面也出于经济上的需要。 
在1928年至1937年间,中国在关内修建的铁路将近3400公里,包括完成全线的粤汉铁路、浙赣铁路和同蒲铁路;这些铁路的修建,主要不是靠外国借款。浙赣铁路主要由中国银行提供贷款,同蒲铁路是由山西省自筹税收集款。在满足军费需求和还本付息之外,南京政府能够从中得到用于经济复兴的资金就所剩无几了。在这同一时期,满洲建成了4500公里铁路,主要是日本在1931年后新建的,是作为伪满洲国发展工业的基地计划的一部分。在中日战争的巨大困难时期,中国在未沦陷区建成了1500公里铁路,对中国的经济和军事起了重要作用;日本人在中日战争时期也在满洲增建了许多路线。 在50年来修建的铁路中,有将近40%在满洲,有32%在关内的长江以北,22%在华南,4%在台湾。在人口稠密的华南,铁路里程相对来说是很小,这证明前现代精巧的帆船和舢板与现代的轮船和汽艇水路运输网的持久性,继续有效地同蒸汽机火车竞争。从土地面积和人口比例来看,满洲的情况远较中国其他任何地区为好,也反映满洲工业化程度较高。没有铁路穿过富饶的四川省,也没有铁路抵达西部的甘肃、新疆和西藏。中国幅员广大,与少得可怜的铁路里程太不相称;中国铁路发展还十分杂乱无章,路线的分布也常常是不经济的。从中国全境来看,一个更合乎需要的铁路系统,应当是以汉口为中心的辐射网。而中国实际的铁路系统,却是一个平行的铁路网,并且过分集中于华北和东部。在满洲曾发展过辐射与平行相结合的铁路网,但由于20年代中国与日本在东北的竞争,这种铁路网受到不经济的复线损害。 中国铁路系统的建设,曾涉及大量从英国、比利时、日本、德国、法国、美国和荷兰的借款;这个借款顺序,是按1898—1937年每个国家的铁路借款总额排列的。这些借款集中在清末民初(条件常常涉及外国对修建路线的实际控制),也反映了外国辛迪加为铁路特许权和借款合同而进行的竞争;同时也是这些国家在政治上和金融上的明争暗斗以及阴谋诡计。铁路债务的偿还来自路线的营业收入;从1925年到1935年,大多数铁路借款都拖欠未还。到1935年12月31日,未清偿的铁路债务总数,包括本利,合计为53827443英镑,或891920730元。[102]铁路债券的下跌,以陇海铁路为例,竟跌到票面价值的11%。 中国政府的铁路收益支付能力,仅够付给债券持有者的利息。在1916—1939年期间,平均每年营业的净收入,占轨道和设备成本的7.4%,而铁路借款的利率为5%—8%。虽然中国铁路营运效率明显低于南满铁路,但从经济效益上来看,政府的铁路收益还是可行的,给民国时期的民间带来了经济增长,并能产生为数不大的利润。在这20年中,铁路平均营业净收入的35%用在借款利息的支付,大部营业净收入——例如在1926年、1927年和1930—1934年,有50%以上移交给中国政府,用于一般的开支[103];1921年至1936年移交给政府的款项,相当于增加铁路设备支出的两倍。 中国政府铁路很少盈利的主要原因,是民国时期不断的国内纷争。互相混战的军阀,不仅征用铁路运送军队,甚至把客运和货运的收益用作维持其军队的军费。例如1912年至1925年间,京汉铁路的客运(按人英里计算)有21%是军运;1920年至1931年间,北宁铁路有17%的客运为军运。[104]除了战争直接破坏(这可能最小),路轨和车辆的维修完全被忽视。在这20多年中,铁道部门通常只能从几条支线中得到稳定的收入,而整个铁道系统则日益变得陈旧和效率低下。 在1912年至1947年期间,直至20年代中期,中国政府的铁路运输,客运和货运都是逐年增长(见表19)。 表19 中国政府铁路的客运和货运的指数,1912—1947年 续表 资料来源:严中平:《中国近代经济史统计资料选辑》,第207—208、217页。 北伐战争和南京政权的建立,一度影响了客货运输。但到了相对平静的30年代,铁路运输不仅有了恢复,而且超过已往的水平。1937年至1945年间,日本人占据了中国大部分铁路,国民政府被迫转移至内地。这些在此一时期的数字中也有反映。 中国政府铁路的营业收入,约有40%来自客运,其中相当大一部分是运兵;矿产占货运的一半。占据货运重要性第二位的是农产品。货运的一般形式,是把农产品和矿石从内地运到沿海的条约口岸,由条约口岸再把工业品运到内地。民国的前10年中,农产品运输的增加,正反映前述农业趋向商品作物产量的增长。特别是在满洲,华北也是如此,如表12和表13所示,铁路推进了农业产量的缓慢增长。如表19所显示的,世界经济的不景气对中国经济作物的影响,以及中日战争爆发前农业的复苏。 
关于公路的里程,1912年以前,中国不存在行驶机动车的道路。在1937年7月之前,中国完成了约11.6万公里,其中4万公里铺了路面。[105]这些公路的修建,大多数是在1928年以后。当年即修筑公路3.2万公里,均由全国经济委员会公路总局所承担;既是为了军事上的需要,也是为了商业上的需要服务。例如七省公路建设计划,由河南、湖北、安徽、江西、江苏、浙江、湖南七省合作,用公路系统把国民政府最具有实力的省份连成一体。公路的修筑虽然少了一些,并且又十分简陋,但在中国关内的分布,比铁路的分布还是合理一些。 中日战争促进了内地公路的兴修,其中包括著名的滇缅公路。但在1949年,如同1912年一样,中国内陆地区的货物运输,仍然主要使用传统的水陆运输工具,很少使用汽车或火车。例如,在1941年9月,在江苏、浙江、安徽三省,共有118292艘在汪伪政府的船民协会登记,共计有850705吨位,水手459178名。[106]帆船运输,还是长江下游、华中和华南短途大宗运输的主要手段。在上述地区,许多世纪以来,河流、湖泊和运河连接起来,形成一个广大复杂的运输网。与之相对照的,各港口之间的贸易,早在19世纪的90年代,就已基本上采用轮船运输——主要是外国人的船。但在几个条约口岸海关申报和结关的中国帆船,其总吨数自1912年至1922年大致依旧不变,只是到了20年代以后才急剧下降。[107]在20世纪的前数十年中,中国主要河流行驶的轮船运输在稳定增长,从登记的船只总吨数增加中可以得到证明。1000吨以下船只,从1913年的42577吨增至1933年的246988吨。但内河的帆船,在许多地方仍持续相当长的时间。例如长江在宜昌以上,帆船的总吨数,从19世纪90年代到1917年还略有增长,直到20世纪20年代才开始下降。南宁和梧州之间的西江,也是到了20世纪20年代,轮船才取代了帆船。[108] 运输部门也和别的部门一样,中国经济在20世纪前半期变化很小。这个很平常的事实却往往被掩盖起来,置于视线之外;而把不相称的注意力放在经济中很小的现代部门,既表现在官方的言行中,也表现在中国经济学家的著作中;既表现在拟写给外国人看的年鉴和报告中,也表现在非中国学者所指导对1949年以前中国经济的研究中——只有日本人在这个问题上,对中国有一个比较“现实主义”看法。南京政府放弃了对土地问题的解决,却主要从现代经济部门榨取收益,这等于建造空中楼阁。
Python
UTF-8
950
3.15625
3
[]
no_license
def file_compare(file1,file2): count='' try: with open(file1) as fl1,open(file2) as fl2: f1=fl1.readlines() f2=fl2.readlines() count=0 for i in range(len(f1)): list1=list() if f1[i]!=f2[i]: for i1 in range(len(f1[i])): print(i1) if f1[i][i1]!=f2[i][i1]: list1.append(i1+1) count+=1 if list1!=[]: dif[i+1]=list1 except OSError: print('文件错误') return count file1=input('请输入文件1:') file2=input('请输入文件2:') dif=dict() num=file_compare(file1,file2) if num==0: print('两个文件相同') elif num>0: print('两个文件共有%s处不同'%num) for i in dif: print('第%d行的第%s处不同'%(i,dif[i]))
SQL
UTF-8
1,706
4
4
[]
no_license
/* 3. Stored Procedure Write a stored procedure that adds a lineitem into an order that already exists (AddLineItemSP.sql). This stored procedure will receive three parameters:  an orderid, a partid and a quantity. Issue an INSERT to the ORDERITEMS table. When the INSERT is executed, the trigger on INSERT for the ORDERITEM table will be fired.  Since the value of the Detail column will be determined inside of the INSERT trigger, you will need to provide column names on the INSERT command for just the three columns that you have data for. Remember that when you write an INSERT where the values inserted do not include every column in the table, you need to include the column names with the INSERT. Exception handling must be included. If you attempt to INSERT a partid that does not exist, a system exception will be invoked based on the foreign key constraint. */ CREATE OR REPLACE PROCEDURE AddLineItemSP ( inpOrderID IN ORDERS.OrderID%TYPE, inpPartID IN INVENTORY.PartID%TYPE, inpQty IN INVENTORY.StockQty%TYPE) IS BEGIN DECLARE OrderQtyTooHigh EXCEPTION; PRAGMA EXCEPTION_INIT(OrderQtyTooHigh, -20001); v_code NUMBER; v_errm VARCHAR2(64); BEGIN --start process of adding rows INSERT INTO ORDERITEMS (Orderid, Partid, Qty) VALUES (inpOrderID, inpPartID, inpQty); COMMIT; DBMS_OUTPUT.PUT_LINE('Items added to order ' || inpOrderID); EXCEPTION WHEN OrderQtyTooHigh THEN ROLLBACK; DBMS_OUTPUT.PUT_LINE('Insufficient items in stock. Order not updated.'); --other errors: WHEN OTHERS THEN v_code := SQLCODE; v_errm := SUBSTR(SQLERRM, 1 , 64); DBMS_OUTPUT.PUT_LINE('Error code ' || v_code || ': ' || v_errm); END; END; /
PHP
UTF-8
782
2.609375
3
[]
no_license
<?php namespace Potogan\DoctrineBundle\Query\Functions\Mysql; use Doctrine\ORM\Query\AST\Functions\FunctionNode; use Doctrine\ORM\Query\Parser; use Doctrine\ORM\Query\Lexer; use Doctrine\ORM\Query\SqlWalker; /** * BitAndGroupingFunction ::= "GROUP_AND_OR" "(" ArithmeticPrimary ")" */ class BitAndGrouping extends FunctionNode { public $needle = null; public $haystack = null; public function parse(Parser $parser) { $parser->match(Lexer::T_IDENTIFIER); $parser->match(Lexer::T_OPEN_PARENTHESIS); $this->needle = $parser->ArithmeticPrimary(); $parser->match(Lexer::T_CLOSE_PARENTHESIS); } public function getSql(SqlWalker $sqlWalker) { return 'BIT_AND(' . $this->needle->dispatch($sqlWalker) . ')'; } }
TypeScript
UTF-8
3,823
2.734375
3
[ "MIT" ]
permissive
import { AgentClient } from "../src/AgentClient"; import { RestClient } from "../src/RestClient"; import { isValidSkillsWithPriorities } from "../src/Utils"; import { doesNotReject } from "assert"; import { SkillPriority } from "../src"; describe("AgentClient.ts", () => { let client: AgentClient; beforeEach(() => { // Cast to any if mocking and not fulfilling the static type let restClient: any = RestClient as jest.Mock client = new AgentClient("1", restClient) }); test("createAgentAndStation throws an error given an invalid password", async () => { expect.assertions(1); let skillsWithPriorities = [{ "skillNumber": 100, "skillPriority": 5 }] await expect( client.createAgentAndStation("agent1", "badpassword", skillsWithPriorities)).rejects.toEqual("invalid password") }) test("createAgentAndStation throws an error given an invalid username", async () => { expect.assertions(1); let skillsWithPriorities = [{ "skillNumber": 100, "skillPriority": 5 }] expect(client.createAgentAndStation("a", "Passw0rd@", skillsWithPriorities)).rejects.toEqual("invalid username") }) test("createAgentAndStation throws an error given an invalid skills", async () => { expect.assertions(1); let skillsWithPriorities = [] as SkillPriority[] expect(client.createAgentAndStation("agent1", "Passw0rd@", skillsWithPriorities)).rejects.toEqual("invalid skills") }) test("generateAvayaPassword should return last 6 characters", () => { let actual = client.generateAvayaPassword("agentLoginId") expect(actual).toEqual("oginId") }) test("generateAvayaPassword should return full string", () => { let actual = client.generateAvayaPassword("abc") expect(actual).toEqual("abc") }) test("generateSecurityCode should return last 4 chars", () => { let actual = client.generateSecurityCode("securityCode") expect(actual).toEqual("Code") }) test("generateSecurityCode should return all chars", () => { let actual = client.generateSecurityCode("abc") expect(actual).toEqual("abc") }) test("stationExtension: { toString: () => 
any }", () => { let stationExtension: { toString: () => any } = "hello world" expect(stationExtension.toString()).toEqual("hello world") }) test("redo less than max retries should return true", async () => { const retries = 7 const millis = 5 let count = 0 const callback = () => { count++ if (count < 3) { return Promise.resolve(false) } return Promise.resolve(true) } let result = await client.redo(callback, retries, millis) expect(result).toBeTruthy() }, 10000) test("redo greater than max retries should return false", async () => { const retries = 3 const millis = 5 let count = 0 const callback = () => { count++ console.log(`callback count=${count}`) if (count < 10) { console.log(`callback if count=${count}`) return Promise.resolve(false) } console.log(`callback outside if count=${count}`) return Promise.resolve(true) } let result = await client.redo(callback, retries, millis) expect(result).toBeFalsy() }) test("checkAgentPromise should return false", async () => { let result = await client.existsAgent(Promise.reject(true)) expect(result).toBeFalsy() }) test("checkAgentPromise should return true", async () => { let result = await client.existsAgent(Promise.resolve(true)) expect(result).toBeTruthy() }) })
Java
UTF-8
334
1.820313
2
[]
no_license
package br.com.jump.core.dao.pessoa.iface; import br.com.jump.model.enums.ETipoPessoa; import br.com.jump.model.pessoa.Cliente; import in.macor.core.dao.iface.IGenericDao; /** * Created by macorin on 05/08/14. */ public interface IClienteDao extends IGenericDao<Cliente, Long> { public Cliente findByPessoa(Long idPessoa); }
Markdown
UTF-8
823
2.53125
3
[ "Apache-2.0" ]
permissive
Sope ==== > Marathi, IPA: /sə/ /o/ /pə/ /e/ (Adjective: Achieved without great effort) **Sope** is set of utilities and library functions that helps with ETL development using **Apache Spark**. The project contains following sub-modules - *sope-spark* - *sope-etl* 1. **sope-spark**: This module contains library functions and a Scala internal **dsl** library that assists with writing **Spark SQL** ETL transformations in concise manner [More information](sope-spark/README.md) 2. **sope-etl**: This module contains a **YAML** based external transformer with easy to use ETL constructs. [More information](sope-etl/README.md) ##### Building the project: The project only supports Spark versions 2.x onwards. Use **mvn clean package** to build the project. Import the generated jars in your project.
C#
UTF-8
1,207
3.40625
3
[]
no_license
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace CompositeMenu { public class Waitress { private IMenu _pancakeHouseMenu; private IMenu _dinerMenu; public Waitress(IMenu pancakeHouseMenu, IMenu dinerMenu) { _pancakeHouseMenu = pancakeHouseMenu; _dinerMenu = dinerMenu; } public void PrintMenu() { IEnumerator<MenuItem> pancakeIterator = _pancakeHouseMenu.GetEnumerator(); IEnumerator<MenuItem> dinerIterator = _dinerMenu.GetEnumerator(); Console.WriteLine("MENU\n-----\nBREAKFAST"); PrintMenu(pancakeIterator); Console.WriteLine("\nLUNCH"); PrintMenu(dinerIterator); } private void PrintMenu(IEnumerator<MenuItem> enumerator) { while (enumerator.MoveNext()) { MenuItem menuItem = enumerator.Current; Console.Write($"{menuItem.Name}, "); Console.Write($"{menuItem.Price} -- "); Console.WriteLine(menuItem.Description); } } } }
JavaScript
UTF-8
2,478
2.640625
3
[]
no_license
var fileSystem = require('fs'); var Contact = require('../models/contact').init(); var contacts = [ { _id: guid(), name: 'Tony Stark', email: 'iamironman@email.com' }, { _id: guid(), name: 'Steve Rogers', email: 'captainamerica@email.com' }, { _id: guid(), name: 'Natasha Romanoff', email: 'blackwidow@email.com' } ]; module.exports.list = function(req, res){ var promise = Contact.find().exec(); promise.then(function(data){ res.json(data); },function(error){ res.status(500).json(error); }); }; module.exports.save = function(req, res){ var contact = req.body; Contact.create(contact).then(function(data){ res.status(201).json(data); },function(error){ res.status(500).json(error); }); } module.exports.getById = function(req, res){ } module.exports.delete = function(req, res){ } // module.exports.list = function(req, res){ // res.json(contacts); // }; // // module.exports.save = function(req, res){ // var contact = req.body; // contact['_id'] = guid(); // contacts.push(contact); // console.log(contacts); // res.sendStatus(200).end(); // }; // // module.exports.getById = function(req, res){ // var id = req.params.id; // console.log('Id: '+id); // // var contact = contacts.filter(function(contact){ // return contact._id == id; // })[0]; // // contact ? res.json(contact) : res.status(404).send('Contato não encontrado'); // }; // // module.exports.delete = function(req, res){ // var id = req.params.id; // console.log('removendo: '+id); // // contacts = contacts.filter(function(contact){ // return contact._id != id; // }); // // res.sendStatus(204).end(); // }; // function guid() { function _p8(s) { var p = (Math.random().toString(16)+"000000000").substr(2,8); return s ? 
"-" + p.substr(0,4) + "-" + p.substr(4,4) : p ; } return _p8() + _p8(true) + _p8(true) + _p8(); } var writeToFile = function(){ var seen = []; var text = JSON.stringify(req, function(key, val) { if (val != null && typeof val == "object") { if (seen.indexOf(val) >= 0) { return; } seen.push(val); } return val; }); fileSystem.writeFile('./log.json', text, function(err){ if(err) return console.log("==> error"); console.log('==> saved'); }); }
Python
UTF-8
1,820
2.578125
3
[]
no_license
# ライブラリのインポート import sys # import heapq,copy import pprint as pp # from collections import deque # pypy3用 # import pypyjit # 再帰制御解放 # pypyjit.set_param('max_unroll_recursion=-1') # sys.setrecursionlimit(10**6) from logging import getLogger, StreamHandler, DEBUG # 入力のマクロ def II(): return int(sys.stdin.readline()) def MI(): return map(int, sys.stdin.readline().split()) def LI(): return list(map(int, sys.stdin.readline().split())) def LLI(rows_number): return [LI() for _ in range(rows_number)] # デバッグ出力の作成 logger = getLogger(__name__) handler = StreamHandler() handler.setLevel(DEBUG) logger.setLevel(DEBUG) logger.addHandler(handler) logger.propagate = False # クラス+メソッドを一関数 xdebug=logger.debug ppp=pp.pprint # Const MAXSIZE = ( 1 << 59 ) -1 MINSIZE = -( 1 << 59) + 1 N,A,B,C = MI() BanbuList=list() for _ in range(0,N): m = II() BanbuList.append(m) def dfs(n,a,b,c): if n ==N : minR = min(a,b,c) if minR == 0: # この場合どう増減魔法を用いても達成は出来ない return MAXSIZE # 達成する場合の増減魔法の量 # 最後の10*3は最初の1本目に0->X に付ける際だけ合成魔法は # 要らないのでこれを削る magP = abs(a-A)+abs(b-B)+abs(c-C)-(10*3) return magP # n番目の竹をAに使う場合 resA = dfs(n+1,a+BanbuList[n],b,c)+10 # n番目の竹をBに使う場合 resB = dfs(n+1,a,b+BanbuList[n],c)+10 # n番目の竹をCに使う場合 resC = dfs(n+1,a,b,c+BanbuList[n])+10 # n番目の竹は使わない場合 res0 = dfs(n+1,a,b,c) return min(resA,resB,resC,res0) print(dfs(0,0,0,0))
Java
UHC
1,477
3.90625
4
[]
no_license
package java02_Control_01if.Exam; import java.util.Scanner; /* * ȭ ϴ ˷ ֱ * ⺻ ڵ忡  ȭϴ ̴. * * Ǯ ȭ ó * 1. Է ޴´. * 2. ù ° ° ؼ ù ° ũ ù ° Ѵ. * 3. ù ° ° ؼ ° ũ ° Ѵ. * 4. ù ° ° ؼ ù ° Ѵ. * * 3 4 ش. * 1. Է ޴´. * 2. ù ° ° ؼ ù ° ũ ù ° Ѵ. * 3. ù ° ° ؼ ° ũų ° Ѵ. */ public class Ex06Quiz02 { public static void main(String[] args) { // TODO Auto-generated method stub Scanner in = new Scanner(System.in); System.out.println(" Էϼ"); int num1 = in.nextInt(); System.out.println(" Էϼ"); int num2 = in.nextInt(); if(num1>num2) System.out.println("ū : "+num1); if(num1<=num2) System.out.println("ū : "+num2); //if(num1==num2) System.out.println("ū : "+num1); } }
Python
UTF-8
361
3.578125
4
[]
no_license
class Queue: def __init__(self): self.queue = [] def enqueue(self, item): self.queue.append(item) def dequeue(self): x = None try: x = self.queue[0] del self.queue[0] except IndexError: pass return x def size(self): return len(self.queue)
SQL
UTF-8
97
3.0625
3
[]
no_license
SELECT COUNT(*) AS "Number of movies ending with tion" FROM movies WHERE title LIKE '%tion';
Java
UTF-8
280
1.859375
2
[]
no_license
package test.payload; import lombok.Data; import javax.validation.constraints.NotBlank; @Data public class IdTmpReq { @NotBlank public String id; public String getId() { return id; } public void setId(String id) { this.id = id; } }
Java
UTF-8
905
2.09375
2
[ "Apache-2.0" ]
permissive
package com.alibaba.dubbo.rpc; import com.alibaba.dubbo.common.URL; import org.apache.dubbo.rpc.ProtocolServer; import java.util.Collections; import java.util.List; @Deprecated public interface Protocol extends org.apache.dubbo.rpc.Protocol { <T> Exporter<T> export(Invoker<T> invoker) throws RpcException; <T> Invoker<T> refer(Class<T> aClass, URL url) throws RpcException; @Override default <T> org.apache.dubbo.rpc.Exporter<T> export(org.apache.dubbo.rpc.Invoker<T> invoker) throws RpcException { return this.export(new Invoker.CompatibleInvoker<>(invoker)); } @Override default <T> org.apache.dubbo.rpc.Invoker<T> refer(Class<T> aClass, org.apache.dubbo.common.URL url) throws RpcException { return this.refer(aClass, new URL(url)); } @Override default List<ProtocolServer> getServers() { return Collections.emptyList(); } }
C#
UTF-8
5,014
2.609375
3
[]
no_license
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; using System.Speech.Synthesis; using System.Threading; namespace Lee_y_Escribe { public partial class FVocalA : Form { SpeechSynthesizer voz = new SpeechSynthesizer(); int Time1 = 0; public FVocalA() { InitializeComponent(); Velocidad.Visible = false; PAnillo.Visible = false; PAbeja.Visible = false; PAvion.Visible = false; BRepA.Enabled = false; } private void Narrador(object texto) { voz.SelectVoiceByHints(VoiceGender.Female); voz.Rate = Velocidad.Value; voz.SetOutputToDefaultAudioDevice(); voz.Speak(texto.ToString()); } private void Tiempo1_Tick(object sender, EventArgs e) { Time1 += 1; if (Time1 == 1) { Thread tarea = new Thread(new ParameterizedThreadStart(Narrador)); tarea.Start("Maravilloso, " + System.Environment.NewLine + "Aquí está la primera vocal, " + System.Environment.NewLine + "esta se llama, a, si tedas cuenta," + System.Environment.NewLine + " tiene una forma redonda y una pequeña raya a un lado, " + System.Environment.NewLine + "para pronunciar el nombre de esta letra lo hacemos con la boca abierta grande y decimos, a, " + System.Environment.NewLine + "al hablar pronunciamos muchas palabras que empiezan con la letra, a, como por ejemplo" + System.Environment.NewLine + "Abeja" + System.Environment.NewLine + "Avión" + System.Environment.NewLine + "Anillo." 
+ System.Environment.NewLine + "Hay muchas palabras que inician con la letra, a, puedes practicar junto a un adulto y veras que no es dificil"); } if (Time1 == 29) { PAbeja.Visible = true; } if (Time1 == 31) { PAvion.Visible = true; } if (Time1 == 33) { PAnillo.Visible = true; BRepA.Enabled = true; Tiempo1.Stop(); } } private void BRepA_Click(object sender, EventArgs e) { PAnillo.Visible = false; PAbeja.Visible = false; PAvion.Visible = false; Time1 = -1; Tiempo1.Start(); } private void BAtrasA_Click_1(object sender, EventArgs e) { voz.Pause(); this.Close(); } private void PAbeja_Click(object sender, EventArgs e) { Thread Abe = new Thread(new ParameterizedThreadStart(Narrador)); Abe.Start("Abeja "); } private void PAvion_Click(object sender, EventArgs e) { Thread Plane = new Thread(new ParameterizedThreadStart(Narrador)); Plane.Start("Avión "); } private void PAnillo_Click(object sender, EventArgs e) { Thread Anil = new Thread(new ParameterizedThreadStart(Narrador)); Anil.Start("Anillo "); } private void pictureBox1_Click(object sender, EventArgs e) { Thread A = new Thread(new ParameterizedThreadStart(Narrador)); A.Start("A "); } private void PAbeja_MouseHover(object sender, EventArgs e) { PAbeja.Size = new Size(167, 207); } private void PAbeja_MouseLeave(object sender, EventArgs e) { PAbeja.Size = new Size(157, 197); } private void PAvion_MouseHover(object sender, EventArgs e) { PAvion.Size = new Size(214, 131); } private void PAvion_MouseLeave(object sender, EventArgs e) { PAvion.Size = new Size(204, 121); } private void PAnillo_MouseHover(object sender, EventArgs e) { PAnillo.Size = new Size(100, 140); } private void PAnillo_MouseLeave(object sender, EventArgs e) { PAnillo.Size = new Size(90, 130); } private void pictureBox1_MouseHover(object sender, EventArgs e) { pictureBox1.Size = new Size(355, 330); } private void pictureBox1_MouseLeave(object sender, EventArgs e) { pictureBox1.Size = new Size(345, 320); } } }
Go
UTF-8
234
2.875
3
[]
no_license
package main import ( "fmt" "net/http" ) func sayhelloName(w http.ResponseWriter, r *http.Request){ fmt.Fprintf(w, "Go Web Hello World!") } func main() { http.HandleFunc("/", sayhelloName) http.ListenAndServe(":8083", nil) }
C
UTF-8
1,283
3.59375
4
[]
no_license
#include "msort.h" #include <stdlib.h> void merge(void **destination, void** leftArr, void** rightArr, int leftLength, int rightLength, Compare* compare){ int i = 0,j = 0,k = 0; for(; i < leftLength && j < rightLength;){ if(compare(leftArr[i], rightArr[j]) < 0){ destination[k] = leftArr[i]; i++; } else{ destination[k] = rightArr[j]; j++; } k++; } for(;j < rightLength; j++,k++) destination[k] = rightArr[j]; for(;i < leftLength;i++,k++) destination[k] = leftArr[i]; } void sort(void** base, int noOfElements, Compare* comparator){ int mid = noOfElements/2, leftLength = mid, rightLength = noOfElements - mid, i; void** leftArr = calloc(leftLength, sizeof(void*)); void** rightArr = calloc(rightLength, sizeof(void*)); if(noOfElements <= 1) return; for(i = 0; i < leftLength ;i++) leftArr[i] = base[i]; for(i = mid; i < noOfElements ;i++) rightArr[i-mid] = base[i]; sort(leftArr, leftLength, comparator); sort(rightArr, rightLength, comparator); merge(base, leftArr, rightArr, leftLength, rightLength, comparator); free(leftArr); free(rightArr); }
C
UTF-8
361
3.515625
4
[]
no_license
#include "lists.h" /** * sum_dlistint - finds sum of all node data values * @head: pointer to head of list * * Return: sum of node values */ int sum_dlistint(dlistint_t *head) { dlistint_t *temp; int sum = 0; if (!head) return (sum); temp = head; while (temp->next) { sum += temp->n; temp = temp->next; } sum += temp->n; return (sum); }
C++
UTF-8
2,989
2.546875
3
[]
no_license
#include "common.h"
#include "detectLoop.h"

#ifndef SLIMINST_H
#define SLIMINST_H

using namespace llvm;
using namespace std;

// Gathers per-module metadata (numeric IDs for functions, basic blocks and
// loops) that the SlimInst instrumentation pass below consumes.
class ModuleMeta{
public:
	ModuleMeta(Module &M):__M(M), __maxF(0), __maxB(0), __pFofMaxB(0), __loopBBLs(new unordered_set<BasicBlock const*>()), __numBBLs(0), __numLoops(0), __numType1Loops(0), __numType2Loops(0), __numFuncs(0){
		__initEverything();
	};

	// Output the meta information.
	// Function (or BBL or Loop) ID is a numerical ID.
	// For a BBL without a name, set one.
	// Format written out:
	//   Function ID : Function Name
	//   Function ID : {<BBL ID : BBL Name>}
	//   Function ID : {<Loop ID : Loop Type>}
	void outputModuleMetaToFile();

	// Initialization entry point called from the constructor.
	void __initEverything();

	// part 1: the module being analyzed.
	Module &__M;

	// part 2: maxima across the module.
	unsigned long __maxF;
	unsigned long __maxB;
	Function const * __pFofMaxB;//a pointer to the function that has the largest number of BBLs
	void __initMaxFandB();

	// part 3: ID assignment scheme.
	// 0~9 is reserved
	// 10~10+__maxF-1 is function IDs
	// 10+__maxF~10+__maxF+__maxB-1 is BBLIDs
	// the BBID of the start of the loop is the loop ID
	// The input Module could have a BBL without a name; if so, set the name as FuncName_BBL_#
	map<string, int> FunctionName2ID;
	map<string, map<int, string>> BBLID2Name;
	map<string, map<int, BasicBlock*>> BBLID2Addr;
	map<string, map<int, int>> LoopID2Type;
	void __initFunctionName2ID();
	void __initBBLID2Name();

	// Basic blocks that belong to loops (shared with whoever needs the set).
	shared_ptr< unordered_set<BasicBlock const*> > __loopBBLs;

	// info: simple counters reported by displayStatInfo().
	unsigned int __numBBLs;
	unsigned int __numLoops;
	unsigned int __numType1Loops;
	unsigned int __numType2Loops;
	unsigned int __numFuncs;
	void displayStatInfo();
};

// Holds LLVM type handles and Function/GlobalVariable handles for the module
// (presumably the runtime-support routines the pass inserts calls to —
// confirm against the constructor in the .cpp).
class ModuleMembers{
public:
	ModuleMembers(Module& M);
	Module& __M;
	Type* voidTy;
	Type* shortTy;
	Type* intTy;
	Type* longTy;
	PointerType* ptr16Ty;
	PointerType* ptr32Ty;
	Function* log;
	Function* logCounter;
	Function* init;
	Function* fork32;
	Function* fork64;
	Function* fkill;
	Function* ffflush;
	Function* eval;
	GlobalVariable* gvar_addr;
private:
	// Representation invariant check.
	bool checkRep();
};

// The instrumentation pass itself; requires non-null ModuleMeta and
// ModuleMembers computed beforehand.
class SlimInst{
public:
	SlimInst(Module &M, ModuleMeta const * pMeta, ModuleMembers const * pMbr):__M(M), __pMeta(pMeta), __pMbr(pMbr){
		assert(__pMeta);
		assert(__pMbr);
	};

	// Output max number of functions and basic blocks.
	void displayNumFuncAndBBL();

	// Run the instrumentation over the whole module.
	bool run();

private:
	Module &__M;
	ModuleMeta const* __pMeta;
	ModuleMembers const* __pMbr;

	// Insert instructions into the entry block,
	// insert instructions into non-loop blocks,
	// insert basic blocks before and after loop blocks.
	void __instFunc(Function * F);
	void __instFuncMin(Function * F);
	void __instFuncMax(Function * F);

	// Per-construct instrumentation helpers.
	void __instLogBBL(BasicBlock * BBL, unsigned short BBID);
	void __instType1LoopBBL(Function& F, BasicBlock * BBL, unsigned short loopID);
	void __instMainOrStartFuncEntryBBL(Function& F);
	void __instCallInst(CallInst* callInst,Instruction* next);
};

#endif
Java
UTF-8
355
2.359375
2
[]
no_license
package ro.jademy.carrental.cars;

import ro.jademy.carrental.cars.parts.Engine;

import java.math.BigDecimal;

/**
 * Concrete {@code Car} for the Dacia make: the make name is fixed to
 * "Dacia" and every other attribute is supplied by the caller.
 */
public class Dacia extends Car {

    public Dacia(String model, Integer year, Engine engine, CarDetails details, BigDecimal basePrice, boolean isRented) {
        // Delegate to Car with the make hard-coded.
        super("Dacia", model, year, engine, details, basePrice, isRented);
    }
}
Java
UTF-8
2,703
2.1875
2
[]
no_license
package com.itheima.health.controller;

import com.alibaba.dubbo.config.annotation.Reference;
import com.itheima.health.constant.MessageConstant;
import com.itheima.health.utils.POIUtils;
import com.itheima.health.entity.Result;
import com.itheima.health.pojo.OrderSetting;
import com.itheima.health.service.OrderSettingService;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * REST endpoints for managing reservation (order) settings.
 * All business logic is delegated to the remote OrderSettingService.
 */
@RestController
@RequestMapping("/orderSetting")
public class OrderSettingControlle {

    @Reference
    private OrderSettingService orderSettingService;

    // Bulk-import reservation settings from an uploaded Excel file.
    // Each row is expected to hold [date string, reservation count].
    @RequestMapping("/upload")
    public Result upload(MultipartFile excelFile){
        try {
            List<String[]> list =POIUtils.readExcel(excelFile);
            if (list!=null&&list.size()>0) {
                List<OrderSetting> orderSettingList=new ArrayList<>();
                for (String[] strings : list) {
                    // NOTE(review): new Date(String) is deprecated and
                    // locale-dependent — verify the sheet's date format.
                    OrderSetting orderSetting=new OrderSetting(new Date(strings[0]),Integer.parseInt(strings[1]));
                    orderSettingList.add(orderSetting);
                }
                orderSettingService.add(orderSettingList);
            }
        } catch (Exception e) {
            e.printStackTrace();
            return new Result(false, MessageConstant.IMPORT_ORDERSETTING_FAIL);
        }
        return new Result(true, MessageConstant.IMPORT_ORDERSETTING_SUCCESS);
    }

    // Query the reservation situation for every day of the given month.
    @RequestMapping("/reservationByMonth")
    public Result reservationByMonth(@RequestParam String date){
        try {
            // Per-day reservation data for the month identified by `date`.
            List<Map>list= orderSettingService.reservationByMonth(date);
            return new Result(true, MessageConstant.GET_ORDERSETTING_SUCCESS,list);
        } catch (Exception e) {
            e.printStackTrace();
            return new Result(false, MessageConstant.GET_ORDERSETTING_FAIL);
        }
    }

    // Save/update a single day's reservation setting.
    @RequestMapping("/reservation")
    public Result reservation(@RequestBody OrderSetting orderSetting){
        try {
            orderSettingService.reservation(orderSetting);
        } catch (Exception e) {
            e.printStackTrace();
            return new Result(false, MessageConstant.IMPORT_ORDERSETTING_FAIL);
        }
        return new Result(true, MessageConstant.IMPORT_ORDERSETTING_SUCCESS);
    }
}
C#
UTF-8
1,241
2.625
3
[]
no_license
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Every <see cref="spawnRate"/> seconds, randomly activates either the
/// left or the right arrow spawner and deactivates the other one.
/// </summary>
public class RandomArrowSpawner : MonoBehaviour
{
    // Spawner objects toggled on/off; exactly one is active after each roll.
    public GameObject arrowRightSpawner;
    public GameObject arrowLeftSpawner;

    // Seconds between consecutive spawner switches.
    public float spawnRate = 2f;

    // Absolute time (Time.time) at which the next switch is due.
    float nextSpawn = 0.0f;

    // Removed from the original: empty Start(), and the unused fields
    // whereToSpawnW and score. whatToSpawn is now a local in timer().

    void Update()
    {
        timer();
    }

    // Once the cooldown elapses, flips a coin and enables the matching spawner.
    void timer()
    {
        if (Time.time > nextSpawn)
        {
            // Integer Random.Range excludes the upper bound: result is 1 or 2.
            int whatToSpawn = Random.Range(1, 3);
            switch (whatToSpawn)
            {
                case 1:
                    arrowRightSpawner.gameObject.SetActive(true);
                    arrowLeftSpawner.gameObject.SetActive(false);
                    break;
                case 2:
                    arrowLeftSpawner.gameObject.SetActive(true);
                    arrowRightSpawner.gameObject.SetActive(false);
                    break;
            }
            nextSpawn = Time.time + spawnRate;
        }
    }
}
C++
UTF-8
825
2.90625
3
[]
no_license
#include<iostream>
#include<cctype>
#include<string>
using namespace std;

// PAT "radiation time" decoder: the day comes from the first common
// capital A-G in strings 1/2, the hour from the next common [0-9A-N]
// character, and the minute from the index of the first common letter
// in strings 3/4. Output format: "DAY HH:MM" (no trailing newline).
int main(){
	static const string days[7] = {"MON","TUE","WED","THU","FRI","SAT","SUN"};
	string a, b, c, d;
	cin >> a >> b >> c >> d;

	int n1 = a.length() < b.length() ? a.length() : b.length();
	int n2 = c.length() < d.length() ? c.length() : d.length();

	// First common character in [A,G]: selects the weekday.
	int pos = 0;
	char dayCh = 0;
	for (; pos < n1; ++pos) {
		if (a[pos] == b[pos] && a[pos] >= 'A' && a[pos] <= 'G') {
			dayCh = a[pos];
			break;
		}
	}

	// Next common character that is a digit or in [A,N]: selects the hour
	// (digits map to 0-9, letters A-N map to 10-23).
	char hourCh = 0;
	for (++pos; pos < n1; ++pos) {
		char ch = a[pos];
		if (ch == b[pos] && (isdigit(ch) || (ch >= 'A' && ch <= 'N'))) {
			hourCh = ch;
			break;
		}
	}

	// Index of the first common alphabetic character: the minute.
	int minute = 0;
	for (int i = 0; i < n2; ++i) {
		if (c[i] == d[i] && isalpha(c[i])) {
			minute = i;
			break;
		}
	}

	int hour = isalpha(hourCh) ? hourCh - 'A' + 10 : hourCh - '0';
	cout << days[dayCh - 'A'] << ' ';
	printf("%02d:%02d", hour, minute);
	return 0;
}
Markdown
UTF-8
646
2.59375
3
[]
no_license
There are 4 different types of NoSQL databases: 1. Key-Value Store - Has a big hash table of keys and values (examples include Riak, Amazon S3) 2. Document-based Store - Stores documents made up of tagged elements (CouchDB, MongoDB) 3. Column-based Store - Each storage block contains data from only one column (HBase, Cassandra) 4. Graph-based - A network database that uses edges and nodes to represent and store data (Neo4J) Source: 3pillarglobal.com The following have at least one Python adapter: Amazon S3, MongoDB, Cassandra, Neo4J P.S.: It is quite likely that databases not mentioned in the above list also have Python adapters.
Python
UTF-8
1,184
3.359375
3
[]
no_license
# MNIST(Modified National Institude of Standards and Technology) # - 손으로 직접 쓴 숫자(필기체 숫자)들로 이루어진 데이터 셋 # - 0 ~ 9까지의 숫자 이미지로 구성되며, 60,000개의 트레이닝 데이터와 # 10,000개의 테스트 데이터로 이루어져 있음. # - 28x28 size import sys, os sys.path.append(os.pardir) # 부모 디렉토리의 파일을 가져 올 수 있도록 설정. import numpy as np from dataset.mnist import load_mnist from PIL import Image # pip install image def img_show(img): # 데이터를 이미지로 볼 수 있도록 해준다. pil_img = Image.fromarray(np.uint8(img)) # 정수를 담을 8비트라는 의미. uint8 양수 값만 담을 수 있는 한 바이트의 자료형 pil_img.show() if __name__ == '__main__': (x_train, t_train),(x_test, t_test) = load_mnist() # 다운 받은 파일을 가져온다 # (훈련용 이미지, 레이블 ) (테스트 이미지,레이블) img = x_train[10] label = t_train[10] print(label) # 3 print(img.shape) # (784, ) img = img.reshape(28, 28) # 형상을 원래 이미지의 크기로 변형 print(img.shape) img_show(img)
PHP
UTF-8
4,962
2.859375
3
[]
no_license
<?php
/**
 * Blog Helper class file.
 *
 * Various methods for use in the blog app.
 *
 * Copyright (c) 2009 Carl Sutton ( dogmatic69 )
 *
 * Licensed under The MIT License
 * Redistributions of files must retain the above copyright notice.
 *
 * @filesource
 * @copyright Copyright (c) 2009 Carl Sutton ( dogmatic69 )
 * @link http://infinitas-cms.org
 * @package blog
 * @subpackage blog.views.helpers.blog
 * @license http://www.opensource.org/licenses/mit-license.php The MIT License
 */
class BlogHelper extends AppHelper {
	// Helpers this helper depends on.
	var $helpers = array(
		// cake helpers
		'Html', 'Form', 'Text', 'Time',
		// core helpers
		'Libs.Image', 'Libs.Design', 'Libs.Wysiwyg', 'Events.Event'
	);

	/**
	 * Date format from {@see TimeHelper}
	 */
	var $dateFormat = 'niceShort';

	/**
	 * The layout style.
	 */
	var $layout = 'list';

	/**
	 * Internal list of errors.
	 */
	var $errors = array();

	/**
	 * Internal use of the post.
	 */
	var $data = array();

	var $showIntro = true;

	/**
	 * Generate metadata.
	 *
	 * Takes an array of posts (find('all') or find('first') shape) and echoes
	 * meta tags: keywords built from the post's tags, and a description built
	 * from shuffled excerpts of the body around each tag.
	 *
	 * @param array $posts from a ->find() call
	 * @return bool false when no data passed
	 */
	function metaData($posts = array()) {
		// Normalize a find('first') result into the find('all') list shape.
		if (!isset($posts[0])) {
			if (!isset($posts)) {
				return false;
			}
			$posts = array($posts);
		}

		foreach ($posts as $post) {
			$tags = Set::extract('/Tag/name', $post);
			$keywords = implode(',', $tags);

			$description = array();
			foreach ($tags as $tag) {
				$description[] = $this->Text->excerpt($post['Post']['body'], $tag, 50);
			}
			// Randomize excerpt order so the description varies per render.
			shuffle($description);

			echo $this->Html->meta('keywords', $keywords);
			echo $this->Html->meta('description', substr(str_replace('...', '', implode(' ', $description)), 0, 255));
		}
		return true;
	}

	/**
	 * Normalize a URL.
	 *
	 * Currently a stub that returns the input unchanged.
	 * Intended behavior (not yet implemented):
	 *  - if it has http(s):// just return
	 *  - if only www. add http:// and return
	 *  - if only site.com add http://www. and return
	 *
	 * @param string $url the URL to normalize
	 * @return string the (currently unmodified) URL
	 */
	function formatUrl($url) {
		return $url;
	}

	/**
	 * Wrap occurrences of the given tags in <b> elements.
	 *
	 * Similar in spirit to CakePHP's Text::autoLinkUrls(), but for bolding
	 * keywords within post text.
	 *
	 * @param string $text the text to highlight in
	 * @param array $tags list of keywords to highlight; null returns $text unchanged
	 * @return string text with each tag occurrence wrapped in <b></b>
	 */
	function highlightTags($text, $tags = null) {
		if (!$tags) {
			return $text;
		}

		$pattern = '#(' . implode('|', $tags) . ')#';
		// create_function() was deprecated in PHP 7.2 and removed in PHP 8;
		// an anonymous function produces the identical replacement.
		return preg_replace_callback(
			$pattern,
			function ($matches) {
				return "<b>" . $matches[0] . "</b>";
			},
			$text
		);
	}

	/**
	 * Create pagination for linked posts.
	 *
	 * Generates a list of links of all posts that are joined together. If both
	 * the parent and child posts are empty the post has no linked posts, so an
	 * empty string is returned.
	 *
	 * @param mixed $post the data from ->read()
	 * @return string the links, or '' for non-linked posts
	 */
	function pagination($post) {
		if (empty($post['ParentPost']['id']) && empty($post['ChildPost'])) {
			return '';
		}

		$this->currentCategory = $post['Category']['slug'];

		$out = '<ul>';
		$post['Post']['plugin'] = 'blog';
		$post['Post']['controller'] = 'posts';
		$post['Post']['action'] = 'view';
		$eventData = $this->Event->trigger('blog.slugUrl', array('type' => 'posts', 'data' => $post));

		if (empty($post['ParentPost']['id'])) {
			// This post is the head of the chain: link it, then its children.
			$out .= '<li>';
			$out .= $this->Html->link($post['Post']['title'], current($eventData['slugUrl']));
			$out .= '</li>';

			foreach ($post['ChildPost'] as $child) {
				$child = array_merge($post['Post'], $child);
				$eventData = $this->Event->trigger('blog.slugUrl', array('type' => 'posts', 'data' => $child));
				$out .= '<li>';
				$out .= $this->Html->link($child['title'], current($eventData['slugUrl']));
				$out .= '</li>';
			}
		} else {
			// This post is a child: link the parent first, then its children.
			$post['Post'] = array_merge($post['Post'], $post['ParentPost']);
			$eventData = $this->Event->trigger('blog.slugUrl', array('type' => 'posts', 'data' => $post));
			$out .= '<li>';
			$out .= $this->Html->link($post['ParentPost']['title'], current($eventData['slugUrl']));
			$out .= '</li>';

			foreach ($post['ParentPost']['ChildPost'] as $child) {
				$child = array_merge($post['Post'], $child['Post']);
				$eventData = $this->Event->trigger('blog.slugUrl', array('type' => 'posts', 'data' => $child));
				$out .= '<li>';
				$out .= $this->Html->link($child['title'], current($eventData['slugUrl']));
				$out .= '</li>';
			}
		}
		$out .= '</ul>';

		return $out;
	}
}
Python
UTF-8
682
3.3125
3
[]
no_license
# Name: Haira Aguilar
# Date: 9/13/19
# Program Name: ping-log.py
# Run Python IDLE
# Ctrl + C to stop
import platform
import subprocess
import sys
import time


def ping():
    """Ping `server` forever, logging raw ping output to `pingStat`
    and printing an UP/DOWN status line after each probe."""
    # Without a count flag, `ping` never returns on Unix-like systems,
    # so the loop would block forever on its first iteration there.
    # Windows uses -n, everything else uses -c.
    count_flag = "-n" if platform.system().lower() == "windows" else "-c"
    while True:
        stat = subprocess.call(["ping", count_flag, "1", server],
                               stdout=pingStat)
        print()
        # Exit status 0 means the host answered.
        if stat == 0:
            print("SERVER STATUS: ", server, " is UP!")
        else:
            print("SERVER STATUS: ", server, " is DOWN!")
        # Adjust the polling interval (seconds) here
        time.sleep(5)


# Input the ip address that you'd like to ping
server = input("Enter ip address: ")

# Outputs the status results to a file
with open("ping-log.txt", "w") as pingStat:
    # NOTE(review): this redirects ALL of the script's prints into the log
    # file as well, so nothing appears on screen after this point — confirm
    # that is the intended behavior.
    sys.stdout = pingStat
    ping()
Python
UTF-8
2,141
2.640625
3
[]
no_license
import numpy as np
import os
import cv2
import pickle
import random
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
from sklearn.model_selection import train_test_split

DATADIR = ".\\dataset-original"
CATEGORIES = ["cardboard", "glass", "metal", "paper", "plastic", "trash"]
IMG_SIZE = 227

training_data = []

# One-hot encode the category names: integer-encode first, then expand each
# integer into a 6-element indicator row (row i corresponds to CATEGORIES[i]).
label_encoder = LabelEncoder()
onehot_encoder = OneHotEncoder(sparse=False)
integer_encoded = label_encoder.fit_transform(CATEGORIES)
integer_encoded = integer_encoded.reshape(len(integer_encoded), 1)
onehot_encoded = onehot_encoder.fit_transform(integer_encoded)


def create_training_data():
    """Load every image under DATADIR/<category>, resize it to
    IMG_SIZE x IMG_SIZE (BGR), and append [image, one-hot label]
    to the module-level training_data list."""
    for category in CATEGORIES:
        path = os.path.join(DATADIR, category)
        class_num = CATEGORIES.index(category)
        for img in os.listdir(path):
            try:
                img_array = cv2.imread(os.path.join(path, img), cv2.IMREAD_COLOR)
                new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))
                training_data.append([new_array, onehot_encoded[class_num]])
                print(len(training_data))
            except Exception:
                # Best effort: skip unreadable/corrupt images rather than abort.
                pass


def _dump_pickle(obj, filename):
    """Serialize obj to filename, closing the file even on error.
    Replaces the four copy-pasted open/dump/close triples of the original."""
    with open(filename, "wb") as pickle_out:
        pickle.dump(obj, pickle_out)


create_training_data()
random.shuffle(training_data)

x = []
y = []
for feature, label in training_data:
    x.append(feature)
    y.append(label)
print(len(x))

x = np.array(x).reshape(-1, IMG_SIZE, IMG_SIZE, 3)
y = np.array(y).reshape(-1, 6)

_dump_pickle(x, "X.pickle")
_dump_pickle(y, "y.pickle")

# Stratified 80/20 split so class proportions match between train and test.
x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.2, random_state=0, stratify=y)

_dump_pickle(x_train, "train_x.pickle")
_dump_pickle(x_test, "test_x.pickle")
_dump_pickle(y_train, "train_y.pickle")
_dump_pickle(y_test, "test_y.pickle")

# tensorboard --logdir=logs/
Python
UTF-8
213
3.40625
3
[]
no_license
def indexes(s, word):
    # Gather every position whose character equals `word`, then print the list.
    res = [i for i, ch in enumerate(s) if ch == word]
    print(res)


indexes('mississippi', 's')
indexes('mississippi', 'i')
indexes('mississippi', 'a')
JavaScript
UTF-8
411
3.796875
4
[]
no_license
var obj = { a: 1, b: 2, c: 3 };

// Log every own property of obj; for a plain object literal this visits
// the same keys in the same order as the original for-in loop.
Object.keys(obj).forEach(function (p) {
  console.log("The value of obj." + p + " is " + obj[p]);
});

/**
if (someCondition) {
	doSomething();
} else if (someOtherCondition) {
	doSomeOtherThing();
} else if (yetAnotherCondition) {
	doYetAnotherThing();
} else {
	doSomethingElse();
}

if (x == 'undefined')
**/
Java
UTF-8
708
2
2
[]
no_license
package org.ucomplex.ucomplex.Modules.MessagesList.model;

import java.util.ArrayList;
import java.util.List;

/**
 * ---------------------------------------------------
 * Created by Sermilion on 02/06/2017.
 * Project: UComplex
 * ---------------------------------------------------
 * <a href="http://www.ucomplex.org">www.ucomplex.org</a>
 * <a href="http://www.github.com/sermilion>github</a>
 * ---------------------------------------------------
 *
 * Container for a list of message dialogs, starting out empty.
 */
public final class MessagesListRaw {

    // Backing list; there is no add method, so callers populate it through
    // the live reference returned by getDialogs().
    private final List<MessagesListItem> dialogs;

    public MessagesListRaw() {
        this.dialogs = new ArrayList<>();
    }

    /**
     * Returns the live, mutable dialog list (NOT a defensive copy) —
     * mutations through the returned reference affect this instance.
     */
    public List<MessagesListItem> getDialogs() {
        return dialogs;
    }
}
C++
UTF-8
3,223
3.015625
3
[ "Apache-2.0" ]
permissive
/**
 * This sketch is part of IOAbstraction, it shows how to use the EEPROM abstraction,
 * for which you can choose from NoEeprom, AvrEeprom and I2cAt24C based eeproms.
 * This example chooses AvrEeprom, but could equally be replaced by any of the others.
 *
 * This allows any libraries or code you write to work easily across 8 and 32 bit
 * machines by allowing you to decide what type of eeprom you have at compile / runtime.
 *
 * Note that running this sketch WILL WRITE INTO THE SELECTED ROM at the location
 * starting at romStart.
 *
 * It writes a byte, int, double and string to the eeprom and reads them back.
 *
 * Documentation and reference:
 *
 * https://www.thecoderscorner.com/products/arduino-downloads/io-abstraction/
 * https://www.thecoderscorner.com/ref-docs/ioabstraction/html/index.html
 */

// We have a direct dependency on Wire and Arduino ships it as a library for every board
// therefore to ensure compilation we include it here.
#include <Wire.h>

// you always needs this include.
#include <EepromAbstractionWire.h>
#include <TaskManagerIO.h>

// First ROM address this sketch touches; it uses [romStart, romStart + 107).
const unsigned int romStart = 800;

// When you want to use the AVR built in EEPROM support (only available on AVR)
// comment / uncomment to select
I2cAt24Eeprom anEeprom(0x50, PAGESIZE_AT24C128);

// 100-byte fixture written to / read back from the ROM.
const char strData[100] = { "This is a quite long string that should need to be handled in many parts with wait states"};

void setup() {
	Serial.begin(115200);
	while(!Serial);

	// if you are using the i2c eeprom, you must include this line below, not needed otherwise.
	Wire.begin();

	Serial.println("Eeprom example starting");

	// clear the ROM first..
	for(int i=romStart;i<(romStart+100);i++) anEeprom.write8(i, 0);
	Serial.println(anEeprom.hasErrorOccurred() ? "Write failure" : "Write success");

	// now write the values to the rom. 8, 16 and 32 bit
	anEeprom.write8(romStart, (byte)42);
	anEeprom.write16(romStart + 1, 0xface);
	anEeprom.write32(romStart + 3, 0xf00dface);
	Serial.println(anEeprom.hasErrorOccurred() ? "Write failure" : "Write success");

	// lastly write an array to the rom.
	anEeprom.writeArrayToRom(romStart + 7, (const unsigned char*)strData, sizeof strData);
	Serial.println(anEeprom.hasErrorOccurred() ? "Write failure" : "Write success");

	Serial.println("Eeprom example written initial values");
}

void loop() {
	// Read back each value written in setup() and report it.
	Serial.print("Reading back byte: ");
	Serial.println(anEeprom.read8(romStart));

	Serial.print("Reading back word: 0x");
	Serial.println(anEeprom.read16(romStart + 1), HEX);

	Serial.print("Reading back long: 0x");
	Serial.println(anEeprom.read32(romStart + 3), HEX);

	// finally we'll do hard comparisons against the array, as it's hard to check by hand.
	char readBuffer[100];
	anEeprom.readIntoMemArray((unsigned char*)readBuffer, romStart + 7, sizeof readBuffer);
	Serial.print("Rom Array: ");
	Serial.println(readBuffer);
	Serial.print("String is same: ");
	Serial.println(strcmp(readBuffer, strData)==0 ? "YES":"NO");

	// we can check if there are any errors writing by calling hasErrorOccurred, for AVR there is never an error.
	// but for i2c variants there may well be.
	Serial.println(anEeprom.hasErrorOccurred() ? "Read error" : "Successfully");

	delay(10000);
}
Python
UTF-8
544
2.609375
3
[ "BSD-3-Clause" ]
permissive
from numpy import isnan

from wonambi import Dataset

from .paths import mff_file

# Shared fixture: one EGI MFF recording opened once for all tests below.
d = Dataset(mff_file)


def test_mff_read():
    # Smoke test: reading a 10-second window should not raise.
    d.read_data(begtime=10, endtime=20)


def test_mff_before_start():
    # Samples requested before the start of the recording come back as NaN.
    data = d.read_data(begsam=-100, endsam=10)
    assert isnan(data.data[0][0, 0])


def test_mff_after_end():
    # Samples requested past the end of the recording come back as NaN.
    n_samples = d.header['n_samples']
    data = d.read_data(begsam=n_samples - 100, endsam=n_samples + 100)
    assert isnan(data.data[0][0, -1])


def test_mff_markers():
    # This particular recording is expected to contain exactly 8 markers.
    markers = d.read_markers()
    assert len(markers) == 8
Java
UTF-8
40,184
2.65625
3
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
package com.mixpanel.mixpanelapi; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Collection; import java.util.Date; import java.util.Iterator; import java.util.Map; import java.util.TimeZone; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; /** * This class writes JSONObjects of a form appropriate to send as Mixpanel events and * updates to profiles via the MixpanelAPI class. * * Instances of this class can be instantiated separately from instances of MixpanelAPI, * and the resulting messages are suitable for enqueuing or sending over a local network. */ public class MessageBuilder { private static final String ENGAGE_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss"; private final String mToken; public MessageBuilder(String token) { mToken = token; } /*** * Creates a message tracking an event, for consumption by MixpanelAPI * See: * * https://help.mixpanel.com/hc/en-us/articles/360000857366-Guide-to-Mixpanel-Basics * * for a detailed discussion of event names, distinct ids, event properties, and how to use them * to get the most out of your metrics. * * @param distinctId a string uniquely identifying the individual cause associated with this event * (for example, the user id of a signing-in user, or the hostname of a server) * @param eventName a human readable name for the event, for example "Purchase", or "Threw Exception" * @param properties a JSONObject associating properties with the event. These are useful * for reporting and segmentation of events. 
It is often useful not only to include * properties of the event itself (for example { 'Item Purchased' : 'Hat' } or * { 'ExceptionType' : 'OutOfMemory' }), but also properties associated with the * identified user (for example { 'MemberSince' : '2012-01-10' } or { 'TotalMemory' : '10TB' }) * @return event message for consumption by MixpanelAPI */ public JSONObject event(String distinctId, String eventName, JSONObject properties) { long time = System.currentTimeMillis(); // Nothing below should EVER throw a JSONException. try { JSONObject dataObj = new JSONObject(); dataObj.put("event", eventName); JSONObject propertiesObj = null; if (properties == null) { propertiesObj = new JSONObject(); } else { propertiesObj = new JSONObject(properties.toString()); } if (! propertiesObj.has("token")) propertiesObj.put("token", mToken); if (! propertiesObj.has("time")) propertiesObj.put("time", time); if (! propertiesObj.has("mp_lib")) propertiesObj.put("mp_lib", "jdk"); if (distinctId != null) propertiesObj.put("distinct_id", distinctId); dataObj.put("properties", propertiesObj); JSONObject envelope = new JSONObject(); envelope.put("envelope_version", 1); envelope.put("message_type", "event"); envelope.put("message", dataObj); return envelope; } catch (JSONException e) { throw new RuntimeException("Can't construct a Mixpanel message", e); } } /** * Sets a property on the profile associated with the given distinctId. When * sent, this message will overwrite any existing values for the given * properties. 
So, to set some properties on user 12345, one might call: * <pre> * {@code * JSONObject userProperties = new JSONObject(); * userProperties.put("Company", "Uneeda Medical Supply"); * userProperties.put("Easter Eggs", "Hatched"); * JSONObject message = messageBuilder.set("12345", userProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile. * @return user profile set message for consumption by MixpanelAPI */ public JSONObject set(String distinctId, JSONObject properties) { return set(distinctId, properties, null); } /** * Sets a property on the profile associated with the given distinctId. When * sent, this message will overwrite any existing values for the given * properties. So, to set some properties on user 12345, one might call: * <pre> * {@code * JSONObject userProperties = new JSONObject(); * userProperties.put("Company", "Uneeda Medical Supply"); * userProperties.put("Easter Eggs", "Hatched"); * JSONObject message = messageBuilder.set("12345", userProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. 
* @return user profile set message for consumption by MixpanelAPI */ public JSONObject set(String distinctId, JSONObject properties, JSONObject modifiers) { return peopleMessage(distinctId, "$set", properties, modifiers); } /** * Sets a property on the profile associated with the given distinctId, * only if that property is not already set on the associated profile. So, * to set a new property on on user 12345 if it is not already present, one * might call: * <pre> * {@code * JSONObject userProperties = new JSONObject(); * userProperties.put("Date Began", "2014-08-16"); * * // "Date Began" will not be overwritten, but if it isn't already * // present it will be set when we send this message. * JSONObject message = messageBuilder.setOnce("12345", userProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile * @return user profile setOnce message for consumption by MixpanelAPI */ public JSONObject setOnce(String distinctId, JSONObject properties) { return setOnce(distinctId, properties, null); } /** * Sets a property on the profile associated with the given distinctId, * only if that property is not already set on the associated profile. So, * to set a new property on on user 12345 if it is not already present, one * might call: * <pre> * {@code * JSONObject userProperties = new JSONObject(); * userProperties.put("Date Began", "2014-08-16"); * * // "Date Began" will not be overwritten, but if it isn't already * // present it will be set when we send this message. 
* JSONObject message = messageBuilder.setOnce("12345", userProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return user profile setOnce message for consumption by MixpanelAPI */ public JSONObject setOnce(String distinctId, JSONObject properties, JSONObject modifiers) { return peopleMessage(distinctId, "$set_once", properties, modifiers); } /** * Deletes the profile associated with the given distinctId. * * <pre> * {@code * JSONObject message = messageBuilder.delete("12345"); * mixpanelApi.sendMessage(message); * } * </pre> * * @param distinctId a string uniquely identifying the profile to delete * @return user profile delete message for consumption by MixpanelAPI */ public JSONObject delete(String distinctId) { return delete(distinctId, null); } /** * Deletes the profile associated with the given distinctId. * * <pre> * {@code * JSONObject message = messageBuilder.delete("12345"); * mixpanelApi.sendMessage(message); * } * </pre> * * @param distinctId a string uniquely identifying the profile to delete * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. 
* @return user profile delete message for consumption by MixpanelAPI */ public JSONObject delete(String distinctId, JSONObject modifiers) { return peopleMessage(distinctId, "$delete", new JSONObject(), modifiers); } /** * For each key and value in the properties argument, adds that amount * to the associated property in the profile with the given distinct id. * So, to maintain a login count for user 12345, one might run the following code * at every login: * <pre> * {@code * Map<String, Long> updates = new HashMap<String, Long>(); * updates.put('Logins', 1); * JSONObject message = messageBuilder.set("12345", updates); * mixpanelApi.sendMessage(message); * } * </pre> * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties a collection of properties to change on the associated profile, * each associated with a numeric value. * @return user profile increment message for consumption by MixpanelAPI */ public JSONObject increment(String distinctId, Map<String, Long> properties) { return increment(distinctId, properties, null); } /** * For each key and value in the properties argument, adds that amount * to the associated property in the profile with the given distinct id. * So, to maintain a login count for user 12345, one might run the following code * at every login: * <pre> * {@code * Map<String, Long> updates = new HashMap<String, Long>(); * updates.put('Logins', 1); * JSONObject message = messageBuilder.set("12345", updates); * mixpanelApi.sendMessage(message); * } * </pre> * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties a collection of properties to change on the associated profile, * each associated with a numeric value. 
* @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return user profile increment message for consumption by MixpanelAPI */ public JSONObject increment(String distinctId, Map<String, Long> properties, JSONObject modifiers) { JSONObject jsonProperties = new JSONObject(properties); return peopleMessage(distinctId, "$add", jsonProperties, modifiers); } /** * For each key and value in the properties argument, attempts to append * that value to a list associated with the key in the identified profile. * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties properties for the append operation * @return user profile append message for consumption by MixpanelAPI */ public JSONObject append(String distinctId, JSONObject properties) { return append(distinctId, properties, null); } /** * For each key and value in the properties argument, attempts to append * that value to a list associated with the key in the identified profile. * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties properties for the append operation * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. 
* @return user profile append message for consumption by MixpanelAPI */ public JSONObject append(String distinctId, JSONObject properties, JSONObject modifiers) { return peopleMessage(distinctId, "$append", properties, modifiers); } /** * For each key and value in the properties argument, attempts to remove * that value from a list associated with the key in the specified user profile. * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties properties for the remove operation * @return user profile remove message for consumption by MixpanelAPI */ public JSONObject remove(String distinctId, JSONObject properties) { return remove(distinctId, properties, null); } /** * For each key and value in the properties argument, attempts to remove * that value from a list associated with the key in the specified user profile. * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties properties for the remove operation * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return user profile remove message for consumption by MixpanelAPI */ public JSONObject remove(String distinctId, JSONObject properties, JSONObject modifiers) { return peopleMessage(distinctId, "$remove", properties, modifiers); } /** * Merges list-valued properties into a user profile. * The list values in the given are merged with the existing list on the user profile, * ignoring duplicate list values. 
* @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties properties for the union operation * @return user profile union message for consumption by MixpanelAPI */ public JSONObject union(String distinctId, Map<String, JSONArray> properties) { return union(distinctId, properties, null); } /** * Merges list-valued properties into a user profile. * The list values in the given are merged with the existing list on the user profile, * ignoring duplicate list values. * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param properties properties for the union operation * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return user profile union message for consumption by MixpanelAPI */ public JSONObject union(String distinctId, Map<String, JSONArray> properties, JSONObject modifiers) { JSONObject jsonProperties = new JSONObject(properties); return peopleMessage(distinctId, "$union", jsonProperties, modifiers); } /** * Removes the properties named in propertyNames from the profile identified by distinctId. * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. 
* @param propertyNames properties for the unset operation * @return user profile unset message for consumption by MixpanelAPI */ public JSONObject unset(String distinctId, Collection<String> propertyNames) { return unset(distinctId, propertyNames, null); } /** * Removes the properties named in propertyNames from the profile identified by distinctId. * @param distinctId a string uniquely identifying the profile to change, * for example, a user id of an app, or the hostname of a server. If no profile * exists for the given id, a new one will be created. * @param propertyNames properties for the unset operation * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return user profile unset message for consumption by MixpanelAPI */ public JSONObject unset(String distinctId, Collection<String> propertyNames, JSONObject modifiers) { JSONArray propNamesArray = new JSONArray(propertyNames); return peopleMessage(distinctId, "$unset", propNamesArray, modifiers); } /** * Tracks revenue associated with the given distinctId. * * @param distinctId an identifier associated with a profile * @param amount a double revenue amount. Positive amounts represent income for your business. * @param properties can be null. If provided, a set of properties to associate with * the individual transaction. * @return user profile trackCharge message for consumption by MixpanelAPI */ public JSONObject trackCharge(String distinctId, double amount, JSONObject properties) { return trackCharge(distinctId, amount, properties, null); } /** * Tracks revenue associated with the given distinctId. * * @param distinctId an identifier associated with a profile * @param amount a double revenue amount. Positive amounts represent income for your business. * @param properties can be null. 
If provided, a set of properties to associate with * the individual transaction. * @param modifiers can be null. If provided, the keys and values in the object will * be merged as modifiers associated with the update message (for example, "$time" or "$ignore_time") * @return user profile trackCharge message for consumption by MixpanelAPI */ public JSONObject trackCharge(String distinctId, double amount, JSONObject properties, JSONObject modifiers) { JSONObject transactionValue = new JSONObject(); JSONObject appendProperties = new JSONObject(); try { transactionValue.put("$amount", amount); DateFormat dateFormat = new SimpleDateFormat(ENGAGE_DATE_FORMAT); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); transactionValue.put("$time", dateFormat.format(new Date())); if (null != properties) { for (Iterator<?> iter = properties.keys(); iter.hasNext();) { String key = (String) iter.next(); transactionValue.put(key, properties.get(key)); } } appendProperties.put("$transactions", transactionValue); return this.append(distinctId, appendProperties, modifiers); } catch (JSONException e) { e.printStackTrace(); throw new RuntimeException("Cannot create trackCharge message", e); } } /** * Formats a generic user profile message. * Use of this method requires familiarity with the underlying Mixpanel HTTP API, * and it may be simpler and clearer to use the pre-built functions for setting, * incrementing, and appending to properties. Use this method directly only * when interacting with experimental APIs, or APIS that the rest of this library * does not yet support. * * The underlying API is documented at https://developer.mixpanel.com/docs/http * * @param distinctId a string uniquely identifying the individual cause associated with this event * (for example, the user id of a signing-in user, or the hostname of a server) * @param actionType a string associated in the HTTP api with the operation (for example, $set or $add) * @param properties a payload of the operation. 
Will be converted to JSON, and should be of types * Boolean, Double, Integer, Long, String, JSONArray, JSONObject, the JSONObject.NULL object, or null. * NaN and negative/positive infinity will throw an IllegalArgumentException * @param modifiers if provided, the keys and values in the modifiers object will * be merged as modifiers associated with the update message (for example, "$time" or "$ignore_time") * @return generic user profile message for consumption by MixpanelAPI * * @throws IllegalArgumentException if properties is not intelligible as a JSONObject property * * @see MessageBuilder#set(String distinctId, JSONObject properties) * @see MessageBuilder#delete(String distinctId) * @see MessageBuilder#append(String distinctId, JSONObject properties, JSONObject modifiers) */ public JSONObject peopleMessage(String distinctId, String actionType, Object properties, JSONObject modifiers) { JSONObject dataObj = new JSONObject(); if (null == properties) { throw new IllegalArgumentException("Cannot send null properties, use JSONObject.NULL instead"); } try { dataObj.put(actionType, properties); } catch (JSONException e) { throw new IllegalArgumentException("Cannot interpret properties as a JSON payload", e); } // At this point, nothing should ever throw a JSONException try { dataObj.put("$token", mToken); dataObj.put("$distinct_id", distinctId); dataObj.put("$time", System.currentTimeMillis()); if (null != modifiers) { final String[] keys = JSONObject.getNames(modifiers); if (keys != null) { for(String key : keys) { dataObj.put(key, modifiers.get(key)); } } } JSONObject envelope = new JSONObject(); envelope.put("envelope_version", 1); envelope.put("message_type", "people"); envelope.put("message", dataObj); return envelope; } catch (JSONException e) { throw new RuntimeException("Can't construct a Mixpanel message", e); } } /** * Sets properties on the group profile identified by the given groupKey * and groupId, creating the profile if needed. 
Existing values for the * given properties are replaced. Example: * <pre> * {@code * JSONObject groupProperties = new JSONObject(); * groupProperties.put("$name", "Acme Incorporated"); * groupProperties.put("Industry", "Manufacturing"); * JSONObject message = messageBuilder.groupSet("company", "Acme Inc.", groupProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile. * @return group profile set message for consumption by MixpanelAPI */ public JSONObject groupSet(String groupKey, String groupId, JSONObject properties) { return groupSet(groupKey, groupId, properties, null); } /** * Sets properties on the group profile identified by the given groupKey * and groupId, creating the profile if needed. Existing values for the * given properties are replaced. Example: * <pre> * {@code * JSONObject groupProperties = new JSONObject(); * groupProperties.put("$name", "Acme Incorporated"); * groupProperties.put("Industry", "Manufacturing"); * JSONObject message = messageBuilder.groupSet("company", "Acme Inc.", groupProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile. * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. 
* @return group profile set message for consumption by MixpanelAPI */ public JSONObject groupSet(String groupKey, String groupId, JSONObject properties, JSONObject modifiers) { return groupMessage(groupKey, groupId, "$set", properties, modifiers); } /** * Sets properties if they do not already exist on the group profile identified by the given groupKey * and groupId. Example: * <pre> * {@code * JSONObject groupProperties = new JSONObject(); * groupProperties.put("First Purchase", "Steel"); * JSONObject message = messageBuilder.groupSetOnce("company", "Acme Inc.", groupProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile. * @return group profile setOnce message for consumption by MixpanelAPI */ public JSONObject groupSetOnce(String groupKey, String groupId, JSONObject properties) { return groupSetOnce(groupKey, groupId, properties, null); } /** * Sets properties if they do not already exist on the group profile identified by the given groupKey * and groupId. Example: * <pre> * {@code * JSONObject groupProperties = new JSONObject(); * groupProperties.put("First Purchase", "Steel"); * JSONObject message = messageBuilder.groupSetOnce("company", "Acme Inc.", groupProperties); * mixpanelApi.sendMessage(message); * } * </pre> * * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties a collection of properties to set on the associated profile. Each key * in the properties argument will be updated on on the profile. * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). 
* this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return group profile setOnce message for consumption by MixpanelAPI */ public JSONObject groupSetOnce(String groupKey, String groupId, JSONObject properties, JSONObject modifiers) { return groupMessage(groupKey, groupId, "$set_once", properties, modifiers); } /** * Deletes the group profile identified by the given groupKey and groupId. * * <pre> * {@code * JSONObject message = messageBuilder.groupDelete("company", "Acme Inc."); * mixpanelApi.sendMessage(message); * } * </pre> * * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @return group profile delete message for consumption by MixpanelAPI */ public JSONObject groupDelete(String groupKey, String groupId) { return groupDelete(groupKey, groupId, null); } /** * Deletes the group profile identified by the given groupKey and groupId. * * <pre> * {@code * JSONObject message = messageBuilder.groupDelete("company", "Acme Inc."); * mixpanelApi.sendMessage(message); * } * </pre> * * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return group profile delete message for consumption by MixpanelAPI */ public JSONObject groupDelete(String groupKey, String groupId, JSONObject modifiers) { return groupMessage(groupKey, groupId, "$delete", new JSONObject(), modifiers); } /** * For each key and value in the properties argument, attempts to remove * that value from a list associated with the key in the specified group profile. 
* @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties properties for the remove operation * @return group profile remove message for consumption by MixpanelAPI */ public JSONObject groupRemove(String groupKey, String groupId, JSONObject properties) { return groupRemove(groupKey, groupId, properties, null); } /** * For each key and value in the properties argument, attempts to remove * that value from a list associated with the key in the specified group profile. * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties properties for the remove operation * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return group profile remove message for consumption by MixpanelAPI */ public JSONObject groupRemove(String groupKey, String groupId, JSONObject properties, JSONObject modifiers) { return groupMessage(groupKey, groupId, "$remove", properties, modifiers); } /** * Merges list-valued properties into a group profile. * The list values given are merged with the existing list on the group profile, * ignoring duplicate list values. * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties properties for the union operation * @return group profile union message for consumption by MixpanelAPI */ public JSONObject groupUnion(String groupKey, String groupId, Map<String, JSONArray> properties) { return groupUnion(groupKey, groupId, properties, null); } /** * Merges list-valued properties into a group profile. * The list values given are merged with the existing list on the group profile, * ignoring duplicate list values. 
* @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param properties properties for the union operation * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. * @return group profile union message for consumption by MixpanelAPI */ public JSONObject groupUnion(String groupKey, String groupId, Map<String, JSONArray> properties, JSONObject modifiers) { JSONObject jsonProperties = new JSONObject(properties); return groupMessage(groupKey, groupId, "$union", jsonProperties, modifiers); } /** * Removes the properties named in propertyNames from the group profile identified by groupKey and groupId. * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param propertyNames properties for the unset operation * @return group profile unset message for consumption by MixpanelAPI */ public JSONObject groupUnset(String groupKey, String groupId, Collection<String> propertyNames) { return groupUnset(groupKey, groupId, propertyNames, null); } /** * Removes the properties named in propertyNames from the group profile identified by groupKey and groupId. * @param groupKey the property that connects event data for Group Analytics * @param groupId the identifier for a specific group * @param propertyNames properties for the unset operation * @param modifiers Modifiers associated with the update message. (for example "$time" or "$ignore_time"). * this can be null- if non-null, the keys and values in the modifiers * object will be associated directly with the update. 
* @return group profile unset message for consumption by MixpanelAPI */ public JSONObject groupUnset(String groupKey, String groupId, Collection<String> propertyNames, JSONObject modifiers) { JSONArray propNamesArray = new JSONArray(propertyNames); return groupMessage(groupKey, groupId, "$unset", propNamesArray, modifiers); } /** * Formats a generic group profile message. * Use of this method requires familiarity with the underlying Mixpanel HTTP API, * and it may be simpler and clearer to use the pre-built update methods. Use this * method directly only when interacting with experimental APIs, or APIS that the * rest of this library does not yet support. * * The underlying API is documented at https://mixpanel.com/help/reference/http * * @param groupKey string identifier for the type of group, e.g. 'Company' * @param groupId unique string identifier for the group, e.g. 'Acme Inc.' * @param actionType a string associated in the HTTP api with the operation (for example, $set or $add) * @param properties a payload of the operation. Will be converted to JSON, and should be of types * Boolean, Double, Integer, Long, String, JSONArray, JSONObject, the JSONObject.NULL object, or null. 
* NaN and negative/positive infinity will throw an IllegalArgumentException * @param modifiers if provided, the keys and values in the modifiers object will * be merged as modifiers associated with the update message (for example, "$time" or "$ignore_time") * @return generic group profile message for consumption by MixpanelAPI * * @throws IllegalArgumentException if properties is not intelligible as a JSONObject property * * @see MessageBuilder#groupSet(String groupKey, String groupId, JSONObject properties) * @see MessageBuilder#groupSetOnce(String groupKey, String groupId, JSONObject properties) * @see MessageBuilder#groupRemove(String groupKey, String groupId, JSONObject properties) * @see MessageBuilder#groupDelete(String groupKey, String groupId) */ public JSONObject groupMessage(String groupKey, String groupId, String actionType, Object properties, JSONObject modifiers) { JSONObject dataObj = new JSONObject(); if (null == properties) { throw new IllegalArgumentException("Cannot send null properties, use JSONObject.NULL instead"); } try { dataObj.put(actionType, properties); } catch (JSONException e) { throw new IllegalArgumentException("Cannot interpret properties as a JSON payload", e); } // At this point, nothing should ever throw a JSONException try { dataObj.put("$token", mToken); dataObj.put("$group_key", groupKey); dataObj.put("$group_id", groupId); dataObj.put("$time", System.currentTimeMillis()); if (null != modifiers) { final String[] keys = JSONObject.getNames(modifiers); if (keys != null) { for(String key : keys) { dataObj.put(key, modifiers.get(key)); } } } JSONObject envelope = new JSONObject(); envelope.put("envelope_version", 1); envelope.put("message_type", "group"); envelope.put("message", dataObj); return envelope; } catch (JSONException e) { throw new RuntimeException("Can't construct a Mixpanel message", e); } } }
Markdown
UTF-8
882
2.609375
3
[ "MIT" ]
permissive
--- id: "6F8ACC" date: "2019-08-18T15:53:00.000Z" title: "Facebook Page is Live" description: "" tags: - Social Media instagram: https://www.instagram.com/p/B1UUFsKnvCL/ titlecard: top --- This week I launched a Facebook page for Curvy and Trans. It mirrors a lot of the content here on Instagram, but also receives things that are difficult to share here (such as links to articles and videos), and resharing of any FB posts that I find pertinent to either fat positivity, plus size fashion, or the trans experience. I have also created a Facebook Group, connected to this page, called The Trans Fats. My goal is to create a space for trans focused discussions and support on body positivity and fat positivity. Cis people and non-fats are welcome, just recognize that it is a trans and fat centering group. ## [Head on over to the page...](https://facebook.com/curvyandtrans)
Python
UTF-8
26,242
2.890625
3
[]
no_license
""" Class definition and utilities for the Local Outlier Factor tool. """ import time as _time import logging as _logging from array import array as _array import graphlab as _gl import graphlab.connect as _mt from graphlab.toolkits._model import CustomModel as _CustomModel import graphlab.toolkits._internal_utils as _tkutl from graphlab.toolkits._private_utils import _summarize_accessible_fields from graphlab.toolkits._model import ProxyBasedModel as _ProxyBasedModel from graphlab.toolkits._model import PythonProxy as _PythonProxy def get_default_options(): """ Information about local outlier factor parameters. Returns ------- out : SFrame Each row in the output SFrames correspond to a parameter, and includes columns for default values, lower and upper bounds, description, and type. """ out = _gl.SFrame({ 'name': ['distance', 'num_neighbors', 'threshold_distances', 'verbose'], 'default_value': ['None', '5', 'True', 'True'], 'parameter_type': ['String, function, or composite distance', 'int', 'bool', 'bool'], 'lower_bound': ['None', '1', 'False', 'False'], 'upper_bound': ['None', 'None', 'True', 'True'], 'description': ['Name of a distance function or a composite distance function.', 'Number of neighbors to consider for each point.', 'Whether computed distances should be thresholded.', 'Progress printing flag.']}) return out def create(dataset, features=None, label=None, distance=None, num_neighbors=5, threshold_distances=True, verbose=True): """ Create a :class:`LocalOutlierFactorModel`. This mode contains local outlier factor (LOF) scores for the training data passed to this model, and can predict the LOF score for new observations. The LOF method scores each data instance by computing the ratio of the average densities of the instance's neighbors to the density of the instance itself. The higher the score, the more likely the instance is to be an outlier *relative to its neighbors*. 
A score of 1 or less means that an instance has a density similar (or higher) to its neighbors and is unlikely to be an outlier. The model created by this function contains an SFrame called 'scores' that contains the computed local outlier factors. The `scores` SFrame has four columns: - *row_id*: the row index of the instance in the input dataset. If a label column is passed, the labels (and the label name) are passed through to this column in the output. - *density*: the density of instance as estimated by the LOF procedure. - *neighborhood_radius*: the distance from the instance to its furthest neighbor (defined by 'num_neighbors', and used for predicting the LOF for new points). - *anomaly_score*: the local outlier factor. For more information on the LOF method and the computation used for each of these columns, please see the Notes and References sections below. Parameters ---------- dataset : SFrame Input dataset. The 'dataset' SFrame must include the features specified in the 'features' or 'distance' parameter (additional columns are ignored). features : list[string], optional Names of feature columns. 'None' (the default) indicates that all columns should be used. Each column can be one of the following types: - *Numeric*: values of numeric type integer or float. - *Array*: array of numeric (integer or float) values. Each array element is treated as a separate variable in the model. - *Dictionary*: key-value pairs with numeric (integer or float) values. Each key indicates a separate variable in the model. - *String*: string values. Please note: if 'distance' is specified as a composite distance, then that parameter controls which features are used in the model. Also note that the column of row labels is automatically removed from the features, if there is a conflict. label : str, optional Name of the input column containing row labels. The values in this column must be integers or strings. If not specified, row numbers are used by default. 
distance : string or list[list], optional Function to measure the distance between any two input data rows. If left unspecified, a distance function is automatically constructed based on the feature types. The distance may be specified by either a string or composite distance: - *String*: the name of a standard distance function. One of 'euclidean', 'squared_euclidean', 'manhattan', 'levenshtein', 'jaccard', 'weighted_jaccard', 'cosine', or 'dot_product'. Please see the :mod:`distances` module for more details. - *Composite distance*: the weighted sum of several standard distance functions applied to various features. This is specified as a list of distance components, each of which is itself a list containing three items: 1. list or tuple of feature names (strings) 2. standard distance name (string) 3. scaling factor (int or float) num_neighbors : int, optional Number of neighbors to consider for each point. threshold_distances : bool, optional If True (the default), the distance between two points is thresholded. This reduces noise and can improve the quality of results, but at the cost of slower computation. See the notes below for more detail. verbose : bool, optional If True, print progress updates and model details. Returns ------- model : LocalOutlierFactorModel A trained :class:`LocalOutlierFactorModel`, which contains an SFrame called 'scores' that includes the 'anomaly score' for each input instance. See Also -------- LocalOutlierFactorModel, graphlab.toolkits.nearest_neighbors Notes ----- - The LOF method scores each data instance by computing the ratio of the average densities of the instance's neighbors to the density of the instance itself. According to the LOF method, the estimated density of a point :math:`p` is the number of :math:`p`'s neighbors divided by the sum of distances to the instance's neighbors. In the following, suppose :math:`N(p)` is the set of neighbors of point :math:`p`, :math:`k` is the number of points in this set (i.e. 
the 'num_neighbors' parameter), and :math:`d(p, x)` is the distance between points :math:`p` and :math:`x` (also based on a user-specified distance function). .. math:: \hat{f}(p) = \\frac{k}{\sum_{x \in N(p)} d(p, x)} - The LOF score for point :math:`p` is then the ratio of :math:`p`'s density to the average densities of :math:`p`'s neighbors: .. math:: LOF(p) = \\frac{\\frac{1}{k} \sum_{x \in N(p)} \hat{f}(x)}{\hat{f}(p)} - If the 'threshold_distances' flag is set to True, exact distances are replaced by "thresholded" distances. Suppose :math:`r_k(x)` is the distance from :math:`x` to its :math:`k`'th nearest neighbor. Then the thresholded distance from point :math:`p` to point :math:`x` is .. math:: d^*(p, x) = \max\{r_k(x), d(p, x)\} This adaptive thresholding is used in the original LOF paper (see the References section) to reduce noise in the computed distances and improve the quality of the final LOF scores. - For features that all have the same type, the distance parameter may be a single standard distance function name (e.g. "euclidean"). In the model, however, all distances are first converted to composite distance functions; as a result, the 'distance' field in the model is always a composite distance. - Standardizing features is often a good idea with distance-based methods, but this model does *not* standardize features. - If there are several observations located at an identical position, the LOF values can be undefined. An LOF score of "nan" means that a point is either in or near a set of co-located points. - This implementation of LOF forces the neighborhood of each data instance to contain exactly 'num_neighbors' points, breaking ties arbitrarily. This differs from the original LOF paper (see References below), which allows neighborhoods to expand if there are multiple neighbors at exactly the same distance from an instance. References ---------- - Breunig, M. M., Kriegel, H., Ng, R. T., & Sander, J. (2000). 
`LOF: Identifying Density-Based Local Outliers <http://people.cs.vt.edu/badityap/classes/cs6604-Fall13/readings/breunig-2000.pdf>`_, pp 1-12. Examples -------- >>> sf = graphlab.SFrame({'x0': [0., 1., 1., 0., 1., 0., 5.], ... 'x1': [2., 1., 0., 1., 2., 1.5, 2.5]}) >>> lof = graphlab.local_outlier_factor.create(sf, num_neighbors=3) >>> lof['scores'] +--------+----------------+----------------+---------------------+ | row_id | density | anomaly_score | neighborhood_radius | +--------+----------------+----------------+---------------------+ | 0 | 0.927050983125 | 1.03785526045 | 1.0 | | 3 | 0.962144739546 | 0.919592692017 | 1.0 | | 1 | 0.765148090776 | 1.14822979837 | 1.0 | | 6 | 0.230412599692 | 3.52802012342 | 4.71699056603 | | 2 | 0.71140803489 | 1.26014768739 | 1.80277563773 | | 5 | 0.962144739546 | 0.919592692017 | 1.11803398875 | | 4 | 0.962144739546 | 0.919592692017 | 1.11803398875 | +--------+----------------+----------------+---------------------+ [7 rows x 4 columns] """ ## Start the training time clock and instantiate an empty model _mt._get_metric_tracker().track( 'toolkit.anomaly_detection.local_outlier_factor.create') logger = _logging.getLogger(__name__) start_time = _time.time() ## Validate the input dataset _tkutl._raise_error_if_not_sframe(dataset, "dataset") _tkutl._raise_error_if_sframe_empty(dataset, "dataset") ## Validate the number of neighbors, mostly to make the error message use # the right parameter name. if not isinstance(num_neighbors, int): raise TypeError("Input 'num_neighbors' must be an integer.") if num_neighbors <= 0: raise ValueError("Input 'num_neighbors' must be larger than 0.") if num_neighbors > dataset.num_rows(): num_neighbors = dataset.num_rows() if verbose: logger.info("Input 'num_neighbors' is larger than the number " + "of rows in the input 'dataset'. Resetting " + "'num_neighbors' to the dataset length.") ## Validate the row label against the features *using the nearest neighbors # tool with only one row of data. 
This is a hack - we should encapsulate # the validation steps in nearest neighbors and do them here first. validation_model = _gl.nearest_neighbors.create(dataset[:1], label=label, features=features, distance=distance, method='brute_force', verbose=False) ## Compute the similarity graph based on k and radius, without self-edges, # but keep it in the form of an SFrame. Do this *without* the row label, # because I need to sort on the row number, and row labels that aren't # already in order will be screwed up. knn_model = _gl.nearest_neighbors.create(dataset, distance=validation_model.distance, method='brute_force', verbose=verbose) knn = knn_model.similarity_graph(k=num_neighbors, radius=None, include_self_edges=False, output_type='SFrame', verbose=verbose) ## Bias the distances by making them at least equal to the *reference* # point's k'th neighbor radius. This is "reach-distance" in the original # paper. if threshold_distances is True: radii = knn.groupby('query_label', {'neighborhood_radius': _gl.aggregate.MAX('distance')}) knn = knn.join(radii, on={'reference_label': 'query_label'}, how='left') knn['distance'] = knn.apply( lambda x: x['distance'] if x['distance'] > x['neighborhood_radius'] \ else x['neighborhood_radius']) ## Find the sum of distances from each point to its neighborhood, then # compute the "local reachability density (LRD)". This is not remotely a # valid density estimate, but it does have the form of mass / volume, # where the mass is estimated by the number of neighbors in point x's # neighborhood, and the volume is estimated by the sum of the distances # between x and its neighbors. # ## NOTE: if a vertex is co-located with all of its neighbors, the sum of # distances will be 0, in which case the inverse distance sum value is # 'inf'. 
scores = knn.groupby('query_label', {'dist_sum': _gl.aggregate.SUM('distance')}) scores['density'] = float(num_neighbors) / scores['dist_sum'] ## Join the density of each point back to the nearest neighbors results, # then get the average density of each point's neighbors' densities. knn = knn.join(scores, on={'reference_label': 'query_label'}, how='left') scores2 = knn.groupby('query_label', {'average_neighbor_density': _gl.aggregate.AVG('density')}) ## Combine each point's density and average neighbor density into one # SFrame, then compute the local outlier factor (LOF). scores = scores.sort('query_label') scores2 = scores2.sort('query_label') scores['anomaly_score'] = scores2['average_neighbor_density'] / scores['density'] ## Add each point's neighborhood radius to the output SFrame. if threshold_distances is True: radii = radii.sort('query_label') scores['neighborhood_radius'] = radii['neighborhood_radius'] ## Remove the extraneous columns from the output SFrame and format. scores = scores.remove_column('dist_sum') ## Substitute in the row labels. 
if label is None: row_label_name = 'row_id' scores = scores.rename({'query_label': row_label_name}) else: row_label_name = label scores = scores.remove_column('query_label') col_names = scores.column_names() scores[row_label_name] = dataset[label] scores = scores[[row_label_name] + col_names] ## Post-processing and formatting state = { 'nearest_neighbors_model': knn_model, 'verbose': verbose, 'threshold_distances': threshold_distances, 'num_neighbors': num_neighbors, 'num_examples': dataset.num_rows(), 'distance': knn_model['distance'], 'num_distance_components': knn_model['num_distance_components'], 'features': knn_model['features'], 'row_label_name': row_label_name, 'num_features': knn_model['num_features'], 'unpacked_features': knn_model['unpacked_features'], 'num_unpacked_features': knn_model['num_unpacked_features'], 'scores': scores, 'training_time': _time.time() - start_time} model = LocalOutlierFactorModel(state) return model class LocalOutlierFactorModel(_CustomModel, _ProxyBasedModel): """ Local outlier factor model. The LocalOutlierFactorModel contains the local outlier factor scores for training data passed to the 'create' function, as well as a 'predict' method for scoring new data. Outliers are determined by comparing the probability density estimate of each point to the density estimates of its neighbors. This model should not be constructed directly. Instead, use :func:`graphlab.anomaly_detection.create` or :func:`graphlab.anomaly_detectcion.local_outlier_factor.create` to create an instance of this model. Please see the API docs for the ``create`` method, as well as the references below or the `Anomaly Detection chapter of the User Guide <https://turi.com/learn/userguide/anomaly_detection/intro.html>`_ for more information on the Local Outlier Factor method. See Also -------- create References ---------- - Breunig, M. M., Kriegel, H., Ng, R. T., & Sander, J. (2000). 
`LOF: Identifying Density-Based Local Outliers <http://people.cs.vt.edu/badityap/classes/cs6604-Fall13/readings/breunig-2000.pdf>`_, pp 1-12. Examples -------- >>> sf = graphlab.SFrame({'x0': [0., 1., 1., 0., 1., 0., 5.], ... 'x1': [2., 1., 0., 1., 2., 1.5, 2.5]}) >>> lof = graphlab.local_outlier_factor.create(sf, num_neighbors=3) >>> lof['scores'] +--------+----------------+----------------+---------------------+ | row_id | density | anomaly_score | neighborhood_radius | +--------+----------------+----------------+---------------------+ | 0 | 0.927050983125 | 1.03785526045 | 1.0 | | 3 | 0.962144739546 | 0.919592692017 | 1.0 | | 1 | 0.765148090776 | 1.14822979837 | 1.0 | | 6 | 0.230412599692 | 3.52802012342 | 4.71699056603 | | 2 | 0.71140803489 | 1.26014768739 | 1.80277563773 | | 5 | 0.962144739546 | 0.919592692017 | 1.11803398875 | | 4 | 0.962144739546 | 0.919592692017 | 1.11803398875 | +--------+----------------+----------------+---------------------+ [7 rows x 4 columns] """ _PYTHON_LOF_MODEL_VERSION = 2 def __init__(self, state={}): if 'nearest_neighbors_model' not in state: state['nearest_neighbors_model'] = None if state['nearest_neighbors_model'] and not isinstance(state['nearest_neighbors_model'], _gl.nearest_neighbors._nearest_neighbors.NearestNeighborsModel): raise TypeError("The internal nearest neighbors model for LocalOutlierFactorModel is not correct.") self.__proxy__ = _PythonProxy(state) def _get_version(self): return self._PYTHON_LOF_MODEL_VERSION def _save_impl(self, pickler): """ Save the model as a directory, which can be loaded with the :py:func:`~graphlab.load_model` method. Parameters ---------- pickler : GLPickler An opened GLPickle archive (Do not close the archive). 
See Also -------- graphlab.load_model Examples -------- >>> model.save('my_model_file') >>> loaded_model = graphlab.load_model('my_model_file') """ state = self.__proxy__ pickler.dump(state) @classmethod def _load_version(self, unpickler, version): """ Load a previously saved LocalOutlierFactorModel instance. Parameters ---------- unpickler : GLUnpickler A GLUnpickler file handler. version : int Version number maintained by the class writer. """ if version < 1: nn_model = unpickler.load() state = unpickler.load() state['nearest_neighbors_model'] = nn_model else: state = unpickler.load() if version < 2: state['row_label_name'] = 'row_id' return LocalOutlierFactorModel(state) def __str__(self): """ Return a string description of the model to the ``print`` method. Returns ------- out : string A description of the LocalOutlierFactorModel. """ return self.__repr__() def __repr__(self): """ Print a string description of the model when the model name is entered in the terminal. """ width = 40 key_str = "{:<{}}: {}" sections, section_titles = self._get_summary_struct() accessible_fields = { "scores": "Local outlier factor for each row in the input dataset.", "nearest_neighbors_model": "Model used internally to compute nearest neighbors."} out = _tkutl._toolkit_repr_print(self, sections, section_titles, width=width) out2 = _summarize_accessible_fields(accessible_fields, width=width) return out + "\n" + out2 def _get_summary_struct(self): """ Returns a structured description of the model, including (where relevant) the schema of the training data, description of the training data, training statistics, and model hyperparameters. Returns ------- sections : list (of list of tuples) A list of summary sections. Each section is a list. Each item in a section list is a tuple of the form: ('<label>','<field>') section_titles: list A list of section titles. The order matches that of the 'sections' object. 
""" model_fields = [ ('Number of examples', 'num_examples'), ('Number of feature columns', 'num_features'), ('Number of neighbors', 'num_neighbors'), ('Use thresholded distances', 'threshold_distances'), ('Number of distance components', 'num_distance_components'), ('Row label name', 'row_label_name')] training_fields = [ ('Total training time (seconds)', 'training_time')] section_titles = ['Schema', 'Training summary'] return([model_fields, training_fields], section_titles) def get_current_options(self): """ Return a dictionary with the options used to define and create the current LocalOutlierFactorModel instance. """ return {k: self.__proxy__[k] for k in get_default_options()['name']} def predict(self, dataset, verbose=True): """ Compute local outlier factors for new data. The LOF scores for new data instances are based on the neighborhood statistics for the data used when the model was created. Each new point is scored independently. Parameters ---------- dataset : SFrame Dataset of new points to score with LOF against the training data already stored in the model. verbose : bool, optional If True, print progress updates and model details. Returns ------- out : SArray LOF score for each new point. The output SArray is sorted to match the order of the 'dataset' input to this method. Examples -------- >>> sf = graphlab.SFrame({'x0': [0., 1., 1., 0., 1., 0., 5.], ... 'x1': [2., 1., 0., 1., 2., 1.5, 2.5]}) >>> m = graphlab.local_outlier_factor.create(sf, num_neighbors=3) ... >>> sf_new = graphlab.SFrame({'x0': [0.5, 4.5], ... 'x1': [1., 4.0]}) >>> m.predict(sf_new) dtype: float Rows: 2 [0.9317508614964032, 2.905646339288692] """ _mt._get_metric_tracker().track( 'toolkit.anomaly_detection.local_outlier_factor.predict') ## Validate the input dataset _tkutl._raise_error_if_not_sframe(dataset, "dataset") _tkutl._raise_error_if_sframe_empty(dataset, "dataset") num_neighbors = self.__proxy__['num_neighbors'] ## Query the knn model with the new points. 
knn = self.__proxy__['nearest_neighbors_model'].query(dataset, k=num_neighbors, verbose=verbose) ## Join the reference data's neighborhood statistics to the nearest # neighbors results. knn = knn.join(self.__proxy__['scores'], on={'reference_label': 'row_id'}, how='left') # Compute reachability distance for each new point and its # neighborhood. if self.__proxy__['threshold_distances'] is True: knn['distance'] = knn.apply( lambda x: x['distance'] \ if x['distance'] > x['neighborhood_radius'] \ else x['neighborhood_radius']) ## Find the sum of distances from each point to its neighborhood, then # compute the "local reachability density" for each query point. scores = knn.groupby('query_label', {'dist_sum': _gl.aggregate.SUM('distance')}) scores['density'] = float(num_neighbors) / scores['dist_sum'] ## Find the average density for each query point's neighbors. scores2 = knn.groupby('query_label', {'average_neighbor_density': _gl.aggregate.AVG('density')}) ## Join the point densities and average neighbor densities into a # single SFrame and compute the local outlier factor. scores = scores.join(scores2, on='query_label') scores['anomaly_score'] = \ scores['average_neighbor_density'] / scores['density'] ## Remove extraneous columns and format. scores = scores.sort('query_label', ascending=True) return scores['anomaly_score'] @classmethod def _get_queryable_methods(cls): """ Return a list of method names that are queryable through Predictive Service. """ return {'predict': {'dataset': 'sframe'}}
Java
UTF-8
602
2.53125
3
[]
no_license
package fr.obd2Reader.command;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;

/**
 * OBD-II command for PID 0x5E (engine fuel rate), reported in litres per hour.
 */
public class EngineFuelRateCommand extends ObdCommand implements CompatibleCommand{

	/**
	 * Builds the command with the raw request "01 5E" (mode 01, PID 5E) and
	 * the display unit "L/h".
	 *
	 * @param out stream used to send the request to the ELM/OBD interface
	 * @param in  stream used to read the response bytes
	 */
	public EngineFuelRateCommand(OutputStream out, InputStream in){
		super("01 5E", "Engine Fuel Rate", out, in);
		setUnit("L/h");
	}

	/**
	 * Checks the vehicle's supported-PID bitmask for this command.
	 *
	 * NOTE(review): assumes byte index 8 / bit value 4 of {@code vehicleRef}
	 * is the support flag for PID 0x5E — TODO confirm against the layout of
	 * the buffer produced by the supported-PID queries (for PID 0x5E the
	 * expected position is bit 2 of byte D of the 0x41-0x60 mask).
	 *
	 * @param vehicleRef concatenated supported-PID response bytes
	 * @return true if the vehicle reports support for this PID
	 */
	public boolean isCompatible(ArrayList<Byte> vehicleRef){
		return ((vehicleRef.get(8) & 4) == 4);
	}

	/**
	 * Sends the request, reads the two data bytes A and B, and stores the
	 * fuel rate computed with the standard PID 0x5E formula:
	 * ((A * 256) + B) * 0.05 L/h.
	 */
	public void compute(){
		sendCommand();
		read();
		// Mask with 0xFF: Byte unboxes to a *signed* byte, so response values
		// above 0x7F would otherwise sign-extend and yield wrong (even
		// negative) fuel rates.
		int a = getInBuff().get(0) & 0xFF;
		int b = getInBuff().get(1) & 0xFF;
		setData((float)(((a * 256) + b) * 0.05));
	}
}
Markdown
UTF-8
8,985
2.734375
3
[]
no_license
--- copyright: years: 2017, 2018 lastupdated: "2018-05-17" --- {:new_window: target="\_blank"} {:shortdesc: .shortdesc} {:screen: .screen} {:codeblock: .codeblock} {:pre: .pre} # リソース・レベル・アクセス制御の構成 {: #configure_RLAC} リソース・レベル・アクセス制御を使用すれば、デバイスを管理するためにユーザーと API キーのアクセスを制御できます。 それぞれのユーザーや API キーで管理できる組織内のデバイスを定義するために、リソース・グループを使用します。 ユーザーや API キーに「役割とグループ」のペアを割り当てることによって、指定のグループに含まれているデバイスに対して、指定の役割でカバーされている操作だけを実行できる、という動作を定義することも可能です。 リソース・レベル・アクセス制御の詳細については、[リソース・レベル・アクセス制御の概説](rlac_overview.html)と [{{site.data.keyword.iot_short_notm}}アクセス制御 API の資料 ![外部リンク・アイコン](../../../icons/launch-glyph.svg "外部リンク・アイコン")](https://docs.internetofthings.ibmcloud.com/apis/swagger/v0002-beta/security-subjects-beta.html){: new_window} を参照してください。 ## リソース・レベル・アクセス制御の構成 - プロセス・フロー {: #RLAC_process} リソース・レベル・アクセス制御を有効にして使用するための一般的なプロセス・フローは、以下のとおりです。 1. [組織を作成します](../iotplatform_overview.html#organizations)。 2. [ユーザーを作成し](../add_users.html#adding-new-users)、[API キー](../platform_authorization.html#api-key)を作成します。 3. [リソース・グループを作成します](rlac.html#create_delete_group)。 4. [「役割とグループ」のマッピングをユーザーと API キーに割り当てます](rlac.html#assign_roletogroup)。 5. [リソース・グループのデバイスを追加します](rlac.html#add_device)。 6. 
[リソース・レベル・アクセス制御を有効にします](rlac.html#RLAC_enable)。 リソース・レベル・アクセス制御は、各種のデバイス関連 API に適用されます。 影響を受ける API のリストについては、[リソース・レベル・アクセス制御が適用される API](rlac_overview.html#RLAC_enforced_APIs) を参照してください。 ## リソース・グループの作成と削除 {: #create_delete_group} リソース・グループは、接続先のゲートウェイとは独立して作成し、削除できます。 リソース・グループを作成して、グループの詳細を返すには、以下の API を使用します。 POST /api/v0002/groups { "name": "groupA", "description": "Devices in the red group", "searchTags": ["red"] } リソース・グループを削除すると、そのグループに入っていたデバイスがグループから削除されますが、デバイス自体にそれ以外の影響はありません。 リソース・グループを削除するには、以下の API を使用します。 DELETE /api/v0002/groups/{groupUid} ## ユーザーや API キーへの「役割とグループ」のペアの割り当て {: #assign_roletogroup} ユーザーや API キーを一群のデバイスだけに制限するには、ユーザーや API キーに「役割とグループ」のペアを割り当てる必要があります。 リソース・グループが設定されていないユーザーや API キーは、組織内の全デバイスを無制限で管理できます。 「役割とグループ」のペアを使用する場合、API キーに設定できる役割は 1 つだけになります。 rolesToGroups で指定する役割と roles で指定する役割は一致していなければなりません。 ユーザーに「役割とグループ」のペアを割り当てるには、以下の API を使用します。 PUT /api/v0002/authorization/users/{userUid}/roles { "roles": [ "PD_ADMIN_USER" ], "rolesToGroups": { "PD_ADMIN_USER": [ "groupUID" ] } } API キーに「役割とグループ」のペアを割り当てるには、以下の API を使用します。 PUT /api/v0002/authorization/apikeys/{apikeyUid}/role { "roles": [ "PD_OPERATOR_APP" ], "rolesToGroups": { "PD_OPERATOR_APP": [ "groupUID" ] } } ## リソース・グループのデバイスの追加と削除 {: #add_device} 「役割とグループ」のペアが設定されているユーザーや API キーがデバイスを管理するには、そのユーザーや API キーに割り当てられているリソース・グループのメンバーとして対象のデバイスを追加しなければなりません。 リソース・グループにデバイスを追加する時には、デバイスの追加先のグループを要求のパスで指定し、追加するデバイスを要求の本体で指定する必要があります。 リソース・グループに複数のデバイスを同時に追加するには、以下の API を使用します。 PUT /api/v0002/bulk/devices/{groupId}/add [ { "typeId":"{typeUid}", "deviceId":"{deviceUid}" } ] リソース・グループからデバイスを削除する時には、グループを要求のパスで指定し、デバイスを要求の本体で指定します。 リソース・グループから複数のデバイスを除去するには、以下の API を使用します。 PUT /api/v0002/bulk/devices/{groupUid}/remove [ { "typeId":"{typeUid}", "deviceId":"{deviceUid}" } ] 要求のスキーマと応答の詳細については、[{{site.data.keyword.iot_short_notm}}アクセス制御 API の資料 ![外部リンク・アイコン](../../../icons/launch-glyph.svg 
"外部リンク・アイコン")](https://docs.internetofthings.ibmcloud.com/apis/swagger/v0002-beta/security-subjects-beta.html){: new_window} を参照してください。 ## リソース・レベル・アクセス制御の有効化 {: #RLAC_enable} 組織でリソース・レベル・アクセス制御を有効にすると、ユーザーや API キーを割り当て先のリソース・グループだけに制限できます。 リソース・レベル・アクセス制御を使用するには、以下の API を使用して、組織レベルの構成フラグを有効にしなければなりません。 PUT /api/v0002/accesscontrol { "enable": true } ## リソース・グループの検索 {: #find_group} リソース・グループに検索タグを関連付けることができます。 以下の API によって、検索タグを使用してリソース・グループの詳細を取得できます。 GET /api/v0002/groups この API によって、使用した検索タグに関連するリソース・グループが返されます。 検索タグを指定しない場合は、すべてのリソース・グループが返されます。 ユーザーに割り当てられているリソース・グループの固有の ID を検索するには、以下の API を使用します。 GET /api/v0002/authorization/users/{userUid} API キーに割り当てられているリソース・グループの固有の ID を検索するには、以下の API を使用します。 GET /api/v0002/authorization/apikeys/{apikeyUid} ## リソース・グループの照会 {: #query_group} リソース・グループ内のすべてのデバイスの全プロパティー、グループ内のすべてのデバイスの固有 ID、リソース・グループのプロパティーを返すために、さまざまなパラメーターを使用してリソース・グループを照会できます。 指定のリソース・グループ内のすべてのデバイスの完全なプロパティーを返すには、以下の API を使用します。 GET /api/v0002/bulk/devices/{groupUid} リソース・グループのメンバーの固有 ID のみを返すには、以下の API を使用します。 GET /api/v0002/bulk/devices/{groupUid}/ids リソース・グループのプロパティー (パスで指定されている名前、説明、検索タグ、固有 ID など) を返すには、以下の API を使用します。 GET /api/v0002/groups/{groupUid} この API では、リソース・グループのメンバー・リストは返されません。 ## グループ・プロパティーの更新 {: #update_group} グループのプロパティーを更新するには、以下の API を使用します。 PUT /api/v0002/groups/{groupId}
Markdown
UTF-8
12,719
2.53125
3
[ "CC-BY-4.0", "MIT" ]
permissive
--- title: CThreadPool 类 ms.date: 11/04/2016 f1_keywords: - CThreadPool - ATLUTIL/ATL::CThreadPool - ATLUTIL/ATL::CThreadPool::CThreadPool - ATLUTIL/ATL::CThreadPool::AddRef - ATLUTIL/ATL::CThreadPool::GetNumThreads - ATLUTIL/ATL::CThreadPool::GetQueueHandle - ATLUTIL/ATL::CThreadPool::GetSize - ATLUTIL/ATL::CThreadPool::GetTimeout - ATLUTIL/ATL::CThreadPool::Initialize - ATLUTIL/ATL::CThreadPool::QueryInterface - ATLUTIL/ATL::CThreadPool::QueueRequest - ATLUTIL/ATL::CThreadPool::Release - ATLUTIL/ATL::CThreadPool::SetSize - ATLUTIL/ATL::CThreadPool::SetTimeout - ATLUTIL/ATL::CThreadPool::Shutdown helpviewer_keywords: - CThreadPool class ms.assetid: 06683718-01b9-413c-9481-2dc1734ec70f ms.openlocfilehash: 12b28cd4f54fa426bb6ad2b2710d62b426ada2b6 ms.sourcegitcommit: 1f009ab0f2cc4a177f2d1353d5a38f164612bdb1 ms.translationtype: MT ms.contentlocale: zh-CN ms.lasthandoff: 07/27/2020 ms.locfileid: "87226537" --- # <a name="cthreadpool-class"></a>CThreadPool 类 此类提供处理工作项队列的工作线程池。 ## <a name="syntax"></a>语法 ``` template <class Worker, class ThreadTraits = DefaultThreadTraits> class CThreadPool : public IThreadPoolConfig ``` #### <a name="parameters"></a>参数 *工人*<br/> 类符合[工作原型](../../atl/reference/worker-archetype.md),后者提供用于处理线程池上排队的工作项的代码。 *ThreadTraits*<br/> 提供用于在池中创建线程的函数的类。 ## <a name="members"></a>成员 ### <a name="public-constructors"></a>公共构造函数 |名称|说明| |----------|-----------------| |[CThreadPool::CThreadPool](#cthreadpool)|线程池的构造函数。| |[CThreadPool:: ~ CThreadPool](#dtor)|线程池的析构函数。| ### <a name="public-methods"></a>公共方法 |“属性”|说明| |----------|-----------------| |[CThreadPool:: AddRef](#addref)|`IUnknown::AddRef` 的实现。| |[CThreadPool::GetNumThreads](#getnumthreads)|调用此方法可获取池中的线程数。| |[CThreadPool::GetQueueHandle](#getqueuehandle)|调用此方法以获取用于对工作项进行排队的 IO 完成端口的句柄。| |[CThreadPool:: GetSize](#getsize)|调用此方法可获取池中的线程数。| |[CThreadPool::GetTimeout](#gettimeout)|调用此方法以获取线程池等待线程关闭的最长时间(以毫秒为单位)。| |[CThreadPool:: Initialize](#initialize)|调用此方法以初始化线程池。| |[CThreadPool:: 
QueryInterface](#queryinterface)|`IUnknown::QueryInterface` 的实现。| |[CThreadPool::QueueRequest](#queuerequest)|调用此方法可将工作项排队,以便由池中的线程进行处理。| |[CThreadPool:: Release](#release)|`IUnknown::Release` 的实现。| |[CThreadPool:: SetSize](#setsize)|调用此方法可设置池中的线程数。| |[CThreadPool:: SetTimeout](#settimeout)|调用此方法以设置线程池等待线程关闭的最长时间(以毫秒为单位)。| |[CThreadPool:: Shutdown](#shutdown)|调用此方法可关闭线程池。| ## <a name="remarks"></a>备注 缓冲池时,将创建并销毁池中的线程。 将在池中每个工作线程的堆栈上创建类*辅助角色*的实例。 每个实例将在线程的生存期内生存。 在创建线程后立即开始, *Worker* `Initialize` 将在与该线程关联的对象上调用 Worker::。 紧跟在析构之前,将调用*Worker*:: `Terminate` 。 两种方法都必须接受 **`void`** <strong>\*</strong> 参数。 通过[CThreadPool:: Initialize](#initialize)的*pvWorkerParam*参数将此参数的值传递给线程池。 当队列中有工作项并且辅助线程可用于工作时,工作线程将从队列中拉取一个项,并 `Execute` 为该线程调用*worker*对象的方法。 然后,将三个项传递给方法:队列中的项、 `pvWorkerParam` 传递给*worker*:: `Initialize` 和*worker*::的项, `Terminate` 以及指向用于 IO 完成端口队列的[重叠](/windows/win32/api/minwinbase/ns-minwinbase-overlapped)结构的指针。 *辅助*类通过提供 Typedef, *worker*::来声明将在线程池上排队的项的类型 `RequestType` 。 此类型必须能够与 ULONG_PTR 强制转换。 *辅助角色*类的一个示例是[CNonStatelessWorker 类](../../atl/reference/cnonstatelessworker-class.md)。 ## <a name="inheritance-hierarchy"></a>继承层次结构 `IUnknown` [IThreadPoolConfig](../../atl/reference/ithreadpoolconfig-interface.md) `CThreadPool` ## <a name="requirements"></a>要求 **标头:** atlutil ## <a name="cthreadpooladdref"></a><a name="addref"></a>CThreadPool:: AddRef `IUnknown::AddRef` 的实现。 ``` ULONG STDMETHODCALLTYPE AddRef() throw(); ``` ### <a name="return-value"></a>返回值 始终返回1。 ### <a name="remarks"></a>备注 此类不使用引用计数实现生存期控制。 ## <a name="cthreadpoolcthreadpool"></a><a name="cthreadpool"></a>CThreadPool::CThreadPool 线程池的构造函数。 ``` CThreadPool() throw(); ``` ### <a name="remarks"></a>备注 将超时值初始化为 ATLS_DEFAULT_THREADPOOLSHUTDOWNTIMEOUT。 默认时间为36秒。 如有必要,你可以在包括 atlutil 之前为此符号定义你自己的正整数值。 ## <a name="cthreadpoolcthreadpool"></a><a name="dtor"></a>CThreadPool:: ~ CThreadPool 线程池的析构函数。 ``` ~CThreadPool() throw(); ``` ### <a name="remarks"></a>备注 调用[CThreadPool:: Shutdown](#shutdown)。 ## <a 
name="cthreadpoolgetnumthreads"></a><a name="getnumthreads"></a>CThreadPool::GetNumThreads 调用此方法可获取池中的线程数。 ``` int GetNumThreads() throw(); ``` ### <a name="return-value"></a>返回值 返回池中的线程数。 ## <a name="cthreadpoolgetqueuehandle"></a><a name="getqueuehandle"></a>CThreadPool::GetQueueHandle 调用此方法以获取用于对工作项进行排队的 IO 完成端口的句柄。 ``` HANDLE GetQueueHandle() throw(); ``` ### <a name="return-value"></a>返回值 返回队列句柄,如果尚未初始化线程池,则返回 NULL。 ## <a name="cthreadpoolgetsize"></a><a name="getsize"></a>CThreadPool:: GetSize 调用此方法可获取池中的线程数。 ``` HRESULT STDMETHODCALLTYPE GetSize(int* pnNumThreads) throw(); ``` ### <a name="parameters"></a>参数 *pnNumThreads*<br/> 弄成功接收池中的线程数的变量的地址。 ### <a name="return-value"></a>返回值 如果成功,则返回 S_OK; 否则返回错误 HRESULT。 ## <a name="cthreadpoolgettimeout"></a><a name="gettimeout"></a>CThreadPool::GetTimeout 调用此方法以获取线程池等待线程关闭的最长时间(以毫秒为单位)。 ``` HRESULT STDMETHODCALLTYPE GetTimeout(DWORD* pdwMaxWait) throw(); ``` ### <a name="parameters"></a>参数 *pdwMaxWait*<br/> 弄成功时的变量地址,接收线程池等待线程关闭的最长时间(以毫秒为单位)。 ### <a name="return-value"></a>返回值 如果成功,则返回 S_OK; 否则返回错误 HRESULT。 ### <a name="remarks"></a>备注 如果没有为该方法提供其他值, [CThreadPool:: Shutdown](#shutdown)将使用此超时值。 ## <a name="cthreadpoolinitialize"></a><a name="initialize"></a>CThreadPool:: Initialize 调用此方法以初始化线程池。 ``` HRESULT Initialize( void* pvWorkerParam = NULL, int nNumThreads = 0, DWORD dwStackSize = 0, HANDLE hCompletion = INVALID_HANDLE_VALUE) throw(); ``` ### <a name="parameters"></a>参数 *pvWorkerParam*<br/> 要传递到工作线程对象的 `Initialize` 、和方法的辅助参数 `Execute` `Terminate` 。 *nNumThreads*<br/> 池中请求的线程数。 如果*nNumThreads*为负,则其绝对值将乘以计算机中的处理器数,以获取线程的总数。 如果*nNumThreads*为零,ATLS_DEFAULT_THREADSPERPROC 将乘以计算机中的处理器数,以获取线程的总数。 默认值为每个处理器2个线程。 如有必要,你可以在包括 atlutil 之前为此符号定义你自己的正整数值。 *dwStackSize*<br/> 池中每个线程的堆栈大小。 *hCompletion*<br/> 要与完成端口关联的对象的句柄。 ### <a name="return-value"></a>返回值 如果成功,则返回 S_OK; 否则返回错误 HRESULT。 ## <a name="cthreadpoolqueryinterface"></a><a name="queryinterface"></a>CThreadPool:: QueryInterface `IUnknown::QueryInterface` 的实现。 ``` 
HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void** ppv) throw(); ``` ### <a name="remarks"></a>备注 可以为 `IUnknown` 和[IThreadPoolConfig](../../atl/reference/ithreadpoolconfig-interface.md)接口成功查询此类的对象。 ## <a name="cthreadpoolqueuerequest"></a><a name="queuerequest"></a>CThreadPool::QueueRequest 调用此方法可将工作项排队,以便由池中的线程进行处理。 ``` BOOL QueueRequest(Worker::RequestType request) throw(); ``` ### <a name="parameters"></a>参数 *请求*<br/> 要排队的请求。 ### <a name="return-value"></a>返回值 如果成功,则返回 TRUE,否则返回 FALSE。 ### <a name="remarks"></a>备注 此方法将工作项添加到队列。 池中的线程将按接收项的顺序从队列中选取项。 ## <a name="cthreadpoolrelease"></a><a name="release"></a>CThreadPool:: Release `IUnknown::Release` 的实现。 ``` ULONG STDMETHODCALLTYPE Release() throw(); ``` ### <a name="return-value"></a>返回值 始终返回1。 ### <a name="remarks"></a>备注 此类不使用引用计数实现生存期控制。 ## <a name="cthreadpoolsetsize"></a><a name="setsize"></a>CThreadPool:: SetSize 调用此方法可设置池中的线程数。 ``` HRESULT STDMETHODCALLTYPE SetSizeint nNumThreads) throw(); ``` ### <a name="parameters"></a>参数 *nNumThreads*<br/> 池中请求的线程数。 如果*nNumThreads*为负,则其绝对值将乘以计算机中的处理器数,以获取线程的总数。 如果*nNumThreads*为零,ATLS_DEFAULT_THREADSPERPROC 将乘以计算机中的处理器数,以获取线程的总数。 默认值为每个处理器2个线程。 如有必要,你可以在包括 atlutil 之前为此符号定义你自己的正整数值。 ### <a name="return-value"></a>返回值 如果成功,则返回 S_OK; 否则返回错误 HRESULT。 ### <a name="remarks"></a>备注 如果指定的线程数小于池中当前的线程数,则该对象会将关闭消息放在队列上以由等待线程选取。 当正在等待的线程从队列中提取消息时,它会通知线程池并退出线程过程。 此过程将重复进行,直到池中的线程数达到指定的数字或在[GetTimeout](#gettimeout)SetTimeout 指定的时间段内未退出任何线程为止 / [SetTimeout](#settimeout)。 在这种情况下,该方法将返回一个与 WAIT_TIMEOUT 相对应的 HRESULT,并取消挂起的关闭消息。 ## <a name="cthreadpoolsettimeout"></a><a name="settimeout"></a>CThreadPool:: SetTimeout 调用此方法以设置线程池等待线程关闭的最长时间(以毫秒为单位)。 ``` HRESULT STDMETHODCALLTYPE SetTimeout(DWORD dwMaxWait) throw(); ``` ### <a name="parameters"></a>参数 *dwMaxWait*<br/> 线程池等待线程关闭所需的最长时间(以毫秒为单位)。 ### <a name="return-value"></a>返回值 如果成功,则返回 S_OK; 否则返回错误 HRESULT。 ### <a name="remarks"></a>备注 超时初始化为 ATLS_DEFAULT_THREADPOOLSHUTDOWNTIMEOUT。 默认时间为36秒。 如有必要,你可以在包括 atlutil 之前为此符号定义你自己的正整数值。 
请注意, *dwMaxWait*是池等待单个线程关闭的时间。 从池中删除多个线程所需的最长时间可能略微小于*dwMaxWait*乘以线程数。 ## <a name="cthreadpoolshutdown"></a><a name="shutdown"></a>CThreadPool:: Shutdown 调用此方法可关闭线程池。 ```cpp void Shutdown(DWORD dwMaxWait = 0) throw(); ``` ### <a name="parameters"></a>参数 *dwMaxWait*<br/> 线程池等待线程关闭所需的最长时间(以毫秒为单位)。 如果未提供任何值,则此方法将使用[CThreadPool:: SetTimeout](#settimeout)设置的超时值。 ### <a name="remarks"></a>备注 此方法将关闭请求发送到池中的所有线程。 如果超时时间已到,则此方法将在任何未退出的线程上调用[TerminateThread](/windows/win32/api/processthreadsapi/nf-processthreadsapi-terminatethread) 。 此方法是从类的析构函数自动调用的。 ## <a name="see-also"></a>另请参阅 [IThreadPoolConfig 接口](../../atl/reference/ithreadpoolconfig-interface.md)<br/> [DefaultThreadTraits](atl-typedefs.md#defaultthreadtraits)<br/> [类](../../atl/reference/atl-classes.md)
TypeScript
UTF-8
3,560
2.53125
3
[]
no_license
// IPC benchmark harness (parent side): forks child.ts, wires request/response
// streams over process messaging, then hammers the child with concurrent
// setInterval-driven requests for 60s and reports sent/received counts.
import { Subject } from "rxjs";
import IPCMessageRequestModel from "../../../shared-network/ipc/models/IPCMessageRequestModel";
import IPCMessageResponseModel from "../../../shared-network/ipc/models/IPCMessageResponseModel";
import { fork } from "child_process";
import { forkOptions } from "../../../shared-network/ipc/config/IPCForkSettings";
import onProcessMessageReceivedHandler from "../../../shared-network/ipc/functions/onProcessMessageReceivedHandler";
import createIPCSender from "../../../shared-network/ipc/functions/createIPCSender";
import { childRouter } from "./childRouter";
import { processSenderType } from "../../../shared-network/ipc/models/types/processSenderType";
import bindIPCRequestReceiverHandler from "../../../shared-network/ipc/functions/bindIPCRequestReceiverHandler";
import wait from "../../../functions/wait";

// NOTE(review): forkOptions and bindIPCRequestReceiverHandler are imported but
// never used below — confirm whether they are meant to be passed to fork()/wired up.
const requestStream = new Subject<IPCMessageRequestModel>();
const responseStream = new Subject<IPCMessageResponseModel>();

const child = fork("child.ts");
child.on("exit", code => {
    console.log("Exited with a code", code);
});

//setInterval(() => { console.log("child listener count", child.listeners.length) },50)
//let processSenderCount = 0;

// Sends one message to the child process, resolving when Node has accepted it
// and rejecting with the send error otherwise. child.send is short-circuited
// because it is undefined when no IPC channel exists.
const processSender: processSenderType = message => {
    return new Promise((resolve, reject) => {
        child.send && child.send(message, error => {
            //console.log("sent message count:", processSenderCount++);
            if (error) {
                console.log(error);
                reject(error);
            } else {
                resolve();
            }
        });
    });
};

// let processMessageCount = 0;
// process.on("message", () => {
//     console.log("process message count", ++processMessageCount)
// })

//initializeMessageDistributor.call(child,child,requestStream,responseStream)

// Proxy object whose methods (multiplyByTwo, someAsyncFunction, ...) are routed
// to the child via processSender and resolved from responseStream.
const ipcSender = createIPCSender(childRouter, responseStream, processSender);

// Feed every raw message from the child into the request/response streams.
child.on("message", message => {
    onProcessMessageReceivedHandler(requestStream, responseStream, message);
});

// Sequential latency benchmark: 999 round-trips, one at a time, with hrtime
// measurements. NOTE(review): defined but never invoked — dead code or a
// manually-toggled experiment; confirm intent.
async function asyncScope() {
    for (let i = 1; i < 1000; i++) {
        const start = process.hrtime();
        const result = await ipcSender.multiplyByTwo(i);
        const result2 = await ipcSender.someAsyncFunction(i).catch(console.log);
        const end = process.hrtime(start);
        console.log(`Seconds: ${end[0]}, Nanoseconds: ${end[1]}, iteration: ${i}, result: ${result} result2: ${result2}`);
        //console.log(i)
        //await wait(1)
    }
    console.log("end");
}

// Per-interval counters, keyed 1..15: requests sent vs responses received.
const sentNumberObject = {
    1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0,
    9: 0, 10: 0, 11: 0, 12: 0, 13: 0, 14: 0, 15:0
}

const numberObject = {
    1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0,
    9: 0, 10: 0, 11: 0, 12: 0, 13: 0, 14: 0, 15:0
}

// Starts a 0ms-interval loop labelled `number` that repeatedly sends a request
// and tallies send/receive counts; returns the interval handle for cleanup.
function loop2(number: number) {
    return setInterval(async () => {
        console.log("called from number", number)
        sentNumberObject[number]++
        const start = process.hrtime();
        const result = await ipcSender.multiplyByTwo(2);
        const end = process.hrtime(start);
        numberObject[number]++
        console.log(`Seconds: ${end[0]}, Nanoseconds: ${end[1]} number: ${number}`);
    }, 0);
}

// Main driver: wait 5s for the child to boot, run 15 concurrent request loops
// for 60s, stop them, let in-flight responses drain 100ms, then print totals.
wait(5000).then(async () => {
    const intervals = []
    for (let i = 1; i <= 15; i++){
        intervals.push(loop2(i) as never)
    }
    await wait(60000)
    intervals.forEach(i => clearInterval(i))
    await wait(100)
    console.log(Object.values(numberObject).reduce<number>((acc,number)=>acc+number,0))
    console.log("Received", JSON.stringify(numberObject))
    console.log("Sent    ", JSON.stringify(sentNumberObject))
    process.exit(0)
});
C
UTF-8
2,121
2.59375
3
[]
no_license
/* ************************************************************************** */ /* */ /* ::: :::::::: */ /* ft_width_for_int.c :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: amace-ty <marvin@42.fr> +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2019/04/05 17:12:05 by amace-ty #+# #+# */ /* Updated: 2019/10/12 13:13:57 by amace-ty ### ########.fr */ /* */ /* ************************************************************************** */ #include "../inc/ft_printf.h" static char *ft_width_for_mz_2(char *str, char *new_str, t_print *new, char *hoba) { int i; free(hoba); i = -1; if (new->flag_accuracy && new->accuracy < (int)ft_strlen(str)) ft_memset(new_str, ' ', (size_t)new->width); else ft_memset(new_str, '0', (size_t)new->width); if (new->flag_otr_i || new->flag_plus || new->flag_space) { new_str[0] = str[0]; while (ft_strlen(str) - ++i > 0) new_str[new->width - i] = str[ft_strlen(str) - i]; } else { while ((int)ft_strlen(str) - ++i >= 0) new_str[new->width - i] = str[ft_strlen(str) - i]; } return (new_str); } char *ft_width_for_minus_and_zero(char *str, t_print *new) { char *new_str; int i; char *hoba; INT_I; new_str = ft_strnew((size_t)new->width); if (new->flag_minus) { ft_memset(new_str, ' ', (size_t)new->width); while (++i < (int)ft_strlen(str)) new_str[i] = str[i]; } else { if (new->flag_accuracy && new->accuracy > (int)ft_strlen(hoba)) { ft_memset(new_str, ' ', (size_t)new->width); while ((int)ft_strlen(str) - ++i >= 0) new_str[new->width - i] = str[ft_strlen(str) - i]; } else return (ft_width_for_mz_2(str, new_str, new, hoba)); } free(hoba); return (new_str); }
Python
UTF-8
185
3.265625
3
[]
no_license
#!/usr/bin/env python3 x, y = 1, 1 num = 0 for a in range(30): num += 1 if a != 29: print("%7d"%x, end = ' ') else: print("%7d"%x) x, y = y, x + y
Markdown
UTF-8
6,398
3.8125
4
[]
permissive
--- layout: post title: Variance in Scala categories: [Blogging, Scala] tags: [scala, functional programming, variance] seo: date_modified: 2020-03-25 06:51:27 -0300 --- There are different types of polymorphism in Scala. Inheritance, Parametric polymorphism (Generics in Java), etc. We are concerned about Parametric Polymorphism in this post and a classic example would include the containers, `List[+T]`. A List has a type parameter 'T' meaning you could create a list of Ints, Doubles, String, People, Universes. Wait, but what is this little '+' that gets prepended to the type parameter? Enter Variance. ### Variance Prepending with a '+' means it is covariant and with a '-' means contravariant. Let's discuss both them with examples. Imagine a Fruit Orchard. {% highlight scala %} object Orchard { trait Fruit { def name: String } case class Apple(name: String = "Apple") extends Fruit case class Orange(name: String = "Orange") extends Fruit case class Basket[+T](item: T) } {% endhighlight %} For simplicity, let us assume that a basket contains only one item. We will get one apple basket and an orange basket from the Orchard {% highlight scala %} object Community { import Orchard._ def main(args: Array[String]) = { val aBasket = Basket(Apple()) val oBasket = Basket(Orange()) } } {% endhighlight %} Our community have lots of fruit lovers who would love to eat the fruits from their orchard. {% highlight scala %} object Community { case class FruitLover(name: String) { def take(fruitBasket: Basket[Fruit]): Unit = println(name + " ate " + fruitBasket.item.name + " from " + fruitBasket) } def main(args: Array[String]) = { ... FruitLover("Sam").take(aBasket) FruitLover("Frodo").take(oBasket) ... } } {% endhighlight %} The FruitLover's "take" method expects a Basket of Fruit. Now since, we have marked Basket as covariant(`Basket[+T]`) it means, that `Basket[Apple]` or `Basket[Orange]` can be passed for a `Basket[Fruit]`. A extends B and a function asks for a Container[B]. 
If in the problem domain passing `Container[A]` for `Container[B]` makes sense, then the Container is covariant. ### Contravariance We noticed that the apple in the "aBasket" had gone bad and has to be replaced with another apple. Could occur with any Basket, so let's add a replace method into Basket. {% highlight scala %} case class Basket[+T](item: T) { def replace(another: T): Basket[T] = this.copy(item=another) //compile fails } {% endhighlight %} The compiler throws weird error saying that `covariant type T appears in a contravariant position`. *Why does this occur?* Scala being a pure object oriented language stores functions as object as well. Single argument functions are represented as `trait Function1[-A, +B]`. A refers to the argument type and B the return type. Why is A contravariant and B covariant? ### Function Subtyping Just like how we defined subtyping of Basket, we also need to define subtyping for functions. Functions being first-class in Scala, can be passed as arguments to functions. Thus a subtype of a function refers to those functions that could be substituted instead of this. From the definition, subtype of single argument functions include those functions, whose return value is a subtype however, "the argument is a supertype". Let us extend our example of Basket to have a method "makeJam" that takes a recipe and applies it to the Basket's contents. {% highlight scala %} case class Basket[+T](item: T) { def makeJam(recipe: T => Jam) = recipe(item) } {% endhighlight %} We have a RecipeStore that has a collection of recipes to make apple and orange jam. {% highlight scala %} object RecipeStore { val appleJamRecipe = (apple: Apple) => Jam(apple.name) //Just a dummy jam-making function. val orangeJamRecipe = (oranges: Orange) => Jam(oranges.name) } def main(args: Array[String]): Unit = { import RecipeStore._ aBasket.makeJam(appleJamRecipe) } {% endhighlight %} Now, our RecipeStore also has a magic recipe to make Jam out of any fruit. 
{% highlight scala %} val fruitJamRecipe = (fruit: Fruit) => Jam(fruit.name) {% endhighlight %} Meanwhile, we have added a Fuji apple variety into our Orchard and the RecipeStore has a specific recipe for preparing FujiApple Jam. {% highlight scala %} case class FujiApple(override val name: String = "FujiApple") extends Apple val fujiAppleJamRecipe = (fuji: FujiApple) => Jam(fuji.name) {% endhighlight %} Now, it should be possible to apply the generic "fruitJamRecipe" to our apple basket. However, our "fujiAppleJamRecipe" is specific to Fuji apples and cannot be used for preparing jam from any apple. {% highlight scala %} aBasket.makeJam(appleJamRecipe) //passes aBasket.makeJam(fruitJamRecipe) //passes aBasket.makeJam(fujiAppleJamRecipe) //compile fails {% endhighlight %} This is exactly why the argument in Function1 was contravariant[-A]. If someone expects, a function from `Apple => Jam`, it should be possible to pass `Fruit => Jam`, but not `FujiApple => Jam`. A and supertypes of A could be passed. In Function1, the return value type should be covariant `+R`, because we expect to access some members/features of the returned value, which means that it should be (or extend) R. Lets get back to the cryptic error of covariant in contravariant position. {% highlight scala %} case class Basket[+T](item: T) { def replace(another: T): Basket[T] = this.copy(item=another) //compile fails } {% endhighlight %} So, in order to achieve function subtyping, we saw that arg was contravariant `[A-]`. However, in the context of `Basket[+T]`, T is covariant. Thus, the error. There is another type of variance called Invariance which means the container is neither covariant nor contravariant. This is the default type of Variance and most of the classes that we create are generally Invariant. So, we saw about the three types of variance in Scala and how to identify them in your problem space. If `A extends B`, and 1. 
`Container[A]` could be substituted for `Container[B]`, then the Container is covariant. 2. `Container[B]` could be substituted for `Container[A]`, then the Container is contravariant. 3. Else, if neither relation holds, then it is invariant. Variance is an idea borrowed from the mathematical field of category theory. We will talk more about that in the subsequent post.
C++
UTF-8
1,130
3.765625
4
[ "MIT" ]
permissive
// // Brief: Same as chapter5_4, only with 3 years of sales data and a 2D array // Topics: loops, 2d arrays // #include <iostream> int main () { using namespace std; const string months[12] = { "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" }; int sales[3][12]; const int year = 2014; // three years of sales data for (int z = 0; z < 3; z++) { for (int i = 0; i < 12; i++) { cout << "Enter the sales for " << months[i] << " (" << (year+z) << "): "; cin >> sales[z][i]; } } // determine final sales by iterating through the sales array cout << "***************************************" << endl; int sumCombined = 0; for (int z = 0; z < 3; z++) { int yearlySum = 0; for (int i = 0; i < 12; i++) { sumCombined += sales[z][i]; yearlySum += sales[z][i]; } cout << "Total number of books sold for " << (year+z) << ": " << yearlySum << endl; } cout << "Total number of books sold for all years: " << sumCombined << endl; return 0; }
C
UTF-8
11,551
3.3125
3
[ "Apache-2.0" ]
permissive
/** Functions for starting a subprocess and communicating with it. */

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

#include "capabilities.h"
#include "subprocess.h"


#define MAX_ERROR_OUTPUT_SIZE 1000


/* Both ends of the two pipes used to talk to the child:
 * parent_out -> child_in carries the child's stdin,
 * child_out -> parent_in carries the child's stdout+stderr (merged). */
typedef struct {
    int parent_out, child_in;
    int child_out, parent_in;
} io_pipes_t;


/** Create pipes for communicating with an external subprocess.
 *
 * @param pipes An io_pipes_t object that will be initialised to contain two
 *              pipes. Must be allocated, will be overwritten.
 * @return 0 on success, -1 on error.
 */
static int create_pipes(io_pipes_t * pipes) {
    int fds[2];

    if (pipe(fds) != 0) {
        perror("Allocating first pipe");
        goto exit_0;
    }
    pipes->parent_out = fds[1];
    pipes->child_in = fds[0];

    if (pipe(fds) != 0) {
        perror("Allocating second pipe");
        goto exit_1;
    }
    pipes->child_out = fds[1];
    pipes->parent_in = fds[0];
    return 0;

exit_1:
    /* First pipe was created; undo it before reporting failure. */
    close(pipes->parent_out);
    close(pipes->child_in);
exit_0:
    return -1;
}


/** Set up pipes on the child process side.
 *
 * Reconfigures standard in, out and error to use the given pipes to
 * communicate, and closes the unused fds as appropriate.
 *
 * @param pipes Pipes to use to communicate with the parent.
 * @return 0 on success, -1 on error.
 */
static int setup_pipes_for_child(io_pipes_t const * pipes) {
    int ret = 0;

    if (dup2(pipes->child_in, STDIN_FILENO) < 0) {
        perror("Redirecting stdin");
        goto exit_0;
    }

    if (dup2(pipes->child_out, STDOUT_FILENO) < 0) {
        perror("Redirecting stdout");
        goto exit_0;
    }

    /* stdout and stderr share one pipe, so the parent reads them merged. */
    if (dup2(pipes->child_out, STDERR_FILENO) < 0) {
        perror("Redirecting stderr");
        goto exit_0;
    }

    /* The dup2'd copies remain; close all four original descriptors so the
     * parent sees EOF correctly once the child exits. Close failures are
     * reported but do not abort. */
    if (close(pipes->parent_out) != 0) {
        perror("Closing pipe in child");
        ret = -1;
    }

    if (close(pipes->child_in) != 0) {
        perror("Closing pipe in child");
        ret = -1;
    }

    if (close(pipes->child_out) != 0) {
        perror("Closing pipe in child");
        ret = -1;
    }

    if (close(pipes->parent_in) != 0) {
        perror("Closing pipe in child");
        ret = -1;
    }

    return ret;

exit_0:
    return -1;
}


/** Set up pipes on the parent process side.
 *
 * This closes unused fds as appropriate.
 *
 * @param pipes Pipes to use to communicate with the child.
 * @return 0 on success, -1 on error.
 */
static int setup_pipes_for_parent(io_pipes_t const * pipes) {
    int ret = 0;

    /* Close the child's ends so reads on parent_in see EOF when the child
     * exits. */
    if (close(pipes->child_in) != 0) {
        perror("Closing pipe in parent");
        ret = -1;
    }

    if (close(pipes->child_out) != 0) {
        perror("Closing pipe in parent");
        ret = -1;
    }

    return ret;
}


/** Write a data buffer to a file descriptor.
 *
 * Loops until all `len` bytes are written, handling short writes.
 *
 * @param fd The file descriptor to write to.
 * @param data The data to write.
 * @param len Length of the data to write.
 * @return 0 on success, -1 on error, in which case errno will be set.
 */
static int write_all(int fd, char const * data, ssize_t len) {
    ssize_t num_written;

    while (len > 0) {
        num_written = write(fd, data, len);
        if (num_written < 0) return -1;
        data += num_written;
        len -= num_written;
    }
    return 0;
}


/** Read data from a file descriptor until it closes.
 *
 * @param fd The file descriptor to read from
 * @param buffer (out) The buffer the data has been written to. This must be
 *               freed by the caller using free(). Will be unchanged in case of
 *               error.
 * @param size The number of chars of data in the buffer. Will be unchanged in
 *             case of error.
 * @return 0 on success, -1 on failure.
 */
static int read_all(int fd, const char ** buffer, ssize_t * size) {
    // Note: chunk_size must be larger than the size of any sensitive output
    // the called program may print on standard out/error, to avoid realloc()
    // spraying partial copies of it all over RAM. Currently, we only have
    // Wireguard private keys, which are 44 bytes, so 1k is fine.
    const ssize_t chunk_size = 1024;
    ssize_t num_read, total_read = 0;
    char *buf = NULL;
    ssize_t buf_size = 0;

    do {
        /* Grow the buffer only when it is full. */
        if (buf_size == total_read) {
            buf = (char*)realloc(buf, buf_size + chunk_size);
            if (buf == NULL) {
                perror("When reading external program stdout/err");
                goto exit_0;
            }
            buf_size += chunk_size;
        }
        num_read = read(fd, buf + total_read, buf_size - total_read);
        if (num_read < 0) {
            perror("Reading from external program stdout/err");
            goto exit_0;
        }
        total_read += num_read;
    } while (num_read > 0);  /* read() returns 0 on EOF */

    *buffer = buf;
    *size = total_read;
    return 0;

exit_0:
    free(buf);
    return -1;
}


/** Run a command and optionally communicate with it.
 *
 * Warning: if you may get more than 1kB of sensitive data on standard out
 * and/or standard error, see the comment at read_all().
 *
 * @param filename The file to execute.
 * @param argv Arguments to pass (may be NULL).
 * @param env Environment variables to set (may be NULL).
 * @param in_buf Data to send to stdin (may be NULL).
 * @param in_size Length of input data.
 * @param exit_code (out) The exit code of the process run, 255 if there was an
 *                  error starting it, or -signal if it was terminated with a
 *                  signal.
 * @param out_buf (out) Pointer to a buffer with output received from the
 *                command. This buffer will be allocated by this function, and
 *                must be freed using free() by the caller after use. Must not
 *                be NULL (it is assigned to unconditionally on success).
 * @param out_size (out) Pointer to a variable to store the number of chars in
 *                 the output buffer in. Must not be NULL.
 * @return 0 on success, -1 on error.
 */
int run(
        const char * filename, const char * const argv[],
        const char * const env[], const char * in_buf, ssize_t in_size,
        int * exit_code, const char ** out_buf, ssize_t * out_size)
{
    io_pipes_t pipes;
    int child_pid;
    const char *none = NULL;    /* shared NULL terminator for empty argv/env */
    const char * out_buf_ = NULL;
    ssize_t out_size_ = 0;
    int status;
    int ret = 0;

    if (create_pipes(&pipes) != 0) return -1;

    child_pid = fork();
    if (child_pid == 0) {
        // child
        if (setup_pipes_for_child(&pipes) != 0) exit(255);
        /* Substitute a one-element NULL-terminated array for missing
         * argv/env. */
        if (argv == NULL) argv = &none;
        if (env == NULL) env = &none;
        if (set_ambient_capabilities() != 0) {
            exit(255);
        }
        // execve() is mistyped for backward compatibility,
        // so need to const-cast here.
        execve(filename, (char * const *)argv, (char * const *)env);
        /* execve only returns on error. */
        perror("Executing command");
        exit(255);
    }
    else if (child_pid > 0) {
        // parent
        if (setup_pipes_for_parent(&pipes) != 0) {
            goto exit_0;
        }

        if (in_buf != NULL) {
            if (write_all(pipes.parent_out, in_buf, in_size) != 0) {
                perror("Writing to stdin in parent");
                ret = -1;
            }
        }
        /* Close stdin even if nothing was written, so the child sees EOF. */
        if (close(pipes.parent_out) != 0) {
            perror("Closing stdin pipe in parent");
            ret = -1;
        };

        if (out_buf != NULL) {
            if (read_all(pipes.parent_in, &out_buf_, &out_size_) != 0) {
                fprintf(stderr, "Error reading from stdout/stderr");
                ret = -1;
            }
        }
        if (close(pipes.parent_in) != 0) {
            perror("Closing stdout/err pipe in parent");
            ret = -1;
        }

        /* NOTE(review): waitpid()'s return value is not checked; if it fails
         * (e.g. EINTR), or the child neither exited nor was signalled,
         * *exit_code is left unmodified — callers should initialise it.
         * Verify whether that is acceptable for all call sites. */
        waitpid(child_pid, &status, 0);
        if (WIFEXITED(status)) *exit_code = WEXITSTATUS(status);
        else if (WIFSIGNALED(status)) *exit_code = -WTERMSIG(status);

        *out_buf = out_buf_;
        *out_size = out_size_;
        return ret;
    }

    // fork error if we get here, try to clean up
exit_0:
    close(pipes.parent_out);
    close(pipes.child_in);
    close(pipes.child_out);
    close(pipes.parent_in);
    return -1;
}


/** Print the output from a called program.
 *
 * At most MAX_ERROR_OUTPUT_SIZE chars are printed; the rest is truncated.
 *
 * @param out_buf The output buffer.
 * @param out_size Size of the output.
 */
void print_error_output(const char * out_buf, ssize_t out_size) {
    char * err_buf;

    if (out_size > MAX_ERROR_OUTPUT_SIZE)
        out_size = MAX_ERROR_OUTPUT_SIZE;

    /* Copy into a NUL-terminated scratch buffer; out_buf itself is not
     * guaranteed to be terminated. Silently prints nothing if malloc fails. */
    err_buf = (char*)malloc(out_size + 1);
    if (err_buf) {
        strncpy(err_buf, out_buf, out_size);
        err_buf[out_size] = '\0';
        fprintf(stderr, "%s", err_buf);
        free(err_buf);
    }
}


/** Run and check that it was successful.
 *
 * If either an error occurs when calling the process, or the process itself
 * exits with a non-zero exit code, an error message and the initial output will
 * be printed on stderr, and the output variables will not be assigned to.
 * Otherwise, the outputs are set and 0 is returned.
 *
 * Warning: if you may get more than 1kB of sensitive data on standard out
 * and/or standard error, see the comment at read_all().
 *
 * @param filename The file to execute.
 * @param argv Arguments to pass (may be NULL).
 * @param env Environment variables to set (may be NULL).
 * @param in_buf Data to send to stdin (may be NULL).
 * @param in_size Length of input data.
 * @param out_buf (out) Pointer to a buffer with output received from the
 *                command. This buffer will be allocated by this function, and
 *                must be freed using free() by the caller after use. If NULL is
 *                passed, any output will be discarded.
 * @param out_size (out) Pointer to a variable to store the number of chars in
 *                 the output buffer in. May be NULL if out_buf is NULL.
 * @return 0 on success, 1 on error.
 */
int run_check(
        const char * filename, const char * const argv[],
        const char * const env[], const char * in_buf, ssize_t in_size,
        const char ** out_buf, ssize_t * out_size)
{
    int exit_code = 0;
    const char * out_buf_ = NULL;
    ssize_t out_size_ = 0l;

    if (
            run(
                filename, argv, env, in_buf, in_size, &exit_code,
                &out_buf_, &out_size_) != 0) {
        fprintf(stderr, "Error running %s:\n", filename);
        goto exit_0;
    }

    if (exit_code != 0) {
        fprintf(stderr, "%s returned an error:\n", filename);
        goto exit_0;
    }

    if (out_buf) {
        *out_buf = out_buf_;
        *out_size = out_size_;
    }
    else
        /* Caller doesn't want the output; release it. */
        free((void*)out_buf_);

    return 0;

exit_0:
    print_error_output(out_buf_, out_size_);
    free((void*)out_buf_);
    return 1;
}


/** Run and check that it was successful.
 *
 * If either an error occurs when calling the process, or the process itself
 * exits with a non-zero exit code, an error message and the initial output will
 * be printed on stderr.
 *
 * This is a simplified version, it doesn't do input or output and always uses
 * an empty environment.
 *
 * @param filename The file to execute.
 * @param argv Arguments to pass (may be NULL).
 * @return 0 on success, 1 on error.
 */
int run_check2(const char * filename, const char * const argv[]) {
    return run_check(filename, argv, NULL, NULL, 0l, NULL, NULL);
}
C
UTF-8
2,410
2.546875
3
[]
no_license
#include <regx51.h>             //we use keil c51
#include "gpio.h"
#include "rtc1.h"               //we use tmr1 for rtc

//use black roman's zero cumulative error approach
//this approach will generate an output pulse whose long-term accuracy depends only on the crystal/clock source
//however, it does generate jitter between clocks: the longer the periods, the bigger the jitter
//as such, it is desirable in those applications that require long-term accuracy
void (*_rtc1_isr_ptr)(void);                //user handler called once per rtc trigger period
unsigned long _rtc1_trigger=RTC_1000ms;     //rtc trigger: timer ticks per rtc period
unsigned long _rtc1_count=0;                //rtc count: count up to rtc1_trigger
volatile RTC_TIME _rtc1 ={                  //global time keeper for tc1, starts at 23:59:58.0, day 0
    0,
    58,
    59,
    23,
    0};
//NOTE(review): declared as _rtc1 but accessed below as rtc1 — presumably
//rtc1.h provides the alias (#define or extern); verify against the header.

//rtc1 error term
//use a positive number if rtc runs too slow;
//use a negative number if rtc runs too fast

//tmr1 overflow ISR: accumulate elapsed ticks; once a full rtc period has
//elapsed, advance the clock and invoke the user handler.
void _rtc1_isr(void) interrupt TF1_VECTOR {
    //clear the flag
    //automatically done by hardware
    _rtc1_count+= 0x10000ul+RTC1_ERROR;         //tmr1 in 16 bit mode: 0x10000 ticks per overflow, plus calibration
    if (_rtc1_count < _rtc1_trigger) return;    //life goes on
    else {
        _rtc1_count-=_rtc1_trigger;             //subtract (not reset) the period -> zero cumulative error
        rtc1_update();                          //update rtc1
        _rtc1_isr_ptr();                        //call the handler
    }
}

//default no-op handler installed by rtc1_init()
void rtc1_empty_ptr(void) {
    //empty ptr
}

//initialize the timer
//prescaler not used - for compatibility reasons only
//16-bit period (tmr1 runs in mode 1; the old "8-bit period" note was stale)
void rtc1_init(unsigned long trigger) {
    TR1=0;                              //turn off the timer
    _rtc1_isr_ptr=rtc1_empty_ptr;       //point isr to the empty handler
    _rtc1_count=0;                      //reset rtc_count
    _rtc1_trigger = trigger;            //assign the trigger
    TMOD = (TMOD & 0x0f) | 0x10;        //rtc1 in mode 1: 16 bit tmr
    TH1=0;                              //set the autoreload period
    TL1=0;                              //reset the timer / counter
    ET1=1;                              //enable rtc1 interrupt
    TR1=1;                              //turn on the timer
}

//set up the isr handler (called from the ISR every trigger period)
void rtc1_act(void (*isr_ptr)(void)) {
    //set up the isr pointer
    _rtc1_isr_ptr=isr_ptr;
}

//advance the time keeper by one half-second, cascading carries through
//sec/min/hour/day
void rtc1_update(void) {
    rtc1.half_sec+=1;                   //increment half_sec indicator
    if (rtc1.half_sec==2) {             //overflown?
        rtc1.half_sec=0;                //reset rtc1.half_sec
        rtc1.sec+=1;                    //increment sec
        if (rtc1.sec==60) {             //overflown?
            rtc1.sec=0;                 //reset rtc1.sec
            rtc1.min+=1;                //increment min
            if (rtc1.min==60) {         //overflown?
                rtc1.min=0;             //reset min
                rtc1.hour+=1;           //increment hour
                if (rtc1.hour==24) {    //overflown?
                    rtc1.hour=0;        //reset hour
                    rtc1.day+=1;        //increment day
                }
            }
        }
    }
}
Python
UTF-8
10,158
2.90625
3
[]
no_license
import requests import datetime import json import time import boto3 import uuid import io import pandas as pd def createBucketName(bucketPrefix): """ Creates and returns a globally unique bucket name from the prefix provided using a uuid4. The generated bucket name must be between 3 and 63 chars long. A uuid4’s string representation is 36 characters long so the prefix provided must be less than 27 chars long. """ return ''.join([bucketPrefix, str(uuid.uuid4())]) def createBucket(bucketPrefix, s3Connection): """ Creates an S3 bucket with a globally unique name made from the prefix provided. The prefix provided must be less than 27 chars long. Returns the boto3 response and the bucket name. """ session = boto3.session.Session() currentRegion = session.region_name bucketName = createBucketName(bucketPrefix) s3Connection.create_bucket( Bucket=bucketName, CreateBucketConfiguration={ 'LocationConstraint': currentRegion}) print("S3 bucket created:", bucketName, currentRegion) return bucketName def fixturesRequest(idLeague): """ Sends a request to API Football to get fixtures for a league using its id. Returns the response as a json object. """ url = "https://api-football-v1.p.rapidapi.com/v2/fixtures/league/" + idLeague querystring = {"timezone": "Europe/London"} headers = { 'x-rapidapi-key': "XXX", # Replace XXX by your API key 'x-rapidapi-host': "api-football-v1.p.rapidapi.com" } response = requests.request("GET", url, headers=headers, params=querystring) return response.json() def statisticsRequest(idFixture): """ Sends a request to API Football to get the statistics for a specified fixture. Returns the response as a json object. 
""" url = "https://api-football-v1.p.rapidapi.com/v2/statistics/fixture/" + str(idFixture) headers = { 'x-rapidapi-key': "XXX", # Replace XXX by your API key 'x-rapidapi-host': "api-football-v1.p.rapidapi.com" } response = requests.request("GET", url, headers=headers) return response.json() def uploadJsonToS3(jsonObject, bucket, s3Connection, prefix, name): """ Uploads json object to S3 by encoding it in utf-8. """ data = json.dumps(jsonObject).encode('UTF-8') # The key has a uuid prefix to avoid partition issue key = ''.join([prefix, str(uuid.uuid4().hex[:6]), '-', name]) s3Connection.put_object(Body=data, Bucket=bucket, Key=key) print(key + ' uploaded into ' + bucket) def uploadCsvToS3(df, bucket, s3Connection, prefix, name): """ Converts a dataframe to a csv, then uploads the csv file directly to S3 without storing it locally. """ csvBuffer = io.StringIO() df.to_csv(csvBuffer, index=False) # The key has a uuid prefix to avoid partition issue key = ''.join([prefix, str(uuid.uuid4().hex[:6]), '-', name]) s3Connection.put_object(Body=csvBuffer.getvalue(), Bucket=bucket, Key=key) print(key + ' uploaded into ' + bucket) def uploadFixturesCsvToS3(data, bucket, s3Connection, prefix, name): """ Converts a fixture json file to a dataframe containing the relevant data. Converts the dataframe to a csv. Uploads the csv file directly to S3 without storing it locally. 
""" # Process the json object's data to a dataframe df = pd.DataFrame(columns=['idFixture', 'status', 'date', 'time', 'idHomeTeam', 'idAwayTeam', 'goalsHomeTeam', 'goalsAwayTeam']) for fixture in data['api']['fixtures']: idFixture = fixture['fixture_id'] status = fixture['status'] date = fixture['event_date'][:10] time = fixture['event_date'][11:16] idHomeTeam = fixture['homeTeam']['team_id'] idAwayTeam = fixture['awayTeam']['team_id'] goalsHomeTeam = fixture['goalsHomeTeam'] goalsAwayTeam = fixture['goalsAwayTeam'] row = {'idFixture': idFixture, 'status': status, 'date': date, 'time': time, 'idHomeTeam': idHomeTeam, 'idAwayTeam': idAwayTeam, 'goalsHomeTeam': goalsHomeTeam, 'goalsAwayTeam': goalsAwayTeam} df = df.append(row, ignore_index=True) # Convert df to csv and upload it to S3 into the 'processed-data' folder uploadCsvToS3(df, bucket, s3Connection, prefix, name) def statisticsGetProcessUpload(data, bucket, s3Connection): # Create a dataframe to prepare statistics data ingestion in csv df = pd.DataFrame(columns=['idFixture', 'idHomeTeam', 'idAwayTeam', 'shotsOnGoalHomeTeam', 'shotsOnGoalAwayTeam', 'shotsInsideBoxHomeTeam', 'shotsInsideBoxAwayTeam', 'totalShotsHomeTeam', 'totalShotsAwayTeam', 'ballPossessionHomeTeam', 'ballPossessionAwayTeam']) # Get statistics for each finished fixtures from API Football # WARNING!!! 
API Football is free until 100 requests / day, and costs around €0.00450 / request beyond # As there are 10 fixtures / round, running this script after the 9th round will incur some costs for fixture in data['api']['fixtures']: status = fixture['status'] if status != 'Match Finished': continue idFixture = fixture['fixture_id'] statisticsJson = statisticsRequest(idFixture) # Upload statistics json object into 'raw-data' folder prefix = 'raw-data/api-football/statistics/' name = ''.join(['statistics-', str(idFixture), '.json']) uploadJsonToS3(statisticsJson, bucket, s3Connection, prefix, name) # Process each statistics json data to a new dataframe row idHomeTeam = fixture['homeTeam']['team_id'] idAwayTeam = fixture['awayTeam']['team_id'] shotsOnGoalHomeTeam = statisticsJson['api']['statistics']['Shots on Goal']['home'] shotsOnGoalAwayTeam = statisticsJson['api']['statistics']['Shots on Goal']['away'] shotsInsideBoxHomeTeam = statisticsJson['api']['statistics']['Shots insidebox']['home'] shotsInsideBoxAwayTeam = statisticsJson['api']['statistics']['Shots insidebox']['away'] totalShotsHomeTeam = statisticsJson['api']['statistics']['Total Shots']['home'] totalShotsAwayTeam = statisticsJson['api']['statistics']['Total Shots']['away'] ballPossessionHomeTeam = statisticsJson['api']['statistics']['Ball Possession']['home'] ballPossessionAwayTeam = statisticsJson['api']['statistics']['Ball Possession']['away'] row = {'idFixture': idFixture, 'idHomeTeam': idHomeTeam, 'idAwayTeam': idAwayTeam, 'shotsOnGoalHomeTeam': shotsOnGoalHomeTeam, 'shotsOnGoalAwayTeam': shotsOnGoalAwayTeam, 'shotsInsideBoxHomeTeam': shotsInsideBoxHomeTeam, 'shotsInsideBoxAwayTeam': shotsInsideBoxAwayTeam, 'totalShotsHomeTeam': totalShotsHomeTeam, 'totalShotsAwayTeam': totalShotsAwayTeam, 'ballPossessionHomeTeam': ballPossessionHomeTeam, 'ballPossessionAwayTeam': ballPossessionAwayTeam} df = df.append(row, ignore_index=True) # Sleep 2,1 seconds between each statistics request to avoid the 30 requests / 
minute API Football limitation time.sleep(2.1) # Upload statistics as a csv file into 'processed-data' folder prefix = 'processed-data/api-football/statistics/' todayDate = datetime.datetime.today().strftime('%Y-%m-%d') name = ''.join(['statistics-', todayDate, '.csv']) uploadCsvToS3(df, bucket, s3Connection, prefix, name) def main(): s3_client = boto3.client('s3') # 1/ Creation of the "data lake" bucket dataLakeBucketName = createBucket('datalake-', s3_client) # 2/ Upload teamcodes.csv to the data lake into a folder named 'processed-data' s3_client.upload_file('teamcodes.csv', dataLakeBucketName, 'processed-data/teamcodes.csv') print('processed-data/teamcodes.csv uploaded into ' + dataLakeBucketName) # 3/ Get every previous fixtures and their statistics from API Football # Upload the json objects to the datalake into the 'raw-data' folder # Convert the json objects to csv # Upload csv directly to the datalake into the 'processed-data' folder todayDate = datetime.datetime.today().strftime('%Y-%m-%d') idPremierLeaguePreviousSeason = '524' idPremierLeagueCurrentSeason = '2790' # PREVIOUS SEASON FIXTURES # Get fixtures from API Football previousSeasonFixturesJson = fixturesRequest(idPremierLeaguePreviousSeason) # Upload fixtures json object into 'raw-data' folder prefix = 'raw-data/api-football/fixtures/' name = ''.join(['fixtures-', todayDate, '.json']) uploadJsonToS3(previousSeasonFixturesJson, dataLakeBucketName, s3_client, prefix, name) # Process and upload fixtures as a csv file into 'processed-data' folder prefix = 'processed-data/api-football/fixtures/' name = ''.join(['fixtures-', todayDate, '.csv']) uploadFixturesCsvToS3(previousSeasonFixturesJson, dataLakeBucketName, s3_client, prefix, name) # CURRENT SEASON FIXTURES # Get fixtures from API Football currentSeasonFixturesJson = fixturesRequest(idPremierLeagueCurrentSeason) # Upload fixtures json object into 'raw-data' folder prefix = 'raw-data/api-football/fixtures/' name = ''.join(['fixtures-', todayDate, 
'.json']) uploadJsonToS3(currentSeasonFixturesJson, dataLakeBucketName, s3_client, prefix, name) # Process and upload fixtures as a csv file into 'processed-data' folder prefix = 'processed-data/api-football/fixtures/' name = ''.join(['fixtures-', todayDate, '.csv']) uploadFixturesCsvToS3(currentSeasonFixturesJson, dataLakeBucketName, s3_client, prefix, name) # PREVIOUS SEASON STATISTICS statisticsGetProcessUpload(previousSeasonFixturesJson, dataLakeBucketName, s3_client) # CURRENT SEASON STATISTICS statisticsGetProcessUpload(currentSeasonFixturesJson, dataLakeBucketName, s3_client) print('Data lake deployed successfully!') if __name__ == '__main__': main()
C++
GB18030
606
2.59375
3
[]
no_license
#ifndef MYDATABASE_H #define MYDATABASE_H #include"connectionpool.h" /* * ݿ * ʵݿ * :addData(QString const& strSql) * ɾ:deleteData(QString const& strSql) * :changeData(QString const& strSql) * :selectData(QString const& strSql) * */ class MyDatabase { private: //QString m_strPath; //ļ· public: MyDatabase(); bool addData(QString const& strSql) ; bool deleteData(QString const& strSql); bool changeData(QString const& strSql); bool selectData(QString const& strSql); }; #endif // MYDATABASE_H
Java
UTF-8
487
1.9375
2
[]
no_license
package com.alphacat.vo; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import java.util.Map; /** * 方框标注:map中存放标签-结果键值对 * @author 161250102 */ @Data @AllArgsConstructor @NoArgsConstructor public class SquareVO { private int squareIndex; private int x; private int y; private int w; private int h; private Map<String, String> labelData; private String description; //整体描述 }
Java
UTF-8
20,858
1.710938
2
[]
no_license
package com.unisoft.algotrader.provider.ib.api.event; import com.unisoft.algotrader.model.event.data.Bar; import com.unisoft.algotrader.model.event.execution.ExecutionReport; import com.unisoft.algotrader.model.event.execution.Order; import com.unisoft.algotrader.model.refdata.Instrument; import com.unisoft.algotrader.provider.ib.api.model.bulletin.NewsBulletinType; import com.unisoft.algotrader.provider.ib.api.model.contract.ContractSpecification; import com.unisoft.algotrader.provider.ib.api.model.contract.UnderlyingCombo; import com.unisoft.algotrader.provider.ib.api.model.data.*; import com.unisoft.algotrader.provider.ib.api.model.execution.IBCommissionReport; import com.unisoft.algotrader.provider.ib.api.model.fa.FinancialAdvisorDataType; import com.unisoft.algotrader.provider.ib.api.model.order.OrderExecution; import com.unisoft.algotrader.provider.ib.api.model.order.OrderStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.List; /** * Created by alex on 9/3/15. 
*/ public class DefaultIBEventHandler implements IBEventHandler{ private static final Logger LOG = LogManager.getLogger(DefaultIBEventHandler.class); @Override public void onAccountSummaryEvent(AccountSummaryEvent accountSummaryEvent) { LOG.debug(accountSummaryEvent); } @Override public void onAccountSummaryEvent(long requestId, String account, String tag, String value, String currency) { LOG.debug("onAccountSummaryEvent reqId {}, account {}, tag {}, value {}, currency {}", requestId, account, tag, value, currency); } @Override public void onAccountSummaryEndEvent(AccountSummaryEndEvent accountSummaryEndEvent) { LOG.debug(accountSummaryEndEvent); } @Override public void onAccountSummaryEndEvent(long requestId) { LOG.debug("onAccountSummaryEndEvent reqId {}", requestId); } @Override public void onAccountUpdateTimeEvent(AccountUpdateTimeEvent accountUpdateTimeEvent) { LOG.debug(accountUpdateTimeEvent); } @Override public void onAccountUpdateTimeEvent(String time) { LOG.debug("onAccountUpdateTimeEvent time {}", time); } @Override public void onAccountUpdateValueEndEvent(AccountUpdateValueEndEvent accountUpdateValueEndEvent) { LOG.debug(accountUpdateValueEndEvent); } @Override public void onAccountUpdateValueEndEvent(String accountName) { LOG.debug("onAccountUpdateValueEndEvent accountName {}", accountName); } @Override public void onAccountUpdateValueEvent(AccountUpdateValueEvent accountUpdateValueEvent) { LOG.debug(accountUpdateValueEvent); } @Override public void onAccountUpdateValueEvent(String key, String value, String currency, String accountName) { LOG.debug("onAccountUpdateValueEvent key {}, value {}, currency {}, accountName {}", key, value, currency, accountName); } @Override public void onBondInstrumentSpecificationEvent(BondInstrumentSpecificationEvent bondInstrumentSpecificationEvent) { LOG.debug(bondInstrumentSpecificationEvent); } @Override public void onBondInstrumentSpecificationEvent(long requestId, ContractSpecification contractSpecification) { 
LOG.debug("onBondInstrumentSpecificationEvent requestId {}, contractSpecification {}", requestId, contractSpecification); } @Override public void onCommissionReportEvent(CommissionReportEvent commissionReportEvent) { LOG.debug(commissionReportEvent); } @Override public void onCommissionReportEvent(IBCommissionReport IBCommissionReport) { LOG.debug("onCommissionReportEvent commissionReport {}", IBCommissionReport); } @Override public void onCompositeTickEvent(CompositeTickEvent compositeTickEvent) { LOG.debug(compositeTickEvent); } @Override public void onDeltaNeutralValidationEvent(DeltaNeutralValidationEvent deltaNeutralValidationEvent) { LOG.debug(deltaNeutralValidationEvent); } @Override public void onDeltaNeutralValidationEvent(long requestId, UnderlyingCombo underlyingCombo) { LOG.debug("onDeltaNeutralValidationEvent requestId {}, underlyingCombo {}", requestId, underlyingCombo); } @Override public void onDeltaNeutralValidationEvent(long requestId, int instId, double delta, double price) { LOG.debug("onDeltaNeutralValidationEvent requestId {}, instId {}, delta {}, price {}", requestId, instId, delta, price); } @Override public void onDisplayGroupListEvent(DisplayGroupListEvent displayGroupListEvent) { LOG.debug(displayGroupListEvent); } @Override public void onDisplayGroupListEvent(long requestId, String groups) { LOG.debug("onDisplayGroupListEvent requestId {}, groups {}", requestId, groups); } @Override public void onDisplayGroupUpdatedEvent(DisplayGroupUpdatedEvent displayGroupUpdatedEvent) { LOG.debug(displayGroupUpdatedEvent); } @Override public void onDisplayGroupUpdatedEvent(long requestId, String contractInfo) { LOG.debug("onDisplayGroupUpdatedEvent requestId {}, contractInfo {}", requestId, contractInfo); } @Override public void onExecutionReportEvent(ExecutionReportEvent executionReportEvent) { LOG.debug(executionReportEvent); } @Override public void onExecutionReportEvent(long requestId, Instrument instrument, ExecutionReport executionReport) { 
LOG.debug("onExecutionReportEvent requestId {}, instrument {}, executionReport {}", requestId, instrument, executionReport); } @Override public void onExecutionReportEndEvent(ExecutionReportEndEvent executionReportEndEvent) { LOG.debug(executionReportEndEvent); } @Override public void onExecutionReportEndEvent(long requestId) { LOG.debug("onExecutionReportEndEvent requestId {}", requestId); } @Override public void onFinancialAdvisorConfigurationEvent(FinancialAdvisorConfigurationEvent financialAdvisorConfigurationEvent) { LOG.debug(financialAdvisorConfigurationEvent); } @Override public void onFinancialAdvisorConfigurationEvent(FinancialAdvisorDataType dataTypeValue, String xml) { LOG.debug("onFinancialAdvisorConfigurationEvent dataTypeValue {}, xml {}", dataTypeValue, xml); } @Override public void onFundamentalDataEvent(FundamentalDataEvent fundamentalDataEvent) { LOG.debug(fundamentalDataEvent); } @Override public void onFundamentalDataEvent(long requestId, String xml) { LOG.debug("onFundamentalDataEvent requestId {}, xml {}", requestId, xml); } @Override public void onHistoricalDataEvent(HistoricalDataEvent historicalDataEvent) { LOG.debug(historicalDataEvent); } @Override public void onHistoricalDataEvent(long requestId, Bar bar) { LOG.debug("onHistoricalDataEvent requestId {}, bar {}", requestId, bar); } @Override public void onHistoricalDataEvent(long requestId, String dateTime, double open, double high, double low, double close, int volume, int tradeNumber, double weightedAveragePrice, boolean hasGap) { LOG.debug("onHistoricalDataEvent requestId {}, dateTime {}, open {}, high {}, low {}, close {}, volume {}, tradeNumber {}, weightedAveragePrice {}, hasGap {}", requestId, dateTime, open, high, low, close, volume, tradeNumber, weightedAveragePrice, hasGap); } @Override public void onHistoricalDataListEvent(HistoricalDataListEvent historicalDataListEvent) { LOG.debug(historicalDataListEvent); } @Override public void onHistoricalDataListEvent(long requestId, 
List<Bar> bars) { LOG.debug("onHistoricalDataListEvent requestId {}, bars {}", requestId, bars); } @Override public void onInstrumentSpecificationEndEvent(ContractSpecificationEndEvent contractSpecificationEndEvent) { LOG.debug(contractSpecificationEndEvent); } @Override public void onInstrumentSpecificationEndEvent(long requestId) { LOG.debug("onInstrumentSpecificationEndEvent requestId {}", requestId); } @Override public void onInstrumentSpecificationEvent(ContractSpecificationEvent contractSpecificationEvent) { LOG.debug(contractSpecificationEvent); } @Override public void onInstrumentSpecificationEvent(long requestId, ContractSpecification contractSpecification) { LOG.debug("onInstrumentSpecificationEvent requestId {}, instrumentSpecification {}", requestId, contractSpecification); } @Override public void onManagedAccountListEvent(ManagedAccountListEvent managedAccountListEvent) { LOG.debug(managedAccountListEvent); } @Override public void onManagedAccountListEvent(String commaSeparatedAccountList) { LOG.debug("onManagedAccountListEvent commaSeparatedAccountList {}", commaSeparatedAccountList); } @Override public void onMarketDataTypeEvent(MarketDataTypeEvent marketDataTypeEvent) { LOG.debug(marketDataTypeEvent); } @Override public void onMarketDataTypeEvent(long requestId, MarketDataType marketDataType) { LOG.debug("onMarketDataTypeEvent requestId {}, marketDataType {}", requestId, marketDataType); } @Override public void onMarketDepthLevelTwoUpdateEvent(MarketDepthLevelTwoUpdateEvent marketDepthLevelTwoUpdateEvent) { LOG.debug(marketDepthLevelTwoUpdateEvent); } @Override public void onMarketDepthLevelTwoUpdateEvent(long requestId, int rowId, String marketMakerName, Operation operation, BookSide bookSide, double price, int size) { LOG.debug("onMarketDepthLevelTwoUpdateEvent requestId {}, rowId {}, marketMakerName {}, operation {}, bookSide {}, price {}, size {}", requestId, rowId, marketMakerName, operation, bookSide, price, size); } @Override public void 
onMarketDepthUpdateEvent(MarketDepthUpdateEvent marketDepthUpdateEvent) { LOG.debug(marketDepthUpdateEvent); } @Override public void onMarketDepthUpdateEvent(long requestId, int rowId, Operation operation, BookSide bookSide, double price, int size) { LOG.debug("onMarketDepthUpdateEvent requestId {}, rowId {}, operation {}, bookSide {}, price {}, size {}", requestId, rowId, operation, bookSide, price, size); } @Override public void onMarketScannerDataEvent(MarketScannerDataEvent marketScannerDataEvent) { LOG.debug(marketScannerDataEvent); } @Override public void onMarketScannerDataEvent(long requestId, MarketScannerData marketScannerData) { LOG.debug("onMarketScannerDataEvent requestId {}, marketScannerData {}", requestId, marketScannerData); } @Override public void onMarketScannerDataEvent(long requestId, int ranking, ContractSpecification contractSpecification, String distance, String benchmark, String projection, String comboLegDescription) { LOG.debug("onMarketScannerDataEvent requestId {}, ranking {}, instrumentSpecification {}, distance {}, benchmark {}, projection {}, comboLegDescription {}", requestId, ranking, contractSpecification, distance, benchmark, projection, comboLegDescription); } @Override public void onMarketScannerDataListEvent(MarketScannerDataListEvent marketScannerDataListEvent) { LOG.debug(marketScannerDataListEvent); } @Override public void onMarketScannerDataListEvent(long requestId, List<MarketScannerData> marketScannerData) { LOG.debug("onMarketScannerDataListEvent requestId {}, marketScannerData {}", requestId, marketScannerData); } @Override public void onMarketScannerValidParametersEvent(MarketScannerValidParametersEvent marketScannerValidParametersEvent) { LOG.debug(marketScannerValidParametersEvent); } @Override public void onMarketScannerValidParametersEvent(String xml) { LOG.debug("onMarketScannerValidParametersEvent xml {}", xml); } @Override public void onNewsBulletinUpdateEvent(NewsBulletinUpdateEvent newsBulletinUpdateEvent) { 
LOG.debug(newsBulletinUpdateEvent); } @Override public void onNewsBulletinUpdateEvent(int newsBulletinId, NewsBulletinType newBulletinTypeValue, String message, String exchange) { LOG.debug("onNewsBulletinUpdateEvent newsBulletinId {}, newBulletinTypeValue {}, message {}, exchange {}", newsBulletinId, newBulletinTypeValue, message, exchange); } @Override public void onNextValidOrderIdEvent(NextValidOrderIdEvent nextValidOrderIdEvent) { LOG.debug(nextValidOrderIdEvent); } @Override public void onNextValidOrderIdEvent(int nextValidOrderId) { LOG.debug("onNextValidOrderIdEvent nextValidOrderId {}", nextValidOrderId); } @Override public void onOrderStatusUpdateEvent(OrderStatusUpdateEvent orderStatusUpdateEvent) { LOG.debug(orderStatusUpdateEvent); } @Override public void onOrderStatusUpdateEvent(long orderId, OrderStatus orderStatus, int filledQuantity, int remainingQuantity, double averageFilledPrice, int permanentId, int parentOrderId, double lastFilledPrice, int clientId, String heldCause) { LOG.debug("onOrderStatusUpdateEvent orderId {}, orderStatus {}, filledQuantity {}, remainingQuantity {}, averageFilledPrice {}, permanentId {}, parentOrderId {}, lastFilledPrice {}, clientId {}, heldCause {}", orderId, orderStatus, filledQuantity, remainingQuantity, averageFilledPrice, permanentId, parentOrderId, lastFilledPrice, clientId, heldCause); } @Override public void onPositionEvent(PositionEvent positionEvent) { LOG.debug(positionEvent); } @Override public void onPositionEvent(String account, Instrument instrument, int pos, double avgCost) { LOG.debug("onPositionEvent account {}, instrument {}, pos {}, avgCost {}", account, instrument, pos, avgCost); } @Override public void onPositionEndEvent(PositionEndEvent positionEndEvent) { LOG.debug(positionEndEvent); } @Override public void onPositionEndEvent() { LOG.debug("onPositionEndEvent"); } @Override public void onPortfolioUpdateEvent(PortfolioUpdateEvent portfolioUpdateEvent) { LOG.debug(portfolioUpdateEvent); } 
@Override public void onPortfolioUpdateEvent(Instrument instrument, int marketPosition, double marketPrice, double marketValue, double averageCost, double unrealizedProfitAndLoss, double realizedProfitAndLoss, String accountName) { LOG.debug("onPortfolioUpdateEvent instrument {}, marketPosition {}, marketPrice {}, marketValue {}, averageCost {}, unrealizedProfitAndLoss {}, realizedProfitAndLoss {}, accountName {}", instrument, marketPosition, marketPrice, marketValue, averageCost, unrealizedProfitAndLoss, realizedProfitAndLoss, accountName); } @Override public void onRealTimeBarEvent(RealTimeBarEvent realTimeBarEvent) { LOG.debug(realTimeBarEvent); } @Override public void onRealTimeBarEvent(long requestId, long timestamp, double open, double high, double low, double close, long volume, double weightedAveragePrice, int tradeNumber) { LOG.debug("onRealTimeBarEvent requestId {}, timestamp {}, open {}, high {}, low {}, close {}, volume {}, weightedAveragePrice {}, tradeNumber {}", requestId, timestamp, open, high, low, close, volume, weightedAveragePrice, tradeNumber); } @Override public void onRetrieveOpenOrderEndEvent(RetrieveOpenOrderEndEvent retrieveOpenOrderEndEvent) { LOG.debug(retrieveOpenOrderEndEvent); } @Override public void onRetrieveOpenOrderEndEvent() { LOG.debug("onRetrieveOpenOrderEndEvent"); } @Override public void onRetrieveOpenOrderEvent(RetrieveOpenOrderEvent retrieveOpenOrderEvent) { LOG.debug("onRetrieveOpenOrderEvent retrieveOpenOrderEvent {}", retrieveOpenOrderEvent); } @Override public void onRetrieveOpenOrderEvent(long orderId, Instrument instrument, Order order, OrderExecution orderExecution) { LOG.debug("onRetrieveOpenOrderEvent orderId {}, instrument {}, order {}, orderExecution {}", orderId, instrument, order, orderExecution); } @Override public void onServerCurrentTimeEvent(ServerCurrentTimeEvent serverCurrentTimeEvent) { LOG.debug(serverCurrentTimeEvent); } @Override public void onServerCurrentTimeEvent(long timestamp) { 
LOG.debug("onServerCurrentTimeEvent timestamp {}", timestamp); } @Override public void onServerMessageEvent(ServerMessageEvent serverMessageEvent) { LOG.debug(serverMessageEvent); } @Override public void onServerMessageEvent(long requestId, int code, String message) { LOG.debug("onServerMessageEvent requestId {}, code {}, message {}", requestId, code, message); } @Override public void onTickEfpEvent(TickEfpEvent tickEfpEvent) { LOG.debug(tickEfpEvent); } @Override public void onTickEfpEvent(long requestId, TickType tickType, double basisPoints, String formattedBasisPoints, double impliedFuturePrice, int holdDays, String futureExpiry, double dividendImpact, double dividendToExpiry) { LOG.debug("onTickEfpEvent requestId {}, tickType {}, basisPoints {}, formattedBasisPoints {}, impliedFuturePrice {}, holdDays {}, futureExpiry {}, dividendImpact {}, dividendToExpiry {}", requestId, tickType, basisPoints, formattedBasisPoints, impliedFuturePrice, holdDays, futureExpiry, dividendImpact, dividendToExpiry); } @Override public void onTickGenericEvent(TickGenericEvent tickGenericEvent) { LOG.debug(tickGenericEvent); } @Override public void onTickGenericEvent(long requestId, TickType tickType, double value) { LOG.debug("onTickGenericEvent requestId {}, tickType {}, value {}", requestId, tickType, value); } @Override public void onTickOptionComputationEvent(TickOptionComputationEvent tickOptionComputationEvent) { LOG.debug(tickOptionComputationEvent); } @Override public void onTickOptionComputationEvent(long requestId, TickType tickType, double impliedVolatility, double delta, double price, double presentValueDividend, double gamma, double vega, double theta, double underlyingPrice) { LOG.debug("onTickOptionComputationEvent requestId {}, tickType {}, impliedVolatility {}, delta {}, price {}, presentValueDividend {}, gamma {}, vega {}, theta {}, underlyingPrice {}", requestId, tickType, impliedVolatility, delta, price, presentValueDividend, gamma, vega, theta, underlyingPrice); 
} @Override public void onTickPriceEvent(TickPriceEvent tickPriceEvent) { LOG.debug(tickPriceEvent); } @Override public void onTickPriceEvent(long requestId, TickType tickType, double price, boolean autoExecute) { LOG.debug("onTickPriceEvent requestId {}, tickType {}, price {}, autoExecute {}", requestId, tickType, price, autoExecute); } @Override public void onTickSizeEvent(TickSizeEvent tickSizeEvent) { LOG.debug(tickSizeEvent); } @Override public void onTickSizeEvent(long requestId, TickType tickType, int size) { LOG.debug("onTickSizeEvent requestId {}, tickType {}, size {}", requestId, tickType, size); } @Override public void onTickSnapshotEndEvent(TickSnapshotEndEvent tickSnapshotEndEvent) { LOG.debug(tickSnapshotEndEvent); } @Override public void onTickSnapshotEndEvent(long requestId) { LOG.debug("onTickSnapshotEndEvent requestId {}", requestId); } @Override public void onTickStringEvent(TickStringEvent tickStringEvent) { LOG.debug(tickStringEvent); } @Override public void onTickStringEvent(long requestId, TickType tickType, String value) { LOG.debug("onTickStringEvent requestId {}, tickType {}, value {}", requestId, tickType, value); } @Override public void onVerifyMessageAPIEvent(VerifyMessageAPIEvent verifyMessageAPIEvent) { LOG.debug(verifyMessageAPIEvent); } @Override public void onVerifyMessageAPIEvent(String apiData) { LOG.debug("onVerifyMessageAPIEvent apiData {}", apiData); } @Override public void onVerifyCompletedEvent(VerifyCompletedEvent verifyCompletedEvent) { LOG.debug(verifyCompletedEvent); } @Override public void onVerifyCompletedEvent(boolean isSuccessful, String errorText) { LOG.debug("onVerifyCompletedEvent isSuccessful {}, errorText {}", isSuccessful, errorText); } }
C#
UTF-8
3,637
2.5625
3
[]
no_license
using UnityEngine;

namespace ImGui
{
    /**
     * Layout cursor/rectangle for an immediate-mode GUI.
     *
     * Coordinate system is y-up (Unity world style): `top` equals root.y and
     * `bottom` equals root.y + height, where `height` is stored as a NEGATIVE
     * magnitude (see the constructor), so bottom < top numerically.
     *
     * (0, 0) -> (w, 0)
     * |         |
     * v         v
     * (0, h) -> (w, h)
     *
     * root -> max
     * |        |
     * v        v
     * min --> span
     */
    public class DrawContext
    {
        // Top-left anchor of the rectangle.
        public readonly Vector2 root;
        // width is a positive magnitude; height is a negative magnitude (y-up).
        public readonly float width, height;

        public float left { get { return root.x; } }
        public float right { get { return root.x + width; } }
        public float top { get { return root.y; } }
        // height is negative, so bottom lies below top.
        public float bottom { get { return root.y + height; } }
        public Vector2 size { get { return new Vector2(width, height); } }
        // Corner diagonally opposite root.
        public Vector2 span { get { return new Vector2(root.x + width, root.y + height); } }
        // Bottom-left corner.
        public Vector2 min { get { return new Vector2(root.x, root.y + height); } }
        // Top-right corner.
        public Vector2 max { get { return new Vector2(root.x + width, root.y); } }

        // TODO: Finish this...
        // Cursor position before the last NextItem call (used by BackCursor).
        Vector2 cursorPrev_;
        // Lowest y reached on the current line (remember: y decreases downwards).
        float lineHeightPrev_;
        // Extent of all content placed so far, as positive (width, height).
        public Vector2 contentSize { get; private set; }
        // Where the next item will be placed.
        public Vector2 cursor { get; private set; }
        // Padding set by the last Pad(...) call; consumed by CalcSize.
        public float padLeft { get; private set; }
        public float padTop { get; private set; }
        public float padRight { get; private set; }
        public float padBottom { get; private set; }

        // extent's components are taken by magnitude: width >= 0, height <= 0.
        public DrawContext(Vector2 root, Vector2 extent)
        {
            this.root = root;
            cursor = root;
            width = Mathf.Abs(extent.x);
            height = -Mathf.Abs(extent.y);
        }

        // Point offset from the top-left corner (x right, y down).
        public Vector2 FromRoot(Vector2 extent)
        {
            return new Vector2(root.x + Mathf.Abs(extent.x), root.y - Mathf.Abs(extent.y));
        }

        // Point offset from the top-right corner (x left, y down).
        public Vector2 FromMax(Vector2 extent)
        {
            return new Vector2(max.x - Mathf.Abs(extent.x), max.y - Mathf.Abs(extent.y));
        }

        // Point offset from the current cursor (x right, y down).
        public Vector2 FromCursor(Vector2 extent)
        {
            return new Vector2(cursor.x + Mathf.Abs(extent.x), cursor.y - Mathf.Abs(extent.y));
        }

        /**
         * Advance the cursor after placing an item whose bottom-right corner
         * is `pos`. With newLine the cursor returns to the left edge below the
         * tallest item on the line; otherwise it continues to the right.
         *
         * root
         * I
         * I cursor ---
         * I        |  |
         *          -------- pos
         *          <ItemSpacing.y>
         *          <--
         *
         * root
         * I
         * I cursor ---
         * I        |  |
         * I        -------- pos
         * I
         * <ItemSpacing.y>
         * <--
         */
        public void NextItem(Vector2 pos, Vector2 spacing, bool newLine = true)
        {
            // Grow the recorded content extent (positive width/height).
            contentSize = new Vector2(Mathf.Max(contentSize.x, pos.x - root.x), Mathf.Max(contentSize.y, root.y - pos.y));
            // Min because y decreases downwards: track the lowest line edge.
            lineHeightPrev_ = Mathf.Min(pos.y, lineHeightPrev_);
            cursorPrev_ = new Vector2(pos.x, cursor.y);
            cursor = newLine ? new Vector2(root.x, lineHeightPrev_ - spacing.y) : new Vector2(cursorPrev_.x + spacing.x, cursorPrev_.y);
        }

        /**
         * Move the cursor back onto the previous item's line, to its right.
         *
         * root
         * ---------cursorPre_ <--
         * |       | <ItemSpacing.x> |
         * ----------
         * <ItemSpacing.y>
         * cursor
         */
        public void BackCursor(Vector2 spacing)
        {
            cursor = new Vector2(cursorPrev_.x + spacing.x, cursorPrev_.y);
        }

        // Translate the cursor by an arbitrary offset.
        public void ShiftCursor(Vector2 offset)
        {
            cursor += offset;
        }

        // Hit test with a small epsilon tolerance on all edges.
        public bool Contains(Vector2 point)
        {
            const float e = 0.01f;
            return left - e < point.x && point.x < right + e && bottom - e < point.y && point.y < top + e;
        }

        // Symmetric padding shortcut.
        public DrawContext Pad(float x, float y)
        {
            return Pad(x, y, x, y);
        }

        // Returns a child context inset by the given padding; the padding is
        // also recorded on THIS context for a later CalcSize call. A padding
        // larger than the rectangle yields a zero-sized child.
        public DrawContext Pad(float left, float top, float right, float bottom)
        {
            padLeft = left;
            padTop = top;
            padRight = right;
            padBottom = bottom;
            var r = new Vector2(root.x + left, root.y - top);
            // height is negative, so vertical padding is ADDED to shrink it.
            var s = new Vector2(width - left - right, height + top + bottom);
            if (s.x <= 0 || s.y >= 0) return new DrawContext(r, Vector2.zero);
            return new DrawContext(r, s);
        }

        // Outer size needed to wrap `contentSize` plus the recorded padding;
        // per-axis, a non-fitting axis keeps this context's fixed dimension.
        public Vector2 CalcSize(Vector2 contentSize, bool fitWidth = true, bool fitHeight = true)
        {
            return new Vector2(
                fitWidth ? contentSize.x + padLeft + padRight : width,
                fitHeight ? contentSize.y + padTop + padBottom : height);
        }
    }
}
Python
UTF-8
350
3.765625
4
[ "MIT" ]
permissive
# program to Display the pattern # * # * * # * * * # n inputs the number of lines n=input("Enter the limit: ") # num is set as * num="*" # nested for to printing each line for i in range(1,n+1): for j in range(1,i+1): # print num in each line and "," is important to print next output in saame line print num, # "/n" is next line print "\n"
SQL
WINDOWS-1252
221
3.03125
3
[]
no_license
--up_sal.sql create or replace procedure up_sal (vempno in emp.empno%type) --⿡ 7900 is begin update emp set sal = sal * 1.1 where empno = vempno end select * from emp where empno in(7900,7902)
Markdown
UTF-8
1,297
2.78125
3
[]
no_license
# Protobuf Case Study

## Description
You are tasked with building 2 applications: one that receives events and one that stores the events. These 2 applications must communicate with each other via Google Protocol Buffers. The first application receives logs of events via HTTP and must pass them to the second application for storing them in the database (for the case study we will not use a database; logging them to a file on the disk will suffice).

## Solution
- The log-receiving application is written in Java and connects via sockets to the saving application written in Python.
- The receiving app will automatically run the saving app.

## Set up
Open a terminal in the project folder. Use Maven to build the service:
```
$ mvn clean install
```
Go to the target dir and run the .jar file:
```
$ java -jar log-receiver-0.0.1-SNAPSHOT.jar
```
The service will run on port 8090.

## How to use the service?
POST request example:
```
{ "timestamp" : 1518609008, "userId" : 1123, "event" : "2 hours of downtime occurred due to the release of version 1.0.5 of the system" }
```
To test an example in the app you can go to http://localhost:8090/swagger-ui.html -> log-controller -> POST

### Note:
The data will be saved in the file ./src/main/resources/logReport.txt
Java
UTF-8
737
3.125
3
[]
no_license
package LoginBank; import java.util.Scanner; /** * * @author dell */ public class DataInput { public int getIntInput(String msg, int min, int max) { Scanner sc = new Scanner(System.in); boolean check = true; int result = 0; while (check) { System.out.print(msg); try { result = Integer.parseInt(sc.nextLine()); if (result < min || result > max) { System.out.println("Out of range"); continue; } check = false; } catch (NumberFormatException e) { System.out.println("Number Invalid"); } } return result; } }
Rust
UTF-8
4,194
3.15625
3
[ "MIT" ]
permissive
// Tests for the `Disk` persistence backend: candidates are bucketed into
// files named by (wasted symbols, permutations), with a numeric suffix that
// increments per write and is consumed in order per read.
use super::*;
use std::fs::metadata;
use std::path::Path;

type Subject = Disk;

// Shared scratch directory; each test isolates itself with its own test id.
const PATH: &'static str = "/tmp/superpermutation-test";

// Build a Disk rooted at PATH/<test_id>, optionally gzip-compressed.
fn subject(test_id: &'static str, gzip: bool) -> Subject {
    let path = format!("{}/{}", PATH, test_id);
    Subject::new(path, gzip)
}

// A fixed-size bucket of seed candidates used as write/read payload.
fn bucket() -> VecDeque<Candidate> {
    (0..1000).map(|_| Candidate::seed(5)).collect()
}

mod new {
    use super::*;

    #[test]
    fn it_builds_the_struct_with_the_path() {
        let subject = subject("test-1", false);
        assert_eq!(subject.path, "/tmp/superpermutation-test/test-1");
    }

    #[test]
    fn it_creates_a_directory_at_the_path() {
        subject("test-2", false);
        assert_eq!(Path::new(PATH).exists(), true);
    }
}

mod basename {
    use super::*;

    #[test]
    fn it_returns_a_name_based_on_the_number_of_wasted_symbols_and_permutations() {
        let subject = subject("test-3", false);
        let actual = subject.basename(3, 4);
        let name = "test-3/candidates-with-3-wasted-symbols-and-4-permutations.dat";
        let expected = format!("{}/{}", PATH, name);
        assert_eq!(actual, expected);
    }
}

mod filename_for_reading {
    use super::*;

    #[test]
    fn it_returns_none_if_no_file_exists() {
        let subject = subject("test-4", false);
        let filename = subject.filename_for_reading(3, 4);
        assert_eq!(filename, None);
    }

    // Reads consume the suffixes .0, .1, .2 ... in write order.
    // The `[70..]` slice skips the absolute path prefix, which has a fixed
    // length for these test ids.
    #[test]
    fn it_returns_the_name_of_the_first_available_file() {
        let subject = subject("test-5", false);
        subject.write(bucket(), 3, 4); // 0
        subject.write(bucket(), 3, 4); // 1
        subject.write(bucket(), 3, 4); // 2
        let filename = subject.filename_for_reading(3, 4).unwrap();
        assert_eq!(&filename[70..], "-4-permutations.dat.0");
        let filename = subject.filename_for_reading(3, 4).unwrap();
        assert_eq!(&filename[70..], "-4-permutations.dat.1");
        let filename = subject.filename_for_reading(3, 4).unwrap();
        assert_eq!(&filename[70..], "-4-permutations.dat.2");
    }
}

mod filename_for_writing {
    use super::*;

    #[test]
    fn it_adds_a_suffix_to_the_basename() {
        let subject = subject("test-6", false);
        let filename = subject.filename_for_writing(3, 4);
        assert_eq!(&filename[70..], "-4-permutations.dat.0");
    }

    // Each call hands out the next unused suffix.
    #[test]
    fn it_increments_the_index_each_time() {
        let subject = subject("test-7", false);
        let filename = subject.filename_for_writing(3, 4);
        assert_eq!(&filename[70..], "-4-permutations.dat.0");
        let filename = subject.filename_for_writing(3, 4);
        assert_eq!(&filename[70..], "-4-permutations.dat.1");
        let filename = subject.filename_for_writing(3, 4);
        assert_eq!(&filename[70..], "-4-permutations.dat.2");
    }
}

mod write {
    use super::*;

    #[test]
    fn it_writes_the_bucket_to_a_file() {
        let subject = subject("test-8", false);
        subject.write(bucket(), 3, 4);
        let filename = subject.filename_for_reading(3, 4).unwrap();
        assert_eq!(Path::new(&filename).exists(), true);
    }
}

mod read {
    use super::*;

    // Round trip: what was written must deserialize back identically.
    #[test]
    fn it_reads_the_bucket_from_a_file() {
        let subject = subject("test-9", false);
        subject.write(bucket(), 3, 4);
        let bucket_from_file = subject.read(3, 4);
        assert_eq!(bucket_from_file, Some(bucket()));
    }
}

mod gzip_compression {
    use super::*;

    // The 1000 seed candidates are highly repetitive, so gzip is expected to
    // shrink the file by a large factor (>200x asserted below).
    #[test]
    fn it_writes_a_smaller_file_to_disk() {
        let with_gzip = subject("test-11", true);
        let without_gzip = subject("test-12", false);
        with_gzip.write(bucket(), 3, 4);
        without_gzip.write(bucket(), 5, 6);
        let file1 = with_gzip.filename_for_reading(3, 4).unwrap();
        let file2 = without_gzip.filename_for_reading(5, 6).unwrap();
        let with_gzip_size = metadata(file1).unwrap().len();
        let without_gzip_size = metadata(file2).unwrap().len();
        assert_eq!(with_gzip_size > 0, true);
        assert_eq!(with_gzip_size < without_gzip_size, true);
        let compression_rate = without_gzip_size / with_gzip_size;
        assert_eq!(compression_rate > 200, true);
    }
}
PHP
UTF-8
429
2.71875
3
[]
no_license
<?php declare(strict_types=1); namespace Knp\Rad\User\Salt\Generator; use Knp\Rad\User\Salt\Generator; class PseudoRandomBytesGenerator implements Generator { private int $length; public function __construct(int $length) { $this->length = $length; } /** * {@inheritdoc} */ public function generate(): string { return openssl_random_pseudo_bytes($this->length); } }
Python
UTF-8
2,899
3.421875
3
[]
no_license
import scipy.stats import numpy as np import point_process.poisson import matplotlib.pyplot as plt def uniform_disk(x, y, r): """ Равномерно распределённая в круге точка :param x: координата X центра круга :param y: координата Y центра круга :param r: радиус круга :return: """ r = scipy.stats.uniform(0, r ** 2.0).rvs() theta = scipy.stats.uniform(0, 2 * np.pi).rvs() xt = np.sqrt(r) * np.cos(theta) yt = np.sqrt(r) * np.sin(theta) return x + xt, y + yt def matern_point_process(kappa, r, mu, dx, include_parents=False, logging_on=True): """ A Poisson( kappa ) number of parents are created, each forming a Poisson( mu ) numbered cluster of points, distributed uniformly in a circle of radius `r` :param kappa: параметр Пуассоновского распределения, определяющего количество точек в единичном квадрате :param r: радиус круга, в котором для каждого родителя создаются потомки :param mu: параметр Пуассоновского распределения, определяющего количество потомков :param dx: длина стороны квадрата :param include_parents: вывести родительские события :param logging_on: текстовый вывод :return: """ # create a set of parent points from a Poisson( kappa ) # distribution on the square region [0,Dx] X [0,Dx] parents = point_process.poisson.poisson_homogeneous_point_process(kappa, dx) # M is the number of parents m = parents.shape[0] # an empty list for the Matern process points matern_points = list() # for each parent point.. for i in range(m): # determine a number of children according # to a Poisson( mu ) distribution n = scipy.stats.poisson(mu).rvs() # for each child point.. 
for j in range(n): # produce a uniformly distributed child point from a # circle of radius `r`, centered about a parent point x, y = uniform_disk(parents[i, 0], parents[i, 1], r) # add the child point to the list MP matern_points.append([x, y]) # return a numpy array matern_points = np.array(matern_points) if include_parents: if logging_on: print("{0} Matern distributed points generated.".format(len(matern_points) + len(parents))) return matern_points, parents else: if logging_on: print("{0} Matern distributed points generated.".format(len(matern_points))) return matern_points
Java
UTF-8
3,909
2.109375
2
[ "Apache-2.0" ]
permissive
/**
 * Copyright (C) 2014 Roman Ripp
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rripp.android.glass.speedometer;

import rripp.android.glass.speedometer.R;
import rripp.android.glass.speedometer.ui.SpeedometerView;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;

/**
 * Invisible activity whose sole purpose is to show the options menu for the
 * running {@link SpeedometerService}: it binds to the service to obtain the
 * live view/service handles, opens the menu, and finishes itself when the
 * menu closes.
 */
public class MenuActivity extends Activity {

    // Live speedometer view obtained from the bound service.
    private SpeedometerView mView;
    // Service handle used to reset speed / stop the service from the menu.
    private SpeedometerService mService;
    // The menu may only be opened once the activity is attached to a window.
    private boolean mAttachedToWindow;
    // Guards against opening the options menu twice.
    private boolean mOptionsMenuOpen;

    private ServiceConnection mConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            if (service instanceof SpeedometerService.SpeedometerBinder) {
                mView = ((SpeedometerService.SpeedometerBinder) service).getView();
                mService = ((SpeedometerService.SpeedometerBinder) service).getService();
                openOptionsMenu();
            }
            // No need to keep the service bound.
            unbindService(this);
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            // Nothing to do here.
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Bind without BIND_AUTO_CREATE (flag 0): only connects if the
        // service is already running.
        bindService(new Intent(this, SpeedometerService.class), mConnection, 0);
    }

    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        mAttachedToWindow = true;
        openOptionsMenu();
    }

    @Override
    public void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        mAttachedToWindow = false;
    }

    // Opens the menu exactly once, and only when both preconditions
    // (window attached + not already open) hold; called from both the
    // service callback and onAttachedToWindow, whichever happens last.
    @Override
    public void openOptionsMenu() {
        if (!mOptionsMenuOpen && mAttachedToWindow){
            mOptionsMenuOpen = true;
            super.openOptionsMenu();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.spedo, menu);
        return true;
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        boolean visible = true;
        menu.findItem(R.id.units).setVisible(visible);
        menu.findItem(R.id.kmh).setVisible(visible);
        menu.findItem(R.id.mph).setVisible(visible);
        menu.findItem(R.id.exit).setVisible(visible);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle item selection.
        //TODO reset max speed than changing units, implement reset option
        // NOTE(review): mView/mService are only set in onServiceConnected;
        // if a unit item is selected before the service connects (or when it
        // was not running), these calls would NPE — confirm against the
        // menu-opening flow above.
        switch (item.getItemId()) {
            case R.id.kmh:
                mService.resetSpeed();
                mView.useSiMetrics(true, this);
                return true;
            case R.id.mph:
                mService.resetSpeed();
                mView.useSiMetrics(false, this);
                return true;
            case R.id.exit:
                stopService(new Intent(this, SpeedometerService.class));
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    // Closing the menu ends this activity — it exists only to host the menu.
    @Override
    public void onOptionsMenuClosed(Menu menu) {
        mOptionsMenuOpen = false;
        finish();
    }
}
Java
UTF-8
544
2.484375
2
[]
no_license
package com.fivehl.tp2.Factory; import com.fivehl.tp2.Model.CreditCard; import com.fivehl.tp2.util.GenericHelper; public class CreditCardFactory { public static CreditCard createCreditCard(String nameOnCard, String expiredDate ) { String cardNumber = GenericHelper.generateId(); CreditCard creditCard = new CreditCard.Builder() .setCardNumber(cardNumber) .setNameOnCard(nameOnCard) .setExpiredDate(expiredDate) .build(); return creditCard; } }
Python
UTF-8
839
3.3125
3
[ "BSD-3-Clause" ]
permissive
#!/usr/bin/env python3 # silly exercise that does nothing useful # this is a learning exercise to build on equally useless things. import os import random import subprocess banner = "====================" print(banner) print("Welcome to File Picker 3000\n\n") fileloc = str(input("Type location to pick random files\n\n")) filecount = int(input("How many random picks do you want?\n\n")) print(banner) if filecount >= 1000: print("Come on dude chill, pick less than 1000 results") exit(1) if fileloc == "~": myuser = subprocess.check_output("whoami", shell=True, stderr=subprocess.STDOUT) myuser = myuser.decode('ascii') myuser = myuser.strip('\n') fileloc = "/home/" + myuser def picker(n): wordguy = os.listdir(fileloc) for i in range(0,n): print(i,random.choice(wordguy)) picker(filecount)
Markdown
UTF-8
4,599
2.71875
3
[]
no_license
# StoredProcedures

Plik opisujący załączone procedury. W bazie danych każda z tych procedur jest utworzona w odrębnym schemacie, agregującym encje z wybranego modułu aplikacji, w celu łatwiejszego nadawania uprawnień oraz przejrzystości bazy.

---------------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------------

# createClient

1. Procedura tworząca nowego klienta w bazie.
2. Przyjmuje argument XML.
3. Zawiera blok try oraz catch — w razie niepowodzenia operacji wszystko zostaje cofnięte do stanu sprzed wywołania procedury.
4. Za pomocą jednej procedury robię INSERT do czterech tabel.

Zarys działania

W bazie została utworzona tabela GeneralDetails w celu przechowywania wspólnych danych dla wielu encji oraz w celu możliwości przechowywania np. wielu adresów oraz kontaktów danego klienta. Dzięki takiemu rozwiązaniu nie musiałam duplikować tabel, a podczas pisania zapytań nie miałam problemu z wyciągnięciem potrzebnych danych.

Kolumna SHA256 — jest generowana automatycznie na poziomie API przed wysłaniem do bazy. Cel tej kolumny jest taki, że podczas pracy z plikiem XML oraz insertem do wielu tabel potrzebna jest unikalna, predefiniowana kolumna; nie może to być GUID wiersza, ponieważ jest on nadawany podczas insertu, a ja potrzebowałam unikalnej wartości przed operacją INSERT w celu relacyjnego powiązania wszystkich tabel w trakcie jednego wywołania procedury. Przykład wykorzystania właściwości tej tabeli jest w linii 37.

(Select Id from person.GeneralDetails where Sha256 = m.value('../GeneralDetails[1]/Sha256[1]', 'nvarchar(300)')) as hashsha256, m.value('UserName[1]', 'nvarchar(50)') as username

Wiążemy tabele na podstawie relacji za pomocą kodu SHA256 i wybieramy ID potrzebnego wiersza.
Dzięki temu mamy poprawnie powiązane tabele w bazie oraz mamy możliwość INSERTU do wielu tabel bez względu na złożoność dokumentu XML oraz jego zagnieżdżenia. Następuje COMMIT TRAN, czyli zatwierdzenie transakcji, i wszystkie dane zostają umieszczone w bazie. W bloku CATCH sprawdzamy wartość @@TRANCOUNT, cofamy wszystkie zmiany wywołane przez procedurę oraz wyrzucamy błąd do API.

----------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------

# getClassification

1. Procedura pobierająca wszystkie klasyfikacje danego klienta.
2. Przyjmuje jeden argument NVARCHAR.
3. Po wykonaniu procedury otrzymujemy zagnieżdżony plik JSON.

Zarys działania

Procedura przyjmuje jako argument Id klienta, dla którego ma być zwrócona kolekcja utworzonych przez niego klasyfikacji, które z kolei mają zagnieżdżone w sobie reguły, a reguły z kolei mają zagnieżdżone tagi. Przykładowy plik JSON wygenerowany dzięki procedurze można zobaczyć jako plik ExampleClassification.json; niektóre klasyfikacje mogą nie mieć przypisanych reguł. W procedurze można dowolnie modyfikować, czyli dodawać bądź usuwać, zestaw zwracanych danych.

-----------------------------------------------------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------------------------------------------------

# updateInvoice

1. Procedura przyjmuje argument XML.
2. Za pomocą procedury jesteśmy w stanie przeprowadzić aktualizację łącznie na siedmiu tabelach.

Zarys działania

Procedura ma na celu zaktualizowanie danych, które zostały zeskanowane przez system OCR; przykładowy plik XML wysyłany za pomocą API można zobaczyć w repozytorium jako exampleXMLforUpdateInvoice. Zakładając, że 5 lat temu kontraktor A miał adres X.
Dowiadujemy się, że od dnia jutrzejszego wszystkie faktury, jakie nasza firma będzie dostawać, będą z nowym adresem B. Taka sytuacja wymusza zachowywanie danych historycznych na starszych fakturach oraz dodawanie nowych faktur z nowym adresem; w tym celu została stworzona tabela funkcyjna Invoices_Addresses, przechowująca klucze zatwierdzonych faktur oraz adresów. Dzięki temu faktury sprzed 4 lat nie zmienią nagle starego adresu kontraktora na nowy za sprawą operacji UPDATE, ponieważ jesteśmy w stanie wskazać adres, który powinien być zaktualizowany.
Shell
UTF-8
349
3.625
4
[ "MIT" ]
permissive
#!/bin/sh # Module Name : user.sh # Description : Module for simple user-related functionalities # Naming Convention : uppercase & leading __ for global variables, lowercase for local variables # function user_is_root : check if current user is root user_is_root () { if [ $EUID == 0 ]; then return 1 else return 0 fi }
Java
UTF-8
4,726
2.015625
2
[ "Apache-2.0" ]
permissive
package com.hutchgrant.contacts;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import com.hutchgrant.Elements.Invite;
import com.hutchgrant.Elements.LifeInvite;
import com.hutchgrant.app.TunaApp;
import com.hutchgrant.coconut.R;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.assist.ImageLoadingListener;
import com.nostra13.universalimageloader.core.assist.SimpleImageLoadingListener;
import com.nostra13.universalimageloader.core.display.FadeInBitmapDisplayer;
import com.nostra13.universalimageloader.core.display.RoundedBitmapDisplayer;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.AdapterView.OnItemClickListener;

/**
 * Activity that lists the "sent" invites of the current user in a ListView,
 * rendering each row with a name, a checkbox and an avatar loaded through
 * Universal Image Loader.
 */
public class LifeInviteList extends Activity implements OnItemClickListener {

    // Singleton image loader shared with the rest of the app.
    protected ImageLoader imageLoader = ImageLoader.getInstance();
    // Invites fetched from the application object in getInvites().
    LifeInvite life;
    ListView listView;
    ItemAdapter dataAdapter;
    // Row image options: stub/fallback launcher icon, memory+disc cache,
    // rounded corners.
    DisplayImageOptions options;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.phone_contact_list);
        listView = (ListView) this.findViewById(R.id.list_contacts);
        options = new DisplayImageOptions.Builder()
                .showStubImage(R.drawable.ic_launcher)
                .showImageForEmptyUri(R.drawable.ic_launcher)
                .showImageOnFail(R.drawable.ic_launcher)
                .cacheInMemory(true)
                .cacheOnDisc(true)
                .displayer(new RoundedBitmapDisplayer(20))
                .build();
        fillList();
    }

    // Pulls the accepted/"sent" invites from the application singleton into
    // the `life` field.
    public void getInvites() {
        TunaApp app = (TunaApp) ((Activity) this).getApplication();
        life = new LifeInvite();
        life = app.getInvites(true, "sent");
    }

    // Fetches invites and wires the adapter + click listener to the list.
    public void fillList() {
        getInvites();
        dataAdapter = new ItemAdapter(this, life);
        listView.setAdapter(dataAdapter);
        listView.setOnItemClickListener(this);
    }

    @Override
    public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
        // Row taps are currently ignored.
    }

    /**
     * Adapter mapping each Invite to a row (name + checkbox + image).
     */
    class ItemAdapter extends BaseAdapter {

        private ImageLoadingListener animateFirstListener = new AnimateFirstDisplayListener();
        LifeInvite life;
        // NOTE(review): entries of imageUrls are never assigned, so every
        // row passes a null URI to the loader and shows the fallback image —
        // confirm whether invite avatars were meant to be filled in here.
        String imageUrls[];
        Context ctx;

        // View-holder pattern: caches row sub-views to avoid findViewById on
        // every getView call.
        private class ViewHolder {
            public TextView text;
            public ImageView image;
            public CheckBox cb;
        }

        public ItemAdapter() {
            this.imageUrls = new String[1];
            this.life = new LifeInvite();
        }

        public ListAdapter getInstance() {
            return this;
        }

        public ItemAdapter(Context context, LifeInvite arr) {
            this.life = new LifeInvite();
            this.life = arr;
            // One (unfilled) URL slot per invite; also defines getCount().
            this.imageUrls = new String[arr.invites.size()];
            this.ctx = context;
        }

        @Override
        public int getCount() {
            return imageUrls.length;
        }

        @Override
        public Object getItem(int position) {
            return position;
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @SuppressWarnings("null")
        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            View view = convertView;
            final ViewHolder holder;
            boolean contactBox = false;
            if (convertView == null) {
                // First use of this row slot: inflate and cache sub-views.
                view = getLayoutInflater().inflate(R.layout.item_list_image, parent, false);
                holder = new ViewHolder();
                holder.text = (TextView) view.findViewById(R.id.text);
                holder.image = (ImageView) view.findViewById(R.id.image);
                holder.cb = (CheckBox) view.findViewById(R.id.CheckBox);
                view.setTag(holder);
            } else {
                // Recycled row: reuse the cached holder.
                holder = (ViewHolder) view.getTag();
            }
            Invite inv = this.life.invites.get(position);
            holder.text.setText(inv.RecipientName);
            // holder.cb.setChecked(inv.);
            holder.cb.setTag(inv);
            holder.cb.setVisibility(View.VISIBLE);
            imageLoader.displayImage(imageUrls[position], holder.image, options, animateFirstListener);
            return view;
        }
    }

    /**
     * Fades an image in the first time a given URI finishes loading;
     * subsequent displays of the same URI appear without animation.
     */
    private static class AnimateFirstDisplayListener extends SimpleImageLoadingListener {

        // Shared across rows; synchronized because the loader may call back
        // from worker threads.
        static final List<String> displayedImages = Collections.synchronizedList(new LinkedList<String>());

        @Override
        public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage) {
            if (loadedImage != null) {
                ImageView imageView = (ImageView) view;
                boolean firstDisplay = !displayedImages.contains(imageUri);
                if (firstDisplay) {
                    FadeInBitmapDisplayer.animate(imageView, 500);
                    displayedImages.add(imageUri);
                }
            }
        }
    }
}
Java
UTF-8
819
2.109375
2
[]
no_license
package com.ljm.springbootdemo.properteis; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; @Configuration //使用配置加载器 @ConfigurationProperties(prefix = "loginvo.lijm") //指定属性前缀 @PropertySource("classpath:test.properties") //指定配置文件路径 public class LoginVo { private String username; private String password; public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } }
Python
UTF-8
4,985
2.703125
3
[]
no_license
# !usr/bin/env python
import os
import sys
import yaml
import csv
import urllib
import urllib.request
import urllib.error
import re

# Version list: Minecraft version -> CurseForge game-version filter code.
VERSION_DICT = {
    '1.7.10': '2020709689:4449',
    '1.8.9': '2020709689:5806',
    '1.9.4': '2020709689:6084',
    '1.10.2': '2020709689:6170',
    '1.11.2': '2020709689:6452',
    '1.12.2': '2020709689:6756'
}

with open('config.yml', 'r') as f:
    # safe_load instead of load: yaml.load without a Loader is unsafe on
    # untrusted input, warns on PyYAML >= 5.1 and raises TypeError on 6.0.
    config = yaml.safe_load(f)

project_url_root = config['project_url_root']
mod_list = config['mod_list']
project_list = config['project_list']
file_list = config['file_list']
user_agent_default = config['user_agent']


def get_html(url, user_agent=user_agent_default, retries=2):
    """Fetch a web page by URL, retrying server errors (5xx) up to `retries` times.

    :return: raw response bytes, or None on failure
    """
    headers = {'User-agent': user_agent}
    request = urllib.request.Request(url, headers=headers)
    try:
        html = urllib.request.urlopen(request).read()
    except urllib.error.URLError as e:
        print('urllib.error:{0}\nerror url:{1}\n'.format(e.reason, url))
        html = None
        if retries > 0:
            # Only retry on 5xx responses; other errors are final.
            if hasattr(e, 'code') and 500 <= e.code < 600:
                return get_html(url, user_agent, retries - 1)
    return html


def _project_searcher(name):
    """Search CurseForge for project slugs matching a mod name.

    Prints the matches and returns the list of project slugs.

    Note: get_html returns bytes and str() yields the "b'...'" repr — the
    regexes below deliberately match that repr (hence the literal \\r\\n),
    so do not decode the response instead.
    """
    html = str(get_html('{0}/search?search={1}'.format(project_url_root, name.replace(' ', '+'))))
    project_names = re.finditer(r'<a class="my-auto" href="/minecraft/mc-mods/(.*?)">\\r\\n\s*<h3 class=".*?">(.*?)</h3>', html)
    print("--------------------------------[{0}]的匹配项:--------------------------------".format(name))
    result = []
    for i, v in enumerate(project_names):
        result.append(v.group(1))
        print('{0}{1}. [ {2}----{3} ]\n'.format('\t' * i, i + 1, v.group(1), v.group(2)))
    print("----------------------------------------------------------------")
    return result


def _file_searcher(project, version):
    """Look up the newest file id and file name for `version` on a CurseForge project page.

    :return: (file_id, file_name), or (None, None) when nothing matches
    """
    html = str(get_html('{0}/{1}/files/all?filter-game-version={2}'.format(project_url_root, project, version)))
    file_id = re.search(r'<a data-action="file-link" href="/minecraft/mc-mods/.*?/files/(\d+)">.*?</a>', html)
    if file_id:
        html = str(get_html('{0}/{1}/files/{2}'.format(project_url_root, project, file_id.group(1))))
        file_name = re.search(r'<span class="font-bold text-sm leading-loose">Filename</span>\\r\\n\s*<span class="text-sm">(.*?)</span>', html)
        if file_name:
            return file_id.group(1), file_name.group(1)
    return None, None


def _search_project_by_name(name):
    """Interactive prompt for choosing the project that matches a mod name.

    Enter picks the first match, space skips, 'rs' re-searches with a new
    keyword, a number picks that match. Returns the chosen slug or ''.
    """
    search_result = _project_searcher(name)
    while True:
        input_str = input("请输入匹配的序号,[ 回车 ]默认为[ 1 ],[ 空格 ]则跳过放弃,[ rs ]手动输入关键字并重新搜索:")
        if input_str == ' ':
            input_value = -1
            break
        elif input_str == '':
            if len(search_result) > 0:
                input_value = 0
                break
            else:
                input_value = -1
        elif input_str.strip() == 'rs':
            fixed_name = input("请重新输入[ {0} ]的名称:\n".format(name))
            search_result = _project_searcher(fixed_name)
        else:
            input_value = int(input_str) - 1 if input_str.isdigit() else -1
            if 0 <= input_value < len(search_result):
                break
    result = search_result[input_value] if 0 <= input_value < len(search_result) else ''
    print("[ {0} ]的匹配项为[ {1} ]\n".format(name, result))
    return result


def _search_file_by_version(version='1.12.2'):
    """Resolve the file id/name for every saved project and write file_list."""
    version_code = VERSION_DICT[version]
    sys.stdout.write(u'\r当前抓取的mod:')
    with open(project_list, 'r') as pl:
        csv_data = csv.reader(pl)
        if os.path.exists(file_list):
            os.remove(file_list)
        with open(file_list, 'w+') as fl:
            for i, p in csv_data:
                file_info = _file_searcher(p, version_code)
                fl.write('{0},{1},{2},{3}\n'.format(i, p, file_info[0], file_info[1]))
                sys.stdout.write(u'\r \r当前抓取的mod:' + i)


with open(mod_list, 'r') as ml:
    input_str = input("""----------------------------------------------------------------
* 刷新mod列表请按[ 1 ]
* 刷新mod版本请按[ 2 ]
---------------------------------------------------------------""")
    if input_str == '1':
        # Rebuild the project list: resolve each mod name interactively.
        csv_data = csv.reader(ml)
        if os.path.exists(project_list):
            os.remove(project_list)
        with open(project_list, 'w+') as pl:
            for i, n, _, _ in csv_data:
                pl.write('{0},{1}\n'.format(i, _search_project_by_name(n)))
    elif input_str == '2':
        _search_file_by_version()
    else:
        pass
Java
UTF-8
409
1.640625
2
[]
no_license
package com.laoh.core.annotation; import com.laoh.core.WxConstants; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * @author hyd * @date 2020/8/15 12:49 */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @Msg(name = WxConstants.XML_MSG_LINK) public @interface Link { }
Ruby
UTF-8
1,976
2.609375
3
[]
no_license
require 'rubygems' require 'nokogiri' require 'open-uri' HEADERS = ["photo", "title", "price", "quantity", "availible", "ends"] DIAPER_NAMES = ["bububebe", "elemental", "lil joeys", "rumparooz", "bright star baby", "crankypants"] start_url = "http://hyenacart.com/stores/Spots_corner/index.php?title=1&desc=1&tags=1&allnot=&category=0&u=&submit=Search&all=" def parse_file url, search_terms results = [] search_terms.each do |search_term| search_term.gsub!(/\s/,"+") full_search = url + search_term doc = Nokogiri::HTML(open(full_search)) rows = doc.css('table.storeTable tr') # remove header row rows.shift rows.each do |row| datum = {} cols = row.css('td') HEADERS.each_with_index do |header, index| datum[header] = cols[index].text.strip end results << datum end end results end results = parse_file start_url, DIAPER_NAMES results.each do |result| ScraperWiki.save_sqlite(HEADERS, result) end require 'rubygems' require 'nokogiri' require 'open-uri' HEADERS = ["photo", "title", "price", "quantity", "availible", "ends"] DIAPER_NAMES = ["bububebe", "elemental", "lil joeys", "rumparooz", "bright star baby", "crankypants"] start_url = "http://hyenacart.com/stores/Spots_corner/index.php?title=1&desc=1&tags=1&allnot=&category=0&u=&submit=Search&all=" def parse_file url, search_terms results = [] search_terms.each do |search_term| search_term.gsub!(/\s/,"+") full_search = url + search_term doc = Nokogiri::HTML(open(full_search)) rows = doc.css('table.storeTable tr') # remove header row rows.shift rows.each do |row| datum = {} cols = row.css('td') HEADERS.each_with_index do |header, index| datum[header] = cols[index].text.strip end results << datum end end results end results = parse_file start_url, DIAPER_NAMES results.each do |result| ScraperWiki.save_sqlite(HEADERS, result) end
PHP
UTF-8
850
2.609375
3
[]
no_license
<?php /** * Created by PhpStorm. * User: shumer * Date: 6/16/14 * Time: 1:09 PM */ class Instruction extends CActiveRecord { public static function model($className = __CLASS__) { return parent::model($className); } public function rules() { return array( array('exercise_id, title', 'required'), array('title', 'length', 'min'=> 2), array('exercise_id', 'numerical', 'integerOnly' => true), array('exercise_id', 'exist', 'className' => 'Exercise', 'attributeName' => 'id'), ); } public function tableName() { return 'instruction'; } public function relations() { return array( ); } public function attributeLabels() { return array( 'title' => 'Инструкция', ); } }
Markdown
UTF-8
1,653
2.609375
3
[ "WTFPL" ]
permissive
# AAXtoMP3 Read the original complete instruction @ https://github.com/KrumpetPirate/AAXtoMP3 This is a (much) shorter summary of MacOS practice. Essentially you want to find your 8 char long AuthCode by any means. The AuthCode remains same for same Audible account, across all your purchased Audible audiobooks. The next step is run ``` ./AAXtoMP3 -A XYZ12345 example.aax ``` where XYZ12345 is to be replaced with your AuthCode and example.aax is downloaded from Audible.com ## prerequisite * ffmpeg ``` brew install ffmpeg ``` ## AuthCode https://github.com/inAudible-NG/audible-activator is an example of getting your AuthCode. It uses selenium to automate Chrome. Essentially you want to manage yourself to get a link like: ```python3 url = "https://www.audible.com/license/licenseForCustomerToken?customer_token=ABCDEFG...=" ``` The easiest way in my opinion is to ask iTunes to open the link for you. When first time example.aax is loaded into iTunes, iTunes is smart enough to ask you about authorizing iTunes from Audible by leading you to browse some url like: ``` https://www.audible.com/player-auth-token?playerId=abcdefg....&playerManufacturer=itunes&playerModel=mac&playerType=software&serial=& ``` In this page, a button <Activate in iTunes> redirects you to ```python3 url_itms = "itms://www.audible.com/license/licenseForCustomerToken?customer_token=ABCDEFG...=" ``` change itms to https, you get ```url``` next step is to make a request: ```python3 import requests r = requests.get(url, headers = {'User-Agent': "Audible Download Manager"}) import common auth_code, _ = common.extract_activation_bytes(r.content) ```
Python
UTF-8
2,228
2.703125
3
[]
no_license
from kivy.app import App from kivy.uix.button import Button from kivy.core.window import Window from kivy.uix.screenmanager import Screen from kivy.properties import ObjectProperty from kivy.lang import Builder Builder.load_file(r'kivy_env\scr\nonMD\sell_public\sell_public.kv') from scr.nonMD.database_functions import sell_item class SellPublicScreen(Screen): lbl_quantity_ice_block = ObjectProperty() lbl_quantity_ice_bag = ObjectProperty() lbl_quantity_water_19L = ObjectProperty() user_logged = ObjectProperty() def update_quantity(self, value,product): if value > 0: temp = float(product.text) + value product.text = str(temp) elif float(product.text) > 0: temp = float(product.text) + value product.text = str(temp) def reset_screen(self): self.lbl_quantity_ice_block.text = '0.0' self.lbl_quantity_ice_bag.text = '0.0' self.lbl_quantity_water_19L.text = '0.0' def sell_items(self): print("running") print(self.user_logged.text[6:]) temp = True if float(self.lbl_quantity_ice_block.text) > 0: sell_item("ice block",self.lbl_quantity_ice_block.text, 160, self.user_logged.text[6:]) temp = False if float(self.lbl_quantity_ice_bag.text) > 0: sell_item("ice bag",self.lbl_quantity_ice_bag.text, 30, self.user_logged.text[6:]) temp = False if float(self.lbl_quantity_water_19L.text) > 0: sell_item("water 19L",self.lbl_quantity_water_19L.text, 14, self.user_logged.text[6:]) temp = False if temp == False: self.parent.parent.sales_public_screen.load_data() self.parent.current = 'sales_public' self.reset_screen() class SellPublicApp(App): def build(self): return SellPublicScreen() if __name__ == '__main__': window_size = [1080, 2280] # Pixels of OnePlus 6 divide_screen_by = 3 window_size[0] /= divide_screen_by window_size[1] /= divide_screen_by Window.size = (window_size[0], window_size[1]) sell_public_screen_app = SellPublicApp() sell_public_screen_app.run()
PHP
UTF-8
811
3
3
[ "Apache-2.0" ]
permissive
<?php /** * DropLeft function * * dropLeft :: [a, b] Int c -> [b] * @package bingo-functional * @author Lochemem Bruno Michael * @license Apache 2.0 */ namespace Chemem\Bingo\Functional\Algorithms; const dropLeft = "Chemem\\Bingo\\Functional\\Algorithms\\dropLeft"; function dropLeft(array $collection, int $number = 1, array $acc = []) : array { $colVals = array_values($collection); $colKeys = array_keys($collection); $valCount = count($collection); $dropFn = function (int $init, array $acc = []) use ( $colVals, $colKeys, &$dropFn, $valCount ) { if ($init >= $valCount) { return $acc; } $acc[$colKeys[$init]] = $colVals[$init]; return $dropFn($init + 1, $acc); }; return $dropFn($number); }
Python
UTF-8
1,463
2.828125
3
[ "MIT" ]
permissive
from os import PathLike from typing import Optional, Union, Tuple, List import pygame from pygame.color import Color from pygame.font import Font from pygame_widgets.widget import WidgetBase class Frame(WidgetBase): Color = Union[ Color, Tuple[int, int, int], List[int], int, Tuple[int, int, int, int] ] Rect = Tuple[int, int, int, int] def __init__(self, win: pygame.Surface, x: int, y: int, width: int, height: int, *, background_color: Optional[Color] = None, inner_padding: Optional[Rect] = None): super().__init__(win, x, y, width, height) if background_color is None: background_color = Color(250, 250, 250, 255) if inner_padding is None: inner_padding = (0, 0, 0, 0) self.background_color = background_color self.inner_padding = inner_padding def listen(self, events): super().listen(events) def draw(self): super().draw() pygame.draw.rect(self.win, self.background_color, self.outer_rect()) def outer_rect(self) -> Rect: return (self._x, self._y, self._width, self._height) def inner_rect(self) -> Rect: return (self._x + self.inner_padding[0], self._y + self.inner_padding[1], self._width - self.inner_padding[0] - self.inner_padding[2], self._height - self.inner_padding[1] - self.inner_padding[3])
JavaScript
UTF-8
2,555
2.875
3
[]
no_license
var db = firebase.firestore(); var List = [] function loadPage() { getUserLogged().then(user => { cvUser(user.uid).then(list => { List = list loadDataListCV(list) }) }) } function getUserLogged() { return new Promise((resove, reject) => { firebase.auth().onAuthStateChanged(function(user) { if (user) { resove(user) } else { reject("ERROR") } }); }) } function cvUser(id) { return new Promise((resove, reject) => { let listCV = [] db.collection("CV").where("idUser", "==", id) .get().then(function(querySnapshot) { querySnapshot.forEach(function(doc) { let cv = { id: doc.id, cvImage: doc.data().cvImage, idUser: doc.data().idUser } listCV.push(cv); }); resove(listCV); }); }) } function loadDataListCV(arr) { let list = arr.map(element => { return ` <div class="col-4 d-flex flex-column align-items-center"> <a href="${element.cvImage}" data-lightbox="mygallery"> <img class="col-12 item" src="${element.cvImage}"> </a> <button id="removeCV" type="button" onclick="removeCV('${element.id}')"> <svg width="1em" height="1em" viewBox="0 0 16 16" class="bi bi-trash-fill" fill="currentColor" xmlns="http://www.w3.org/2000/svg"> <path fill-rule="evenodd" d="M2.5 1a1 1 0 0 0-1 1v1a1 1 0 0 0 1 1H3v9a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V4h.5a1 1 0 0 0 1-1V2a1 1 0 0 0-1-1H10a1 1 0 0 0-1-1H7a1 1 0 0 0-1 1H2.5zm3 4a.5.5 0 0 1 .5.5v7a.5.5 0 0 1-1 0v-7a.5.5 0 0 1 .5-.5zM8 5a.5.5 0 0 1 .5.5v7a.5.5 0 0 1-1 0v-7A.5.5 0 0 1 8 5zm3 .5a.5.5 0 0 0-1 0v7a.5.5 0 0 0 1 0v-7z"/> </svg> </button> </div> `; }) document.getElementById("list").innerHTML = list.join(""); } function removeCV(id) { deleteDocFromCollection("CV", id); } function removeElement(id) { return List.filter(element => { element.id != id }) } function deleteDocFromCollection(nameCollection, id) { db.collection(nameCollection).doc(id).delete().then(function() { loadDataListCV(removeElement(id)) alert("Bạn đã xoá thành công") }).catch(function(error) { console.error("Error removing document: ", error); }); }
Python
UTF-8
3,200
2.984375
3
[]
no_license
import os import torch import time import numpy as np import matplotlib.pyplot as plt import copy class TextTrainer: def __init__(self, train_dataloader, val_dataloader, model, criterion, optimizer, scheduler, early_stoper, EPOCH): self.train_dataloader = train_dataloader self.val_dataloader = val_dataloader self.model = model self.criterion = criterion self.optimizer = optimizer self.scheduler = scheduler self.early_stoper = early_stoper self.EPOCH = EPOCH def train(self): model_history = [] train_losses = [] val_losses = [] start_time = time.time() for i in range(self.EPOCH): train_loss = 0 val_loss = 0 self.model.train() for x, label in self.train_dataloader: self.optimizer.zero_grad() pred = self.model(x) loss = self.criterion(pred, label) loss.backward() self.optimizer.step() train_loss += loss.item()*len(x) / len(self.train_dataloader) if self.scheduler != None: self.scheduler.step() train_losses.append(train_loss) self.model.eval() for x, label in self.val_dataloader: pred = self.model(x) loss = self.criterion(pred, label) val_loss += loss.item()*len(x) / len(self.val_dataloader) val_losses.append(val_loss) model_history.append(copy.deepcopy(self.model.state_dict())) print("Epoch {0}/{1}. Train loss {2:.4}. 
Val loss {3:.4}".format(i + 1, self.EPOCH, train_loss, val_loss)) if self.early_stoper != None: self.early_stoper(val_loss, self.model) if self.early_stoper.early_stop: print("Early stop is processed.") break self.train_losses = train_losses self.val_losses = val_losses self.model_history = model_history end_time = time.time() self.trained_time = end_time - start_time print("---FINISHED---") print("Trained time: {0} secs".format(round(self.trained_time,3))) def save_model(self, best_model=True): if not os.path.isdir('artifact'): os.mkdir('artifact') if best_model: best_model_idx = np.argmin(np.array(self.val_losses)) torch.save(self.model_history[best_model_idx], os.path.join('artifact', "best_epoch_model.pt")) else: torch.save(self.model_history[len(self.val_losses)-1], os.path.join('artifact', "last_epoch_model.pt")) def plot_loss_graph(self): fig, axes = plt.subplots(2,1, figsize=(10,6)) x = list(range(len(self.val_losses))) axes[0].plot(x, self.train_losses, "-o", color='blue') axes[0].set_title("Training loss") axes[0].grid() fig.tight_layout() axes[1].plot(x, self.val_losses, "-o", color='red') axes[1].set_title("Validating loss") axes[1].set_xlabel('Epochs') axes[1].grid() fig.tight_layout() plt.show()
Java
UTF-8
4,642
2.515625
3
[]
no_license
package mapperreduce; import mapperreduce.mappers.*; import mapperreduce.mr_beans.Bean01; import mapperreduce.partition.Bean01Partition; import mapperreduce.reducers.*; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; /** * Created by hanzhiqiang on 2019/3/10. */ public class WordCountmain { public static void main(String[] agrs){ try { Job job = Job.getInstance(); //设定MR的入口 job.setJarByClass(WordCountmain.class); if ("1".equals(agrs[0])) { //计算单词总数 //设定job的Mapper和输出类型 job.setMapperClass(MyMapper.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); //设定job的Reducer和输出类型 job.setReducerClass(MyReducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); }else if("2".equals(agrs[0])){ //将所有字符串去重,并进行拼接 //设定job的Mapper和输出类型 job.setMapperClass(MyMapper1.class); job.setMapOutputKeyClass(IntWritable.class); job.setMapOutputValueClass(Text.class); //设定job的Reducer和输出类型 job.setReducerClass(MyReducer1.class); job.setOutputKeyClass(IntWritable.class); job.setOutputValueClass(Text.class); }else if("3".equals(agrs[0])){ //计算总工资 //设定job的Mapper和输出类型 job.setMapperClass(MyMapper2.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); //设定job的Reducer和输出类型 job.setReducerClass(MyReducer2.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); }else if("4".equals(agrs[0])){ //计算各部门的总工资 //设定job的Mapper和输出类型 job.setMapperClass(MyMapper3.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); //设定job的Reducer和输出类型 job.setReducerClass(MyReducer2.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); }else 
if("5".equals(agrs[0])){ //计算各部门的平均工资 //设定job的Mapper和输出类型 job.setMapperClass(MyMapper3.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); //设定job的Reducer和输出类型 job.setReducerClass(MyReducer3.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); }else if("bean".equals(agrs[0])){ //计算各部门的平均工资 //设定job的Mapper和输出类型 job.setMapperClass(MyMapper_bean.class); job.setMapOutputKeyClass(Bean01.class); job.setMapOutputValueClass(Bean01.class); //设定job的Reducer和输出类型 job.setReducerClass(MyReducer_bean.class); job.setOutputKeyClass(Bean01.class); job.setOutputValueClass(NullWritable.class); // job.setCombinerClass(MyCombanier.class); job.setPartitionerClass(Bean01Partition.class); job.setNumReduceTasks(3); } //设定整个任务的HDFS的输入和输出 FileInputFormat.setInputPaths(job, new Path(agrs[1])); // FileInputFormat.setInputPaths(job,new Path("/user/root/input/test.txt")); FileOutputFormat.setOutputPath(job, new Path(agrs[2]+agrs[0])); // FileOutputFormat.setOutputPath(job,new Path("/user/root/output1")); //提交任务 job.waitForCompletion(true); }catch (Exception e){ e.printStackTrace(); } } }
C#
UTF-8
1,690
3.046875
3
[]
no_license
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace NatlParkTracker { /// <summary> /// This is information about a visit to a park /// </summary> /// public enum TripType { Business, WithKids, Solo, Couple, Other } public class Visit { #region Variables private static int lastVisitNumber = 0; //private static double #endregion #region Properties public string Date { get; set; } public TripType TripType { get; set; } public string TripName { get; set; } public int VisitNumber { get; private set; } // number generated by the program with the constructor method public string ParkName { get; set; } public int DaysSpent { get; set; } public string Lodging { get; set; } public double VisitTotalSpent { get; private set; } public string TripNotes { get; set; } #endregion #region Constructor // run this method whenever "new visit" is invoked. this method has no return type, no parameters public Visit() // needs to be public so all can access and must be named the same as the class name. so when the "new Visit" is called { VisitNumber = ++lastVisitNumber; } #endregion #region Methods public double Spent (double amount) { VisitTotalSpent += amount; return VisitTotalSpent; } #endregion } }
Markdown
UTF-8
3,576
2.890625
3
[ "LicenseRef-scancode-public-domain" ]
permissive
> *The following text is extracted and transformed from the mylookout.com privacy policy that was archived on 2017-09-04. Please check the [original snapshot on the Wayback Machine](https://web.archive.org/web/20170904134958id_/https%3A//www.mylookout.com/about/privacy) for the most accurate reproduction.* # | Privacy & Security At Lookout, we’re committed to building products and services that we are proud to share with our family, friends, and people throughout the world. We are committed to security and are strong privacy advocates. Our Security and Privacy Principles guide the decisions we make at our company that help keep you safe and keep your information private. We employ advanced security practices to keep your data safe. We’re trusted security experts. We use information to provide better protection for you. We will always tell you what type of information we collect and how we use it. Lookout improves your privacy and security. #### We employ advanced security practices to keep your data safe. * It’s our responsibility to protect the data you entrust to us. Security isn’t just something we do, it’s the core of our business. * We always transmit your information securely using 128-bit SSL encryption. * We store data only in secure data centers protected by 24/7 security. * Our website and client software are hardened against attacks — we work with third-party penetration testers to simulate attacks on our own systems #### We’re trusted security experts. * Our team is staffed with security experts who are active in the security community and have previously worked at security companies such as Symantec and McAfee. * Our investors and leaders have helped build companies such as Sun Microsystems, PayPal, Juniper Networks, Good Technology, and Symantec. * Millions of people have already chosen to trust Lookout to keep their phone safe. #### We use information to provide better protection for you. 
* When you share information with us, we use it to provide better products and services for you and millions of other people who trust Lookout. * By anonymously aggregating information from users around the world, we can share insights that help make everyone safer. For example, when we share information about the most prevalent mobile malware in the wild, you can take steps to protect yourself. #### We will always tell you what type of information we collect and how we use it. * We strive to clearly explain the information we collect and how we use it to deliver better products and services. For example, we wrote our [privacy policy](https://web.archive.org/legal/privacy-policy) so normal people can read it. * You should never be surprised by what information we collect. We aim to make people aware of the information we collect and how it is being used to improve their mobile experience. #### Lookout improves your privacy and security. * By using Lookout, you’ll be protected from malware and spyware, your data will be backed up, and you’ll be able to secure your phone if it is lost or stolen. * Lookout’s mission is to keep people safe and secure — this applies to your personal information too. #### Want to learn more about Lookout's legal policies? For more detailed information, visit our [legal center](https://web.archive.org/legal). If you are a security researcher and need to report a security issue, view our [Responsible Disclosure of Security Vulnerabilities](https://web.archive.org/responsible-disclosure) page.
C
UTF-8
2,887
4.0625
4
[]
no_license
#include<stdio.h> #define PI 3.14 int main(){ // Format Specifier - It tells the compiler what type of data is in a varibale during taking input and displaying output to the user. // For Example:- // printf("This is a good boy %a.bf", var) will print var with b decimal points in a 'a' character space. int a = 8; float b = 7.333; printf("The value of a is %d and the value of b is %f\n", a, b); // Here :- // %d - Format Specifier for integer values // %f - Format Specifier for floating point values printf("%0.5f\n", b); // Five decimal places will be shown without offset as there are 5 characters(icluding decimal point) but we gave 0(0.5) // In simple words we have character space of 0(defaults - size of the specifed characters) with accuracy of 5 decimal places printf("%8.5f\n", b); // Five decimal places will be printed and as there sre 7 characeters(5 decimal places, decimal point and 7) so we get one space(offest) before the output. // In simple words we have character space of 8(which includes decimal point) with accuracy of 5 decimal places printf("%-10.5f Check\n", b); // We specify character space of 10 with 5 decimal points. // We have total 7 characters(7, decimal point, 5 decimal places) so we get 3 blank spaces // Here - sign specifies that the spaces will be placed after the characters are displayed // Important Format Specifiers:- // 1. %c - Character // 2. %d - Integer // 3. %f - Floating Point Number // 4. %l - Long Integer // 5. %lf - Double // 5. %Lf - Long Double // int n; // printf("Enter a Number\n"); // scanf("%Lf", &n); // printf("%d = %d", sizeof(long double), sizeof(n)); // Constants in C:- // A constant is a vbalue or variable that can't be changes in the program. // For Example:- 15, 2.3, "Tushar Dimri", etc. // There are 2 ways to define constants in C:- // 1. const keyword const float pi = 3.14; // We cannot change the value of pi. printf("%f\n", pi); // 2. #define preprocessor // Check line 2 of this code. 
printf("%0.2f\n", PI); // Escape Sequences in C:- // An escape sequence in C is a sequence of characters. It doesn't represent itself when used inside string literal or character.It is composed of 2 or more characters starting with a backslash(\) // For Example:- \n - New Line // Some Escape Sequences:- // 1. \a - Alram or Beep // 2. \b - Backspace // 3. \t - Tab(Horizontal) // 4. \' - Single Quote // 5. \" - Double Quote // 6. \\ - Backslash printf("Backslash - \\ \n"); printf("Newline Character - \\n \n"); printf("Tab\tCharacter\n"); // Comments in C:- // 1. // Single line Comment // 2. /* Multi Line Comment */ return 0; }
Java
UTF-8
3,829
2.359375
2
[]
no_license
package com.Smileyes.nsfw.user.service.impl; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.OutputStream; import java.util.List; import javax.annotation.Resource; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.springframework.stereotype.Service; import com.Smileyes.core.exception.ServiceException; import com.Smileyes.core.service.impl.BaseServiceImpl; import com.Smileyes.core.utils.ExcelUtil; import com.Smileyes.nsfw.role.entity.Role; import com.Smileyes.nsfw.user.dao.UserDao; import com.Smileyes.nsfw.user.entity.User; import com.Smileyes.nsfw.user.entity.UserRole; import com.Smileyes.nsfw.user.service.UserService; /* * UserService的实现类 * * @author Smileyes * */ @Service("userService") public class UserServiceImpl extends BaseServiceImpl<User> implements UserService { private UserDao userDao; @Resource public void setUserDao(UserDao userDao) { super.setBaseDao(userDao); this.userDao = userDao; } public UserDao getUserDao() { return userDao; } public List<User> findByNameOrAccount(String name) { return this.userDao.findByNameOrAccount(name); } public void exportExcel(OutputStream outputStream) throws ServiceException { HSSFWorkbook workbook = null; try { // 1.创建工作簿 workbook = new HSSFWorkbook(); // 2.获取数据列表 List<User> list = this.userDao.list(); // 3.将数据添加到工作簿中 ExcelUtil.exportExcel(workbook, list); // 4.输出文件 workbook.write(outputStream); } catch (Exception e) { throw new ServiceException("Srvice出错:" + e.getMessage()); } finally { try { if (workbook != null) { workbook.close(); } } catch (IOException e) { e.printStackTrace(); } } } public void importExcel(File excelFile, String excelFileName) throws ServiceException { Workbook workbook = null; FileInputStream fis = null; try { // 1.创建输入流 fis = new FileInputStream(excelFile); // 2.获得工作簿 workbook = (excelFileName.matches("^.+\\.(?i)(xls)$")) ? 
new HSSFWorkbook(fis) : new XSSFWorkbook(fis); // 3.读取excel文件内容 List<User> list = ExcelUtil.importExcel(workbook); // 添加用户 if (list != null && list.size() != 0) { for (User user : list) { if (!userDao.checkAccountAndId(user.getAccount(), user.getId())) { this.userDao.add(user); } } } } catch (Exception e) { throw new ServiceException("业务执行出错"); } finally { try { if (workbook != null) { workbook.close(); } if (fis != null) { fis.close(); } } catch (IOException e) { throw new ServiceException("Service出现IO异常:" + e.getMessage()); } } } public boolean checkAccountAndId(String account, String id) { return this.userDao.checkAccountAndId(account, id); } public void saveUserAndRole(User user, String... roles) { // 保存用户 this.userDao.add(user); // 保存用户的角色信息 for (String roleId : roles) { userDao.saveUserRole(new Role(roleId), user.getId()); } } // 更新用户与角色信息 public void updateUserAndRole(User user, String... roles) { // 更新用户 this.userDao.update(user); // 删除旧的角色信息 this.userDao.removeOldROle(user.getId()); // 保存新的角色信息 for (String roleId : roles) { userDao.saveUserRole(new Role(roleId), user.getId()); } } public List<UserRole> findRoleInfo(String id) { return userDao.findRoleInfo(id); } // 通过账户与密码查询用户 public User finByAccountAndPassword(String account, String password) { return this.userDao.finByAccountAndPassword(account, password); } }
C#
UTF-8
2,199
2.890625
3
[]
no_license
using Microsoft.Extensions.Localization; using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Reflection; using System.Threading.Tasks; namespace WebApplication1.Data { public class Device { private readonly IStringLocalizer<Device> _localizer; public Device(IStringLocalizer<Device> localizer) { _localizer = localizer; } private DeviceStates _deviceState; [Display(Name = "Device State Since")] public DateTime? DeviceStateSince { get; set; } [Display(Name = "Device state")] public string DeviceStateDisplay { get { if (Convert.ToInt64(DeviceState) == 0) return _localizer[GetDisplayName(DeviceState)]; var result = new List<string>(); foreach (Enum value in Enum.GetValues(typeof(DeviceStates))) { if (Convert.ToInt64(value) > 0 && DeviceState.HasFlag(value)) { result.Add(_localizer[GetDisplayName(value)]); } } return string.Join(", ", result); } } private static string GetDisplayName(Enum enumValue) { return enumValue.GetType()? .GetMember(enumValue.ToString())? .First()? .GetCustomAttribute<DisplayAttribute>()? .Name; } public DeviceStates DeviceState { get { return _deviceState; } set { //only update on devicestate changes //TODO Thingk about this, if this is ok to only update when a state changes to OK ,Missing or Lampfailure; state changes to overtemp etc. are discarded if (value != _deviceState && (value.HasFlag(DeviceStates.Ok) || !IsDeviceStateOk())) { DeviceStateSince = DateTime.UtcNow; } _deviceState = value; } } public virtual bool IsDeviceStateOk() { return !DeviceState.HasFlag(DeviceStates.Disabled) && !DeviceState.HasFlag(DeviceStates.Missing) && !DeviceState.HasFlag(DeviceStates.LampFailure) && !DeviceState.HasFlag(DeviceStates.Faulty); } } }
Java
UTF-8
8,578
2.3125
2
[]
no_license
package mojang.entity.projectile;

import mojang.*;
import mojang.entity.Entity;
import mojang.entity.LivingEntity;
import mojang.tags.CompoundTag;
import mojang.world.World;

import java.io.IOException;
import java.util.List;

/**
 * Arrow projectile entity (decompiled, obfuscated source; single-letter
 * names preserved). Handles spawning from a shooter, per-tick flight
 * physics, block/entity collision, NBT (de)serialization and pickup.
 *
 * NOTE(review): field/method semantics below are inferred from usage and
 * the NBT tag names; confirm against the deobfuscation mappings.
 */
public class Arrow extends Entity {
    private int c = -1;        // saved as "xTile" -- presumably x of the block the arrow is stuck in (TODO confirm)
    private int d = -1;        // saved as "yTile"
    private int e = -1;        // saved as "zTile"
    private int f = 0;         // saved as "inTile" -- value returned by as.a(c,d,e) when landing (block id?)
    private boolean g = false; // saved as "inGround" -- true while stuck in a block
    public int a = 0;          // saved as "shake" -- countdown set to 7 on impact, decremented each tick
    public LivingEntity b;     // the shooter; used to avoid self-hits and gate pickup
    private int h;             // ticks spent stuck in the same block; J() is called at 1200 (despawn? TODO confirm)
    private int i = 0;         // ticks in flight; shooter can only be hit once this reaches 5

    public Arrow(World var1) {
        super(var1);
        this.a(0.5F, 0.5F);
    }

    public Arrow(World var1, double var2, double var4, double var6) {
        super(var1);
        this.a(0.5F, 0.5F);
        this.b(var2, var4, var6);
        this.aO = 0.0F;
    }

    /**
     * Spawns the arrow at the shooter's eye position, offset slightly by the
     * shooter's yaw, and launches it along the shooter's look vector.
     * NOTE(review): aw/ax/ay look like position, az/aA/aB like motion,
     * aC/aD like yaw/pitch -- confirm against mappings.
     */
    public Arrow(World var1, LivingEntity var2) {
        super(var1);
        this.b = var2;
        this.a(0.5F, 0.5F);
        this.c(var2.aw, var2.ax + (double)var2.v(), var2.ay, var2.aC, var2.aD);
        // Nudge the spawn point sideways/down so the arrow does not clip the shooter.
        this.aw -= (double)(fi.b(this.aC / 180.0F * 3.1415927F) * 0.16F);
        this.ax -= 0.10000000149011612D;
        this.ay -= (double)(fi.a(this.aC / 180.0F * 3.1415927F) * 0.16F);
        this.b(this.aw, this.ax, this.ay);
        this.aO = 0.0F;
        // Look vector from yaw/pitch (fi.a/fi.b are presumably sin/cos -- TODO confirm).
        this.az = (double)(-fi.a(this.aC / 180.0F * 3.1415927F) * fi.b(this.aD / 180.0F * 3.1415927F));
        this.aB = (double)(fi.b(this.aC / 180.0F * 3.1415927F) * fi.b(this.aD / 180.0F * 3.1415927F));
        this.aA = (double)(-fi.a(this.aD / 180.0F * 3.1415927F));
        this.a(this.az, this.aA, this.aB, 1.5F, 1.0F);
    }

    /**
     * Launches the arrow along direction (var1,var3,var5) with speed var7 and
     * random spread var8: the direction is normalized, jittered by a Gaussian
     * scaled with var8, scaled by var7, and the yaw/pitch are derived from it.
     */
    public void a(double var1, double var3, double var5, float var7, float var8) {
        float var9 = fi.a(var1 * var1 + var3 * var3 + var5 * var5); // vector length (fi.a on a sum of squares -- sqrt? TODO confirm)
        var1 /= (double)var9;
        var3 /= (double)var9;
        var5 /= (double)var9;
        // Random spread proportional to var8.
        var1 += this.bd.nextGaussian() * 0.007499999832361937D * (double)var8;
        var3 += this.bd.nextGaussian() * 0.007499999832361937D * (double)var8;
        var5 += this.bd.nextGaussian() * 0.007499999832361937D * (double)var8;
        var1 *= (double)var7;
        var3 *= (double)var7;
        var5 *= (double)var7;
        this.az = var1;
        this.aA = var3;
        this.aB = var5;
        float var10 = fi.a(var1 * var1 + var5 * var5);
        // Derive yaw (aC) and pitch (aD) from the motion vector; aE/aF hold the previous values.
        this.aE = this.aC = (float)(Math.atan2(var1, var5) * 180.0D / 3.1415927410125732D);
        this.aF = this.aD = (float)(Math.atan2(var3, (double)var10) * 180.0D / 3.1415927410125732D);
        this.h = 0;
    }

    /**
     * Sets the motion vector directly; if the rotation has never been set
     * (prev yaw/pitch both zero), initializes yaw/pitch from the new motion.
     */
    public void a(double var1, double var3, double var5) {
        this.az = var1;
        this.aA = var3;
        this.aB = var5;
        if(this.aF == 0.0F && this.aE == 0.0F) {
            float var7 = fi.a(var1 * var1 + var5 * var5);
            this.aE = this.aC = (float)(Math.atan2(var1, var5) * 180.0D / 3.1415927410125732D);
            this.aF = this.aD = (float)(Math.atan2(var3, (double)var7) * 180.0D / 3.1415927410125732D);
        }
    }

    /**
     * Per-tick update: when stuck, waits until the block changes (or 1200
     * ticks elapse, then J()); when flying, ray-traces one motion step against
     * blocks and entities, applies damage or embeds in the ground, then
     * integrates position, rotation, drag and gravity.
     */
    public void e_() throws IOException {
        super.e_();
        if(this.aF == 0.0F && this.aE == 0.0F) {
            // Rotation never initialized -- derive it from current motion.
            float var1 = fi.a(this.az * this.az + this.aB * this.aB);
            this.aE = this.aC = (float)(Math.atan2(this.az, this.aB) * 180.0D / 3.1415927410125732D);
            this.aF = this.aD = (float)(Math.atan2(this.aA, (double)var1) * 180.0D / 3.1415927410125732D);
        }
        if(this.a > 0) {
            --this.a; // shake countdown
        }
        if(this.g) {
            // Stuck in a block: stay put while the block at (c,d,e) is unchanged.
            int var15 = this.as.a(this.c, this.d, this.e);
            if(var15 == this.f) {
                ++this.h;
                if(this.h == 1200) {
                    this.J(); // time out after 1200 ticks stuck
                }
                return;
            }
            // Block changed under us -- pop free with damped random motion.
            this.g = false;
            this.az *= (double)(this.bd.nextFloat() * 0.2F);
            this.aA *= (double)(this.bd.nextFloat() * 0.2F);
            this.aB *= (double)(this.bd.nextFloat() * 0.2F);
            this.h = 0;
            this.i = 0;
        } else {
            ++this.i; // flight-time counter
        }

        // Ray-trace from current position to position + motion against blocks.
        mojang.ao var16 = mojang.ao.b(this.aw, this.ax, this.ay);
        ao var2 = mojang.ao.b(this.aw + this.az, this.ax + this.aA, this.ay + this.aB);
        nx var3 = this.as.a(var16, var2);
        var16 = mojang.ao.b(this.aw, this.ax, this.ay);
        var2 = mojang.ao.b(this.aw + this.az, this.ax + this.aA, this.ay + this.aB);
        if(var3 != null) {
            // Clamp the ray end to the block hit point so entity checks stop there.
            var2 = mojang.ao.b(var3.f.a, var3.f.b, var3.f.c);
        }

        // Find the closest hittable entity along the motion step.
        Entity var4 = null;
        List var5 = this.as.b((Entity)this, this.aG.a(this.az, this.aA, this.aB).b(1.0D, 1.0D, 1.0D));
        double var6 = 0.0D;

        float var10;
        for(int var8 = 0; var8 < var5.size(); ++var8) {
            Entity var9 = (Entity)var5.get(var8);
            // Skip the shooter during the first 5 flight ticks so the arrow clears them.
            if(var9.n_() && (var9 != this.b || this.i >= 5)) {
                var10 = 0.3F;
                co var11 = var9.aG.b((double)var10, (double)var10, (double)var10); // inflated bounding box
                nx var12 = var11.a(var16, var2);
                if(var12 != null) {
                    double var13 = var16.c(var12.f);
                    // Keep the nearest hit (var6 == 0.0D doubles as "none yet").
                    if(var13 < var6 || var6 == 0.0D) {
                        var4 = var9;
                        var6 = var13;
                    }
                }
            }
        }

        if(var4 != null) {
            var3 = new nx(var4); // entity hits take priority over the block hit
        }

        float var17;
        if(var3 != null) {
            if(var3.g != null) {
                // Hit an entity: deal damage (4) attributed to the shooter.
                if(var3.g.a(this.b, 4)) {
                    this.as.a(this, "random.drr", 1.0F, 1.2F / (this.bd.nextFloat() * 0.2F + 0.9F));
                    this.J();
                } else {
                    // Damage rejected -- bounce back with 10% speed, flipped yaw.
                    this.az *= -0.10000000149011612D;
                    this.aA *= -0.10000000149011612D;
                    this.aB *= -0.10000000149011612D;
                    this.aC += 180.0F;
                    this.aE += 180.0F;
                    this.i = 0;
                }
            } else {
                // Hit a block: record the block, embed the arrow slightly before the hit point.
                this.c = var3.b;
                this.d = var3.c;
                this.e = var3.d;
                this.f = this.as.a(this.c, this.d, this.e);
                this.az = (double)((float)(var3.f.a - this.aw));
                this.aA = (double)((float)(var3.f.b - this.ax));
                this.aB = (double)((float)(var3.f.c - this.ay));
                var17 = fi.a(this.az * this.az + this.aA * this.aA + this.aB * this.aB);
                this.aw -= this.az / (double)var17 * 0.05000000074505806D;
                this.ax -= this.aA / (double)var17 * 0.05000000074505806D;
                this.ay -= this.aB / (double)var17 * 0.05000000074505806D;
                this.as.a(this, "random.drr", 1.0F, 1.2F / (this.bd.nextFloat() * 0.2F + 0.9F));
                this.g = true;
                this.a = 7; // start the pickup-blocking shake
            }
        }

        // Integrate position and re-derive yaw/pitch from motion.
        this.aw += this.az;
        this.ax += this.aA;
        this.ay += this.aB;
        var17 = fi.a(this.az * this.az + this.aB * this.aB);
        this.aC = (float)(Math.atan2(this.az, this.aB) * 180.0D / 3.1415927410125732D);

        // Unwind the previous angles so interpolation below never crosses +/-180.
        for(this.aD = (float)(Math.atan2(this.aA, (double)var17) * 180.0D / 3.1415927410125732D); this.aD - this.aF < -180.0F; this.aF -= 360.0F) {
            ;
        }

        while(this.aD - this.aF >= 180.0F) {
            this.aF += 360.0F;
        }

        while(this.aC - this.aE < -180.0F) {
            this.aE -= 360.0F;
        }

        while(this.aC - this.aE >= 180.0F) {
            this.aE += 360.0F;
        }

        // Smooth the rendered rotation 20% toward the target each tick.
        this.aD = this.aF + (this.aD - this.aF) * 0.2F;
        this.aC = this.aE + (this.aC - this.aE) * 0.2F;
        float var18 = 0.99F; // air drag factor
        var10 = 0.03F;       // gravity per tick
        if(this.h_()) {
            // In water (presumably -- TODO confirm h_()): emit bubble trail, increase drag.
            for(int var19 = 0; var19 < 4; ++var19) {
                float var20 = 0.25F;
                this.as.a("bubble", this.aw - this.az * (double)var20, this.ax - this.aA * (double)var20, this.ay - this.aB * (double)var20, this.az, this.aA, this.aB);
            }
            var18 = 0.8F;
        }

        this.az *= (double)var18;
        this.aA *= (double)var18;
        this.aB *= (double)var18;
        this.aA -= (double)var10;
        this.b(this.aw, this.ax, this.ay);
    }

    /** Serializes the stuck-block coordinates, block id, shake counter and inGround flag to NBT. */
    public void a(CompoundTag var1) {
        var1.putShortTag("xTile", (short)this.c);
        var1.putShortTag("yTile", (short)this.d);
        var1.putShortTag("zTile", (short)this.e);
        var1.putByteTag("inTile", (byte)this.f);
        var1.putByteTag("shake", (byte)this.a);
        var1.putByteTag("inGround", (byte)(this.g?1:0));
    }

    /** Restores state from NBT; byte fields are masked to unsigned range. */
    public void b(CompoundTag var1) {
        this.c = var1.getShort("xTile");
        this.d = var1.getShort("yTile");
        this.e = var1.getShort("zTile");
        this.f = var1.getByte("inTile") & 255;
        this.a = var1.getByte("shake") & 255;
        this.g = var1.getByte("inGround") == 1;
    }

    /**
     * Pickup handler (var1 is presumably the player -- TODO confirm eb):
     * server-side only, requires the arrow to be in the ground with no shake
     * left, the toucher to be the shooter, and inventory room for one arrow item.
     */
    public void b(eb var1) {
        if(!this.as.z) {
            if(this.g && this.b == var1 && this.a <= 0 && var1.e.a(new fp(Item.j.aW, 1))) {
                this.as.a(this, "random.pop", 0.2F, ((this.bd.nextFloat() - this.bd.nextFloat()) * 0.7F + 1.0F) * 2.0F);
                var1.b(this, 1);
                this.J();
            }
        }
    }

    /** Always 0 -- the arrow itself does not block/absorb (shadow size? TODO confirm). */
    public float i_() {
        return 0.0F;
    }
}
Python
UTF-8
1,235
2.625
3
[]
no_license
from django.core.management.base import BaseCommand

from users.models import User


class Command(BaseCommand):
    """Wipe the User table and seed it with a superuser plus test users.

    WARNING: destructive -- every existing user is deleted before seeding.
    """

    help = 'Fill DB with test users'

    def add_arguments(self, parser):
        # Number of plain test users to create (in addition to the superuser).
        parser.add_argument('count', type=int)

    def handle(self, *args, **options):
        count = options['count']

        # Start from a clean slate.
        User.objects.all().delete()

        user = User.objects.create_superuser(
            username='django',
            email='django@email.com',
            password='geekbrains',
            first_name='django',
            last_name='geekbrains',
        )
        # Use self.stdout instead of print() so output respects --no-color /
        # redirection and is capturable in tests (Django convention).
        self.stdout.write(f'Superuser {user} created')

        for i in range(count):
            user = User.objects.create_user(
                username=f'Test_{i}',
                email=f'Test_User_{i}@email.com',
                password=f'Test_User_{i}',
                first_name=f'Test_{i}',
                last_name=f'User_{i}',
            )
            self.stdout.write(f'User {user} created')

        self.stdout.write('done')
JavaScript
UTF-8
2,395
3.0625
3
[]
no_license
import React, {Component} from "react" import Square from "./Square" class ParentComponent extends Component { constructor() { super() this.state = { colors: ["white", "white", "white", "white"] } this.handleClick = this.handleClick.bind(this) this.handleClick2 = this.handleClick2.bind(this) this.handleClick3 = this.handleClick3.bind(this) this.handleClick4 = this.handleClick4.bind(this) } handleClick() { this.setState(prevState => { if(prevState.colors[0] === "white"){ return { colors: ["black", "black", "black", "black"] } } else { return { colors: ["white", "white", "white", "white"] } } } ) } handleClick2() { this.setState(prevState => { return { colors: ["purple", "purple", prevState.colors[2], prevState.colors[3]] } }) } handleClick3() { this.setState(prevState => { return { colors: [prevState.colors[0], prevState.colors[1], "blue", prevState.colors[3]] } }) } handleClick4() { this.setState(prevState => { return { colors: [prevState.colors[0], prevState.colors[1], prevState.colors[3], "blue"] } }) } // handleClick2(){ // console.log("this was hit") // this.setState({colors: ["purple", "purple", "purple", "purple"]}) // } render () { const squareColorsMap = this.state.colors.map((color, id) => <Square key={id} color={color} />) return ( <div> <div className="container"> <button onClick= {this.handleClick}>HIT ME!</button> <button onClick= {this.handleClick2}>try me</button> <button onClick= {this.handleClick3}>YOU BE BLUE</button> <button onClick= {this.handleClick4}>YOU BLUE TOO</button> </div> <div className= "boxContainer"> {squareColorsMap} </div> </div> ) } } export default ParentComponent