| text | repo_name | path | language | license | size | score |
| --- | --- | --- | --- | --- | --- | --- |
| stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 |
#!/usr/bin/env python
import argparse
import sys
import re
import os
import locale
import subprocess
import traceback
from multiprocessing import Process
def dbquery(query):
    import psycopg2
    db = psycopg2.connect(dbname="firmware", user="firmadyne", password="firmadyne", host="127.0.0.1")
    ret = None
    cur = None
    try:
        cur = db.cursor()
        cur.execute(query)
        # fetch inside the try block so a failed query cannot raise again in the cleanup path
        ret = cur.fetchall()
    except BaseException:
        traceback.print_exc()
    finally:
        if cur:
            cur.close()
        db.close()
    return ret
def source(iid):
# source code analysis
script = os.getcwd() + '/analysis/source.sh'
p = subprocess.run([script, str(iid)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(p.stdout.decode())
print(p.stderr.decode())
# calculate the score of security
resultdir = os.getcwd() + '/results/' + str(iid) + '/source'
firmware_score = 0
for (rootdir, dirs, files) in os.walk(resultdir):
for outfile in files:
if outfile.endswith('.dec.c.out'):
file_score = 0
# calculate the score of this file
for line in open(rootdir + '/' + outfile, "r"):
line = line.strip()
                    if re.search(r'Hits/KSLOC@level\+', line):
sp = line.split()
file_score += float(sp[3])
file_score += float(sp[5])
file_score += float(sp[7])
file_score += float(sp[9])
file_score += float(sp[11])
file_score += float(sp[13])
# file_score transition function
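                # this maps the accumulated hit density onto a bounded 0-10 scale:
                # zero hits give a score of 0, and the score approaches 10
                # asymptotically as the number of hits grows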
file_score = 10 - 600 / (file_score + 60)
# store the file_score information in the database
#print(rootdir + '/' + outfile + ": " + str(file_score))
firmware_score += file_score
# firmware_score transition function
firmware_score = 10 - 500 / (firmware_score + 50)
# store the firmware_score information in the database
#print(str(iid) + ": " + firmware_score)
def angr(iid):
print('warning: the Angr function is under development')
# TODO
def afl(iid):
sys.path.append('./analysis')
import afl
resultdir = os.getcwd() + '/results/' + iid + '/afl'
afl.process(iid, resultdir)
def netafl(iid, ip):
resultdir = os.getcwd() + '/results/' + iid + '/netafl'
script = os.getcwd() + '/analysis/netafl.py'
print('warning: the network AFL function is under development')
# TODO
def metasploit(iid, ip):
sys.path.append('./analysis/metasploit')
import runExploits
    exploits = list(runExploits.METASPLOIT_EXPLOITS.keys()) + list(runExploits.SHELL_EXPLOITS.keys())
resultdir = os.getcwd() + '/results/' + iid + '/metasploit'
if not os.path.isdir(resultdir):
if os.path.exists(resultdir):
os.remove(resultdir)
os.makedirs(resultdir, 0o755)
outfile = resultdir + "/%(exploit)s.log"
runExploits.process(ip, exploits, outfile)
def extract(input_file):
sys.path.append('./scripts')
import extractor
e = extractor.Extractor(input_file, 'images', True, False, False, '127.0.0.1', None)
ocwd = os.getcwd()
(iid, repeated) = e.extract()
os.chdir(ocwd)
return (iid, repeated)
def importdb(iid):
sys.path.append('./db')
import importdb
image = './images/' + str(iid) + '.tar.gz'
importdb.getarch(image)
importdb.process(iid, image)
def makeimage(iid):
p = subprocess.run(['sudo', './qemu/scripts/makeImage.sh', str(iid)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(p.stdout.decode())
print(p.stderr.decode())
def infernetwork(iid):
p = subprocess.run(['./qemu/scripts/inferNetwork.sh', str(iid)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(p.stdout.decode())
print(p.stderr.decode())
def getIP(iid):
    ip = None
    import psycopg2
    db = psycopg2.connect(dbname="firmware", user="firmadyne", password="firmadyne", host="127.0.0.1")
    cur = None
    try:
        cur = db.cursor()
        # parameterised query avoids building SQL by string concatenation
        cur.execute("SELECT ip FROM image WHERE id=%s", (iid,))
        row = cur.fetchone()
        if row:
            ip = row[0]
    except BaseException:
        traceback.print_exc()
    finally:
        if cur:
            cur.close()
        db.close()
    return ip
def rootfs_extracted(iid):
query = 'select rootfs_extracted from image where id=' + iid + ';'
return dbquery(query)[0][0]
def main():
os.chdir(os.path.dirname(os.path.realpath(__file__)))
parser = argparse.ArgumentParser(description="Linux-based firmware analysis")
parser.add_argument("input_file", action="store", help="Input firmware image")
parser.add_argument("-i", dest="id", action="store",
default=None, help="firmware ID")
parser.add_argument("-s", dest="source", action="store_true",
default=False, help="Enable source code analysis")
parser.add_argument("-a", dest="angr", action="store_true",
default=False, help="Enable static analysis with Angr")
parser.add_argument("-f", dest="afl", action="store_true",
default=False, help="Fuzzing the firmware binaries with AFL")
parser.add_argument("-n", dest="netafl", action="store_true",
default=False, help="Fuzzing the network services with AFL")
parser.add_argument("-m", dest="metasploit", action="store_true",
default=False, help="Penetration test with metasploit exploits")
arg = parser.parse_args()
(iid, repeated) = extract(arg.input_file)
    if arg.id is not None and iid != arg.id:
print('error: frontend firmware ID and backend image ID conflict')
sys.exit(1)
if not rootfs_extracted(iid):
print('error: cannot find rootfs')
sys.exit(1)
# importdb
if not repeated:
importdb(iid)
    if arg.source:
        # only override the image id when one was explicitly supplied on the command line
        if arg.id is not None:
            iid = arg.id
s = Process(target=source, args=(iid,))
s.start()
# makeImage, inferNetwork
if not repeated:
makeimage(iid)
infernetwork(iid)
ip = getIP(iid)
if not ip:
print('warning: no interface detected')
if arg.angr:
a = Process(target=angr, args=(iid,))
a.start()
if arg.afl:
f = Process(target=afl, args=(iid,))
f.start()
if arg.netafl and ip:
n = Process(target=netafl, args=(iid, ip))
n.start()
if arg.metasploit and ip:
m = Process(target=metasploit, args=(iid, ip))
m.start()
# join
if arg.source:
s.join()
if arg.angr:
a.join()
if arg.afl:
f.join()
if arg.netafl and ip:
n.join()
if arg.metasploit and ip:
m.join()
if __name__ == '__main__':
    main()
| niorehkids/firmanal | analyze.py | Python | mit | 6,828 | 0.006737 |
"""Simulation of controlled dumbbell around Itokawa with
simulated imagery using Blender
This will generate the imagery of Itokawa from a spacecraft following
a vertical descent onto the surface.
4 August 2017 - Shankar Kulumani
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from scipy import integrate
import numpy as np
import pdb
import h5py, cv2
import visualization.plotting as plotting
from visualization import blender_camera
from dynamics import asteroid, dumbbell, controller, eoms
from kinematics import attitude
from visualization import blender
import inertial_driver as idriver
import relative_driver as rdriver
import datetime
def eoms_controlled_blender(t, state, dum, ast):
"""Inertial dumbbell equations of motion about an asteroid
This method must be used with the scipy.integrate.ode class instead of the
    more convenient scipy.integrate.odeint. In addition, we can control the
dumbbell given full state feedback. Blender is used to generate imagery
during the simulation.
Inputs:
t - Current simulation time step
state - (18,) array which defines the state of the vehicle
pos - (3,) position of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
vel - (3,) velocity of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
            R - (9,) attitude of the dumbbell which defines the
            transformation of a vector in the dumbbell frame to the
            inertial frame
            ang_vel - (3,) angular velocity of the dumbbell
            with respect to the inertial frame and represented in the
            dumbbell frame
        dum - Dumbbell class object holding the vehicle mass and
            geometry properties
ast - Asteroid class object holding the asteroid gravitational
model and other useful parameters
"""
# unpack the state
pos = state[0:3] # location of the center of mass in the inertial frame
vel = state[3:6] # vel of com in inertial frame
R = np.reshape(state[6:15],(3,3)) # sc body frame to inertial frame
ang_vel = state[15:18] # angular velocity of sc wrt inertial frame defined in body frame
Ra = attitude.rot3(ast.omega*t, 'c') # asteroid body frame to inertial frame
# unpack parameters for the dumbbell
J = dum.J
rho1 = dum.zeta1
rho2 = dum.zeta2
# position of each mass in the asteroid frame
z1 = Ra.T.dot(pos + R.dot(rho1))
z2 = Ra.T.dot(pos + R.dot(rho2))
z = Ra.T.dot(pos) # position of COM in asteroid frame
# compute the potential at this state
(U1, U1_grad, U1_grad_mat, U1laplace) = ast.polyhedron_potential(z1)
(U2, U2_grad, U2_grad_mat, U2laplace) = ast.polyhedron_potential(z2)
F1 = dum.m1*Ra.dot(U1_grad)
F2 = dum.m2*Ra.dot(U2_grad)
M1 = dum.m1 * attitude.hat_map(rho1).dot(R.T.dot(Ra).dot(U1_grad))
M2 = dum.m2 * attitude.hat_map(rho2).dot(R.T.dot(Ra).dot(U2_grad))
    # generate image at this current state only at a specific time
# blender.driver(pos, R, ast.omega * t, [5, 0, 1], 'test' + str.zfill(str(t), 4))
# use the imagery to figure out motion and pass to the controller instead
# of the true state
# calculate the desired attitude and translational trajectory
des_att_tuple = controller.body_fixed_pointing_attitude(t, state)
des_tran_tuple = controller.traverse_then_land_vertically(t, ast, final_pos=[0.550, 0, 0],
initial_pos=[2.550, 0, 0],
descent_tf=3600)
# input trajectory and compute the control inputs
# compute the control input
u_m = controller.attitude_controller(t, state, M1+M2, dum, ast, des_att_tuple)
u_f = controller.translation_controller(t, state, F1+F2, dum, ast, des_tran_tuple)
pos_dot = vel
vel_dot = 1/(dum.m1+dum.m2) *(F1 + F2 + u_f)
R_dot = R.dot(attitude.hat_map(ang_vel)).reshape(9)
ang_vel_dot = np.linalg.inv(J).dot(-np.cross(ang_vel,J.dot(ang_vel)) + M1 + M2 + u_m)
statedot = np.hstack((pos_dot, vel_dot, R_dot, ang_vel_dot))
return statedot
def eoms_controlled_blender_traverse_then_land(t, state, dum, ast):
"""Inertial dumbbell equations of motion about an asteroid
This method must be used with the scipy.integrate.ode class instead of the
    more convenient scipy.integrate.odeint. In addition, we can control the
dumbbell given full state feedback. Blender is used to generate imagery
during the simulation.
    The spacecraft will move horizontally for the first 3600 sec to a position
[2.550, 0, 0] in the asteroid (and inertial) frame, then descend vertically
in the asteroid frame.
Inputs:
t - Current simulation time step
state - (18,) array which defines the state of the vehicle
pos - (3,) position of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
vel - (3,) velocity of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
            R - (9,) attitude of the dumbbell which defines the
            transformation of a vector in the dumbbell frame to the
            inertial frame
            ang_vel - (3,) angular velocity of the dumbbell
            with respect to the inertial frame and represented in the
            dumbbell frame
        dum - Dumbbell class object holding the vehicle mass and
            geometry properties
ast - Asteroid class object holding the asteroid gravitational
model and other useful parameters
"""
# unpack the state
pos = state[0:3] # location of the center of mass in the inertial frame
vel = state[3:6] # vel of com in inertial frame
R = np.reshape(state[6:15],(3,3)) # sc body frame to inertial frame
ang_vel = state[15:18] # angular velocity of sc wrt inertial frame defined in body frame
Ra = attitude.rot3(ast.omega*(t - 3600), 'c') # asteroid body frame to inertial frame
# unpack parameters for the dumbbell
J = dum.J
rho1 = dum.zeta1
rho2 = dum.zeta2
# position of each mass in the asteroid frame
z1 = Ra.T.dot(pos + R.dot(rho1))
z2 = Ra.T.dot(pos + R.dot(rho2))
z = Ra.T.dot(pos) # position of COM in asteroid frame
# compute the potential at this state
(U1, U1_grad, U1_grad_mat, U1laplace) = ast.polyhedron_potential(z1)
(U2, U2_grad, U2_grad_mat, U2laplace) = ast.polyhedron_potential(z2)
F1 = dum.m1*Ra.dot(U1_grad)
F2 = dum.m2*Ra.dot(U2_grad)
M1 = dum.m1 * attitude.hat_map(rho1).dot(R.T.dot(Ra).dot(U1_grad))
M2 = dum.m2 * attitude.hat_map(rho2).dot(R.T.dot(Ra).dot(U2_grad))
    # generate image at this current state only at a specific time
# blender.driver(pos, R, ast.omega * t, [5, 0, 1], 'test' + str.zfill(str(t), 4))
# use the imagery to figure out motion and pass to the controller instead
# of the true state
# compute the control input
u_m = controller.attitude_traverse_then_land_controller(t, state, M1+M2, dum, ast)
u_f = controller.translation_traverse_then_land_controller(t, state, F1+F2, dum, ast)
pos_dot = vel
vel_dot = 1/(dum.m1+dum.m2) *(F1 + F2 + u_f)
R_dot = R.dot(attitude.hat_map(ang_vel)).reshape(9)
ang_vel_dot = np.linalg.inv(J).dot(-np.cross(ang_vel,J.dot(ang_vel)) + M1 + M2 + u_m)
statedot = np.hstack((pos_dot, vel_dot, R_dot, ang_vel_dot))
return statedot
def blender_traverse_then_land_sim():
# simulation parameters
output_path = './visualization/blender'
asteroid_name = 'itokawa_low'
# create a HDF5 dataset
hdf5_path = './data/itokawa_landing/{}_controlled_vertical_landing.hdf5'.format(
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
dataset_name = 'landing'
render = 'BLENDER'
image_modulus = 400
RelTol = 1e-6
AbsTol = 1e-6
ast_name = 'itokawa'
num_faces = 64
t0 = 0
dt = 1
tf = 7200
num_steps = 7200
periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
ast = asteroid.Asteroid(ast_name,num_faces)
dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
# instantiate the blender scene once
camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
# get some of the camera parameters
K = blender_camera.get_calibration_matrix_K_from_blender(camera)
# set initial state for inertial EOMs
# initial_pos = np.array([2.550, 0, 0]) # km for center of mass in body frame
initial_pos = np.array([0, -2.550, 0])
initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
initial_R = attitude.rot3(np.pi/2).reshape(9) # transforms from dumbbell body frame to the inertial frame
initial_w = np.array([0.01, 0.01, 0.01])
initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
# instantiate ode object
# system = integrate.ode(eoms_controlled_blender)
system = integrate.ode(eoms_controlled_blender_traverse_then_land)
system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
system.set_initial_value(initial_state, t0)
system.set_f_params(dum, ast)
i_state = np.zeros((num_steps+1, 18))
time = np.zeros(num_steps+1)
i_state[0, :] = initial_state
    with h5py.File(hdf5_path, 'a') as image_data:
        # create the image and camera-pose datasets (integer sizes are required)
        images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
        RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
        R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))
ii = 1
while system.successful() and system.t < tf:
# integrate the system and save state to an array
time[ii] = (system.t + dt)
i_state[ii, :] = (system.integrate(system.t + dt))
# generate the view of the asteroid at this state
if int(time[ii]) % image_modulus== 0:
img, RT, R = blender.gen_image(i_state[ii,0:3], i_state[ii,6:15].reshape((3,3)),
ast.omega * (time[ii] - 3600),
camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
[5, 0, 1], 'test')
images[:, :, :, ii//image_modulus - 1] = img
RT_blender[ii//image_modulus -1, :] = RT.reshape(12)
R_i2bcam[ii//image_modulus -1, :] = R.reshape(9)
# do some image processing and visual odometry
print(system.t)
ii += 1
image_data.create_dataset('K', data=K)
image_data.create_dataset('i_state', data=i_state)
image_data.create_dataset('time', data=time)
def blender_vertical_landing_sim():
# simulation parameters
output_path = './visualization/blender'
asteroid_name = 'itokawa_low'
# create a HDF5 dataset
hdf5_path = './data/itokawa_landing/{}_controlled_vertical_landing.hdf5'.format(
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
dataset_name = 'landing'
render = 'BLENDER'
image_modulus = 200
RelTol = 1e-6
AbsTol = 1e-6
ast_name = 'itokawa'
num_faces = 64
t0 = 0
dt = 1
tf = 3600
num_steps = 3600
periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
ast = asteroid.Asteroid(ast_name,num_faces)
dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
# instantiate the blender scene once
camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
# get some of the camera parameters
K = blender_camera.get_calibration_matrix_K_from_blender(camera)
# set initial state for inertial EOMs
initial_pos = np.array([2.550, 0, 0]) # km for center of mass in body frame
initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
initial_w = np.array([0.01, 0.01, 0.01])
initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
# instantiate ode object
system = integrate.ode(eoms_controlled_blender)
system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
system.set_initial_value(initial_state, t0)
system.set_f_params(dum, ast)
i_state = np.zeros((num_steps+1, 18))
time = np.zeros(num_steps+1)
i_state[0, :] = initial_state
    with h5py.File(hdf5_path, 'a') as image_data:
        # create the image and camera-pose datasets (integer sizes are required)
        images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
        RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
        R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))
ii = 1
while system.successful() and system.t < tf:
# integrate the system and save state to an array
time[ii] = (system.t + dt)
i_state[ii, :] = (system.integrate(system.t + dt))
# generate the view of the asteroid at this state
if int(time[ii]) % image_modulus == 0:
img, RT, R = blender.gen_image(i_state[ii,0:3], i_state[ii,6:15].reshape((3, 3)),
ast.omega * time[ii],
camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
[5, 0, 1], 'test')
images[:, :, :, ii // image_modulus - 1] = img
RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)
# do some image processing and visual odometry
ii += 1
image_data.create_dataset('K', data=K)
image_data.create_dataset('i_state', data=i_state)
image_data.create_dataset('time', data=time)
def blender_inertial_circumnavigate(gen_images=False):
"""Move around the asteroid in the inertial frame, but assume no rotation of the asteroid
"""
# simulation parameters
output_path = './visualization/blender'
asteroid_name = 'itokawa_high'
# create a HDF5 dataset
hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation.hdf5'.format(
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
dataset_name = 'landing'
render = 'BLENDER'
image_modulus = 1
RelTol = 1e-6
AbsTol = 1e-6
ast_name = 'itokawa'
num_faces = 64
t0 = 0
dt = 1
tf = 3600 * 4
num_steps = 3600 * 4
loops = 4
periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
ast = asteroid.Asteroid(ast_name,num_faces)
dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
# instantiate the blender scene once
camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
# get some of the camera parameters
K = blender_camera.get_calibration_matrix_K_from_blender(camera)
# set initial state for inertial EOMs
initial_pos = np.array([3, 0, 0]) # km for center of mass in body frame
initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
initial_w = np.array([0.01, 0.01, 0.01])
initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
# instantiate ode object
system = integrate.ode(eoms.eoms_controlled_inertial_circumnavigate)
system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
system.set_initial_value(initial_state, t0)
system.set_f_params(dum, ast, tf, loops)
i_state = np.zeros((num_steps+1, 18))
time = np.zeros(num_steps+1)
i_state[0, :] = initial_state
    with h5py.File(hdf5_path, 'a') as image_data:
        # create the image and camera-pose datasets (integer sizes are required)
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))
ii = 1
while system.successful() and system.t < tf:
# integrate the system and save state to an array
time[ii] = (system.t + dt)
i_state[ii, :] = (system.integrate(system.t + dt))
# generate the view of the asteroid at this state
if int(time[ii]) % image_modulus == 0 and gen_images:
# img, RT, R = blender.gen_image(i_state[ii,0:3], i_state[ii,6:15].reshape((3, 3)),
# ast.omega * time[ii],
# camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
# [5, 0, 1], 'test')
img, RT, R = blender.gen_image_fixed_ast(i_state[ii,0:3],
i_state[ii,6:15].reshape((3,3)),
camera_obj, camera,
lamp_obj, lamp,
itokawa_obj, scene,
[5, 0, 1], 'test')
images[:, :, :, ii // image_modulus - 1] = img
RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)
# do some image processing and visual odometry
ii += 1
image_data.create_dataset('K', data=K)
image_data.create_dataset('i_state', data=i_state)
image_data.create_dataset('time', data=time)
def blender_inertial_lissajous(gen_images=False):
"""Move around the asteroid in the inertial frame, but assume no rotation of the asteroid
"""
# simulation parameters
output_path = './visualization/blender'
asteroid_name = 'itokawa_high'
# create a HDF5 dataset
hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_lissajous.hdf5'.format(
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
dataset_name = 'landing'
render = 'BLENDER'
image_modulus = 1
RelTol = 1e-6
AbsTol = 1e-6
ast_name = 'itokawa'
num_faces = 64
t0 = 0
dt = 1
tf = 3600 * 2
num_steps = 3600 * 2
loops = 2
periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
ast = asteroid.Asteroid(ast_name,num_faces)
dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
# instantiate the blender scene once
camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
# get some of the camera parameters
K = blender_camera.get_calibration_matrix_K_from_blender(camera)
# set initial state for inertial EOMs
initial_pos = np.array([3, 3, 0]) # km for center of mass in body frame
initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
initial_w = np.array([0.01, 0.01, 0.01])
initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
# instantiate ode object
system = integrate.ode(eoms.eoms_controlled_inertial_lissajous)
system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
system.set_initial_value(initial_state, t0)
system.set_f_params(dum, ast, tf, loops)
i_state = np.zeros((num_steps+1, 18))
time = np.zeros(num_steps+1)
i_state[0, :] = initial_state
    with h5py.File(hdf5_path, 'a') as image_data:
        # create the image and camera-pose datasets (integer sizes are required)
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))
ii = 1
while system.successful() and system.t < tf:
# integrate the system and save state to an array
time[ii] = (system.t + dt)
i_state[ii, :] = (system.integrate(system.t + dt))
# generate the view of the asteroid at this state
if int(time[ii]) % image_modulus == 0 and gen_images:
# img, RT, R = blender.gen_image(i_state[ii,0:3], i_state[ii,6:15].reshape((3, 3)),
# ast.omega * time[ii],
# camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
# [5, 0, 1], 'test')
img, RT, R = blender.gen_image_fixed_ast(i_state[ii,0:3],
i_state[ii,6:15].reshape((3,3)),
camera_obj, camera,
lamp_obj, lamp,
itokawa_obj, scene,
[5, 0, 1], 'test')
images[:, :, :, ii // image_modulus - 1] = img
RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)
# do some image processing and visual odometry
ii += 1
image_data.create_dataset('K', data=K)
image_data.create_dataset('i_state', data=i_state)
image_data.create_dataset('time', data=time)
def blender_inertial_quarter_equatorial(gen_images=False):
"""Move around the asteroid in the inertial frame, but assume no rotation of the asteroid
Moves in the xy positive quadrant in the equatorial plane
"""
# simulation parameters
output_path = './visualization/blender'
asteroid_name = 'itokawa_high'
# create a HDF5 dataset
hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_quarter_xy.hdf5'.format(
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
dataset_name = 'landing'
render = 'BLENDER'
image_modulus = 1
RelTol = 1e-6
AbsTol = 1e-6
ast_name = 'itokawa'
num_faces = 64
t0 = 0
dt = 1
tf = 3600 * 4
num_steps = 3600 * 4
loops = 4
periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
ast = asteroid.Asteroid(ast_name,num_faces)
dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)
# instantiate the blender scene once
camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)
# get some of the camera parameters
K = blender_camera.get_calibration_matrix_K_from_blender(camera)
# set initial state for inertial EOMs
initial_pos = np.array([3, 0, 0]) # km for center of mass in body frame
initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos)
initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame
initial_w = np.array([0.01, 0.01, 0.01])
initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))
# instantiate ode object
system = integrate.ode(eoms.eoms_controlled_inertial_quarter_equatorial)
system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
system.set_initial_value(initial_state, t0)
system.set_f_params(dum, ast, tf, loops)
i_state = np.zeros((num_steps+1, 18))
time = np.zeros(num_steps+1)
i_state[0, :] = initial_state
    with h5py.File(hdf5_path, 'a') as image_data:
        # create the image and camera-pose datasets (integer sizes are required)
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))
ii = 1
while system.successful() and system.t < tf:
# integrate the system and save state to an array
time[ii] = (system.t + dt)
i_state[ii, :] = (system.integrate(system.t + dt))
# generate the view of the asteroid at this state
if int(time[ii]) % image_modulus == 0 and gen_images:
# img, RT, R = blender.gen_image(i_state[ii,0:3], i_state[ii,6:15].reshape((3, 3)),
# ast.omega * time[ii],
# camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
# [5, 0, 1], 'test')
img, RT, R = blender.gen_image_fixed_ast(i_state[ii,0:3],
i_state[ii,6:15].reshape((3,3)),
camera_obj, camera,
lamp_obj, lamp,
itokawa_obj, scene,
[5, 0, 1], 'test')
images[:, :, :, ii // image_modulus - 1] = img
RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)
# do some image processing and visual odometry
ii += 1
image_data.create_dataset('K', data=K)
image_data.create_dataset('i_state', data=i_state)
image_data.create_dataset('time', data=time)
| skulumani/asteroid_dumbbell | blender_sim.py | Python | gpl-3.0 | 26,548 | 0.007571 |
"""Functions for the backend of LetterBoy"""
def lb_standardcase():
"""Capitalise the first letter of each sentence, and set all others to lowercase."""
pass
def lb_uppercase():
"""Capitalise each letter."""
pass
def lb_lowercase():
"""Set all letters to lowercase."""
pass
def lb_camelcase():
"""Capitalise the first letter of each word, and set all others to lowercase."""
pass
def lb_staggercase():
"""Alternate each character between upper- and lower-case."""
pass
def lb_jumbles_nontrobo():
"""Jumble up text between the first and last letters in each word."""
pass
def lb_zcorrupt():
"""Add glitch text to the plaintext."""
pass
def lb_zstrip():
"""Remove glitch text."""
pass
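# The functions above are stubs that only document the intended behaviour. As a rough
# illustration, here is a minimal sketch of what two of the simpler transformations could
# look like; the text parameter is an assumption made for the sketch, since the actual
# backend functions are declared without arguments and presumably operate on state held
# elsewhere in LetterBoy.
def standardcase_sketch(text):
    """Lowercase everything, then capitalise the first letter of each sentence."""
    out = []
    capitalise_next = True
    for ch in text.lower():
        if capitalise_next and ch.isalpha():
            out.append(ch.upper())
            capitalise_next = False
        else:
            out.append(ch)
        if ch in ".!?":
            capitalise_next = True
    return "".join(out)
def staggercase_sketch(text):
    """Alternate letters between upper- and lower-case, leaving other characters alone."""
    out = []
    upper = True
    for ch in text:
        if ch.isalpha():
            out.append(ch.upper() if upper else ch.lower())
            upper = not upper
        else:
            out.append(ch)
    return "".join(out)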
| Moth-Tolias/LetterBoy | LetterBoy_backend.py | Python | gpl-3.0 | 788 | 0.01269 |
import uuid
import datetime as dt
import json
import urllib.request
import urllib.parse
from Main.handlers.settings import RECAPTCHA_SECRET_KEY
def get_title(title=""):
if title == "":
return "GetCompany info"
else:
return title + " - GetCompany info"
def get_new_token():
return str(str(uuid.uuid4()) + str(uuid.uuid4())).replace("-", "")[:32]
def get_timestamp(datetime):
return int(dt.datetime.strptime(datetime, "%Y-%m-%d %H:%M:%S.%f").timestamp())
def remove_microseconds(datetime):
return dt.datetime.strptime(datetime, "%Y-%m-%d %H:%M:%S.%f")
def get_remote_IP(request):
ip = request.META.get('HTTP_CF_CONNECTING_IP')
if ip is None:
ip = request.META.get('REMOTE_ADDR')
return ip
def check_recaptcha(response, ip):
if response == "":
return False
data = urllib.parse.urlencode({"secret": RECAPTCHA_SECRET_KEY, "response": response, "remoteip": ip})
binary_data = data.encode('utf-8')
u = urllib.request.urlopen("https://www.google.com/recaptcha/api/siteverify", binary_data)
result = u.read()
recaptcha_result = json.loads(result.decode('utf-8'))
return recaptcha_result["success"]
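# A minimal sketch of how these helpers might be combined in a Django view (not part of
# this module's original API). It assumes the standard 'g-recaptcha-response' form field
# posted by the reCAPTCHA widget; the view name and responses are illustrative only.
def _example_recaptcha_protected_view(request):
    from django.http import HttpResponse, HttpResponseForbidden
    ip = get_remote_IP(request)  # prefers the Cloudflare header when present
    if not check_recaptcha(request.POST.get("g-recaptcha-response", ""), ip):
        return HttpResponseForbidden("reCAPTCHA validation failed")
    return HttpResponse(get_new_token())  # issue a fresh 32-character token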
| G4brym/GetCompany.info | Main/handlers/utilities.py | Python | mit | 1,191 | 0.006717 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class EffectiveNetworkSecurityRule(Model):
"""Effective network security rules.
:param name: The name of the security rule specified by the user (if
created by the user).
:type name: str
:param protocol: The network protocol this rule applies to. Possible
values are: 'Tcp', 'Udp', and 'All'. Possible values include: 'Tcp',
'Udp', 'All'
:type protocol: str or
~azure.mgmt.network.v2017_09_01.models.EffectiveSecurityRuleProtocol
:param source_port_range: The source port or range.
:type source_port_range: str
:param destination_port_range: The destination port or range.
:type destination_port_range: str
    :param source_port_ranges: The source port ranges. Expected values include
     a single integer between 0 and 65535, a range using '-' as separator (e.g.
     100-400), or an asterisk (*)
    :type source_port_ranges: list[str]
    :param destination_port_ranges: The destination port ranges. Expected
     values include a single integer between 0 and 65535, a range using '-' as
     separator (e.g. 100-400), or an asterisk (*)
:type destination_port_ranges: list[str]
:param source_address_prefix: The source address prefix.
:type source_address_prefix: str
:param destination_address_prefix: The destination address prefix.
:type destination_address_prefix: str
    :param source_address_prefixes: The source address prefixes. Expected
     values include CIDR IP ranges, Default Tags (VirtualNetwork,
     AzureLoadBalancer, Internet), System Tags, and the asterisk (*).
    :type source_address_prefixes: list[str]
    :param destination_address_prefixes: The destination address prefixes.
     Expected values include CIDR IP ranges, Default Tags (VirtualNetwork,
     AzureLoadBalancer, Internet), System Tags, and the asterisk (*).
    :type destination_address_prefixes: list[str]
:param expanded_source_address_prefix: The expanded source address prefix.
:type expanded_source_address_prefix: list[str]
:param expanded_destination_address_prefix: Expanded destination address
prefix.
:type expanded_destination_address_prefix: list[str]
:param access: Whether network traffic is allowed or denied. Possible
values are: 'Allow' and 'Deny'. Possible values include: 'Allow', 'Deny'
:type access: str or
~azure.mgmt.network.v2017_09_01.models.SecurityRuleAccess
:param priority: The priority of the rule.
:type priority: int
    :param direction: The direction of the rule. Possible values are:
     'Inbound' and 'Outbound'. Possible values include: 'Inbound', 'Outbound'
:type direction: str or
~azure.mgmt.network.v2017_09_01.models.SecurityRuleDirection
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'source_port_range': {'key': 'sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'destinationPortRange', 'type': 'str'},
'source_port_ranges': {'key': 'sourcePortRanges', 'type': '[str]'},
'destination_port_ranges': {'key': 'destinationPortRanges', 'type': '[str]'},
'source_address_prefix': {'key': 'sourceAddressPrefix', 'type': 'str'},
'destination_address_prefix': {'key': 'destinationAddressPrefix', 'type': 'str'},
'source_address_prefixes': {'key': 'sourceAddressPrefixes', 'type': '[str]'},
'destination_address_prefixes': {'key': 'destinationAddressPrefixes', 'type': '[str]'},
'expanded_source_address_prefix': {'key': 'expandedSourceAddressPrefix', 'type': '[str]'},
'expanded_destination_address_prefix': {'key': 'expandedDestinationAddressPrefix', 'type': '[str]'},
'access': {'key': 'access', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'direction': {'key': 'direction', 'type': 'str'},
}
def __init__(self, **kwargs):
super(EffectiveNetworkSecurityRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.protocol = kwargs.get('protocol', None)
self.source_port_range = kwargs.get('source_port_range', None)
self.destination_port_range = kwargs.get('destination_port_range', None)
self.source_port_ranges = kwargs.get('source_port_ranges', None)
self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
self.source_address_prefix = kwargs.get('source_address_prefix', None)
self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
self.expanded_source_address_prefix = kwargs.get('expanded_source_address_prefix', None)
self.expanded_destination_address_prefix = kwargs.get('expanded_destination_address_prefix', None)
self.access = kwargs.get('access', None)
self.priority = kwargs.get('priority', None)
self.direction = kwargs.get('direction', None)
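# A small illustrative sketch (not part of the generated SDK): the model simply stores
# whatever keyword arguments it is given, and msrest later uses _attribute_map above to
# serialize the Python-side snake_case names to their camelCase wire-format keys
# (e.g. source_address_prefix -> sourceAddressPrefix). The values below are made up.
def _example_effective_rule():
    return EffectiveNetworkSecurityRule(
        name='allow-https-inbound',
        protocol='Tcp',
        source_address_prefix='Internet',
        destination_address_prefix='10.0.0.0/24',
        destination_port_range='443',
        access='Allow',
        priority=120,
        direction='Inbound',
    )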
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/effective_network_security_rule.py | Python | mit | 5,618 | 0.002492 |
# Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import jsonpickle
from azure.cosmosdb.table import TableService
from azure.mgmt.storage.models import IPRule, \
NetworkRuleSet, StorageAccountUpdateParameters, VirtualNetworkRule
from azure.storage.blob import BlockBlobService
from azure.storage.common.models import RetentionPolicy, Logging
from azure.storage.file import FileService
from azure.storage.queue import QueueService
from c7n_azure.actions.base import AzureBaseAction
from c7n_azure.constants import BLOB_TYPE, FILE_TYPE, QUEUE_TYPE, TABLE_TYPE
from c7n_azure.filters import FirewallRulesFilter, ValueFilter
from c7n_azure.provider import resources
from c7n_azure.resources.arm import ArmResourceManager
from c7n_azure.storage_utils import StorageUtilities
from c7n_azure.utils import ThreadHelper
from netaddr import IPSet
from c7n.exceptions import PolicyValidationError
from c7n.filters.core import type_schema
from c7n.utils import local_session, get_annotation_prefix
@resources.register('storage')
class Storage(ArmResourceManager):
"""Storage Account Resource
:example:
Finds all Storage Accounts in the subscription.
.. code-block:: yaml
policies:
- name: find-all-storage-accounts
resource: azure.storage
"""
class resource_type(ArmResourceManager.resource_type):
doc_groups = ['Storage']
service = 'azure.mgmt.storage'
client = 'StorageManagementClient'
enum_spec = ('storage_accounts', 'list', None)
diagnostic_settings_enabled = False
resource_type = 'Microsoft.Storage/storageAccounts'
@Storage.action_registry.register('set-network-rules')
class StorageSetNetworkRulesAction(AzureBaseAction):
""" Set Network Rules Action
Updates Azure Storage Firewalls and Virtual Networks settings.
:example:
Find storage accounts without any firewall rules.
Configure default-action to ``Deny`` and then allow:
- Azure Logging and Metrics services
- Two specific IPs
- Two subnets
.. code-block:: yaml
policies:
- name: add-storage-firewall
resource: azure.storage
filters:
- type: value
key: properties.networkAcls.ipRules
value_type: size
op: eq
value: 0
actions:
- type: set-network-rules
default-action: Deny
bypass: [Logging, Metrics]
ip-rules:
- ip-address-or-range: 11.12.13.14
- ip-address-or-range: 21.22.23.24
virtual-network-rules:
- virtual-network-resource-id: <subnet_resource_id>
- virtual-network-resource-id: <subnet_resource_id>
"""
schema = type_schema(
'set-network-rules',
required=['default-action'],
**{
'default-action': {'enum': ['Allow', 'Deny']},
'bypass': {'type': 'array', 'items': {'enum': ['AzureServices', 'Logging', 'Metrics']}},
'ip-rules': {
'type': 'array',
'items': {'ip-address-or-range': {'type': 'string'}}
},
'virtual-network-rules': {
'type': 'array',
'items': {'virtual-network-resource-id': {'type': 'string'}}
}
}
)
def _prepare_processing(self,):
self.client = self.manager.get_client()
def _process_resource(self, resource):
rule_set = NetworkRuleSet(default_action=self.data['default-action'])
if 'ip-rules' in self.data:
rule_set.ip_rules = [
IPRule(
ip_address_or_range=r['ip-address-or-range'],
action='Allow') # 'Allow' is the only allowed action
for r in self.data['ip-rules']]
if 'virtual-network-rules' in self.data:
rule_set.virtual_network_rules = [
VirtualNetworkRule(
virtual_network_resource_id=r['virtual-network-resource-id'],
action='Allow') # 'Allow' is the only allowed action
for r in self.data['virtual-network-rules']]
if len(self.data.get('bypass', [])) > 0:
rule_set.bypass = ','.join(self.data['bypass'])
else:
rule_set.bypass = 'None'
self.client.storage_accounts.update(
resource['resourceGroup'],
resource['name'],
StorageAccountUpdateParameters(network_rule_set=rule_set))
@Storage.filter_registry.register('firewall-rules')
class StorageFirewallRulesFilter(FirewallRulesFilter):
def __init__(self, data, manager=None):
super(StorageFirewallRulesFilter, self).__init__(data, manager)
self._log = logging.getLogger('custodian.azure.storage')
@property
def log(self):
return self._log
def _query_rules(self, resource):
ip_rules = resource['properties']['networkAcls']['ipRules']
resource_rules = IPSet([r['value'] for r in ip_rules])
return resource_rules
@Storage.filter_registry.register('storage-diagnostic-settings')
class StorageDiagnosticSettingsFilter(ValueFilter):
"""Filters storage accounts based on its diagnostic settings. The filter requires
specifying the storage type (blob, queue, table, file) and will filter based on
the settings for that specific type.
:example:
Find all storage accounts that have a 'delete' logging setting disabled.
.. code-block:: yaml
policies:
- name: find-accounts-with-delete-logging-disabled
resource: azure.storage
filters:
- or:
- type: storage-diagnostic-settings
storage-type: blob
key: logging.delete
op: eq
value: False
- type: storage-diagnostic-settings
storage-type: queue
key: logging.delete
op: eq
value: False
- type: storage-diagnostic-settings
storage-type: table
key: logging.delete
op: eq
value: False
"""
schema = type_schema('storage-diagnostic-settings',
rinherit=ValueFilter.schema,
required=['storage-type'],
**{'storage-type': {
'type': 'string',
'enum': [BLOB_TYPE, QUEUE_TYPE, TABLE_TYPE, FILE_TYPE]}}
)
def __init__(self, data, manager=None):
super(StorageDiagnosticSettingsFilter, self).__init__(data, manager)
self.storage_type = data.get('storage-type')
self.log = logging.getLogger('custodian.azure.storage')
def process(self, resources, event=None):
session = local_session(self.manager.session_factory)
token = StorageUtilities.get_storage_token(session)
result, errors = ThreadHelper.execute_in_parallel(
resources=resources,
event=event,
execution_method=self.process_resource_set,
executor_factory=self.executor_factory,
log=self.log,
session=session,
token=token
)
return result
def process_resource_set(self, resources, event=None, session=None, token=None):
matched = []
for resource in resources:
settings = self._get_settings(resource, session, token)
filtered_settings = super(StorageDiagnosticSettingsFilter, self).process([settings],
event)
if filtered_settings:
matched.append(resource)
return matched
def _get_settings(self, storage_account, session=None, token=None):
storage_prefix_property = get_annotation_prefix(self.storage_type)
if not (storage_prefix_property in storage_account):
settings = StorageSettingsUtilities.get_settings(
self.storage_type, storage_account, session, token)
storage_account[storage_prefix_property] = json.loads(jsonpickle.encode(settings))
return storage_account[storage_prefix_property]
@Storage.action_registry.register('set-log-settings')
class SetLogSettingsAction(AzureBaseAction):
"""Action that updates the logging settings on storage accounts. The action requires
specifying an array of storage types that will be impacted by the action (blob, queue, table),
retention (number in days; 0-365), and an array of log settings to enable (read, write, delete).
The action will disable any settings not listed (e.g. by providing log: [write, delete], the
action will disable read).
:example:
Enable write and delete logging and disable read logging on blob storage,
and retain logs for 5 days.
.. code-block:: yaml
policies:
- name: enable-blob-storage-logging
resource: azure.storage
actions:
- type: set-log-settings
storage-types: [blob]
retention: 5
log: [write, delete]
"""
READ = 'read'
WRITE = 'write'
DELETE = 'delete'
schema = type_schema('set-log-settings',
required=['storage-types', 'log', 'retention'],
**{
'storage-types': {
'type': 'array',
'items': {
'type': 'string',
'enum': [BLOB_TYPE, QUEUE_TYPE, TABLE_TYPE]
}
},
'log': {
'type': 'array',
'items': {
'type': 'string',
'enum': [READ, WRITE, DELETE]
}
},
'retention': {'type': 'number'}
}
)
def __init__(self, data, manager=None):
super(SetLogSettingsAction, self).__init__(data, manager)
self.storage_types = data['storage-types']
self.logs_to_enable = data['log']
self.retention = data['retention']
self.log = logging.getLogger('custodian.azure.storage')
self.token = None
def validate(self):
if self.retention < 0 or self.retention > 365:
raise PolicyValidationError(
'attribute: retention can not be less than 0 or greater than 365')
def process_in_parallel(self, resources, event):
self.token = StorageUtilities.get_storage_token(self.session)
return super(SetLogSettingsAction, self).process_in_parallel(resources, event)
def _process_resource(self, resource, event=None):
retention = RetentionPolicy(enabled=self.retention != 0, days=self.retention)
log_settings = Logging(self.DELETE in self.logs_to_enable, self.READ in self.logs_to_enable,
self.WRITE in self.logs_to_enable, retention_policy=retention)
for storage_type in self.storage_types:
StorageSettingsUtilities.update_logging(storage_type, resource,
log_settings, self.session, self.token)
class StorageSettingsUtilities(object):
@staticmethod
def _get_blob_client_from_storage_account(storage_account, token):
return BlockBlobService(
account_name=storage_account['name'],
token_credential=token
)
@staticmethod
def _get_file_client_from_storage_account(storage_account, session):
primary_key = StorageUtilities.get_storage_primary_key(storage_account['resourceGroup'],
storage_account['name'],
session)
return FileService(
account_name=storage_account['name'],
account_key=primary_key
)
@staticmethod
def _get_table_client_from_storage_account(storage_account, session):
primary_key = StorageUtilities.get_storage_primary_key(storage_account['resourceGroup'],
storage_account['name'],
session)
return TableService(
account_name=storage_account['name'],
account_key=primary_key
)
@staticmethod
def _get_queue_client_from_storage_account(storage_account, token):
return QueueService(account_name=storage_account['name'], token_credential=token)
@staticmethod
def _get_client(storage_type, storage_account, session=None, token=None):
if storage_type == TABLE_TYPE or storage_type == FILE_TYPE:
client = getattr(StorageSettingsUtilities, '_get_{}_client_from_storage_account'
.format(storage_type))(storage_account, session)
else:
client = getattr(StorageSettingsUtilities, '_get_{}_client_from_storage_account'
.format(storage_type))(storage_account, token)
return client
@staticmethod
def get_settings(storage_type, storage_account, session=None, token=None):
client = StorageSettingsUtilities._get_client(storage_type, storage_account, session, token)
return getattr(client, 'get_{}_service_properties'.format(storage_type))()
@staticmethod
def update_logging(storage_type, storage_account, logging_settings, session=None, token=None):
client = StorageSettingsUtilities._get_client(storage_type, storage_account, session, token)
return getattr(client, 'set_{}_service_properties'
.format(storage_type))(logging=logging_settings)
| FireballDWF/cloud-custodian | tools/c7n_azure/c7n_azure/resources/storage.py | Python | apache-2.0 | 14,922 | 0.002211 |
from django import template
from bookmarks.models import BookmarkInstance
from tagging.models import Tag
register = template.Library()
@register.inclusion_tag('bookmarks/tags.html')
def show_bookmarks_tags():
""" Show a box with tags for all articles that belong to current site.
"""
return {'bookmark_tags': Tag.objects.usage_for_queryset(queryset=BookmarkInstance.on_site.all(), counts=True, min_count=1)}
| incuna/incuna-bookmarks | bookmarks/templatetags/bookmark_tags.py | Python | mit | 421 | 0.007126 |
"""
Helper file to manage translations for the Meerkat Authentication module.
We have two types of translations, general and implementation specific
The general translations are extracted from the python, jijna2 and js files.
"""
from csv import DictReader
import argparse
import os
import shutil
import datetime
from babel.messages.pofile import read_po, write_po
from babel.messages.catalog import Catalog, Message
from babel._compat import BytesIO
parser = argparse.ArgumentParser()
parser.add_argument("action",
choices=["update-po", "initialise", "compile" ],
help="Choose action" )
parser.add_argument("-l", type=str,
help="Two letter langauge code")
if __name__ == "__main__":
args = parser.parse_args()
lang_dir = "meerkat_auth"
if args.action == "update-po":
os.system("pybabel extract -F babel.cfg -o {}/messages.pot .".format(lang_dir) )
os.system("pybabel update -i {}/messages.pot -d {}/translations".format(lang_dir, lang_dir) )
os.system("rm {}/messages.pot".format(lang_dir))
elif args.action == "initialise":
if args.l and len(args.l) == 2:
os.system("pybabel extract -F babel.cfg -o {}/messages.pot .".format(lang_dir) )
os.system("pybabel init -i {}/messages.pot -d {}/translations -l {}".format(
lang_dir, lang_dir,args.l
))
os.system("pybabel update -i {}/messages.pot -d {}/translations".format(lang_dir, lang_dir) )
os.system("rm {}/messages.pot".format(lang_dir))
else:
print("Need to specify a two letter language code")
elif args.action == "compile":
os.system("pybabel compile -d {}/translations".format(lang_dir))
| meerkat-code/meerkat_auth | translate.py | Python | mit | 1,780 | 0.008989 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This script is free software. You may redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the license or
# any later version (at your option).
# If you make any modification to this application, you must always
# credit its original author.
# Authors:
# Universidad Distrital Francisco Jose
# Grupo de fisica e informatica
# Diego Alberto Parra Garzón
# Dr Julian Andres Salamanca Bernal
# Colombia, Bogota D.C.
import serial
import os
import subprocess
import math
import time
import Gnuplot
from Tkinter import *
import tkMessageBox
import Tkinter
import shutil
class Gramo():
def Atenua(self):
bicho = Tk()
bicho.geometry("280x170+200+90")
bicho.config(bg="white")
bicho.title("Infrarossi")
bicho.resizable(width=0, height=0)
def Verifica():
print "ola"
def Salir():
tkMessageBox.showinfo("Infrarossi", message= "Saliendo .... ")
arduino = serial.Serial("/dev/rfcomm0", 9600)
arduino.write('aa')
exit()
exit()
def Grafica():
os.system("python g_p_Ate.py &")
def Comenzar1():
tkMessageBox.showinfo("Infrarossi", message= "Se procede a capturar datos, para detener el proceso cierre la ventana de captura de datos 'de color azul'")
os.system("xterm -T Infrarossi -geom 50x8+185+100 +cm -bg blue -e python bin/c_p_Ate.py &")
# os.system("python bin/c_p_Ate.py")
        # -------------------------------- WINDOW CONFIGURATION ------------------------------------------------------------------------------
X=8
Y=10
lblTitulo = Label(bicho, text="ATENUACION", fg = ("blue"), bg = ("white"), font = ("Century Schoolbook L",23)).place(x=30, y=20)
btnConectar1 = Button(bicho, text= " INICIAR ", width=5, height=1, command= Comenzar1).place(x=20+X, y=100+Y)
btnSalir = Button(bicho, text= " SALIR ", width=5, height=1, command= Salir).place(x=170+X, y=100+Y)
btnGrafica = Button(bicho, text= " GRAFICA ", width=5, height=1, command= Grafica).place(x=95+X, y=100+Y)
Verifica()
bicho.mainloop()
def __init__(self):
self.Atenua()
self.__del__()
def __del__(self):
print ("PROGRAMA TERMINADO")
modulo = Gramo()
| Diego-debian/Free-infrarossi | free_infrarossi/bin/Atenuacion.py | Python | gpl-3.0 | 2,477 | 0.031617 |
import os
from conans.tools import unzip
import shutil
from conans.util.files import rmdir, mkdir
from conans.client.remote_registry import RemoteRegistry
from conans import tools
from conans.errors import ConanException
def _handle_remotes(registry_path, remote_file, output):
registry = RemoteRegistry(registry_path, output)
new_registry = RemoteRegistry(remote_file, output)
registry.define_remotes(new_registry.remotes)
def _handle_profiles(source_folder, target_folder, output):
mkdir(target_folder)
for root, _, files in os.walk(source_folder):
relative_path = os.path.relpath(root, source_folder)
if relative_path == ".":
relative_path = ""
for f in files:
profile = os.path.join(relative_path, f)
output.info(" Installing profile %s" % profile)
shutil.copy(os.path.join(root, f), os.path.join(target_folder, profile))
def _process_git_repo(repo_url, client_cache, output, runner, tmp_folder):
output.info("Trying to clone repo %s" % repo_url)
with tools.chdir(tmp_folder):
runner('git clone "%s" config' % repo_url, output=output)
tmp_folder = os.path.join(tmp_folder, "config")
_process_folder(tmp_folder, client_cache, output)
def _process_zip_file(zippath, client_cache, output, tmp_folder, remove=False):
unzip(zippath, tmp_folder)
if remove:
os.unlink(zippath)
_process_folder(tmp_folder, client_cache, output)
def _handle_conan_conf(current_conan_conf, new_conan_conf_path):
current_conan_conf.read(new_conan_conf_path)
with open(current_conan_conf.filename, "w") as f:
current_conan_conf.write(f)
def _process_folder(folder, client_cache, output):
for root, dirs, files in os.walk(folder):
for f in files:
if f == "settings.yml":
output.info("Installing settings.yml")
settings_path = client_cache.settings_path
shutil.copy(os.path.join(root, f), settings_path)
elif f == "conan.conf":
output.info("Processing conan.conf")
conan_conf = client_cache.conan_config
_handle_conan_conf(conan_conf, os.path.join(root, f))
elif f == "remotes.txt":
output.info("Defining remotes")
registry_path = client_cache.registry
_handle_remotes(registry_path, os.path.join(root, f), output)
else:
output.info("Copying file %s to %s" % (f, client_cache.conan_folder))
shutil.copy(os.path.join(root, f), client_cache.conan_folder)
for d in dirs:
if d == "profiles":
output.info("Installing profiles")
profiles_path = client_cache.profiles_path
_handle_profiles(os.path.join(root, d), profiles_path, output)
break
dirs[:] = [d for d in dirs if d not in ("profiles", ".git")]
def _process_download(item, client_cache, output, tmp_folder):
output.info("Trying to download %s" % item)
zippath = os.path.join(tmp_folder, "config.zip")
tools.download(item, zippath, out=output)
_process_zip_file(zippath, client_cache, output, tmp_folder, remove=True)
def configuration_install(item, client_cache, output, runner):
tmp_folder = os.path.join(client_cache.conan_folder, "tmp_config_install")
# necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/
tmp_folder = os.path.realpath(tmp_folder)
mkdir(tmp_folder)
try:
if item is None:
try:
item = client_cache.conan_config.get_item("general.config_install")
except ConanException:
raise ConanException("Called config install without arguments and "
"'general.config_install' not defined in conan.conf")
if item.endswith(".git"):
_process_git_repo(item, client_cache, output, runner, tmp_folder)
elif os.path.exists(item):
# is a local file
_process_zip_file(item, client_cache, output, tmp_folder)
elif item.startswith("http"):
_process_download(item, client_cache, output, tmp_folder)
else:
raise ConanException("I don't know how to process %s" % item)
finally:
if item:
client_cache.conan_config.set_item("general.config_install", item)
rmdir(tmp_folder)
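# For reference, a hypothetical config source that _process_folder() above would consume
# (the folder and file names are made up for illustration):
#
#   config/
#       remotes.txt      -> replaces the remotes defined in the local registry
#       settings.yml     -> copied over the local settings.yml
#       conan.conf       -> merged into the local conan.conf
#       profiles/        -> every file is copied into the local profiles folder
#       anything_else    -> copied into the conan folder as-is
#
# A typical call, given the cache/output/runner objects the Conan client already holds:
#   configuration_install("https://github.com/<org>/conan-config.git", client_cache, output, runner)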
| lasote/conan | conans/client/conf/config_installer.py | Python | mit | 4,484 | 0.001338 |
"""
RUN FROM THIS FILE
Alexandre Yang
ITP 115
Final Project
05/08/2014
Description:
Refer to readme.txt
"""
import pygame
from Oto import Oto
from Button import Button
from Label import Label
# Input: pygame.Surface, tuple, int, int, int, int
# Output: none
# Side-effect: Draws the grid on the screen
def drawBoard(surface, color, w, h, tileWidth, tileHeight):
# Draw lines
for x in range(tileWidth, w+1, tileWidth):
pygame.draw.line(surface, color, (x, 0), (x, h))
for y in range(tileHeight, h+1, tileHeight):
pygame.draw.line(surface, color, (0, y), (w, y))
# Input: int, int
# Output: pygame.sprite.Sprite
# Side-effect: none
# Description: Creates a sprite to represent the position of the mouse-click
def createMouseClick(mouseX, mouseY):
mouseClick = pygame.sprite.Sprite()
mouseClick.image = pygame.Surface((1, 1))
mouseClick.rect = mouseClick.image.get_rect()
mouseClick.rect.x = mouseX
mouseClick.rect.y = mouseY
return mouseClick
def main():
# Set general variables
screenW = 850
screenH = 775
boardW = 675
boardH = 675
tileWidth = 75
tileHeight = 75
running = True
screen = pygame.display.set_mode((screenW, screenH)) # Create pygame Surface
clock = pygame.time.Clock() # Create pygame Clock
BPM = 4
active = False
bgColor = 0, 0, 0
lineColor = 255, 255, 255
# Create sprite groups (necessary to call draw() method)
otoList = pygame.sprite.Group()
buttonList = pygame.sprite.Group()
labelList = pygame.sprite.Group()
# Create Menu Buttons and add them to buttonList sprite group
playButton = Button(screen, 100, boardH+40, 50, 50, "Play")
buttonList.add(playButton)
pauseButton = Button(screen, 200, boardH+40, 75, 50, "Pause")
buttonList.add(pauseButton)
clearButton = Button(screen, 320, boardH+40, 70, 50, "Clear")
buttonList.add(clearButton)
plusBPMButton = Button(screen, 430, boardH+40, 65, 50, "BPM+")
buttonList.add(plusBPMButton)
minusBPMButton = Button(screen, 530, boardH+40, 65, 50, "BPM-")
buttonList.add(minusBPMButton)
originalButton = Button(screen, 700, 30, 140, 50, "Original")
buttonList.add(originalButton)
clarinetButton = Button(screen, 700, 130, 140, 50, "Clarinet")
buttonList.add(clarinetButton)
guitarButton = Button(screen, 700, 220, 140, 50, "Guitar")
buttonList.add(guitarButton)
synthButton = Button(screen, 700, 320, 140, 50, "Synth")
buttonList.add(synthButton)
pianoButton = Button(screen, 700, 420, 140, 50, "Piano")
buttonList.add(pianoButton)
piano2Button = Button(screen, 700, 520, 140, 50, "Piano2")
buttonList.add(piano2Button)
trumpetButton = Button(screen, 700, 620, 140, 50, "Trumpet")
buttonList.add(trumpetButton)
# main Pygame loop
while running:
# Resets the screen
screen.fill(bgColor)
# Draws the grid
drawBoard(screen, lineColor, boardW, boardH, tileWidth, tileHeight)
# Draw menu
buttonList.draw(screen)
# Listen for events
for event in pygame.event.get():
# If user closes window
if event.type == pygame.QUIT:
running = False
# If user clicks mouse
elif event.type == pygame.MOUSEBUTTONDOWN:
mouseX, mouseY = pygame.mouse.get_pos()
                # Snap the mouse position down to the nearest tile boundary (used to position cells on the grid)
                otoPosX = (mouseX // tileWidth) * tileWidth
                otoPosY = (mouseY // tileHeight) * tileHeight
# Create a tiny sprite where the mouse was clicked to use in collision detection
mouseClick = createMouseClick(mouseX, mouseY)
# If left button was clicked
if event.button == 1:
# Check to see if mouseClick collided with any sprite in the otoList
clickedBlock = pygame.sprite.spritecollide(mouseClick, otoList, False)
# Check to see if mouseClick collided with any menu button
clickedMenu = pygame.sprite.spritecollide(mouseClick, buttonList, False)
# If a cell was clicked, then delete it
if clickedBlock:
otoList.remove(clickedBlock[0])
# Handle the menu button click events
elif clickedMenu:
if clickedMenu[0] == playButton:
active = True
elif clickedMenu[0] == pauseButton:
active = False
elif clickedMenu[0] == clearButton:
otoList.empty()
elif clickedMenu[0] == plusBPMButton:
BPM += 1
elif clickedMenu[0] == minusBPMButton and BPM != 1:
BPM -= 1
elif clickedMenu[0] == originalButton:
Oto.changeInstrument("")
elif clickedMenu[0] == clarinetButton:
Oto.changeInstrument("clarinet")
elif clickedMenu[0] == guitarButton:
Oto.changeInstrument("Guitar")
elif clickedMenu[0] == synthButton:
Oto.changeInstrument("Synth")
elif clickedMenu[0] == pianoButton:
Oto.changeInstrument("Piano")
elif clickedMenu[0] == piano2Button:
Oto.changeInstrument("Piano2")
elif clickedMenu[0] == trumpetButton:
Oto.changeInstrument("trumpet")
# If the grid was clicked then create a new cell at the position (an 'Oto' object)
else:
if mouseY < boardH and mouseX < boardW:
oto = Oto(screen, tileWidth, tileHeight, boardW, boardH)
oto.rect.x = otoPosX
oto.rect.y = otoPosY
otoList.add(oto)
# if right button was clicked
elif event.button == 3:
clickedBlock = pygame.sprite.spritecollide(mouseClick, otoList, False)
# Rotate cell clockwise
if clickedBlock:
clickedBlock[0].changeState()
# Draw every cell to the screen
otoList.draw(screen)
# Move the cells
if active:
otoList.update()
# Check to see if any cells collided
for oto in otoList:
oto.checkCollision(otoList)
# Draw and update BPM label
BPMLabel = Label(screen, 620, boardH+40, 50, 50, str(BPM))
labelList.empty()
labelList.add(BPMLabel)
labelList.draw(screen)
# Update the screen
pygame.display.flip()
        # Cap the frame rate at BPM frames per second (the frame rate doubles as the tempo)
clock.tick(BPM)
main()
| yangalex/Otomata-python | Otomata.py | Python | mit | 7,139 | 0.001541 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('SocialNetworkModels', '0006_remove_comments_post_author'),
]
operations = [
migrations.AddField(
model_name='comments',
name='comment_author',
field=models.CharField(default='aaa', max_length=200),
preserve_default=False,
),
]
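# Note added for clarity (not in the original migration): preserve_default=False
# means the 'aaa' default is applied only once, to back-fill comment_author on
# any existing Comments rows when this migration runs; the field itself keeps
# no default afterwards.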
| diego04/cmput410-project | Distributed_Social_Networking/SocialNetworkModels/migrations/0007_comments_comment_author.py | Python | apache-2.0 | 483 | 0 |
#!/usr/bin/env python
import sys
import socket
import colorsys
import time
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except socket.error:
print('Failed to create socket')
sys.exit(1)
host = sys.argv[1]
port = 1337
r = int(sys.argv[3])
g = int(sys.argv[4])
b = int(sys.argv[5])
msg = bytes([ 0x20 + int(sys.argv[2]), r, g, b, 0x1F, 0x20 + int(sys.argv[2]) ])
s.sendto(msg, (host, port))
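# --- Added usage sketch, not part of the original script ---
# Host, LED index and colour values below are made-up examples:
#
#   ./set_led.py 192.168.0.50 0 255 0 0     # set LED 0 to red
#   ./set_led.py 192.168.0.50 3 0 0 255     # set LED 3 to blue
#
# The datagram built above is [0x20 + led_index, R, G, B, 0x1F, 0x20 + led_index];
# the purpose of the 0x1F byte and the trailing address byte is not documented
# here and is left to the receiving firmware.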
| Cytrill/tools | led_tools/set_led.py | Python | gpl-3.0 | 407 | 0.014742 |
from __future__ import print_function
import pandas
from sklearn.naive_bayes import MultinomialNB
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import LabelEncoder
def main():
train_all = pandas.DataFrame.from_csv('train.csv')
train = train_all[['Survived', 'Sex', 'Fare']][:200]
gender_label = LabelEncoder()
train.Sex = gender_label.fit_transform(train.Sex)
X = train[['Sex', 'Fare']]
y = train['Survived']
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.33, random_state=42)
clf = MultinomialNB()
clf.fit(X_train, y_train)
print('Accuracy: ', end='')
print(sum(clf.predict(X_test) == y_test) / float(len(y_test)))
if __name__ == '__main__':
main()
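# --- Added sketch, not part of the original file ---
# Roughly equivalent version for current pandas / scikit-learn releases, where
# pandas.DataFrame.from_csv and sklearn.cross_validation no longer exist.
# It assumes the same train.csv layout as above and is illustrative only.
def main_modern():
    import pandas as pd
    from sklearn.naive_bayes import MultinomialNB
    from sklearn.model_selection import train_test_split
    from sklearn.preprocessing import LabelEncoder

    train = pd.read_csv('train.csv', index_col=0)[['Survived', 'Sex', 'Fare']][:200].copy()
    train['Sex'] = LabelEncoder().fit_transform(train['Sex'])   # encode 'male'/'female' as ints
    X, y = train[['Sex', 'Fare']], train['Survived']
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=42)
    clf = MultinomialNB().fit(X_train, y_train)
    print('Accuracy:', clf.score(X_test, y_test))               # mean accuracy on the held-out split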
| noelevans/sandpit | kaggle/titanic/categorical_and_scaler_prediction.py | Python | mit | 773 | 0 |
import datetime
import decimal
import hashlib
import logging
from time import time
from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.timezone import utc
logger = logging.getLogger('django.db.backends')
class CursorWrapper:
def __init__(self, cursor, db):
self.cursor = cursor
self.db = db
WRAP_ERROR_ATTRS = frozenset(['fetchone', 'fetchmany', 'fetchall', 'nextset'])
def __getattr__(self, attr):
cursor_attr = getattr(self.cursor, attr)
if attr in CursorWrapper.WRAP_ERROR_ATTRS:
return self.db.wrap_database_errors(cursor_attr)
else:
return cursor_attr
def __iter__(self):
with self.db.wrap_database_errors:
for item in self.cursor:
yield item
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
# Close instead of passing through to avoid backend-specific behavior
# (#17671). Catch errors liberally because errors in cleanup code
# aren't useful.
try:
self.close()
except self.db.Database.Error:
pass
# The following methods cannot be implemented in __getattr__, because the
# code must run when the method is invoked, not just when it is accessed.
def callproc(self, procname, params=None):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.callproc(procname)
else:
return self.cursor.callproc(procname, params)
def execute(self, sql, params=None):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.execute(sql)
else:
return self.cursor.execute(sql, params)
def executemany(self, sql, param_list):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
return self.cursor.executemany(sql, param_list)
class CursorDebugWrapper(CursorWrapper):
# XXX callproc isn't instrumented at this time.
def execute(self, sql, params=None):
start = time()
try:
return super(CursorDebugWrapper, self).execute(sql, params)
finally:
stop = time()
duration = stop - start
sql = self.db.ops.last_executed_query(self.cursor, sql, params)
self.db.queries_log.append({
'sql': sql,
'time': "%.3f" % duration,
})
logger.debug(
'(%.3f) %s; args=%s', duration, sql, params,
extra={'duration': duration, 'sql': sql, 'params': params}
)
def executemany(self, sql, param_list):
start = time()
try:
return super(CursorDebugWrapper, self).executemany(sql, param_list)
finally:
stop = time()
duration = stop - start
try:
times = len(param_list)
except TypeError: # param_list could be an iterator
times = '?'
self.db.queries_log.append({
'sql': '%s times: %s' % (times, sql),
'time': "%.3f" % duration,
})
logger.debug(
'(%.3f) %s; args=%s', duration, sql, param_list,
extra={'duration': duration, 'sql': sql, 'params': param_list}
)
###############################################
# Converters from database (string) to Python #
###############################################
def typecast_date(s):
return datetime.date(*map(int, s.split('-'))) if s else None # returns None if s is null
def typecast_time(s): # does NOT store time zone information
if not s:
return None
hour, minutes, seconds = s.split(':')
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:
microseconds = '0'
return datetime.time(int(hour), int(minutes), int(seconds), int((microseconds + '000000')[:6]))
def typecast_timestamp(s): # does NOT store time zone information
# "2005-07-29 15:48:00.590358-05"
# "2005-07-29 09:56:00-05"
if not s:
return None
if ' ' not in s:
return typecast_date(s)
d, t = s.split()
# Extract timezone information, if it exists. Currently we just throw
# it away, but in the future we may make use of it.
if '-' in t:
t, tz = t.split('-', 1)
tz = '-' + tz
elif '+' in t:
t, tz = t.split('+', 1)
tz = '+' + tz
else:
tz = ''
dates = d.split('-')
times = t.split(':')
seconds = times[2]
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:
microseconds = '0'
tzinfo = utc if settings.USE_TZ else None
return datetime.datetime(
int(dates[0]), int(dates[1]), int(dates[2]),
int(times[0]), int(times[1]), int(seconds),
int((microseconds + '000000')[:6]), tzinfo
)
def typecast_decimal(s):
if s is None or s == '':
return None
return decimal.Decimal(s)
###############################################
# Converters from Python to database (string) #
###############################################
def rev_typecast_decimal(d):
if d is None:
return None
return str(d)
def truncate_name(name, length=None, hash_len=4):
"""Shortens a string to a repeatable mangled version with the given length.
"""
if length is None or len(name) <= length:
return name
hsh = hashlib.md5(force_bytes(name)).hexdigest()[:hash_len]
return '%s%s' % (name[:length - hash_len], hsh)
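# Example added for illustration (not in the original module):
#   truncate_name('some_table_with_a_long_name', length=16, hash_len=4)
# keeps the first 12 characters ('some_table_w') and appends a 4-character md5
# fragment of the full name, so equal inputs always shorten to the same string.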
def format_number(value, max_digits, decimal_places):
"""
Formats a number into a string with the requisite number of digits and
decimal places.
"""
if value is None:
return None
if isinstance(value, decimal.Decimal):
context = decimal.getcontext().copy()
if max_digits is not None:
context.prec = max_digits
if decimal_places is not None:
value = value.quantize(decimal.Decimal(".1") ** decimal_places, context=context)
else:
context.traps[decimal.Rounded] = 1
value = context.create_decimal(value)
return "{:f}".format(value)
if decimal_places is not None:
return "%.*f" % (decimal_places, value)
return "{:f}".format(value)
def strip_quotes(table_name):
"""
Strip quotes off of quoted table names to make them safe for use in index
names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
scheme) becomes 'USER"."TABLE'.
"""
has_quotes = table_name.startswith('"') and table_name.endswith('"')
return table_name[1:-1] if has_quotes else table_name
| mattseymour/django | django/db/backends/utils.py | Python | bsd-3-clause | 7,044 | 0.000568 |
def pig_it(text):
return ' '.join([x[1:]+x[0]+'ay' if x.isalpha() else x for x in text.split()])
# Really just filtering and re-joining two slices of each word, which is much
# simpler than shifting characters around; a neat approach.
# "a if cond else b" handles non-alphabetic tokens in a single line; a standard pattern.
#
# Breakdown of the comprehension:
#   for x in text.split()      iterate over whitespace-separated tokens
#   if x.isalpha()             only alphabetic words are transformed
#   x[1:] + x[0] + 'ay'        move the first letter to the end and append 'ay'
#   else x                     punctuation and other tokens pass through unchanged
#   return ' '.join([...])     reassemble the sentence
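# Example usage added for illustration:
assert pig_it('Pig latin is cool') == 'igPay atinlay siay oolcay'
assert pig_it('Hello world !') == 'elloHay orldway !'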
| lluxury/codewars | Simple Pig Latin.py | Python | mit | 564 | 0.006198 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Grappler LayoutOptimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import device_properties_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.client import session
from tensorflow.python.compat import compat
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.grappler import cluster as gcluster
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.layers import convolutional as conv_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import saver as saver_lib
def _weight(shape):
"""Generates a weight of a given shape."""
return random_ops.truncated_normal(shape, seed=0, stddev=0.1)
def _bias(shape):
"""Generates a bias of a given shape."""
return constant_op.constant(0.1, shape=shape)
def _conv2d(x, w):
"""Returns a 2d convolution layer with full stride."""
return nn.conv2d(x, w, strides=[1, 1, 1, 1], padding='SAME')
def _max_pool_2x2(x):
"""Downsamples a feature map by 2X."""
return nn.max_pool(
x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
# Taken from tensorflow/examples/tutorials/mnist/mnist_deep.py
def _two_layer_model(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
b_conv1 = _bias([32])
h_conv1 = nn.relu(_conv2d(x_image, w_conv1) + b_conv1)
h_pool1 = _max_pool_2x2(h_conv1)
w_conv2 = _weight([5, 5, 32, 64])
b_conv2 = _bias([64])
h_conv2 = nn.relu(_conv2d(h_pool1, w_conv2) + b_conv2)
h_pool2 = _max_pool_2x2(h_conv2)
return h_pool2
def _model_with_second_port():
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([2, 5, 5, 4], seed=0)
scale = constant_op.constant(0.1, shape=[4])
offset = constant_op.constant(0.3, shape=[4])
y, mean, _ = nn.fused_batch_norm(x, scale, offset)
mul = math_ops.add(y, mean)
output = array_ops.identity(mul)
return output
def _model_with_branch(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
w_conv2 = _weight([5, 5, 1, 32])
c_conv1 = _conv2d(x_image, w_conv1)
c_conv2 = _conv2d(x_image, w_conv2)
add = math_ops.add(c_conv1, c_conv2)
return add
def _model_with_vec_and_4d(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
c_conv1 = _conv2d(x_image, w_conv1)
vector = constant_op.constant(6.4, shape=[32])
add = math_ops.add(c_conv1, vector)
return add
def _loop():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = map_fn.map_fn(_two_layer_model, elems, dtype=dtypes.float32)
return outputs
def _loop_with_branch():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = map_fn.map_fn(_model_with_branch, elems, dtype=dtypes.float32)
return outputs
def _loop_with_vec_and_4d():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = map_fn.map_fn(_model_with_vec_and_4d, elems, dtype=dtypes.float32)
return outputs
def _get_config(layout_optimizer=True):
if layout_optimizer:
rewrite_options = rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.ON,
# do not remove duplicated nodes
arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF)
else:
rewrite_options = rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.OFF,
# do not remove duplicated nodes
arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF)
rewrite_options.min_graph_nodes = -1
graph_options = config_pb2.GraphOptions(
rewrite_options=rewrite_options, build_cost_model=1)
config = config_pb2.ConfigProto(graph_options=graph_options)
config.graph_options.optimizer_options.opt_level = -1
return config
def _simple_metagraph(depthwise=False):
random_seed.set_random_seed(0)
x = variables.Variable(random_ops.truncated_normal([1, 200, 200, 3], seed=0))
conv = conv_layers.separable_conv2d if depthwise else conv_layers.conv2d
y = conv(x, 32, [3, 3])
z = conv(y, 32, [3, 3])
optimizer = gradient_descent.GradientDescentOptimizer(1e-4)
loss = math_ops.reduce_mean(z)
train_op = optimizer.minimize(loss)
graph = ops.get_default_graph()
graph.add_to_collection('train_op', train_op)
meta_graph = saver_lib.export_meta_graph(graph_def=graph.as_graph_def())
return meta_graph
def _get_cluster():
named_device = device_properties_pb2.NamedDevice()
named_device.name = '/GPU:0'
named_device.properties.type = 'GPU'
named_device.properties.num_cores = 24
named_device.properties.frequency = 1000
named_device.properties.environment['architecture'] = '4'
cluster = gcluster.Cluster(devices=[named_device])
return cluster
def _is_transpose(node):
return node.endswith('TransposeNHWCToNCHW-LayoutOptimizer') or node.endswith(
'TransposeNCHWToNHWC-LayoutOptimizer')
def _is_permute(node):
return node.endswith('VecPermuteNHWCToNCHW-LayoutOptimizer') or node.endswith(
'VecPermuteNCHWToNHWC-LayoutOptimizer')
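# Added illustrative helper (not part of the original test file): every test
# below repeats the same pattern of walking metadata.cost_graph and counting
# the Transpose nodes inserted by the layout optimizer; a hypothetical helper
# capturing that pattern could look like this.
def _count_layout_transposes_sketch(metadata):
  """Returns (number of layout-optimizer transposes, all node names)."""
  nodes = [node.name for node in metadata.cost_graph.node]
  return sum(1 for name in nodes if _is_transpose(name)), nodes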
@test_util.for_all_test_methods(test_util.no_xla_auto_jit,
'Test does not apply in XLA setting')
class LayoutOptimizerTest(test.TestCase):
"""Tests the Grappler layout optimizer."""
def _assert_trans_nchw_to_nhwc(self, name, nodes):
self.assertIn(name + '-TransposeNCHWToNHWC-LayoutOptimizer', nodes)
def _assert_trans_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-TransposeNHWCToNCHW-LayoutOptimizer', nodes)
def _assert_map_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-DimMapNHWCToNCHW-LayoutOptimizer', nodes)
def _assert_vec_nchw_to_nhwc(self, name, nodes):
self.assertIn(name + '-VecPermuteNCHWToNHWC-LayoutOptimizer', nodes)
def _assert_vec_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-VecPermuteNHWCToNCHW-LayoutOptimizer', nodes)
def _train(self, checkpoint_path, layout_optimizer=False, restore=False):
ops.reset_default_graph()
graph = ops.get_default_graph()
with session.Session(
config=_get_config(layout_optimizer), graph=graph) as sess:
batch = 2
height = 6
width = 7
input_channels = 3
shape = [batch, height, width, input_channels]
image = array_ops.placeholder(dtype='float32', shape=shape)
conv1 = conv_layers.conv2d(image, 32, [3, 3])
conv2 = conv_layers.conv2d(conv1, 32, [3, 3])
optimizer = gradient_descent.GradientDescentOptimizer(0.01)
loss = math_ops.reduce_mean(conv2)
train_op = optimizer.minimize(loss)
saver = saver_lib.Saver(write_version=saver_pb2.SaverDef.V2)
if restore:
saver.restore(sess, checkpoint_path)
else:
self.evaluate(variables.global_variables_initializer())
np.random.seed(0)
for _ in range(2):
image_val = np.random.rand(*shape).astype(np.float32)
sess.run([loss, train_op], feed_dict={image: image_val})
if restore:
all_vars = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
all_vars_values = [var.eval(session=sess) for var in all_vars]
return all_vars_values
else:
saver.save(sess, checkpoint_path)
@test_util.deprecated_graph_mode_only
def testTwoConvLayers(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
output = _two_layer_model(x)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Relu_1-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSplitWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dim = array_ops.placeholder(dtype='int32')
split = array_ops.split(conv, 2, axis=dim)
scale = constant_op.constant(0.1, shape=[32])
offset = constant_op.constant(0.3, shape=[32])
bn0 = nn.fused_batch_norm(split[0], scale, offset)
bn1 = nn.fused_batch_norm(split[1], scale, offset)
add = bn0[0] + bn1[0]
output = array_ops.identity(add)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={dim: 3})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata, feed_dict={dim: 3})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('add_2-0-0', nodes)
self._assert_map_nhwc_to_nchw('split-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSplitVWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dim = array_ops.placeholder(dtype='int32')
sizes = constant_op.constant([50, 10, 4], shape=[3])
split = gen_array_ops.split_v(
value=conv, size_splits=sizes, axis=dim, num_split=3)
output = math_ops.reduce_sum(split[0])
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={dim: 3})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata, feed_dict={dim: 3})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('SplitV-0-0', nodes)
self._assert_map_nhwc_to_nchw('SplitV-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testPadWithConstPaddings(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
paddings_val = [[1, 2], [3, 4], [5, 6], [7, 8]]
paddings = constant_op.constant(
paddings_val, dtype='int32', name='PaddingsConst')
pad = array_ops.pad(conv, paddings)
output = array_ops.identity(pad)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Pad-0-0', nodes)
self.assertIn('Pad-1-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReduceSum(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testCast(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
cast = math_ops.cast(conv, dtype='bool')
output = array_ops.identity(cast)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Cast-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSqueeze(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2])
squeeze = array_ops.squeeze(reduce_sum)
output = array_ops.identity(squeeze)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSqueezeAlongHW(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2], keepdims=True)
squeeze = array_ops.squeeze(reduce_sum, axis=[1, 2])
output = array_ops.identity(squeeze)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSqueezeAlongNHW(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[0, 1, 2], keepdims=True)
squeeze = array_ops.squeeze(reduce_sum, axis=[0, 1, 2])
output = array_ops.identity(squeeze)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReduceSumAlongHWC(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2, 3])
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReduceSumAlongNHW(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[0, 1, 2])
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReduceSumAlongC(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[3])
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReduceSumAlongCKeepDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[3], keepdims=True)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Sum-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReduceSumAlongHKeepDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[2], keepdims=True)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReduceSumAlongWCKeepDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[2, 3], keepdims=True)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testConcatWithControlDependency(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
axis = constant_op.constant(3)
var = variables.Variable(3)
assign = state_ops.assign(var, 6)
with ops.control_dependencies([assign]):
concat = array_ops.concat([conv, conv], axis)
output = array_ops.identity(concat)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('concat-0-0', nodes)
self.assertIn('concat-2-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testFill(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = array_ops.placeholder(dtype='float32')
conv = _two_layer_model(x)
shape = array_ops.shape(conv)
scalar = array_ops.constant(5.7)
fill = array_ops.fill(shape, scalar)
output = array_ops.identity(fill)
x_val = [3.4] * 784
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={x: x_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
x: x_val
})
nodes = []
num_transposes = 0
num_vec_permute = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
if _is_permute(node.name):
num_vec_permute += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
# Two vector permute nodes were initially added in the Expand phase of
# LayoutOptimizer; they cancelled out each other in the Collapse phase.
expected_vec_permute = 0
self.assertEqual(expected_vec_permute, num_vec_permute)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Fill-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testTile(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
multiple = array_ops.placeholder(dtype='int32')
tile = array_ops.tile(conv, multiple)
output = array_ops.identity(tile)
multiple_val = [2, 3, 4, 1]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={multiple: multiple_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
multiple: multiple_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Tile-0-0', nodes)
self._assert_vec_nhwc_to_nchw('Tile-1', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReverseWithConstDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dims = constant_op.constant([3, 1], name='DimsConst')
reverse = array_ops.reverse(conv, dims)
output = array_ops.identity(reverse)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('ReverseV2-0-0', nodes)
self.assertIn('ReverseV2-1-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testReverseWithNonConstDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dims = array_ops.placeholder(dtype='int32')
reverse = array_ops.reverse(conv, dims)
output = array_ops.identity(reverse)
dims_val = [2, 3]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={dims: dims_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
dims: dims_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('ReverseV2-0-0', nodes)
self._assert_map_nhwc_to_nchw('ReverseV2-1', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSelectOp(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
add = math_ops.add(conv, conv)
mean = math_ops.reduce_mean(conv)
condition = math_ops.less(conv, mean)
select = gen_math_ops.select(condition, conv, add)
output = array_ops.identity(select)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Select-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSelectOpConditionUnknownShape(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
add = math_ops.add(conv, conv)
condition = array_ops.placeholder(dtype='bool')
select = gen_math_ops.select(condition, conv, add)
output = array_ops.identity(select)
condition_val = np.zeros((1, 7, 7, 64))
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={condition: condition_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={condition: condition_val})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 3
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSelectOpScalarCondition(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
add = math_ops.add(conv, conv)
condition = constant_op.constant(True)
select = gen_math_ops.select(condition, conv, add)
output = array_ops.identity(select)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Select-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testPadWithNonConstPaddings(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
paddings = array_ops.placeholder(dtype='int32')
pad = array_ops.pad(conv, paddings)
output = array_ops.identity(pad)
paddings_val = [[1, 2], [3, 4], [5, 6], [7, 8]]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={paddings: paddings_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
paddings: paddings_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Pad-0-0', nodes)
self._assert_vec_nhwc_to_nchw('Pad-1', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testMaxPoolV2(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
ksize = constant_op.constant([1, 2, 3, 1], shape=[4])
strides = array_ops.placeholder(dtype='int32', shape=[4])
max_pool = gen_nn_ops.max_pool_v2(conv, ksize, strides, 'VALID')
output = array_ops.identity(max_pool)
strides_val = [1, 3, 2, 1]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={strides: strides_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
strides: strides_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('MaxPoolV2-0-0', nodes)
self._assert_vec_nhwc_to_nchw('MaxPoolV2-2', nodes)
self.assertIn('MaxPoolV2-1-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testMaxPoolGradV2(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
ksize = constant_op.constant([1, 2, 3, 1], shape=[4])
strides = array_ops.placeholder(dtype='int32', shape=[4])
max_pool_grad = gen_nn_ops.max_pool_grad_v2(conv, conv, conv, ksize,
strides, 'VALID')
output = array_ops.identity(max_pool_grad)
strides_val = [1, 3, 2, 1]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={strides: strides_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
strides: strides_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('MaxPoolGradV2-0-0', nodes)
self._assert_vec_nhwc_to_nchw('MaxPoolGradV2-4', nodes)
self.assertIn('MaxPoolGradV2-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testSliceWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
size = array_ops.placeholder(dtype='int32')
s = array_ops.slice(conv, [0, 0, 0, 0], size)
output = array_ops.identity(s)
size_val = [1, 2, 3, 4]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={size: size_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
size: size_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Slice-0-0', nodes)
self._assert_vec_nhwc_to_nchw('Slice-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testStridedSliceWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
end = array_ops.placeholder(dtype='int32')
s = array_ops.strided_slice(conv, [0, 0, 0, 0], end, strides=[1, 2, 3, 1])
output = array_ops.identity(s)
end_val = [1, 2, 3, 4]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={end: end_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
end: end_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('StridedSlice-0-0', nodes)
self._assert_vec_nhwc_to_nchw('StridedSlice-2', nodes)
self.assertIn('StridedSlice-1-LayoutOptimizer', nodes)
self.assertIn('StridedSlice-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testStridedSliceWithMask1011(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
# This will generate a StridedSlice op with begin mask and
# end mask 11(1011).
s = conv[:, :, 1:-1, :]
output = array_ops.identity(s)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('strided_slice-0-0', nodes)
self.assertIn('strided_slice-1-LayoutOptimizer', nodes)
self.assertIn('strided_slice-2-LayoutOptimizer', nodes)
self.assertIn('strided_slice-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testStridedSliceWithMask0111(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
# This will generate a StridedSlice op with begin mask and
# end mask 7(0111).
s = conv[:, :, :, 1:-1]
output = array_ops.identity(s)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('strided_slice-0-0', nodes)
self.assertIn('strided_slice-1-LayoutOptimizer', nodes)
self.assertIn('strided_slice-2-LayoutOptimizer', nodes)
self.assertIn('strided_slice-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testStridedSliceGradWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
end = array_ops.placeholder(dtype='int32')
shape = array_ops.shape(conv)
end_val = [1, 2, 3, 4]
s = array_ops.strided_slice(
conv, [0, 0, 0, 0], end_val, strides=[1, 2, 3, 1])
s_grad = array_ops.strided_slice_grad(shape, [0, 0, 0, 0], end,
[1, 2, 3, 1], s)
output = array_ops.identity(s_grad)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={end: end_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
end: end_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('StridedSliceGrad-0-0', nodes)
self._assert_vec_nhwc_to_nchw('StridedSliceGrad-2', nodes)
self.assertIn('StridedSlice-1-LayoutOptimizer', nodes)
self.assertIn('StridedSlice-2-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testShapeN(self):
if test.is_gpu_available(cuda_only=True):
x = array_ops.placeholder(dtype='float32')
conv = _two_layer_model(x)
shapen = array_ops.shape_n([conv, conv])
output = math_ops.add(shapen[0], shapen[1])
x_val = [1.7] * 784
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={x: x_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
x: x_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_vec_nchw_to_nhwc('ShapeN-0-0', nodes)
self.assertAllEqual(output_val_ref, output_val)
@test_util.deprecated_graph_mode_only
def testShapeNFollowedByNotConvertibleNodeReshape(self):
if test.is_gpu_available(cuda_only=True):
x = array_ops.placeholder(dtype='float32')
conv = _two_layer_model(x)
conv_reshape = array_ops.reshape(conv, [1, 1, 1, -1])
shapen = array_ops.shape_n([conv, conv_reshape])
shape = array_ops.identity(shapen[1])
ones = array_ops.ones(shape)
output = math_ops.add_n([conv_reshape, ones])
x_val = [1.7] * 784
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={x: x_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={x: x_val})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testLoop(self):
if test.is_gpu_available(cuda_only=True):
output = _loop()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
      self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('map/while/Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('map/while/MaxPool_1-0-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testLoopWithBranch(self):
if test.is_gpu_available(cuda_only=True):
output = _loop_with_branch()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 3
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('map/while/Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('map/while/Add_1-0-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testLoopWithVecAnd4D(self):
if test.is_gpu_available(cuda_only=True):
output = _loop_with_vec_and_4d()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('map/while/Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('map/while/Add_1-0-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testBinaryOpSecondPort(self):
with compat.forward_compatibility_horizon(2019, 6, 7):
if test.is_gpu_available(cuda_only=True):
output = _model_with_second_port()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('FusedBatchNormV3-0', nodes)
self._assert_trans_nchw_to_nhwc('Add-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.deprecated_graph_mode_only
def testGradient(self):
meta_graph = _simple_metagraph()
config = config_pb2.ConfigProto()
config.graph_options.rewrite_options.CopyFrom(
rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.ON,
min_graph_nodes=-1))
optimized_graph = tf_optimizer.OptimizeGraph(
config, meta_graph, cluster=_get_cluster())
found = 0
for node in optimized_graph.node:
if node.op in ['Conv2D', 'Conv2DBackpropFilter', 'Conv2DBackpropInput']:
found += 1
self.assertEqual(node.attr['data_format'].s, b'NCHW')
self.assertEqual(found, 5)
@test_util.deprecated_graph_mode_only
def testDepthwise(self):
meta_graph = _simple_metagraph(depthwise=True)
config = config_pb2.ConfigProto()
config.graph_options.rewrite_options.CopyFrom(
rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.ON,
min_graph_nodes=-1))
optimized_graph = tf_optimizer.OptimizeGraph(
config, meta_graph, cluster=_get_cluster())
found = 0
for node in optimized_graph.node:
if node.op in [
'DepthwiseConv2dNative', 'DepthwiseConv2dNativeBackpropFilter',
'DepthwiseConv2dNativeBackpropInput'
]:
found += 1
self.assertEqual(node.attr['data_format'].s, b'NCHW')
self.assertEqual(found, 6)
def testCheckpointCompatibility(self):
if not test.is_gpu_available(cuda_only=True):
self.skipTest('GPU required')
checkpoint_path = self.get_temp_dir()
self._train(checkpoint_path)
vars_expected = self._train(checkpoint_path, restore=True)
vars_layout_optimized = self._train(
checkpoint_path, restore=True, layout_optimizer=True)
for var_expected, var_layout_optimized in zip(vars_expected,
vars_layout_optimized):
self.assertAllClose(var_expected, var_layout_optimized, atol=1e-6)
if __name__ == '__main__':
test.main()
|
ghchinoy/tensorflow
|
tensorflow/python/grappler/layout_optimizer_test.py
|
Python
|
apache-2.0
| 60,128 | 0.015018 |
from configparser import ConfigParser
import v20
# Create an object config
config = ConfigParser()
# Read the config
config.read("../API_Connection_Oanda/pyalgo.cfg")
ctx = v20.Context(
'api-fxpractice.oanda.com',
443,
True,
application = 'sample_code',
token = config['oanda_v20']['access_token'],
datetime_format = 'RFC3339')
# class oanda_info():
def get_Id_Account():
response = ctx.account.list()
# Ask for the Oanda ID Account
accounts = response.get('accounts')
# Show the ID
for account in accounts:
# account('Account: %s' %account)
        print(account)
def get_instruments():
response = ctx.account.instruments(
config['oanda_v20']['account_id'])
instruments = response.get('instruments')
# instruments[0].dict()
for instrument in instruments:
ins = instrument.dict()
print('%20s | %10s' % (ins['displayName'],
ins['name']))
|
cgomezfandino/Project_PTX
|
API_Connection_Oanda/PTX_oandaInfo.py
|
Python
|
mit
| 972 | 0.009259 |
# By starting at the top of the triangle below and moving to adjacent numbers on the
# row below, the maximum total from top to bottom is 23.
# 3
# 7 4
# 2 4 6
# 8 5 9 3
# That is, 3 + 7 + 4 + 9 = 23.
# Find the maximum total from top to bottom of the triangle below:
# 75
# 95 64
# 17 47 82
# 18 35 87 10
# 20 04 82 47 65
# 19 01 23 75 03 34
# 88 02 77 73 07 63 67
# 99 65 04 28 06 16 70 92
# 41 41 26 56 83 40 80 70 33
# 41 48 72 33 47 32 37 16 94 29
# 53 71 44 65 25 43 91 52 97 51 14
# 70 11 33 28 77 73 17 78 39 68 17 57
# 91 71 52 38 17 14 91 43 58 50 27 29 48
# 63 66 04 68 89 53 67 30 73 16 69 87 40 31
# 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
# NOTE: As there are only 16384 routes, it is possible to solve this problem by trying
# every route. However, Problem 67, is the same challenge with a triangle containing
# one-hundred rows; it cannot be solved by brute force, and requires a clever method! ;o)
text = '75\n\
95 64\n\
17 47 82\n\
18 35 87 10\n\
20 04 82 47 65\n\
19 01 23 75 03 34\n\
88 02 77 73 07 63 67\n\
99 65 04 28 06 16 70 92\n\
41 41 26 56 83 40 80 70 33\n\
41 48 72 33 47 32 37 16 94 29\n\
53 71 44 65 25 43 91 52 97 51 14\n\
70 11 33 28 77 73 17 78 39 68 17 57\n\
91 71 52 38 17 14 91 43 58 50 27 29 48\n\
63 66 04 68 89 53 67 30 73 16 69 87 40 31\n\
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23'
digits = [[int (y) for y in x.split(' ')] for x in text.split('\n')]
for i in range(1, len(digits)):
digits[i][0] += digits[i - 1][0]
digits[i][len(digits[i]) - 1] += digits[i - 1][len(digits[i - 1]) - 1]
for j in range(1, len(digits[i]) - 1):
digits[i][j] += max(digits[i - 1][j - 1], digits[i - 1][j])
print max(digits[len(digits) - 1])
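# Added verification sketch (not part of the original solution): the same
# top-down accumulation applied to the small triangle from the comment above
# reproduces the stated maximum of 23 (3 + 7 + 4 + 9). `small` is a local
# illustration variable only.
small = [[3], [7, 4], [2, 4, 6], [8, 5, 9, 3]]
for i in range(1, len(small)):
    small[i][0] += small[i - 1][0]
    small[i][-1] += small[i - 1][-1]
    for j in range(1, len(small[i]) - 1):
        small[i][j] += max(small[i - 1][j - 1], small[i - 1][j])
assert max(small[-1]) == 23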
|
cloudzfy/euler
|
src/18.py
|
Python
|
mit
| 1,684 | 0.006532 |
def get_perm_argparser(self, args):
args = args.split(" ")
if args[0] == "nick":
self.conman.gen_send("Permission level for %s: %s" % (args[1], self.permsman.get_nick_perms(args[1])))
elif args[0] == "cmd":
if args[1].startswith("."):
args[1] = args[1][1:]
self.conman.gen_send("Permission level for %s: %s" % (args[1], self.permsman.get_cmd_perms(args[1])))
elif args[0] == "msg":
self.conman.gen_send("Message permissions for %s: %s" % (args[1], self.permsman.get_msg_perms(args[1])))
def set_perm_argparser(self, args):
args = args.split(" ")
if args[0] == "nick":
self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
self.permsman.set_nick_perms(args[1], args[2])
elif args[0] == "cmd":
if args[1].startswith("."):
args[1] = args[1][1:]
self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
self.permsman.set_cmd_perms(args[1], args[2])
elif args[0] == "msg":
args[2] = args[2].lower() == "true" or args[2] == "1"
self.conman.gen_send("Setting message permissions for %s: %s" % (args[1], args[2]))
self.permsman.set_msg_perms(args[1], args[2])
self._map("command", "getperm", get_perm_argparser)
self._map("command", "setperm", set_perm_argparser)
|
vsquare95/JiyuuBot
|
modules/permissions.py
|
Python
|
gpl-3.0
| 1,361 | 0.005878 |
#!/usr/bin/env python
import os
import sys
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(PROJECT_DIR)
sys.path.append(os.path.abspath(PROJECT_DIR + '/../'))
sys.path.append(os.path.abspath(PROJECT_DIR + '/../realestate/'))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
wm3ndez/realestate
|
testproject/manage.py
|
Python
|
bsd-2-clause
| 464 | 0.002155 |
# -*- coding: utf-8 -*-
# gedit CodeCompletion plugin
# Copyright (C) 2011 Fabio Zendhi Nagao
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def get_word(piter):
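    # Added note: walk backwards from the cursor, accepting identifier characters
    # plus the member-access punctuation "_:.->", and return the start iterator
    # together with the visible text from there up to the original position.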
a = piter.copy()
b = piter.copy()
while True:
if a.starts_line():
break
a.backward_char()
ch = a.get_char()
#if not (ch.isalnum() or ch in ['_', ':', '.', '-', '>']):
if not (ch.isalnum() or ch in "_:.->"):
a.forward_char()
break
word = a.get_visible_text(b)
return a, word
def get_document(piter):
a = piter.copy()
b = piter.copy()
while True:
if not a.backward_char():
break
while True:
if not b.forward_char():
break
return a.get_visible_text(b)
# ex:ts=4:et:
|
nagaozen/my-os-customizations
|
home/nagaozen/.gnome2/gedit/plugins/codecompletion/utils.py
|
Python
|
gpl-3.0
| 1,420 | 0.008451 |
"""Admin Configuration for Improved User"""
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.utils.translation import gettext_lazy as _
from .forms import UserChangeForm, UserCreationForm
class UserAdmin(BaseUserAdmin):
"""Admin panel for Improved User, mimics Django's default"""
fieldsets = (
(None, {"fields": ("email", "password")}),
(_("Personal info"), {"fields": ("full_name", "short_name")}),
(
_("Permissions"),
{
"fields": (
"is_active",
"is_staff",
"is_superuser",
"groups",
"user_permissions",
),
},
),
(_("Important dates"), {"fields": ("last_login", "date_joined")}),
)
add_fieldsets = (
(
None,
{
"classes": ("wide",),
"fields": ("email", "short_name", "password1", "password2"),
},
),
)
form = UserChangeForm
add_form = UserCreationForm
list_display = ("email", "full_name", "short_name", "is_staff")
search_fields = ("email", "full_name", "short_name")
ordering = ("email",)
|
jambonsw/django-improved-user
|
src/improved_user/admin.py
|
Python
|
bsd-2-clause
| 1,256 | 0 |
import os
import re
import struct
from . import helpers
from .raid import RaidController, RaidLD, RaidPD, DeviceCapacity
from .mixins import TextAttributeParser
from .smart import SMARTinfo
if os.name == 'nt':
raidUtil = 'C:\\Program Files (x86)\\MegaRAID Storage Manager\\StorCLI64.exe'
elif 'VMkernel' in os.uname():
raidUtil = '/opt/lsi/storcli/storcli'
else:
raidUtil = '/opt/MegaRAID/storcli/storcli64'
class RaidControllerLSI(TextAttributeParser, RaidController):
_attributes = [
(r'(?i)^Model\s=\s(.*)$', 'Model', None, False, None),
(r'(?i)^Serial\sNumber\s=\s(.*)$', 'Serial', None, False, None),
(r'(?i)^Controller\sStatus\s=\s(.*)$', 'Status', None, False, None),
(r'(?i)^Bios\sVersion\s=\s(.*)$', 'BIOS', None, False, None),
(r'(?i)^Firmware\sVersion\s=\s(.*)$', 'Firmware', None, False, None),
(r'(?i)^On\sBoard\sMemory\sSize\s=\s(.*)$', 'CacheSize', None, False, None),
(r'(?i)^BBU\s=\s(.*)$', 'Battery', None, False, lambda match: {'Absent': False}.get(match.group(1), True)),
(r'(?i)^BBU\sStatus\s=\s(.*)$', 'BatteryStatus', None, False, lambda match: {'32': 'Degraded'}.get(match.group(1), match.group(1)))
]
def __init__(self, name):
super(self.__class__, self).__init__(name)
self.Type = 'LSIMegaRAID'
self.Serial = '-'
self.__fill_data()
self.__enumerate_ld()
@staticmethod
def probe():
if not os.path.isfile(raidUtil):
return []
output = helpers.getOutput('{} show nolog'.format(raidUtil))
controllers = []
for line in output:
match = re.search(r'^(\d+)\s\S+\s+\d+', line)
if match:
controllers.append(match.group(1))
return controllers
def __enumerate_ld(self):
ld_section = False
for line in helpers.getOutput('{} /c{} show all nolog'.format(raidUtil, self.Name)):
if re.match(r'(?i)^VD\sLIST\s:', line):
ld_section = True
continue
if not ld_section:
continue
if re.match(r'(?i)Physical\sDrives.*', line):
break
match = re.search(r'(?i)(\d+/\d+)\s+', line)
if match:
self.LDs.append(RaidLDvendorLSI(match.group(1), self))
def printSpecificInfo(self):
print('Model: {}, s/n {}, {}'.format(self.Model, self.Serial, self.Status))
print('Cache: {}'.format(self.CacheSize))
if self.Battery:
print('BBU status: {}'.format(self.BatteryStatus))
print('BIOS version: {}'.format(self.BIOS))
print('FW version : {}'.format(self.Firmware))
def __fill_data(self):
for line in helpers.getOutput('{} /c{} show all nolog'.format(raidUtil, self.Name)):
if re.match(r'(?i)^TOPOLOGY\s:', line):
break
if self._process_attributes_line(line):
continue
class RaidLDvendorLSI(RaidLD):
def __init__(self, name, controller):
(self.DG, self.VD) = name.split('/')
super(self.__class__, self).__init__(name, controller)
self.Device = self.Name
self.Level = ''
self.State = ''
self.Size = ''
self.__fill_data()
self.__find_devicename()
self.__enumerate_pd()
self.DriveCount = len(self.PDs)
self.DriveActiveCount = self.DriveCount
def __enumerate_pd(self):
pd_section = False
for line in helpers.getOutput('{} /c{}/v{} show all nolog'.format(raidUtil, self.Controller.Name, self.VD)):
if re.match(r'(?i)PDs\sfor\sVD', line):
pd_section = True
continue
if not pd_section:
continue
match = re.search(r'(?i)^(\d+):(\d+)\s+(\d+)\s+\S+', line)
if match:
self.PDs.append(RaidPDvendorLSI(match.group(1), match.group(2), match.group(3), self))
def __fill_data(self):
for line in helpers.getOutput('{} /c{}/v{} show all nolog'.format(raidUtil, self.Controller.Name, self.VD)):
match = re.search(r'(?i)SCSI\sNAA\sId\s=\s(.*)$', line)
if match:
self.NAA = match.group(1)
match = re.search(r'(?i)^(\d+)\/(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line)
if match:
self.Level = match.group(3)
self.State = {'Optl': 'Optimal',
'Rec': 'Recovery',
'OfLn': 'OffLine',
'Pdgd': 'Partially Degraded',
'Dgrd': 'Degraded'}.get(match.group(4), match.group(4))
self.Size = DeviceCapacity(int(float(match.group(10)) * 1024), {'TB': 'GiB', 'GB': 'MiB', 'MB': 'KiB'}.get(match.group(11), None))
def __find_devicename(self):
try:
for filename in [f for f in os.listdir('/dev/disk/by-id')]:
match = re.search(r'^scsi-\d+' + self.NAA, filename)
if match:
self.Device = '/dev/disk/by-id/' + filename
except:
pass
class RaidPDvendorLSI(TextAttributeParser, RaidPD):
_attributes = [
(r'(?i)^SN\s+=\s+(.*)$', 'Serial', None, False, None),
(r'(?i)^Manufacturer\sId\s=\s+(.*)$', 'Vendor', None, False, None),
(r'(?i)^Drive\sTemperature\s=\s+(\d+)C', 'Temperature', None, False, None),
(r'(?i)^Model\sNumber\s=\s+(.*)$', 'Model', None, False, None),
(r'(?i)^Media\sError\sCount\s=\s+(\d+)', 'ErrorCount', None, True, lambda match: int(match.group(1))),
(r'(?i)^Predictive\sFailure\sCount\s=\s+(\d+)', 'ErrorCount', None, True, lambda match: int(match.group(1)))
]
def __init__(self, enclosure, slot, did, ld):
super(self.__class__, self).__init__('{}:{}'.format(enclosure, slot), ld)
self.Enclosure = enclosure
self.Slot = slot
self.Device = did
self.PHYCount = 0
self.__fill_basic_info()
if hasattr(self, 'Vendor'):
self.Model = self.Vendor + ' ' + self.Model
if 'VMkernel' in os.uname():
self.__fill_LSI_smart_info()
else:
self.__fill_smart_info()
def __fill_basic_info(self):
for line in helpers.getOutput('{} /c{}/e{}/s{} show all nolog'.format(raidUtil, self.LD.Controller.Name, self.Enclosure, self.Slot)):
match = re.search(r'^(\d+):(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line)
if match:
self.Capacity = DeviceCapacity(int(float(match.group(6)) * 1024), {'TB': 'GiB', 'GB': 'MiB', 'MB': 'KiB'}.get(match.group(7), None))
self.Technology = match.group(8)
self.State = {
'DHS': 'Dedicated Hot Spare',
'UGood': 'Unconfigured Good',
'GHS': 'Global Hotspare',
'UBad': 'Unconfigured Bad',
'Onln': 'Optimal',
'Rbld': 'Rebuild',
'Offln': 'Offline'
}.get(match.group(4), match.group(4))
if self._process_attributes_line(line):
continue
def __fill_smart_info(self):
smart = SMARTinfo('-d megaraid,{}'.format(int(self.Device)), self.LD.Device)
if not smart.SMART:
return
for prop in ['Model', 'Serial', 'Firmware', 'Capacity', 'SectorSizes', 'FormFactor', 'PHYCount', 'PHYSpeed', 'RPM', 'PowerOnHours', 'ErrorCount', 'Temperature', 'SCT']:
if hasattr(smart, prop):
setattr(self, prop, getattr(smart, prop))
def __fill_LSI_smart_info(self):
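        # Added note: storcli prints the drive's SMART page as rows of hex bytes;
        # after dropping the two-byte header, each attribute is a 12-byte record
        # from which the unpack below reads the attribute id (byte 0) and a
        # 16-bit little-endian value (bytes 5-6).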
data_dump = []
for line in helpers.getOutput('{} /c{}/e{}/s{} show smart nolog'.format(raidUtil, self.LD.Controller.Name, self.Enclosure, self.Slot)):
match = re.search(r'^(\S\S\s){15}\S\S$', line)
if match:
for c in line.split(' '):
data_dump.append(int(c, 16))
data_dump = data_dump[2:]
smart = {}
for attr_index in range(0, len(data_dump) // 12):
attr, value = struct.unpack('<BxxxxHxxxxx', bytearray(data_dump[attr_index * 12:(attr_index + 1) * 12]))
if attr != 0:
smart[attr] = value
setattr(self, 'PowerOnHours', smart.get(9, None))
setattr(self, 'ErrorCount', smart.get(5, 0) + smart.get(187, 0) + smart.get(196, 0) + smart.get(197, 0) + smart.get(198, 0))
|
Bloodoff/raidinfo
|
lib/raid_megaraid.py
|
Python
|
gpl-3.0
| 8,614 | 0.003018 |
# -*- encoding: utf-8 -*-
import os
from abjad import abjad_configuration
from abjad.demos import desordre
def test_demos_desordre_01():
lilypond_file = desordre.make_desordre_lilypond_file()
|
mscuthbert/abjad
|
abjad/demos/desordre/test/test_demos_desordre.py
|
Python
|
gpl-3.0
| 198 | 0.005051 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/furniture/all/shared_frn_all_lamp_free_s01_lit.iff"
result.attribute_template_id = 6
result.stfName("frn_n","frn_lamp_free")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/furniture/all/shared_frn_all_lamp_free_s01_lit.py
|
Python
|
mit
| 461 | 0.047722 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import math
from collections import defaultdict
from nupic.research.frameworks.vernon import interfaces
__all__ = [
"StepBasedLogging",
]
class StepBasedLogging(
interfaces.Experiment, # Requires
interfaces.StepBasedLogging, # Implements
):
@staticmethod
def step_based_logging_interface_implemented():
return True
def setup_experiment(self, config):
"""
:param config: Dictionary containing the configuration parameters
- log_timestep_freq: Configures mixins and subclasses that log every
timestep to only log every nth timestep (in
addition to the final timestep of each epoch).
Set to 0 to log only at the end of each epoch.
"""
super().setup_experiment(config)
self._current_timestep = 0
self.log_timestep_freq = config.get("log_timestep_freq", 1)
@property
def current_timestep(self):
return self._current_timestep
@current_timestep.setter
def current_timestep(self, value):
self._current_timestep = value
def run_iteration(self):
timestep_begin = self.current_timestep
ret = super().run_iteration()
ret.update(
timestep_begin=timestep_begin,
timestep_end=self.current_timestep,
)
return ret
def post_batch(self, **kwargs):
super().post_batch(**kwargs)
# FIXME: move to post_optimizer_step
self.current_timestep += 1
def should_log_batch(self, train_batch_idx):
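        # Added note (not in the original source): a batch is logged when it is
        # the last batch of the epoch, or when the running timestep is a multiple
        # of log_timestep_freq (e.g. every 3rd timestep for log_timestep_freq=3);
        # log_timestep_freq=0 keeps only the end-of-epoch batch.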
return (train_batch_idx == self.total_batches - 1) or (
self.log_timestep_freq > 0
and (self.current_timestep % self.log_timestep_freq) == 0)
def get_state(self):
state = super().get_state()
state["current_timestep"] = self.current_timestep
return state
def set_state(self, state):
super().set_state(state)
if "current_timestep" in state:
self.current_timestep = state["current_timestep"]
@classmethod
def get_recorded_timesteps(cls, result, config):
log_timestep_freq = config.get("log_timestep_freq", 1)
timestep_end = result["timestep_end"]
if log_timestep_freq == 0:
ret = [timestep_end - 1]
else:
# Find first logged timestep in range
logged_begin = int(math.ceil(result["timestep_begin"]
/ log_timestep_freq)
* log_timestep_freq)
ret = list(range(logged_begin, timestep_end, log_timestep_freq))
last_batch_timestep = timestep_end - 1
if last_batch_timestep % log_timestep_freq != 0:
ret.append(last_batch_timestep)
return ret
@classmethod
def expand_result_to_time_series(cls, result, config):
result_by_timestep = defaultdict(dict)
# Assign the epoch result to the appropriate timestep.
result_by_timestep[result["timestep_end"]].update(
cls.get_readable_result(result)
)
return result_by_timestep
@classmethod
def get_execution_order(cls):
eo = super().get_execution_order()
exp = "StepBasedLoggingCore"
eo["run_iteration"].append(exp + ": Add timestep info")
eo["post_batch"].append(exp + ": Increment timestep")
eo["get_state"].append(exp + ": Get current timestep")
eo["set_state"].append(exp + ": Set current timestep")
eo.update(
# StepBasedLogging
expand_result_to_time_series=[exp + ": common result dict keys"],
)
return eo
|
mrcslws/nupic.research
|
src/nupic/research/frameworks/vernon/mixins/step_based_logging.py
|
Python
|
agpl-3.0
| 4,696 | 0.000213 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Trial.max_participants'
db.delete_column(u'trials_trial', 'max_participants')
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'Trial.max_participants'
raise RuntimeError("Cannot reverse this migration. 'Trial.max_participants' and its values cannot be restored.")
models = {
u'trials.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'trials.invitation': {
'Meta': {'object_name': 'Invitation'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'trials.participant': {
'Meta': {'object_name': 'Participant'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']", 'null': 'True', 'blank': 'True'})
},
u'trials.report': {
'Meta': {'object_name': 'Report'},
'binary': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Participant']", 'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
'variable': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Variable']"})
},
u'trials.trial': {
'Meta': {'object_name': 'Trial'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finish_date': ('django.db.models.fields.DateField', [], {}),
'group_a': ('django.db.models.fields.TextField', [], {}),
'group_a_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'group_a_expected': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_b': ('django.db.models.fields.TextField', [], {}),
'group_b_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'group_b_impressed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instruction_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'instruction_delivery': ('django.db.models.fields.TextField', [], {'default': "'im'", 'max_length': '2'}),
'instruction_hours_after': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_edited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'min_participants': ('django.db.models.fields.IntegerField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']"}),
'participants': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recruiting': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'recruitment': ('django.db.models.fields.CharField', [], {'default': "'an'", 'max_length': '2'}),
'reporting_freq': ('django.db.models.fields.CharField', [], {'default': "'da'", 'max_length': '200'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'stopped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'trials.variable': {
'Meta': {'object_name': 'Variable'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'style': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'userprofiles.rmuser': {
'Meta': {'object_name': 'RMUser'},
'account': ('django.db.models.fields.CharField', [], {'default': "'st'", 'max_length': '2'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'receive_questions': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'})
}
}
complete_apps = ['trials']
|
openhealthcare/randomise.me
|
rm/trials/migrations/0035_auto__del_field_trial_max_participants.py
|
Python
|
agpl-3.0
| 7,643 | 0.008112 |
# Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_host
OBJECT = {
"name": "New Host 1",
"ip_address": "192.0.2.1"
}
CREATE_PAYLOAD = {
"name": "New Host 1",
"ip_address": "192.0.2.1"
}
UPDATE_PAYLOAD = {
"name": "New Host 1",
"color": "blue",
"ipv4_address": "192.0.2.2"
}
OBJECT_AFTER_UPDATE = UPDATE_PAYLOAD
DELETE_PAYLOAD = {
"name": "New Host 1",
"state": "absent"
}
function_path = 'ansible.modules.network.check_point.cp_mgmt_host.api_call'
api_call_object = 'host'
class TestCheckpointHost(object):
module = cp_mgmt_host
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
return connection_class_mock.return_value
def test_create(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True, api_call_object: OBJECT}
result = self._run_module(CREATE_PAYLOAD)
assert result['changed']
assert OBJECT.items() == result[api_call_object].items()
def test_create_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False, api_call_object: OBJECT}
result = self._run_module(CREATE_PAYLOAD)
assert not result['changed']
def test_update(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True, api_call_object: OBJECT_AFTER_UPDATE}
result = self._run_module(UPDATE_PAYLOAD)
assert result['changed']
assert OBJECT_AFTER_UPDATE.items() == result[api_call_object].items()
def test_update_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False, api_call_object: OBJECT_AFTER_UPDATE}
result = self._run_module(UPDATE_PAYLOAD)
assert not result['changed']
def test_delete(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True}
result = self._run_module(DELETE_PAYLOAD)
assert result['changed']
def test_delete_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False}
result = self._run_module(DELETE_PAYLOAD)
assert not result['changed']
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
|
kvar/ansible
|
test/units/modules/network/check_point/test_cp_mgmt_host.py
|
Python
|
gpl-3.0
| 3,853 | 0.001557 |
import datetime
from collections import OrderedDict
from django.conf import settings
from django_remote_forms import logger, widgets
class RemoteField(object):
"""
A base object for being able to return a Django Form Field as a Python
dictionary.
This object also takes into account if there is initial data for the field
coming in from the form directly, which overrides any initial data
specified on the field per Django's rules:
https://docs.djangoproject.com/en/dev/ref/forms/api/#dynamic-initial-values
"""
def __init__(self, field, form_initial_data=None, field_name=None):
self.field_name = field_name
self.field = field
self.form_initial_data = form_initial_data
def as_dict(self):
field_dict = OrderedDict()
field_dict['title'] = self.field.__class__.__name__
field_dict['required'] = self.field.required
field_dict['label'] = self.field.label
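        # Form-supplied initial data wins over the field's own initial value,
        # following the Django rule referenced in the class docstring.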
field_dict['initial'] = self.form_initial_data or self.field.initial
field_dict['help_text'] = self.field.help_text
field_dict['error_messages'] = self.field.error_messages
# Instantiate the Remote Forms equivalent of the widget if possible
# in order to retrieve the widget contents as a dictionary.
remote_widget_class_name = 'Remote%s' % self.field.widget.__class__.__name__
try:
remote_widget_class = getattr(widgets, remote_widget_class_name)
remote_widget = remote_widget_class(self.field.widget, field_name=self.field_name)
except Exception, e:
logger.warning('Error serializing %s: %s', remote_widget_class_name, str(e))
widget_dict = {}
else:
widget_dict = remote_widget.as_dict()
field_dict['widget'] = widget_dict
return field_dict
class RemoteCharField(RemoteField):
def as_dict(self):
field_dict = super(RemoteCharField, self).as_dict()
field_dict.update({
'max_length': self.field.max_length,
'min_length': self.field.min_length
})
return field_dict
class RemoteIntegerField(RemoteField):
def as_dict(self):
field_dict = super(RemoteIntegerField, self).as_dict()
field_dict.update({
'max_value': self.field.max_value,
'min_value': self.field.min_value
})
return field_dict
class RemoteFloatField(RemoteIntegerField):
def as_dict(self):
return super(RemoteFloatField, self).as_dict()
class RemoteDecimalField(RemoteIntegerField):
def as_dict(self):
field_dict = super(RemoteDecimalField, self).as_dict()
field_dict.update({
'max_digits': self.field.max_digits,
'decimal_places': self.field.decimal_places
})
return field_dict
class RemoteTimeField(RemoteField):
def as_dict(self):
field_dict = super(RemoteTimeField, self).as_dict()
field_dict['input_formats'] = self.field.input_formats
if (field_dict['initial']):
if callable(field_dict['initial']):
field_dict['initial'] = field_dict['initial']()
# If initial value is datetime then convert it using first available input format
if (isinstance(field_dict['initial'], (datetime.datetime, datetime.time, datetime.date))):
if not len(field_dict['input_formats']):
if isinstance(field_dict['initial'], datetime.date):
field_dict['input_formats'] = settings.DATE_INPUT_FORMATS
elif isinstance(field_dict['initial'], datetime.time):
field_dict['input_formats'] = settings.TIME_INPUT_FORMATS
elif isinstance(field_dict['initial'], datetime.datetime):
field_dict['input_formats'] = settings.DATETIME_INPUT_FORMATS
input_format = field_dict['input_formats'][0]
field_dict['initial'] = field_dict['initial'].strftime(input_format)
return field_dict
class RemoteDateField(RemoteTimeField):
def as_dict(self):
return super(RemoteDateField, self).as_dict()
class RemoteDateTimeField(RemoteTimeField):
def as_dict(self):
return super(RemoteDateTimeField, self).as_dict()
class RemoteRegexField(RemoteCharField):
def as_dict(self):
field_dict = super(RemoteRegexField, self).as_dict()
# We don't need the pattern object in the frontend
# field_dict['regex'] = self.field.regex
return field_dict
class RemoteEmailField(RemoteCharField):
def as_dict(self):
return super(RemoteEmailField, self).as_dict()
class RemoteFileField(RemoteField):
def as_dict(self):
field_dict = super(RemoteFileField, self).as_dict()
field_dict['max_length'] = self.field.max_length
return field_dict
class RemoteImageField(RemoteFileField):
def as_dict(self):
return super(RemoteImageField, self).as_dict()
class RemoteURLField(RemoteCharField):
def as_dict(self):
return super(RemoteURLField, self).as_dict()
class RemoteBooleanField(RemoteField):
def as_dict(self):
return super(RemoteBooleanField, self).as_dict()
class RemoteNullBooleanField(RemoteBooleanField):
def as_dict(self):
return super(RemoteNullBooleanField, self).as_dict()
class RemoteBCTChoiceFieldWithTitles(RemoteField):
def as_dict(self):
return super(RemoteBCTChoiceFieldWithTitles, self).as_dict()
def get_dict(self):
#field_dict = {'widget': {'attrs' : self.field.widget.attrs}}
#field_dict = {'results': self.field.widget.attrs['results']}
field_dict = {'results': self.field.results}
if hasattr(self.field, 'img_url'):
field_dict['img_url'] = self.field.img_url
return field_dict
class RemoteInlineForeignKeyField(RemoteField):
def as_dict(self):
return super(RemoteInlineForeignKeyField, self).as_dict()
class RemoteChoiceField(RemoteField):
def as_dict(self):
field_dict = super(RemoteChoiceField, self).as_dict()
#temporary switch off
'''
field_dict['choices'] = []
for key, value in self.field.choices:
field_dict['choices'].append({
'value': key,
'display': value
})
'''
#field_dict['choices'] = []
field_dict['widget']['choices'] = []
return field_dict
def get_dict(self):
field_dict = {'choices': []}
'''
for key, value in self.field.choices:
field_dict['choices'].append({
'value': key,
})
'''
#'display': value
#return field_dict
return {}
class RemoteTypedChoiceField(RemoteChoiceField):
def as_dict(self):
field_dict = super(RemoteTypedChoiceField, self).as_dict()
field_dict.update({
'coerce': self.field.coerce,
'empty_value': self.field.empty_value
})
return field_dict
class RemoteToolChoiceField(RemoteTypedChoiceField):
def get_dict(self):
field_dict = {'choices': self.field.choices,
'ng-options': self.field.widget.attrs['ng-options'],
}
#print dir(self.field.widget)
#print self.field.to_python()
'''
for key, value in self.field.choices:
field_dict['choices'].append({
'value': key,
})
'''
#'display': value
#return field_dict
return field_dict
class RemoteModelChoiceField(RemoteChoiceField):
def as_dict(self):
return super(RemoteModelChoiceField, self).as_dict()
'''
def get_dict(self):
#field_dict = {'widget': {'attrs' : self.field.widget.attrs}}
#field_dict = {'results': self.field.widget.attrs['results']}
field_dict = {'results': self.field.results}
if hasattr(self.field, 'img_url'):
field_dict['img_url'] = self.field.img_url
return field_dict
'''
class RemoteMultipleChoiceField(RemoteChoiceField):
def as_dict(self):
return super(RemoteMultipleChoiceField, self).as_dict()
class RemoteModelMultipleChoiceField(RemoteMultipleChoiceField):
def as_dict(self):
return super(RemoteModelMultipleChoiceField, self).as_dict()
class RemoteTypedMultipleChoiceField(RemoteMultipleChoiceField):
def as_dict(self):
field_dict = super(RemoteTypedMultipleChoiceField, self).as_dict()
field_dict.update({
'coerce': self.field.coerce,
'empty_value': self.field.empty_value
})
return field_dict
class RemoteComboField(RemoteField):
def as_dict(self):
field_dict = super(RemoteComboField, self).as_dict()
field_dict.update(fields=self.field.fields)
return field_dict
class RemoteMultiValueField(RemoteField):
def as_dict(self):
field_dict = super(RemoteMultiValueField, self).as_dict()
field_dict['fields'] = self.field.fields
return field_dict
class RemoteFilePathField(RemoteChoiceField):
def as_dict(self):
field_dict = super(RemoteFilePathField, self).as_dict()
field_dict.update({
'path': self.field.path,
'match': self.field.match,
'recursive': self.field.recursive
})
return field_dict
class RemoteSplitDateTimeField(RemoteMultiValueField):
def as_dict(self):
field_dict = super(RemoteSplitDateTimeField, self).as_dict()
field_dict.update({
'input_date_formats': self.field.input_date_formats,
'input_time_formats': self.field.input_time_formats
})
return field_dict
class RemoteIPAddressField(RemoteCharField):
def as_dict(self):
return super(RemoteIPAddressField, self).as_dict()
class RemoteSlugField(RemoteCharField):
def as_dict(self):
return super(RemoteSlugField, self).as_dict()
|
promil23/django-remote-forms
|
django_remote_forms/fields.py
|
Python
|
mit
| 10,077 | 0.002481 |
#!/usr/bin/python
import math
# return statement
def printLog(x):
if x <= 0:
print "Positive number only, please."
return
result = math.log(x)
print "The log of x is", result
x, y = -2, 3
printLog(y)
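# Added illustration: the negative value defined above exercises the early
# return, printing the warning and returning None without calling math.log.
printLog(x)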
|
janusnic/21v-python
|
unit_01/23.py
|
Python
|
mit
| 210 | 0.033333 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, getdate, get_url
from frappe import _
from frappe.model.document import Document
from erpnext.controllers.queries import get_filters_cond
from frappe.desk.reportview import get_match_cond
class Project(Document):
def get_feed(self):
return '{0}: {1}'.format(_(self.status), self.project_name)
def onload(self):
"""Load project tasks for quick view"""
if not self.get('__unsaved') and not self.get("tasks"):
self.load_tasks()
self.set_onload('activity_summary', frappe.db.sql('''select activity_type,
sum(hours) as total_hours
from `tabTimesheet Detail` where project=%s and docstatus < 2 group by activity_type
order by total_hours desc''', self.name, as_dict=True))
def __setup__(self):
self.onload()
def load_tasks(self):
"""Load `tasks` from the database"""
self.tasks = []
for task in self.get_tasks():
task_map = {
"title": task.subject,
"status": task.status,
"start_date": task.exp_start_date,
"end_date": task.exp_end_date,
"description": task.description,
"task_id": task.name,
"task_weight": task.task_weight
}
self.map_custom_fields(task, task_map)
self.append("tasks", task_map)
def get_tasks(self):
return frappe.get_all("Task", "*", {"project": self.name}, order_by="exp_start_date asc")
def validate(self):
self.validate_dates()
self.validate_weights()
self.sync_tasks()
self.tasks = []
self.send_welcome_email()
def validate_dates(self):
if self.expected_start_date and self.expected_end_date:
if getdate(self.expected_end_date) < getdate(self.expected_start_date):
frappe.throw(_("Expected End Date can not be less than Expected Start Date"))
def validate_weights(self):
sum = 0
for task in self.tasks:
if task.task_weight > 0:
sum = sum + task.task_weight
if sum > 0 and sum != 1:
frappe.throw(_("Total of all task weights should be 1. Please adjust weights of all Project tasks accordingly"))
def sync_tasks(self):
"""sync tasks and remove table"""
if self.flags.dont_sync_tasks: return
task_names = []
for t in self.tasks:
if t.task_id:
task = frappe.get_doc("Task", t.task_id)
else:
task = frappe.new_doc("Task")
task.project = self.name
task.update({
"subject": t.title,
"status": t.status,
"exp_start_date": t.start_date,
"exp_end_date": t.end_date,
"description": t.description,
"task_weight": t.task_weight
})
self.map_custom_fields(t, task)
task.flags.ignore_links = True
task.flags.from_project = True
task.flags.ignore_feed = True
task.save(ignore_permissions = True)
task_names.append(task.name)
# delete
for t in frappe.get_all("Task", ["name"], {"project": self.name, "name": ("not in", task_names)}):
frappe.delete_doc("Task", t.name)
self.update_percent_complete()
self.update_costing()
def map_custom_fields(self, source, target):
project_task_custom_fields = frappe.get_all("Custom Field", {"dt": "Project Task"}, "fieldname")
for field in project_task_custom_fields:
target.update({
field.fieldname: source.get(field.fieldname)
})
def update_project(self):
self.update_percent_complete()
self.update_costing()
self.flags.dont_sync_tasks = True
self.save(ignore_permissions = True)
def update_percent_complete(self):
total = frappe.db.sql("""select count(name) from tabTask where project=%s""", self.name)[0][0]
if not total and self.percent_complete:
self.percent_complete = 0
if (self.percent_complete_method == "Task Completion" and total > 0) or (not self.percent_complete_method and total > 0):
completed = frappe.db.sql("""select count(name) from tabTask where
project=%s and status in ('Closed', 'Cancelled')""", self.name)[0][0]
self.percent_complete = flt(flt(completed) / total * 100, 2)
if (self.percent_complete_method == "Task Progress" and total > 0):
progress = frappe.db.sql("""select sum(progress) from tabTask where
project=%s""", self.name)[0][0]
self.percent_complete = flt(flt(progress) / total, 2)
if (self.percent_complete_method == "Task Weight" and total > 0):
weight_sum = frappe.db.sql("""select sum(task_weight) from tabTask where
project=%s""", self.name)[0][0]
if weight_sum == 1:
weighted_progress = frappe.db.sql("""select progress,task_weight from tabTask where
project=%s""", self.name,as_dict=1)
pct_complete=0
for row in weighted_progress:
pct_complete += row["progress"] * row["task_weight"]
self.percent_complete = flt(flt(pct_complete), 2)
def update_costing(self):
from_time_sheet = frappe.db.sql("""select
sum(costing_amount) as costing_amount,
sum(billing_amount) as billing_amount,
min(from_time) as start_date,
max(to_time) as end_date,
sum(hours) as time
from `tabTimesheet Detail` where project = %s and docstatus = 1""", self.name, as_dict=1)[0]
from_expense_claim = frappe.db.sql("""select
sum(total_sanctioned_amount) as total_sanctioned_amount
from `tabExpense Claim` where project = %s and approval_status='Approved'
and docstatus = 1""",
self.name, as_dict=1)[0]
self.actual_start_date = from_time_sheet.start_date
self.actual_end_date = from_time_sheet.end_date
self.total_costing_amount = from_time_sheet.costing_amount
self.total_billing_amount = from_time_sheet.billing_amount
self.actual_time = from_time_sheet.time
self.total_expense_claim = from_expense_claim.total_sanctioned_amount
self.gross_margin = flt(self.total_billing_amount) - flt(self.total_costing_amount)
if self.total_billing_amount:
self.per_gross_margin = (self.gross_margin / flt(self.total_billing_amount)) *100
def update_purchase_costing(self):
total_purchase_cost = frappe.db.sql("""select sum(base_net_amount)
from `tabPurchase Invoice Item` where project = %s and docstatus=1""", self.name)
self.total_purchase_cost = total_purchase_cost and total_purchase_cost[0][0] or 0
def update_sales_costing(self):
total_sales_cost = frappe.db.sql("""select sum(grand_total)
from `tabSales Order` where project = %s and docstatus=1""", self.name)
self.total_sales_cost = total_sales_cost and total_sales_cost[0][0] or 0
def send_welcome_email(self):
url = get_url("/project/?name={0}".format(self.name))
messages = (
_("You have been invited to collaborate on the project: {0}".format(self.name)),
url,
_("Join")
)
content = """
<p>{0}.</p>
<p><a href="{1}">{2}</a></p>
"""
for user in self.users:
if user.welcome_email_sent==0:
frappe.sendmail(user.user, subject=_("Project Collaboration Invitation"), content=content.format(*messages))
user.welcome_email_sent=1
def on_update(self):
self.load_tasks()
self.sync_tasks()
def get_timeline_data(doctype, name):
'''Return timeline for attendance'''
return dict(frappe.db.sql('''select unix_timestamp(from_time), count(*)
from `tabTimesheet Detail` where project=%s
and from_time > date_sub(curdate(), interval 1 year)
and docstatus < 2
group by date(from_time)''', name))
def get_project_list(doctype, txt, filters, limit_start, limit_page_length=20):
return frappe.db.sql('''select distinct project.*
from tabProject project, `tabProject User` project_user
where
(project_user.user = %(user)s
and project_user.parent = project.name)
or project.owner = %(user)s
order by project.modified desc
limit {0}, {1}
'''.format(limit_start, limit_page_length),
{'user':frappe.session.user},
as_dict=True,
update={'doctype':'Project'})
def get_list_context(context=None):
return {
"show_sidebar": True,
"show_search": True,
'no_breadcrumbs': True,
"title": _("Projects"),
"get_list": get_project_list,
"row_template": "templates/includes/projects/project_row.html"
}
def get_users_for_project(doctype, txt, searchfield, start, page_len, filters):
conditions = []
return frappe.db.sql("""select name, concat_ws(' ', first_name, middle_name, last_name)
from `tabUser`
where enabled=1
and name not in ("Guest", "Administrator")
and ({key} like %(txt)s
or full_name like %(txt)s)
{fcond} {mcond}
order by
if(locate(%(_txt)s, name), locate(%(_txt)s, name), 99999),
if(locate(%(_txt)s, full_name), locate(%(_txt)s, full_name), 99999),
idx desc,
name, full_name
limit %(start)s, %(page_len)s""".format(**{
'key': searchfield,
'fcond': get_filters_cond(doctype, filters, conditions),
'mcond': get_match_cond(doctype)
}), {
'txt': "%%%s%%" % txt,
'_txt': txt.replace("%", ""),
'start': start,
'page_len': page_len
})
@frappe.whitelist()
def get_cost_center_name(project):
return frappe.db.get_value("Project", project, "cost_center")
|
RandyLowery/erpnext
|
erpnext/projects/doctype/project/project.py
|
Python
|
gpl-3.0
| 8,909 | 0.02851 |
from base import MediaFile
from fields import MediaFileField
from widgets import AdminMediaFileWidget
|
aino/aino-convert
|
convert/__init__.py
|
Python
|
bsd-3-clause
| 105 | 0 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Password Encryption',
'version': '1.1',
'author': ['OpenERP SA', 'FS3'],
'maintainer': 'OpenERP SA',
'website': 'http://www.openerp.com',
'category': 'Tools',
'description': """
Encrypted passwords
===================
Interaction with LDAP authentication:
-------------------------------------
This module is currently not compatible with the ``user_ldap`` module and
will disable LDAP authentication completely if installed at the same time.
""",
'depends': ['base'],
'data': [],
'auto_install': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jmesteve/saas3
|
openerp/addons/auth_crypt/__openerp__.py
|
Python
|
agpl-3.0
| 1,628 | 0 |
# encoding: utf-8
# module pyexpat
# from /usr/lib/python2.7/lib-dynload/pyexpat.x86_64-linux-gnu.so
# by generator 1.135
""" Python wrapper for Expat parser. """
# imports
import pyexpat.errors as errors # <module 'pyexpat.errors' (built-in)>
import pyexpat.model as model # <module 'pyexpat.model' (built-in)>
# Variables with simple values
EXPAT_VERSION = 'expat_2.1.0'
native_encoding = 'UTF-8'
XML_PARAM_ENTITY_PARSING_ALWAYS = 2
XML_PARAM_ENTITY_PARSING_NEVER = 0
XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE = 1
__version__ = '2.7.8'
# functions
def ErrorString(errno): # real signature unknown; restored from __doc__
"""
ErrorString(errno) -> string
Returns string error for given number.
"""
return ""
def ParserCreate(encoding=None, namespace_separator=None): # real signature unknown; restored from __doc__
"""
ParserCreate([encoding[, namespace_separator]]) -> parser
Return a new XML parser object.
"""
pass
# classes
from Exception import Exception
class ExpatError(Exception):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
error = ExpatError
from object import object
class XMLParserType(object):
""" XML parser """
def __init__(self, *args, **kwargs): # real signature unknown
pass
# variables with complex values
expat_CAPI = None # (!) real value is ''
features = [
(
'sizeof(XML_Char)',
1,
),
(
'sizeof(XML_LChar)',
1,
),
(
'XML_DTD',
0,
),
(
'XML_CONTEXT_BYTES',
1024,
),
(
'XML_NS',
0,
),
]
version_info = (
2,
1,
0,
)
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/pyexpat/__init__.py
|
Python
|
gpl-2.0
| 1,861 | 0.009672 |
"""Support for the AccuWeather service."""
from __future__ import annotations
from statistics import mean
from typing import Any, cast
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
Forecast,
WeatherEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_NAME,
SPEED_MILES_PER_HOUR,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.dt import utc_from_timestamp
from . import AccuWeatherDataUpdateCoordinator
from .const import (
API_IMPERIAL,
API_METRIC,
ATTR_FORECAST,
ATTRIBUTION,
CONDITION_CLASSES,
DOMAIN,
MANUFACTURER,
NAME,
)
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add a AccuWeather weather entity from a config_entry."""
name: str = entry.data[CONF_NAME]
coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
async_add_entities([AccuWeatherEntity(name, coordinator)])
class AccuWeatherEntity(CoordinatorEntity, WeatherEntity):
"""Define an AccuWeather entity."""
coordinator: AccuWeatherDataUpdateCoordinator
def __init__(
self, name: str, coordinator: AccuWeatherDataUpdateCoordinator
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._unit_system = API_METRIC if coordinator.is_metric else API_IMPERIAL
wind_speed_unit = self.coordinator.data["Wind"]["Speed"][self._unit_system][
"Unit"
]
if wind_speed_unit == "mi/h":
self._attr_wind_speed_unit = SPEED_MILES_PER_HOUR
else:
self._attr_wind_speed_unit = wind_speed_unit
self._attr_name = name
self._attr_unique_id = coordinator.location_key
self._attr_temperature_unit = (
TEMP_CELSIUS if coordinator.is_metric else TEMP_FAHRENHEIT
)
self._attr_attribution = ATTRIBUTION
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, coordinator.location_key)},
manufacturer=MANUFACTURER,
name=NAME,
# You don't need to provide specific details for the URL,
# so passing in _ characters is fine if the location key
# is correct
configuration_url="http://accuweather.com/en/"
f"_/_/{coordinator.location_key}/"
f"weather-forecast/{coordinator.location_key}/",
)
@property
def condition(self) -> str | None:
"""Return the current condition."""
try:
return [
k
for k, v in CONDITION_CLASSES.items()
if self.coordinator.data["WeatherIcon"] in v
][0]
except IndexError:
return None
@property
def temperature(self) -> float:
"""Return the temperature."""
return cast(
float, self.coordinator.data["Temperature"][self._unit_system]["Value"]
)
@property
def pressure(self) -> float:
"""Return the pressure."""
return cast(
float, self.coordinator.data["Pressure"][self._unit_system]["Value"]
)
@property
def humidity(self) -> int:
"""Return the humidity."""
return cast(int, self.coordinator.data["RelativeHumidity"])
@property
def wind_speed(self) -> float:
"""Return the wind speed."""
return cast(
float, self.coordinator.data["Wind"]["Speed"][self._unit_system]["Value"]
)
@property
def wind_bearing(self) -> int:
"""Return the wind bearing."""
return cast(int, self.coordinator.data["Wind"]["Direction"]["Degrees"])
@property
def visibility(self) -> float:
"""Return the visibility."""
return cast(
float, self.coordinator.data["Visibility"][self._unit_system]["Value"]
)
@property
def ozone(self) -> int | None:
"""Return the ozone level."""
# We only have ozone data for certain locations and only in the forecast data.
if self.coordinator.forecast and self.coordinator.data[ATTR_FORECAST][0].get(
"Ozone"
):
return cast(int, self.coordinator.data[ATTR_FORECAST][0]["Ozone"]["Value"])
return None
@property
def forecast(self) -> list[Forecast] | None:
"""Return the forecast array."""
if not self.coordinator.forecast:
return None
# remap keys from library to keys understood by the weather component
return [
{
ATTR_FORECAST_TIME: utc_from_timestamp(item["EpochDate"]).isoformat(),
ATTR_FORECAST_TEMP: item["TemperatureMax"]["Value"],
ATTR_FORECAST_TEMP_LOW: item["TemperatureMin"]["Value"],
ATTR_FORECAST_PRECIPITATION: self._calc_precipitation(item),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: round(
mean(
[
item["PrecipitationProbabilityDay"],
item["PrecipitationProbabilityNight"],
]
)
),
ATTR_FORECAST_WIND_SPEED: item["WindDay"]["Speed"]["Value"],
ATTR_FORECAST_WIND_BEARING: item["WindDay"]["Direction"]["Degrees"],
ATTR_FORECAST_CONDITION: [
k for k, v in CONDITION_CLASSES.items() if item["IconDay"] in v
][0],
}
for item in self.coordinator.data[ATTR_FORECAST]
]
@staticmethod
def _calc_precipitation(day: dict[str, Any]) -> float:
"""Return sum of the precipitation."""
precip_sum = 0
precip_types = ["Rain", "Snow", "Ice"]
for precip in precip_types:
precip_sum = sum(
[
precip_sum,
day[f"{precip}Day"]["Value"],
day[f"{precip}Night"]["Value"],
]
)
return round(precip_sum, 1)
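# --- Hedged illustration (added; not part of the integration above) ---
# Both condition() and the forecast remap resolve an AccuWeather icon code through
# CONDITION_CLASSES, a mapping from a Home Assistant condition string to the icon codes
# grouped under it. The toy table below is a made-up stand-in used only to show the
# lookup shape; the real mapping lives in this integration's const module.
_EXAMPLE_CONDITION_CLASSES = {"sunny": [1, 2], "cloudy": [6, 7, 8]}

def _example_condition(icon):
    matches = [k for k, v in _EXAMPLE_CONDITION_CLASSES.items() if icon in v]
    return matches[0] if matches else None  # None mirrors the IndexError fallback above

assert _example_condition(7) == "cloudy"
assert _example_condition(99) is None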
|
rohitranjan1991/home-assistant
|
homeassistant/components/accuweather/weather.py
|
Python
|
mit
| 6,728 | 0.002081 |
#!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
if count < 10:
return 'Number of donuts: ' + str(count)
else:
return 'Number of donuts: many'
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
if len(s) >= 2:
return s[0] + s[1] + s[-2] + s[-1]
else:
return ''
# C. fix_start
# Given a string s, return a string
# where all occurrences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
first_char = s[0]
rest = s[1:]
return first_char + rest.replace(first_char,'*')
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', 'pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
first_a = a[:2]
rest_a = a[2:]
first_b = b[:2]
rest_b = b[2:]
return first_b + rest_a + ' ' + first_a + rest_b
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print 'donuts'
# Each line calls donuts, compares its result to the expected for that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print
print 'both_ends'
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print
print 'fix_start'
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print
print 'mix_up'
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
|
kghatala/googlePythonCourse
|
basic/string1.py
|
Python
|
apache-2.0
| 3,654 | 0.011768 |
def main():
#init an array named a
a = list()
a = []
b = [1,'1',[1,2]]
#Get the size of a list
a_size = len(a)
#how to check if a list is empty
if (a):
print ("not empty")
else:
print ("empty")
index = 0
a = ['a','b','c']
print (a[index])
a.append('d')
a.extend(['e'])
print ('After append a, extend [e]')
print (a)
a.insert(2,'bb')
print ('After insert bb at 2')
print (a)
a.insert(0, 'a0')
print ('After insert a0 at 0')
print (a)
    #Find the index of an item in an array
    answer_1 = a.index('a')
    answer_0 = a.index('a0')
    print ('use a.index(item) to find the index only for the first item')
    #list.pop() returns the last item in the list and removes it
    print ('Before a.pop(), a = ', a)
    print ('a.pop() = ', a.pop())
    print ('After a.pop(), a = ', a)
    #Remove an item
    a.remove('a0')
    print ('After remove(a0), a = ', a)
main()
|
jeremykid/FunAlgorithm
|
python_practice/data_structure/array/array.py
|
Python
|
mit
| 869 | 0.073648 |
# # product
import logging
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from dojo.utils import add_breadcrumb
from dojo.forms import ToolTypeForm
from dojo.models import Tool_Type
logger = logging.getLogger(__name__)
@user_passes_test(lambda u: u.is_staff)
def new_tool_type(request):
if request.method == 'POST':
tform = ToolTypeForm(request.POST, instance=Tool_Type())
if tform.is_valid():
tform.save()
messages.add_message(request,
messages.SUCCESS,
'Tool Type Configuration Successfully Created.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('tool_type', ))
else:
tform = ToolTypeForm()
add_breadcrumb(title="New Tool Type Configuration", top_level=False, request=request)
return render(request, 'dojo/new_tool_type.html',
{'tform': tform})
@user_passes_test(lambda u: u.is_staff)
def edit_tool_type(request, ttid):
tool_type = Tool_Type.objects.get(pk=ttid)
if request.method == 'POST':
tform = ToolTypeForm(request.POST, instance=tool_type)
if tform.is_valid():
tform.save()
messages.add_message(request,
messages.SUCCESS,
'Tool Type Configuration Successfully Updated.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('tool_type', ))
else:
tform = ToolTypeForm(instance=tool_type)
add_breadcrumb(title="Edit Tool Type Configuration", top_level=False, request=request)
return render(request,
'dojo/edit_tool_type.html',
{
'tform': tform,
})
@user_passes_test(lambda u: u.is_staff)
def tool_type(request):
confs = Tool_Type.objects.all().order_by('name')
add_breadcrumb(title="Tool Type List", top_level=not len(request.GET), request=request)
return render(request,
'dojo/tool_type.html',
{'confs': confs,
})
|
rackerlabs/django-DefectDojo
|
dojo/tool_type/views.py
|
Python
|
bsd-3-clause
| 2,344 | 0.002133 |
# Copyright (C) 2009, Brad Beattie
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pygraph.algorithms.accessibility import accessibility, mutual_accessibility
from pygraph.classes.digraph import digraph
from pygraph.algorithms.minmax import maximum_flow
from pyvotecore.condorcet import CondorcetHelper
from pyvotecore.common_functions import matching_keys, unique_permutations
PREFERRED_LESS = 1
PREFERRED_SAME = 2
PREFERRED_MORE = 3
STRENGTH_TOLERANCE = 0.0000000001
STRENGTH_THRESHOLD = 0.1
NODE_SINK = -1
NODE_SOURCE = -2
# This class implements the Schulze Method (aka the beatpath method)
class SchulzeHelper(CondorcetHelper):
def condorcet_completion_method(self):
self.schwartz_set_heuristic()
def schwartz_set_heuristic(self):
# Iterate through using the Schwartz set heuristic
self.actions = []
while len(self.graph.edges()) > 0:
access = accessibility(self.graph)
mutual_access = mutual_accessibility(self.graph)
candidates_to_remove = set()
for candidate in self.graph.nodes():
candidates_to_remove |= (set(access[candidate]) - set(mutual_access[candidate]))
# Remove nodes at the end of non-cycle paths
if len(candidates_to_remove) > 0:
self.actions.append({'nodes': candidates_to_remove})
for candidate in candidates_to_remove:
self.graph.del_node(candidate)
# If none exist, remove the weakest edges
else:
edge_weights = self.edge_weights(self.graph)
self.actions.append({'edges': matching_keys(edge_weights, min(edge_weights.values()))})
for edge in self.actions[-1]["edges"]:
self.graph.del_edge(edge)
self.graph_winner()
def generate_vote_management_graph(self):
self.vote_management_graph = digraph()
self.vote_management_graph.add_nodes(self.completed_patterns)
self.vote_management_graph.del_node(tuple([PREFERRED_MORE] * self.required_winners))
self.pattern_nodes = self.vote_management_graph.nodes()
self.vote_management_graph.add_nodes([NODE_SOURCE, NODE_SINK])
for pattern_node in self.pattern_nodes:
self.vote_management_graph.add_edge((NODE_SOURCE, pattern_node))
for i in range(self.required_winners):
self.vote_management_graph.add_node(i)
for pattern_node in self.pattern_nodes:
for i in range(self.required_winners):
if pattern_node[i] == 1:
self.vote_management_graph.add_edge((pattern_node, i))
for i in range(self.required_winners):
self.vote_management_graph.add_edge((i, NODE_SINK))
# Generates a list of all patterns that do not contain indifference
def generate_completed_patterns(self):
self.completed_patterns = []
for i in range(0, self.required_winners + 1):
for pattern in unique_permutations(
[PREFERRED_LESS] * (self.required_winners - i)
+ [PREFERRED_MORE] * (i)
):
self.completed_patterns.append(tuple(pattern))
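    # Hedged worked example (added for clarity, not part of the original file): with
    # required_winners == 2 the loop above combines PREFERRED_LESS (1) and PREFERRED_MORE (3)
    # only, so no pattern contains PREFERRED_SAME:
    #   i=0 -> (1, 1)
    #   i=1 -> (1, 3) and (3, 1), in the order unique_permutations yields them
    #   i=2 -> (3, 3)
    # leaving self.completed_patterns == [(1, 1), (1, 3), (3, 1), (3, 3)] (up to that ordering).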
def proportional_completion(self, candidate, other_candidates):
profile = dict(zip(self.completed_patterns, [0] * len(self.completed_patterns)))
# Obtain an initial tally from the ballots
for ballot in self.ballots:
pattern = []
for other_candidate in other_candidates:
if ballot["ballot"][candidate] < ballot["ballot"][other_candidate]:
pattern.append(PREFERRED_LESS)
elif ballot["ballot"][candidate] == ballot["ballot"][other_candidate]:
pattern.append(PREFERRED_SAME)
else:
pattern.append(PREFERRED_MORE)
pattern = tuple(pattern)
if pattern not in profile:
profile[pattern] = 0.0
profile[pattern] += ballot["count"]
weight_sum = sum(profile.values())
# Peel off patterns with indifference (from the most to the least) and apply proportional completion to them
for pattern in sorted(profile.keys(), key=lambda pattern: pattern.count(PREFERRED_SAME), reverse=True):
if pattern.count(PREFERRED_SAME) == 0:
break
self.proportional_completion_round(pattern, profile)
try:
assert round(weight_sum, 5) == round(sum(profile.values()), 5)
except:
print ("Proportional completion broke (went from %s to %s)" % (weight_sum, sum(profile.values())))
return profile
def proportional_completion_round(self, completion_pattern, profile):
# Remove pattern that contains indifference
weight_sum = sum(profile.values())
completion_pattern_weight = profile[completion_pattern]
del profile[completion_pattern]
patterns_to_consider = {}
for pattern in profile.keys():
append = False
append_target = []
for i in range(len(completion_pattern)):
if completion_pattern[i] == PREFERRED_SAME:
append_target.append(pattern[i])
if pattern[i] != PREFERRED_SAME:
append = True
else:
append_target.append(completion_pattern[i])
append_target = tuple(append_target)
if append is True and append_target in profile:
append_target = tuple(append_target)
if append_target not in patterns_to_consider:
patterns_to_consider[append_target] = set()
patterns_to_consider[append_target].add(pattern)
denominator = 0
for (append_target, patterns) in patterns_to_consider.items():
for pattern in patterns:
denominator += profile[pattern]
# Reweight the remaining items
for pattern in patterns_to_consider.keys():
if denominator == 0:
profile[pattern] += completion_pattern_weight / len(patterns_to_consider)
else:
if pattern not in profile:
profile[pattern] = 0
profile[pattern] += sum(profile[considered_pattern] for considered_pattern in patterns_to_consider[pattern]) * completion_pattern_weight / denominator
try:
assert round(weight_sum, 5) == round(sum(profile.values()), 5)
except:
print ("Proportional completion round broke (went from %s to %s)" % (weight_sum, sum(profile.values())))
return profile
# This method converts the voter profile into a capacity graph and iterates
# on the maximum flow using the Edmonds Karp algorithm. The end result is
# the limit of the strength of the voter management as per Markus Schulze's
# Calcul02.pdf (draft, 28 March 2008, abstract: "In this paper we illustrate
# the calculation of the strengths of the vote managements.").
def strength_of_vote_management(self, voter_profile):
# Initialize the graph weights
for pattern in self.pattern_nodes:
self.vote_management_graph.set_edge_weight((NODE_SOURCE, pattern), voter_profile[pattern])
for i in range(self.required_winners):
if pattern[i] == 1:
self.vote_management_graph.set_edge_weight((pattern, i), voter_profile[pattern])
# Iterate towards the limit
r = [(float(sum(voter_profile.values())) - voter_profile[tuple([PREFERRED_MORE] * self.required_winners)]) / self.required_winners]
while len(r) < 2 or r[-2] - r[-1] > STRENGTH_TOLERANCE:
for i in range(self.required_winners):
self.vote_management_graph.set_edge_weight((i, NODE_SINK), r[-1])
max_flow = maximum_flow(self.vote_management_graph, NODE_SOURCE, NODE_SINK)
sink_sum = sum(v for k, v in max_flow[0].iteritems() if k[1] == NODE_SINK)
r.append(sink_sum / self.required_winners)
# We expect strengths to be above a specified threshold
if sink_sum < STRENGTH_THRESHOLD:
return 0
# Return the final max flow
return round(r[-1], 9)
|
R-daneel-olivaw/mutation-tolerance-voting
|
pyvotecore/schulze_helper.py
|
Python
|
lgpl-3.0
| 8,939 | 0.002685 |
# ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import HasTraits, Str, List, Instance
from traitsui.api import View, UItem, Item, TableEditor
from traitsui.table_column import ObjectColumn
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.core.ui.enum_editor import myEnumEditor
class Conflict(HasTraits):
queue_name = Str
runspec = Instance('pychron.experiment.automated_run.spec.AutomatedRunSpec')
identifier = Str
position = Str
repository_identifier = Str
repository_ids = Str
available_ids = List
class ConflictResolver(HasTraits):
conflicts = List
available_ids = List
def apply(self):
for c in self.conflicts:
c.runspec.repository_identifier = c.repository_identifier
def add_conflicts(self, qname, cs):
for ai, exps in cs:
self.conflicts.append(Conflict(queue_name=qname,
runspec=ai,
position=ai.position,
repository_identifier=ai.repository_identifier,
identifier=ai.identifier,
repository_ids=','.join(exps),
available_ids=self.available_ids))
def traits_view(self):
cols = [ObjectColumn(name='queue_name', editable=False),
ObjectColumn(name='identifier', editable=False),
ObjectColumn(name='position', editable=False),
ObjectColumn(name='repository_identifier',
label='Assigned Repository',
tooltip='Repository assigned to this analysis in the Experiment Queue',
editor=myEnumEditor(name='available_ids')),
ObjectColumn(name='repository_ids',
label='Existing Repositories',
tooltip='Set of repositories that already contain this L#',
editable=False)]
v = okcancel_view(UItem('conflicts', editor=TableEditor(columns=cols)),
title='Resolve Repository Conflicts')
return v
if __name__ == '__main__':
def main():
from pychron.paths import paths
paths.build('_dev')
from pychron.core.helpers.logger_setup import logging_setup
from pychron.experiment.automated_run.spec import AutomatedRunSpec
logging_setup('dvcdb')
from pychron.dvc.dvc_database import DVCDatabase
from itertools import groupby
db = DVCDatabase(kind='mysql', host='localhost', username='root', name='pychronmeta', password='Argon')
db.connect()
identifiers = ['63290', '63291']
runs = [AutomatedRunSpec(identifier='63290', repository_identifier='Cather_McIntoshd')]
cr = ConflictResolver()
experiments = {}
cr.available_ids = db.get_repository_identifiers()
eas = db.get_associated_repositories(identifiers)
for idn, exps in groupby(eas, key=lambda x: x[1]):
experiments[idn] = [e[0] for e in exps]
conflicts = []
for ai in runs:
identifier = ai.identifier
es = experiments[identifier]
if ai.repository_identifier not in es:
conflicts.append((ai, es))
if conflicts:
cr.add_conflicts('Foo', conflicts)
if cr.conflicts:
info = cr.edit_traits(kind='livemodal')
if info.result:
cr.apply()
# for ci in runs:
# print ci.identifier, ci.experiment_identifier
from traits.api import Button
class Demo(HasTraits):
test = Button
def traits_view(self):
return View(Item('test'))
def _test_fired(self):
main()
d = Demo()
d.configure_traits()
# ============= EOF =============================================
|
UManPychron/pychron
|
pychron/experiment/conflict_resolver.py
|
Python
|
apache-2.0
| 4,802 | 0.001874 |
# flake8: noqa
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from ..compat import USER_MODEL
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Review'
db.create_table(u'review_review', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[USER_MODEL['orm_label']], null=True, blank=True)),
('content', self.gf('django.db.models.fields.TextField')(max_length=1024, blank=True)),
('language', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'review', ['Review'])
# Adding model 'ReviewExtraInfo'
db.create_table(u'review_reviewextrainfo', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('type', self.gf('django.db.models.fields.CharField')(max_length=256)),
('review', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.Review'])),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal(u'review', ['ReviewExtraInfo'])
# Adding model 'RatingCategory'
db.create_table(u'review_ratingcategory', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal(u'review', ['RatingCategory'])
# Adding model 'RatingCategoryTranslation'
db.create_table(u'review_ratingcategorytranslation', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=256)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.RatingCategory'])),
('language', self.gf('django.db.models.fields.CharField')(max_length=2)),
))
db.send_create_signal(u'review', ['RatingCategoryTranslation'])
# Adding model 'Rating'
db.create_table(u'review_rating', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('value', self.gf('django.db.models.fields.CharField')(max_length=20)),
('review', self.gf('django.db.models.fields.related.ForeignKey')(related_name='ratings', to=orm['review.Review'])),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.RatingCategory'])),
))
db.send_create_signal(u'review', ['Rating'])
def backwards(self, orm):
# Deleting model 'Review'
db.delete_table(u'review_review')
# Deleting model 'ReviewExtraInfo'
db.delete_table(u'review_reviewextrainfo')
# Deleting model 'RatingCategory'
db.delete_table(u'review_ratingcategory')
# Deleting model 'RatingCategoryTranslation'
db.delete_table(u'review_ratingcategorytranslation')
# Deleting model 'Rating'
db.delete_table(u'review_rating')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
USER_MODEL['model_label']: {
'Meta': {'object_name': USER_MODEL['object_name']},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'review.rating': {
'Meta': {'ordering': "['category', 'review']", 'object_name': 'Rating'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['review.RatingCategory']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ratings'", 'to': u"orm['review.Review']"}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'review.ratingcategory': {
'Meta': {'object_name': 'RatingCategory'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'review.ratingcategorytranslation': {
'Meta': {'object_name': 'RatingCategoryTranslation'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['review.RatingCategory']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'review.review': {
'Meta': {'ordering': "['-creation_date']", 'object_name': 'Review'},
'content': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % USER_MODEL['orm_label'], 'null': 'True', 'blank': 'True'})
},
u'review.reviewextrainfo': {
'Meta': {'ordering': "['type']", 'object_name': 'ReviewExtraInfo'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['review.Review']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'user_media.usermediaimage': {
'Meta': {'object_name': 'UserMediaImage'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % USER_MODEL['orm_label']})
}
}
complete_apps = ['review']
|
bitmazk/django-review
|
review/south_migrations/0001_initial.py
|
Python
|
mit
| 10,388 | 0.007605 |
from .plot_widget import PlotWidget
from .filter_popup import FilterPopup
from .filterable_kw_list_model import FilterableKwListModel
from .data_type_keys_list_model import DataTypeKeysListModel
from .data_type_proxy_model import DataTypeProxyModel
from .data_type_keys_widget import DataTypeKeysWidget
from .plot_case_model import PlotCaseModel
from .plot_case_selection_widget import CaseSelectionWidget
from .color_chooser import ColorBox
from .style_chooser import StyleChooser
from .plot_window import PlotWindow
from .plot_tool import PlotTool
|
joakim-hove/ert
|
ert_gui/tools/plot/__init__.py
|
Python
|
gpl-3.0
| 555 | 0 |
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from random import randint
from time import sleep
import brain
import game
drv = webdriver.Firefox()
drv.get('http://gabrielecirulli.github.io/2048/')
container = drv.find_element_by_class_name('tile-container')
retry = drv.find_element_by_class_name('retry-button')
board = [[None, None, None, None],
[None, None, None, None],
[None, None, None, None],
[None, None, None, None]]
def move_up():
container.send_keys(Keys.UP)
def move_down():
container.send_keys(Keys.DOWN)
def move_left():
container.send_keys(Keys.LEFT)
def move_right():
container.send_keys(Keys.RIGHT)
def zero_board():
global board
board = [[None, None, None, None],
[None, None, None, None],
[None, None, None, None],
[None, None, None, None]]
def update_board():
global board
sleep(0.1)
tiles = container.find_elements_by_class_name('tile')
tiledata = list(map(lambda x: x.get_attribute('class').split(), tiles))
zero_board()
for tile in tiledata:
value = tile[1].split('-')[1]
pos = tile[2].split('-')[-2:]
board[int(pos[1]) - 1][int(pos[0]) - 1] = int(value)
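# Hedged note (added): update_board assumes the 2048 page renders each tile with a class
# string shaped like 'tile tile-<value> tile-position-<col>-<row>', e.g.
# 'tile tile-2 tile-position-3-1'. For that example tile[1].split('-')[1] -> '2' and
# tile[2].split('-')[-2:] -> ['3', '1'], so board[0][2] is set to 2
# (row index pos[1]-1, column index pos[0]-1). The exact class format is an assumption
# about the page, not something guaranteed by this script.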
def pick_move():
global board
g = game.Game(board)
predictions = brain.predict_next_board(g)
scores = []
for p in predictions[1:]:
print(p, len(p))
score = brain.weight_boards(predictions[0], p)
scores.append(score)
return brain.choose(scores)
while not retry.is_displayed():
update_board()
pick_move()()
sleep(2)
update_board()
for b in board:
print(b)
sleep(2)
print("Score: ", drv.find_element_by_class_name('score-container').text.splitlines()[0])
print("Game Over")
|
munk/play2048
|
tfe.py
|
Python
|
mit
| 1,792 | 0.004464 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2004-2011 Pexego Sistemas Informáticos. All Rights Reserved
# $Omar Castiñeira Saavedra$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
|
diagramsoftware/l10n-spain
|
l10n_es_igic/data/__init__.py
|
Python
|
agpl-3.0
| 985 | 0 |
#-*- coding: utf-8 -*-
# collections.py
# Define various kind of collections
#
# Copyright (C) 2016 Jakub Kadlcik
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
from __future__ import absolute_import
from operator import attrgetter, methodcaller
from psutil import NoSuchProcess
class Collection(list):
def replace_values(self, attribute, source_value, required_value):
for app in self:
if getattr(app, attribute) == source_value:
app.update(attribute, required_value)
def sorted(self, attribute):
self.replace_values(attribute, None, "")
try:
return sorted(self, key=methodcaller(attribute))
except TypeError:
return sorted(self, key=attrgetter(attribute))
class ApplicationsCollection(Collection):
def with_helpers(self):
applications = filter(lambda app: app.helper, self)
return ApplicationsCollection(applications)
def without_helpers(self):
applications = filter(lambda app: not app.helper, self)
return ApplicationsCollection(applications)
def exclude_types(self, app_types):
"""app_types -- see Applications.TYPES"""
applications = filter(lambda app: app.type not in app_types, self)
return ApplicationsCollection(applications)
def filter_types(self, app_types):
"""app_types -- see Applications.TYPES"""
applications = filter(lambda app: app.type in app_types, self)
return ApplicationsCollection(applications)
def count_type(self, app_type):
count = 0
for application in self:
if application.type == app_type:
count += 1
return count
class ProcessesCollection(Collection):
def owned_by(self, user):
if not user:
return self
return self.filtered(lambda process: process.username() == user)
def newer_than(self, timestamp):
return self.filtered(lambda process: process.create_time() >= timestamp)
def unique(self):
unique = set()
for process in self:
try: unique.add(process)
except NoSuchProcess: pass
return ProcessesCollection(unique)
def filtered(self, function):
processes = ProcessesCollection()
for process in self:
try:
if function(process):
processes.append(process)
except NoSuchProcess: pass
return processes
class AffectedProcessesCollection(ProcessesCollection):
def update(self, iterable):
for x in iterable:
if x in self:
self[self.index(x)].update(x)
else:
self.append(x)
class PackagesCollection(Collection):
_package_manager = None
def __init__(self, *args):
list.__init__(self, *args)
def intersection(self, packages):
if packages is not None:
return PackagesCollection(set(packages).intersection(self))
return self
@property
def files(self):
files = []
for package in self:
files.extend(self._package_manager.package_files(package.name))
return set(files)
def unique_newest(self):
packages = {}
for p in self:
if p.name in packages:
if packages[p.name].modified > p.modified:
continue
packages[p.name] = p
return PackagesCollection(packages.values())
|
sean797/tracer
|
tracer/resources/collections.py
|
Python
|
gpl-2.0
| 3,632 | 0.022577 |
"""
Analytical template tags and filters.
"""
from __future__ import absolute_import
import logging
from django import template
from django.template import Node, TemplateSyntaxError
from django.utils.importlib import import_module
from templatetags.utils import AnalyticalException
TAG_LOCATIONS = ['head_top', 'head_bottom', 'body_top', 'body_bottom']
TAG_POSITIONS = ['first', None, 'last']
TAG_MODULES = [
'storefront.clicky',
'storefront.mixpanel',
'storefront.google_analytics',
]
'''
'storefront.olark',
'analytical.chartbeat',
'analytical.crazy_egg',
'analytical.gosquared',
'analytical.hubspot',
'analytical.kiss_insights',
'analytical.kiss_metrics',
'analytical.optimizely',
'analytical.performable',
'analytical.reinvigorate',
'analytical.woopra',
'''
logger = logging.getLogger(__name__)
register = template.Library()
def _location_tag(location):
def analytical_tag(parser, token):
bits = token.split_contents()
if len(bits) > 1:
raise TemplateSyntaxError("'%s' tag takes no arguments" % bits[0])
return AnalyticalNode(location)
return analytical_tag
for loc in TAG_LOCATIONS:
register.tag('analytical_%s' % loc, _location_tag(loc))
class AnalyticalNode(Node):
def __init__(self, location):
self.nodes = [node_cls() for node_cls in template_nodes[location]]
def render(self, context):
return "".join([node.render(context) for node in self.nodes])
def _load_template_nodes():
template_nodes = dict((l, dict((p, []) for p in TAG_POSITIONS))
for l in TAG_LOCATIONS)
def add_node_cls(location, node, position=None):
template_nodes[location][position].append(node)
for path in TAG_MODULES:
module = _import_tag_module(path)
try:
module.contribute_to_analytical(add_node_cls)
except AnalyticalException, e:
logger.debug("not loading tags from '%s': %s", path, e)
for location in TAG_LOCATIONS:
template_nodes[location] = sum((template_nodes[location][p]
for p in TAG_POSITIONS), [])
return template_nodes
def _import_tag_module(path):
app_name, lib_name = path.rsplit('.', 1)
return import_module("%s.templatetags.%s" % (app_name, lib_name))
template_nodes = _load_template_nodes()
|
linkedin/indextank-service
|
storefront/templatetags/analytical.py
|
Python
|
apache-2.0
| 2,352 | 0.002976 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Set User.last_login automatically in the DB
Revision ID: a65114e48d6f
Revises: 104b4c56862b
Create Date: 2016-06-11 00:28:39.176496
"""
from alembic import op
import sqlalchemy as sa
revision = 'a65114e48d6f'
down_revision = '104b4c56862b'
def upgrade():
op.alter_column(
"accounts_user",
"last_login",
server_default=sa.func.now(),
)
def downgrade():
op.alter_column("accounts_user", "last_login", server_default=None)
|
alex/warehouse
|
warehouse/migrations/versions/a65114e48d6f_set_user_last_login_automatically_in_.py
|
Python
|
apache-2.0
| 1,008 | 0 |
from man import comm
from . import NogginConstants as Constants
from . import GameStates
from .util import FSA
from . import Leds
TEAM_BLUE = 0
TEAM_RED = 1
class GameController(FSA.FSA):
def __init__(self, brain):
FSA.FSA.__init__(self,brain)
self.brain = brain
self.gc = brain.comm.gc
#jf- self.setTimeFunction(self.brain.nao.getSimulatedTime)
self.addStates(GameStates)
self.currentState = 'gameInitial'
self.setName('GameController')
self.setPrintStateChanges(True)
self.stateChangeColor = 'cyan'
self.setPrintFunction(self.brain.out.printf)
self.timeRemaining = self.gc.timeRemaining()
self.kickOff = self.gc.kickOff
self.penaltyShots = False
self.ownKickOff = False
def run(self):
self.setGCLEDS()
self.ownKickOff = (self.gc.kickOff == self.brain.my.teamColor)
if self.gc.secondaryState == comm.STATE2_PENALTYSHOOT:
if self.gc.state == comm.STATE_INITIAL:
self.switchTo('penaltyShotsGameInitial')
elif self.gc.state == comm.STATE_SET:
self.switchTo('penaltyShotsGameSet')
elif self.gc.state == comm.STATE_READY:
self.switchTo('penaltyShotsGameReady')
elif self.gc.state == comm.STATE_PLAYING:
if self.gc.penalty != comm.PENALTY_NONE:
self.switchTo('penaltyShotsGamePenalized')
else:
self.switchTo("penaltyShotsGamePlaying")
elif self.gc.state == comm.STATE_FINISHED:
self.switchTo('penaltyShotsGameFinished')
elif self.gc.secondaryState == comm.STATE2_NORMAL:
if self.gc.state == comm.STATE_INITIAL:
self.switchTo('gameInitial')
elif self.gc.state == comm.STATE_SET:
self.switchTo('gameSet')
elif self.gc.state == comm.STATE_READY:
self.switchTo('gameReady')
elif self.gc.state == comm.STATE_PLAYING:
if self.gc.penalty != comm.PENALTY_NONE:
self.switchTo("gamePenalized")
else:
self.switchTo("gamePlaying")
elif self.gc.state == comm.STATE_FINISHED:
self.switchTo('gameFinished')
self.timeRemaining = self.gc.timeRemaining()
#Set team color
if self.gc.color != self.brain.my.teamColor:
self.brain.my.teamColor = self.gc.color
self.brain.makeFieldObjectsRelative()
self.printf("Switching team color to " +
Constants.teamColorDict[self.brain.my.teamColor])
if self.gc.kickOff != self.kickOff:
self.printf("Switching kickoff to team #%g"%self.gc.kickOff +
" from team #%g"% self.kickOff)
self.kickOff = self.gc.kickOff
FSA.FSA.run(self)
def timeRemaining(self):
return self.timeRemaining()
def timeSincePlay(self):
return Constants.LENGTH_OF_HALF - self.timeRemaining
def getScoreDifferential(self):
'''
negative when we're losing
'''
return self.brain.gameController.gc.teams(self.brain.my.teamColor)[1] -\
self.brain.gameController.gc.teams((self.brain.my.teamColor+1)%2)[1]
def setGCLEDS(self):
'''
Method to set the chest and feet according to the current
GC states and infos
'''
####### KICKOFF ######
if (self.gc.kickOff == self.gc.team and
(self.gc.state == comm.STATE_INITIAL or
self.gc.state == comm.STATE_READY or
self.gc.state == comm.STATE_PLAYING)):
self.brain.leds.executeLeds(Leds.HAVE_KICKOFF_LEDS)
else:
self.brain.leds.executeLeds(Leds.NO_KICKOFF_LEDS)
###### TEAM COLOR ######
if self.gc.color == TEAM_BLUE:
self.brain.leds.executeLeds(Leds.TEAM_BLUE_LEDS)
else:
self.brain.leds.executeLeds(Leds.TEAM_RED_LEDS)
###### GAME STATE ######
if self.gc.state == comm.STATE_INITIAL:
self.brain.leds.executeLeds(Leds.STATE_INITIAL_LEDS)
elif self.gc.state == comm.STATE_SET:
self.brain.leds.executeLeds(Leds.STATE_SET_LEDS)
elif self.gc.state == comm.STATE_READY:
self.brain.leds.executeLeds(Leds.STATE_READY_LEDS)
elif self.gc.state == comm.STATE_PLAYING:
if self.gc.penalty != comm.PENALTY_NONE:
self.brain.leds.executeLeds(Leds.STATE_PENALIZED_LEDS)
else:
self.brain.leds.executeLeds(Leds.STATE_PLAYING_LEDS)
elif self.gc.state == comm.STATE_FINISHED:
self.brain.leds.executeLeds(Leds.STATE_FINISHED_LEDS)
|
northern-bites/nao-man
|
noggin/GameController.py
|
Python
|
gpl-3.0
| 4,831 | 0.002484 |
###############################################################################
##
## Copyright (C) 2013-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
__all__ = ['Hello',
'Welcome',
'Abort',
'Challenge',
'Authenticate',
'Goodbye',
           'Heartbeat',
'Error',
'Publish',
'Published',
'Subscribe',
'Subscribed',
'Unsubscribe',
'Unsubscribed',
'Event',
'Call',
'Cancel',
'Result',
'Register',
'Registered',
'Unregister',
'Unregistered',
'Invocation',
'Interrupt',
'Yield']
import re
import six
import autobahn
from autobahn import util
from autobahn.wamp.exception import ProtocolError
from autobahn.wamp.interfaces import IMessage
from autobahn.wamp.role import ROLE_NAME_TO_CLASS
## strict URI check allowing empty URI components
_URI_PAT_STRICT = re.compile(r"^(([0-9a-z_]{2,}\.)|\.)*([0-9a-z_]{2,})?$")
## loose URI check allowing empty URI components
_URI_PAT_LOOSE = re.compile(r"^(([^\s\.#]+\.)|\.)*([^\s\.#]+)?$")
## strict URI check disallowing empty URI components
_URI_PAT_STRICT_NON_EMPTY = re.compile(r"^([0-9a-z_]{2,}\.)*([0-9a-z_]{2,})?$")
## loose URI check disallowing empty URI components
_URI_PAT_LOOSE_NON_EMPTY = re.compile(r"^([^\s\.#]+\.)*([^\s\.#]+)?$")
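## Hedged examples (added) of how the four patterns above behave:
## 'com.myapp.topic1' matches all four (lowercase components, each at least two chars);
## 'Com.MyApp.topic1' fails both strict checks (uppercase) but passes both loose ones;
## 'com..topic1' (an empty middle component) is accepted only by the two patterns that
## allow empty URI components and rejected by the two *_NON_EMPTY variants.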
def check_or_raise_uri(value, message):
if type(value) != six.text_type:
raise ProtocolError("{}: invalid type {} for URI".format(message, type(value)))
if not _URI_PAT_LOOSE.match(value):
raise ProtocolError("{}: invalid value '{}' for URI".format(message, value))
return value
def check_or_raise_id(value, message):
if type(value) not in six.integer_types:
raise ProtocolError("{}: invalid type {} for ID".format(message, type(value)))
if value < 0 or value > 9007199254740992: # 2**53
raise ProtocolError("{}: invalid value {} for ID".format(message, value))
return value
def check_or_raise_extra(value, message):
if type(value) != dict:
raise ProtocolError("{}: invalid type {}".format(message, type(value)))
for k in value.keys():
if type(k) != six.text_type:
raise ProtocolError("{}: invalid type {} for key '{}'".format(message, type(k), k))
return value
class Message(util.EqualityMixin):
"""
WAMP message base class. This is not supposed to be instantiated.
"""
def __init__(self):
"""
Base constructor.
"""
## serialization cache: mapping from ISerializer instances
## to serialized bytes
##
self._serialized = {}
def uncache(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.uncache`
"""
self._serialized = {}
def serialize(self, serializer):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.serialize`
"""
## only serialize if not cached ..
if not serializer in self._serialized:
self._serialized[serializer] = serializer.serialize(self.marshal())
return self._serialized[serializer]
IMessage.register(Message)
class Hello(Message):
"""
A WAMP `HELLO` message.
Format: `[HELLO, Realm|uri, Details|dict]`
"""
MESSAGE_TYPE = 1
"""
The WAMP message code for this type of message.
"""
def __init__(self, realm, roles, authmethods = None):
"""
Message constructor.
:param realm: The URI of the WAMP realm to join.
:type realm: str
:param roles: The WAMP roles to announce.
:type roles: list of :class:`autobahn.wamp.role.RoleFeatures`
"""
assert(type(realm) == six.text_type)
assert(type(roles) == list)
for role in roles:
assert(isinstance(role, autobahn.wamp.role.RoleFeatures))
if authmethods:
assert(type(authmethods) == list)
for authmethod in authmethods:
assert(type(authmethod) == six.text_type)
Message.__init__(self)
self.realm = realm
self.roles = roles
self.authmethods = authmethods
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Hello.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for HELLO".format(len(wmsg)))
realm = check_or_raise_uri(wmsg[1], "'realm' in HELLO")
details = check_or_raise_extra(wmsg[2], "'details' in HELLO")
roles = []
if not u'roles' in details:
raise ProtocolError("missing mandatory roles attribute in options in HELLO")
details_roles = check_or_raise_extra(details[u'roles'], "'roles' in 'details' in HELLO")
if len(details_roles) == 0:
raise ProtocolError("empty 'roles' in 'details' in HELLO")
for role in details_roles:
if role not in ROLE_NAME_TO_CLASS:
raise ProtocolError("invalid role '{}' in 'roles' in 'details' in HELLO".format(role))
details_role = check_or_raise_extra(details_roles[role], "role '{}' in 'roles' in 'details' in HELLO".format(role))
if u'features' in details_role:
details_role_features = check_or_raise_extra(details_role[u'features'], "'features' in role '{}' in 'roles' in 'details' in HELLO".format(role))
## FIXME: skip unknown attributes
role_features = ROLE_NAME_TO_CLASS[role](**details_role[u'features'])
else:
role_features = ROLE_NAME_TO_CLASS[role]()
roles.append(role_features)
authmethods = None
if u'authmethods' in details:
details_authmethods = details[u'authmethods']
if type(details_authmethods) != list:
raise ProtocolError("invalid type {} for 'authmethods' detail in HELLO".format(type(details_authmethods)))
for auth_method in details_authmethods:
if type(auth_method) != six.text_type:
raise ProtocolError("invalid type {} for item in 'authmethods' detail in HELLO".format(type(auth_method)))
authmethods = details_authmethods
obj = Hello(realm, roles, authmethods)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {u'roles': {}}
for role in self.roles:
details[u'roles'][role.ROLE] = {}
for feature in role.__dict__:
if not feature.startswith('_') and feature != 'ROLE' and getattr(role, feature) is not None:
if not u'features' in details[u'roles'][role.ROLE]:
details[u'roles'][role.ROLE] = {u'features': {}}
details[u'roles'][role.ROLE][u'features'][six.u(feature)] = getattr(role, feature)
if self.authmethods:
details[u'authmethods'] = self.authmethods
return [Hello.MESSAGE_TYPE, self.realm, details]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP HELLO Message (realm = {}, roles = {}, authmethods = {})".format(self.realm, self.roles, self.authmethods)
class Welcome(Message):
"""
A WAMP `WELCOME` message.
Format: `[WELCOME, Session|id, Details|dict]`
"""
MESSAGE_TYPE = 2
"""
The WAMP message code for this type of message.
"""
def __init__(self, session, roles, authid = None, authrole = None, authmethod = None):
"""
Message constructor.
:param session: The WAMP session ID the other peer is assigned.
:type session: int
"""
assert(type(session) in six.integer_types)
assert(type(roles) == list)
for role in roles:
assert(isinstance(role, autobahn.wamp.role.RoleFeatures))
assert(authid is None or type(authid) == six.text_type)
assert(authrole is None or type(authrole) == six.text_type)
assert(authmethod is None or type(authmethod) == six.text_type)
Message.__init__(self)
self.session = session
self.roles = roles
self.authid = authid
self.authrole = authrole
self.authmethod = authmethod
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Welcome.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for WELCOME".format(len(wmsg)))
session = check_or_raise_id(wmsg[1], "'session' in WELCOME")
details = check_or_raise_extra(wmsg[2], "'details' in WELCOME")
authid = details.get(u'authid', None)
authrole = details.get(u'authrole', None)
authmethod = details.get(u'authmethod', None)
roles = []
if not u'roles' in details:
raise ProtocolError("missing mandatory roles attribute in options in WELCOME")
details_roles = check_or_raise_extra(details['roles'], "'roles' in 'details' in WELCOME")
if len(details_roles) == 0:
raise ProtocolError("empty 'roles' in 'details' in WELCOME")
for role in details_roles:
if role not in ROLE_NAME_TO_CLASS:
raise ProtocolError("invalid role '{}' in 'roles' in 'details' in WELCOME".format(role))
if u'features' in details_roles[role]:
details_role_features = check_or_raise_extra(details_roles[role][u'features'], "'features' in role '{}' in 'roles' in 'details' in WELCOME".format(role))
## FIXME: skip unknown attributes
role_features = ROLE_NAME_TO_CLASS[role](**details_roles[role][u'features'])
else:
role_features = ROLE_NAME_TO_CLASS[role]()
roles.append(role_features)
obj = Welcome(session, roles, authid, authrole, authmethod)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {
u'roles': {}
}
if self.authid:
details[u'authid'] = self.authid
if self.authrole:
details[u'authrole'] = self.authrole
        if self.authmethod:
details[u'authmethod'] = self.authmethod
for role in self.roles:
details[u'roles'][role.ROLE] = {}
for feature in role.__dict__:
if not feature.startswith('_') and feature != 'ROLE' and getattr(role, feature) is not None:
if not u'features' in details[u'roles'][role.ROLE]:
details[u'roles'][role.ROLE] = {u'features': {}}
details[u'roles'][role.ROLE][u'features'][six.u(feature)] = getattr(role, feature)
return [Welcome.MESSAGE_TYPE, self.session, details]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP WELCOME Message (session = {}, roles = {}, authid = {}, authrole = {}, authmethod = {})".format(self.session, self.roles, self.authid, self.authrole, self.authmethod)
class Abort(Message):
"""
A WAMP `ABORT` message.
Format: `[ABORT, Details|dict, Reason|uri]`
"""
MESSAGE_TYPE = 3
"""
The WAMP message code for this type of message.
"""
def __init__(self, reason, message = None):
"""
Message constructor.
:param reason: WAMP or application error URI for aborting reason.
:type reason: str
:param message: Optional human-readable closing message, e.g. for logging purposes.
:type message: str
"""
assert(type(reason) == six.text_type)
assert(message is None or type(message) == six.text_type)
Message.__init__(self)
self.reason = reason
self.message = message
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Abort.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for ABORT".format(len(wmsg)))
details = check_or_raise_extra(wmsg[1], "'details' in ABORT")
reason = check_or_raise_uri(wmsg[2], "'reason' in ABORT")
message = None
if u'message' in details:
details_message = details[u'message']
if type(details_message) != six.text_type:
raise ProtocolError("invalid type {} for 'message' detail in ABORT".format(type(details_message)))
message = details_message
obj = Abort(reason, message)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {}
if self.message:
details[u'message'] = self.message
return [Abort.MESSAGE_TYPE, details, self.reason]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP ABORT Message (message = {}, reason = {})".format(self.message, self.reason)
class Challenge(Message):
"""
A WAMP `CHALLENGE` message.
Format: `[CHALLENGE, Method|string, Extra|dict]`
"""
MESSAGE_TYPE = 4
"""
The WAMP message code for this type of message.
"""
def __init__(self, method, extra = {}):
"""
Message constructor.
:param method: The authentication method.
:type method: str
:param extra: Authentication method specific information.
:type extra: dict
"""
assert(type(method) == six.text_type)
assert(type(extra) == dict)
Message.__init__(self)
self.method = method
self.extra = extra
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Challenge.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for CHALLENGE".format(len(wmsg)))
method = wmsg[1]
      if type(method) != six.text_type:
raise ProtocolError("invalid type {} for 'method' in CHALLENGE".format(type(method)))
extra = check_or_raise_extra(wmsg[2], "'extra' in CHALLENGE")
obj = Challenge(method, extra)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Challenge.MESSAGE_TYPE, self.method, self.extra]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP CHALLENGE Message (method = {}, extra = {})".format(self.method, self.extra)
class Authenticate(Message):
"""
A WAMP `AUTHENTICATE` message.
Format: `[AUTHENTICATE, Signature|string, Extra|dict]`
"""
MESSAGE_TYPE = 5
"""
The WAMP message code for this type of message.
"""
def __init__(self, signature):
"""
Message constructor.
:param signature: The signature for the authentication challenge.
:type signature: str
"""
assert(type(signature) == six.text_type)
Message.__init__(self)
self.signature = signature
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Authenticate.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for AUTHENTICATE".format(len(wmsg)))
signature = wmsg[1]
if type(signature) != six.text_type:
raise ProtocolError("invalid type {} for 'signature' in AUTHENTICATE".format(type(signature)))
extra = check_or_raise_extra(wmsg[2], "'extra' in AUTHENTICATE")
obj = Authenticate(signature)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
extra = {}
return [Authenticate.MESSAGE_TYPE, self.signature, extra]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP AUTHENTICATE Message (signature = {})".format(self.signature)
class Goodbye(Message):
"""
A WAMP `GOODBYE` message.
Format: `[GOODBYE, Details|dict, Reason|uri]`
"""
MESSAGE_TYPE = 6
"""
The WAMP message code for this type of message.
"""
DEFAULT_REASON = u"wamp.goodbye.normal"
"""
Default WAMP closing reason.
"""
def __init__(self, reason = DEFAULT_REASON, message = None):
"""
Message constructor.
:param reason: Optional WAMP or application error URI for closing reason.
:type reason: str
:param message: Optional human-readable closing message, e.g. for logging purposes.
:type message: str
"""
assert(type(reason) == six.text_type)
assert(message is None or type(message) == six.text_type)
Message.__init__(self)
self.reason = reason
self.message = message
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Goodbye.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for GOODBYE".format(len(wmsg)))
details = check_or_raise_extra(wmsg[1], "'details' in GOODBYE")
reason = check_or_raise_uri(wmsg[2], "'reason' in GOODBYE")
message = None
if u'message' in details:
details_message = details[u'message']
if type(details_message) != six.text_type:
raise ProtocolError("invalid type {} for 'message' detail in GOODBYE".format(type(details_message)))
message = details_message
obj = Goodbye(reason, message)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {}
if self.message:
details[u'message'] = self.message
return [Goodbye.MESSAGE_TYPE, details, self.reason]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP GOODBYE Message (message = {}, reason = {})".format(self.message, self.reason)
class Heartbeat(Message):
"""
A WAMP `HEARTBEAT` message.
Formats:
* `[HEARTBEAT, Incoming|integer, Outgoing|integer]`
* `[HEARTBEAT, Incoming|integer, Outgoing|integer, Discard|string]`
"""
MESSAGE_TYPE = 7
"""
The WAMP message code for this type of message.
"""
def __init__(self, incoming, outgoing, discard = None):
"""
Message constructor.
:param incoming: Last incoming heartbeat processed from peer.
:type incoming: int
:param outgoing: Outgoing heartbeat.
:type outgoing: int
      :param discard: Optional data that is discarded by the peer.
:type discard: str
"""
assert(type(incoming) in six.integer_types)
assert(type(outgoing) in six.integer_types)
assert(discard is None or type(discard) == six.text_type)
Message.__init__(self)
self.incoming = incoming
self.outgoing = outgoing
self.discard = discard
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Heartbeat.MESSAGE_TYPE)
if len(wmsg) not in [3, 4]:
raise ProtocolError("invalid message length {} for HEARTBEAT".format(len(wmsg)))
incoming = wmsg[1]
if type(incoming) not in six.integer_types:
raise ProtocolError("invalid type {} for 'incoming' in HEARTBEAT".format(type(incoming)))
if incoming < 0: # must be non-negative
raise ProtocolError("invalid value {} for 'incoming' in HEARTBEAT".format(incoming))
outgoing = wmsg[2]
if type(outgoing) not in six.integer_types:
raise ProtocolError("invalid type {} for 'outgoing' in HEARTBEAT".format(type(outgoing)))
if outgoing <= 0: # must be positive
raise ProtocolError("invalid value {} for 'outgoing' in HEARTBEAT".format(outgoing))
discard = None
if len(wmsg) > 3:
discard = wmsg[3]
if type(discard) != six.text_type:
raise ProtocolError("invalid type {} for 'discard' in HEARTBEAT".format(type(discard)))
obj = Heartbeat(incoming, outgoing, discard = discard)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
if self.discard:
return [Heartbeat.MESSAGE_TYPE, self.incoming, self.outgoing, self.discard]
else:
return [Heartbeat.MESSAGE_TYPE, self.incoming, self.outgoing]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP HEARTBEAT Message (incoming {}, outgoing = {}, len(discard) = {})".format(self.incoming, self.outgoing, len(self.discard) if self.discard else None)
class Error(Message):
"""
A WAMP `ERROR` message.
Formats:
* `[ERROR, REQUEST.Type|int, REQUEST.Request|id, Details|dict, Error|uri]`
* `[ERROR, REQUEST.Type|int, REQUEST.Request|id, Details|dict, Error|uri, Arguments|list]`
* `[ERROR, REQUEST.Type|int, REQUEST.Request|id, Details|dict, Error|uri, Arguments|list, ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 8
"""
The WAMP message code for this type of message.
"""
def __init__(self, request_type, request, error, args = None, kwargs = None):
"""
Message constructor.
:param request_type: The WAMP message type code for the original request.
:type request_type: int
      :param request: The WAMP request ID of the original request (`Call`, `Subscribe`, ...) this error occurred for.
      :type request: int
      :param error: The WAMP or application error URI for the error that occurred.
:type error: str
:param args: Positional values for application-defined exception.
Must be serializable using any serializers in use.
:type args: list
:param kwargs: Keyword values for application-defined exception.
Must be serializable using any serializers in use.
:type kwargs: dict
"""
assert(type(request_type) in six.integer_types)
assert(type(request) in six.integer_types)
assert(type(error) == six.text_type)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
Message.__init__(self)
self.request_type = request_type
self.request = request
self.error = error
self.args = args
self.kwargs = kwargs
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Error.MESSAGE_TYPE)
if len(wmsg) not in (5, 6, 7):
raise ProtocolError("invalid message length {} for ERROR".format(len(wmsg)))
request_type = wmsg[1]
if type(request_type) not in six.integer_types:
raise ProtocolError("invalid type {} for 'request_type' in ERROR".format(request_type))
if request_type not in [Subscribe.MESSAGE_TYPE,
Unsubscribe.MESSAGE_TYPE,
Publish.MESSAGE_TYPE,
Register.MESSAGE_TYPE,
Unregister.MESSAGE_TYPE,
Call.MESSAGE_TYPE,
Invocation.MESSAGE_TYPE]:
raise ProtocolError("invalid value {} for 'request_type' in ERROR".format(request_type))
request = check_or_raise_id(wmsg[2], "'request' in ERROR")
details = check_or_raise_extra(wmsg[3], "'details' in ERROR")
error = check_or_raise_uri(wmsg[4], "'error' in ERROR")
args = None
if len(wmsg) > 5:
args = wmsg[5]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in ERROR".format(type(args)))
kwargs = None
if len(wmsg) > 6:
kwargs = wmsg[6]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in ERROR".format(type(kwargs)))
obj = Error(request_type, request, error, args = args, kwargs = kwargs)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {}
if self.kwargs:
return [self.MESSAGE_TYPE, self.request_type, self.request, details, self.error, self.args, self.kwargs]
elif self.args:
return [self.MESSAGE_TYPE, self.request_type, self.request, details, self.error, self.args]
else:
return [self.MESSAGE_TYPE, self.request_type, self.request, details, self.error]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP Error Message (request_type = {}, request = {}, error = {}, args = {}, kwargs = {})".format(self.request_type, self.request, self.error, self.args, self.kwargs)
class Publish(Message):
"""
A WAMP `PUBLISH` message.
Formats:
* `[PUBLISH, Request|id, Options|dict, Topic|uri]`
* `[PUBLISH, Request|id, Options|dict, Topic|uri, Arguments|list]`
* `[PUBLISH, Request|id, Options|dict, Topic|uri, Arguments|list, ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 16
"""
The WAMP message code for this type of message.
"""
def __init__(self,
request,
topic,
args = None,
kwargs = None,
acknowledge = None,
excludeMe = None,
exclude = None,
eligible = None,
discloseMe = None):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param topic: The WAMP or application URI of the PubSub topic the event should
be published to.
:type topic: str
:param args: Positional values for application-defined event payload.
Must be serializable using any serializers in use.
:type args: list
:param kwargs: Keyword values for application-defined event payload.
Must be serializable using any serializers in use.
:type kwargs: dict
:param acknowledge: If True, acknowledge the publication with a success or
error response.
:type acknowledge: bool
:param excludeMe: If True, exclude the publisher from receiving the event, even
if he is subscribed (and eligible).
:type excludeMe: bool
:param exclude: List of WAMP session IDs to exclude from receiving this event.
:type exclude: list
:param eligible: List of WAMP session IDs eligible to receive this event.
:type eligible: list
:param discloseMe: If True, request to disclose the publisher of this event
to subscribers.
:type discloseMe: bool
"""
assert(type(request) in six.integer_types)
assert(type(topic) == six.text_type)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
assert(acknowledge is None or type(acknowledge) == bool)
assert(excludeMe is None or type(excludeMe) == bool)
assert(exclude is None or type(exclude) == list)
assert(eligible is None or type(eligible) == list)
assert(discloseMe is None or type(discloseMe) == bool)
Message.__init__(self)
self.request = request
self.topic = topic
self.args = args
self.kwargs = kwargs
self.acknowledge = acknowledge
self.excludeMe = excludeMe
self.exclude = exclude
self.eligible = eligible
self.discloseMe = discloseMe
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Publish.MESSAGE_TYPE)
if len(wmsg) not in (4, 5, 6):
raise ProtocolError("invalid message length {} for PUBLISH".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in PUBLISH")
options = check_or_raise_extra(wmsg[2], "'options' in PUBLISH")
topic = check_or_raise_uri(wmsg[3], "'topic' in PUBLISH")
args = None
if len(wmsg) > 4:
args = wmsg[4]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in PUBLISH".format(type(args)))
kwargs = None
if len(wmsg) > 5:
kwargs = wmsg[5]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in PUBLISH".format(type(kwargs)))
acknowledge = None
excludeMe = None
exclude = None
eligible = None
discloseMe = None
if u'acknowledge' in options:
option_acknowledge = options[u'acknowledge']
if type(option_acknowledge) != bool:
raise ProtocolError("invalid type {} for 'acknowledge' option in PUBLISH".format(type(option_acknowledge)))
acknowledge = option_acknowledge
if u'exclude_me' in options:
option_excludeMe = options[u'exclude_me']
if type(option_excludeMe) != bool:
raise ProtocolError("invalid type {} for 'exclude_me' option in PUBLISH".format(type(option_excludeMe)))
excludeMe = option_excludeMe
if u'exclude' in options:
option_exclude = options[u'exclude']
if type(option_exclude) != list:
raise ProtocolError("invalid type {} for 'exclude' option in PUBLISH".format(type(option_exclude)))
for sessionId in option_exclude:
if type(sessionId) not in six.integer_types:
raise ProtocolError("invalid type {} for value in 'exclude' option in PUBLISH".format(type(sessionId)))
exclude = option_exclude
if u'eligible' in options:
option_eligible = options[u'eligible']
if type(option_eligible) != list:
raise ProtocolError("invalid type {} for 'eligible' option in PUBLISH".format(type(option_eligible)))
for sessionId in option_eligible:
if type(sessionId) not in six.integer_types:
raise ProtocolError("invalid type {} for value in 'eligible' option in PUBLISH".format(type(sessionId)))
eligible = option_eligible
if u'disclose_me' in options:
option_discloseMe = options[u'disclose_me']
if type(option_discloseMe) != bool:
raise ProtocolError("invalid type {} for 'disclose_me' option in PUBLISH".format(type(option_discloseMe)))
discloseMe = option_discloseMe
obj = Publish(request,
topic,
args = args,
kwargs = kwargs,
acknowledge = acknowledge,
excludeMe = excludeMe,
exclude = exclude,
eligible = eligible,
discloseMe = discloseMe)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.acknowledge is not None:
options[u'acknowledge'] = self.acknowledge
if self.excludeMe is not None:
options[u'exclude_me'] = self.excludeMe
if self.exclude is not None:
options[u'exclude'] = self.exclude
if self.eligible is not None:
options[u'eligible'] = self.eligible
if self.discloseMe is not None:
options[u'disclose_me'] = self.discloseMe
if self.kwargs:
return [Publish.MESSAGE_TYPE, self.request, options, self.topic, self.args, self.kwargs]
elif self.args:
return [Publish.MESSAGE_TYPE, self.request, options, self.topic, self.args]
else:
return [Publish.MESSAGE_TYPE, self.request, options, self.topic]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP PUBLISH Message (request = {}, topic = {}, args = {}, kwargs = {}, acknowledge = {}, excludeMe = {}, exclude = {}, eligible = {}, discloseMe = {})".format(self.request, self.topic, self.args, self.kwargs, self.acknowledge, self.excludeMe, self.exclude, self.eligible, self.discloseMe)
class Published(Message):
"""
A WAMP `PUBLISHED` message.
Format: `[PUBLISHED, PUBLISH.Request|id, Publication|id]`
"""
MESSAGE_TYPE = 17
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, publication):
"""
Message constructor.
:param request: The request ID of the original `PUBLISH` request.
:type request: int
:param publication: The publication ID for the published event.
:type publication: int
"""
assert(type(request) in six.integer_types)
assert(type(publication) in six.integer_types)
Message.__init__(self)
self.request = request
self.publication = publication
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Published.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for PUBLISHED".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in PUBLISHED")
publication = check_or_raise_id(wmsg[2], "'publication' in PUBLISHED")
obj = Published(request, publication)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Published.MESSAGE_TYPE, self.request, self.publication]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP PUBLISHED Message (request = {}, publication = {})".format(self.request, self.publication)
class Subscribe(Message):
"""
A WAMP `SUBSCRIBE` message.
Format: `[SUBSCRIBE, Request|id, Options|dict, Topic|uri]`
"""
MESSAGE_TYPE = 32
"""
The WAMP message code for this type of message.
"""
MATCH_EXACT = u'exact'
MATCH_PREFIX = u'prefix'
MATCH_WILDCARD = u'wildcard'
def __init__(self, request, topic, match = MATCH_EXACT):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param topic: The WAMP or application URI of the PubSub topic to subscribe to.
:type topic: str
:param match: The topic matching method to be used for the subscription.
:type match: str
"""
assert(type(request) in six.integer_types)
assert(type(topic) == six.text_type)
assert(match is None or type(match) == six.text_type)
assert(match is None or match in [self.MATCH_EXACT, self.MATCH_PREFIX, self.MATCH_WILDCARD])
Message.__init__(self)
self.request = request
self.topic = topic
self.match = match
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Subscribe.MESSAGE_TYPE)
if len(wmsg) != 4:
raise ProtocolError("invalid message length {} for SUBSCRIBE".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in SUBSCRIBE")
options = check_or_raise_extra(wmsg[2], "'options' in SUBSCRIBE")
topic = check_or_raise_uri(wmsg[3], "'topic' in SUBSCRIBE")
match = Subscribe.MATCH_EXACT
if u'match' in options:
option_match = options[u'match']
if type(option_match) != six.text_type:
raise ProtocolError("invalid type {} for 'match' option in SUBSCRIBE".format(type(option_match)))
if option_match not in [Subscribe.MATCH_EXACT, Subscribe.MATCH_PREFIX, Subscribe.MATCH_WILDCARD]:
raise ProtocolError("invalid value {} for 'match' option in SUBSCRIBE".format(option_match))
match = option_match
obj = Subscribe(request, topic, match)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.match and self.match != Subscribe.MATCH_EXACT:
options[u'match'] = self.match
return [Subscribe.MESSAGE_TYPE, self.request, options, self.topic]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP SUBSCRIBE Message (request = {}, topic = {}, match = {})".format(self.request, self.topic, self.match)
class Subscribed(Message):
"""
A WAMP `SUBSCRIBED` message.
Format: `[SUBSCRIBED, SUBSCRIBE.Request|id, Subscription|id]`
"""
MESSAGE_TYPE = 33
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, subscription):
"""
Message constructor.
:param request: The request ID of the original `SUBSCRIBE` request.
:type request: int
:param subscription: The subscription ID for the subscribed topic (or topic pattern).
:type subscription: int
"""
assert(type(request) in six.integer_types)
assert(type(subscription) in six.integer_types)
Message.__init__(self)
self.request = request
self.subscription = subscription
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Subscribed.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for SUBSCRIBED".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in SUBSCRIBED")
subscription = check_or_raise_id(wmsg[2], "'subscription' in SUBSCRIBED")
obj = Subscribed(request, subscription)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Subscribed.MESSAGE_TYPE, self.request, self.subscription]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP SUBSCRIBED Message (request = {}, subscription = {})".format(self.request, self.subscription)
class Unsubscribe(Message):
"""
A WAMP `UNSUBSCRIBE` message.
Format: `[UNSUBSCRIBE, Request|id, SUBSCRIBED.Subscription|id]`
"""
MESSAGE_TYPE = 34
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, subscription):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param subscription: The subscription ID for the subscription to unsubscribe from.
:type subscription: int
"""
assert(type(request) in six.integer_types)
assert(type(subscription) in six.integer_types)
Message.__init__(self)
self.request = request
self.subscription = subscription
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Unsubscribe.MESSAGE_TYPE)
if len(wmsg) != 3:
         raise ProtocolError("invalid message length {} for UNSUBSCRIBE".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in UNSUBSCRIBE")
subscription = check_or_raise_id(wmsg[2], "'subscription' in UNSUBSCRIBE")
obj = Unsubscribe(request, subscription)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Unsubscribe.MESSAGE_TYPE, self.request, self.subscription]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP UNSUBSCRIBE Message (request = {}, subscription = {})".format(self.request, self.subscription)
class Unsubscribed(Message):
"""
A WAMP `UNSUBSCRIBED` message.
Format: `[UNSUBSCRIBED, UNSUBSCRIBE.Request|id]`
"""
MESSAGE_TYPE = 35
"""
The WAMP message code for this type of message.
"""
def __init__(self, request):
"""
Message constructor.
:param request: The request ID of the original `UNSUBSCRIBE` request.
:type request: int
"""
assert(type(request) in six.integer_types)
Message.__init__(self)
self.request = request
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Unsubscribed.MESSAGE_TYPE)
if len(wmsg) != 2:
raise ProtocolError("invalid message length {} for UNSUBSCRIBED".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in UNSUBSCRIBED")
obj = Unsubscribed(request)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Unsubscribed.MESSAGE_TYPE, self.request]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP UNSUBSCRIBED Message (request = {})".format(self.request)
class Event(Message):
"""
A WAMP `EVENT` message.
Formats:
* `[EVENT, SUBSCRIBED.Subscription|id, PUBLISHED.Publication|id, Details|dict]`
* `[EVENT, SUBSCRIBED.Subscription|id, PUBLISHED.Publication|id, Details|dict, PUBLISH.Arguments|list]`
* `[EVENT, SUBSCRIBED.Subscription|id, PUBLISHED.Publication|id, Details|dict, PUBLISH.Arguments|list, PUBLISH.ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 36
"""
The WAMP message code for this type of message.
"""
def __init__(self, subscription, publication, args = None, kwargs = None, publisher = None):
"""
Message constructor.
:param subscription: The subscription ID this event is dispatched under.
:type subscription: int
:param publication: The publication ID of the dispatched event.
:type publication: int
:param args: Positional values for application-defined exception.
Must be serializable using any serializers in use.
:type args: list
:param kwargs: Keyword values for application-defined exception.
Must be serializable using any serializers in use.
:type kwargs: dict
:param publisher: If present, the WAMP session ID of the publisher of this event.
      :type publisher: int
"""
assert(type(subscription) in six.integer_types)
assert(type(publication) in six.integer_types)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
assert(publisher is None or type(publisher) in six.integer_types)
Message.__init__(self)
self.subscription = subscription
self.publication = publication
self.args = args
self.kwargs = kwargs
self.publisher = publisher
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Event.MESSAGE_TYPE)
if len(wmsg) not in (4, 5, 6):
raise ProtocolError("invalid message length {} for EVENT".format(len(wmsg)))
subscription = check_or_raise_id(wmsg[1], "'subscription' in EVENT")
publication = check_or_raise_id(wmsg[2], "'publication' in EVENT")
details = check_or_raise_extra(wmsg[3], "'details' in EVENT")
args = None
if len(wmsg) > 4:
args = wmsg[4]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in EVENT".format(type(args)))
kwargs = None
if len(wmsg) > 5:
kwargs = wmsg[5]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in EVENT".format(type(kwargs)))
publisher = None
if u'publisher' in details:
detail_publisher = details[u'publisher']
if type(detail_publisher) not in six.integer_types:
raise ProtocolError("invalid type {} for 'publisher' detail in EVENT".format(type(detail_publisher)))
publisher = detail_publisher
obj = Event(subscription,
publication,
args = args,
kwargs = kwargs,
publisher = publisher)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {}
if self.publisher is not None:
details[u'publisher'] = self.publisher
if self.kwargs:
return [Event.MESSAGE_TYPE, self.subscription, self.publication, details, self.args, self.kwargs]
elif self.args:
return [Event.MESSAGE_TYPE, self.subscription, self.publication, details, self.args]
else:
return [Event.MESSAGE_TYPE, self.subscription, self.publication, details]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP EVENT Message (subscription = {}, publication = {}, args = {}, kwargs = {}, publisher = {})".format(self.subscription, self.publication, self.args, self.kwargs, self.publisher)
class Call(Message):
"""
A WAMP `CALL` message.
Formats:
* `[CALL, Request|id, Options|dict, Procedure|uri]`
* `[CALL, Request|id, Options|dict, Procedure|uri, Arguments|list]`
* `[CALL, Request|id, Options|dict, Procedure|uri, Arguments|list, ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 48
"""
The WAMP message code for this type of message.
"""
def __init__(self,
request,
procedure,
args = None,
kwargs = None,
timeout = None,
receive_progress = None,
discloseMe = None):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param procedure: The WAMP or application URI of the procedure which should be called.
:type procedure: str
:param args: Positional values for application-defined call arguments.
Must be serializable using any serializers in use.
:type args: list
      :param kwargs: Keyword values for application-defined call arguments.
         Must be serializable using any serializers in use.
      :type kwargs: dict
      :param timeout: If present, let the callee automatically cancel
         the call after this number of milliseconds.
      :type timeout: int
assert(type(request) in six.integer_types)
assert(type(procedure) == six.text_type)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
assert(timeout is None or type(timeout) in six.integer_types)
assert(receive_progress is None or type(receive_progress) == bool)
assert(discloseMe is None or type(discloseMe) == bool)
Message.__init__(self)
self.request = request
self.procedure = procedure
self.args = args
self.kwargs = kwargs
self.timeout = timeout
self.receive_progress = receive_progress
self.discloseMe = discloseMe
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Call.MESSAGE_TYPE)
if len(wmsg) not in (4, 5, 6):
raise ProtocolError("invalid message length {} for CALL".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in CALL")
options = check_or_raise_extra(wmsg[2], "'options' in CALL")
procedure = check_or_raise_uri(wmsg[3], "'procedure' in CALL")
args = None
if len(wmsg) > 4:
args = wmsg[4]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in CALL".format(type(args)))
kwargs = None
if len(wmsg) > 5:
kwargs = wmsg[5]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in CALL".format(type(kwargs)))
timeout = None
if u'timeout' in options:
option_timeout = options[u'timeout']
if type(option_timeout) not in six.integer_types:
raise ProtocolError("invalid type {} for 'timeout' option in CALL".format(type(option_timeout)))
if option_timeout < 0:
raise ProtocolError("invalid value {} for 'timeout' option in CALL".format(option_timeout))
timeout = option_timeout
receive_progress = None
if u'receive_progress' in options:
option_receive_progress = options[u'receive_progress']
if type(option_receive_progress) != bool:
raise ProtocolError("invalid type {} for 'receive_progress' option in CALL".format(type(option_receive_progress)))
receive_progress = option_receive_progress
discloseMe = None
if u'disclose_me' in options:
option_discloseMe = options[u'disclose_me']
if type(option_discloseMe) != bool:
raise ProtocolError("invalid type {} for 'disclose_me' option in CALL".format(type(option_discloseMe)))
discloseMe = option_discloseMe
obj = Call(request,
procedure,
args = args,
kwargs = kwargs,
timeout = timeout,
receive_progress = receive_progress,
discloseMe = discloseMe)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.timeout is not None:
options[u'timeout'] = self.timeout
if self.receive_progress is not None:
options[u'receive_progress'] = self.receive_progress
if self.discloseMe is not None:
options[u'disclose_me'] = self.discloseMe
if self.kwargs:
return [Call.MESSAGE_TYPE, self.request, options, self.procedure, self.args, self.kwargs]
elif self.args:
return [Call.MESSAGE_TYPE, self.request, options, self.procedure, self.args]
else:
return [Call.MESSAGE_TYPE, self.request, options, self.procedure]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP CALL Message (request = {}, procedure = {}, args = {}, kwargs = {}, timeout = {}, receive_progress = {}, discloseMe = {})".format(self.request, self.procedure, self.args, self.kwargs, self.timeout, self.receive_progress, self.discloseMe)
class Cancel(Message):
"""
A WAMP `CANCEL` message.
Format: `[CANCEL, CALL.Request|id, Options|dict]`
"""
MESSAGE_TYPE = 49
"""
The WAMP message code for this type of message.
"""
SKIP = u'skip'
ABORT = u'abort'
KILL = u'kill'
def __init__(self, request, mode = None):
"""
Message constructor.
:param request: The WAMP request ID of the original `CALL` to cancel.
:type request: int
:param mode: Specifies how to cancel the call (`"skip"`, `"abort"` or `"kill"`).
:type mode: str
"""
assert(type(request) in six.integer_types)
assert(mode is None or type(mode) == six.text_type)
assert(mode is None or mode in [self.SKIP, self.ABORT, self.KILL])
Message.__init__(self)
self.request = request
self.mode = mode
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Cancel.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for CANCEL".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in CANCEL")
options = check_or_raise_extra(wmsg[2], "'options' in CANCEL")
## options
##
mode = None
if u'mode' in options:
option_mode = options[u'mode']
if type(option_mode) != six.text_type:
raise ProtocolError("invalid type {} for 'mode' option in CANCEL".format(type(option_mode)))
if option_mode not in [Cancel.SKIP, Cancel.ABORT, Cancel.KILL]:
raise ProtocolError("invalid value '{}' for 'mode' option in CANCEL".format(option_mode))
mode = option_mode
obj = Cancel(request, mode = mode)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.mode is not None:
options[u'mode'] = self.mode
return [Cancel.MESSAGE_TYPE, self.request, options]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP CANCEL Message (request = {}, mode = '{}'')".format(self.request, self.mode)
class Result(Message):
"""
A WAMP `RESULT` message.
Formats:
* `[RESULT, CALL.Request|id, Details|dict]`
* `[RESULT, CALL.Request|id, Details|dict, YIELD.Arguments|list]`
* `[RESULT, CALL.Request|id, Details|dict, YIELD.Arguments|list, YIELD.ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 50
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, args = None, kwargs = None, progress = None):
"""
Message constructor.
:param request: The request ID of the original `CALL` request.
:type request: int
:param args: Positional values for application-defined event payload.
Must be serializable using any serializers in use.
:type args: list
:param kwargs: Keyword values for application-defined event payload.
Must be serializable using any serializers in use.
:type kwargs: dict
:param progress: If `True`, this result is a progressive call result, and subsequent
results (or a final error) will follow.
:type progress: bool
"""
assert(type(request) in six.integer_types)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
assert(progress is None or type(progress) == bool)
Message.__init__(self)
self.request = request
self.args = args
self.kwargs = kwargs
self.progress = progress
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Result.MESSAGE_TYPE)
if len(wmsg) not in (3, 4, 5):
raise ProtocolError("invalid message length {} for RESULT".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in RESULT")
details = check_or_raise_extra(wmsg[2], "'details' in RESULT")
args = None
if len(wmsg) > 3:
args = wmsg[3]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in RESULT".format(type(args)))
kwargs = None
if len(wmsg) > 4:
kwargs = wmsg[4]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in RESULT".format(type(kwargs)))
progress = None
if u'progress' in details:
detail_progress = details[u'progress']
if type(detail_progress) != bool:
raise ProtocolError("invalid type {} for 'progress' option in RESULT".format(type(detail_progress)))
progress = detail_progress
obj = Result(request, args = args, kwargs = kwargs, progress = progress)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {}
if self.progress is not None:
details[u'progress'] = self.progress
if self.kwargs:
return [Result.MESSAGE_TYPE, self.request, details, self.args, self.kwargs]
elif self.args:
return [Result.MESSAGE_TYPE, self.request, details, self.args]
else:
return [Result.MESSAGE_TYPE, self.request, details]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP RESULT Message (request = {}, args = {}, kwargs = {}, progress = {})".format(self.request, self.args, self.kwargs, self.progress)
class Register(Message):
"""
A WAMP `REGISTER` message.
Format: `[REGISTER, Request|id, Options|dict, Procedure|uri]`
"""
MESSAGE_TYPE = 64
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, procedure, pkeys = None, discloseCaller = None):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param procedure: The WAMP or application URI of the RPC endpoint provided.
:type procedure: str
:param pkeys: The endpoint can work for this list of application partition keys.
:type pkeys: list
"""
assert(type(request) in six.integer_types)
assert(type(procedure) == six.text_type)
assert(pkeys is None or type(pkeys) == list)
if pkeys:
for k in pkeys:
assert(type(k) in six.integer_types)
assert(discloseCaller is None or type(discloseCaller) == bool)
Message.__init__(self)
self.request = request
self.procedure = procedure
self.pkeys = pkeys
self.discloseCaller = discloseCaller
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Register.MESSAGE_TYPE)
if len(wmsg) != 4:
raise ProtocolError("invalid message length {} for REGISTER".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in REGISTER")
options = check_or_raise_extra(wmsg[2], "'options' in REGISTER")
procedure = check_or_raise_uri(wmsg[3], "'procedure' in REGISTER")
pkeys = None
discloseCaller = None
if u'pkeys' in options:
option_pkeys = options[u'pkeys']
if type(option_pkeys) != list:
raise ProtocolError("invalid type {} for 'pkeys' option in REGISTER".format(type(option_pkeys)))
for pk in option_pkeys:
if type(pk) not in six.integer_types:
raise ProtocolError("invalid type for value '{}' in 'pkeys' option in REGISTER".format(type(pk)))
pkeys = option_pkeys
if u'disclose_caller' in options:
option_discloseCaller = options[u'disclose_caller']
if type(option_discloseCaller) != bool:
raise ProtocolError("invalid type {} for 'disclose_caller' option in REGISTER".format(type(option_discloseCaller)))
discloseCaller = option_discloseCaller
obj = Register(request, procedure, pkeys = pkeys, discloseCaller = discloseCaller)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.pkeys is not None:
options[u'pkeys'] = self.pkeys
if self.discloseCaller is not None:
options[u'disclose_caller'] = self.discloseCaller
return [Register.MESSAGE_TYPE, self.request, options, self.procedure]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP REGISTER Message (request = {}, procedure = {}, pkeys = {}, discloseCaller = {})".format(self.request, self.procedure, self.pkeys, self.discloseCaller)
class Registered(Message):
"""
A WAMP `REGISTERED` message.
Format: `[REGISTERED, REGISTER.Request|id, Registration|id]`
"""
MESSAGE_TYPE = 65
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, registration):
"""
Message constructor.
:param request: The request ID of the original `REGISTER` request.
:type request: int
:param registration: The registration ID for the registered procedure (or procedure pattern).
:type registration: int
"""
assert(type(request) in six.integer_types)
assert(type(registration) in six.integer_types)
Message.__init__(self)
self.request = request
self.registration = registration
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Registered.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for REGISTERED".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in REGISTERED")
registration = check_or_raise_id(wmsg[2], "'registration' in REGISTERED")
obj = Registered(request, registration)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Registered.MESSAGE_TYPE, self.request, self.registration]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP REGISTERED Message (request = {}, registration = {})".format(self.request, self.registration)
class Unregister(Message):
"""
   A WAMP `UNREGISTER` message.
Format: `[UNREGISTER, Request|id, REGISTERED.Registration|id]`
"""
MESSAGE_TYPE = 66
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, registration):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param registration: The registration ID for the registration to unregister.
:type registration: int
"""
assert(type(request) in six.integer_types)
assert(type(registration) in six.integer_types)
Message.__init__(self)
self.request = request
self.registration = registration
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Unregister.MESSAGE_TYPE)
if len(wmsg) != 3:
         raise ProtocolError("invalid message length {} for UNREGISTER".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in UNREGISTER")
registration = check_or_raise_id(wmsg[2], "'registration' in UNREGISTER")
obj = Unregister(request, registration)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Unregister.MESSAGE_TYPE, self.request, self.registration]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP UNREGISTER Message (request = {}, registration = {})".format(self.request, self.registration)
class Unregistered(Message):
"""
A WAMP `UNREGISTERED` message.
Format: `[UNREGISTERED, UNREGISTER.Request|id]`
"""
MESSAGE_TYPE = 67
"""
The WAMP message code for this type of message.
"""
def __init__(self, request):
"""
Message constructor.
:param request: The request ID of the original `UNREGISTER` request.
:type request: int
"""
assert(type(request) in six.integer_types)
Message.__init__(self)
self.request = request
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Unregistered.MESSAGE_TYPE)
if len(wmsg) != 2:
         raise ProtocolError("invalid message length {} for UNREGISTERED".format(len(wmsg)))
      request = check_or_raise_id(wmsg[1], "'request' in UNREGISTERED")
obj = Unregistered(request)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
return [Unregistered.MESSAGE_TYPE, self.request]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP UNREGISTER Message (request = {})".format(self.request)
class Invocation(Message):
"""
A WAMP `INVOCATION` message.
Formats:
* `[INVOCATION, Request|id, REGISTERED.Registration|id, Details|dict]`
* `[INVOCATION, Request|id, REGISTERED.Registration|id, Details|dict, CALL.Arguments|list]`
* `[INVOCATION, Request|id, REGISTERED.Registration|id, Details|dict, CALL.Arguments|list, CALL.ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 68
"""
The WAMP message code for this type of message.
"""
def __init__(self,
request,
registration,
args = None,
kwargs = None,
timeout = None,
receive_progress = None,
caller = None,
authid = None,
authrole = None,
authmethod = None):
"""
Message constructor.
:param request: The WAMP request ID of this request.
:type request: int
:param registration: The registration ID of the endpoint to be invoked.
:type registration: int
:param args: Positional values for application-defined event payload.
Must be serializable using any serializers in use.
:type args: list
:param kwargs: Keyword values for application-defined event payload.
Must be serializable using any serializers in use.
:type kwargs: dict
      :param timeout: If present, let the callee automatically cancel
         the invocation after this number of milliseconds.
:type timeout: int
"""
assert(type(request) in six.integer_types)
assert(type(registration) in six.integer_types)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
assert(timeout is None or type(timeout) in six.integer_types)
assert(receive_progress is None or type(receive_progress) == bool)
assert(caller is None or type(caller) in six.integer_types)
assert(authid is None or type(authid) == six.text_type)
assert(authrole is None or type(authrole) == six.text_type)
assert(authmethod is None or type(authmethod) == six.text_type)
Message.__init__(self)
self.request = request
self.registration = registration
self.args = args
self.kwargs = kwargs
self.timeout = timeout
self.receive_progress = receive_progress
self.caller = caller
self.authid = authid
self.authrole = authrole
self.authmethod = authmethod
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Invocation.MESSAGE_TYPE)
if len(wmsg) not in (4, 5, 6):
raise ProtocolError("invalid message length {} for INVOCATION".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in INVOCATION")
registration = check_or_raise_id(wmsg[2], "'registration' in INVOCATION")
details = check_or_raise_extra(wmsg[3], "'details' in INVOCATION")
args = None
if len(wmsg) > 4:
args = wmsg[4]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in INVOCATION".format(type(args)))
kwargs = None
if len(wmsg) > 5:
kwargs = wmsg[5]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in INVOCATION".format(type(kwargs)))
timeout = None
if u'timeout' in details:
detail_timeout = details[u'timeout']
if type(detail_timeout) not in six.integer_types:
raise ProtocolError("invalid type {} for 'timeout' detail in INVOCATION".format(type(detail_timeout)))
if detail_timeout < 0:
raise ProtocolError("invalid value {} for 'timeout' detail in INVOCATION".format(detail_timeout))
timeout = detail_timeout
receive_progress = None
if u'receive_progress' in details:
detail_receive_progress = details[u'receive_progress']
if type(detail_receive_progress) != bool:
raise ProtocolError("invalid type {} for 'receive_progress' detail in INVOCATION".format(type(detail_receive_progress)))
receive_progress = detail_receive_progress
caller = None
if u'caller' in details:
detail_caller = details[u'caller']
if type(detail_caller) not in six.integer_types:
raise ProtocolError("invalid type {} for 'caller' detail in INVOCATION".format(type(detail_caller)))
caller = detail_caller
authid = None
if u'authid' in details:
detail_authid = details[u'authid']
if type(detail_authid) != six.text_type:
raise ProtocolError("invalid type {} for 'authid' detail in INVOCATION".format(type(detail_authid)))
authid = detail_authid
authrole = None
if u'authrole' in details:
detail_authrole = details[u'authrole']
if type(detail_authrole) != six.text_type:
raise ProtocolError("invalid type {} for 'authrole' detail in INVOCATION".format(type(detail_authrole)))
authrole = detail_authrole
authmethod = None
if u'authmethod' in details:
detail_authmethod = details[u'authmethod']
         if type(detail_authmethod) != six.text_type:
            raise ProtocolError("invalid type {} for 'authmethod' detail in INVOCATION".format(type(detail_authmethod)))
authmethod = detail_authmethod
obj = Invocation(request,
registration,
args = args,
kwargs = kwargs,
timeout = timeout,
receive_progress = receive_progress,
caller = caller,
authid = authid,
authrole = authrole,
authmethod = authmethod)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.timeout is not None:
options[u'timeout'] = self.timeout
if self.receive_progress is not None:
options[u'receive_progress'] = self.receive_progress
if self.caller is not None:
options[u'caller'] = self.caller
if self.authid is not None:
options[u'authid'] = self.authid
if self.authrole is not None:
options[u'authrole'] = self.authrole
if self.authmethod is not None:
options[u'authmethod'] = self.authmethod
if self.kwargs:
return [Invocation.MESSAGE_TYPE, self.request, self.registration, options, self.args, self.kwargs]
elif self.args:
return [Invocation.MESSAGE_TYPE, self.request, self.registration, options, self.args]
else:
return [Invocation.MESSAGE_TYPE, self.request, self.registration, options]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP INVOCATION Message (request = {}, registration = {}, args = {}, kwargs = {}, timeout = {}, receive_progress = {}, caller = {}, authid = {}, authrole = {}, authmethod = {})".format(self.request, self.registration, self.args, self.kwargs, self.timeout, self.receive_progress, self.caller, self.authid, self.authrole, self.authmethod)
class Interrupt(Message):
"""
A WAMP `INTERRUPT` message.
Format: `[INTERRUPT, INVOCATION.Request|id, Options|dict]`
"""
MESSAGE_TYPE = 69
"""
The WAMP message code for this type of message.
"""
ABORT = u'abort'
KILL = u'kill'
def __init__(self, request, mode = None):
"""
Message constructor.
:param request: The WAMP request ID of the original `INVOCATION` to interrupt.
:type request: int
:param mode: Specifies how to interrupt the invocation (`"abort"` or `"kill"`).
:type mode: str
"""
assert(type(request) in six.integer_types)
assert(mode is None or type(mode) == six.text_type)
assert(mode is None or mode in [self.ABORT, self.KILL])
Message.__init__(self)
self.request = request
self.mode = mode
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Interrupt.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for INTERRUPT".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in INTERRUPT")
options = check_or_raise_extra(wmsg[2], "'options' in INTERRUPT")
## options
##
mode = None
if u'mode' in options:
option_mode = options[u'mode']
if type(option_mode) != six.text_type:
raise ProtocolError("invalid type {} for 'mode' option in INTERRUPT".format(type(option_mode)))
if option_mode not in [Interrupt.ABORT, Interrupt.KILL]:
raise ProtocolError("invalid value '{}' for 'mode' option in INTERRUPT".format(option_mode))
mode = option_mode
obj = Interrupt(request, mode = mode)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.mode is not None:
options[u'mode'] = self.mode
return [Interrupt.MESSAGE_TYPE, self.request, options]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP INTERRUPT Message (request = {}, mode = '{}'')".format(self.request, self.mode)
class Yield(Message):
"""
A WAMP `YIELD` message.
Formats:
* `[YIELD, INVOCATION.Request|id, Options|dict]`
* `[YIELD, INVOCATION.Request|id, Options|dict, Arguments|list]`
* `[YIELD, INVOCATION.Request|id, Options|dict, Arguments|list, ArgumentsKw|dict]`
"""
MESSAGE_TYPE = 70
"""
The WAMP message code for this type of message.
"""
def __init__(self, request, args = None, kwargs = None, progress = None):
"""
Message constructor.
:param request: The WAMP request ID of the original call.
:type request: int
:param args: Positional values for application-defined event payload.
Must be serializable using any serializers in use.
:type args: list
:param kwargs: Keyword values for application-defined event payload.
Must be serializable using any serializers in use.
:type kwargs: dict
:param progress: If `True`, this result is a progressive invocation result, and subsequent
results (or a final error) will follow.
:type progress: bool
"""
assert(type(request) in six.integer_types)
assert(args is None or type(args) in [list, tuple])
assert(kwargs is None or type(kwargs) == dict)
assert(progress is None or type(progress) == bool)
Message.__init__(self)
self.request = request
self.args = args
self.kwargs = kwargs
self.progress = progress
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Yield.MESSAGE_TYPE)
if len(wmsg) not in (3, 4, 5):
raise ProtocolError("invalid message length {} for YIELD".format(len(wmsg)))
request = check_or_raise_id(wmsg[1], "'request' in YIELD")
options = check_or_raise_extra(wmsg[2], "'options' in YIELD")
args = None
if len(wmsg) > 3:
args = wmsg[3]
if type(args) != list:
raise ProtocolError("invalid type {} for 'args' in YIELD".format(type(args)))
kwargs = None
if len(wmsg) > 4:
kwargs = wmsg[4]
if type(kwargs) != dict:
raise ProtocolError("invalid type {} for 'kwargs' in YIELD".format(type(kwargs)))
progress = None
if u'progress' in options:
option_progress = options[u'progress']
if type(option_progress) != bool:
raise ProtocolError("invalid type {} for 'progress' option in YIELD".format(type(option_progress)))
progress = option_progress
obj = Yield(request, args = args, kwargs = kwargs, progress = progress)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
options = {}
if self.progress is not None:
options[u'progress'] = self.progress
if self.kwargs:
return [Yield.MESSAGE_TYPE, self.request, options, self.args, self.kwargs]
elif self.args:
return [Yield.MESSAGE_TYPE, self.request, options, self.args]
else:
return [Yield.MESSAGE_TYPE, self.request, options]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP YIELD Message (request = {}, args = {}, kwargs = {}, progress = {})".format(self.request, self.args, self.kwargs, self.progress)
|
ahmedbodi/AutobahnPython
|
autobahn/autobahn/wamp/message.py
|
Python
|
apache-2.0
| 84,035 | 0.020694 |
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
import numpy as np
import torch
from PIL import Image
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import Sampler
from torchvision import datasets
class MNISTBufferedDataset(datasets.MNIST):
def __init__(
self, root, train=True, transform=None, target_transform=None, download=False
):
super(MNISTBufferedDataset, self).__init__(
root,
train=train,
transform=transform,
target_transform=target_transform,
download=download,
)
def __getitem__(self, index):
"""
Override to allow generation of white noise for index -1
Args:
index (int): Index
Returns:
tuple: (image, target) where target is index of the target class.
"""
if index == -1:
# Noise
target = np.random.randint(10) # -1
img = np.random.rand(28, 28)
else:
img, target = self.data[index].numpy(), int(self.targets[index])
img = Image.fromarray(img, mode="L")
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
class MNISTSequenceSampler(Sampler):
"""
Loop through one or more sequences of digits
Draw each digit image (based on label specified by sequence) randomly
TODO: Having this work with a custom DataSet that draws random
MNIST digits may be more appropriate
"""
def __init__(
self,
data_source,
sequences=None,
batch_size=64,
random_mnist_images=True,
randomize_sequence_cursors=True,
max_batches=100,
use_mnist_pct=1.0,
noise_buffer=False,
):
super(MNISTSequenceSampler, self).__init__(data_source)
self.data_source = data_source
self.random_mnist_images = random_mnist_images
self.randomize_sequence_cursors = randomize_sequence_cursors
self.use_mnist_pct = use_mnist_pct
self.noise_buffer = noise_buffer
self.max_batches = max_batches
self.bsz = batch_size
self.label_indices = {} # Digit -> Indices in dataset
self.label_cursors = {} # Digit -> Cursor across images for each digit
sequences = list(sequences) # Avoid changing underlying sequence list
if self.noise_buffer:
for seq in sequences:
if seq[-1] != -1:
seq.append(-1)
self.sequences = sequences
self.n_sequences = len(self.sequences)
self.seq_lengths = torch.tensor([len(subseq) for subseq in self.sequences])
# Each of these stores both current and next batch state (2 x batch_size)
self.sequence_id = torch.stack(
(self._init_sequence_ids(), self._init_sequence_ids())
) # Iterate over subsequences
first_batch_cursors = self._init_sequence_cursors()
self.sequence_cursor = torch.stack(
(first_batch_cursors, first_batch_cursors)
) # Iterates over sequence items
self._increment_next()
self.sequences_mat = pad_sequence(
torch.tensor(self.sequences), batch_first=True, padding_value=-99
)
# Get index for each digit (that appears in a passed sequence)
for seq in sequences:
for digit in seq:
if digit != -1 and digit not in self.label_indices:
mask = (data_source.targets == digit).nonzero().flatten()
idx = torch.randperm(mask.size(0))
if self.use_mnist_pct < 1.0:
idx = idx[: int(self.use_mnist_pct * len(idx))]
self.label_indices[digit] = mask[idx]
self.label_cursors[digit] = 0
def _init_sequence_ids(self):
return torch.LongTensor(self.bsz).random_(0, self.n_sequences)
def _init_sequence_cursors(self):
if self.randomize_sequence_cursors:
lengths = self.seq_lengths[self.sequence_id[0]]
cursors = (
torch.FloatTensor(self.bsz).uniform_(0, 1) * lengths.float()
).long()
else:
cursors = torch.zeros(self.bsz).long()
return cursors
def _increment_next(self):
# Increment cursors and select new random subsequences for those that
# have terminated
self.sequence_cursor[1] += 1
roll_mask = self.sequence_cursor[1] >= self.seq_lengths[self.sequence_id[1]]
if roll_mask.sum() > 0:
# Roll items to 0 of randomly chosen next subsequence
self.sequence_id[1, roll_mask] = torch.LongTensor(
1, roll_mask.sum()
).random_(0, self.n_sequences)
self.sequence_cursor[1, roll_mask] = 0
def _get_next_batch(self):
"""
"""
# First row is current inputs
inp_labels_batch = self.sequences_mat[
self.sequence_id[0], self.sequence_cursor[0]
]
inp_idxs = [self._get_sample_image(digit.item()) for digit in inp_labels_batch]
# Second row is next (predicted) inputs
tgt_labels_batch = self.sequences_mat[
self.sequence_id[1], self.sequence_cursor[1]
]
tgt_idxs = [self._get_sample_image(digit.item()) for digit in tgt_labels_batch]
# Roll next to current
self.sequence_id[0] = self.sequence_id[1]
self.sequence_cursor[0] = self.sequence_cursor[1]
self._increment_next()
return inp_idxs + tgt_idxs
def _get_sample_image(self, digit):
"""
Return a sample image id for digit from MNIST
"""
if digit == -1:
# Generate white noise
return -1
else:
cursor = self.label_cursors[digit]
if self.random_mnist_images:
# If not random, always take first digit
self.label_cursors[digit] += 1
indices = self.label_indices[digit]
if cursor >= len(indices) - 1:
# Begin sequence from beginning & shuffle
self.label_cursors[digit] = cursor = 0
idx = torch.randperm(len(self.label_indices[digit]))
self.label_indices[digit] = indices = self.label_indices[digit][idx]
return indices[cursor].item()
def __iter__(self):
for _i in range(len(self)):
yield self._get_next_batch()
return
def __len__(self):
return self.max_batches if self.max_batches else len(self.data_source)
def pred_sequence_collate(batch):
"""
"""
bsz = len(batch) // 2
inp_tuples = batch[:bsz]
tgt_tuples = batch[bsz:]
inp_images_batch = torch.stack([item[0] for item in inp_tuples]).view(bsz, -1)
tgt_images_batch = torch.stack([item[0] for item in tgt_tuples]).view(bsz, -1)
inp_labels_batch = torch.tensor([item[1] for item in inp_tuples])
tgt_labels_batch = torch.tensor([item[1] for item in tgt_tuples])
return (inp_images_batch, tgt_images_batch, tgt_labels_batch, inp_labels_batch)
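# --- Hedged usage sketch (not part of the original file) ---------------------
# Wires the buffered dataset, the sequence sampler and the collate function
# above into a torch DataLoader; the "/tmp/mnist" path and the digit
# sequences are illustrative assumptions only.
def _example_mnist_sequence_loader():
    from torch.utils.data import DataLoader
    from torchvision import transforms

    dataset = MNISTBufferedDataset("/tmp/mnist", train=True, download=True,
                                   transform=transforms.ToTensor())
    # Sequences of equal length, since the sampler stacks them into a tensor
    sampler = MNISTSequenceSampler(dataset, sequences=[[0, 1, 2], [3, 4, 5]],
                                   batch_size=16, max_batches=10)
    loader = DataLoader(dataset, batch_sampler=sampler,
                        collate_fn=pred_sequence_collate)
    inp_images, tgt_images, tgt_labels, inp_labels = next(iter(loader))
    return inp_images.shape  # torch.Size([16, 784]) flattened MNIST digits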
class PTBSequenceSampler(Sampler):
"""
"""
def __init__(
self, data_source, batch_size=64, max_batches=1000000, uniform_offsets=False
):
super(PTBSequenceSampler, self).__init__(None)
self.batch_size = batch_size
self.max_batches = max_batches
self.data_source = data_source
self.data_len = len(self.data_source)
# Choose initial random offsets into PTB, one per item in batch
if uniform_offsets:
# Useful for evaluation to guarantee even coverage
self.batch_idxs = (
self.data_len / self.batch_size * torch.arange(0, batch_size)
).long()
else:
self.batch_idxs = (torch.rand(self.batch_size) * (self.data_len - 1)).long()
def __iter__(self):
# Yield the next single batch of (batch_size) word IDs,
# each at a different offset into PTB
for _i in range(len(self)):
# yield data, target
yield self.batch_idxs, self.batch_idxs + 1
self.batch_idxs += 1 # Next token row
self.batch_idxs[self.batch_idxs > (self.data_len - 2)] = 0 # Wrap to start
return
def __len__(self):
return self.max_batches if self.max_batches else self.data_len
def vector_batch(word_ids, vector_dict):
vectors = []
for word_id in word_ids:
vectors.append(vector_dict[word_id.item()])
return torch.stack(vectors).view(word_ids.size(0), -1).detach()
def ptb_pred_sequence_collate(batch, vector_dict=None):
"""
Return minibatches, shape (batch_size, embed dim)
"""
data, target = batch
pred_input = data
data = vector_batch(data, vector_dict)
pred_target = target
target = vector_batch(target, vector_dict)
return (data, target, pred_target, pred_input)
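# --- Hedged usage sketch (toy corpus and embeddings, not from the original) --
# Drives PTBSequenceSampler by hand: each yielded pair of index tensors is
# looked up in a fake word-id corpus and embedded via ptb_pred_sequence_collate.
def _example_ptb_batches():
    corpus = torch.randint(0, 50, (1000,))              # fake PTB word ids
    vectors = {i: torch.randn(10) for i in range(50)}   # toy embedding table
    sampler = PTBSequenceSampler(corpus, batch_size=4, max_batches=3)
    shapes = []
    for data_idx, target_idx in sampler:
        data, target, pred_target, pred_input = ptb_pred_sequence_collate(
            (corpus[data_idx], corpus[target_idx]), vector_dict=vectors)
        shapes.append((data.shape, target.shape))        # (4, 10) each
    return shapes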
|
mrcslws/nupic.research
|
projects/rsm/rsm_samplers.py
|
Python
|
agpl-3.0
| 9,884 | 0.001315 |
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmc
import xbmcaddon
import xbmcgui
from salts_lib import log_utils
from salts_lib import utils
from salts_lib.constants import MODES
from salts_lib.db_utils import DB_Connection
MAX_ERRORS = 10
kodi = xbmcaddon.Addon(id='plugin.video.salts')
log_utils.log('Service: Installed Version: %s' % (kodi.getAddonInfo('version')))
db_connection = DB_Connection()
if kodi.getSetting('use_remote_db') == 'false' or kodi.getSetting('enable_upgrade') == 'true':
db_connection.init_database()
class Service(xbmc.Player):
def __init__(self, *args, **kwargs):
log_utils.log('Service: starting...')
xbmc.Player.__init__(self, *args, **kwargs)
self.win = xbmcgui.Window(10000)
self.reset()
def reset(self):
log_utils.log('Service: Resetting...')
self.win.clearProperty('salts.playing')
self.win.clearProperty('salts.playing.trakt_id')
self.win.clearProperty('salts.playing.season')
self.win.clearProperty('salts.playing.episode')
self.win.clearProperty('salts.playing.srt')
self.win.clearProperty('salts.playing.resume')
self.tracked = False
self._totalTime = 999999
self.trakt_id = None
self.season = None
self.episode = None
self._lastPos = 0
def onPlayBackStarted(self):
log_utils.log('Service: Playback started')
playing = self.win.getProperty('salts.playing') == 'True'
self.trakt_id = self.win.getProperty('salts.playing.trakt_id')
self.season = self.win.getProperty('salts.playing.season')
self.episode = self.win.getProperty('salts.playing.episode')
srt_path = self.win.getProperty('salts.playing.srt')
resume_point = self.win.getProperty('salts.playing.trakt_resume')
if playing: # Playback is ours
log_utils.log('Service: tracking progress...')
self.tracked = True
if srt_path:
log_utils.log('Service: Enabling subtitles: %s' % (srt_path))
self.setSubtitles(srt_path)
else:
self.showSubtitles(False)
self._totalTime = 0
while self._totalTime == 0:
try:
self._totalTime = self.getTotalTime()
except RuntimeError:
self._totalTime = 0
break
xbmc.sleep(1000)
if resume_point:
resume_time = float(resume_point) * self._totalTime / 100
log_utils.log("Resume Percent: %s, Resume Time: %s Total Time: %s" % (resume_point, resume_time, self._totalTime), log_utils.LOGDEBUG)
self.seekTime(resume_time)
def onPlayBackStopped(self):
log_utils.log('Service: Playback Stopped')
if self.tracked:
playedTime = float(self._lastPos)
try: percent_played = int((playedTime / self._totalTime) * 100)
except: percent_played = 0 # guard div by zero
pTime = utils.format_time(playedTime)
tTime = utils.format_time(self._totalTime)
log_utils.log('Service: Played %s of %s total = %s%%' % (pTime, tTime, percent_played), log_utils.LOGDEBUG)
if playedTime == 0 and self._totalTime == 999999:
log_utils.log('XBMC silently failed to start playback', log_utils.LOGWARNING)
elif playedTime >= 5:
log_utils.log('Service: Setting bookmark on |%s|%s|%s| to %s seconds' % (self.trakt_id, self.season, self.episode, playedTime), log_utils.LOGDEBUG)
db_connection.set_bookmark(self.trakt_id, playedTime, self.season, self.episode)
if percent_played >= 75:
if xbmc.getCondVisibility('System.HasAddon(script.trakt)'):
run = 'RunScript(script.trakt, action=sync, silent=True)'
xbmc.executebuiltin(run)
self.reset()
def onPlayBackEnded(self):
log_utils.log('Service: Playback completed')
self.onPlayBackStopped()
monitor = Service()
utils.do_startup_task(MODES.UPDATE_SUBS)
errors = 0
while not xbmc.abortRequested:
try:
isPlaying = monitor.isPlaying()
utils.do_scheduled_task(MODES.UPDATE_SUBS, isPlaying)
if monitor.tracked and monitor.isPlayingVideo():
monitor._lastPos = monitor.getTime()
except Exception as e:
errors += 1
if errors >= MAX_ERRORS:
log_utils.log('Service: Error (%s) received..(%s/%s)...Ending Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
break
else:
log_utils.log('Service: Error (%s) received..(%s/%s)...Continuing Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
else:
errors = 0
xbmc.sleep(1000)
log_utils.log('Service: shutting down...')
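# --- Hedged arithmetic sketch (not part of the original add-on) --------------
# Mirrors the resume/progress math used above: trakt stores a percentage,
# playback seeks to that fraction of the runtime, and onPlayBackStopped
# recovers the same percentage from the elapsed seconds.
def _example_resume_math(resume_point=25.0, total_time=1800.0):
    resume_time = float(resume_point) * total_time / 100      # 450.0 seconds
    percent_played = int((resume_time / total_time) * 100)    # back to 25
    return resume_time, percent_played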
|
aplicatii-romanesti/allinclusive-kodi-pi
|
.kodi/addons/plugin.video.salts/service.py
|
Python
|
apache-2.0
| 5,535 | 0.00271 |
'''
New Integration Test for hybrid.
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.hybrid_operations as hyb_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import time
import os
postfix = time.strftime('%m%d-%H%M%S', time.localtime())
test_obj_dict = test_state.TestStateDict()
remote_bucket_name = 'test-bucket-%s' % postfix
test_stub = test_lib.lib_get_test_stub()
hybrid = test_stub.HybridObject()
def test():
hybrid.add_datacenter_iz(add_datacenter_only=True)
hybrid.add_bucket()
hybrid.detach_bucket()
hybrid.attach_bucket()
test_util.test_pass('Create Attach Detach OSS Bucket Test Success')
def env_recover():
if hybrid.oss_bucket_create:
hybrid.del_bucket()
#Will be called only if exception happens in test().
def error_cleanup():
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
|
zstackorg/zstack-woodpecker
|
integrationtest/vm/hybrid/test_attach_detach_oss_bucket.py
|
Python
|
apache-2.0
| 1,062 | 0.003766 |
import os
import uuid
from django.db import models
from django.core.files.uploadedfile import UploadedFile
from django.forms.forms import pretty_name
from . import get_image_cropper
from . import tasks
from . import settings
from . import utils
from . import signals
from .managers import AssetManager
from .fields import AssetRealFileField
try:
from ..versioning import manager
except ValueError:
from versioning import manager
try:
from ..cms.internal_tags.models import AutoTagModel
except ValueError:
from cms.internal_tags.models import AutoTagModel
class AssetBase(AutoTagModel):
UNKNOWN = 'unknown'
IMAGE = 'image'
DOCUMENT = 'document'
AUDIO = 'audio'
VIDEO = 'video'
TYPES = settings.ASSET_TYPES and settings.ASSET_TYPES or \
((UNKNOWN, 'Unknown'),
(IMAGE, 'Image'),
(DOCUMENT, 'Document'),
(AUDIO, 'Audio'),
(VIDEO, 'Video'),)
__original_file = None
title = models.CharField(max_length=255)
file = AssetRealFileField(upload_to=utils.assets_dir)
type = models.CharField(max_length=255, choices=TYPES, db_index=True)
slug = models.SlugField(unique=True, max_length=255)
user_filename = models.CharField(max_length=255)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
cbversion = models.PositiveIntegerField(editable=False)
objects = AssetManager()
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super(AssetBase, self).__init__(*args, **kwargs)
self.__original_file = self.file
def rename_file(self):
if self.type == self.DOCUMENT:
return False
return settings.HASH_FILENAME
def url(self):
"""
This is a wrapper of file.url
"""
return self.file.url
def generate_slug(self):
return str(uuid.uuid1())
def assign_tag(self):
pass
def delete_real_file(self, file_obj):
file_obj.storage.delete(file_obj.name)
signals.file_removed.send(file_obj.name)
def _can_crop(self):
return self.type == self.IMAGE
def reset_crops(self):
"""
Reset all known crops to the default crop.
If settings.ASSET_CELERY is specified then
the task will be run async
"""
if self._can_crop():
if settings.CELERY or settings.USE_CELERY_DECORATOR:
# this means that we are using celery
tasks.reset_crops.apply_async(args=[self.pk], countdown=5)
else:
tasks.reset_crops(None, asset=self)
def ensure_crops(self, *required_crops):
"""
Make sure a crop exists for each crop in required_crops.
Existing crops will not be changed.
If settings.ASSET_CELERY is specified then
the task will be run async
"""
if self._can_crop():
if settings.CELERY or settings.USE_CELERY_DECORATOR:
# this means that we are using celery
args = [self.pk]+list(required_crops)
tasks.ensure_crops.apply_async(args=args, countdown=5)
else:
tasks.ensure_crops(None, *required_crops, asset=self)
def create_crop(self, name, x, x2, y, y2):
"""
Create a crop for this asset.
"""
if self._can_crop():
spec = get_image_cropper().create_crop(name, self.file, x=x,
x2=x2, y=y, y2=y2)
ImageDetail.save_crop_spec(self, spec)
def save(self, *args, **kwargs):
"""
For new assets, creates a new slug.
For updates, deletes the old file from storage.
Calls super to actually save the object.
"""
if not self.pk and not self.slug:
self.slug = self.generate_slug()
if self.__original_file and self.file != self.__original_file:
self.delete_real_file(self.__original_file)
file_changed = True
if self.pk:
new_value = getattr(self, 'file')
if hasattr(new_value, "file"):
file_changed = isinstance(new_value.file, UploadedFile)
else:
self.cbversion = 0
if file_changed:
self.user_filename = os.path.basename(self.file.name)
self.cbversion = self.cbversion + 1
if not self.title:
self.title = self.user_filename
super(AssetBase, self).save(*args, **kwargs)
if file_changed:
signals.file_saved.send(self.file.name)
utils.update_cache_bust_version(self.file.url, self.cbversion)
self.reset_crops()
if self.__original_file and self.file.name != self.__original_file.name:
with manager.SwitchSchemaManager(None):
for related in self.__class__._meta.get_all_related_objects(
include_hidden=True):
field = related.field
if getattr(field, 'denormalize', None):
cname = field.get_denormalized_field_name(field.name)
if getattr(field, 'denormalize'):
related.model.objects.filter(**{
field.name: self.pk
}).update(**{
cname: self.file.name
})
def delete(self, *args, **kwargs):
"""
Deletes the actual file from storage after the object is deleted.
Calls super to actually delete the object.
"""
file_obj = self.file
super(AssetBase, self).delete(*args, **kwargs)
self.delete_real_file(file_obj)
def __unicode__(self):
return '%s' % (self.user_filename)
class ImageDetailBase(models.Model):
image = models.ForeignKey(settings.ASSET_MODEL)
width = models.PositiveIntegerField()
height = models.PositiveIntegerField()
name = models.CharField(max_length=255)
editable = models.BooleanField(editable=False, default=False)
x = models.PositiveIntegerField(null=True)
x2 = models.PositiveIntegerField(null=True)
y = models.PositiveIntegerField(null=True)
y2 = models.PositiveIntegerField(null=True)
class Meta:
abstract = True
def __unicode__(self):
return pretty_name(self.name)
def get_crop_config(self):
return get_image_cropper().get_crop_config(self.name)
@classmethod
def save_crop_spec(cls, asset, spec, update_version=True):
if spec:
cdict = spec.to_dict()
updated = cls.objects.filter(image=asset,
name=cdict['name']).update(**cdict)
if not updated:
cls(image=asset, **cdict).save()
if update_version:
asset.__class__.objects.filter(pk=asset.pk
).update(cbversion=models.F('cbversion')+1)
class Asset(AssetBase):
class Meta:
abstract = False
class ImageDetail(ImageDetailBase):
class Meta:
abstract = False
|
ff0000/scarlet
|
scarlet/assets/models.py
|
Python
|
mit
| 7,186 | 0.000835 |
#!/usr/bin/env python
#######################################################
# Copyright (c) 2015, ArrayFire
# All rights reserved.
#
# This file is distributed under 3-clause BSD license.
# The complete license agreement can be obtained at:
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################
import arrayfire as af
from . import _util
def simple_algorithm(verbose=False):
display_func = _util.display_func(verbose)
print_func = _util.print_func(verbose)
a = af.randu(3, 3)
k = af.constant(1, 3, 3, dtype=af.Dtype.u32)
af.eval(k)
print_func(af.sum(a), af.product(a), af.min(a), af.max(a), af.count(a), af.any_true(a), af.all_true(a))
display_func(af.sum(a, 0))
display_func(af.sum(a, 1))
rk = af.constant(1, 3, dtype=af.Dtype.u32)
rk[2] = 0
af.eval(rk)
display_func(af.sumByKey(rk, a, dim=0))
display_func(af.sumByKey(rk, a, dim=1))
display_func(af.productByKey(rk, a, dim=0))
display_func(af.productByKey(rk, a, dim=1))
display_func(af.minByKey(rk, a, dim=0))
display_func(af.minByKey(rk, a, dim=1))
display_func(af.maxByKey(rk, a, dim=0))
display_func(af.maxByKey(rk, a, dim=1))
display_func(af.anyTrueByKey(rk, a, dim=0))
display_func(af.anyTrueByKey(rk, a, dim=1))
display_func(af.allTrueByKey(rk, a, dim=0))
display_func(af.allTrueByKey(rk, a, dim=1))
display_func(af.countByKey(rk, a, dim=0))
display_func(af.countByKey(rk, a, dim=1))
display_func(af.product(a, 0))
display_func(af.product(a, 1))
display_func(af.min(a, 0))
display_func(af.min(a, 1))
display_func(af.max(a, 0))
display_func(af.max(a, 1))
display_func(af.count(a, 0))
display_func(af.count(a, 1))
display_func(af.any_true(a, 0))
display_func(af.any_true(a, 1))
display_func(af.all_true(a, 0))
display_func(af.all_true(a, 1))
display_func(af.accum(a, 0))
display_func(af.accum(a, 1))
display_func(af.scan(a, 0, af.BINARYOP.ADD))
display_func(af.scan(a, 1, af.BINARYOP.MAX))
display_func(af.scan_by_key(k, a, 0, af.BINARYOP.ADD))
display_func(af.scan_by_key(k, a, 1, af.BINARYOP.MAX))
display_func(af.sort(a, is_ascending=True))
display_func(af.sort(a, is_ascending=False))
b = (a > 0.1) * a
c = (a > 0.4) * a
d = b / c
print_func(af.sum(d))
print_func(af.sum(d, nan_val=0.0))
display_func(af.sum(d, dim=0, nan_val=0.0))
val, idx = af.sort_index(a, is_ascending=True)
display_func(val)
display_func(idx)
val, idx = af.sort_index(a, is_ascending=False)
display_func(val)
display_func(idx)
b = af.randu(3, 3)
keys, vals = af.sort_by_key(a, b, is_ascending=True)
display_func(keys)
display_func(vals)
keys, vals = af.sort_by_key(a, b, is_ascending=False)
display_func(keys)
display_func(vals)
c = af.randu(5, 1)
d = af.randu(5, 1)
cc = af.set_unique(c, is_sorted=False)
dd = af.set_unique(af.sort(d), is_sorted=True)
display_func(cc)
display_func(dd)
display_func(af.set_union(cc, dd, is_unique=True))
display_func(af.set_union(cc, dd, is_unique=False))
display_func(af.set_intersect(cc, cc, is_unique=True))
display_func(af.set_intersect(cc, cc, is_unique=False))
_util.tests["algorithm"] = simple_algorithm
|
arrayfire/arrayfire_python
|
tests/simple/algorithm.py
|
Python
|
bsd-3-clause
| 3,357 | 0.000298 |
#!/usr/bin/env python
import datetime
import logging
import math
import socket
import tables
import xml.etree.ElementTree as ET
logging.basicConfig(filename = 'mbta_daemon.log', level=logging.INFO)
logger = logging.getLogger('xml2hdf5')
class VehicleLocation(tables.IsDescription):
vehicleID = tables.StringCol(4)
route = tables.StringCol(8)
direction = tables.StringCol(16)
latitude = tables.Float64Col() #Reported latitude
longitude = tables.Float64Col() #Reported longitude
time = tables.Float64Col() #Time stamp in seconds since epoch time
heading = tables.UInt16Col() #Heading in degrees
def parse_mbta_xml(database, thefile, presentData = None):
"""
Parses MBTA XML data and adds it to a HDF5 database.
Inputs:
database: Handle to HDF5 file
thefile: Name of XML file to parse
presentData: A dictionary hash of present data (to save time on the check)
If absent, will use database queries (much slower)
"""
try:
tree = ET.parse(thefile)
root = tree.getroot()
except ET.ParseError: #Error parsing XML content of the file
logger.error('Could not find root of XML file: %s', thefile)
return
#Part 1. Get epoch time to nearest second
# MBTA reports in whole units of milliseconds
timeData = root.find('lastTime')
if timeData is None: #Maybe XML returned an error of some sort
logger.warning('XML file %s does not have time data', thefile)
return
report_time = long(timeData.attrib['time'][:-3])
#Part 2. Parse vehicle location data.
for thevehicle in root.iter('vehicle'):
rawdata = thevehicle.attrib #Raw MBTA data
data= {}
try:
#Bus was here at this epoch time
data['time'] = report_time - long(rawdata['secsSinceReport'])
data['vehicleID'] = rawdata['id']
data['route'] = rawdata['routeTag']
data['direction'] = rawdata['dirTag']
data['latitude'] = rawdata['lat']
data['longitude'] = rawdata['lon']
data['heading'] = rawdata['heading']
except KeyError:
pass
#Part 3. Make sure record is not a duplicate
if presentData is None:
#No hashes. Query database to check that this record wasn't already reported
queryString = '((vehicleID == "%(vehicleID)s") & (time == %(time)s))' % data
try:
query = database.getWhereList(queryString)
except tables.exceptions.HDF5ExtError:
                # gets thrown when the HDF5 file is open and being written to
logger.critical("Could not get file lock on HDF5 file. Abort.")
import sys
sys.exit()
if len(query) == 0:
vehiclePosition = database.row
for key, value in data.items():
vehiclePosition[key] = value
vehiclePosition.append()
else:
assert len(query) == 1, "OMG COLLISION"
else:
#Use hashes to check if record is already reported
if (data['vehicleID'], data['time']) not in presentData:
vehiclePosition = database.row
for key, value in data.items():
vehiclePosition[key] = value
vehiclePosition.append()
presentData[data['vehicleID'], data['time']] = True
database.flush()
logger.info('Parsed data from XML file: %s', thefile)
return presentData
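# --- Hedged usage sketch (not part of the original daemon) -------------------
# Reads one vehicle's trajectory back out of the HDF5 table using the same
# old-style (camelCase) PyTables API as the code above; the file name and
# vehicle id are illustrative.
def example_query_trajectory(h5_filename='mbta_trajectories.h5', vehicle_id='0123'):
    h5file = tables.openFile(h5_filename, mode='r')
    try:
        rows = h5file.root.VehicleLocations.readWhere(
            'vehicleID == "%s"' % vehicle_id)
        return sorted((r['time'], r['latitude'], r['longitude']) for r in rows)
    finally:
        h5file.close()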
def ParseAll(theHDF5FileName = 'mbta_trajectories.h5', Cleanup = True):
import glob, os
try:
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
## Create an abstract socket, by prefixing it with null.
s.bind('\0mbta_hdf5_writer_'+theHDF5FileName)
compressionOptions = tables.Filters(complevel=9, complib='blosc')
f = tables.openFile(theHDF5FileName, mode = 'a',
filters = compressionOptions, title = 'Historical MBTA bus data')
logging.debug('Lock acquired on %s', theHDF5FileName)
except socket.error:
logging.error('Lock could not be acquired on %s', theHDF5FileName)
return
try:
thetable = f.root.VehicleLocations
except tables.exceptions.NoSuchNodeError:
thetable = f.createTable('/', 'VehicleLocations', VehicleLocation,
'MBTA vehicle positions', filters = compressionOptions)
#Create table indexers
thetable.cols.time.createIndex()
thetable.cols.vehicleID.createIndex()
#Hash current data
presentData = {}
for row in thetable:
presentData[row['vehicleID'], row['time']] = True
for filename in sorted(glob.glob('*.xml')):
presentData = parse_mbta_xml(thetable, filename, presentData)
if Cleanup:
os.unlink(filename)
f.close()
if __name__ == '__main__':
ParseAll()
|
jiahao/godot
|
parseh5.py
|
Python
|
mit
| 5,010 | 0.011976 |
import logging
from datetime import datetime
import os
import json
from flask import request, g, Response
#from openspending.core import cache
from openspending.auth import require
from openspending.lib.jsonexport import jsonify
from openspending.views.api_v2.common import blueprint
from openspending.views.error import api_json_errors
#imports prepare_cell_cubes_ext
from openspending.lib.cubes_util import *
from openspending.lib.cache import cache_key
from openspending.core import cache
from cubes.server.utils import *
from cubes.formatters import JSONLinesGenerator, csv_generator, xls_generator
from cubes.browser import SPLIT_DIMENSION_NAME
from cubes.server.decorators import prepare_cell
log = logging.getLogger(__name__)
@blueprint.route("/api/slicer/cube/<star_name>/cubes_model", methods=["JSON", "GET"])
@requires_complex_browser
@api_json_errors
@cache.cached(timeout=60, key_prefix=cache_key)
#@log_request("aggregate", "aggregates")
def cubes_model(star_name):
cubes_arg = request.args.get("cubes", None)
try:
cubes = cubes_arg.split("|")
except:
raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'"
% (cubes_arg) )
if len (cubes) > 5:
raise RequestError("You can only join 5 cubes together at one time")
g.cube = get_complex_cube(star_name, cubes)
hier_limits = None
# Copy from the application context
#g.json_record_limit = current_app.slicer.json_record_limit
g.json_record_limit = 10000
if "prettyprint" in request.args:
g.prettyprint = str_to_bool(request.args.get("prettyprint"))
else:
g.prettyprint = current_app.slicer.prettyprint
response = g.cube.to_dict(expand_dimensions=True,
with_mappings=False,
full_attribute_names=True,
create_label=True,
hierarchy_limits=hier_limits)
response["features"] = workspace.cube_features(g.cube)
return jsonify(response)
def xlschecker(*args, **kwargs):
if "format" in request.args:
if request.args.get("format") in ['excel', 'csv']:
return True
return False
@blueprint.route("/api/slicer/cube/<star_name>/cubes_aggregate", methods=["JSON", "GET"])
@requires_complex_browser
@api_json_errors
@cache.cached(timeout=60, key_prefix=cache_key, unless=xlschecker)
def aggregate_cubes(star_name):
cubes_arg = request.args.get("cubes", None)
try:
cubes = cubes_arg.split("|")
except:
raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'"
% (cubes_arg) )
if len (cubes) > 5:
raise RequestError("You can only join 5 cubes together at one time")
g.cube = get_complex_cube(star_name, cubes)
g.browser = current_app.cubes_workspace.browser(g.cube)
cube = g.cube
output_format = validated_parameter(request.args, "format",
values=["json", "csv", "excel"],
default="json")
header_type = validated_parameter(request.args, "header",
values=["names", "labels", "none"],
default="labels")
fields_str = request.args.get("fields")
if fields_str:
fields = fields_str.lower().split(',')
else:
fields = None
# Aggregates
# ----------
aggregates = []
for agg in request.args.getlist("aggregates") or []:
aggregates += agg.split("|")
drilldown = []
ddlist = request.args.getlist("drilldown")
if ddlist:
for ddstring in ddlist:
drilldown += ddstring.split("|")
#this handles cuts with geometry__time
prepare_cell_cubes_ext(restrict=False)
prepare_cell("split", "split")
result = g.browser.aggregate(g.cell,
aggregates=aggregates,
drilldown=drilldown,
split=g.split,
page=g.page,
page_size=g.page_size,
order=g.order)
# Hide cuts that were generated internally (default: don't)
if current_app.slicer.hide_private_cuts:
result.cell = result.cell.public_cell()
# Copy from the application context
#g.json_record_limit = current_app.slicer.json_record_limit
g.json_record_limit = 10000
if "prettyprint" in request.args:
g.prettyprint = str_to_bool(request.args.get("prettyprint"))
else:
g.prettyprint = current_app.slicer.prettyprint
if output_format == "json":
resultdict= result.to_dict()
tempcells = list(result._cells)
resultdict['cells'] = tempcells
resultdict['cell'] = list(resultdict['cell'])
if "cluster" in request.args:
clusteragg = request.args.get('clusteragg', 'avg')
if len(cubes) > 1 or len(cubes) < 1:
log.warn("cluster must have one and only one cube. This call had %s"%str(cubes))
if clusteragg in ['avg', 'min', 'max', 'sum']:
clusterfield = "%s__amount_%s"%(cubes[0], clusteragg,)
numclusters = request.args.get('numclusters',5)
tempresult = get_cubes_breaks(resultdict['cells'], clusterfield, method=request.args.get('cluster'), k=numclusters)
tempresult['data'] = list(tempresult['data'])
                resultdict['cluster'] = tempresult
resp = Response(response=json.dumps(resultdict),
status=200, \
mimetype="application/json")
return(resp)
elif output_format not in ["csv","excel"]:
raise RequestError("unknown response format '%s'" % output_format)
# csv
if header_type == "names":
header = result.labels
elif header_type == "labels":
header = []
for l in result.labels:
# TODO: add a little bit of polish to this
if l == SPLIT_DIMENSION_NAME:
header.append('Matches Filters')
else:
header += [ attr.label or attr.name for attr in cube.get_attributes([l], aggregated=True) ]
else:
header = None
fields = result.labels
try:
filename_output = cubes[0] + "_" + datetime.now().strftime("%Y-%m-%d")
except:
filename_output = "aggregate_" + datetime
if output_format == "excel":
output_string = xls_generator(result,
fields,
include_header=bool(header),
header=header)
headers = {"Content-Disposition": 'attachment; filename="' + filename_output + '.xlsx"'}
return Response(output_string,
mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
headers=headers)
else:
generator = csv_generator(result,
fields,
include_header=bool(header),
header=header)
headers = {"Content-Disposition": 'attachment; filename="' + filename_output + '.csv"'}
return Response(generator,
mimetype='text/csv',
headers=headers)
@blueprint.route("/api/slicer/cube/<star_name>/cubes_facts", methods=["JSON", "GET"])
@requires_complex_browser
@api_json_errors
@cache.cached(timeout=60, key_prefix=cache_key)
#@log_request("facts", "fields")
def cubes_facts(star_name):
cubes_arg = request.args.get("cubes", None)
try:
cubes = cubes_arg.split("|")
except:
raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'"
% (cubes_arg) )
if len (cubes) > 5:
raise RequestError("You can only join 5 cubes together at one time")
g.cube = get_complex_cube(star_name, cubes)
g.browser = current_app.cubes_workspace.browser(g.cube)
# Copy from the application context
g.json_record_limit = current_app.slicer.json_record_limit
if "prettyprint" in request.args:
g.prettyprint = str_to_bool(request.args.get("prettyprint"))
else:
g.prettyprint = current_app.slicer.prettyprint
# Request parameters
fields_str = request.args.get("fields")
if fields_str:
fields = fields_str.split(',')
else:
fields = None
# fields contain attribute names
if fields:
attributes = g.cube.get_attributes(fields)
else:
attributes = g.cube.all_attributes
# Construct the field list
fields = [attr.ref() for attr in attributes]
# Get the result
facts = g.browser.facts(g.cell,
fields=fields,
order=g.order,
page=g.page,
page_size=g.page_size)
# Add cube key to the fields (it is returned in the result)
fields.insert(0, g.cube.key or "id")
# Construct the header
labels = [attr.label or attr.name for attr in attributes]
labels.insert(0, g.cube.key or "id")
return formated_response(facts, fields, labels)
|
nathanhilbert/FPA_Core
|
openspending/views/api_v2/cubes_ext.py
|
Python
|
agpl-3.0
| 9,418 | 0.008388 |
# Transformer/Utilities/__init__.py
|
JMSkelton/Transformer
|
Transformer/Utilities/__init__.py
|
Python
|
gpl-3.0
| 36 | 0 |
import bcrypt
from hashlib import sha512
from helptux import db, login_manager
class Role(db.Model):
__tablename__ = 'roles'
id = db.Column(db.Integer, primary_key=True)
role = db.Column(db.String(255), index=True, unique=True)
def __repr__(self):
return '<Role {0}>'.format(self.role)
def __init__(self, role):
self.role = role
users_roles = db.Table('users_roles',
db.Column('user_id', db.Integer, db.ForeignKey('users.id')),
db.Column('role_id', db.Integer, db.ForeignKey('roles.id'))
)
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255), index=True, unique=True, nullable=False)
email = db.Column(db.String(255), index=True, unique=True, nullable=False)
password_hash = db.Column(db.String(), nullable=False)
posts = db.relationship('Post', backref='author', lazy='dynamic')
authenticated = db.Column(db.Boolean, default=False)
roles = db.relationship('Role',
secondary=users_roles,
primaryjoin=(users_roles.c.user_id == id),
secondaryjoin=(users_roles.c.role_id == Role.id),
backref=db.backref('users', lazy='dynamic'),
lazy='dynamic')
def __init__(self, email, password):
self.email = email
self.username = self.email
self.set_password(password)
def __repr__(self):
return '<User {0}>'.format(self.username)
def output_obj(self):
return {
'id': self.id,
'username': self.username,
'posts': [p.id for p in self.posts],
'roles': [r.id for r in self.roles]
}
def set_password(self, input_password):
bit_input = input_password.encode('utf-8')
self.password_hash = bcrypt.hashpw(bit_input, bcrypt.gensalt())
def verify_password(self, input_password):
bit_input = input_password.encode('utf-8')
if bcrypt.hashpw(bit_input, self.password_hash) == self.password_hash:
return True
else:
return False
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return str(self.id)
def is_authenticated(self):
return self.authenticated
def has_role(self, role_name):
for role in self.roles:
if role.role == role_name:
return True
return False
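# --- Hedged sketch (not part of the original module) -------------------------
# The same bcrypt hash/verify pattern used by set_password()/verify_password()
# above, shown without the SQLAlchemy model around it.
def _example_bcrypt_roundtrip(password='hunter2'):
    hashed = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())
    return bcrypt.hashpw(password.encode('utf-8'), hashed) == hashed  # True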
|
pieterdp/helptux
|
helptux/models/user.py
|
Python
|
gpl-2.0
| 2,613 | 0.001148 |
import csv
from openpyxl import load_workbook
import io
from dwarfsquad.lib.build.from_export import build_compound_methods, build_lots_and_levels
from dwarfsquad.lib.build.from_export.build_assay_configuration import build_assay_configuration
from dwarfsquad.lib.build.from_export.build_rulesettings import add_rules_to_methods
from dwarfsquad.lib.export.export_rulesettings import generate_rule_schemas
from dwarfsquad.lib.macros.generate_macros import generate_macros
def build_full_ac(path_to_xlsx):
wb = load_workbook(path_to_xlsx)
validate_workbook(wb)
ac = build_assay_configuration(read_csv_from_sheet(wb.get_sheet_by_name('Assay')))
ac.compound_methods = build_compound_methods(read_csv_from_sheet(wb.get_sheet_by_name('Compound')))
ac.lots = build_lots_and_levels(read_csv_from_sheet(wb.get_sheet_by_name('Lots')))
ac.compound_methods = add_rules_to_methods(read_csv_from_sheet(wb.get_sheet_by_name('Rule')), ac.compound_methods)
ac.qa_rule_schemas = generate_rule_schemas(ac)
if not ac.macros:
ac.macros = generate_macros(ac)
return ac
def get_column_value(c):
if c.value:
try:
return str(round(c.value, 8))
except TypeError:
return str(c.value)
else:
return ''
def read_csv_from_sheet(worksheet):
stream = io.StringIO()
for row in worksheet.rows:
stream.write(u','.join([get_column_value(c) for c in row]))
stream.write(u'\n')
reader = csv.DictReader(stream.getvalue().splitlines())
rows = [r for r in reader]
return rows
def validate_workbook(wb):
assert 'Assay' in wb
assert 'Compound' in wb
assert 'Lots' in wb
assert 'Rule' in wb
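# --- Hedged usage sketch (not part of the original importer) -----------------
# Feeds a tiny in-memory worksheet through read_csv_from_sheet() to show the
# dict rows it produces; the column names and values are made up.
def _example_read_sheet():
    from openpyxl import Workbook
    wb = Workbook()
    ws = wb.active
    ws.append(["name", "mz"])
    ws.append(["caffeine", 195.0876])
    return read_csv_from_sheet(ws)  # [{'name': 'caffeine', 'mz': '195.0876'}]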
|
whereskenneth/Dwarfsquad
|
dwarfsquad/lib/build/from_xlsx/build_full_ac.py
|
Python
|
mit
| 1,706 | 0.004103 |
from __future__ import absolute_import, unicode_literals
import flask
import os
import logging
from flask_heroku import Heroku
from flask_redis import Redis
from flask_sslify import SSLify
from flask_sqlalchemy import SQLAlchemy
from raven.contrib.flask import Sentry
from werkzeug.contrib.fixers import ProxyFix
from freight.api.controller import ApiController
from freight.constants import PROJECT_ROOT
from freight.utils.celery import ContextualCelery
api = ApiController(prefix='/api/0')
db = SQLAlchemy(session_options={})
celery = ContextualCelery()
heroku = Heroku()
redis = Redis()
sentry = Sentry(logging=True, level=logging.WARN)
def configure_logging(app):
logging.getLogger().setLevel(getattr(logging, app.config['LOG_LEVEL']))
def create_app(_read_config=True, **config):
from kombu import Queue
app = flask.Flask(
__name__,
static_folder=None,
template_folder=os.path.join(PROJECT_ROOT, 'templates'))
# Utilized for sessions and other secrets
# NOTE: This key is insecure and you should override it on the server
app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'
if 'SECRET_KEY' in os.environ:
app.config['SECRET_KEY'] = os.environ['SECRET_KEY']
# The api key to authorize end users against this system.
# NOTE: This key is insecure and you should override it on the server
app.config['API_KEY'] = '3e84744ab2714151b1db789df82b41c0021958fe4d77406e9c0947c34f5c5a70'
if 'API_KEY' in os.environ:
app.config['API_KEY'] = os.environ['API_KEY']
# The private key to use when cloning repositories
# TODO(dcramer): this should support an on-disk option, as well as be
# possible to override per repo
app.config['SSH_PRIVATE_KEY'] = os.environ.get('SSH_PRIVATE_KEY', '').replace("\\n", "\n")
app.config['FREIGHT_URL'] = os.environ.get('FREIGHT_URL', '').rstrip('/')
if 'REDISCLOUD_URL' in os.environ:
app.config['REDIS_URL'] = os.environ['REDISCLOUD_URL']
app.config['WORKSPACE_ROOT'] = os.environ.get('WORKSPACE_ROOT', '/tmp')
app.config['DEFAULT_TIMEOUT'] = int(os.environ.get('DEFAULT_TIMEOUT', 300))
app.config['LOG_LEVEL'] = os.environ.get('LOG_LEVEL', 'INFO' if config.get('DEBUG') else 'ERROR')
# Currently authentication requires Google
app.config['GOOGLE_CLIENT_ID'] = os.environ.get('GOOGLE_CLIENT_ID')
app.config['GOOGLE_CLIENT_SECRET'] = os.environ.get('GOOGLE_CLIENT_SECRET')
app.config['GOOGLE_DOMAIN'] = os.environ.get('GOOGLE_DOMAIN')
# Generate a GitHub token via Curl:
# curlish https://api.github.com/authorizations \
# -u your-username \
# -X POST \
# -J scopes='repo' \
# -J note='freight'
app.config['GITHUB_TOKEN'] = os.environ.get('GITHUB_TOKEN')
app.config['GITHUB_API_ROOT'] = 'https://api.github.com'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
app.config['SQLALCHEMY_POOL_SIZE'] = 60
app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
if 'SQLALCHEMY_DATABASE_URI' in os.environ:
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['SQLALCHEMY_DATABASE_URI']
app.config['BROKER_TRANSPORT'] = None
if 'BROKER_URL' in os.environ:
app.config['BROKER_URL'] = os.environ['BROKER_URL']
app.config['CELERY_ACCEPT_CONTENT'] = ['json']
app.config['CELERY_ACKS_LATE'] = True
app.config['CELERY_DEFAULT_QUEUE'] = "default"
app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
app.config['CELERY_DISABLE_RATE_LIMITS'] = True
app.config['CELERY_IGNORE_RESULT'] = True
app.config['CELERY_RESULT_BACKEND'] = None
app.config['CELERY_RESULT_SERIALIZER'] = 'json'
app.config['CELERY_SEND_EVENTS'] = False
app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
app.config['CELERY_TASK_SERIALIZER'] = 'json'
app.config['CELERY_TIMEZONE'] = 'UTC'
app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000
app.config['CELERY_QUEUES'] = (
Queue('default', routing_key='default'),
Queue('freight.tasks', routing_key='freight.tasks'),
)
app.config['CELERY_IMPORTS'] = (
'freight.tasks',
)
app.config['CELERY_ROUTES'] = {
'freight.execute_task': {
'queue': 'freight.tasks',
'routing_key': 'freight.tasks',
},
}
app.config['SENTRY_INCLUDE_PATHS'] = [
'ds',
]
# We don't support non-proxied installs
app.wsgi_app = ProxyFix(app.wsgi_app)
# Pull in Heroku configuration
heroku.init_app(app)
if 'DYNO' in os.environ:
# XXX: the released version of flask-sslify does not support init_app
SSLify(app)
# Set any remaining defaults that might not be present yet
if not app.config.get('SQLALCHEMY_DATABASE_URI'):
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///freight'
if not app.config.get('BROKER_URL'):
app.config['BROKER_URL'] = 'redis://localhost/0'
app.config.update(config)
if _read_config:
if os.environ.get('FREIGHT_CONF'):
# FREIGHT_CONF=/etc/freight.conf.py
app.config.from_envvar('FREIGHT_CONF')
else:
# Look for ~/.freight/freight.conf.py
path = os.path.normpath(os.path.expanduser('~/.freight/freight.conf.py'))
app.config.from_pyfile(path, silent=True)
configure_logging(app)
configure_sentry(app)
configure_api(app)
configure_celery(app)
configure_redis(app)
configure_sqlalchemy(app)
configure_web_routes(app)
return app
def configure_api(app):
from freight.api.controller import ApiCatchall
from freight.api.app_details import AppDetailsApiView
from freight.api.app_index import AppIndexApiView
from freight.api.stats import StatsApiView
from freight.api.task_details import TaskDetailsApiView
from freight.api.task_index import TaskIndexApiView
from freight.api.task_log import TaskLogApiView
api.add_resource(AppIndexApiView, '/apps/')
api.add_resource(AppDetailsApiView, '/apps/<app_id>/')
api.add_resource(StatsApiView, '/stats/')
api.add_resource(TaskIndexApiView, '/tasks/')
api.add_resource(TaskDetailsApiView, '/tasks/<task_id>/')
api.add_resource(TaskLogApiView, '/tasks/<task_id>/log/')
# catchall should be the last resource
api.add_resource(ApiCatchall, '/<path:path>')
# init must be called after routes are registered
api.init_app(app)
def configure_celery(app):
celery.init_app(app)
def configure_redis(app):
redis.init_app(app)
def configure_sentry(app):
from flask import session
from raven.contrib.celery import register_signal, register_logger_signal
sentry.init_app(app)
@app.before_request
def capture_user(*args, **kwargs):
if 'uid' in session:
sentry.client.user_context({
'id': session['uid'],
'email': session['email'],
})
register_signal(sentry.client)
register_logger_signal(sentry.client)
def configure_sqlalchemy(app):
db.init_app(app)
def configure_web_routes(app):
from freight.web.auth import AuthorizedView, LoginView, LogoutView
from freight.web.index import IndexView
from freight.web.static import StaticView
static_root = os.path.join(PROJECT_ROOT, 'dist')
app.add_url_rule(
'/static/<path:filename>',
view_func=StaticView.as_view(b'static', root=static_root))
app.add_url_rule(
'/auth/login/',
view_func=LoginView.as_view(b'login', authorized_url='authorized'))
app.add_url_rule(
'/auth/logout/',
view_func=LogoutView.as_view(b'logout', complete_url='index'))
app.add_url_rule(
'/auth/complete/',
view_func=AuthorizedView.as_view(b'authorized', authorized_url='authorized', complete_url='index'))
index_view = IndexView.as_view(b'index', login_url='login')
app.add_url_rule('/', view_func=index_view)
app.add_url_rule('/<path:path>', view_func=index_view)
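# --- Hedged usage sketch (not part of the original module) -------------------
# The application-factory pattern above lets a test build an isolated app with
# explicit overrides instead of reading FREIGHT_CONF or
# ~/.freight/freight.conf.py; the URIs below are placeholders.
def _example_test_app():
    return create_app(
        _read_config=False,
        DEBUG=True,
        SQLALCHEMY_DATABASE_URI='postgresql:///freight_test',
        BROKER_URL='redis://localhost/9',
    )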
|
jkimbo/freight
|
freight/config.py
|
Python
|
apache-2.0
| 8,210 | 0.000853 |
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import datetime
import os
import re
import shutil
import sys
import yaml
def parse_opts(argv):
parser = argparse.ArgumentParser(
description='Convert to new NIC config templates with '
'OS::Heat::Value resources.')
parser.add_argument('-t', '--template', metavar='TEMPLATE_FILE',
help=("Existing NIC config template to conver."),
required=True)
parser.add_argument('--discard-comments', metavar='DISCARD_COMMENTS',
help="Discard comments from the template. (The "
"scripts functions to keep YAML file comments in "
"place, does not work in all scenarios.)",
default=False)
opts = parser.parse_args(argv[1:])
return opts
def to_commented_yaml(filename):
"""Convert comments into 'comments<num>: ...' YAML"""
out_str = ''
last_non_comment_spaces = ''
with open(filename, 'r') as f:
comment_count = 0
for line in f:
# skip blank line
if line.isspace():
continue
char_count = 0
spaces = ''
for char in line:
char_count += 1
if char == ' ':
spaces += ' '
next
elif char == '#':
comment_count += 1
comment = line[char_count:-1]
last_non_comment_spaces = spaces
out_str += "%scomment%i_%i: '%s'\n" % (
last_non_comment_spaces, comment_count, len(spaces),
comment)
break
else:
last_non_comment_spaces = spaces
out_str += line
# inline comments check
m = re.match(".*:.*#(.*)", line)
if m:
comment_count += 1
out_str += "%s inline_comment%i: '%s'\n" % (
last_non_comment_spaces, comment_count, m.group(1))
break
with open(filename, 'w') as f:
f.write(out_str)
return out_str
def to_normal_yaml(filename):
"""Convert back to normal #commented YAML"""
with open(filename, 'r') as f:
data = f.read()
out_str = ''
next_line_break = False
for line in data.split('\n'):
# get_input not supported by run-os-net-config.sh script
line = line.replace('get_input: ', '')
# Normal comments
m = re.match(" +comment[0-9]+_([0-9]+): '(.*)'.*", line)
# Inline comments
i = re.match(" +inline_comment[0-9]+: '(.*)'.*", line)
if m:
if next_line_break:
out_str += '\n'
next_line_break = False
for x in range(0, int(m.group(1))):
out_str += " "
out_str += "#%s\n" % m.group(2)
elif i:
out_str += " #%s\n" % i.group(1)
next_line_break = False
else:
if next_line_break:
out_str += '\n'
out_str += line
next_line_break = True
if next_line_break:
out_str += '\n'
with open(filename, 'w') as f:
f.write(out_str)
return out_str
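# --- Hedged round-trip sketch (not part of the original tool) ----------------
# Encodes a '#' comment as a 'comment<n>_<indent>' key with to_commented_yaml()
# and then restores it with to_normal_yaml(), using a throwaway temp file.
def _example_comment_roundtrip():
    import tempfile
    fd, path = tempfile.mkstemp(suffix='.yaml')
    os.close(fd)
    with open(path, 'w') as f:
        f.write('resources:\n  # keep this comment\n  foo: bar\n')
    to_commented_yaml(path)      # '# keep this comment' -> "comment1_2: '...'"
    return to_normal_yaml(path)  # restores the '#' form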
class TemplateDumper(yaml.SafeDumper):
def represent_ordered_dict(self, data):
return self.represent_dict(data.items())
def description_presenter(self, data):
if not len(data) > 80:
return self.represent_scalar('tag:yaml.org,2002:str', data)
return self.represent_scalar('tag:yaml.org,2002:str', data, style='>')
class TemplateLoader(yaml.SafeLoader):
def construct_mapping(self, node):
self.flatten_mapping(node)
return collections.OrderedDict(self.construct_pairs(node))
TemplateDumper.add_representer(str,
TemplateDumper.description_presenter)
TemplateDumper.add_representer(bytes,
TemplateDumper.description_presenter)
TemplateDumper.add_representer(collections.OrderedDict,
TemplateDumper.represent_ordered_dict)
TemplateLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
TemplateLoader.construct_mapping)
def write_template(template, filename=None):
with open(filename, 'w') as f:
yaml.dump(template, f, TemplateDumper, width=120,
default_flow_style=False)
def validate_template(template):
if not os.path.exists(template):
raise RuntimeError('Template not provided.')
if not os.path.isfile(template):
        raise RuntimeError('Template %s is not a file.' % template)
pass
def backup_template(template):
extension = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
backup_filename = os.path.realpath(template) + '.' + extension
if os.path.exists(backup_filename):
        raise RuntimeError('Backup file: %s already exists. Aborting!'
% backup_filename)
shutil.copyfile(template, backup_filename)
print('The original template was saved as: %s' % backup_filename)
def needs_conversion():
with open(OPTS.template, 'r') as f:
template = yaml.load(f.read(), Loader=TemplateLoader)
net_config_res = template['resources'].get('OsNetConfigImpl')
if (net_config_res and net_config_res[
'type'] == 'OS::Heat::SoftwareConfig'):
backup_template(OPTS.template)
if not OPTS.discard_comments:
# Convert comments '# ...' into 'comments<num>: ...'
# is not lost when loading the data.
to_commented_yaml(OPTS.template)
return True
return False
def convert_to_heat_value_resource():
if needs_conversion():
with open(OPTS.template, 'r') as f:
template = yaml.load(f.read(), Loader=TemplateLoader)
net_config_res = template['resources']['OsNetConfigImpl']
net_config_res_props = net_config_res['properties']
# set the type to OS::Heat::Value
net_config_res['type'] = 'OS::Heat::Value'
del net_config_res_props['group']
old_config = net_config_res_props['config']
new_config = old_config['str_replace']['params']['$network_config']
del net_config_res_props['config']
net_config_res_props['value'] = new_config
outputs = template['outputs']
del outputs['OS::stack_id']
outputs['config'] = {}
outputs['config']['value'] = 'get_attr[OsNetConfigImpl, value]'
write_template(template, filename=OPTS.template)
if not OPTS.discard_comments:
# Convert previously converted comments, 'comments<num>: ...'
# YAML back to normal #commented YAML
to_normal_yaml(OPTS.template)
print('The update template was saved as: %s' % OPTS.template)
else:
print('Template does not need conversion: %s' % OPTS.template)
OPTS = parse_opts(sys.argv)
convert_to_heat_value_resource()
|
openstack/tripleo-heat-templates
|
tools/convert_nic_config.py
|
Python
|
apache-2.0
| 7,737 | 0 |
from Monument import Monument, Dataset
import importer_utils as utils
import importer as importer
class DkBygningDa(Monument):
def set_adm_location(self):
if self.has_non_empty_attribute("kommune"):
if utils.count_wikilinks(self.kommune) == 1:
adm_location = utils.q_from_first_wikilink("da", self.kommune)
self.add_statement("located_adm", adm_location)
def set_location(self):
"""
Set location based on 'by' column.
If there's one wikilinked item, confirm that
the corresponding WD item is of a type that's
a subclass of 'human settlement', using query results
downloaded by importer.
If not wikilinked, check if there's a dawp article
with the same name and do the same check.
"""
place_item = None
if self.has_non_empty_attribute("by"):
place = self.by
if utils.count_wikilinks(place) == 1:
place = utils.get_wikilinks(place)[0].title
if utils.wp_page_exists("da", place):
place_item = utils.q_from_wikipedia("da", place)
if place_item:
place_item_ids = utils.get_P31(place_item, self.repo)
for p31_value in place_item_ids:
if p31_value in self.data_files["settlement"]:
self.add_statement("location", place_item)
# there can be more than one P31, but after first positive
# we can leave
return
def set_sagsnr(self):
"""Danish listed buildings case ID (P2783)."""
self.add_statement("listed_building_dk", str(self.sagsnr))
def update_labels(self):
self.add_label("da", utils.remove_markup(self.sagsnavn))
def set_address(self):
"""
Set address of object.
self.addresse is always streetname + number.
self.postnr is always zipcode
self.by is always placename.
"""
if self.has_non_empty_attribute("adresse"):
address = self.adresse + " " + self.postnr + " " + self.by
self.add_statement("located_street", address)
def set_inception(self):
if self.has_non_empty_attribute("opforelsesar"):
inception = utils.parse_year(self.opforelsesar)
if isinstance(inception, int):
self.add_statement(
"inception", utils.package_time({"year": inception}))
def set_monuments_all_id(self):
"""Map monuments_all ID to fields in this table."""
self.monuments_all_id = "{!s}-{!s}-{!s}".format(
self.kommunenr, self.ejendomsnr, self.bygningsnr)
def __init__(self, db_row_dict, mapping, data_files, existing):
Monument.__init__(self, db_row_dict, mapping, data_files, existing)
self.set_monuments_all_id()
self.update_labels()
self.exists("da")
self.set_commonscat()
self.set_image("billede")
self.set_coords(("lat", "lon"))
self.set_adm_location()
self.set_location()
self.set_sagsnr()
self.set_address()
self.set_inception()
self.exists_with_prop(mapping)
self.print_wd()
if __name__ == "__main__":
"""Point of entrance for importer."""
args = importer.handle_args()
dataset = Dataset("dk-bygninger", "da", DkBygningDa)
dataset.subclass_downloads = {"settlement": "Q486972"}
importer.main(args, dataset)
|
Vesihiisi/COH-tools
|
importer/DkBygningDa.py
|
Python
|
mit
| 3,500 | 0 |
"""helpers.py -- supporting routines for PyBlaster project
@Author Ulrich Jansen <ulrich.jansen@rwth-aachen.de>
"""
suffixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
def humansize(nbytes):
if nbytes == 0:
return '0 B'
i = 0
while nbytes >= 1024 and i < len(suffixes)-1:
nbytes /= 1024.
i += 1
f = ('%.2f' % nbytes).rstrip('0').rstrip('.')
return '%s %s' % (f, suffixes[i])
def seconds_to_minutes(nsecs):
if nsecs == 0:
return ""
return "%d:%02d" % (int(nsecs / 60), nsecs % 60)
|
ujac81/PiBlaster
|
Pi/PyBlaster/src/helpers.py
|
Python
|
gpl-3.0
| 541 | 0 |
import unittest
from cumulusci.core import template_utils
class TemplateUtils(unittest.TestCase):
def test_string_generator(self):
x = 100
y = template_utils.StringGenerator(lambda: str(x))
assert str(y) == "100"
x = 200
assert str(y) == "200"
def test_faker_library(self):
fake = template_utils.FakerTemplateLibrary()
assert fake.first_name
assert "example.com" in fake.email(domain="example.com")
def test_faker_languages(self):
fake = template_utils.FakerTemplateLibrary("no_NO")
assert fake.first_name
assert "example.com" in fake.email(domain="example.com")
def test_format_str(self):
assert template_utils.format_str("abc") == "abc"
assert template_utils.format_str("{{abc}}", {"abc": 5}) == "5"
assert len(template_utils.format_str("{{fake.first_name}}"))
assert "15" in template_utils.format_str(
"{{fake.first_name}} {{count}}", {"count": 15}
)
assert "15" in template_utils.format_str(
"{{fake.first_name}} {{count}}", {"count": "15"}
)
assert (
template_utils.format_str("{% raw %}{}{% endraw %}", {"count": "15"})
== "{}"
)
def test_format_str_languages(self):
norwegian_faker = template_utils.FakerTemplateLibrary("no_NO")
val = template_utils.format_str(
"{{vikingfake.first_name}} {{abc}}",
{"abc": 5, "vikingfake": norwegian_faker},
)
assert "5" in val
def cosmopolitan_faker(language):
return template_utils.FakerTemplateLibrary(language)
val = template_utils.format_str(
"{{fakei18n('ne_NP').first_name}} {{abc}}",
{"abc": 5, "fakei18n": cosmopolitan_faker, "type": type},
)
assert "5" in val
|
SalesforceFoundation/CumulusCI
|
cumulusci/robotframework/tests/test_template_util.py
|
Python
|
bsd-3-clause
| 1,872 | 0.000534 |
"""General tests for Buoyant library."""
import datetime
import unittest
from io import BytesIO
import buoyant
from buoyant import buoy
sampledata = [
{
"latitude (degree)": "39.235",
"sea_surface_wave_peak_period (s)": "13.79",
"polar_coordinate_r1 (1)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"station_id": "urn:ioos:station:wmo:46014",
"sea_surface_wind_wave_period (s)": "3.80",
"spectral_energy (m**2/Hz)": "0;0;0;0;0.117495;0.347233;0.340078;1.07545;1.31407;0.644604;0.319928;0.20951;0.203445;0.407703;0.501098;1.05528;0.552653;0.982512;0.40238;0.259344;0.176087;0.156276;0.10127;0.0713481;0.1257;0.0469963;0.0294347;0.0344079;0.0196117;0.0208386;0.0207157;0.0185725;0.0112313;0.0140935;0.00829521;0.0135329;0.0103501;0.00823833;0.00611987;0.00516951;0.00295949;0.00274196;0.00162249;0.00153895;0.000701703;0.000452887",
"sea_surface_wave_mean_period (s)": "7.61",
"sea_water_temperature (c)": "",
"bandwidths (Hz)": "0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0200;0.0200;0.0200;0.0200;0.0200;0.0200;0.0200",
"sea_surface_wind_wave_to_direction (degree)": "",
"polar_coordinate_r2 (1)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"sampling_rate (Hz)": "",
"sea_surface_wave_to_direction (degree)": "",
"sea_surface_swell_wave_significant_height (m)": "1.07",
"number_of_frequencies (count)": "46",
"center_frequencies (Hz)": "0.0325;0.0375;0.0425;0.0475;0.0525;0.0575;0.0625;0.0675;0.0725;0.0775;0.0825;0.0875;0.0925;0.1000;0.1100;0.1200;0.1300;0.1400;0.1500;0.1600;0.1700;0.1800;0.1900;0.2000;0.2100;0.2200;0.2300;0.2400;0.2500;0.2600;0.2700;0.2800;0.2900;0.3000;0.3100;0.3200;0.3300;0.3400;0.3500;0.3650;0.3850;0.4050;0.4250;0.4450;0.4650;0.4850",
"date_time": "2015-07-31T19:50:00Z",
"sea_surface_wind_wave_significant_height (m)": "0.17",
"sea_surface_wave_significant_height (m)": "1.09",
"sea_surface_swell_wave_to_direction (degree)": "",
"sea_surface_swell_wave_period (s)": "",
"calculation_method": "UNKNOWN",
"mean_wave_direction (degree)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"longitude (degree)": "-123.974",
"principal_wave_direction (degree)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"sensor_id": "urn:ioos:sensor:wmo:46014::wpm1",
}
]
class BuoyTestCase(unittest.TestCase):
def setUp(self):
self.b = buoyant.Buoy("41012")
def test_observation(self):
"""Test the Observation class."""
self.assertTrue(issubclass(buoyant.Observation, float))
subint = float.__new__(buoyant.Observation, 11)
assert subint == 11
assert isinstance(subint, buoyant.Observation)
obs = buoyant.Observation(1, "m")
assert isinstance(obs, buoyant.Observation)
assert obs.unit == "m"
self.assertEqual(str(obs), "1.0 m")
assert repr(obs) == "Observation(1.0, 'm')"
assert obs + 2 == 3
def test_buoy_instant(self):
assert self.b
assert isinstance(self.b, buoyant.Buoy)
def test_data_exists(self):
x = self.b.sea_water_electrical_conductivity
assert x.unit == "mS/cm"
currents = self.b.currents
self.assertIsInstance(currents, list)
assert isinstance(x.datetime, datetime.datetime)
assert isinstance(self.b.image, BytesIO)
assert isinstance(self.b.__dict__["lat"], float)
assert isinstance(self.b.coords, tuple)
assert (self.b.__dict__["lat"], self.b.__dict__["lon"]) == self.b.coords
def test_keys(self):
"""Test that observation attributes exist in Buoy class."""
self.assertIsNotNone(self.b.sea_water_salinity)
self.assertIsNotNone(self.b.air_pressure_at_sea_level)
self.assertIsNotNone(self.b.air_temperature)
self.assertIsNotNone(self.b.currents)
self.assertIsNotNone(self.b.sea_water_electrical_conductivity)
self.assertIsNotNone(self.b.sea_water_salinity)
self.assertIsNotNone(self.b.sea_water_temperature)
def test_parse_unit(self):
dictionary = {"magic (pixie dust)": "42"}
x = buoy.parse_unit("magic", dictionary)
assert isinstance(x, buoyant.Observation)
nope = buoy.parse_unit("widget", dictionary)
self.assertIsNone(nope)
spectral_energy = buoy.parse_unit("spectral_energy", sampledata[0])
self.assertEqual(spectral_energy[4], buoy.Observation(0.117495, "m**2/Hz"))
def test_error(self):
with self.assertRaises(AttributeError):
self.b._get("foo bar")
self.assertIsNone(self.b.depth)
def test_image(self):
station = buoyant.Buoy(51001)
assert buoy.CAM_ENDPOINT in station.image_url
self.assertIsNotNone(station.image)
def test_degroup(self):
waves = buoyant.buoy._degroup(sampledata, buoyant.properties.waves)
self.assertEqual(
waves[0]["sea_surface_wind_wave_significant_height"],
buoy.Observation(0.17, "m"),
)
if __name__ == "__main__":
unittest.main()
|
fitnr/buoyant
|
tests/test_buoyant.py
|
Python
|
gpl-3.0
| 5,408 | 0.001664 |
# Copyright 2002-2011 Nick Mathewson. See LICENSE for licensing information.
"""mixminion.ServerKeys
Classes for servers to generate and store keys and server descriptors.
"""
#FFFF We need support for encrypting private keys.
__all__ = [ "ServerKeyring", "generateServerDescriptorAndKeys",
"generateCertChain" ]
import os
import errno
import socket
import re
import sys
import time
import threading
import urllib
import urllib2
import mixminion._minionlib
import mixminion.Crypto
import mixminion.NetUtils
import mixminion.Packet
import mixminion.server.HashLog
import mixminion.server.MMTPServer
import mixminion.server.ServerMain
from mixminion.ServerInfo import ServerInfo, PACKET_KEY_BYTES, MMTP_KEY_BYTES,\
signServerInfo
from mixminion.Common import AtomicFile, LOG, MixError, MixFatalError, \
ceilDiv, createPrivateDir, checkPrivateFile, englishSequence, \
formatBase64, formatDate, formatTime, previousMidnight, readFile, \
replaceFile, secureDelete, tryUnlink, UIError, writeFile
from mixminion.Config import ConfigError
#----------------------------------------------------------------------
# Seconds before a key becomes live that we want to generate
# and publish it.
#
#FFFF Make this configurable? (Set to 2 days, 13 hours)
PUBLICATION_LATENCY = (2*24+13)*60*60
# Number of seconds worth of keys we want to generate in advance.
#
#FFFF Make this configurable? (Set to 2 weeks).
PREPUBLICATION_INTERVAL = 14*24*60*60
# We have our X509 certificate set to expire a bit after public key does,
# so that slightly-skewed clients don't incorrectly give up while trying to
# connect to us. (And so that we don't mess up the world while being
# slightly skewed.)
CERTIFICATE_EXPIRY_SLOPPINESS = 2*60*60
# DOCDOC
CERTIFICATE_LIFETIME = 24*60*60
#----------------------------------------------------------------------
class ServerKeyring:
"""A ServerKeyring remembers current and future keys, descriptors, and
hash logs for a mixminion server. It keeps track of key rotation
schedules, and generates new keys as needed.
"""
## Fields:
# homeDir: server home directory
# keyDir: server key directory
# keyOverlap: How long after a new key begins do we accept the old one?
# keySets: sorted list of (start, end, keyset)
# nextUpdate: time_t when a new key should be added, or a current key
# should be removed, or "None" for uncalculated.
# keyRange: tuple of (firstKey, lastKey) to represent which key names
# have keys on disk.
# currentKeys: None, if we haven't checked for currently live keys, or
# a list of currently live ServerKeyset objects.
# dhFile: pathname to file holding Diffie-Hellman parameters.
# _lock: A lock to prevent concurrent key generation or rotation.
def __init__(self, config):
"Create a ServerKeyring from a config object"
self._lock = threading.RLock()
self.configure(config)
def configure(self, config):
"Set up a ServerKeyring from a config object"
self.config = config
self.homeDir = config.getBaseDir()
self.keyDir = config.getKeyDir()
self.hashDir = os.path.join(config.getWorkDir(), 'hashlogs')
self.dhFile = os.path.join(config.getWorkDir(), 'tls', 'dhparam')
self.certFile = os.path.join(config.getWorkDir(), "cert_chain")
self.keyOverlap = config['Server']['PublicKeyOverlap'].getSeconds()
self.nickname = config['Server']['Nickname'] #DOCDOC
self.nextUpdate = None
self.currentKeys = None
self._tlsContext = None #DOCDOC
self._tlsContextExpires = -1 #DOCDOC
self.pingerSeed = None
self.checkKeys()
def checkKeys(self):
"""Internal method: read information about all this server's
currently-prepared keys from disk.
May raise ConfigError if any of the server descriptors on disk
are invalid.
"""
self.keySets = []
badKeySets = []
firstKey = sys.maxint
lastKey = 0
LOG.debug("Scanning server keystore at %s", self.keyDir)
if not os.path.exists(self.keyDir):
LOG.info("Creating server keystore at %s", self.keyDir)
createPrivateDir(self.keyDir)
# Iterate over the entries in HOME/keys
for dirname in os.listdir(self.keyDir):
# Skip any that aren't directories named "key_INT"
if not os.path.isdir(os.path.join(self.keyDir,dirname)):
continue
if not dirname.startswith('key_'):
LOG.warn("Unexpected directory %s under %s",
dirname, self.keyDir)
continue
keysetname = dirname[4:]
try:
setNum = int(keysetname)
# keep track of the first and last used key number
if setNum < firstKey: firstKey = setNum
if setNum > lastKey: lastKey = setNum
except ValueError:
LOG.warn("Unexpected directory %s under %s",
dirname, self.keyDir)
continue
# Find the server descriptor...
keyset = ServerKeyset(self.keyDir, keysetname, self.hashDir)
ok = 1
try:
keyset.checkKeys()
except MixError, e:
LOG.warn("Error checking private keys in keyset %s: %s",
keysetname, str(e))
ok = 0
try:
if ok:
keyset.getServerDescriptor()
except (ConfigError, IOError), e:
LOG.warn("Key set %s has invalid/missing descriptor: %s",
keysetname, str(e))
ok = 0
if ok:
t1, t2 = keyset.getLiveness()
self.keySets.append( (t1, t2, keyset) )
LOG.trace("Found key %s (valid from %s to %s)",
dirname, formatDate(t1), formatDate(t2))
else:
badKeySets.append(keyset)
LOG.debug("Found %s keysets: %s were incomplete or invalid.",
len(self.keySets), len(badKeySets))
if badKeySets:
LOG.warn("Removing %s invalid keysets", len(badKeySets))
for b in badKeySets:
b.delete()
# Now, sort the key intervals by starting time.
self.keySets.sort()
self.keyRange = (firstKey, lastKey)
# Now we try to see whether we have more or less than 1 key in effect
# for a given time.
for idx in xrange(len(self.keySets)-1):
end = self.keySets[idx][1]
start = self.keySets[idx+1][0]
if start < end:
LOG.warn("Multiple keys for %s. That's unsupported.",
formatDate(end))
elif start > end:
LOG.warn("Gap in key schedule: no key from %s to %s",
formatDate(end), formatDate(start))
def checkDescriptorConsistency(self, regen=1):
"""Check whether the server descriptors in this keyring are
consistent with the server's configuration. If 'regen' is
true, inconsistent descriptors are regenerated."""
identity = None
state = []
for _,_,ks in self.keySets:
ok = ks.checkConsistency(self.config, 0)
if ok == 'good':
continue
state.append((ok, ks))
if not state:
return
LOG.warn("Some generated keysets do not match "
"current configuration...")
for ok, ks in state:
va,vu = ks.getLiveness()
LOG.warn("Keyset %s (%s--%s):",ks.keyname,formatTime(va,1),
formatTime(vu,1))
ks.checkConsistency(self.config, 1)
if regen and ok == 'bad':
if not identity: identity = self.getIdentityKey()
ks.regenerateServerDescriptor(self.config, identity)
def getIdentityKey(self):
"""Return this server's identity key. Generate one if it doesn't
exist."""
password = None # FFFF Use this, somehow.
fn = os.path.join(self.keyDir, "identity.key")
bits = self.config['Server']['IdentityKeyBits']
if os.path.exists(fn):
checkPrivateFile(fn)
key = mixminion.Crypto.pk_PEM_load(fn, password)
keylen = key.get_modulus_bytes()*8
if keylen != bits:
LOG.warn(
"Stored identity key has %s bits, but you asked for %s.",
keylen, bits)
else:
LOG.info("Generating identity key. (This may take a while.)")
key = mixminion.Crypto.pk_generate(bits)
mixminion.Crypto.pk_PEM_save(key, fn, password)
LOG.info("Generated %s-bit identity key.", bits)
return key
def getPingerSeed(self):
"""DOCDOC"""
if self.pingerSeed is not None:
return self.pingerSeed
fn = os.path.join(self.keyDir, "pinger.seed")
if os.path.exists(fn):
checkPrivateFile(fn)
r = readFile(fn)
if len(r) == mixminion.Crypto.DIGEST_LEN:
self.pingerSeed = r
return r
self.pingerSeed = r = mixminion.Crypto.trng(mixminion.Crypto.DIGEST_LEN)
createPrivateDir(self.keyDir)
writeFile(fn, r, 0600)
return r
def getIdentityKeyDigest(self):
"""DOCDOC"""
k = self.getIdentityKey()
return mixminion.Crypto.sha1(mixminion.Crypto.pk_encode_public_key(k))
def publishKeys(self, allKeys=0):
"""Publish server descriptors to the directory server. Ordinarily,
only unpublished descriptors are sent. If allKeys is true,
all descriptors are sent."""
keySets = [ ks for _, _, ks in self.keySets ]
if allKeys:
LOG.info("Republishing all known keys to directory server")
else:
keySets = [ ks for ks in keySets if not ks.isPublished() ]
if not keySets:
LOG.trace("publishKeys: no unpublished keys found")
return
LOG.info("Publishing %s keys to directory server...",len(keySets))
rejected = 0
for ks in keySets:
status = ks.publish(self.config['DirectoryServers']['PublishURL'])
if status == 'error':
LOG.error("Error publishing a key; giving up")
return 0
elif status == 'reject':
rejected += 1
else:
assert status == 'accept'
if rejected == 0:
LOG.info("All keys published successfully.")
return 1
else:
LOG.info("%s/%s keys were rejected." , rejected, len(keySets))
return 0
def removeIdentityKey(self):
"""Remove this server's identity key."""
fn = os.path.join(self.keyDir, "identity.key")
if not os.path.exists(fn):
LOG.info("No identity key to remove.")
else:
LOG.warn("Removing identity key in 10 seconds")
time.sleep(10)
LOG.warn("Removing identity key")
secureDelete([fn], blocking=1)
if os.path.exists(self.dhFile):
LOG.info("Removing diffie-helman parameters file")
secureDelete([self.dhFile], blocking=1)
def createKeysAsNeeded(self,now=None):
"""Generate new keys and descriptors as needed, so that the next
PUBLICATION_LATENCY+PREPUBLICATION_INTERVAL seconds are covered."""
if now is None:
now = time.time()
if self.getNextKeygen() > now-10: # 10 seconds of leeway
return
if self.keySets:
lastExpiry = self.keySets[-1][1]
if lastExpiry < now:
lastExpiry = now
else:
lastExpiry = now
needToCoverUntil = now+PUBLICATION_LATENCY+PREPUBLICATION_INTERVAL
timeToCover = needToCoverUntil-lastExpiry
lifetime = self.config['Server']['PublicKeyLifetime'].getSeconds()
nKeys = int(ceilDiv(timeToCover, lifetime))
LOG.info("Creating %s keys", nKeys)
self.createKeys(num=nKeys)
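# Illustrative arithmetic (hypothetical lifetime, not taken from the config): with
# no keys on disk, timeToCover = PUBLICATION_LATENCY + PREPUBLICATION_INTERVAL,
# i.e. (2 days 13 hours) + 14 days, roughly 16.5 days. With a hypothetical
# PublicKeyLifetime of 2 days that gives ceilDiv(timeToCover, lifetime) == 9 keys.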
def createKeys(self, num=1, startAt=None):
"""Generate 'num' public keys for this server. If startAt is provided,
make the first key become valid at 'startAt'. Otherwise, make the
first key become valid right after the last key we currently have
expires. If we have no keys now, make the first key start now."""
# FFFF Use this.
#password = None
if startAt is None:
if self.keySets:
startAt = self.keySets[-1][1]+60
if startAt < time.time():
startAt = time.time()+60
else:
startAt = time.time()+60
startAt = previousMidnight(startAt)
firstKey, lastKey = self.keyRange
for _ in xrange(num):
if firstKey == sys.maxint:
keynum = firstKey = lastKey = 1
elif firstKey > 1:
firstKey -= 1
keynum = firstKey
else:
lastKey += 1
keynum = lastKey
keyname = "%04d" % keynum
lifetime = self.config['Server']['PublicKeyLifetime'].getSeconds()
nextStart = startAt + lifetime
LOG.info("Generating key %s to run from %s through %s (GMT)",
keyname, formatDate(startAt),
formatDate(nextStart-3600))
generateServerDescriptorAndKeys(config=self.config,
identityKey=self.getIdentityKey(),
keyname=keyname,
keydir=self.keyDir,
hashdir=self.hashDir,
validAt=startAt)
startAt = nextStart
self.checkKeys()
def regenerateDescriptors(self):
"""Regenerate all server descriptors for all keysets in this
keyring, but keep all old keys intact."""
LOG.info("Regenerating server descriptors; keeping old keys.")
identityKey = self.getIdentityKey()
for _,_,ks in self.keySets:
ks.regenerateServerDescriptor(self.config, identityKey)
def getNextKeygen(self):
"""Return the time (in seconds) when we should next generate keys.
If -1 is returned, keygen should occur immediately.
"""
if not self.keySets:
return -1
# Our last current key expires at 'lastExpiry'.
lastExpiry = self.keySets[-1][1]
# We want to have keys in the directory valid for
# PREPUBLICATION_INTERVAL seconds after that, and we assume that
# a key takes up to PUBLICATION_LATENCY seconds to make it into the
# directory.
nextKeygen = lastExpiry - PUBLICATION_LATENCY - PREPUBLICATION_INTERVAL
LOG.info("Last expiry at %s; next keygen at %s",
formatTime(lastExpiry,1), formatTime(nextKeygen, 1))
return nextKeygen
def removeDeadKeys(self, now=None):
"""Remove all keys that have expired."""
self.checkKeys()
keys = self.getDeadKeys(now)
for message, keyset in keys:
LOG.info(message)
keyset.delete()
self.checkKeys()
def getDeadKeys(self,now=None):
"""Helper function: return a list of (informative-message, keyset
object) for each expired keyset in the keystore. Does not rescan
the keystore or remove dead keys.
"""
if now is None:
now = time.time()
expiryStr = " expired"
else:
expiryStr = ""
cutoff = now - self.keyOverlap
result = []
for va, vu, keyset in self.keySets:
if vu >= cutoff:
continue
name = keyset.keyname
message ="Removing%s key %s (valid from %s through %s)"%(
expiryStr, name, formatDate(va), formatDate(vu))
result.append((message, keyset))
return result
def _getLiveKeys(self, now=None):
"""Find all keys that are now valid. Return list of (Valid-after,
valid-util, keyset)."""
if not self.keySets:
return []
if now is None:
now = time.time()
cutoff = now-self.keyOverlap
# A key is live if
# * it became valid before now, and
# * it did not become invalid until keyOverlap seconds ago
return [ (va,vu,k) for (va,vu,k) in self.keySets
if va <= now and vu >= cutoff ]
def getServerKeysets(self, now=None):
"""Return list of ServerKeyset objects for the currently live keys.
"""
# FFFF Support passwords on keys
keysets = [ ]
for va, vu, ks in self._getLiveKeys(now):
ks.load()
keysets.append(ks)
return keysets
def _getDHFile(self):
"""Return the filename for the diffie-helman parameters for the
server. Creates the file if it doesn't yet exist."""
dhdir = os.path.split(self.dhFile)[0]
createPrivateDir(dhdir)
if not os.path.exists(self.dhFile):
# ???? This is only using 512-bit Diffie-Hellman! That isn't
# ???? remotely enough.
LOG.info("Generating Diffie-Helman parameters for TLS...")
mixminion._minionlib.generate_dh_parameters(self.dhFile, verbose=0)
LOG.info("...done")
else:
LOG.debug("Using existing Diffie-Helman parameter from %s",
self.dhFile)
return self.dhFile
def _newTLSContext(self, now=None):
"""Create and return a TLS context."""
if now is None:
now = time.time()
mmtpKey = mixminion.Crypto.pk_generate(MMTP_KEY_BYTES*8)
certStarts = now - CERTIFICATE_EXPIRY_SLOPPINESS
expires = now + CERTIFICATE_LIFETIME
certEnds = now + CERTIFICATE_LIFETIME + CERTIFICATE_EXPIRY_SLOPPINESS
tmpName = self.certFile + "_tmp"
generateCertChain(tmpName, mmtpKey, self.getIdentityKey(),
self.nickname, certStarts, certEnds)
replaceFile(tmpName, self.certFile)
self._tlsContext = (
mixminion._minionlib.TLSContext_new(self.certFile,
mmtpKey,
self._getDHFile()))
self._tlsContextExpires = expires
return self._tlsContext
def _getTLSContext(self, force=0, now=None):
if now is None:
now = time.time()
if force or self._tlsContext is None or self._tlsContextExpires < now:
return self._newTLSContext(now=now)
else:
return self._tlsContext
def updateMMTPServerTLSContext(self,mmtpServer,force=0,now=None):
"""DOCDOC"""
context = self._getTLSContext(force=force,now=now)
mmtpServer.setServerContext(context)
return self._tlsContextExpires
def updateKeys(self, packetHandler, statusFile=None,when=None):
"""Update the keys stored in a PacketHandler,
MMTPServer object, so that they contain the currently correct
keys. Also removes any dead keys.
This function is idempotent.
"""
self.checkKeys()
deadKeys = self.getDeadKeys(when)
self.currentKeys = keys = self.getServerKeysets(when)
keyNames = [k.keyname for k in keys]
deadKeyNames = [k.keyname for msg, k in deadKeys]
LOG.info("Updating keys: %s currently valid (%s); %s expired (%s)",
len(keys), " ".join(keyNames),
len(deadKeys), " ".join(deadKeyNames))
if packetHandler is not None:
packetKeys = []
hashLogs = []
for k in keys:
packetKeys.append(k.getPacketKey())
hashLogs.append(k.getHashLog())
packetHandler.setKeys(packetKeys, hashLogs)
if statusFile:
writeFile(statusFile,
"".join(["%s\n"%k.getDescriptorFileName() for k in keys]),
0644)
for msg, ks in deadKeys:
LOG.info(msg)
ks.delete()
if deadKeys:
self.checkKeys()
self.nextUpdate = None
self.getNextKeyRotation(keys)
def getNextKeyRotation(self, curKeys=None):
"""Calculate the next time at which we should change the set of live
keys."""
if self.nextUpdate is None:
if curKeys is None:
if self.currentKeys is None:
curKeys = self.getServerKeysets()
else:
curKeys = self.currentKeys
events = []
curNames = {}
# For every current keyset, we'll remove it at keyOverlap
# seconds after its stated expiry time.
for k in curKeys:
va, vu = k.getLiveness()
events.append((vu+self.keyOverlap, "RM"))
curNames[k.keyname] = 1
# For every other keyset, we'll add it when it becomes valid.
for va, vu, k in self.keySets:
if curNames.has_key(k.keyname): continue
events.append((va, "ADD"))
# Which event happens first?
events.sort()
if not events:
LOG.info("No future key rotation events.")
self.nextUpdate = sys.maxint
return self.nextUpdate
self.nextUpdate, eventType = events[0]
if eventType == "RM":
LOG.info("Next key event: old key is removed at %s",
formatTime(self.nextUpdate,1))
else:
assert eventType == "ADD"
LOG.info("Next key event: new key becomes valid at %s",
formatTime(self.nextUpdate,1))
return self.nextUpdate
def getCurrentDescriptor(self, now=None):
"""DOCDOC"""
self._lock.acquire()
if now is None:
now = time.time()
try:
keysets = self.getServerKeysets()
for k in keysets:
va,vu = k.getLiveness()
if va <= now <= vu:
return k.getServerDescriptor()
LOG.warn("getCurrentDescriptor: no live keysets??")
return self.getServerKeysets()[-1].getServerDescriptor()
finally:
self._lock.release()
def lock(self, blocking=1):
return self._lock.acquire(blocking)
def unlock(self):
self._lock.release()
#----------------------------------------------------------------------
class ServerKeyset:
"""A set of expirable keys for use by a server.
A server has one long-lived identity key, and two short-lived
temporary keys: one for subheader encryption and one for MMTP. The
subheader (or 'packet') key has an associated hashlog, and the
MMTP key has an associated self-signed X509 certificate.
Whether we publish or not, we always generate a server descriptor
to store the keys' lifetimes.
When we create a new ServerKeyset object, the associated keys are not
read from disk until the object's load method is called."""
## Fields:
# keydir: Directory to store this keyset's data.
# hashlogFile: filename of this keyset's hashlog.
# packetKeyFile, mmtpKeyFile: filename of this keyset's short-term keys
# descFile: filename of this keyset's server descriptor.
# publishedFile: filename to store this server's publication time.
#
# packetKey, mmtpKey: This server's actual short-term keys.
#
# serverinfo: None, or a parsed server descriptor.
# validAfter, validUntil: This keyset's published lifespan, or None.
# published: boolean: has this server been published?
def __init__(self, keyroot, keyname, hashroot):
"""Load a set of keys named "keyname" on a server where all keys
are stored under the directory "keyroot" and hashlogs are stored
under "hashroot". """
self.keyroot = keyroot
self.keyname = keyname
self.hashroot= hashroot
self.keydir = keydir = os.path.join(keyroot, "key_"+keyname)
self.hashlogFile = os.path.join(hashroot, "hash_"+keyname)
self.packetKeyFile = os.path.join(keydir, "mix.key")
self.mmtpKeyFile = os.path.join(keydir, "mmtp.key")
self.certFile = os.path.join(keydir, "mmtp.cert")
if os.path.exists(self.mmtpKeyFile):
secureDelete(self.mmtpKeyFile)
if os.path.exists(self.certFile):
secureDelete(self.certFile)
self.descFile = os.path.join(keydir, "ServerDesc")
self.publishedFile = os.path.join(keydir, "published")
self.serverinfo = None
self.validAfter = None
self.validUntil = None
self.published = os.path.exists(self.publishedFile)
if not os.path.exists(keydir):
createPrivateDir(keydir)
def delete(self):
"""Remove this keyset from disk."""
files = [self.packetKeyFile,
self.descFile,
self.publishedFile,
self.hashlogFile ]
files = [f for f in files if os.path.exists(f)]
secureDelete(files, blocking=1)
mixminion.server.HashLog.deleteHashLog(self.hashlogFile)
os.rmdir(self.keydir)
def checkKeys(self):
"""Check whether all the required keys exist and are private."""
checkPrivateFile(self.packetKeyFile)
def load(self, password=None):
"""Read the short-term keys from disk. Must be called before
getPacketKey or getMMTPKey."""
self.checkKeys()
self.packetKey = mixminion.Crypto.pk_PEM_load(self.packetKeyFile,
password)
def save(self, password=None):
"""Save this set of keys to disk."""
mixminion.Crypto.pk_PEM_save(self.packetKey, self.packetKeyFile,
password)
def clear(self):
"""Stop holding the keys in memory."""
self.packetKey = None
def getHashLogFileName(self): return self.hashlogFile
def getDescriptorFileName(self): return self.descFile
def getPacketKey(self): return self.packetKey
def getPacketKeyID(self):
"Return the sha1 hash of the asn1 encoding of the packet public key"
return mixminion.Crypto.sha1(self.packetKey.encode_key(1))
def getServerDescriptor(self):
"""Return a ServerInfo for this keyset, reading it from disk if
needed."""
if self.serverinfo is None:
self.serverinfo = ServerInfo(fname=self.descFile)
return self.serverinfo
def getHashLog(self):
return mixminion.server.HashLog.getHashLog(
self.getHashLogFileName(), self.getPacketKeyID())
def getLiveness(self):
"""Return a 2-tuple of validAfter/validUntil for this server."""
if self.validAfter is None or self.validUntil is None:
info = self.getServerDescriptor()
self.validAfter = info['Server']['Valid-After']
self.validUntil = info['Server']['Valid-Until']
return self.validAfter, self.validUntil
def isPublished(self):
"""Return true iff we have published this keyset."""
return self.published
def markAsPublished(self):
"""Mark this keyset as published."""
contents = "%s\n"%formatTime(time.time(),1)
writeFile(self.publishedFile, contents, mode=0600)
self.published = 1
def markAsUnpublished(self):
"""Mark this keyset as unpublished."""
tryUnlink(self.publishedFile)
self.published = 0
def regenerateServerDescriptor(self, config, identityKey):
"""Regenerate the server descriptor for this keyset, keeping the
original keys."""
self.load()
self.markAsUnpublished()
validAt,validUntil = self.getLiveness()
LOG.info("Regenerating descriptor for keyset %s (%s--%s)",
self.keyname, formatTime(validAt,1),
formatTime(validUntil,1))
generateServerDescriptorAndKeys(config, identityKey,
self.keyroot, self.keyname, self.hashroot,
validAt=validAt, validUntil=validUntil,
useServerKeys=1)
self.serverinfo = self.validAfter = self.validUntil = None
def checkConsistency(self, config, log=1):
"""Check whether this server descriptor is consistent with a
given configuration file. Returns are as for
'checkDescriptorConsistency'.
"""
return checkDescriptorConsistency(self.getServerDescriptor(),
config,
log=log,
isPublished=self.published)
def publish(self, url):
"""Try to publish this descriptor to a given directory URL. Returns
'accept' if the publication was successful, 'reject' if the
server refused to accept the descriptor, and 'error' if
publication failed for some other reason."""
fname = self.getDescriptorFileName()
descriptor = readFile(fname)
fields = urllib.urlencode({"desc" : descriptor})
f = None
try:
try:
f = urllib2.urlopen(url, fields)
info = f.info()
reply = f.read()
except IOError, e:
LOG.error("Error while publishing server descriptor: %s",e)
return 'error'
except:
LOG.error_exc(sys.exc_info(),
"Error publishing server descriptor")
return 'error'
finally:
if f is not None:
f.close()
if info.get('Content-Type') != 'text/plain':
LOG.error("Bad content type %s from directory"%info.get(
'Content-Type'))
return 'error'
m = DIRECTORY_RESPONSE_RE.search(reply)
if not m:
LOG.error("Didn't understand reply from directory: %s",
reply)
return 'error'
ok = int(m.group(1))
msg = m.group(2)
if not ok:
LOG.error("Directory rejected descriptor: %r", msg)
return 'reject'
LOG.info("Directory accepted descriptor: %r", msg)
self.markAsPublished()
return 'accept'
# Matches the reply a directory server gives.
DIRECTORY_RESPONSE_RE = re.compile(r'^Status: (0|1)[ \t]*\nMessage: (.*)$',
re.M)
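# Illustrative example (not part of the original module): a directory reply like
#   "Status: 1\nMessage: Accepted."
# is parsed as
#   m = DIRECTORY_RESPONSE_RE.search("Status: 1\nMessage: Accepted.")
#   int(m.group(1))   # -> 1   (accepted)
#   m.group(2)        # -> 'Accepted.'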
class _WarnWrapper:
"""Helper for 'checkDescriptorConsistency' to keep its implementation
short. Counts the number of times it's invoked, and delegates to
LOG.warn if silence is false."""
def __init__(self, silence, isPublished):
self.silence = silence
self.errors = 0
self.called = 0
self.published = isPublished
def __call__(self, *args):
self.called = 1
self.errors += 1
if not self.published:
args = list(args)
args[0] = args[0].replace("published", "in unpublished descriptor")
if not self.silence:
LOG.warn(*args)
def checkDescriptorConsistency(info, config, log=1, isPublished=1):
"""Given a ServerInfo and a ServerConfig, compare them for consistency.
Returns 'good' iff info may have come from 'config'.
If the server is inconsistent with the configuration file and should
be regenerated, returns 'bad'. Otherwise, returns 'so-so'.
If 'log' is true, warn as well. Does not check keys.
"""
#XXXX This needs unit tests. For now, though, it seems to work.
warn = _WarnWrapper(silence = not log, isPublished=isPublished)
config_s = config['Server']
info_s = info['Server']
if info_s['Nickname'] != config_s['Nickname']:
warn("Mismatched nicknames: %s in configuration; %s published.",
config_s['Nickname'], info_s['Nickname'])
idBits = info_s['Identity'].get_modulus_bytes()*8
confIDBits = config_s['IdentityKeyBits']
if idBits != confIDBits:
warn("Mismatched identity bits: %s in configuration; %s published.",
confIDBits, idBits)
warn.errors -= 1 # We can't do anything about this!
if config_s['Contact-Email'] != info_s['Contact']:
warn("Mismatched contacts: %s in configuration; %s published.",
config_s['Contact-Email'], info_s['Contact'])
if config_s['Contact-Fingerprint'] != info_s['Contact-Fingerprint']:
warn("Mismatched contact fingerprints.")
if info_s['Software'] and info_s['Software'] != (
"Mixminion %s" % mixminion.__version__):
warn("Mismatched versions: running %s; %s published.",
mixminion.__version__, info_s['Software'])
if config_s['Comments'] != info_s['Comments']:
warn("Mismatched comments field.")
if (previousMidnight(info_s['Valid-Until']) !=
previousMidnight(config_s['PublicKeyLifetime'].getSeconds() +
info_s['Valid-After'])):
warn("Published lifetime does not match PublicKeyLifetime")
warn("(Future keys will be generated with the correct lifetime")
warn.errors -= 2 # We can't do anything about this!
insecurities = config.getInsecurities()
if insecurities:
if (info_s['Secure-Configuration'] or
info_s.get('Why-Insecure',None)!=", ".join(insecurities)):
warn("Mismatched Secure-Configuration: %r %r %r",
info_s['Secure-Configuration'],
info_s.get("Why-Insecure",None),
", ".join(insecurities))
else:
if not info_s['Secure-Configuration'] or info_s.get('Why-Insecure'):
warn("Mismatched Secure-Configuration")
info_im = info['Incoming/MMTP']
config_im = config['Incoming/MMTP']
if info_im['Port'] != config_im['Port']:
warn("Mismatched ports: %s configured; %s published.",
config_im['Port'], info_im['Port'])
## info_ip = info_im.get('IP',None)
## if config_im['IP'] == '0.0.0.0':
## guessed = _guessLocalIP()
## if guessed != info_ip:
## warn("Mismatched IPs: Guessed IP (%s); %s published.",
## guessed, info_ip)
## elif config_im['IP'] != info_ip:
## warn("Mismatched IPs: %s configured; %s published.",
## config_im['IP'], info_ip)
info_host = info_im.get('Hostname',None)
config_host = config_im['Hostname']
if config_host is None:
guessed = socket.getfqdn()
if guessed != info_host:
warn("Mismatched hostnames: %s guessed; %s published",
guessed, info_host)
elif config_host != info_host:
warn("Mismatched hostnames: %s configured, %s published",
config_host, info_host)
if config_im['Enabled'] and not info_im.get('Version'):
warn("Incoming MMTP enabled but not published.")
elif not config_im['Enabled'] and info_im.get('Version'):
warn("Incoming MMTP published but not enabled.")
for section in ('Outgoing/MMTP', 'Delivery/MBOX', 'Delivery/SMTP'):
info_out = info[section].get('Version')
config_out = (config[section].get('Enabled') and
config[section].get('Advertise',1))
if not config_out and section == 'Delivery/SMTP':
config_out = (config['Delivery/SMTP-Via-Mixmaster'].get("Enabled")
and config['Delivery/SMTP-Via-Mixmaster'].get("Advertise", 1))
if info_out and not config_out:
warn("%s published, but not enabled.", section)
if config_out and not info_out:
warn("%s enabled, but not published.", section)
info_testing = info.get("Testing",{})
if info_testing.get("Platform", "") != getPlatformSummary():
warn("Mismatched platform: running %r, but %r published",
getPlatformSummary(), info_testing.get("Platform",""))
if not warn.errors and info_testing.get("Configuration", "") != config.getConfigurationSummary():
warn("Configuration has changed since last publication")
if warn.errors:
return "bad"
elif warn.called:
return "so-so"
else:
return "good"
#----------------------------------------------------------------------
# Functionality to generate keys and server descriptors
def generateServerDescriptorAndKeys(config, identityKey, keydir, keyname,
hashdir, validAt=None, now=None,
useServerKeys=0, validUntil=None):
"""Generate and sign a new server descriptor, and generate all the keys to
go with it.
config -- Our ServerConfig object.
identityKey -- This server's private identity key
keydir -- The root directory for storing key sets.
keyname -- The name of this new key set within keydir
hashdir -- The root directory for storing hash logs.
validAt -- The starting time (in seconds) for this key's lifetime.
useServerKeys -- If true, try to read an existing keyset from
(keydir,keyname,hashdir) rather than generating a fresh one.
validUntil -- Time at which the generated descriptor should
expire.
"""
if useServerKeys:
serverKeys = ServerKeyset(keydir, keyname, hashdir)
serverKeys.load()
packetKey = serverKeys.packetKey
else:
# First, we generate both of our short-term keys...
packetKey = mixminion.Crypto.pk_generate(PACKET_KEY_BYTES*8)
# ...and save them to disk, setting up our directory structure while
# we're at it.
serverKeys = ServerKeyset(keydir, keyname, hashdir)
serverKeys.packetKey = packetKey
serverKeys.save()
# FFFF unused
# allowIncoming = config['Incoming/MMTP'].get('Enabled', 0)
# Now, we pull all the information we need from our configuration.
nickname = config['Server']['Nickname']
contact = config['Server']['Contact-Email']
fingerprint = config['Server']['Contact-Fingerprint']
comments = config['Server']['Comments']
if not now:
now = time.time()
if not validAt:
validAt = now
insecurities = config.getInsecurities()
if insecurities:
secure = "no"
else:
secure = "yes"
# Calculate descriptor and X509 certificate lifetimes.
# (Round validAt to previous midnight.)
validAt = mixminion.Common.previousMidnight(validAt+30)
if not validUntil:
keyLifetime = config['Server']['PublicKeyLifetime'].getSeconds()
validUntil = previousMidnight(validAt + keyLifetime + 30)
mmtpProtocolsIn = mixminion.server.MMTPServer.MMTPServerConnection \
.PROTOCOL_VERSIONS[:]
mmtpProtocolsOut = mixminion.server.MMTPServer.MMTPClientConnection \
.PROTOCOL_VERSIONS[:]
mmtpProtocolsIn.sort()
mmtpProtocolsOut.sort()
mmtpProtocolsIn = ",".join(mmtpProtocolsIn)
mmtpProtocolsOut = ",".join(mmtpProtocolsOut)
#XXXX009 remove: hasn't been checked since 007 or used since 005.
identityKeyID = formatBase64(
mixminion.Crypto.sha1(
mixminion.Crypto.pk_encode_public_key(identityKey)))
fields = {
# XXXX009 remove: hasn't been checked since 007.
"IP": config['Incoming/MMTP'].get('IP', "0.0.0.0"),
"Hostname": config['Incoming/MMTP'].get('Hostname', None),
"Port": config['Incoming/MMTP'].get('Port', 0),
"Nickname": nickname,
"Identity":
formatBase64(mixminion.Crypto.pk_encode_public_key(identityKey)),
"Published": formatTime(now),
"ValidAfter": formatDate(validAt),
"ValidUntil": formatDate(validUntil),
"PacketKey":
formatBase64(mixminion.Crypto.pk_encode_public_key(packetKey)),
"KeyID": identityKeyID,
"MMTPProtocolsIn" : mmtpProtocolsIn,
"MMTPProtocolsOut" : mmtpProtocolsOut,
"PacketVersion" : mixminion.Packet.PACKET_VERSION,
"mm_version" : mixminion.__version__,
"Secure" : secure,
"Contact" : contact,
}
# If we don't know our IP address, try to guess
if fields['IP'] == '0.0.0.0': #XXXX008 remove; not needed since 005.
try:
fields['IP'] = _guessLocalIP()
LOG.warn("No IP configured; guessing %s",fields['IP'])
except IPGuessError, e:
LOG.error("Can't guess IP: %s", str(e))
raise UIError("Can't guess IP: %s" % str(e))
# If we don't know our Hostname, try to guess
if fields['Hostname'] is None:
fields['Hostname'] = socket.getfqdn()
LOG.warn("No Hostname configured; guessing %s",fields['Hostname'])
try:
_checkHostnameIsLocal(fields['Hostname'])
dnsResults = mixminion.NetUtils.getIPs(fields['Hostname'])
except socket.error, e:
LOG.warn("Can't resolve configured hostname %r: %s",
fields['Hostname'],str(e))
else:
found = [ ip for _,ip,_ in dnsResults ]
if fields['IP'] not in found:
LOG.warn("Configured hostname %r resolves to %s, but we're publishing the IP %s",
fields['Hostname'], englishSequence(found), fields['IP'])
# Fill in a stock server descriptor. Note the empty Digest: and
# Signature: lines.
info = """\
[Server]
Descriptor-Version: 0.2
Nickname: %(Nickname)s
Identity: %(Identity)s
Digest:
Signature:
Published: %(Published)s
Valid-After: %(ValidAfter)s
Valid-Until: %(ValidUntil)s
Packet-Key: %(PacketKey)s
Packet-Versions: %(PacketVersion)s
Software: Mixminion %(mm_version)s
Secure-Configuration: %(Secure)s
Contact: %(Contact)s
""" % fields
if insecurities:
info += "Why-Insecure: %s\n"%(", ".join(insecurities))
if fingerprint:
info += "Contact-Fingerprint: %s\n"%fingerprint
if comments:
info += "Comments: %s\n"%comments
# Only advertise incoming MMTP if we support it.
if config["Incoming/MMTP"].get("Enabled", 0):
info += """\
[Incoming/MMTP]
Version: 0.1
IP: %(IP)s
Hostname: %(Hostname)s
Port: %(Port)s
Key-Digest: %(KeyID)s
Protocols: %(MMTPProtocolsIn)s
""" % fields
for k,v in config.getSectionItems("Incoming/MMTP"):
if k not in ("Allow", "Deny"):
continue
info += "%s: %s" % (k, _rule(k=='Allow',v))
# Only advertise outgoing MMTP if we support it.
if config["Outgoing/MMTP"].get("Enabled", 0):
info += """\
[Outgoing/MMTP]
Version: 0.1
Protocols: %(MMTPProtocolsOut)s
""" % fields
for k,v in config.getSectionItems("Outgoing/MMTP"):
if k not in ("Allow", "Deny"):
continue
info += "%s: %s" % (k, _rule(k=='Allow',v))
if not config.moduleManager.isConfigured():
config.moduleManager.configure(config)
# Ask our modules for their configuration information.
info += "".join(config.moduleManager.getServerInfoBlocks())
info += """\
[Testing]
Platform: %s
Configuration: %s
""" %(getPlatformSummary(),
config.getConfigurationSummary())
# Remove extra (leading or trailing) whitespace from the lines.
lines = [ line.strip() for line in info.split("\n") ]
# Remove empty lines
lines = filter(None, lines)
# Force a newline at the end of the file, rejoin, and sign.
lines.append("")
info = "\n".join(lines)
info = signServerInfo(info, identityKey)
# Write the descriptor
writeFile(serverKeys.getDescriptorFileName(), info, mode=0644)
# This is for debugging: we try to parse and validate the descriptor
# we just made.
# FFFF Remove this once we're more confident.
inf = ServerInfo(string=info)
ok = checkDescriptorConsistency(inf, config, log=0, isPublished=0)
if ok not in ('good', 'so-so'):
print "========"
print info
print "======"
checkDescriptorConsistency(inf, config, log=1, isPublished=0)
assert ok in ('good', 'so-so')
return info
def _rule(allow, (ip, mask, portmin, portmax)):
"""Return an external representation of an IP allow/deny rule."""
if mask == '0.0.0.0':
ip="*"
mask=""
elif mask == "255.255.255.255":
mask = ""
else:
mask = "/%s" % mask
if portmin==portmax==48099 and allow:
ports = ""
elif portmin == 0 and portmax == 65535 and not allow:
ports = ""
elif portmin == portmax:
ports = " %s" % portmin
else:
ports = " %s-%s" % (portmin, portmax)
return "%s%s%s\n" % (ip,mask,ports)
#----------------------------------------------------------------------
# Helpers to guess a reasonable local IP when none is provided.
class IPGuessError(MixError):
"""Exception: raised when we can't guess a single best IP."""
pass
# Cached guessed IP address
_GUESSED_IP = None
def _guessLocalIP():
"Try to find a reasonable IP for this host."
global _GUESSED_IP
if _GUESSED_IP is not None:
return _GUESSED_IP
# First, let's see what our name resolving subsystem says our
# name is.
ip_set = {}
try:
ip_set[ socket.gethostbyname(socket.gethostname()) ] = 1
except socket.error:
try:
ip_set[ socket.gethostbyname(socket.getfqdn()) ] = 1
except socket.error:
pass
# And in case that doesn't work, let's see what other addresses we might
# think we have by using 'getsockname'.
for target_addr in ('18.0.0.1', '10.0.0.1', '192.168.0.1',
'172.16.0.1')+tuple(ip_set.keys()):
# open a datagram socket so that we don't actually send any packets
# by connecting.
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((target_addr, 9)) #discard port
ip_set[ s.getsockname()[0] ] = 1
except socket.error:
pass
for ip in ip_set.keys():
if ip.startswith("127.") or ip.startswith("0."):
del ip_set[ip]
# FFFF reject 192.168., 10., 172.16.x
if len(ip_set) == 0:
raise IPGuessError("No address found")
if len(ip_set) > 1:
raise IPGuessError("Multiple addresses found: %s" % (
", ".join(ip_set.keys())))
IP = ip_set.keys()[0]
if IP.startswith("192.168.") or IP.startswith("10.") or \
IP.startswith("176.16."):
raise IPGuessError("Only address found is in a private IP block")
return IP
_KNOWN_LOCAL_HOSTNAMES = {}
def _checkHostnameIsLocal(name):
if _KNOWN_LOCAL_HOSTNAMES.has_key(name):
return
r = mixminion.NetUtils.getIPs(name)
for family, addr, _ in r:
if family == mixminion.NetUtils.AF_INET:
if addr.startswith("127.") or addr.startswith("0."):
LOG.warn("Hostname %r resolves to reserved address %s",
name, addr)
else:
if addr in ("::", "::1"):
LOG.warn("Hostname %r resolves to reserved address %s",
name,addr)
_KNOWN_LOCAL_HOSTNAMES[name] = 1
def generateCertChain(filename, mmtpKey, identityKey, nickname,
certStarts, certEnds):
"""Create a two-certificate chain for use in MMTP.
filename -- location to store certificate chain.
mmtpKey -- a short-term RSA key to use for connection
encryption (1024 bits).
identityKey -- our long-term signing key (2048-4096 bits).
nickname -- nickname to use in our certificates.
certStarts, certEnds -- certificate lifetimes.
"""
fname = filename+"_tmp"
mixminion.Crypto.generate_cert(fname,
mmtpKey, identityKey,
"%s<MMTP>" %nickname,
nickname,
certStarts, certEnds)
certText = readFile(fname)
os.unlink(fname)
mixminion.Crypto.generate_cert(fname,
identityKey, identityKey,
nickname, nickname,
certStarts, certEnds)
identityCertText = readFile(fname)
os.unlink(fname)
writeFile(filename, certText+identityCertText, 0600)
def getPlatformSummary():
"""Return a string describing the current software and platform."""
if hasattr(os, "uname"):
uname = " ".join(os.uname())
else:
uname = sys.platform
return "Mixminion %s; Python %r on %r" % (
mixminion.__version__, sys.version, uname)
|
Javex/mixminion
|
lib/mixminion/server/ServerKeys.py
|
Python
|
mit
| 49,832 | 0.003793 |
import datetime
from django.db import models
from django.utils import timezone
class Question(models.Model):
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __unicode__(self): # __unicode__ on Python 2
return self.question_text
def was_published_recently(self):
now = timezone.now()
return now - datetime.timedelta(days=1) <= self.pub_date <= now
was_published_recently.admin_order_field = 'pub_date'
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
question = models.ForeignKey(Question)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
def __unicode__(self): # __unicode__ on Python 2
return self.choice_text
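# Illustrative usage sketch (interactive-shell style; not part of the original app):
#   >>> q = Question(question_text="Example?", pub_date=timezone.now())
#   >>> q.was_published_recently()
#   True
#   >>> old = Question(question_text="Old?",
#   ...                pub_date=timezone.now() - datetime.timedelta(days=2))
#   >>> old.was_published_recently()
#   False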
|
JCraft40/finalproject
|
polls/models.py
|
Python
|
gpl-2.0
| 899 | 0.006674 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium.common.exceptions import WebDriverException
try:
import http.client as http_client
except ImportError:
import httplib as http_client
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
from .service import Service
from .remote_connection import SafariRemoteConnection
class WebDriver(RemoteWebDriver):
"""
Controls the SafariDriver and allows you to drive the browser.
"""
def __init__(self, port=0, executable_path="/usr/bin/safaridriver", reuse_service=False,
desired_capabilities=DesiredCapabilities.SAFARI, quiet=False,
keep_alive=True):
"""
Creates a new Safari driver instance and launches or finds a running safaridriver service.
:Args:
- port - The port on which the safaridriver service should listen for new connections. If zero, a free port will be found.
- executable_path - Path to a custom safaridriver executable to be used. If absent, /usr/bin/safaridriver is used.
- reuse_service - If True, do not spawn a safaridriver instance; instead, connect to an already-running service that was launched externally.
- desired_capabilities: Dictionary object with desired capabilities (Can be used to provide various Safari switches).
- quiet - If True, the driver's stdout and stderr is suppressed.
- keep_alive - Whether to configure SafariRemoteConnection to use
HTTP keep-alive. Defaults to False.
"""
self._reuse_service = reuse_service
self.service = Service(executable_path, port=port, quiet=quiet)
if not reuse_service:
self.service.start()
executor = SafariRemoteConnection(remote_server_addr=self.service.service_url,
keep_alive=keep_alive)
RemoteWebDriver.__init__(
self,
command_executor=executor,
desired_capabilities=desired_capabilities)
self._is_remote = False
def quit(self):
"""
Closes the browser and shuts down the SafariDriver executable
that is started when starting the SafariDriver
"""
try:
RemoteWebDriver.quit(self)
except http_client.BadStatusLine:
pass
finally:
if not self._reuse_service:
self.service.stop()
# safaridriver extension commands. The canonical command support matrix is here:
# https://developer.apple.com/library/content/documentation/NetworkingInternetWeb/Conceptual/WebDriverEndpointDoc/Commands/Commands.html
# First available in Safari 11.1 and Safari Technology Preview 41.
def set_permission(self, permission, value):
if not isinstance(value, bool):
raise WebDriverException("Value of a session permission must be set to True or False.")
payload = {}
payload[permission] = value
self.execute("SET_PERMISSIONS", {"permissions": payload})
# First available in Safari 11.1 and Safari Technology Preview 41.
def get_permission(self, permission):
payload = self.execute("GET_PERMISSIONS")["value"]
permissions = payload["permissions"]
if not permissions:
return None
if permission not in permissions:
return None
value = permissions[permission]
if not isinstance(value, bool):
return None
return value
# First available in Safari 11.1 and Safari Technology Preview 42.
def debug(self):
self.execute("ATTACH_DEBUGGER")
self.execute_script("debugger;")
|
krmahadevan/selenium
|
py/selenium/webdriver/safari/webdriver.py
|
Python
|
apache-2.0
| 4,520 | 0.001991 |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
The system for scheduling tasks and executing them in order.
Deals with dependencies, priorities, resources, etc.
The :py:class:`~luigi.worker.Worker` pulls tasks from the scheduler (usually over the REST interface) and executes them.
See :doc:`/central_scheduler` for more info.
"""
import collections
try:
import cPickle as pickle
except ImportError:
import pickle
import datetime
import functools
import itertools
import logging
import os
import re
import time
from luigi import six
from luigi import configuration
from luigi import notifications
from luigi import parameter
from luigi import task_history as history
from luigi.task_status import DISABLED, DONE, FAILED, PENDING, RUNNING, SUSPENDED, UNKNOWN
from luigi.task import Config
logger = logging.getLogger("luigi.server")
class Scheduler(object):
"""
Abstract base class.
Note that the methods all take string arguments, not Task objects...
"""""
add_task = NotImplemented
get_work = NotImplemented
ping = NotImplemented
UPSTREAM_RUNNING = 'UPSTREAM_RUNNING'
UPSTREAM_MISSING_INPUT = 'UPSTREAM_MISSING_INPUT'
UPSTREAM_FAILED = 'UPSTREAM_FAILED'
UPSTREAM_DISABLED = 'UPSTREAM_DISABLED'
UPSTREAM_SEVERITY_ORDER = (
'',
UPSTREAM_RUNNING,
UPSTREAM_MISSING_INPUT,
UPSTREAM_FAILED,
UPSTREAM_DISABLED,
)
UPSTREAM_SEVERITY_KEY = UPSTREAM_SEVERITY_ORDER.index
STATUS_TO_UPSTREAM_MAP = {
FAILED: UPSTREAM_FAILED,
RUNNING: UPSTREAM_RUNNING,
PENDING: UPSTREAM_MISSING_INPUT,
DISABLED: UPSTREAM_DISABLED,
}
TASK_FAMILY_RE = re.compile(r'([^(_]+)[(_]')
class scheduler(Config):
# TODO(erikbern): the config_path is needed for backwards compatibility. We should drop the compatibility
# at some point (in particular this would force users to replace all dashes with underscores in the config)
retry_delay = parameter.FloatParameter(default=900.0)
remove_delay = parameter.FloatParameter(default=600.0)
worker_disconnect_delay = parameter.FloatParameter(default=60.0)
state_path = parameter.Parameter(default='/var/lib/luigi-server/state.pickle')
# Jobs are disabled if we see more than disable_failures failures in disable_window seconds.
# These disables last for disable_persist seconds.
disable_window = parameter.IntParameter(default=3600,
config_path=dict(section='scheduler', name='disable-window-seconds'))
disable_failures = parameter.IntParameter(default=None,
config_path=dict(section='scheduler', name='disable-num-failures'))
disable_hard_timeout = parameter.IntParameter(default=None,
config_path=dict(section='scheduler', name='disable-hard-timeout'))
disable_persist = parameter.IntParameter(default=86400,
config_path=dict(section='scheduler', name='disable-persist-seconds'))
max_shown_tasks = parameter.IntParameter(default=100000)
max_graph_nodes = parameter.IntParameter(default=100000)
prune_done_tasks = parameter.BoolParameter(default=False)
record_task_history = parameter.BoolParameter(default=False)
prune_on_get_work = parameter.BoolParameter(default=False)
def fix_time(x):
# Backwards compatibility for a fix in Dec 2014. Prior to the fix, pickled state might store datetime objects
# Let's remove this function soon
if isinstance(x, datetime.datetime):
return time.mktime(x.timetuple())
else:
return x
class Failures(object):
"""
This class tracks the number of failures in a given time window.
Failures added are marked with the current timestamp, and this class counts
the number of failures in a sliding time window ending at the present.
"""
def __init__(self, window):
"""
Initialize with the given window.
:param window: how long to track failures for, as a float (number of seconds).
"""
self.window = window
self.failures = collections.deque()
self.first_failure_time = None
def add_failure(self):
"""
Add a failure event with the current timestamp.
"""
failure_time = time.time()
if not self.first_failure_time:
self.first_failure_time = failure_time
self.failures.append(failure_time)
def num_failures(self):
"""
Return the number of failures in the window.
"""
min_time = time.time() - self.window
while self.failures and fix_time(self.failures[0]) < min_time:
self.failures.popleft()
return len(self.failures)
def clear(self):
"""
Clear the failure queue.
"""
self.failures.clear()
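# Illustrative usage sketch (not part of the original module):
#   f = Failures(window=60.0)
#   f.add_failure()
#   f.add_failure()
#   f.num_failures()   # -> 2, counting only failures in the last 60 seconds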
def _get_default(x, default):
if x is not None:
return x
else:
return default
class Task(object):
def __init__(self, task_id, status, deps, resources=None, priority=0, family='', module=None,
params=None, disable_failures=None, disable_window=None, disable_hard_timeout=None,
tracking_url=None):
self.id = task_id
self.stakeholders = set() # worker ids that are somehow related to this task (i.e. don't prune while any of these workers are still active)
self.workers = set() # worker ids that can perform the task - task is 'BROKEN' if none of these workers are active
if deps is None:
self.deps = set()
else:
self.deps = set(deps)
self.status = status # PENDING, RUNNING, FAILED or DONE
self.time = time.time() # Timestamp when task was first added
self.updated = self.time
self.retry = None
self.remove = None
self.worker_running = None # the worker id that is currently running the task or None
self.time_running = None # Timestamp when picked up by worker
self.expl = None
self.priority = priority
self.resources = _get_default(resources, {})
self.family = family
self.module = module
self.params = _get_default(params, {})
self.disable_failures = disable_failures
self.disable_hard_timeout = disable_hard_timeout
self.failures = Failures(disable_window)
self.tracking_url = tracking_url
self.scheduler_disable_time = None
self.runnable = False
def __repr__(self):
return "Task(%r)" % vars(self)
def add_failure(self):
self.failures.add_failure()
def has_excessive_failures(self):
if (self.failures.first_failure_time is not None and
self.disable_hard_timeout):
if (time.time() >= self.failures.first_failure_time +
self.disable_hard_timeout):
return True
if self.failures.num_failures() >= self.disable_failures:
return True
return False
def can_disable(self):
return (self.disable_failures is not None or
self.disable_hard_timeout is not None)
@property
def pretty_id(self):
param_str = ', '.join('{}={}'.format(key, value) for key, value in self.params.items())
return '{}({})'.format(self.family, param_str)
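# Example: a Task with family 'MyTask' and params {'date': '2015-01-01'} gets the
# pretty_id "MyTask(date=2015-01-01)" (family and params here are made-up values).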
class Worker(object):
"""
Structure for tracking worker activity and keeping their references.
"""
def __init__(self, worker_id, last_active=None):
self.id = worker_id
self.reference = None # reference to the worker in the real world. (Currently a dict containing just the host)
self.last_active = last_active or time.time() # seconds since epoch
self.last_get_work = None
self.started = time.time() # seconds since epoch
self.tasks = set() # task objects
self.info = {}
self.disabled = False
def add_info(self, info):
self.info.update(info)
def update(self, worker_reference, get_work=False):
if worker_reference:
self.reference = worker_reference
self.last_active = time.time()
if get_work:
self.last_get_work = time.time()
def prune(self, config):
# Delete workers that haven't said anything for a while (probably killed)
if self.last_active + config.worker_disconnect_delay < time.time():
return True
def get_pending_tasks(self, state):
"""
Get PENDING (and RUNNING) tasks for this worker.
You have to pass in the state for optimization reasons.
"""
if len(self.tasks) < state.num_pending_tasks():
return six.moves.filter(lambda task: task.status in [PENDING, RUNNING],
self.tasks)
else:
return state.get_pending_tasks()
def is_trivial_worker(self, state):
"""
        Return True if this worker is not an assistant and none of its pending
        tasks require resources.
        We have to pass the state parameter for optimization reasons.
"""
if self.assistant:
return False
return all(not task.resources for task in self.get_pending_tasks(state))
@property
def assistant(self):
return self.info.get('assistant', False)
def __str__(self):
return self.id
class SimpleTaskState(object):
"""
    Keep track of the current state and handle persistence.
    The point of this class is to enable other ways to keep state, e.g. by using a database.
    These will be implemented by creating an abstract base class that this and other classes
inherit from.
"""
def __init__(self, state_path):
self._state_path = state_path
self._tasks = {} # map from id to a Task object
self._status_tasks = collections.defaultdict(dict)
self._active_workers = {} # map from id to a Worker object
def get_state(self):
return self._tasks, self._active_workers
def set_state(self, state):
self._tasks, self._active_workers = state
def dump(self):
try:
with open(self._state_path, 'wb') as fobj:
pickle.dump(self.get_state(), fobj)
except IOError:
logger.warning("Failed saving scheduler state", exc_info=1)
else:
logger.info("Saved state in %s", self._state_path)
# prone to lead to crashes when old state is unpickled with updated code. TODO some kind of version control?
def load(self):
if os.path.exists(self._state_path):
logger.info("Attempting to load state from %s", self._state_path)
try:
with open(self._state_path, 'rb') as fobj:
state = pickle.load(fobj)
except BaseException:
logger.exception("Error when loading state. Starting from clean slate.")
return
self.set_state(state)
self._status_tasks = collections.defaultdict(dict)
for task in six.itervalues(self._tasks):
self._status_tasks[task.status][task.id] = task
# Convert from old format
# TODO: this is really ugly, we need something more future-proof
# Every time we add an attribute to the Worker or Task class, this
# code needs to be updated
# Compatibility since 2014-06-02
for k, v in six.iteritems(self._active_workers):
if isinstance(v, float):
self._active_workers[k] = Worker(worker_id=k, last_active=v)
# Compatibility since 2015-05-28
if any(not hasattr(w, 'tasks') for k, w in six.iteritems(self._active_workers)):
# If you load from an old format where Workers don't contain tasks.
for k, worker in six.iteritems(self._active_workers):
worker.tasks = set()
for task in six.itervalues(self._tasks):
for worker_id in task.workers:
self._active_workers[worker_id].tasks.add(task)
# Compatibility since 2015-04-28
if any(not hasattr(t, 'disable_hard_timeout') for t in six.itervalues(self._tasks)):
for t in six.itervalues(self._tasks):
t.disable_hard_timeout = None
else:
logger.info("No prior state file exists at %s. Starting with clean slate", self._state_path)
def get_active_tasks(self, status=None):
if status:
for task in six.itervalues(self._status_tasks[status]):
yield task
else:
for task in six.itervalues(self._tasks):
yield task
def get_running_tasks(self):
return six.itervalues(self._status_tasks[RUNNING])
def get_pending_tasks(self):
return itertools.chain.from_iterable(six.itervalues(self._status_tasks[status])
for status in [PENDING, RUNNING])
def num_pending_tasks(self):
"""
Return how many tasks are PENDING + RUNNING. O(1).
"""
return len(self._status_tasks[PENDING]) + len(self._status_tasks[RUNNING])
def get_task(self, task_id, default=None, setdefault=None):
if setdefault:
task = self._tasks.setdefault(task_id, setdefault)
self._status_tasks[task.status][task.id] = task
return task
else:
return self._tasks.get(task_id, default)
def has_task(self, task_id):
return task_id in self._tasks
def re_enable(self, task, config=None):
task.scheduler_disable_time = None
task.failures.clear()
if config:
self.set_status(task, FAILED, config)
task.failures.clear()
def set_status(self, task, new_status, config=None):
if new_status == FAILED:
assert config is not None
if new_status == DISABLED and task.status == RUNNING:
return
if task.status == DISABLED:
if new_status == DONE:
self.re_enable(task)
# don't allow workers to override a scheduler disable
elif task.scheduler_disable_time is not None and new_status != DISABLED:
return
if new_status == FAILED and task.can_disable() and task.status != DISABLED:
task.add_failure()
if task.has_excessive_failures():
task.scheduler_disable_time = time.time()
new_status = DISABLED
notifications.send_error_email(
'Luigi Scheduler: DISABLED {task} due to excessive failures'.format(task=task.id),
'{task} failed {failures} times in the last {window} seconds, so it is being '
'disabled for {persist} seconds'.format(
failures=config.disable_failures,
task=task.id,
window=config.disable_window,
persist=config.disable_persist,
))
elif new_status == DISABLED:
task.scheduler_disable_time = None
if new_status != task.status:
self._status_tasks[task.status].pop(task.id)
self._status_tasks[new_status][task.id] = task
task.status = new_status
task.updated = time.time()
def fail_dead_worker_task(self, task, config, assistants):
# If a running worker disconnects, tag all its jobs as FAILED and subject it to the same retry logic
if task.status == RUNNING and task.worker_running and task.worker_running not in task.stakeholders | assistants:
logger.info("Task %r is marked as running by disconnected worker %r -> marking as "
"FAILED with retry delay of %rs", task.id, task.worker_running,
config.retry_delay)
task.worker_running = None
self.set_status(task, FAILED, config)
task.retry = time.time() + config.retry_delay
def prune(self, task, config):
remove = False
# Mark tasks with no remaining active stakeholders for deletion
if not task.stakeholders:
if task.remove is None:
logger.info("Task %r has stakeholders %r but none remain connected -> will remove "
"task in %s seconds", task.id, task.stakeholders, config.remove_delay)
task.remove = time.time() + config.remove_delay
# Re-enable task after the disable time expires
if task.status == DISABLED and task.scheduler_disable_time is not None:
if time.time() - fix_time(task.scheduler_disable_time) > config.disable_persist:
self.re_enable(task, config)
# Remove tasks that have no stakeholders
if task.remove and time.time() > task.remove:
logger.info("Removing task %r (no connected stakeholders)", task.id)
remove = True
# Reset FAILED tasks to PENDING if max timeout is reached, and retry delay is >= 0
if task.status == FAILED and config.retry_delay >= 0 and task.retry < time.time():
self.set_status(task, PENDING, config)
return remove
def inactivate_tasks(self, delete_tasks):
# The terminology is a bit confusing: we used to "delete" tasks when they became inactive,
# but with a pluggable state storage, you might very well want to keep some history of
# older tasks as well. That's why we call it "inactivate" (as in the verb)
for task in delete_tasks:
task_obj = self._tasks.pop(task)
self._status_tasks[task_obj.status].pop(task)
def get_active_workers(self, last_active_lt=None, last_get_work_gt=None):
for worker in six.itervalues(self._active_workers):
if last_active_lt is not None and worker.last_active >= last_active_lt:
continue
last_get_work = getattr(worker, 'last_get_work', None)
if last_get_work_gt is not None and (
last_get_work is None or last_get_work <= last_get_work_gt):
continue
yield worker
def get_assistants(self, last_active_lt=None):
return filter(lambda w: w.assistant, self.get_active_workers(last_active_lt))
def get_worker_ids(self):
return self._active_workers.keys() # only used for unit tests
def get_worker(self, worker_id):
return self._active_workers.setdefault(worker_id, Worker(worker_id))
def inactivate_workers(self, delete_workers):
# Mark workers as inactive
for worker in delete_workers:
self._active_workers.pop(worker)
self._remove_workers_from_tasks(delete_workers)
def _remove_workers_from_tasks(self, workers, remove_stakeholders=True):
for task in self.get_active_tasks():
if remove_stakeholders:
task.stakeholders.difference_update(workers)
task.workers.difference_update(workers)
def disable_workers(self, workers):
self._remove_workers_from_tasks(workers, remove_stakeholders=False)
for worker in workers:
self.get_worker(worker).disabled = True
def get_necessary_tasks(self):
necessary_tasks = set()
for task in self.get_active_tasks():
if task.status not in (DONE, DISABLED) or \
getattr(task, 'scheduler_disable_time', None) is not None:
necessary_tasks.update(task.deps)
necessary_tasks.add(task.id)
return necessary_tasks
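# Persistence sketch for SimpleTaskState (the path below is only an example):
#
#   state = SimpleTaskState('/tmp/luigi-state.pickle')
#   state.load()    # logs and starts from a clean slate if the pickle file does not exist yet
#   state.dump()    # pickles the (tasks, active workers) tuple to the state path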
class CentralPlannerScheduler(Scheduler):
"""
Async scheduler that can handle multiple workers, etc.
Can be run locally or on a server (using RemoteScheduler + server.Server).
"""
def __init__(self, config=None, resources=None, task_history_impl=None, **kwargs):
"""
Keyword Arguments:
        :param config: an object of class "scheduler" or None (in which case the global instance will be used)
        :param resources: a dict of str->int constraints
        :param task_history_impl: ignore config and use this object as the task history
"""
self._config = config or scheduler(**kwargs)
self._state = SimpleTaskState(self._config.state_path)
if task_history_impl:
self._task_history = task_history_impl
elif self._config.record_task_history:
from luigi import db_task_history # Needs sqlalchemy, thus imported here
self._task_history = db_task_history.DbTaskHistory()
else:
self._task_history = history.NopHistory()
self._resources = resources or configuration.get_config().getintdict('resources') # TODO: Can we make this a Parameter?
self._make_task = functools.partial(
Task, disable_failures=self._config.disable_failures,
disable_hard_timeout=self._config.disable_hard_timeout,
disable_window=self._config.disable_window)
self._worker_requests = {}
def load(self):
self._state.load()
def dump(self):
self._state.dump()
def prune(self):
logger.info("Starting pruning of task graph")
remove_workers = []
for worker in self._state.get_active_workers():
if worker.prune(self._config):
logger.info("Worker %s timed out (no contact for >=%ss)", worker, self._config.worker_disconnect_delay)
remove_workers.append(worker.id)
self._state.inactivate_workers(remove_workers)
assistant_ids = set(w.id for w in self._state.get_assistants())
remove_tasks = []
if assistant_ids:
necessary_tasks = self._state.get_necessary_tasks()
else:
necessary_tasks = ()
for task in self._state.get_active_tasks():
self._state.fail_dead_worker_task(task, self._config, assistant_ids)
removed = self._state.prune(task, self._config)
if removed and task.id not in necessary_tasks:
remove_tasks.append(task.id)
self._state.inactivate_tasks(remove_tasks)
logger.info("Done pruning task graph")
def update(self, worker_id, worker_reference=None, get_work=False):
"""
        Keep track of when the worker was last active.
"""
worker = self._state.get_worker(worker_id)
worker.update(worker_reference, get_work=get_work)
return not getattr(worker, 'disabled', False)
def _update_priority(self, task, prio, worker):
"""
Update priority of the given task.
Priority can only be increased.
If the task doesn't exist, a placeholder task is created to preserve priority when the task is later scheduled.
"""
task.priority = prio = max(prio, task.priority)
for dep in task.deps or []:
t = self._state.get_task(dep)
if t is not None and prio > t.priority:
self._update_priority(t, prio, worker)
def add_task(self, task_id=None, status=PENDING, runnable=True,
deps=None, new_deps=None, expl=None, resources=None,
priority=0, family='', module=None, params=None,
assistant=False, tracking_url=None, **kwargs):
"""
* add task identified by task_id if it doesn't exist
* if deps is not None, update dependency list
* update status of task
* add additional workers/stakeholders
* update priority when needed
"""
worker_id = kwargs['worker']
worker_enabled = self.update(worker_id)
if worker_enabled:
_default_task = self._make_task(
task_id=task_id, status=PENDING, deps=deps, resources=resources,
priority=priority, family=family, module=module, params=params,
)
else:
_default_task = None
task = self._state.get_task(task_id, setdefault=_default_task)
if task is None or (task.status != RUNNING and not worker_enabled):
return
# for setting priority, we'll sometimes create tasks with unset family and params
if not task.family:
task.family = family
if not getattr(task, 'module', None):
task.module = module
if not task.params:
task.params = _get_default(params, {})
if tracking_url is not None or task.status != RUNNING:
task.tracking_url = tracking_url
if task.remove is not None:
task.remove = None # unmark task for removal so it isn't removed after being added
if expl is not None:
task.expl = expl
if not (task.status == RUNNING and status == PENDING) or new_deps:
# don't allow re-scheduling of task while it is running, it must either fail or succeed first
if status == PENDING or status != task.status:
                # Update the DB only if there was an actual change, to prevent noise.
                # We also check for status == PENDING b/c that's the default value
                # (so checking for status != task.status would lie)
self._update_task_history(task, status)
self._state.set_status(task, PENDING if status == SUSPENDED else status, self._config)
if status == FAILED:
task.retry = self._retry_time(task, self._config)
if deps is not None:
task.deps = set(deps)
if new_deps is not None:
task.deps.update(new_deps)
if resources is not None:
task.resources = resources
if worker_enabled and not assistant:
task.stakeholders.add(worker_id)
# Task dependencies might not exist yet. Let's create dummy tasks for them for now.
# Otherwise the task dependencies might end up being pruned if scheduling takes a long time
for dep in task.deps or []:
t = self._state.get_task(dep, setdefault=self._make_task(task_id=dep, status=UNKNOWN, deps=None, priority=priority))
t.stakeholders.add(worker_id)
self._update_priority(task, priority, worker_id)
if runnable and status != FAILED and worker_enabled:
task.workers.add(worker_id)
self._state.get_worker(worker_id).tasks.add(task)
task.runnable = runnable
def add_worker(self, worker, info, **kwargs):
self._state.get_worker(worker).add_info(info)
def disable_worker(self, worker):
self._state.disable_workers({worker})
def update_resources(self, **resources):
if self._resources is None:
self._resources = {}
self._resources.update(resources)
def _has_resources(self, needed_resources, used_resources):
if needed_resources is None:
return True
available_resources = self._resources or {}
for resource, amount in six.iteritems(needed_resources):
if amount + used_resources[resource] > available_resources.get(resource, 1):
return False
return True
def _used_resources(self):
used_resources = collections.defaultdict(int)
if self._resources is not None:
for task in self._state.get_active_tasks():
if task.status == RUNNING and task.resources:
for resource, amount in six.iteritems(task.resources):
used_resources[resource] += amount
return used_resources
def _rank(self, task):
"""
        Return the rank of a task, used to order tasks for scheduling: higher
        priority first, then earlier-added tasks first.
        :return: a tuple usable as a sort key
"""
return task.priority, -task.time
def _schedulable(self, task):
if task.status != PENDING:
return False
for dep in task.deps:
dep_task = self._state.get_task(dep, default=None)
if dep_task is None or dep_task.status != DONE:
return False
return True
def _retry_time(self, task, config):
return time.time() + config.retry_delay
def get_work(self, host=None, assistant=False, current_tasks=None, **kwargs):
# TODO: remove any expired nodes
        # Algo: iterate over all nodes, find the highest priority node with no dependencies and available
# resources.
# Resource checking looks both at currently available resources and at which resources would
# be available if all running tasks died and we rescheduled all workers greedily. We do both
# checks in order to prevent a worker with many low-priority tasks from starving other
# workers with higher priority tasks that share the same resources.
# TODO: remove tasks that can't be done, figure out if the worker has absolutely
# nothing it can wait for
if self._config.prune_on_get_work:
self.prune()
worker_id = kwargs['worker']
# Return remaining tasks that have no FAILED descendants
self.update(worker_id, {'host': host}, get_work=True)
if assistant:
self.add_worker(worker_id, [('assistant', assistant)])
best_task = None
if current_tasks is not None:
ct_set = set(current_tasks)
for task in sorted(self._state.get_running_tasks(), key=self._rank):
if task.worker_running == worker_id and task.id not in ct_set:
best_task = task
locally_pending_tasks = 0
running_tasks = []
upstream_table = {}
greedy_resources = collections.defaultdict(int)
n_unique_pending = 0
worker = self._state.get_worker(worker_id)
if worker.is_trivial_worker(self._state):
relevant_tasks = worker.get_pending_tasks(self._state)
used_resources = collections.defaultdict(int)
greedy_workers = dict() # If there's no resources, then they can grab any task
else:
relevant_tasks = self._state.get_pending_tasks()
used_resources = self._used_resources()
activity_limit = time.time() - self._config.worker_disconnect_delay
active_workers = self._state.get_active_workers(last_get_work_gt=activity_limit)
greedy_workers = dict((worker.id, worker.info.get('workers', 1))
for worker in active_workers)
tasks = list(relevant_tasks)
tasks.sort(key=self._rank, reverse=True)
for task in tasks:
upstream_status = self._upstream_status(task.id, upstream_table)
in_workers = (assistant and getattr(task, 'runnable', bool(task.workers))) or worker_id in task.workers
if task.status == RUNNING and in_workers:
# Return a list of currently running tasks to the client,
# makes it easier to troubleshoot
other_worker = self._state.get_worker(task.worker_running)
more_info = {'task_id': task.id, 'worker': str(other_worker)}
if other_worker is not None:
more_info.update(other_worker.info)
running_tasks.append(more_info)
if task.status == PENDING and in_workers and upstream_status != UPSTREAM_DISABLED:
locally_pending_tasks += 1
if len(task.workers) == 1 and not assistant:
n_unique_pending += 1
if best_task:
continue
if task.status == RUNNING and (task.worker_running in greedy_workers):
greedy_workers[task.worker_running] -= 1
for resource, amount in six.iteritems((task.resources or {})):
greedy_resources[resource] += amount
if self._schedulable(task) and self._has_resources(task.resources, greedy_resources):
if in_workers and self._has_resources(task.resources, used_resources):
best_task = task
else:
workers = itertools.chain(task.workers, [worker_id]) if assistant else task.workers
for task_worker in workers:
if greedy_workers.get(task_worker, 0) > 0:
# use up a worker
greedy_workers[task_worker] -= 1
# keep track of the resources used in greedy scheduling
for resource, amount in six.iteritems((task.resources or {})):
greedy_resources[resource] += amount
break
reply = {'n_pending_tasks': locally_pending_tasks,
'running_tasks': running_tasks,
'task_id': None,
'n_unique_pending': n_unique_pending}
if best_task:
self._state.set_status(best_task, RUNNING, self._config)
best_task.worker_running = worker_id
best_task.time_running = time.time()
self._update_task_history(best_task, RUNNING, host=host)
reply['task_id'] = best_task.id
reply['task_family'] = best_task.family
reply['task_module'] = getattr(best_task, 'module', None)
reply['task_params'] = best_task.params
return reply
def ping(self, **kwargs):
worker_id = kwargs['worker']
self.update(worker_id)
def _upstream_status(self, task_id, upstream_status_table):
if task_id in upstream_status_table:
return upstream_status_table[task_id]
elif self._state.has_task(task_id):
task_stack = [task_id]
while task_stack:
dep_id = task_stack.pop()
if self._state.has_task(dep_id):
dep = self._state.get_task(dep_id)
if dep.status == DONE:
continue
if dep_id not in upstream_status_table:
if dep.status == PENDING and dep.deps:
task_stack = task_stack + [dep_id] + list(dep.deps)
upstream_status_table[dep_id] = '' # will be updated postorder
else:
dep_status = STATUS_TO_UPSTREAM_MAP.get(dep.status, '')
upstream_status_table[dep_id] = dep_status
elif upstream_status_table[dep_id] == '' and dep.deps:
# This is the postorder update step when we set the
# status based on the previously calculated child elements
upstream_status = [upstream_status_table.get(a_task_id, '') for a_task_id in dep.deps]
upstream_status.append('') # to handle empty list
status = max(upstream_status, key=UPSTREAM_SEVERITY_KEY)
upstream_status_table[dep_id] = status
return upstream_status_table[dep_id]
def _serialize_task(self, task_id, include_deps=True, deps=None):
task = self._state.get_task(task_id)
ret = {
'display_name': task.pretty_id,
'status': task.status,
'workers': list(task.workers),
'worker_running': task.worker_running,
'time_running': getattr(task, "time_running", None),
'start_time': task.time,
'last_updated': getattr(task, "updated", task.time),
'params': task.params,
'name': task.family,
'priority': task.priority,
'resources': task.resources,
'tracking_url': getattr(task, "tracking_url", None),
}
if task.status == DISABLED:
ret['re_enable_able'] = task.scheduler_disable_time is not None
if include_deps:
ret['deps'] = list(task.deps if deps is None else deps)
return ret
def graph(self, **kwargs):
self.prune()
serialized = {}
seen = set()
for task in self._state.get_active_tasks():
serialized.update(self._traverse_graph(task.id, seen))
return serialized
def _filter_done(self, task_ids):
for task_id in task_ids:
task = self._state.get_task(task_id)
if task is None or task.status != DONE:
yield task_id
def _traverse_graph(self, root_task_id, seen=None, dep_func=None, include_done=True):
""" Returns the dependency graph rooted at task_id
This does a breadth-first traversal to find the nodes closest to the
root before hitting the scheduler.max_graph_nodes limit.
:param root_task_id: the id of the graph's root
:return: A map of task id to serialized node
"""
if seen is None:
seen = set()
elif root_task_id in seen:
return {}
if dep_func is None:
def dep_func(t):
return t.deps
seen.add(root_task_id)
serialized = {}
queue = collections.deque([root_task_id])
while queue:
task_id = queue.popleft()
task = self._state.get_task(task_id)
if task is None or not task.family:
logger.warn('Missing task for id [%s]', task_id)
# NOTE : If a dependency is missing from self._state there is no way to deduce the
# task family and parameters.
family_match = TASK_FAMILY_RE.match(task_id)
family = family_match.group(1) if family_match else UNKNOWN
params = {'task_id': task_id}
serialized[task_id] = {
'deps': [],
'status': UNKNOWN,
'workers': [],
'start_time': UNKNOWN,
'params': params,
'name': family,
'display_name': task_id,
'priority': 0,
}
else:
deps = dep_func(task)
if not include_done:
deps = list(self._filter_done(deps))
serialized[task_id] = self._serialize_task(task_id, deps=deps)
for dep in sorted(deps):
if dep not in seen:
seen.add(dep)
queue.append(dep)
if task_id != root_task_id:
del serialized[task_id]['display_name']
if len(serialized) >= self._config.max_graph_nodes:
break
return serialized
def dep_graph(self, task_id, include_done=True, **kwargs):
self.prune()
if not self._state.has_task(task_id):
return {}
return self._traverse_graph(task_id, include_done=include_done)
def inverse_dep_graph(self, task_id, include_done=True, **kwargs):
self.prune()
if not self._state.has_task(task_id):
return {}
inverse_graph = collections.defaultdict(set)
for task in self._state.get_active_tasks():
for dep in task.deps:
inverse_graph[dep].add(task.id)
return self._traverse_graph(
task_id, dep_func=lambda t: inverse_graph[t.id], include_done=include_done)
def task_list(self, status, upstream_status, limit=True, search=None, **kwargs):
"""
Query for a subset of tasks by status.
"""
self.prune()
result = {}
upstream_status_table = {} # used to memoize upstream status
if search is None:
def filter_func(_):
return True
else:
terms = search.split()
def filter_func(t):
return all(term in t.pretty_id for term in terms)
for task in filter(filter_func, self._state.get_active_tasks(status)):
if (task.status != PENDING or not upstream_status or
upstream_status == self._upstream_status(task.id, upstream_status_table)):
serialized = self._serialize_task(task.id, False)
result[task.id] = serialized
if limit and len(result) > self._config.max_shown_tasks:
return {'num_tasks': len(result)}
return result
def _first_task_display_name(self, worker):
task_id = worker.info.get('first_task', '')
if self._state.has_task(task_id):
return self._state.get_task(task_id).pretty_id
else:
return task_id
def worker_list(self, include_running=True, **kwargs):
self.prune()
workers = [
dict(
name=worker.id,
last_active=worker.last_active,
started=getattr(worker, 'started', None),
first_task_display_name=self._first_task_display_name(worker),
**worker.info
) for worker in self._state.get_active_workers()]
workers.sort(key=lambda worker: worker['started'], reverse=True)
if include_running:
running = collections.defaultdict(dict)
num_pending = collections.defaultdict(int)
num_uniques = collections.defaultdict(int)
for task in self._state.get_pending_tasks():
if task.status == RUNNING and task.worker_running:
running[task.worker_running][task.id] = self._serialize_task(task.id, False)
elif task.status == PENDING:
for worker in task.workers:
num_pending[worker] += 1
if len(task.workers) == 1:
num_uniques[list(task.workers)[0]] += 1
for worker in workers:
tasks = running[worker['name']]
worker['num_running'] = len(tasks)
worker['num_pending'] = num_pending[worker['name']]
worker['num_uniques'] = num_uniques[worker['name']]
worker['running'] = tasks
return workers
def resource_list(self):
"""
        Resource usage info and their consumers (tasks).
"""
self.prune()
resources = [
dict(
name=resource,
num_total=r_dict['total'],
num_used=r_dict['used']
) for resource, r_dict in six.iteritems(self.resources())]
if self._resources is not None:
consumers = collections.defaultdict(dict)
for task in self._state.get_running_tasks():
if task.status == RUNNING and task.resources:
for resource, amount in six.iteritems(task.resources):
consumers[resource][task.id] = self._serialize_task(task.id, False)
for resource in resources:
tasks = consumers[resource['name']]
resource['num_consumer'] = len(tasks)
resource['running'] = tasks
return resources
def resources(self):
        ''' get total resources and how much of each is currently used '''
used_resources = self._used_resources()
ret = collections.defaultdict(dict)
        for resource, total in six.iteritems(self._resources):
ret[resource]['total'] = total
if resource in used_resources:
ret[resource]['used'] = used_resources[resource]
else:
ret[resource]['used'] = 0
return ret
def task_search(self, task_str, **kwargs):
"""
Query for a subset of tasks by task_id.
        :param task_str: substring to look for in task ids
        :return: a dict of status -> {task_id: serialized task}
"""
self.prune()
result = collections.defaultdict(dict)
for task in self._state.get_active_tasks():
if task.id.find(task_str) != -1:
serialized = self._serialize_task(task.id, False)
result[task.status][task.id] = serialized
return result
def re_enable_task(self, task_id):
serialized = {}
task = self._state.get_task(task_id)
if task and task.status == DISABLED and task.scheduler_disable_time:
self._state.re_enable(task, self._config)
serialized = self._serialize_task(task_id)
return serialized
def fetch_error(self, task_id, **kwargs):
if self._state.has_task(task_id):
task = self._state.get_task(task_id)
return {"taskId": task_id, "error": task.expl, 'displayName': task.pretty_id}
else:
return {"taskId": task_id, "error": ""}
def _update_task_history(self, task, status, host=None):
try:
if status == DONE or status == FAILED:
successful = (status == DONE)
self._task_history.task_finished(task, successful)
elif status == PENDING:
self._task_history.task_scheduled(task)
elif status == RUNNING:
self._task_history.task_started(task, host)
except BaseException:
logger.warning("Error saving Task history", exc_info=True)
@property
def task_history(self):
# Used by server.py to expose the calls
return self._task_history
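# Minimal local usage sketch (worker and task ids below are made up, not part of this file):
#
#   sch = CentralPlannerScheduler(retry_delay=5.0)
#   sch.add_task(worker='worker-1', task_id='MyTask(param=1)', status=PENDING)
#   sch.get_work(worker='worker-1')   # -> reply dict with 'task_id': 'MyTask(param=1)'
#   sch.add_task(worker='worker-1', task_id='MyTask(param=1)', status=DONE)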
| oldpa/luigi | luigi/scheduler.py | Python | apache-2.0 | 45,889 | 0.002245 |
import nose
from nose.plugins.attrib import attr
import logging
import colorguard
import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))
@attr(speed='slow')
def test_cromu_00070_caching():
# Test exploitation of CROMU_00070 given an input which causes a leak. Then test that we can do it again restoring
# from the cache.
for _ in range(2):
payload = bytes.fromhex("06000006020a00000000000000000000000c030c00000100e1f505000000000000eb")
cg = colorguard.ColorGuard(os.path.join(bin_location, "tests/cgc/CROMU_00070"), payload)
pov = cg.attempt_exploit()
nose.tools.assert_not_equal(pov, None)
nose.tools.assert_true(pov.test_binary())
def run_all():
functions = globals()
all_functions = dict(filter((lambda kv: kv[0].startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
logging.getLogger("colorguard").setLevel("DEBUG")
logging.getLogger("povsim").setLevel("DEBUG")
import sys
if len(sys.argv) > 1:
globals()['test_' + sys.argv[1]]()
else:
run_all()
| mechaphish/colorguard | tests/test_cromu70_caching.py | Python | bsd-2-clause | 1,245 | 0.006426 |
# coding=utf-8
#https://developers.google.com/drive/v3/web/quickstart/python
from __future__ import print_function
import httplib2
import os
import io
from apiclient import discovery
import oauth2client
from oauth2client import client
from oauth2client import tools
from apiclient.http import MediaIoBaseDownload
from apiclient.http import MediaFileUpload
import sys
import argparse
from pyfcm import FCMNotification
import h5py
"""
DESCRIPTION
Script with a class that manages operations with Google Drive.
It uploads files, downloads files and lists files.
"""
class GoogleManager:
def __init__(self):
self.SCOPES = 'https://www.googleapis.com/auth/drive'
self.CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json'
self.APPLICATION_NAME = 'pythonscript'
print("[GOOGLE MANAGER] Google Manager started")
def init_for_upload(self,upload_file=None,upload_file_name=None):
if upload_file and upload_file_name:
self.upload_manager(upload_file,upload_file_name)
print("[GOOGLE MANAGER] Will upload file")
else:
raise ValueError("[ERROR] Object initializer has to have file name to upload and name of uploaded file in upload mode. Initialize object with mode, upload filename and upload destination name")
def init_for_download(self,download_file=None):
if download_file:
self.download_manager(download_file)
print("[GOOGLE MANAGER] Will download file")
else:
raise ValueError("[ERROR] Object initializer has to have file name to download in download mode. Initialize object with mode and file name to download")
    def init_for_list(self):
        self.download_manager(list=True)
def download_file(self,file_id, mimeType, filename,drive_service):
if "google-apps" in mimeType:
return
request = drive_service.files().get_media(fileId=file_id)
fh = io.FileIO(filename, 'wb')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
print("[PROGRESS] Download %d%%." % int(status.progress() * 100))
def get_credentials(self):
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json'
APPLICATION_NAME = 'pythonscript'
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
credentials = tools.run_flow(flow, store)
return credentials
def upload_manager(self,fileToUpload,nameToUpload):
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
drive_service = discovery.build('drive', 'v3', http=http)
file_metadata = {'name' : nameToUpload}
media = MediaFileUpload(fileToUpload,resumable=True)
file = drive_service.files().create(body=file_metadata,media_body=media,fields='id').execute()
print(file)
print("[GOOGLE MANAGER] File with name {} uploaded to Google Drive".format(nameToUpload))
def download_manager(self,fileToDownload=None,list = False):
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
results = service.files().list(pageSize=10,fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
if not items:
print("[GOOGLE MANAGER] No files found.")
else:
for item in items:
name = str(item["name"].encode('ascii', 'ignore'))
print("[GOOGLE MANAGER] Found file -> {}".format(name))
if name == fileToDownload and not list:
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
drive_service = discovery.build('drive', 'v3', http=http)
self.download_file(item['id'],"text/plain",item['name'],drive_service)
#drive = GoogleManager()
#drive.init_for_download("weights.h5")
#drive.init_for_download("model.json")
#drive.init_for_upload("more_images.h5","weights.h5")
#drive.init_for_upload("model_more_images.json","model.json")
#drive.init_for_list()
| aleixo/cnn_fire | googlemanager.py | Python | gpl-3.0 | 4,878 | 0.009842 |
# -*- coding: utf-8 -*-
#Import libraries
from sys import exit
from math import sqrt
#Print title (http://patorjk.com/software/taag/#p=display&f=Small%20Slant&t=Equation%20Solver%20V2.1)
print " ____ __ _ ____ __ _ _____ ___"
print " / __/__ ___ _____ _/ /_(_)__ ___ / __/__ / / _____ ____ | | / /_ | < /"
print " / _// _ `/ // / _ `/ __/ / _ \/ _ \ _\ \/ _ \/ / |/ / -_) __/ | |/ / __/_ / / "
print "/___/\_, /\_,_/\_,_/\__/_/\___/_//_/ /___/\___/_/|___/\__/_/ |___/____(_)_/ "
print " /_/ "
#Welcome phrase
print "\nWelcome in the 'Equation Solver' 2.1 by Rafael Riber .\nPlease give the values for 'a', 'b' and 'c' as follows: f(x) = Ax^2+Bx+C.\n"
#Define check function
def check(x):
if x != 0:
pass
else:
exit("Invalid value. Please enter only numbers other than zero.")
#Input and check
a = float(input("Value of 'A': "))
check(a)
b = float(input("Value of 'B': "))
check(b)
c = float(input("Value of 'C': "))
check(c)
#Formulas
dis = (b * b) - 4 * (a * c)
#Calculus conditions (check the discriminant before taking its square root)
if dis >= 0:
    print "\nThe discriminant is equal to: %s.\n" % (dis)
else:
    exit("The equation has no real roots: The discriminant is negative.")
x1 = (-b - sqrt(dis) ) / (2 * a)
x2 = (-b + sqrt(dis) ) / (2 * a)
x3 = (-b) / (2 * a)
sx = (-b) / (2 * a)
sy = (- dis) / (4 * a)
if dis == 0:
print "Sole root of the equation: (%s). Summit: (%s; %s)\n" % (x3, sx, sy)
else:
print "Roots: (%s; %s)\nSummit: (%s; %s) \n\nThank you for using the Equation Solver by Rafael Riber !" % (x1, x2, sx, sy)
| V3sth4cks153/Python-Programs | equation_solver.py | Python | mit | 1,645 | 0.024924 |
"""
sentry_javascript_lite.plugin
~~~~~~~~~~~~~~~~~~~~~
"""
import re
from django.conf import settings
from sentry.lang.javascript.plugin import JavascriptPlugin
from sentry.lang.javascript.processor import SourceProcessor
from sentry.interfaces.stacktrace import (Frame, Stacktrace)
from sentry_javascript_lite import VERSION
def javascript_lite_preprocess_event(data):
if data.get('platform') != 'javascript':
return
processor = JavascriptLiteSourceProcessor()
return processor.process(data)
class JavascriptPlugin(JavascriptPlugin):
author = 'Chad Killingsworth, Jack Henry and Associates'
author_url = 'https://github.com/Banno/getsentry-javascript-lite'
version = VERSION
description = "Preprocess Raw Javascript Stacktraces"
resource_links = [
('Bug Tracker', 'https://github.com/Banno/getsentry-javascript-lite/issues'),
('Source', 'https://github.com/Banno/getsentry-javascript-lite'),
]
slug = 'javascript-lite'
title = 'Javascript-lite Event Preprocessor'
conf_title = title
conf_key = 'javascript-lite'
def get_event_preprocessors(self, **kwargs):
if not settings.SENTRY_SCRAPE_JAVASCRIPT_CONTEXT:
return []
return [javascript_lite_preprocess_event]
class JavascriptLiteSourceProcessor(SourceProcessor):
chrome_ie_stacktrace_expr = re.compile(r'^\s*at (.*?) ?\(?((?:file|https?|chrome-extension):.*?):(\d+)(?::(\d+))?\)?\s*$',
re.IGNORECASE)
firefox_safari_stacktrace_expr = re.compile(r'^\s*(.*?)(?:\((.*?)\))?@((?:file|https?|chrome).*?):(\d+)(?::(\d+))?\s*$',
re.IGNORECASE)
whitespace_expr = re.compile(r'^\s+')
location_parts_expr = re.compile(r'[\(\)\s]')
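    # Example raw frame lines the two expressions above are meant to match (URLs are made up):
    #   Chrome/IE:       "    at handleClick (https://example.com/app.js:10:5)"
    #   Firefox/Safari:  "handleClick@https://example.com/app.js:10:5"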
def get_stacktraces(self, data):
        stacktraces = super(JavascriptLiteSourceProcessor, self).get_stacktraces(data)
if (not stacktraces and 'extra' in data and
isinstance(data['extra'], dict) and 'rawstack' in data['extra']):
stacktraces = self.format_raw_stacktrace(data['extra']['rawstack'])
if stacktraces:
data['extra'].pop('rawstack', None)
return stacktraces
def format_raw_stacktrace(self, value):
kwargs = {
'frames': [],
'frames_omitted': []
}
for frame in value.split('\n'):
if JavascriptLiteSourceProcessor.chrome_ie_stacktrace_expr.search(frame):
kwargs['frames'].append(self.format_chrome_ie_frame(frame))
elif JavascriptLiteSourceProcessor.firefox_safari_stacktrace_expr.search(frame):
kwargs['frames'].append(self.format_firefox_safari_frame(frame))
if len(kwargs['frames']) > 0:
return [Stacktrace(**kwargs)]
return []
def format_chrome_ie_frame(self, frame):
tokens = JavascriptLiteSourceProcessor.chrome_ie_stacktrace_expr.findall(frame)[0]
frame = {
'filename': tokens[1],
'function': tokens[0] or '?',
'in_app': True,
}
try:
frame['lineno'] = int(float(tokens[2]))
except:
pass
try:
frame['colno'] = int(float(tokens[3]))
except:
pass
return Frame.to_python(frame)
def format_firefox_safari_frame(self, frame):
tokens = JavascriptLiteSourceProcessor.firefox_safari_stacktrace_expr.findall(frame)[0]
frame = {
'filename': tokens[2],
'function': tokens[0] or '?',
'in_app': True,
}
if tokens[1]:
frame['args'] = tokens[1].split(',')
try:
frame['lineno'] = int(float(tokens[3]))
except:
pass
try:
frame['colno'] = int(float(tokens[4]))
except:
pass
return Frame.to_python(frame)
| Banno/getsentry-javascript-lite | sentry_javascript_lite/plugin.py | Python | apache-2.0 | 3,880 | 0.004897 |
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: info@openwns.org
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import scenarios.interfaces
import openwns.geometry.position
import math
class PositionListPlacer(scenarios.interfaces.INodePlacer):
"""
Place a number of nodes on the given positions.
"""
def __init__(self, numberOfNodes = 1, positionsList = [openwns.geometry.position.Position(1,1)], rotate = 0.0):
"""
@type numberOfNodes: int
        @param numberOfNodes: The number of nodes to place
        @type positionsList: list
        @param positionsList: list of positions (one per node), relative to the center
        @type rotate: float
        @param rotate: rotate the final result by this angle in radians [0..2pi]
"""
self.center = openwns.geometry.position.Position(x = 0.0, y = 0.0, z = 0.0)
self.numberOfNodes = numberOfNodes
self.positionsList = positionsList
self.rotate = rotate
def setCenter(self, center):
self.center = center
def getPositions(self):
positions = []
for i in xrange(self.numberOfNodes):
x = self.positionsList[i].x
y = self.positionsList[i].y
v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0)
p = v.turn2D(self.rotate).toPosition()
positions.append(p)
return [p + self.center for p in positions]
def isInside(self, position):
for i in xrange(self.numberOfNodes):
x = self.positionsList[i].x
y = self.positionsList[i].y
v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0)
p = v.turn2D(self.rotate).toPosition()
if p.x + self.center.x == position.x:
return True
return False
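# Usage sketch (coordinates are made-up example values):
#
#   placer = PositionListPlacer(
#       numberOfNodes=2,
#       positionsList=[openwns.geometry.position.Position(10.0, 0.0),
#                      openwns.geometry.position.Position(0.0, 10.0)])
#   placer.setCenter(openwns.geometry.position.Position(x=50.0, y=50.0, z=0.0))
#   placer.getPositions()   # -> the two positions shifted by the center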
| creasyw/IMTAphy | framework/scenarios/PyConfig/scenarios/placer/positionList.py | Python | gpl-2.0 | 2,880 | 0.010069 |
# AsteriskLint -- an Asterisk PBX config syntax checker
# Copyright (C) 2015-2016 Walter Doekes, OSSO B.V.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .config import ConfigAggregator
from .dialplan import DialplanAggregator
from .file import FileReader
from .func_odbc import FuncOdbcAggregator
class FileConfigParser(ConfigAggregator, FileReader):
pass
class FileDialplanParser(DialplanAggregator, FileReader):
pass
class FileFuncOdbcParser(FuncOdbcAggregator, FileReader):
pass
| ossobv/asterisklint | asterisklint/__init__.py | Python | gpl-3.0 | 1,096 | 0 |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for classification of real numbers."""
__author__ = 'Sean Lip'
from core.tests import test_utils
from extensions.rules import real
class RealRuleUnitTests(test_utils.GenericTestBase):
"""Tests for rules operating on Real objects."""
def test_equals_rule(self):
self.assertTrue(real.Equals(3).eval(3))
self.assertTrue(real.Equals(3.0).eval(3))
self.assertFalse(real.Equals(4).eval(3))
def test_is_less_than_rule(self):
self.assertTrue(real.IsLessThan(4).eval(3))
self.assertTrue(real.IsLessThan(4).eval(3.0))
self.assertTrue(real.IsLessThan(4.0).eval(3.0))
self.assertFalse(real.IsLessThan(3).eval(3))
self.assertFalse(real.IsLessThan(3.0).eval(3.0))
self.assertFalse(real.IsLessThan(3.0).eval(4.0))
self.assertFalse(real.IsLessThan(3).eval(4))
def test_is_greater_than_rule(self):
self.assertTrue(real.IsGreaterThan(3).eval(4))
self.assertTrue(real.IsGreaterThan(3.0).eval(4))
self.assertTrue(real.IsGreaterThan(3.0).eval(4.0))
self.assertFalse(real.IsGreaterThan(3).eval(3))
self.assertFalse(real.IsGreaterThan(3.0).eval(3.0))
self.assertFalse(real.IsGreaterThan(4.0).eval(3.0))
self.assertFalse(real.IsGreaterThan(4).eval(3))
def test_is_less_than_or_equal_to_rule(self):
rule = real.IsLessThanOrEqualTo(3)
self.assertTrue(rule.eval(2))
self.assertTrue(rule.eval(3))
self.assertFalse(rule.eval(4))
def test_is_greater_than_or_equal_to_rule(self):
rule = real.IsGreaterThanOrEqualTo(3)
self.assertTrue(rule.eval(4))
self.assertTrue(rule.eval(3))
self.assertFalse(rule.eval(2))
def test_is_inclusively_between_rule(self):
with self.assertRaises(AssertionError):
real.IsInclusivelyBetween(2, 1)
rule = real.IsInclusivelyBetween(1, 3)
self.assertTrue(rule.eval(2))
self.assertTrue(rule.eval(1))
self.assertTrue(rule.eval(3))
self.assertTrue(rule.eval(1.0))
self.assertFalse(rule.eval(3.001))
def test_is_within_tolerance_rule(self):
rule = real.IsWithinTolerance(0.5, 0)
self.assertTrue(rule.eval(0))
self.assertTrue(rule.eval(0.5))
self.assertFalse(rule.eval(0.51))
| won0089/oppia | extensions/rules/real_test.py | Python | apache-2.0 | 2,944 | 0 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('layers', '0002_initial_step2'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Upload',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('import_id', models.BigIntegerField(null=True)),
('state', models.CharField(max_length=16)),
('date', models.DateTimeField(default=datetime.datetime.now, verbose_name=b'date')),
('upload_dir', models.CharField(max_length=100, null=True)),
('name', models.CharField(max_length=64, null=True)),
('complete', models.BooleanField(default=False)),
('session', models.TextField(null=True)),
('metadata', models.TextField(null=True)),
('mosaic_time_regex', models.CharField(max_length=128, null=True)),
('mosaic_time_value', models.CharField(max_length=128, null=True)),
('mosaic_elev_regex', models.CharField(max_length=128, null=True)),
('mosaic_elev_value', models.CharField(max_length=128, null=True)),
('layer', models.ForeignKey(to='layers.Layer', null=True)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ['-date'],
},
),
migrations.CreateModel(
name='UploadFile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('file', models.FileField(upload_to=b'uploads')),
('slug', models.SlugField(blank=True)),
('upload', models.ForeignKey(blank=True, to='upload.Upload', null=True)),
],
),
]
| Geode/geonode | geonode/upload/migrations/0001_initial.py | Python | gpl-3.0 | 2,961 | 0.00304 |
# Sample 5
import socket
import sys
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error as msg:
print('Failed to create socket!')
    print('Error: ' + str(msg))
sys.exit()
print('Socked created successfully.')
# Part 1
host = ''
port = 8888
try:
s.bind((host, port))
except socket.error as msg:
    print('Bind failed! Error: ' + str(msg))
sys.exit()
print('Socket bind complete.')
s.listen(10) # Limitation to number of connections that can be in the queue
print('Socket is now listening.')
# Part 3 - the while loop to keep the socket listening for clients
while True:
conn, addr = s.accept() # blocking call, to accept the first client that comes
# can type in bash the following to talk to the socket: telnet localhost 8888
# Part 2
data = conn.recv(1024)
if not data:
break
reply = '<<<Hello ' + str(data) + '>>>'
conn.sendall(reply.encode('UTF8'))
# once you start the socket with python sample5.py
# try telnet localhost 8888 in another terminal
# type test, and it should echo back <<<Hello test>>>
conn.close()
s.close()
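# A minimal client sketch for the server above (run it in a second terminal):
#
#   import socket
#   c = socket.create_connection(('localhost', 8888))
#   c.sendall(b'test')
#   print(c.recv(1024))   # b"<<<Hello b'test'>>>"
#   c.close()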
| jessicayuen/cmput410-lab2 | sample5.py | Python | gpl-3.0 | 1,165 | 0.017167 |
import typing as t
import warnings
from .request import Request
class _FakeSubclassCheck(type):
def __subclasscheck__(cls, subclass: t.Type) -> bool:
warnings.warn(
"'BaseRequest' is deprecated and will be removed in"
" Werkzeug 2.1. Use 'issubclass(cls, Request)' instead.",
DeprecationWarning,
stacklevel=2,
)
return issubclass(subclass, Request)
def __instancecheck__(cls, instance: t.Any) -> bool:
warnings.warn(
"'BaseRequest' is deprecated and will be removed in"
" Werkzeug 2.1. Use 'isinstance(obj, Request)' instead.",
DeprecationWarning,
stacklevel=2,
)
return isinstance(instance, Request)
class BaseRequest(Request, metaclass=_FakeSubclassCheck):
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
warnings.warn(
"'BaseRequest' is deprecated and will be removed in"
" Werkzeug 2.1. 'Request' now includes the functionality"
" directly.",
DeprecationWarning,
stacklevel=2,
)
super().__init__(*args, **kwargs)
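# Behaviour sketch: the shim still answers isinstance/issubclass checks, but every use
# emits a DeprecationWarning pointing at Request.
#
#   import warnings
#   with warnings.catch_warnings(record=True) as caught:
#       warnings.simplefilter("always")
#       assert issubclass(Request, BaseRequest)           # True via _FakeSubclassCheck
#   assert caught[0].category is DeprecationWarning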
| mitsuhiko/werkzeug | src/werkzeug/wrappers/base_request.py | Python | bsd-3-clause | 1,174 | 0 |
# -*- coding: utf-8 -*-
import rlp
import secp256k1
from rlp.sedes import big_endian_int, binary, Binary
from rlp.utils import str_to_bytes, ascii_chr
from eth_utils.address import to_normalized_address
from eth_utils.hexidecimal import encode_hex, decode_hex
try:
from Crypto.Hash import keccak
sha3_256 = lambda x: keccak.new(digest_bits=256, data=x).digest()
except ImportError:
import sha3 as _sha3
sha3_256 = lambda x: _sha3.keccak_256(x).digest()
from py_ecc.secp256k1 import privtopub, ecdsa_raw_sign, ecdsa_raw_recover
from icenine.contrib.keys import privtoaddr
#from ethereum.utils import encode_hex
#from ethereum.exceptions import InvalidTransaction
#from ethereum import bloom
#from ethereum import opcodes
#from ethereum import utils
#from ethereum.slogging import get_logger
#from ethereum.utils import TT256, mk_contract_address, zpad, int_to_32bytearray, big_endian_to_int, ecsign, ecrecover_to_pub, normalize_key
# Reimplemented from ethereum.utils
def sha3(seed):
return sha3_256(to_string(seed))
big_endian_to_int = lambda x: big_endian_int.deserialize(str_to_bytes(x).lstrip(b'\x00'))
is_numeric = lambda x: isinstance(x, int)
def bytearray_to_bytestr(value):
return bytes(value)
def to_string(value):
if isinstance(value, bytes):
return value
if isinstance(value, str):
return bytes(value, 'utf-8')
if isinstance(value, int):
return bytes(str(value), 'utf-8')
def normalize_address(x, allow_blank=False):
if is_numeric(x):
return int_to_addr(x)
if allow_blank and x in {'', b''}:
return b''
if len(x) in (42, 50) and x[:2] in {'0x', b'0x'}:
x = x[2:]
if len(x) in (40, 48):
x = decode_hex(x)
if len(x) == 24:
assert len(x) == 24 and sha3(x[:20])[:4] == x[-4:]
x = x[:20]
if len(x) != 20:
raise Exception("Invalid address format: %r" % x)
return x
def normalize_key(key):
if is_numeric(key):
o = encode_int32(key)
elif len(key) == 32:
o = key
elif len(key) == 64:
o = decode_hex(key)
elif len(key) == 66 and key[:2] == '0x':
o = decode_hex(key[2:])
else:
raise Exception("Invalid key format: %r" % key)
if o == b'\x00' * 32:
raise Exception("Zero privkey invalid")
return o
def safe_ord(value):
if isinstance(value, int):
return value
else:
return ord(value)
def ecsign(rawhash, key):
if secp256k1 and hasattr(secp256k1, 'PrivateKey'):
pk = secp256k1.PrivateKey(key, raw=True)
signature = pk.ecdsa_recoverable_serialize(
pk.ecdsa_sign_recoverable(rawhash, raw=True)
)
signature = signature[0] + bytearray_to_bytestr([signature[1]])
v = safe_ord(signature[64]) + 27
r = big_endian_to_int(signature[0:32])
s = big_endian_to_int(signature[32:64])
else:
v, r, s = ecdsa_raw_sign(rawhash, key)
return v, r, s
# end reimplementation
#log = get_logger('eth.chain.tx')
TT256 = 2 ** 256
TT256M1 = 2 ** 256 - 1
TT255 = 2 ** 255
SECP256K1P = 2**256 - 4294968273
# in the yellow paper it is specified that s should be smaller than secpk1n (eq.205)
secpk1n = 115792089237316195423570985008687907852837564279074904382605163141518161494337
null_address = b'\xff' * 20
address_type = Binary.fixed_length(20, allow_empty=True)
class Transaction(rlp.Serializable):
"""
A transaction is stored as:
[nonce, gasprice, startgas, to, value, data, v, r, s]
nonce is the number of transactions already sent by that account, encoded
in binary form (eg. 0 -> '', 7 -> '\x07', 1000 -> '\x03\xd8').
(v,r,s) is the raw Electrum-style signature of the transaction without the
signature made with the private key corresponding to the sending account,
with 0 <= v <= 3. From an Electrum-style signature (65 bytes) it is
possible to extract the public key, and thereby the address, directly.
A valid transaction is one where:
(i) the signature is well-formed (ie. 0 <= v <= 3, 0 <= r < P, 0 <= s < N,
0 <= r < P - N if v >= 2), and
(ii) the sending account has enough funds to pay the fee and the value.
"""
fields = [
('nonce', big_endian_int),
('gasprice', big_endian_int),
('startgas', big_endian_int),
('to', address_type),
('value', big_endian_int),
('data', binary),
('v', big_endian_int),
('r', big_endian_int),
('s', big_endian_int),
]
_sender = None
def __init__(self, nonce, gasprice, startgas, to, value, data, v=0, r=0, s=0):
self.data = None
to = normalize_address(to, allow_blank=True)
super(Transaction, self).__init__(nonce, gasprice, startgas, to, value, data, v, r, s)
if self.gasprice >= TT256 or self.startgas >= TT256 or \
self.value >= TT256 or self.nonce >= TT256:
raise InvalidTransaction("Values way too high!")
@property
def sender(self):
if not self._sender:
# Determine sender
if self.r == 0 and self.s == 0:
self._sender = null_address
else:
if self.v in (27, 28):
vee = self.v
sighash = sha3(rlp.encode(self, UnsignedTransaction))
elif self.v >= 37:
vee = self.v - self.network_id * 2 - 8
assert vee in (27, 28)
rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[:-3] + [self.network_id, '', ''])
sighash = sha3(rlpdata)
else:
raise InvalidTransaction("Invalid V value")
if self.r >= secpk1n or self.s >= secpk1n or self.r == 0 or self.s == 0:
raise InvalidTransaction("Invalid signature values!")
pub = ecrecover_to_pub(sighash, vee, self.r, self.s)
if pub == b"\x00" * 64:
raise InvalidTransaction("Invalid signature (zero privkey cannot sign)")
self._sender = sha3(pub)[-20:]
return self._sender
@property
def network_id(self):
if self.r == 0 and self.s == 0:
return self.v
elif self.v in (27, 28):
return None
else:
return ((self.v - 1) // 2) - 17
@sender.setter
def sender(self, value):
self._sender = value
def sign(self, key, network_id=None):
"""Sign this transaction with a private key.
A potentially already existing signature would be overridden.
"""
if network_id is None:
rawhash = sha3(rlp.encode(self, UnsignedTransaction))
else:
assert 1 <= network_id < 2**63 - 18
rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[:-3] + [network_id, b'', b''])
rawhash = sha3(rlpdata)
key = normalize_key(key)
self.v, self.r, self.s = ecsign(rawhash, key)
if network_id is not None:
self.v += 8 + network_id * 2
self._sender = privtoaddr(key)
return self
@property
def hash(self):
return sha3(rlp.encode(self))
def to_dict(self):
d = {}
for name, _ in self.__class__.fields:
d[name] = getattr(self, name)
if name in ('to', 'data'):
d[name] = '0x' + encode_hex(d[name])
d['sender'] = '0x' + encode_hex(self.sender)
d['hash'] = '0x' + encode_hex(self.hash)
return d
@property
def intrinsic_gas_used(self):
num_zero_bytes = str_to_bytes(self.data).count(ascii_chr(0))
num_non_zero_bytes = len(self.data) - num_zero_bytes
return (opcodes.GTXCOST
# + (0 if self.to else opcodes.CREATE[3])
+ opcodes.GTXDATAZERO * num_zero_bytes
+ opcodes.GTXDATANONZERO * num_non_zero_bytes)
@property
def creates(self):
"returns the address of a contract created by this tx"
        # `to` is normalized to bytes, so compare against the bytes zero-address
        if self.to in (b'', b'\x00' * 20):
return mk_contract_address(self.sender, self.nonce)
def __eq__(self, other):
return isinstance(other, self.__class__) and self.hash == other.hash
def __lt__(self, other):
return isinstance(other, self.__class__) and self.hash < other.hash
def __hash__(self):
return big_endian_to_int(self.hash)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return '<Transaction(%s)>' % encode_hex(self.hash)[:4]
def __structlog__(self):
return encode_hex(self.hash)
    # This method should only be called for block numbers >= HOMESTEAD_FORK_BLKNUM.
    # The >= operator is replaced by > because integer division rounds N/2 down,
    # producing a value 0.5 smaller than the real N/2.
def check_low_s_metropolis(self):
if self.s > secpk1n // 2:
raise InvalidTransaction("Invalid signature S value!")
def check_low_s_homestead(self):
if self.s > secpk1n // 2 or self.s == 0:
raise InvalidTransaction("Invalid signature S value!")
UnsignedTransaction = Transaction.exclude(['v', 'r', 's'])
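# --- Minimal usage sketch (not part of the original module); all values are
# hypothetical and it assumes the crypto helpers imported above (sha3,
# privtoaddr, ecsign, encode_hex) are available.
def _demo_sign_and_recover():
    demo_key = sha3(b'illustration only - not a real key')  # throwaway 32-byte privkey
    demo_to = b'\xaa' * 20                                   # placeholder recipient address
    tx = Transaction(nonce=0, gasprice=10 ** 9, startgas=21000,
                     to=demo_to, value=1, data=b'')
    tx.sign(demo_key, network_id=1)
    # Recovering the sender from (v, r, s) must yield the signing address.
    assert tx.sender == privtoaddr(demo_key)
    return encode_hex(tx.hash)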
| mikeshultz/icenine | icenine/contrib/transactions.py | Python | gpl-3.0 | 9,242 | 0.003787 |
# This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
from . import messages
from .tools import expect
#
# Ontology functions
#
@expect(messages.OntologyAddress, field="address")
def get_address(client, address_n, show_display=False):
return client.call(
messages.OntologyGetAddress(address_n=address_n, show_display=show_display)
)
@expect(messages.OntologyPublicKey)
def get_public_key(client, address_n, show_display=False):
return client.call(
messages.OntologyGetPublicKey(address_n=address_n, show_display=show_display)
)
@expect(messages.OntologySignedTransfer)
def sign_transfer(client, address_n, t, tr):
return client.call(
messages.OntologySignTransfer(address_n=address_n, transaction=t, transfer=tr)
)
@expect(messages.OntologySignedWithdrawOng)
def sign_withdrawal(client, address_n, t, w):
return client.call(
messages.OntologySignWithdrawOng(
address_n=address_n, transaction=t, withdraw_ong=w
)
)
@expect(messages.OntologySignedOntIdRegister)
def sign_register(client, address_n, t, r):
return client.call(
messages.OntologySignOntIdRegister(
address_n=address_n, transaction=t, ont_id_register=r
)
)
@expect(messages.OntologySignedOntIdAddAttributes)
def sign_add_attr(client, address_n, t, a):
return client.call(
messages.OntologySignOntIdAddAttributes(
address_n=address_n, transaction=t, ont_id_add_attributes=a
)
)
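# --- Illustrative sketch (not part of the original module): fetching an
# Ontology address, assuming the caller already holds a connected
# TrezorClient instance. The derivation path below (coin type 1024) is a
# hypothetical example account.
def _example_get_ontology_address(client):
    from .tools import parse_path
    address_n = parse_path("m/44'/1024'/0'/0/0")
    return get_address(client, address_n, show_display=False)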
| jhoenicke/python-trezor | trezorlib/ontology.py | Python | lgpl-3.0 | 2,134 | 0.001406 |
assignments = []
rows = 'ABCDEFGHI'
cols = '123456789'
def assign_value(values, box, value):
"""
Please use this function to update your values dictionary!
Assigns a value to a given box. If it updates the board record it.
"""
# Don't waste memory appending actions that don't actually change any values
if values[box] == value:
return values
values[box] = value
if len(value) == 1:
assignments.append(values.copy())
return values
def naked_twins(values):
"""Eliminate values using the naked twins strategy.
Args:
values(dict): a dictionary of the form {'box_name': '123456789', ...}
Returns:
the values dictionary with the naked twins eliminated from peers.
"""
# Find all instances of naked twins
twins_list = []
for box in boxes:
if len(values[box]) == 2:
for peer in peers[box]:
if values[peer] == values[box]:
twins_list.append([box,peer])
# Eliminate the naked twins as possibilities for their peers
if twins_list:
for twins in twins_list:
# intersect list of twins' peers for common units
twins_peers = set(peers[twins[0]]).intersection(set(peers[twins[1]]))
for peer in twins_peers:
for v in values[twins[0]]:
values = assign_value(values, peer, values[peer].replace(v,''))
return values
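# --- Illustrative sketch (not part of the original solution): a tiny
# demonstration of naked twins pruning. Box names and values below are
# hypothetical; the helper relies on the module-level `boxes`/`peers`
# defined further down (they are resolved at call time).
def _naked_twins_example():
    values = grid_values('.' * 81)             # every box starts as '123456789'
    values = assign_value(values, 'A1', '23')  # A1 and A2 form a naked twin
    values = assign_value(values, 'A2', '23')  # pair in row A
    values = assign_value(values, 'A3', '234')
    pruned = naked_twins(values)
    # '2' and '3' are removed from the twins' shared peers, so A3 collapses to '4'.
    return pruned['A3']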
def cross(A, B):
"Cross product of elements in A and elements in B."
return [s+t for s in A for t in B]
def diag(A, B):
"Diagonals of A elements with elements in B."
return [A[r]+B[c] for r in range(len(A)) for c in range(len(B)) if r == c]
def grid_values(grid):
"""
Convert grid into a dict of {square: char} with '123456789' for empties.
Args:
grid(string) - A grid in string form.
Returns:
A grid in dictionary form
Keys: The boxes, e.g., 'A1'
Values: The value in each box, e.g., '8'. If the box has no value, then the value will be '123456789'.
"""
return dict((boxes[i], grid[i] if (grid[i] != '.') else '123456789') for i in range(len(boxes)))
def display(values):
"""
Display the values as a 2-D grid.
Args:
values(dict): The sudoku in dictionary form
"""
width = 1+max(len(values[s]) for s in boxes)
line = '+'.join(['-'*(width*3)]*3)
for r in rows:
print(''.join(values[r+c].center(width)+('|' if c in '36' else '')
for c in cols))
if r in 'CF': print(line)
return
def eliminate(values):
for box,value in values.items():
if len(value) == 1:
for peer in peers[box]:
values = assign_value(values, peer, values[peer].replace(value,''))
return values
def only_choice(values):
for box,v in values.items():
if len(v) > 1:
for unit in units[box]:
pval = str().join(values[key] for key in unit if key != box)
d = [val for val in v if val not in pval]
if len(d) == 1:
values = assign_value(values, box, d[0])
return values
def reduce_puzzle(values):
stalled = False
while not stalled:
# Check how many boxes have a determined value
solved_values_before = len([box for box in values.keys() if len(values[box]) == 1])
# Use the Eliminate Strategy
values = eliminate(values)
# Use the Only Choice Strategy
values = only_choice(values)
# Use the Naked Twins Strategy
values = naked_twins(values)
# Check how many boxes have a determined value, to compare
solved_values_after = len([box for box in values.keys() if len(values[box]) == 1])
# If no new values were added, stop the loop.
stalled = solved_values_before == solved_values_after
# Sanity check, return False if there is a box with zero available values:
if len([box for box in values.keys() if len(values[box]) == 0]):
return False
return values
def search(values):
# First, reduce the puzzle using the previous function
values = reduce_puzzle(values)
if not values:
return False
# Return solution if all box have unique value
if all(len(v) == 1 for v in values.values()):
return values
# Choose one of the unfilled squares with the fewest possibilities
_,box = min((len(v),k) for k,v in values.items() if len(v) > 1)
# Now use recursion to solve each one of the resulting sudokus, and if one returns a value (not False), return that answer!
# If you're stuck, see the solution.py tab!
for val in values[box]:
new_values = values.copy()
new_values[box] = val
res = search(new_values)
if res:
return res
def solve(grid):
"""
Find the solution to a Sudoku grid.
Args:
grid(string): a string representing a sudoku grid.
Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
Returns:
The dictionary representation of the final sudoku grid. False if no solution exists.
"""
return search(grid_values(grid))
boxes = cross(rows, cols)
row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI') for cs in ('123','456','789')]
diag_units = [diag(rows, cols)] + [diag(rows, cols[::-1])]
unitlist = row_units + column_units + square_units + diag_units
units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s],[]))-set([s])) for s in boxes)
if __name__ == '__main__':
diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
display(solve(diag_sudoku_grid))
try:
from visualize import visualize_assignments
visualize_assignments(assignments)
except SystemExit:
pass
except:
print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
| edno/udacity-sandbox | ud889/AIND_Sudoku/solution.py | Python | unlicense | 6,141 | 0.006839 |
import click
import pickle
from build import Build
@click.group()
def cli():
pass
@cli.command()
@click.option('--cache-file', default='test-cache')
@click.option('--query')
def query(cache_file, query):
with open(cache_file, 'rb') as f:
key, criteria = query.split('=')
buildobjs = pickle.load(f)
for name, build in buildobjs.items():
item = getattr(build, key, '')
if criteria in item:
print(build, item)
cli()
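# --- Usage sketch (not part of the original script). The cache file name and
# the attribute being matched are hypothetical; `--query` takes the form
# <attribute>=<substring> and prints every cached Build whose attribute
# contains the substring:
#
#     python cachequery.py query --cache-file test-cache --query branch=master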
| jpmontez/jenkins-rpc | scripts/build-summary/cachequery.py | Python | gpl-2.0 | 492 | 0 |
from dnfpyUtils.stats.statistic import Statistic
import numpy as np
class Trajectory(Statistic):
"""
Abstract class for trajectory
"""
def __init__(self,name,dt=0.1,dim=0,**kwargs):
super().__init__(name=name,size=0,dim=dim,dt=dt,**kwargs)
self.trace = [] #save the trace
def getViewData(self):
return self._data#,self.getMean()
def reset(self):
super().reset()
self.trace = []
self._data = np.nan
def getMean(self):
return np.nanmean(self.trace)
def getRMSE(self):
return np.sqrt(np.nanmean(self.trace))
def getCount(self):
return np.sum(~np.isnan(self.trace))
def getMax(self):
return np.max(self.trace)
def getPercentile(self,percent):
return np.nanpercentile(self.trace,percent)
def getMin(self):
return np.min(self.trace)
def getStd(self):
return np.std(self.trace)
def getTrace(self):
"""
Return the time trace of the statistic
"""
return self.trace
| bchappet/dnfpy | src/dnfpyUtils/stats/trajectory.py | Python | gpl-2.0 | 1,105 | 0.021719 |
# -*- coding: utf-8 -*-
#
# Moonstone is platform for processing of medical images (DICOM).
# Copyright (C) 2009-2011 by Neppo Tecnologia da Informação LTDA
# and Aevum Softwares LTDA
#
# This file is part of Moonstone.
#
# Moonstone is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from PySide import QtCore, QtGui
from mscreen import MScreen
from ....bloodstone.scenes.imageplane import VtkImagePlane
from ..rename import Rename
from ....bloodstone.scenes.cameracontroller2d import CameraController2D
class MWindow(QtGui.QTabWidget):
def __init__(self, ilsa = None, parent=None, serie=None):
logging.debug("In MWindow::__init__()")
super(MWindow, self).__init__(parent)
self._serie = serie
self.createWidgets()
self.createContextMenu()
self.createActions()
self.updateWidgets()
self._ilsa = ilsa
self._mScreens = []
self._yamlPath = None
self._mainImageData = None
self._vtiPath = None
self._cameraController = CameraController2D(self)
def addTab(self, widget, title):
logging.debug("In MWindow::addTab()")
if isinstance(widget, MScreen):
super(MWindow, self).addTab(widget, title)
self._mScreens.append(widget)
else:
raise "Widget is not a instance of MScreen!"
def createWidgets(self):
logging.debug("In MWindow::createWidgets()")
self.rename = Rename(self)
def close(self):
self._mainImageData = None
for mscreen in self._mScreens:
mscreen.close(force=True)
#mscreen.destroy()
#mscreen.setParent(None)
#mscreen = None
#del mscreen
#self._mScreens = None
#self.mouseReleaseEvent = None
super(MWindow, self).close()
def createContextMenu(self):
logging.debug("In AxialPlane::createContextMenu()")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/im-status-message-edit.png"))
self.renameAction = QtGui.QAction(self)
self.renameAction.setText(QtGui.QApplication.translate("MWindow",
"Rename",
None,
QtGui.QApplication.UnicodeUTF8))
self.renameAction.setIconVisibleInMenu(True)
self.renameAction.setObjectName("renameAction")
self.renameAction.setIcon(icon1)
iconDuplicate = QtGui.QIcon()
iconDuplicate.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/document-new.png"))
self.duplicateAction = QtGui.QAction(self)
self.duplicateAction.setText(QtGui.QApplication.translate("MWindow",
"Duplicate",
None,
QtGui.QApplication.UnicodeUTF8))
self.duplicateAction.setIconVisibleInMenu(True)
self.duplicateAction.setObjectName("duplicateAction")
self.duplicateAction.setIcon(iconDuplicate)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/view-refresh.png"))
self.resetAction = QtGui.QAction(self)
self.resetAction.setText(QtGui.QApplication.translate("MWindow",
"Reset",
None,
QtGui.QApplication.UnicodeUTF8))
self.resetAction.setIconVisibleInMenu(True)
self.resetAction.setObjectName("resetAction")
self.resetAction.setIcon(icon2)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/dialog-close.png"))
self.closeAction = QtGui.QAction(self)
self.closeAction.setText(QtGui.QApplication.translate("MWindow",
"Close",
None,
QtGui.QApplication.UnicodeUTF8))
self.closeAction.setIconVisibleInMenu(True)
self.closeAction.setObjectName("closeAction")
self.closeAction.setIcon(icon2)
self.addAxialAction = QtGui.QAction(self)
self.addAxialAction.setText("Axial")
self.addAxialAction.setIconVisibleInMenu(True)
self.addAxialAction.setObjectName("addAxialAction")
self.addCoronalAction = QtGui.QAction(self)
self.addCoronalAction.setText(QtGui.QApplication.translate("MWindow",
"Coronal",
None,
QtGui.QApplication.UnicodeUTF8))
self.addCoronalAction.setIconVisibleInMenu(True)
self.addCoronalAction.setObjectName("addCoronalAction")
self.addSagittalAction = QtGui.QAction(self)
self.addSagittalAction.setText(QtGui.QApplication.translate("MWindow",
"Sagittal",
None,
QtGui.QApplication.UnicodeUTF8))
self.addSagittalAction.setIconVisibleInMenu(True)
self.addSagittalAction.setObjectName("addSagittalAction")
self.addVolumeAction = QtGui.QAction(self)
self.addVolumeAction.setText(QtGui.QApplication.translate("MWindow",
"Volume",
None,
QtGui.QApplication.UnicodeUTF8))
self.addVolumeAction.setIconVisibleInMenu(True)
self.addVolumeAction.setObjectName("addVolumeAction")
self.contextMenu = QtGui.QMenu(self)
self.contextMenu.addAction(self.renameAction)
self.contextMenu.addAction(self.resetAction)
self.contextMenu.addAction(self.duplicateAction)
self.contextMenu.addAction(self.closeAction)
self.contextMenu.setIcon(icon1)
windowMenu = QtGui.QMenu(self.contextMenu)
windowMenu.addAction(self.addAxialAction)
windowMenu.addAction(self.addCoronalAction)
windowMenu.addAction(self.addSagittalAction)
windowMenu.addAction(self.addVolumeAction)
windowMenu.setTitle(QtGui.QApplication.translate("MWindow",
"Add Scene",
None,
QtGui.QApplication.UnicodeUTF8))
self.contextMenu.addAction(windowMenu.menuAction())
def createActions(self):
logging.debug("In MWindow::createActions()")
self.connect(self, QtCore.SIGNAL("tabCloseRequested(int)"),
self.slotTabCloseRequested)
self.connect(self, QtCore.SIGNAL("currentChanged(int)"),
self.slotTabChanged)
self.mouseReleaseEvent = self.rightClickAction
self.connect(self.rename.Ok, QtCore.SIGNAL("clicked()"),
self.slotRenameOkButtonClicked)
self.connect(self.rename.Cancel, QtCore.SIGNAL("clicked()"),
self.slotRenameCancelButtonClicked)
def rightClickAction(self, event):
if event.button() == 2:
pos = QtGui.QCursor.pos()
result = self.contextMenu.exec_(pos)
if result == self.renameAction:
self.rename.newName.setText(self.tabText(self.currentIndex()))
self.rename.show()
elif result == self.closeAction:
self.close()
elif result == self.resetAction:
self.reset()
elif result == self.duplicateAction:
self.duplicate()
elif result == self.addAxialAction:
mscreen = self._mScreens[self.currentIndex()]
mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_AXIAL)
elif result == self.addCoronalAction:
mscreen = self._mScreens[self.currentIndex()]
mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_CORONAL)
elif result == self.addSagittalAction:
mscreen = self._mScreens[self.currentIndex()]
mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_SAGITTAL)
elif result == self.addVolumeAction:
mscreen = self._mScreens[self.currentIndex()]
mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_VOLUME)
def updateWidgets(self):
logging.debug("In MWindow::updateWidgets()")
self.setTabsClosable(True)
self.setMovable(True)
def slotTabCloseRequested(self, index):
logging.debug("In MWindow::slotTabCloseRequested()")
mScreen = self.widget(index)
if mScreen.main or mScreen.references > 0:
QtGui.QMessageBox.critical(self,
QtGui.QApplication.translate(
"Implant", "Error",
None, QtGui.QApplication.UnicodeUTF8),
QtGui.QApplication.translate(
"Implant", "Some tool is locking this tab and it cannot be closed.",
None, QtGui.QApplication.UnicodeUTF8))
return
mScreen.close()
self._mScreens.remove(mScreen)
self.removeTab(index)
if self.currentIndex() == -1:
self.close()
def slotRenameOkButtonClicked(self):
logging.debug("In MWindow::slotRenameOkButtonClicked()")
self.setTabText(self.currentIndex(), self.rename.newName.text())
mScreen = self.widget(self.currentIndex())
mScreen.name = self.rename.newName.text()
self.rename.hide()
def slotRenameCancelButtonClicked(self):
logging.debug("In MWindow::slotRenameCancelButtonClicked()")
self.rename.hide()
def slotTabChanged(self, index):
logging.debug("In MWindow::slotTabCloseRequested()")
if self._mScreens:
self.currentTab().updateWidgets()
def allTabs(self):
logging.debug("In MWindow::allTabs()")
return self._mScreens
def currentTab(self):
logging.debug("In MWindow::currentTab()")
return self._mScreens[self.currentIndex()]
def createMScreensFromImagedata(self, imagedata, cubeCorners=None, name=None, generate3D=1):
logging.debug("In MWindow::createMScreensFromImagedata()")
i = self.count()
name = QtGui.QApplication.translate("MWindow", "Region {0}", None,
QtGui.QApplication.UnicodeUTF8).format(i)
screen = MScreen(mWindow=self, vtkImageData=imagedata, cubeCorners=cubeCorners, name=name)
screen.createScene(VtkImagePlane.PLANE_ORIENTATION_AXIAL)
if generate3D:
screen.createScene(VtkImagePlane.PLANE_ORIENTATION_VOLUME)
screen.createScene(VtkImagePlane.PLANE_ORIENTATION_CORONAL)
screen.createScene(VtkImagePlane.PLANE_ORIENTATION_SAGITTAL)
self.addTab(screen, name)
return screen
def reset(self):
self.currentTab().reset()
def duplicate(self):
self.currentTab().duplicate()
def save(self):
logging.debug("In MWindow::save()")
save = {"vti": self._vtiPath}
mscreens = []
save["mScreens"] = mscreens
save["camera"] = [self.cameraController.getActiveAction(self.cameraController.BUTTON_LEFT),
self.cameraController.getActiveAction(self.cameraController.BUTTON_RIGHT),
self.cameraController.getActiveAction(self.cameraController.BUTTON_MIDDLE),
self.cameraController.getActiveAction(self.cameraController.BUTTON_SCROLL)]
for i, screen in enumerate(self._mScreens):
mscreens.append(screen.save(self._yamlPath, i, self.tabText(i)))
return save
@property
def ilsa(self):
logging.debug("In MWindow::ilsa()")
return self._ilsa
@property
def yamlPath(self):
logging.debug("In MWindow::yamlPath.getter()")
return self._yamlPath
@property
def cameraController(self):
logging.debug("In MWindow::cameraContoller.getter()")
return self._cameraController
@yamlPath.setter
def yamlPath(self, yamlPath):
logging.debug("In MWindow::yamlPath.setter()")
self._yamlPath = yamlPath
@property
def mainImageData(self):
return self._mainImageData
@mainImageData.setter
def mainImageData(self, mainImageData):
self._mainImageData = mainImageData
@property
def serie(self):
return self._serie
@serie.setter
def serie(self, serie):
self._serie = serie
@property
def vtiPath(self):
return self._vtiPath
@vtiPath.setter
def vtiPath(self, vtiPath):
self._vtiPath = vtiPath
@property
def planes(self):
logging.debug("In MWindow::planes.getter()")
planes = []
for screen in self._mScreens:
planes = planes + screen.planes
return planes
| aevum/moonstone | src/moonstone/gui/qt/component/mwindow.py | Python | lgpl-3.0 | 14,404 | 0.00854 |
# Copyright (C) 2012 - 2014 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The consistencygroups api."""
import webob
from webob import exc
from cinder.api import common
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api.views import consistencygroups as consistencygroup_views
from cinder.api import xmlutil
from cinder import consistencygroup as consistencygroupAPI
from cinder import exception
from cinder.i18n import _, _LI
from cinder.openstack.common import log as logging
from cinder import utils
LOG = logging.getLogger(__name__)
def make_consistencygroup(elem):
elem.set('id')
elem.set('status')
elem.set('availability_zone')
elem.set('created_at')
elem.set('name')
elem.set('description')
def make_consistencygroup_from_src(elem):
elem.set('id')
elem.set('status')
elem.set('created_at')
elem.set('name')
elem.set('description')
elem.set('cgsnapshot_id')
class ConsistencyGroupTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('consistencygroup',
selector='consistencygroup')
make_consistencygroup(root)
alias = Consistencygroups.alias
namespace = Consistencygroups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class ConsistencyGroupsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('consistencygroups')
elem = xmlutil.SubTemplateElement(root, 'consistencygroup',
selector='consistencygroups')
make_consistencygroup(elem)
alias = Consistencygroups.alias
namespace = Consistencygroups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class ConsistencyGroupFromSrcTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('consistencygroup-from-src',
selector='consistencygroup-from-src')
make_consistencygroup_from_src(root)
alias = Consistencygroups.alias
namespace = Consistencygroups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class CreateDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
consistencygroup = self._extract_consistencygroup(dom)
return {'body': {'consistencygroup': consistencygroup}}
def _extract_consistencygroup(self, node):
consistencygroup = {}
consistencygroup_node = self.find_first_child_named(
node,
'consistencygroup')
attributes = ['name',
'description']
for attr in attributes:
if consistencygroup_node.getAttribute(attr):
consistencygroup[attr] = consistencygroup_node.\
getAttribute(attr)
return consistencygroup
class CreateFromSrcDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
consistencygroup = self._extract_consistencygroup(dom)
retval = {'body': {'consistencygroup-from-src': consistencygroup}}
return retval
def _extract_consistencygroup(self, node):
consistencygroup = {}
consistencygroup_node = self.find_first_child_named(
node, 'consistencygroup-from-src')
attributes = ['cgsnapshot', 'name', 'description']
for attr in attributes:
if consistencygroup_node.getAttribute(attr):
consistencygroup[attr] = (
consistencygroup_node.getAttribute(attr))
return consistencygroup
class ConsistencyGroupsController(wsgi.Controller):
"""The ConsistencyGroups API controller for the OpenStack API."""
_view_builder_class = consistencygroup_views.ViewBuilder
def __init__(self):
self.consistencygroup_api = consistencygroupAPI.API()
super(ConsistencyGroupsController, self).__init__()
@wsgi.serializers(xml=ConsistencyGroupTemplate)
def show(self, req, id):
"""Return data about the given consistency group."""
LOG.debug('show called for member %s', id)
context = req.environ['cinder.context']
try:
consistencygroup = self.consistencygroup_api.get(
context,
group_id=id)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
return self._view_builder.detail(req, consistencygroup)
def delete(self, req, id, body):
"""Delete a consistency group."""
LOG.debug('delete called for member %s', id)
context = req.environ['cinder.context']
force = False
if body:
cg_body = body['consistencygroup']
force = cg_body.get('force', False)
LOG.info(_LI('Delete consistency group with id: %s'), id,
context=context)
try:
group = self.consistencygroup_api.get(context, id)
self.consistencygroup_api.delete(context, group, force)
except exception.ConsistencyGroupNotFound:
msg = _("Consistency group %s could not be found.") % id
raise exc.HTTPNotFound(explanation=msg)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
return webob.Response(status_int=202)
@wsgi.serializers(xml=ConsistencyGroupsTemplate)
def index(self, req):
"""Returns a summary list of consistency groups."""
return self._get_consistencygroups(req, is_detail=False)
@wsgi.serializers(xml=ConsistencyGroupsTemplate)
def detail(self, req):
"""Returns a detailed list of consistency groups."""
return self._get_consistencygroups(req, is_detail=True)
def _get_consistencygroups(self, req, is_detail):
"""Returns a list of consistency groups through view builder."""
context = req.environ['cinder.context']
consistencygroups = self.consistencygroup_api.get_all(context)
limited_list = common.limited(consistencygroups, req)
if is_detail:
consistencygroups = self._view_builder.detail_list(req,
limited_list)
else:
consistencygroups = self._view_builder.summary_list(req,
limited_list)
return consistencygroups
@wsgi.response(202)
@wsgi.serializers(xml=ConsistencyGroupTemplate)
@wsgi.deserializers(xml=CreateDeserializer)
def create(self, req, body):
"""Create a new consistency group."""
LOG.debug('Creating new consistency group %s', body)
if not self.is_valid_body(body, 'consistencygroup'):
raise exc.HTTPBadRequest()
context = req.environ['cinder.context']
try:
consistencygroup = body['consistencygroup']
except KeyError:
msg = _("Incorrect request body format")
raise exc.HTTPBadRequest(explanation=msg)
name = consistencygroup.get('name', None)
description = consistencygroup.get('description', None)
volume_types = consistencygroup.get('volume_types', None)
if not volume_types:
msg = _("volume_types must be provided to create "
"consistency group %(name)s.") % {'name': name}
raise exc.HTTPBadRequest(explanation=msg)
availability_zone = consistencygroup.get('availability_zone', None)
LOG.info(_LI("Creating consistency group %(name)s."),
{'name': name},
context=context)
try:
new_consistencygroup = self.consistencygroup_api.create(
context, name, description, volume_types,
availability_zone=availability_zone)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.InvalidVolumeType as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
retval = self._view_builder.summary(
req,
dict(new_consistencygroup.iteritems()))
return retval
@wsgi.response(202)
@wsgi.serializers(xml=ConsistencyGroupFromSrcTemplate)
@wsgi.deserializers(xml=CreateFromSrcDeserializer)
def create_from_src(self, req, body):
"""Create a new consistency group from a source.
The source can be a snapshot. It could be extended
in the future to support other sources. Note that
        this does not require volume_types, unlike the "create"
        API above.
"""
LOG.debug('Creating new consistency group %s.', body)
if not self.is_valid_body(body, 'consistencygroup-from-src'):
raise exc.HTTPBadRequest()
context = req.environ['cinder.context']
try:
consistencygroup = body['consistencygroup-from-src']
except KeyError:
msg = _("Incorrect request body format.")
raise exc.HTTPBadRequest(explanation=msg)
name = consistencygroup.get('name', None)
description = consistencygroup.get('description', None)
cgsnapshot_id = consistencygroup.get('cgsnapshot_id', None)
if not cgsnapshot_id:
msg = _("Cgsnapshot id must be provided to create "
"consistency group %(name)s from source.") % {'name': name}
raise exc.HTTPBadRequest(explanation=msg)
LOG.info(_LI("Creating consistency group %(name)s from cgsnapshot "
"%(snap)s."),
{'name': name, 'snap': cgsnapshot_id},
context=context)
try:
new_consistencygroup = self.consistencygroup_api.create_from_src(
context, name, description, cgsnapshot_id)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.CgSnapshotNotFound as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
except exception.CinderException as error:
raise exc.HTTPBadRequest(explanation=error.msg)
retval = self._view_builder.summary(
req,
dict(new_consistencygroup.iteritems()))
return retval
@wsgi.serializers(xml=ConsistencyGroupTemplate)
def update(self, req, id, body):
"""Update the consistency group.
Expected format of the input parameter 'body':
{
"consistencygroup":
{
"name": "my_cg",
"description": "My consistency group",
"add_volumes": "volume-uuid-1,volume-uuid-2,..."
"remove_volumes": "volume-uuid-8,volume-uuid-9,..."
}
}
"""
LOG.debug('Update called for consistency group %s.', id)
if not body:
msg = _("Missing request body.")
raise exc.HTTPBadRequest(explanation=msg)
if not self.is_valid_body(body, 'consistencygroup'):
msg = _("Incorrect request body format.")
raise exc.HTTPBadRequest(explanation=msg)
context = req.environ['cinder.context']
consistencygroup = body.get('consistencygroup', None)
name = consistencygroup.get('name', None)
description = consistencygroup.get('description', None)
add_volumes = consistencygroup.get('add_volumes', None)
remove_volumes = consistencygroup.get('remove_volumes', None)
if (not name and not description and not add_volumes
and not remove_volumes):
msg = _("Name, description, add_volumes, and remove_volumes "
"can not be all empty in the request body.")
raise exc.HTTPBadRequest(explanation=msg)
LOG.info(_LI("Updating consistency group %(id)s with name %(name)s "
"description: %(description)s add_volumes: "
"%(add_volumes)s remove_volumes: %(remove_volumes)s."),
{'id': id, 'name': name,
'description': description,
'add_volumes': add_volumes,
'remove_volumes': remove_volumes},
context=context)
try:
group = self.consistencygroup_api.get(context, id)
self.consistencygroup_api.update(
context, group, name, description,
add_volumes, remove_volumes)
except exception.ConsistencyGroupNotFound:
msg = _("Consistency group %s could not be found.") % id
raise exc.HTTPNotFound(explanation=msg)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
return webob.Response(status_int=202)
class Consistencygroups(extensions.ExtensionDescriptor):
"""consistency groups support."""
name = 'Consistencygroups'
alias = 'consistencygroups'
namespace = 'http://docs.openstack.org/volume/ext/consistencygroups/api/v1'
updated = '2014-08-18T00:00:00+00:00'
def get_resources(self):
resources = []
res = extensions.ResourceExtension(
Consistencygroups.alias, ConsistencyGroupsController(),
collection_actions={'detail': 'GET', 'create_from_src': 'POST'},
member_actions={'delete': 'POST', 'update': 'PUT'})
resources.append(res)
return resources
| Akrog/cinder | cinder/api/contrib/consistencygroups.py | Python | apache-2.0 | 14,556 | 0 |
import unittest
from restkiss.preparers import Preparer, FieldsPreparer
class InstaObj(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class LookupDataTestCase(unittest.TestCase):
def setUp(self):
super(LookupDataTestCase, self).setUp()
self.preparer = FieldsPreparer(fields=None)
self.obj_data = InstaObj(
say='what',
count=453,
moof={
'buried': {
'id': 7,
'data': InstaObj(yes='no')
}
},
parent=None
)
self.dict_data = {
'hello': 'world',
'abc': 123,
'more': {
'things': 'here',
'nested': InstaObj(
awesome=True,
depth=3
),
},
'parent': None,
}
def test_dict_simple(self):
self.assertEqual(self.preparer.lookup_data('hello', self.dict_data), 'world')
self.assertEqual(self.preparer.lookup_data('abc', self.dict_data), 123)
def test_obj_simple(self):
self.assertEqual(self.preparer.lookup_data('say', self.obj_data), 'what')
self.assertEqual(self.preparer.lookup_data('count', self.obj_data), 453)
def test_dict_nested(self):
self.assertEqual(self.preparer.lookup_data('more.things', self.dict_data), 'here')
self.assertEqual(self.preparer.lookup_data('more.nested.depth', self.dict_data), 3)
def test_obj_nested(self):
self.assertEqual(self.preparer.lookup_data('moof.buried.id', self.obj_data), 7)
self.assertEqual(self.preparer.lookup_data('moof.buried.data.yes', self.obj_data), 'no')
def test_dict_miss(self):
with self.assertRaises(KeyError):
self.preparer.lookup_data('another', self.dict_data)
def test_obj_miss(self):
with self.assertRaises(AttributeError):
self.preparer.lookup_data('whee', self.obj_data)
def test_dict_nullable_fk(self):
self.assertEqual(self.preparer.lookup_data('parent.id', self.dict_data), None)
def test_obj_nullable_fk(self):
self.assertEqual(self.preparer.lookup_data('parent.id', self.obj_data), None)
def test_empty_lookup(self):
# We could possibly get here in the recursion.
self.assertEqual(self.preparer.lookup_data('', 'Last value'), 'Last value')
def test_complex_miss(self):
with self.assertRaises(AttributeError):
self.preparer.lookup_data('more.nested.nope', self.dict_data)
| CraveFood/restkiss | tests/test_preparers.py | Python | bsd-3-clause | 2,628 | 0.003805 |
from huzzer.function_generator import generate_expression, generate_unary_expr
from huzzer.expressions import VariableExpression, FunctionExpression, BRANCH_EXPRESSIONS
from huzzer.namers import DefaultNamer
from huzzer import INT, BOOL
empty_variables = {
INT: [],
BOOL: []
}
def test_generate_unary_expr():
ints = [generate_unary_expr(INT, empty_variables, 0) for i in range(50)]
assert all([
x.type_signiature == (INT, INT) and len(x.args) == 1 and type(x.args[0]) == int
for x in ints
])
bools = [generate_unary_expr(BOOL, empty_variables, 0) for i in range(10)]
assert all([
x.type_signiature == (BOOL, BOOL) and len(x.args) == 1 and type(x.args[0]) == bool
for x in bools
])
bool_variable = VariableExpression(BOOL, 1)
just_bools = {
INT: [],
BOOL: [bool_variable]
}
var_expr = generate_unary_expr(BOOL, just_bools, 1)
assert var_expr is bool_variable
int_expr = generate_unary_expr(INT, just_bools, 1)
assert int_expr is not bool_variable
# haskell_type,
# variables,
# functions,
# branch_expressions,
# tree_depth,
# branching_probability=0.4,
# variable_probability=0.7,
# function_call_probability=0.5
def test_generate_expression():
int_function = FunctionExpression([BOOL, INT, INT], 1)
bool_function = FunctionExpression([BOOL, BOOL, BOOL, BOOL], 2)
functions = {
INT: [int_function],
BOOL: [bool_function]
}
# this should definitely start with the bool func, as the probabilities are one
bool_expr = generate_expression(
BOOL,
empty_variables,
functions,
BRANCH_EXPRESSIONS,
2,
branching_probability=1.0,
function_call_probability=1.0
)
assert type(bool_expr) == type(bool_function) and bool_expr.function_id == 2
expr = generate_expression(
BOOL,
empty_variables,
functions,
BRANCH_EXPRESSIONS,
1,
branching_probability=1.0,
function_call_probability=1.0
)
assert expr.type_signiature == (BOOL, BOOL)
assert type(expr) != type(bool_function)
bool_variable = VariableExpression(BOOL, 1)
int_variable = VariableExpression(INT, 2)
variables = {
INT: [int_variable],
BOOL: [bool_variable]
}
var_expr = generate_expression(
BOOL,
variables,
functions,
BRANCH_EXPRESSIONS,
1,
branching_probability=1.0,
function_call_probability=1.0,
variable_probability=1.0
)
assert type(var_expr) is type(bool_variable) and var_expr.var_id == bool_variable.var_id
func_expr_with_only_vars = generate_expression(
BOOL,
variables,
functions,
BRANCH_EXPRESSIONS,
2,
branching_probability=1.0,
function_call_probability=1.0,
variable_probability=1.0
)
assert type(func_expr_with_only_vars) == type(bool_function) and \
all([arg is bool_variable for arg in func_expr_with_only_vars.args])
| coopie/huzzer | test/test_function_generator.py | Python | mit | 3,071 | 0.001954 |
import os
from celery import Celery
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'persephone.settings')
app = Celery('persephone')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
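# --- Optional sketch (not part of the original file): the conventional
# debug task from the Celery/Django docs, handy for verifying that a worker
# picks up tasks registered against this app.
@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))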
| karamanolev/persephone | persephone/persephone/celery.py | Python | mit | 231 | 0 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import re
from ...const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from . import Rule
#-------------------------------------------------------------------------
#
# HasIdOf
#
#-------------------------------------------------------------------------
class RegExpIdBase(Rule):
"""
Objects with a Gramps ID that contains a substring or matches a
regular expression.
"""
labels = [ _('Text:') ]
name = 'Objects with <Id>'
description = "Matches objects whose Gramps ID contains a substring " \
"or matches a regular expression"
category = _('General filters')
allow_regex = True
def apply(self, db, obj):
return self.match_substring(0, obj.gramps_id)
| SNoiraud/gramps | gramps/gen/filters/rules/_regexpidbase.py | Python | gpl-2.0 | 1,899 | 0.005793 |
import numpy as np
from . import _marching_cubes_cy
def marching_cubes(volume, level, spacing=(1., 1., 1.)):
"""
Marching cubes algorithm to find iso-valued surfaces in 3d volumetric data
Parameters
----------
volume : (M, N, P) array of doubles
Input data volume to find isosurfaces. Will be cast to `np.float64`.
level : float
Contour value to search for isosurfaces in `volume`.
spacing : length-3 tuple of floats
Voxel spacing in spatial dimensions corresponding to numpy array
indexing dimensions (M, N, P) as in `volume`.
Returns
-------
verts : (V, 3) array
Spatial coordinates for V unique mesh vertices. Coordinate order
matches input `volume` (M, N, P).
faces : (F, 3) array
Define triangular faces via referencing vertex indices from ``verts``.
This algorithm specifically outputs triangles, so each face has
exactly three indices.
Notes
-----
The marching cubes algorithm is implemented as described in [1]_.
A simple explanation is available here::
http://www.essi.fr/~lingrand/MarchingCubes/algo.html
There are several known ambiguous cases in the marching cubes algorithm.
Using point labeling as in [1]_, Figure 4, as shown::
v8 ------ v7
/ | / | y
/ | / | ^ z
v4 ------ v3 | | /
| v5 ----|- v6 |/ (note: NOT right handed!)
| / | / ----> x
| / | /
v1 ------ v2
Most notably, if v4, v8, v2, and v6 are all >= `level` (or any
generalization of this case) two parallel planes are generated by this
algorithm, separating v4 and v8 from v2 and v6. An equally valid
interpretation would be a single connected thin surface enclosing all
four points. This is the best known ambiguity, though there are others.
This algorithm does not attempt to resolve such ambiguities; it is a naive
implementation of marching cubes as in [1]_, but may be a good beginning
for work with more recent techniques (Dual Marching Cubes, Extended
Marching Cubes, Cubic Marching Squares, etc.).
Because of interactions between neighboring cubes, the isosurface(s)
generated by this algorithm are NOT guaranteed to be closed, particularly
for complicated contours. Furthermore, this algorithm does not guarantee
a single contour will be returned. Indeed, ALL isosurfaces which cross
`level` will be found, regardless of connectivity.
The output is a triangular mesh consisting of a set of unique vertices and
connecting triangles. The order of these vertices and triangles in the
output list is determined by the position of the smallest ``x,y,z`` (in
lexicographical order) coordinate in the contour. This is a side-effect
of how the input array is traversed, but can be relied upon.
To quantify the area of an isosurface generated by this algorithm, pass
the outputs directly into `skimage.measure.mesh_surface_area`.
Regarding visualization of algorithm output, the ``mayavi`` package
is recommended. To contour a volume named `myvolume` about the level 0.0::
>>> from mayavi import mlab # doctest: +SKIP
>>> verts, tris = marching_cubes(myvolume, 0.0, (1., 1., 2.)) # doctest: +SKIP
>>> mlab.triangular_mesh([vert[0] for vert in verts],
... [vert[1] for vert in verts],
... [vert[2] for vert in verts],
... tris) # doctest: +SKIP
>>> mlab.show() # doctest: +SKIP
References
----------
.. [1] Lorensen, William and Harvey E. Cline. Marching Cubes: A High
Resolution 3D Surface Construction Algorithm. Computer Graphics
(SIGGRAPH 87 Proceedings) 21(4) July 1987, p. 163-170).
See Also
--------
skimage.measure.mesh_surface_area
"""
# Check inputs and ensure `volume` is C-contiguous for memoryviews
if volume.ndim != 3:
raise ValueError("Input volume must have 3 dimensions.")
if level < volume.min() or level > volume.max():
raise ValueError("Contour level must be within volume data range.")
volume = np.array(volume, dtype=np.float64, order="C")
# Extract raw triangles using marching cubes in Cython
# Returns a list of length-3 lists, each sub-list containing three
# tuples. The tuples hold (x, y, z) coordinates for triangle vertices.
# Note: this algorithm is fast, but returns degenerate "triangles" which
# have repeated vertices - and equivalent vertices are redundantly
# placed in every triangle they connect with.
raw_tris = _marching_cubes_cy.iterate_and_store_3d(volume, float(level),
spacing)
# Find and collect unique vertices, storing triangle verts as indices.
# Returns a true mesh with no degenerate faces.
verts, faces = _marching_cubes_cy.unpack_unique_verts(raw_tris)
return np.asarray(verts), np.asarray(faces)
def mesh_surface_area(verts, tris):
"""
Compute surface area, given vertices & triangular faces
Parameters
----------
verts : (V, 3) array of floats
Array containing (x, y, z) coordinates for V unique mesh vertices.
faces : (F, 3) array of ints
List of length-3 lists of integers, referencing vertex coordinates as
provided in `verts`
Returns
-------
area : float
Surface area of mesh. Units now [coordinate units] ** 2.
Notes
-----
The arguments expected by this function are the exact outputs from
`skimage.measure.marching_cubes`. For unit correct output, ensure correct
`spacing` was passed to `skimage.measure.marching_cubes`.
This algorithm works properly only if the ``faces`` provided are all
triangles.
See Also
--------
skimage.measure.marching_cubes
"""
# Fancy indexing to define two vector arrays from triangle vertices
actual_verts = verts[tris]
a = actual_verts[:, 0, :] - actual_verts[:, 1, :]
b = actual_verts[:, 0, :] - actual_verts[:, 2, :]
del actual_verts
# Area of triangle in 3D = 1/2 * Euclidean norm of cross product
return ((np.cross(a, b) ** 2).sum(axis=1) ** 0.5).sum() / 2.
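# --- Illustrative sketch (not part of scikit-image): contour a synthetic
# sphere and compare the mesh area against the analytic value 4*pi*r**2.
# The radius and grid size below are arbitrary.
def _sphere_area_example(radius=15, size=41):
    center = (size - 1) / 2.0
    z, y, x = np.ogrid[:size, :size, :size]
    # Signed distance to a sphere of the given radius; its 0-level set is
    # the sphere surface.
    volume = np.sqrt((x - center) ** 2 + (y - center) ** 2
                     + (z - center) ** 2) - radius
    verts, faces = marching_cubes(volume, 0.0, spacing=(1., 1., 1.))
    mesh_area = mesh_surface_area(verts, faces)
    return mesh_area, 4 * np.pi * radius ** 2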
| almarklein/scikit-image | skimage/measure/_marching_cubes.py | Python | bsd-3-clause | 6,374 | 0.000157 |
#!/bin/false
# -*- coding: utf-8 -*-
from objects.orobject import OrObject
from objects.function import Function
from objects.number import Number
from objects.file import File
from objects.inheritdict import InheritDict
from objects.ordict import OrDict
from objects.orddict import ODict
import objects.console as console
import objects.exception as exception
import objects.orstring as orstring
import types
import libbuiltin
def expose(r, n=""):
v = OrObject.from_py(r)
if n:
v.name = n
return v
builtin = InheritDict()
builtin.update({
"int": expose(libbuiltin.toint),
"num": expose(Number),
"dict": expose(OrDict),
"odict": expose(ODict),
"set": expose(set),
"io": expose(console.io),
"file": expose(File),
"input": expose(console.input),
"output": expose(console.output),
"error": expose(console.error),
"endl": expose("\n"),
"repr": expose(repr),
"join": expose(libbuiltin.join),
"range": expose(range),
"type": expose(libbuiltin.typeof, "type"),
"dir": expose(libbuiltin.dirof, "dir"),
"attrs": expose(libbuiltin.attrsof, "attrs"),
"reverse": expose(reversed),
"sort": expose(sorted),
"chr": expose(unichr),
"Exception": expose(Exception),
"hasattr": expose(OrObject.has, "hasattr"),
"getattr": expose(OrObject.get, "getattr"),
"setattr": expose(OrObject.set, "setattr"),
})
stolen_builtins = [
'abs', 'all', 'any', 'bool', 'callable', #buffer
'cmp', #chr (not as unichr)
'dict', 'divmod', 'enumerate', #delattr
'exit', 'filter', # frozenset
'hash', 'id', #get/hasattr
'iter', 'len', 'list',
'map', 'max', 'min', 'ord', # object
'range', 'repr', #property
'round', 'set', 'slice', #setattr
'str', 'sum', 'unicode', #super
'zip'
]
for i in stolen_builtins:
builtin[i] = expose(__builtins__[i])
| pavpanchekha/oranj | oranj/core/builtin.py | Python | gpl-3.0 | 1,879 | 0.010112 |
from django.contrib.auth import update_session_auth_hash
from rest_framework import serializers
from authentication.models import Account
class AccountSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True, required=False)
confirm_password = serializers.CharField(write_only=True, required=False)
class Meta:
model = Account
fields = ('id', 'email', 'username', 'created_at', 'updated_at',
'first_name', 'last_name', 'tagline', 'password',
'confirm_password', 'userType')
read_only_fields = ('created_at', 'updated_at',)
def create(self, validated_data):
return Account.objects.create(**validated_data)
def update(self, instance, validated_data):
instance.username = validated_data.get('username', instance.username)
instance.tagline = validated_data.get('tagline', instance.tagline)
instance.save()
password = validated_data.get('password', None)
confirm_password = validated_data.get('confirm_password', None)
if password and confirm_password and password == confirm_password:
instance.set_password(password)
instance.save()
update_session_auth_hash(self.context.get('request'), instance)
return instance
class SimpleAccountSerializer(serializers.ModelSerializer):
class Meta:
model = Account
fields = ('id', 'email', 'username',)
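# --- Illustrative sketch (not part of the original module): validating and
# creating an account through the serializer. Field values are hypothetical,
# and any additional required model fields (e.g. userType) may need to be
# supplied as well.
def _create_account_example():
    serializer = AccountSerializer(data={
        'email': 'ada@example.com',
        'username': 'ada',
    })
    serializer.is_valid(raise_exception=True)
    return serializer.save()  # dispatches to AccountSerializer.create()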
| bewallyt/Classy | authentication/serializers.py | Python | mit | 1,481 | 0.000675 |
#!/usr/bin/python
# coding=utf-8
# Simple Steam profile checker Telegram bot
# Copyright (c) 2017 EasyCoding Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from html import unescape
from re import sub
from urllib.request import Request as request, urlopen
from xml.dom import minidom
class SteamChecker:
@staticmethod
def striptags(gtstr, gtrep=''):
"""
Strip HTML tags from string.
:param gtstr: String to strip tags
:param gtrep: Replacement for tags
:return: String without HTML tags
"""
return sub('<[^<]+?>', gtrep, unescape(gtstr))
def __fetchxml(self):
"""
Format query to API, fetch results and return them as string.
:return: API check results
"""
apiuri = 'https://check.team-fortress.su/api.php?action=check&token=%s&id=%s' % (self.__token, self.__id)
req = request(apiuri, data=None, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:52.0.0)'
'Gecko/20100101 Firefox/52.0.0'})
with urlopen(req) as xmlres:
return xmlres.read().decode('utf-8')
@property
def sitestatus(self):
"""
TEAM-FORTRESS.SU user friendly status of checked user profile.
:return: TEAM-FORTRESS.SU check results
"""
# Set dictionary with API return codes...
        stv = {
            '1': 'гарант',                    # guarantor
            '2': 'в белом списке',            # whitelisted
            '3': 'в чёрном списке',           # blacklisted
            '4': 'нет в базе',                # not in the database
            '5': 'в чёрном списке аукциона',  # blacklisted on the auction
            '6': 'сотрудник сайта',           # site staff
            '7': 'донатер',                   # donator
            '8': 'ненадёжный'                 # untrustworthy
        }
# Return result using dictionary...
return stv[self.__sitestatus]
@property
def vacstatus(self):
"""
VAC status of checked user profile.
:return: VAC status
"""
        stv = {
            '0': 'чист',    # clean
            '1': 'забанен'  # banned
        }
return stv[self.__vacstatus]
@property
def f2pstatus(self):
"""
Free-to-Play status (has no purchased games) of checked user profile.
:return: Free-to-Play status
"""
        stv = {
            '0': 'нет',  # no
            '1': 'да'    # yes
        }
return stv[self.__f2pstatus]
@property
def tradestatus(self):
"""
Current trade status of checked user profile.
:return: Trade status
"""
        stv = {
            '0': 'нет ограничений',    # no restrictions
            '1': 'заблокирована',      # blocked
            '2': 'испытательный срок'  # probation
        }
return stv[self.__tradestatus]
@property
def gamebanstatus(self):
"""
Current game bans on checked user profile.
:return: Game bans status and their count
"""
        return 'нет' if self.__gamebans == '0' else 'есть (%s)' % self.__gamebans  # "none" / "present (N)"
@property
def description(self):
"""
Formatted custom description of checked user profile.
:return: Custom description with markup
"""
        return '`%s`' % self.striptags(self.__description, ' ') if self.__description else '*отсутствует.*'  # '*отсутствует.*' means "*none.*"
def __init__(self, tid, token):
"""
Main SteamChecker constructor.
:param tid: Profile link, username or SteamID
:param token: API token
"""
# Setting token and unique identifier to pseudo-private properties...
self.__id = tid
self.__token = token
# Fetching XML from API...
rxml = self.__fetchxml()
# Parsing received XML...
xmlp = minidom.parseString(rxml)
# Checking API result...
if xmlp.getElementsByTagName('qstatus')[0].firstChild.data != 'OK':
raise Exception('Incorrect API return code')
# Setting public fields...
self.steamid32 = xmlp.getElementsByTagName('steamID')[0].firstChild.data
self.steamid64 = xmlp.getElementsByTagName('steamID64')[0].firstChild.data
self.steamidv3 = xmlp.getElementsByTagName('steamIDv3')[0].firstChild.data
self.nickname = xmlp.getElementsByTagName('nickname')[0].firstChild.data
self.avatar = xmlp.getElementsByTagName('avatar')[0].firstChild.data
self.permalink = xmlp.getElementsByTagName('permalink')[0].firstChild.data
self.srstatus = self.striptags(xmlp.getElementsByTagName('steamrep')[0].firstChild.data)
# Setting private fields...
self.__sitestatus = xmlp.getElementsByTagName('sitestatus')[0].firstChild.data
self.__vacstatus = xmlp.getElementsByTagName('isbanned')[0].firstChild.data
self.__f2pstatus = xmlp.getElementsByTagName('isf2p')[0].firstChild.data
self.__tradestatus = xmlp.getElementsByTagName('istrbanned')[0].firstChild.data
self.__premium = xmlp.getElementsByTagName('ispremium')[0].firstChild.data
self.__gamebans = xmlp.getElementsByTagName('gamebans')[0].firstChild.data
# Fetching custom description...
dcs = xmlp.getElementsByTagName('customdescr')[0].firstChild
self.__description = dcs.data if dcs else ''
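if __name__ == '__main__':
    # --- Minimal usage sketch (not part of the original module). Both the
    # profile identifier and the API token are placeholders; a valid
    # check.team-fortress.su API token is required for the request to succeed.
    checker = SteamChecker('76561197960287930', 'YOUR_API_TOKEN')
    print(checker.nickname, checker.permalink)
    print('VAC:', checker.vacstatus, '| trade:', checker.tradestatus)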
| xvitaly/stmbot | stmbot/checker.py | Python | gpl-3.0 | 5,895 | 0.002969 |
# -*- coding: utf-8 -*-
""" Tablib - JSON Support
"""
import tablib
import sys
from tablib.packages import omnijson as json
title = 'json'
extentions = ('json', 'jsn')
def export_set(dataset):
"""Returns JSON representation of Dataset."""
return json.dumps(dataset.dict)
def export_book(databook):
"""Returns JSON representation of Databook."""
return json.dumps(databook._package())
def import_set(dset, in_stream):
"""Returns dataset from JSON stream."""
dset.wipe()
dset.dict = json.loads(in_stream)
def import_book(dbook, in_stream):
"""Returns databook from JSON stream."""
dbook.wipe()
for sheet in json.loads(in_stream):
data = tablib.Dataset()
data.title = sheet['title']
data.dict = sheet['data']
dbook.add_sheet(data)
def detect(stream):
"""Returns True if given stream is valid JSON."""
try:
json.loads(stream)
return True
except ValueError:
return False
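# --- Illustrative sketch (not part of tablib): round-trip a small dataset
# through this format module. The rows below are arbitrary sample data.
def _roundtrip_example():
    data = tablib.Dataset(headers=['name', 'age'])
    data.append(('Ada', 36))
    data.append(('Grace', 45))
    as_json = export_set(data)      # e.g. '[{"name": "Ada", "age": 36}, ...]'
    restored = tablib.Dataset()
    import_set(restored, as_json)   # wipes `restored` and loads the rows back
    return detect(as_json), restored.dict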
| justinpotts/mozillians | vendor-local/lib/python/tablib/formats/_json.py | Python | bsd-3-clause | 991 | 0 |
"""Support for Nest devices."""
from datetime import datetime, timedelta
import logging
import socket
import threading
from nest import Nest
from nest.nest import APIError, AuthorizationError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_BINARY_SENSORS,
CONF_FILENAME,
CONF_MONITORED_CONDITIONS,
CONF_SENSORS,
CONF_STRUCTURE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
from . import local_auth
from .const import DOMAIN
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
SERVICE_CANCEL_ETA = "cancel_eta"
SERVICE_SET_ETA = "set_eta"
DATA_NEST = "nest"
DATA_NEST_CONFIG = "nest_config"
SIGNAL_NEST_UPDATE = "nest_update"
NEST_CONFIG_FILE = "nest.conf"
CONF_CLIENT_ID = "client_id"
CONF_CLIENT_SECRET = "client_secret"
ATTR_ETA = "eta"
ATTR_ETA_WINDOW = "eta_window"
ATTR_STRUCTURE = "structure"
ATTR_TRIP_ID = "trip_id"
AWAY_MODE_AWAY = "away"
AWAY_MODE_HOME = "home"
ATTR_AWAY_MODE = "away_mode"
SERVICE_SET_AWAY_MODE = "set_away_mode"
SENSOR_SCHEMA = vol.Schema(
{vol.Optional(CONF_MONITORED_CONDITIONS): vol.All(cv.ensure_list)}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Optional(CONF_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_SENSORS): SENSOR_SCHEMA,
vol.Optional(CONF_BINARY_SENSORS): SENSOR_SCHEMA,
}
)
},
extra=vol.ALLOW_EXTRA,
)
SET_AWAY_MODE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_AWAY_MODE): vol.In([AWAY_MODE_AWAY, AWAY_MODE_HOME]),
vol.Optional(ATTR_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
}
)
SET_ETA_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ETA): cv.time_period,
vol.Optional(ATTR_TRIP_ID): cv.string,
vol.Optional(ATTR_ETA_WINDOW): cv.time_period,
vol.Optional(ATTR_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
}
)
CANCEL_ETA_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TRIP_ID): cv.string,
vol.Optional(ATTR_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
}
)
def nest_update_event_broker(hass, nest):
"""
    Dispatch SIGNAL_NEST_UPDATE to devices when the Nest stream API receives data.
Runs in its own thread.
"""
_LOGGER.debug("Listening for nest.update_event")
while hass.is_running:
nest.update_event.wait()
if not hass.is_running:
break
nest.update_event.clear()
_LOGGER.debug("Dispatching nest data update")
dispatcher_send(hass, SIGNAL_NEST_UPDATE)
_LOGGER.debug("Stop listening for nest.update_event")
async def async_setup(hass, config):
"""Set up Nest components."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
local_auth.initialize(hass, conf[CONF_CLIENT_ID], conf[CONF_CLIENT_SECRET])
filename = config.get(CONF_FILENAME, NEST_CONFIG_FILE)
access_token_cache_file = hass.config.path(filename)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"nest_conf_path": access_token_cache_file},
)
)
# Store config to be used during entry setup
hass.data[DATA_NEST_CONFIG] = conf
return True
async def async_setup_entry(hass, entry):
"""Set up Nest from a config entry."""
nest = Nest(access_token=entry.data["tokens"]["access_token"])
_LOGGER.debug("proceeding with setup")
conf = hass.data.get(DATA_NEST_CONFIG, {})
hass.data[DATA_NEST] = NestDevice(hass, conf, nest)
if not await hass.async_add_job(hass.data[DATA_NEST].initialize):
return False
for component in "climate", "camera", "sensor", "binary_sensor":
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
def validate_structures(target_structures):
all_structures = [structure.name for structure in nest.structures]
for target in target_structures:
if target not in all_structures:
_LOGGER.info("Invalid structure: %s", target)
def set_away_mode(service):
"""Set the away mode for a Nest structure."""
if ATTR_STRUCTURE in service.data:
target_structures = service.data[ATTR_STRUCTURE]
validate_structures(target_structures)
else:
target_structures = hass.data[DATA_NEST].local_structure
for structure in nest.structures:
if structure.name in target_structures:
_LOGGER.info(
"Setting away mode for: %s to: %s",
structure.name,
service.data[ATTR_AWAY_MODE],
)
structure.away = service.data[ATTR_AWAY_MODE]
def set_eta(service):
"""Set away mode to away and include ETA for a Nest structure."""
if ATTR_STRUCTURE in service.data:
target_structures = service.data[ATTR_STRUCTURE]
validate_structures(target_structures)
else:
target_structures = hass.data[DATA_NEST].local_structure
for structure in nest.structures:
if structure.name in target_structures:
if structure.thermostats:
_LOGGER.info(
"Setting away mode for: %s to: %s",
structure.name,
AWAY_MODE_AWAY,
)
structure.away = AWAY_MODE_AWAY
now = datetime.utcnow()
trip_id = service.data.get(
ATTR_TRIP_ID, "trip_{}".format(int(now.timestamp()))
)
eta_begin = now + service.data[ATTR_ETA]
eta_window = service.data.get(ATTR_ETA_WINDOW, timedelta(minutes=1))
eta_end = eta_begin + eta_window
_LOGGER.info(
"Setting ETA for trip: %s, "
"ETA window starts at: %s and ends at: %s",
trip_id,
eta_begin,
eta_end,
)
structure.set_eta(trip_id, eta_begin, eta_end)
else:
_LOGGER.info(
"No thermostats found in structure: %s, " "unable to set ETA",
structure.name,
)
def cancel_eta(service):
"""Cancel ETA for a Nest structure."""
if ATTR_STRUCTURE in service.data:
target_structures = service.data[ATTR_STRUCTURE]
validate_structures(target_structures)
else:
target_structures = hass.data[DATA_NEST].local_structure
for structure in nest.structures:
if structure.name in target_structures:
if structure.thermostats:
trip_id = service.data[ATTR_TRIP_ID]
_LOGGER.info("Cancelling ETA for trip: %s", trip_id)
structure.cancel_eta(trip_id)
else:
_LOGGER.info(
"No thermostats found in structure: %s, "
"unable to cancel ETA",
structure.name,
)
hass.services.async_register(
DOMAIN, SERVICE_SET_AWAY_MODE, set_away_mode, schema=SET_AWAY_MODE_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_SET_ETA, set_eta, schema=SET_ETA_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_CANCEL_ETA, cancel_eta, schema=CANCEL_ETA_SCHEMA
)
@callback
def start_up(event):
"""Start Nest update event listener."""
threading.Thread(
name="Nest update listener",
target=nest_update_event_broker,
args=(hass, nest),
).start()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_up)
@callback
def shut_down(event):
"""Stop Nest update event listener."""
nest.update_event.set()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shut_down)
_LOGGER.debug("async_setup_nest is done")
return True
class NestDevice:
"""Structure Nest functions for hass."""
def __init__(self, hass, conf, nest):
"""Init Nest Devices."""
self.hass = hass
self.nest = nest
self.local_structure = conf.get(CONF_STRUCTURE)
def initialize(self):
"""Initialize Nest."""
try:
            # Do not optimize the next statement; it is here to initialize
            # the persistent Nest API connection.
structure_names = [s.name for s in self.nest.structures]
if self.local_structure is None:
self.local_structure = structure_names
except (AuthorizationError, APIError, socket.error) as err:
_LOGGER.error("Connection error while access Nest web service: %s", err)
return False
return True
def structures(self):
"""Generate a list of structures."""
try:
for structure in self.nest.structures:
if structure.name not in self.local_structure:
_LOGGER.debug(
"Ignoring structure %s, not in %s",
structure.name,
self.local_structure,
)
continue
yield structure
except (AuthorizationError, APIError, socket.error) as err:
_LOGGER.error("Connection error while access Nest web service: %s", err)
def thermostats(self):
"""Generate a list of thermostats."""
return self._devices("thermostats")
def smoke_co_alarms(self):
"""Generate a list of smoke co alarms."""
return self._devices("smoke_co_alarms")
def cameras(self):
"""Generate a list of cameras."""
return self._devices("cameras")
def _devices(self, device_type):
"""Generate a list of Nest devices."""
try:
for structure in self.nest.structures:
if structure.name not in self.local_structure:
_LOGGER.debug(
"Ignoring structure %s, not in %s",
structure.name,
self.local_structure,
)
continue
for device in getattr(structure, device_type, []):
try:
                        # Do not optimize the next statement;
                        # it is here to verify Nest API permissions.
device.name_long
except KeyError:
_LOGGER.warning(
"Cannot retrieve device name for [%s]"
", please check your Nest developer "
"account permission settings.",
device.serial,
)
continue
yield (structure, device)
except (AuthorizationError, APIError, socket.error) as err:
_LOGGER.error("Connection error while access Nest web service: %s", err)
class NestSensorDevice(Entity):
"""Representation of a Nest sensor."""
def __init__(self, structure, device, variable):
"""Initialize the sensor."""
self.structure = structure
self.variable = variable
if device is not None:
# device specific
self.device = device
self._name = "{} {}".format(
self.device.name_long, self.variable.replace("_", " ")
)
else:
# structure only
self.device = structure
self._name = "{} {}".format(
self.structure.name, self.variable.replace("_", " ")
)
self._state = None
self._unit = None
@property
def name(self):
"""Return the name of the nest, if any."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@property
def should_poll(self):
"""Do not need poll thanks using Nest streaming API."""
return False
@property
def unique_id(self):
"""Return unique id based on device serial and variable."""
return f"{self.device.serial}-{self.variable}"
@property
def device_info(self):
"""Return information about the device."""
if not hasattr(self.device, "name_long"):
name = self.structure.name
model = "Structure"
else:
name = self.device.name_long
if self.device.is_thermostat:
model = "Thermostat"
elif self.device.is_camera:
model = "Camera"
elif self.device.is_smoke_co_alarm:
model = "Nest Protect"
else:
model = None
return {
"identifiers": {(DOMAIN, self.device.serial)},
"name": name,
"manufacturer": "Nest Labs",
"model": model,
}
def update(self):
"""Do not use NestSensorDevice directly."""
raise NotImplementedError
async def async_added_to_hass(self):
"""Register update signal handler."""
async def async_update_state():
"""Update sensor state."""
await self.async_update_ha_state(True)
async_dispatcher_connect(self.hass, SIGNAL_NEST_UPDATE, async_update_state)
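NestSensorDevice is only a base class: update() deliberately raises NotImplementedError, and the concrete sensors live in the sibling sensor.py/binary_sensor.py platforms. The hypothetical subclass below sketches the intended pattern; this exact class is not part of the integration, and the getattr call simply mirrors how py-nest device attributes are typically read.

class ExampleNestSensor(NestSensorDevice):
    """Illustrative subclass only; the real sensors are defined in sensor.py."""

    def update(self):
        """Cache the current value of self.variable as read from the device."""
        # e.g. self.variable == "temperature" -> self.device.temperature
        self._state = getattr(self.device, self.variable, None)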
|
leppa/home-assistant
|
homeassistant/components/nest/__init__.py
|
Python
|
apache-2.0
| 14,138 | 0.000637 |
# -*- coding: utf-8 -*-
import os
import django
from .fixtures import * # noqa
# import pytest
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
def pytest_configure(config):
django.setup()
|
pythonindia/junction
|
tests/conftest.py
|
Python
|
mit
| 212 | 0 |
#!/usr/bin/env python
#
# Copyright 2014 (c) Lei Xu <eddyxu@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fabric.api import task
import yaml
import logging
import vsfs_ec2 as ec2
vsfs = ec2.VsfsEC2()
logging.basicConfig(format='[%(asctime)s](%(levelname)s) %(message)s',
level=logging.INFO)
@task
def help(name=''):
"""Print full information of the function. (name='task')
"""
if not name:
print("Use: 'fab help:func_name' for detailed help for each task.")
else:
print(globals()[name].__doc__)
@task
def price_history(instance='m1.small', n=10):
"""Print out the recent price history (instance='m1.small',n=10).
"""
vsfs.get_spot_price_history(instance_type=instance, num_prices=n)
@task
def spot_requests():
"""Prints all active spot instance requests.
"""
vsfs.get_all_spot_requests()
@task
def instances(state='running'):
"""Prints the information of instance.
"""
vsfs.get_all_instances(state)
@task
def image_list():
"""Prints all images.
"""
vsfs.print_all_images()
@task
def image_create(price=0.01, spot='yes', revision='HEAD', branch='master'):
"""Creates an VSFS image using Spot Instance (price=0.01,spot=yes/no).
Options:
@param spot set to 'yes' to use spot instance, set to 'no' to use on-demand
instance. default: 'yes'
@param price the bid price for spot instance. default: 0.01
@param branch git branch of the vsfs source.
    @param revision the git revision of the vsfs source.
"""
if spot == 'yes':
vsfs.create_image_spot(price)
else:
vsfs.create_image()
@task
def image_delete(image_id):
"""Deleted a stored image with the given ID.
"""
vsfs.delete_image(image_id)
@task
def security_group_list():
"""List out all security groups.
"""
vsfs.print_security_groups()
@task
def cluster_start(ami, nmaster, nindexd, nclient, yaml='example.yaml'):
"""Starts a cluster (ami='', nmaster=0, nindexd=0, nclient=0, \
yaml='example.yaml')
    The cluster configuration is defined in 'example.yaml'.
"""
num_masters = int(nmaster)
num_indexd = int(nindexd)
num_client = int(nclient)
vsfs.start_cluster(ami, num_masters, num_indexd, num_client,
conf_yaml=yaml)
@task
def vpc_list():
"""Prints all available VPC and its detailed information.
"""
vsfs.print_vpcs()
@task
def vpc_create():
"""Creates a 10.0.0.0/22 virtual private cluster (VPC).
"""
vsfs.create_vpc()
@task
def vpc_clear():
"""Removes all virtual private clusters.
"""
vsfs.remove_vpcs()
@task
def list_x509_certifications():
    """Prints the paths of the located X.509 certification files.
    """
    print(vsfs.locate_x509_certifications())
@task
def s3_space():
"""Calculate s3 space consumption.
"""
vsfs.print_s3_space()
@task
def volume_list():
"""List all volumes
"""
vsfs.print_volumes()
@task
def volume_create(ami, price, volsize):
"""Creates a new EBS volume and format it (param: ami, price, volsize)
"""
vsfs.create_volume_spot(ami, price, volsize)
@task
def elastic_ip_list():
"""List all elastic ips.
"""
vsfs.print_elastic_ips()
@task
def test_run():
"""Start cluster on active instances.
"""
confs = {}
with open('test.yaml') as fobj:
        confs = yaml.safe_load(fobj.read())
vsfs.start_test_cluster(confs)
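These tasks are normally invoked from the fab command line (for example "fab help:image_create" or "fab instances:state=running"); the sketch below shows the equivalent programmatic calls through Fabric 1.x's execute helper. It assumes this fabfile is importable and that the vsfs_ec2 module and AWS credentials are configured; the instance type and counts are placeholders.

from fabric.api import execute

import fabfile  # this module (ec2/fabfile.py)

# Show recent spot-price history before deciding on a bid (placeholder args).
execute(fabfile.price_history, instance='m1.small', n=5)

# List the currently running instances.
execute(fabfile.instances, state='running')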
|
vsfs/vsfs-bench
|
ec2/fabfile.py
|
Python
|
apache-2.0
| 3,913 | 0 |
"""
This file tests the MNISTPlus class, mainly concerning the X and y members
of the dataset and their corresponding sizes, data scales, and topological
views.
"""
from pylearn2.datasets.mnistplus import MNISTPlus
from pylearn2.space import IndexSpace, VectorSpace
import unittest
from pylearn2.testing.skip import skip_if_no_data
import numpy as np
def test_MNISTPlus():
"""
    Test the MNISTPlus wrapper.
Tests the scale of data, the splitting of train, valid, test sets.
Tests that a topological batch has 4 dimensions.
    Tests that it works well with the selected types of augmentation.
"""
skip_if_no_data()
for subset in ['train', 'valid', 'test']:
ids = MNISTPlus(which_set=subset)
assert 0.01 >= ids.X.min() >= 0.0
assert 0.99 <= ids.X.max() <= 1.0
topo = ids.get_batch_topo(1)
assert topo.ndim == 4
del ids
train_y = MNISTPlus(which_set='train', label_type='label')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert train_y.y.max() == 9
assert train_y.y.min() == 0
assert train_y.y.shape == (train_y.X.shape[0], 1)
train_y = MNISTPlus(which_set='train', label_type='azimuth')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert 0.0 <= train_y.y.max() <= 1.0
assert 0.0 <= train_y.y.min() <= 1.0
assert train_y.y.shape == (train_y.X.shape[0], 1)
train_y = MNISTPlus(which_set='train', label_type='rotation')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert train_y.y.max() == 9
assert train_y.y.min() == 0
assert train_y.y.shape == (train_y.X.shape[0], 1)
train_y = MNISTPlus(which_set='train', label_type='texture_id')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert train_y.y.max() == 9
assert train_y.y.min() == 0
assert train_y.y.shape == (train_y.X.shape[0], 1)
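For a quick visual sanity check alongside these assertions, a small sketch is given below; it assumes the MNIST+ data files are installed for pylearn2 and that matplotlib is available, and it is not part of the test suite.

import matplotlib.pyplot as plt
from pylearn2.datasets.mnistplus import MNISTPlus

ds = MNISTPlus(which_set='train', label_type='label')
# One random batch in the default ('b', 0, 1, 'c') topological format.
topo = ds.get_batch_topo(1)
plt.imshow(topo[0, :, :, 0], cmap='gray')
plt.axis('off')
plt.show()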
|
JazzeYoung/VeryDeepAutoEncoder
|
pylearn2/pylearn2/datasets/tests/test_mnistplus.py
|
Python
|
bsd-3-clause
| 1,978 | 0 |