repo_name (string, lengths 5-92) | path (string, lengths 4-232) | copies (string, 19 classes) | size (string, lengths 4-7) | content (string, lengths 721-1.04M) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
todd-x86/tkplus | exception.py | 1 | 1522 |
from form import Form
from button import Button
from label import Label
from image import Image
from memo import Memo
import os
import Tkinter as tk
import traceback
def handle_exception(ex, stacktrace=None):
err_icon = os.path.join(os.path.dirname(__file__), 'graphics', 'icon_error.gif')
frm = Form(caption='Exception: {}'.format(ex.__class__.__name__),
left=100, top=100, width=350, height=180)
frm.resizable = False
msg = Label(frm, left=45, top=5, width=305, height=40, caption=ex.message)
msg.wordwrap = True
img = Image(frm, left=5, top=15, width=32, height=32, file=err_icon)
trace = Memo(frm, left=5, top=55, width=335, height=90)
trace.text = stacktrace
def close_form():
frm.close()
btn = Button(frm, left=140, top=148, width=65, height=27, caption="Close")
btn.on_click = close_form
frm.show_modal()
def enable_handler():
tk.CallWrapper = ExceptionHandler
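# Tkinter routes every widget callback through tk.CallWrapper; swapping in
# ExceptionHandler (defined below) means any exception raised inside a
# callback is shown via handle_exception instead of only going to the console.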
class ExceptionHandler(object):
def __init__(self, func, subst, widget):
self._func = func
self._subst = subst
self._widget = widget
def __call__(self, *args):
try:
if self._subst:
return self._subst(*args)
else:
return self._func(*args)
except SystemExit, msg:
raise SystemExit, msg
except Exception as ex:
# TODO: Figure out how to ignore this section of code in stacktrace
handle_exception(ex, traceback.format_exc())
| apache-2.0 | -826,496,938,995,160,800 | 31.382979 | 84 | 0.618922 | false |
lingfliu/smart_tuwa | twrt/testbed/massive_scene_test.py | 1 | 3298 |
import socket
import time
import sys
import random
import math
import threading
msg_header = 'AADD'
msg_stamp = '\x00\x00\x00\x00'
msg_id_gw = '2016A008'
msg_id_dev = '00000000'
msg_devtype = '\x01\x00'
msg_auth_key = '88888888'
msg_auth_datatype = '\x1c\x00'
msg_auth = msg_header+msg_stamp+msg_id_gw+msg_id_dev+msg_devtype+msg_auth_datatype+'\x00\x08'+msg_auth_key
#serverAddress = ('192.168.20.104', 9091)
serverAddress = ('localhost', 9091)
skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
skt.connect(serverAddress)
length = skt.send(msg_auth)
msg_bak = skt.recv(1024)
print msg_bak
#scene set
for i in range(0,20):
print('create scene' + str(i))
sce_type_val = int(math.ceil(random.random()*3))
sce_type = '%c'%sce_type_val
sce_id_major_val = i #round(random.random()*1000)
sce_id_major = '%08d'%sce_id_major_val
sce_id_minor_val = i #round(random.random()*1000)
sce_id_minor = '%08d'%sce_id_minor_val
sce_mac_val= round(random.random()*1000)
sce_mac = '%08d'%sce_mac_val
sce_name_val = round(random.random()*100)
sce_name = 'scene'+'%04d'%sce_name_val + '\x00'*51
sce_type_val = int(math.ceil(random.random()*4))
sce_type = '%c'%sce_type_val
sce_type +='\x00'*3
sce_trigger_num = 100 #int(random.random()*100)
sce_trigger = ''
for m in range(0, sce_trigger_num):
sce_trigger_val = round(random.random()*100)
sce_trigger += ('%08d'%sce_trigger_val)*6
sce_item_num = int(random.random()*100)
sce_item = ''
for m in range(0, sce_item_num):
sce_item_val = round(random.random()*100)
sce_item += ('%08d'%sce_item_val)*6
body_len_val = 48*sce_item_num + 48*sce_trigger_num + 96
body_len = ''
body_len +='%c'%(int(body_len_val/256))
body_len +='%c'%(body_len_val%256)
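# (Assumed note: the two '%c' bytes above encode body_len_val as a big-endian
#  16-bit length, i.e. the same bytes struct.pack('>H', body_len_val) would
#  produce if `import struct` were added at the top.)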
msg_set_scene = msg_header+msg_stamp+msg_id_gw+msg_id_dev+msg_devtype+'\x0f\x00'+ body_len + sce_id_major +sce_id_minor+sce_mac+sce_type+sce_name+'%c'%sce_trigger_num + '\x00'*3+'%c'%sce_item_num+'\x00'*3+sce_trigger+sce_item
#print('message length=' + str(len(msg_set_scene)))
#print('body length=' + str(body_len_val))
print (sce_id_major + ' ' + sce_id_minor + ' ' + sce_mac + ' ' + sce_name + ' ' + str(sce_trigger_num) + ' ' + str(sce_item_num) )
#print(str('%c'%sce_trigger_num))
#print(body_len)
#print('msg = ' + msg_set_scene)
m = 0
while(True):
if m+256 < len(msg_set_scene):
pkt = msg_set_scene[m:m+256]
length = skt.send(pkt)
print length
m += 256
time.sleep(0.01)
continue
else:
pkt = msg_set_scene[m:]
length = skt.send(pkt)
time.sleep(0.01)
print length
break
#length = skt.send(msg_set_scene())
msg_bak = skt.recv(1024)
print msg_bak
time.sleep(0.01)
msg_finish_scene = msg_header+msg_stamp+msg_id_gw+msg_id_dev+msg_devtype+'\x11\x00'+'\x00\x01' + '\x00'
print('msg finish = ' + msg_finish_scene)
length = skt.send(msg_finish_scene)
print length
msg_bak = skt.recv(1024)
print msg_bak
#while(True):
#msg_bak = skt.recv(1024)
#print msg_bak
#pass
| apache-2.0 | -1,788,681,679,811,182,000 | 28.711712 | 229 | 0.583081 | false |
sublime-ycmd/sublime-ycmd | tests/lib/subtest.py | 1 | 1883 |
#!/usr/bin/env python3
'''
tests/lib/subtest.py
Utility functions for running sub-tests within a test case. Includes additional
logging to add context during sub-test execution.
'''
import logging
import unittest
from tests.lib.decorator import log_function
logger = logging.getLogger('sublime-ycmd.' + __name__)
def _is_args_kwargs(test_case):
if not isinstance(test_case, (tuple, list)):
return False
if len(test_case) != 2:
return False
if not isinstance(test_case[1], dict):
return False
return True
def map_test_function(test_instance, test_function, test_cases):
assert isinstance(test_instance, unittest.TestCase), \
'test instance must be a unittest.TestCase: %r' % (test_instance)
assert callable(test_function), \
'test function must be callable: %r' % (test_function)
assert hasattr(test_cases, '__iter__'), \
'test cases must be iterable: %r' % (test_cases)
for test_index, test_case in enumerate(test_cases, start=1):
is_args_kwargs = _is_args_kwargs(test_case)
is_kwargs = isinstance(test_case, dict)
is_args = not (is_args_kwargs or is_kwargs)
if is_args_kwargs:
test_args, test_kwargs = test_case
elif is_kwargs:
test_args = tuple()
test_kwargs = test_case
elif is_args:
test_args = test_case
test_kwargs = dict()
log_args = is_args_kwargs or is_args
log_kwargs = is_args_kwargs or is_kwargs
wrapped_test_function = log_function(
desc='[%d]' % (test_index),
include_args=log_args, include_kwargs=log_kwargs,
)(test_function)
with test_instance.subTest(num=test_index,
args=test_args, kwargs=test_kwargs):
wrapped_test_function(*test_args, **test_kwargs)
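# --- Illustrative usage (a hypothetical sketch, not part of the original
# module): map_test_function runs one checker over many parameter sets,
# accepting plain argument tuples, kwargs dicts, or (args, kwargs) pairs,
# and wraps each call in log_function for per-sub-test logging.
class _ExampleMapTestUsage(unittest.TestCase):
    def test_addition(self):
        def check(a, b, expected):
            self.assertEqual(a + b, expected)
        map_test_function(self, check, [
            (1, 1, 2),                            # positional args
            {'a': 2, 'b': 3, 'expected': 5},      # kwargs only
            ((10, -4), {'expected': 6}),          # (args, kwargs) pair
        ])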
| mit | 7,860,737,243,510,147,000 | 30.383333 | 79 | 0.619225 | false |
charlesll/RamPy | legacy_code/IR_dec_comb.py | 1 | 6585 |
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 22 07:54:05 2014
@author: charleslelosq
Carnegie Institution for Science
"""
import os # needed for os.open/os.fdopen used when writing the fit report
import sys
sys.path.append("/Users/charleslelosq/Documents/RamPy/lib-charles/")
import csv
import numpy as np
import scipy
import matplotlib
import matplotlib.gridspec as gridspec
from pylab import *
from StringIO import StringIO
from scipy import interpolate
# to fit spectra we use the lmfit software of Matt Newville, CARS, university of Chicago, available on the web
from lmfit import minimize, Minimizer, Parameters, Parameter, report_fit, fit_report
from spectratools import * #Charles' libraries and functions
from Tkinter import *
import tkMessageBox
from tkFileDialog import askopenfilename
#### We define a set of functions that will be used for fitting data
#### unfortunatly, as we use lmfit (which is convenient because it can fix or release
#### easily the parameters) we are not able to use arrays for parameters...
#### so it is a little bit long to write all the things, but in a way quite robust also...
#### gaussian and pseudovoigt functions are available in spectratools
#### if you need a voigt, fix the gaussian-to-lorentzian ratio to 1 in the parameter definition before
#### doing the data fit
def residual(pars, x, data=None, eps=None):
# unpack parameters:
# extract .value attribute for each parameter
a1 = pars['a1'].value
a2 = pars['a2'].value
f1 = pars['f1'].value
f2 = pars['f2'].value
l1 = pars['l1'].value
l2 = pars['l2'].value
# Gaussian model
peak1 = gaussian(x,a1,f1,l1)
peak2 = gaussian(x,a2,f2,l2)
model = peak1 + peak2
if data is None:
return model, peak1, peak2
if eps is None:
return (model - data)
return (model - data)/eps
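# (residual() doubles as the model generator: called with data=None it returns
#  the summed model plus the two individual Gaussian components, which is how
#  the fitted peaks are recovered after minimize(); with data and eps it
#  returns the error-weighted residual that lmfit minimizes.)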
##### CORE OF THE CALCULATION BELOW
#### CALLING THE DATA NAMES
tkMessageBox.showinfo(
"Open file",
"Please open the list of spectra")
Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
filename = askopenfilename() # show an "Open" dialog box and return the path to the selected file
with open(filename) as inputfile:
results = list(csv.reader(inputfile)) # we read the data list
#### LOOP FOR BEING ABLE TO TREAT MULTIPLE DATA
#### WARNING: OUTPUT ARE AUTOMATICALLY GENERATED IN A DIRECTORY CALLED "DECONV"
#### (see end) THAT SHOULD BE PRESENT !!!!!!!!!!
for lg in range(len(results)):
name = str(results[lg]).strip('[]')
name = name[1:-1] # to remove unwanted ""
sample = np.genfromtxt(name) # get the sample to deconvolute
# we set here the lower and higher bonds for the interest region
lb = 4700 ### MAY NEED TO AJUST THAT
hb = 6000
interestspectra = sample[np.where((sample[:,0] > lb)&(sample[:,0] < hb))]
ese0 = interestspectra[:,2]/abs(interestspectra[:,1]) #take ese as a percentage, we assume that the treatment was made correctly for error determination... if not, please put sigma = None
interestspectra[:,1] = interestspectra[:,1]/np.amax(interestspectra[:,1])*100 # normalise spectra to maximum, easier to handle after
sigma = abs(ese0*interestspectra[:,1]) #calculate good ese
#sigma = None # you can activate that if you are not sure about the errors
xfit = interestspectra[:,0] # region to be fitted
data = interestspectra[:,1] # region to be fitted
params = Parameters()
####################### FOR MELT:
####################### COMMENT IF NOT WANTED
# (Name, Value, Vary, Min, Max, Expr)
params.add_many(('a1', 1, True, 0, None, None),
('f1', 5200, True, 750, None, None),
('l1', 1, True, 0, None, None),
('a2', 1, True, 0, None, None),
('f2', 5400, True, None, None, None),
('l2', 1, True, None, None, None))
result = minimize(residual, params, args=(xfit, data)) # fit data with leastsq model from scipy
model = fit_report(params) # the report
yout, peak1, peak2 = residual(params, xfit) # the different peaks
#### We just calculate the different areas up to 4700 cmm-1 and those of the gaussians
# Select interest areas for calculating the areas of OH and H2Omol peaks
intarea45 = sample[np.where((sample[:,0]> 4100) & (sample[:,0]<4700))]
area4500 = np.trapz(intarea45[:,1],intarea45[:,0])
esearea4500 = 1/sqrt(area4500) # We assume that RELATIVE errors on areas are globally equal to 1/sqrt(Area)
# now for the gaussians
# unpack parameters:
# extract .value attribute for each parameter
a1 = params['a1'].value
a2 = params['a2'].value
l1 = params['l1'].value
l2 = params['l2'].value
AireG1 = gaussianarea(a1,l1)
AireG2 = gaussianarea(a2,l2)
##### WE DO A NICE FIGURE THAT CAN BE IMPROVED FOR PUBLICATION
fig = figure()
plot(sample[:,0],sample[:,1],'k-')
plot(xfit,yout,'r-')
plot(xfit,peak1,'b-')
plot(xfit,peak2,'b-')
xlim(lb,hb)
ylim(0,np.max(sample[:,1]))
xlabel("Wavenumber, cm$^{-1}$", fontsize = 18, fontweight = "bold")
ylabel("Absorption, a. u.", fontsize = 18, fontweight = "bold")
text(4000,np.max(intarea45[:,1])+0.03*np.max(intarea45[:,1]),('Area OH: \n'+'%.1f' % area4500),color='b',fontsize = 16)
text(4650,a1 + 0.05*a1,('Area pic 1$: \n'+ '%.1f' % AireG1),color='b',fontsize = 16)
text(5000,a2 + 0.05*a2,('OH/H$_2$O$_{mol}$: \n'+'%.3f' % ratioOH_H2O+'\n+/-'+'%.3f' % eseratioOH_H2O),color='r',fontsize = 16)
##### output of data, fitted peaks, parameters, and the figure in pdf
##### all goes into the ./deconv/ folder
name.rfind('/')
nameout = name[name.rfind('/')+1::]
namesample = nameout[0:nameout.find('.')]
pathint = str('/deconv/') # the output folder
ext1 = '_ydec.txt'
ext2 = '_params.txt'
ext3 = '.pdf'
pathout1 = pathbeg+pathint+namesample+ext1
pathout2 = pathbeg+pathint+namesample+ext2
pathout3 = pathbeg+pathint+namesample+ext3
matout = np.vstack((xfit,data,yout,peak1,peak2))
matout = np.transpose(matout)
np.savetxt(pathout1,matout) # saving the arrays of spectra
fd = os.open( pathout2, os.O_RDWR|os.O_CREAT ) # Open a file and create it if it do not exist
fo = os.fdopen(fd, "w+") # Now get a file object for the above file.
fo.write(model) # write the parameters in it
fo.close()
savefig(pathout3) # save the figure
| gpl-2.0 | 1,000,396,658,416,147,200 | 38.668675 | 193 | 0.632194 | false |
tarikgwa/nfd | newfies/mod_registration/forms.py | 1 | 1850 |
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <info@star2billing.com>
#
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.forms import SetPasswordForm
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit, Div, Field
from crispy_forms.bootstrap import FormActions
class ForgotForm(forms.Form):
"""Forgot password Form"""
email = forms.EmailField(max_length=60, label=_('Email'), required=True)
email.widget.attrs['class'] = 'form-control'
def __init__(self, *args, **kwargs):
super(ForgotForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'well'
self.helper.layout = Layout(
Div(
Div('email', css_class='col-md-4'),
css_class='row'
),
FormActions(Submit('submit', _('Reset my password')))
)
class CustomSetPasswordForm(SetPasswordForm):
"""Set Password Form"""
def __init__(self, *args, **kwargs):
super(CustomSetPasswordForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Div(
Div(
Field('new_password1'),
Field('new_password2'),
Submit('submit', _('Change my password')),
css_class='col-md-4'
),
css_class='well col-md-12'
),
)
| mpl-2.0 | 7,480,438,449,943,020,000 | 30.896552 | 76 | 0.6 | false |
argonemyth/argonemyth-blog | blog/migrations/0008_auto__add_field_photo_orientation.py | 1 | 8412 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Photo.orientation'
db.add_column(u'blog_photo', 'orientation',
self.gf('django.db.models.fields.CharField')(default='landscape', max_length=20),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Photo.orientation'
db.delete_column(u'blog_photo', 'orientation')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'blog.blogcategory': {
'Meta': {'ordering': "('position', 'title')", 'object_name': 'BlogCategory'},
'background': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'blog.blogpost': {
'Meta': {'ordering': "('-date_published',)", 'object_name': 'BlogPost'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'blogposts'", 'to': u"orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'blogposts'", 'null': 'True', 'to': u"orm['blog.BlogCategory']"}),
'comment_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_expired': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_published': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'view_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'blog.photo': {
'Meta': {'ordering': "['post', 'position']", 'object_name': 'Photo'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'orientation': ('django.db.models.fields.CharField', [], {'default': "'landscape'", 'max_length': '20'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'photos'", 'to': u"orm['blog.BlogPost']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['blog']
| gpl-3.0 | -4,360,855,887,661,198,000 | 72.798246 | 187 | 0.552306 | false |
astroScott/TeachingUtilities | classutils.py | 1 | 5241 |
import os
from configparser import ConfigParser
import string
from numpy import std, mean
def read_config(file = 'configdata.cfg'):
"""
read the configuration file, by default configdata.cfg.
returns a ConfigParser object containing the parsed config info.
"""
config = ConfigParser()
config.read(file)
assert(os.path.isfile(config['Config']['csvfile'].strip()))
return config
def reader(filename, delimiter=',', quotechar="\""):
f = open(filename,'r')
data = []
for line in f:
line = ''.join(filter(lambda x:x in string.printable, line))
line=line.replace(quotechar, "")
data.append([item.strip() for item in line.split(delimiter)])
header = data.pop(0)
return [Student(dict(zip(header, item))) for item in data ]
def reverse_lookup(dic, key):
for k, v in dic.items():
if v==key:
return k
raise KeyError(key)
cfg_dict = read_config()
aliases = cfg_dict['Column Headers']
assignment_map={}
def convert(string, char='?'):
"""
this converts a messier header name to something neater.
key formats specified in config file where identifiers notated by \'?\'
example:
in config file we specify:
conc? = Conclusion?blahblahblah 88458924532453
in actual datafile we find Conclusion02blahblahblah 88458924532453
which maps over to
conc02
"""
#need to find a better, more general name to parse headers
for key, value in cfg_dict["Assignments"].items():
val=value.split(char)
val[0] = val[0].strip()
val[1] = val[1].strip()
if val[0] in string:
unique=string.replace(val[0],"")[0:2]
ret_val = key.split(char)
assignment_map[string]=ret_val[0]+unique
return ret_val[0]+unique
return None
class Student(object):
def __init__(self, row,char='?'):
"""row is a dict"""
aliases = cfg_dict['Column Headers']
for k, v in aliases.items():
setattr(self,k,row[v])
for key, val in row.items():
key=key.strip()
if not key in assignment_map.keys():
key=convert(key)
try:
newkey=assignment_map[key]
#print(newkey)
setattr(self,newkey,val)
except KeyError:
pass
def __str__(self):
return self.firstname+' '+self.lastname
def get_total(self):
"""
get the totals for a student.
input:
----------------------------
student: an instance of Student
cfg_dict: the configuration dictionary defined in classUtils.read_config
output:
----------------------------
score summed over all assignments
"""
summ = 0.
for key in list(self.__dict__.keys()):
if key in assignment_map.values():
try:
summ+=float(getattr(self,key))
except:
if getattr(self,key)=='' or getattr(self,key)==None:
continue
else:
raise ValueError("cannot convert: "+str(getattr(self,key)))
return summ
@staticmethod
def get_list(section, data):
if type(data) is str:
data=reader(data)
return [Student(item) for item in data if item[aliases['section']]==section]
@staticmethod
def get_column(studentlist, key):
"""get all values for all students in a section"""
key=str(key)
assert(not key is None)
#try:
# key=reverse_lookup(assignment_map, key)
# assert(not key is None)
#except KeyError:
# for key, val in assignment_map.items():
# print(key, val)
# raise
try:
return [getattr(item, key) for item in studentlist]
except TypeError:
print(key)
raise
except:
print(assignment_map)
print(key)
print(dir(studentlist[0]))
raise
@staticmethod
def getStats(assignment, studentlist):
"""
input: assignment name (str), section number
output: tuple of (mean, stdev). returns None on failure
"""
try:
assert(assignment in assignment_map.values())
except:
raise Exception(str(assignment)+" not in "+str(assignment_map.values()))
col = Student.get_column(studentlist, assignment)
for i in range(0,len(col)):
try:
col[i]=float(col[i])
except ValueError:
col[i]=0.
return mean(col), std(col), col
@staticmethod
def get_all_stats(studentlist):
lst = [item.get_total() for item in studentlist]
return mean(lst), std(lst), lst
@staticmethod
def get_emails(studentlist, ext=cfg_dict["Email"]["emailext"]):
return [item.username+'@'+ext for item in studentlist]
all_students=reader(cfg_dict["Config"]["csvfile"])
def get_section(section):
return [item for item in all_students if item.section==section]
| gpl-2.0 | -6,433,354,924,796,667,000 | 29.47093 | 96 | 0.552566 | false |
batra-mlp-lab/DIGITS | digits/dataset/tasks/analyze_db.py | 1 | 4644 |
# Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.
import sys
import os.path
import re
import digits
from digits.utils import subclass, override
from digits.task import Task
# NOTE: Increment this every time the pickled object changes
PICKLE_VERSION = 1
@subclass
class AnalyzeDbTask(Task):
"""
Reads information from a database
"""
def __init__(self, database, purpose, **kwargs):
"""
Arguments:
database -- path to the database to analyze
purpose -- what is this database going to be used for
Keyword arguments:
force_same_shape -- if True, enforce that every entry in the database has the same shape
"""
self.force_same_shape = kwargs.pop('force_same_shape', False)
super(AnalyzeDbTask, self).__init__(**kwargs)
self.pickver_task_analyzedb = PICKLE_VERSION
self.database = database
self.purpose = purpose
self.backend = 'lmdb'
# Results
self.image_count = None
self.image_width = None
self.image_height = None
self.image_channels = None
self.analyze_db_log_file = 'analyze_db_%s.log' % '-'.join(p.lower() for p in self.purpose.split())
def __getstate__(self):
state = super(AnalyzeDbTask, self).__getstate__()
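# Drop the open log file handle before pickling; it cannot be serialized
# and is reopened in before_run() the next time the task runs.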
if 'analyze_db_log' in state:
del state['analyze_db_log']
return state
def __setstate__(self, state):
super(AnalyzeDbTask, self).__setstate__(state)
if not hasattr(self, 'backend') or self.backend is None:
self.backend = 'lmdb'
@override
def name(self):
return 'Analyze DB (%s)' % (self.purpose)
@override
def html_id(self):
return 'task-analyze-db-%s' % '-'.join(p.lower() for p in self.purpose.split())
@override
def offer_resources(self, resources):
key = 'analyze_db_task_pool'
if key not in resources:
return None
for resource in resources[key]:
if resource.remaining() >= 1:
return {key: [(resource.identifier, 1)]}
return None
@override
def task_arguments(self, resources, env):
args = [sys.executable, os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(digits.__file__))),
'tools', 'analyze_db.py'),
self.database,
]
if self.force_same_shape:
args.append('--force-same-shape')
else:
args.append('--only-count')
return args
@override
def before_run(self):
super(AnalyzeDbTask, self).before_run()
self.analyze_db_log = open(self.path(self.analyze_db_log_file), 'a')
@override
def process_output(self, line):
self.analyze_db_log.write('%s\n' % line)
self.analyze_db_log.flush()
timestamp, level, message = self.preprocess_output_digits(line)
if not message:
return False
# progress
match = re.match(r'Progress: (\d+)\/(\d+)', message)
if match:
self.progress = float(match.group(1))/float(match.group(2))
self.emit_progress_update()
return True
# total count
match = re.match(r'Total entries: (\d+)', message)
if match:
self.image_count = int(match.group(1))
return True
# image dimensions
match = re.match(r'(\d+) entries found with shape ((\d+)x(\d+)x(\d+))', message)
if match:
count = int(match.group(1))
dims = match.group(2)
self.image_width = int(match.group(3))
self.image_height = int(match.group(4))
self.image_channels = int(match.group(5))
self.logger.debug('Images are %s' % dims)
return True
if level == 'warning':
self.logger.warning('%s: %s' % (self.name(), message))
return True
if level in ['error', 'critical']:
self.logger.error('%s: %s' % (self.name(), message))
self.exception = message
return True
return True
@override
def after_run(self):
super(AnalyzeDbTask, self).after_run()
self.analyze_db_log.close()
def image_type(self):
"""
Returns an easy-to-read version of self.image_channels
"""
if self.image_channels is None:
return None
elif self.image_channels == 1:
return 'GRAYSCALE'
elif self.image_channels == 3:
return 'COLOR'
else:
return '%s-channel' % self.image_channels
| bsd-3-clause | 8,634,382,080,693,580,000 | 29.352941 | 106 | 0.566322 | false |
JSeam2/IsoGraph | old/gen_iso_graph.py | 1 | 4050 |
# -*- coding: utf-8 -*-
"""
Generate adjacency matrices of isomorphic graphs
task
1) Check the sizes of isomorphic graphs you want to generate
2) Store them in different numpy files for various graph sizes
Build a table for example:
______________________________________
| Graph 1 | Graph 2 | Is Isomorphic? |
|--------------------------------------
| ... | ... | 0 - No; 1 - Yes|
|______________________________________
. . .
. . .
. . .
"""
import os
import numpy as np
import networkx as nx
from networkx.algorithms import isomorphism
import sqlite3
def gen_rnd_graph(n, mode="dense"):
"""
Generate a random pair of isomorphic graphs as adjacency matrices
Adjacency matrices are numpy arrays
n gives the total number of nodes in the graph
If graphs are isomorphic:
put 1 in the Is Isomorphic column
else:
put 0 in the Is Isomorphic column
output the | Graph_1 | Graph_2 |
Output the isomorphic graphs adjacency matrix
Some mathematical definition:
G ≅ H (G is isomorphic to H)
iff ∃ a: V(G)→ V(H) (A bijection)
such that
a(u)a(v) ∈ E(H) ↔ uv ∈ E(G)
Similarly,
for some permutation matrix P,
G ≅ H ↔ A_G = P* A_H *P_transpose
:param:
nodes(int): number of node
mode(str) : 'dense' to generate dense graph
'sparse' for sparse graph
:returns:
tuple (graph1(numpy), graph2(numpy), is_isomorphic(int))
"""
if mode == 'dense':
# Generate random graph, G1
G1 = nx.dense_gnm_random_graph(n, n)
# Generate random graph, G2
G2 = nx.dense_gnm_random_graph(n, n)
# This might not really be sparse
elif mode == 'sparse':
G1 = nx.gnm_random_graph(n, n)
G2 = nx.gnm_random_graph(n, n)
else:
return 'Invalid Mode'
# Check if graphs are isomorphic
GM = isomorphism.GraphMatcher(G1, G2)
# Check if graphs are isomorphic
if GM.is_isomorphic():
is_GM_isomorphic = 1
else:
is_GM_isomorphic = 0
# Convert graphs to numpy matrix
G1_numpy = nx.to_numpy_matrix(G1)
G2_numpy = nx.to_numpy_matrix(G2)
return (G1_numpy, G2_numpy, is_GM_isomorphic)
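# --- Illustrative sketch (assumed helper, not part of the original module) ---
# The docstring above quotes the identity G ≅ H <=> A_G = P * A_H * P_transpose
# for some permutation matrix P. The helper below checks that identity
# directly for the bijection found by GraphMatcher.
def _check_permutation_identity(G1, G2):
    GM = isomorphism.GraphMatcher(G1, G2)
    if not GM.is_isomorphic():
        return False
    nodes1 = list(G1.nodes())
    nodes2 = list(G2.nodes())
    # Build P from the node mapping: P[i, j] = 1 iff nodes1[i] maps to nodes2[j]
    P = np.asmatrix(np.zeros((len(nodes1), len(nodes2))))
    for u, v in GM.mapping.items():
        P[nodes1.index(u), nodes2.index(v)] = 1
    A1 = nx.to_numpy_matrix(G1, nodelist=nodes1)
    A2 = nx.to_numpy_matrix(G2, nodelist=nodes2)
    return np.allclose(A1, P * A2 * P.T)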
def save_graph(nodes, num_graph, db_path = "./graph.db" ):
"""
Looks for graph.db, creates graph.db if it doesn't exist
Run gen_rnd_graph(nodes), creates up till the nodes in parameters. Doesn't create for 3 nodes and below.
and store it with sqlite3
:param: nodes := number of nodes the database will make until(int)
:param: num_graph := number of graphs to generate (int)
:param: db_path := path of sqlite3 db, default is same directory as gen_iso_graph.py
"""
# in event connection to database is not possible put None
conn = None
# connect to db path
# will make sql database if it doesn't exist
conn = sqlite3.connect(db_path)
with conn:
# 1st loop to make various tables with various nodes x
# 2nd loop to make insert gen_rnd_graph entries with nodes x
for x in range(3,nodes):
cur = conn.cursor()
# Create Table this string formatting of a SQL command is generally
# bad but we can make do with this for now.
cur.execute("CREATE TABLE IF NOT EXISTS Node_{} (Id INT, Graph1 BLOB, Graph2 BLOB, is_isomorphic INT)".format(str(x)))
for num in range(num_graph):
g1, g2 , is_isomorphic = gen_rnd_graph(x)
# Convert np tostring
# To retrieve back using np.fromstring(bytearray)
cur.execute("INSERT INTO Node_{} VALUES(?,?,?,?)".format(str(x))
,(num, g1.tostring(), g2.tostring(), is_isomorphic))
conn.commit()
if __name__ == "__main__":
#save_graph(10, 20000, "./graph.db")
A,B,C = (gen_rnd_graph(3,mode='sparse'))
print(A)
print()
print(B)
print()
print(C)
| mit | -5,893,967,120,085,904,000 | 27.814286 | 130 | 0.57883 | false |
adamfisk/littleshoot-client | server/common/appengine/patch/common/appenginepatch/ragendja/forms.py | 1 | 11392 |
from copy import deepcopy
import re
from django.utils.datastructures import SortedDict, MultiValueDict
from django.utils.html import conditional_escape
from django.utils.encoding import StrAndUnicode, smart_unicode, force_unicode
from django.utils.safestring import mark_safe
from django.forms.widgets import flatatt
from google.appengine.ext import db
from ragendja.dbutils import transaction
class FormWithSets(object):
def __init__(self, form, formsets=()):
self.form = form
setattr(self, '__module__', form.__module__)
setattr(self, '__name__', form.__name__ + 'WithSets')
setattr(self, '__doc__', form.__doc__)
self._meta = form._meta
fields = [(name, field) for name, field in form.base_fields.iteritems() if isinstance(field, FormSetField)]
formset_dict = dict(formsets)
newformsets = []
for name, field in fields:
if formset_dict.has_key(name):
continue
newformsets.append((name, {'formset':field.make_formset(form._meta.model)}))
self.formsets = formsets + tuple(newformsets)
def __call__(self, *args, **kwargs):
prefix = kwargs['prefix'] + '-' if 'prefix' in kwargs else ''
form = self.form(*args, **kwargs)
formsets = []
for name, formset in self.formsets:
kwargs['prefix'] = prefix + name
instance = formset['formset'](*args, **kwargs)
if form.base_fields.has_key(name):
field = form.base_fields[name]
else:
field = FormSetField(formset['formset'].model, **formset)
formsets.append(BoundFormSet(field, instance, name, formset))
return type(self.__name__ + 'Instance', (FormWithSetsInstance, ), {})(self, form, formsets)
def pretty_name(name):
"Converts 'first_name' to 'First name'"
name = name[0].upper() + name[1:]
return name.replace('_', ' ')
table_sections_re = re.compile(r'^(.*?)(<tr>.*</tr>)(.*?)$', re.DOTALL)
table_row_re = re.compile(r'(<tr>(<th><label.*?</label></th>)(<td>.*?</td>)</tr>)', re.DOTALL)
ul_sections_re = re.compile(r'^(.*?)(<li>.*</li>)(.*?)$', re.DOTALL)
ul_row_re = re.compile(r'(<li>(<label.*?</label>)(.*?)</li>)', re.DOTALL)
p_sections_re = re.compile(r'^(.*?)(<p>.*</p>)(.*?)$', re.DOTALL)
p_row_re = re.compile(r'(<p>(<label.*?</label>)(.*?)</p>)', re.DOTALL)
label_re = re.compile(r'^(.*)<label for="id_(.*?)">(.*)</label>(.*)$')
class BoundFormSet(StrAndUnicode):
def __init__(self, field, formset, name, args):
self.field = field
self.formset = formset
self.name = name
self.args = args
if self.field.label is None:
self.label = pretty_name(name)
else:
self.label = self.field.label
self.auto_id = self.formset.auto_id % self.formset.prefix
if args.has_key('attrs'):
self.attrs = args['attrs'].copy()
else:
self.attrs = {}
def __unicode__(self):
"""Renders this field as an HTML widget."""
return self.as_widget()
def as_widget(self, attrs=None):
"""
Renders the field by rendering the passed widget, adding any HTML
attributes passed as attrs. If no widget is specified, then the
field's default widget will be used.
"""
attrs = attrs or {}
auto_id = self.auto_id
if auto_id and 'id' not in attrs and not self.args.has_key('id'):
attrs['id'] = auto_id
try:
data = self.formset.as_table()
name = self.name
return self.render(name, data, attrs=attrs)
except Exception, e:
import traceback
return traceback.format_exc()
def render(self, name, value, attrs=None):
table_sections = table_sections_re.search(value).groups()
output = []
heads = []
current_row = []
first_row = True
first_head_id = None
prefix = 'id_%s-%%s-' % self.formset.prefix
for row, head, item in table_row_re.findall(table_sections[1]):
if first_row:
head_groups = label_re.search(head).groups()
if first_head_id == head_groups[1]:
first_row = False
output.append(current_row)
current_row = []
else:
heads.append('%s%s%s' % (head_groups[0], head_groups[2], head_groups[3]))
if first_head_id is None:
first_head_id = head_groups[1].replace('-0-','-1-')
current_row.append(item)
if not first_row and len(current_row) >= len(heads):
output.append(current_row)
current_row = []
if len(current_row) != 0:
raise Exception('Unbalanced render')
def last_first(tuple):
return tuple[-1:] + tuple[:-1]
return mark_safe(u'%s<table%s><tr>%s</tr><tr>%s</tr></table>%s'%(
table_sections[0],
flatatt(attrs),
u''.join(last_first(heads)),
u'</tr><tr>'.join((u''.join(last_first(x)) for x in output)),
table_sections[2]))
class CachedQuerySet(object):
def __init__(self, get_queryset):
self.queryset_results = (x for x in get_queryset())
def __call__(self):
return self.queryset_results
class FormWithSetsInstance(object):
def __init__(self, master, form, formsets):
self.master = master
self.form = form
self.formsets = formsets
self.instance = form.instance
def __unicode__(self):
return self.as_table()
def is_valid(self):
result = self.form.is_valid()
for bf in self.formsets:
result = bf.formset.is_valid() and result
return result
def save(self, *args, **kwargs):
def save_forms(forms, obj=None):
for form in forms:
if not instance and form != self.form:
for row in form.forms:
row.cleaned_data[form.rel_name] = obj
form_obj = form.save(*args, **kwargs)
if form == self.form:
obj = form_obj
return obj
instance = self.form.instance
grouped = [self.form]
ungrouped = []
# cache the result of get_queryset so that it doesn't run inside the transaction
for bf in self.formsets:
if bf.formset.rel_name == 'parent':
grouped.append(bf.formset)
else:
ungrouped.append(bf.formset)
bf.formset_get_queryset = bf.formset.get_queryset
bf.formset.get_queryset = CachedQuerySet(bf.formset_get_queryset)
obj = db.run_in_transaction(save_forms, grouped)
save_forms(ungrouped, obj)
for bf in self.formsets:
bf.formset.get_queryset = bf.formset_get_queryset
del bf.formset_get_queryset
return obj
def _html_output(self, form_as, normal_row, help_text_html, sections_re, row_re):
formsets = SortedDict()
for bf in self.formsets:
if bf.label:
label = conditional_escape(force_unicode(bf.label))
# Only add the suffix if the label does not end in
# punctuation.
if self.form.label_suffix:
if label[-1] not in ':?.!':
label += self.form.label_suffix
label = label or ''
else:
label = ''
if bf.field.help_text:
help_text = help_text_html % force_unicode(bf.field.help_text)
else:
help_text = u''
formsets[bf.name] = normal_row % {'label': force_unicode(label), 'field': unicode(bf), 'help_text': help_text}
try:
output = []
data = form_as()
section_search = sections_re.search(data)
if not section_search:
output.append(data)
else:
section_groups = section_search.groups()
for row, head, item in row_re.findall(section_groups[1]):
head_search = label_re.search(head)
if head_search:
id = head_search.groups()[1]
if formsets.has_key(id):
row = formsets[id]
del formsets[id]
output.append(row)
for name, row in formsets.items():
if name in self.form.fields.keyOrder:
output.append(row)
return mark_safe(u'\n'.join(output))
except Exception,e:
import traceback
return traceback.format_exc()
def as_table(self):
"Returns this form rendered as HTML <tr>s -- excluding the <table></table>."
return self._html_output(self.form.as_table, u'<tr><th>%(label)s</th><td>%(help_text)s%(field)s</td></tr>', u'<br />%s', table_sections_re, table_row_re)
def as_ul(self):
"Returns this form rendered as HTML <li>s -- excluding the <ul></ul>."
return self._html_output(self.form.as_ul, u'<li>%(label)s %(help_text)s%(field)s</li>', u' %s', ul_sections_re, ul_row_re)
def as_p(self):
"Returns this form rendered as HTML <p>s."
return self._html_output(self.form.as_p, u'<p>%(label)s %(help_text)s</p>%(field)s', u' %s', p_sections_re, p_row_re)
def full_clean(self):
self.form.full_clean()
for bf in self.formsets:
bf.formset.full_clean()
def has_changed(self):
result = self.form.has_changed()
for bf in self.formsets:
result = bf.formset.has_changed() or result
return result
def is_multipart(self):
result = self.form.is_multipart()
for bf in self.formsets:
result = bf.formset.is_multipart() or result
return result
from django.forms.fields import Field
from django.forms.widgets import Widget
from django.forms.models import inlineformset_factory
class FormSetWidget(Widget):
def __init__(self, field, attrs=None):
super(FormSetWidget, self).__init__(attrs)
self.field = field
def render(self, name, value, attrs=None):
if value is None: value = 'FormWithSets decorator required to render %s FormSet' % self.field.model.__name__
value = force_unicode(value)
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(conditional_escape(value))
class FormSetField(Field):
def __init__(self, model, widget=FormSetWidget, label=None, initial=None,
help_text=None, error_messages=None, show_hidden_initial=False,
formset_factory=inlineformset_factory, *args, **kwargs):
widget = widget(self)
super(FormSetField, self).__init__(required=False, widget=widget, label=label, initial=initial, help_text=help_text, error_messages=error_messages, show_hidden_initial=show_hidden_initial)
self.model = model
self.formset_factory = formset_factory
self.args = args
self.kwargs = kwargs
def make_formset(self, parent_model):
return self.formset_factory(parent_model, self.model, *self.args, **self.kwargs)
| gpl-2.0 | 5,366,842,550,476,129,000 | 39.397163 | 196 | 0.561271 | false |
Shinoby1992/xstream | default.py | 1 | 1196 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
from os import getcwd
from os.path import join
from sys import path
import xbmc
from xbmc import log
from resources.lib import common
__settings__ = common.addon
__cwd__ = common.addonPath
# Add different library path
path.append(join(__cwd__, "resources", "lib"))
path.append(join(__cwd__, "resources", "lib", "gui"))
path.append(join(__cwd__, "resources", "lib", "handler"))
path.append(join(__cwd__, "resources", "art", "sites"))
path.append(join(__cwd__, "sites"))
log("The new sys.path list: %s" % path, level = xbmc.LOGDEBUG)
# Run xstream
from xstream import run
log('*---- Running xStream, version %s ----*' % __settings__.getAddonInfo('version'))
#import cProfile
#cProfile.run('run()',join(__cwd__,'xstream.pstats'))
try:
run()
except Exception, err:
if str(err) == 'UserAborted':
print "\t[xStream] User aborted list creation"
else:
import traceback
import xbmcgui
print traceback.format_exc()
dialog = xbmcgui.Dialog().ok('Error',str(err.__class__.__name__)+" : "+str(err),str(traceback.format_exc().splitlines()[-3].split('addons')[-1]))
| gpl-3.0 | -5,597,492,991,402,507,000 | 30.378378 | 153 | 0.624582 | false |
sokolic/miniSASS | minisass_registration/forms.py | 1 | 6601 |
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
# Frank Sokolic: June 2018 - Disabled all the recaptcha code as version 1 is no longer supported
#from cmsplugin_contact.nospam.widgets import RecaptchaChallenge, RecaptchaResponse
from registration.forms import RegistrationForm
from minisass_registration.models import Lookup
def _get_organisation_types():
result = [('','-- Select a Type --')]
qs = Lookup.objects.filter(
container__description='Organisation Type',
active=True)
qs = qs.order_by('rank', 'description')
result.extend([(itm.id, itm.description,) for itm in qs])
return result
def _get_organisation_names():
return []
def _get_countries():
result = [('','-- Select a Country --')]
qs = Lookup.objects.raw("SELECT * FROM minisass_registration_lookup WHERE container_id='8' AND active ='t' ORDER BY rank = 0, rank, description" )
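# ("rank = 0" sorts as a boolean, so zero-ranked countries fall to the end;
#  the remaining rows are ordered by rank, then description.)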
result.extend([(itm.id, itm.description,) for itm in qs])
return result
def _get_countries_old():
result = [('','-- Select a Country --')]
qs = Lookup.objects.filter(
container__description='Country',
active=True)
qs = qs.order_by('rank', 'description')
result.extend([(itm.id, itm.description,) for itm in qs])
return result
class miniSASSregistrationForm(RegistrationForm):
""" Add fields for firstname, lastname and organisation
"""
firstname = forms.CharField(
label=_("Name"),
max_length=30,
help_text=_(u"Kept confidential"))
lastname = forms.CharField(
label=_("Surname"),
max_length=30,
help_text=_(u"Kept confidential"))
organisation_type = forms.ChoiceField(
label=_("Organisation Type"),
required=True,
help_text=_(u"Please select an organisation type, \
or private individual"))
organisation_name = forms.CharField(
label=_("Organisation Name"),
max_length=50,
help_text=_(u"Please check if school already listed, \
then add if not."),
required=False)
country = forms.ChoiceField(
label=_("Country"),
required=False,
help_text=_(u"Please select a country"))
# recaptcha_challenge_field = forms.CharField(widget=RecaptchaChallenge)
# recaptcha_response_field = forms.CharField(
# widget = RecaptchaResponse,
# label = _('Please enter the letters/digits you see in the image :'),
# error_messages = {
# 'required': _('You did not enter any of the words.')
# })
# recaptcha_always_validate = False
def __init__(self, request, *args, **kwargs):
# Because the ReCAPTCHA library requires the fields to be named a
# certain way, using a form prefix will break the validation unless we
# modify the received POST and rename the keys accordingly
self._request = request
if ('data' in kwargs or len(args) > 1) and 'prefix' in kwargs:
data = kwargs.get('data', args[1]).__copy__()
# data['%s-recaptcha_challenge_field' % kwargs['prefix']] = \
# data.pop('recaptcha_challenge_field', [u''])[0]
# data['%s-recaptcha_response_field' % kwargs['prefix']] = \
# data.pop('recaptcha_response_field', [u''])[0]
data._mutable = False
# Since data could have been passed eith as an arg or kwarg, set
# the right one to the new data
if 'data' in kwargs:
kwargs['data'] = data
else:
args = (args[0], data) + args[2:]
super(miniSASSregistrationForm, self).__init__(*args, **kwargs)
# self._recaptcha_public_key = getattr(self, 'recaptcha_public_key', getattr(settings, 'RECAPTCHA_PUBLIC_KEY', None))
# self._recaptcha_private_key = getattr(self, 'recaptcha_private_key', getattr(settings, 'RECAPTCHA_PRIVATE_KEY', None))
# self._recaptcha_theme = getattr(self, 'recaptcha_theme', getattr(settings, 'RECAPTCHA_THEME', 'clean'))
# self.fields['recaptcha_response_field'].widget.public_key = self._recaptcha_public_key
# self.fields['recaptcha_response_field'].widget.theme = self._recaptcha_theme
# Move the ReCAPTCHA fields to the end of the form
# self.fields['recaptcha_challenge_field'] = self.fields.pop('recaptcha_challenge_field')
# self.fields['recaptcha_response_field'] = self.fields.pop('recaptcha_response_field')
self.fields['username'].help_text = \
_(u"Public username (don't use any spaces)")
self.fields['username'].error_messages={'invalid': _("The username may only contain letters, numbers and @, fullstop, plus, minus or underscore characters. NO SPACES.")}
self.fields['email'].help_text = _(u"Kept confidential")
self.fields['organisation_type'].choices = _get_organisation_types()
self.fields['country'].choices = _get_countries()
self.fields.keyOrder = [
'username',
'firstname', 'lastname',
'email',
'organisation_type',
'organisation_name',
'country',
'password1',
'password2',
# 'recaptcha_challenge_field',
# 'recaptcha_response_field'
]
# def clean_recaptcha_response_field(self):
# if 'recaptcha_challenge_field' in self.cleaned_data:
# self._validate_captcha()
# return self.cleaned_data['recaptcha_response_field']
# def clean_recaptcha_challenge_field(self):
# if 'recaptcha_response_field' in self.cleaned_data:
# self._validate_captcha()
# return self.cleaned_data['recaptcha_challenge_field']
# def _validate_captcha(self):
# if not self.recaptcha_always_validate:
# rcf = self.cleaned_data['recaptcha_challenge_field']
# rrf = self.cleaned_data['recaptcha_response_field']
# if rrf == '':
# raise forms.ValidationError(_('You did not enter the two words shown in the image.'))
# else:
# from recaptcha.client import captcha as recaptcha
# ip = self._request.META['REMOTE_ADDR']
# check = recaptcha.submit(rcf, rrf, self._recaptcha_private_key, ip)
# if not check.is_valid:
# raise forms.ValidationError(_('The words you entered did not match the image.'))
| gpl-3.0 | -6,425,227,694,695,306,000 | 43.904762 | 177 | 0.609302 | false |
EvolutionClip/pyload | module/plugins/hooks/OverLoadMe.py | 1 | 1605 |
# -*- coding: utf-8 -*-
from module.plugins.internal.MultiHook import MultiHook
class OverLoadMe(MultiHook):
__name__ = "OverLoadMe"
__type__ = "hook"
__version__ = "0.04"
__config__ = [("pluginmode" , "all;listed;unlisted", "Use for plugins" , "all"),
("pluginlist" , "str" , "Plugin list (comma separated)" , "" ),
("revertfailed" , "bool" , "Revert to standard download if fails", True ),
("retry" , "int" , "Number of retries before revert" , 10 ),
("retryinterval" , "int" , "Retry interval in minutes" , 1 ),
("reload" , "bool" , "Reload plugin list" , True ),
("reloadinterval", "int" , "Reload interval in hours" , 12 ),
("ssl" , "bool" , "Use HTTPS" , True )]
__description__ = """Over-Load.me hook plugin"""
__license__ = "GPLv3"
__authors__ = [("marley", "marley@over-load.me")]
def getHosters(self):
https = "https" if self.getConfig("ssl") else "http"
page = self.getURL(https + "://api.over-load.me/hoster.php",
get={'auth': "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"}).replace("\"", "").strip()
self.logDebug("Hosterlist", page)
return [x.strip() for x in page.split(",") if x.strip()]
| gpl-3.0 | 841,295,249,525,964,800 | 50.774194 | 138 | 0.456075 | false |
zathras777/pywind | pywind/elexon/cmd.py | 1 | 11805 |
from datetime import timedelta, time, datetime, date
from pywind.elexon.api import B1420, B1330, B1320, FUELINST, \
DERSYSDATA, DERBMDATA, BMUNITSEARCH, \
B1610, B1630, UOU2T52W
from pywind.elexon.unit import BalancingData
from pywind.utils import StdoutFormatter, args_get_datetime
def check_api_key(args):
if args.apikey is None:
print("You MUST supply an API key to access Elexon data.")
print("Registration is free, but you need to go to the URL below and register.")
print("https://www.elexonportal.co.uk/registration/newuser")
return False
return True
def get_check_data(api, params):
if not api.get_data(**params):
print("No data returned.")
return False
return True
def elexon_generation_inst(args):
""" Generation Data at 5 minute intervals from the Elexon Data Portal """
if not check_api_key(args):
return None
api = FUELINST(args.apikey)
args_get_datetime(args)
params = {}
if args.fromdatetime is not None or args.todatetime is not None:
params['FromDateTime'] = args.fromdatetime if args.fromdatetime else args.todatetime - timedelta(days=1)
params['ToDateTime'] = args.todatetime if args.todatetime else args.fromdatetime + timedelta(days=1)
else:
print("Getting data for yesterday as no dates specified.")
params['FromDateTime'] = datetime.combine(date.today() - timedelta(days=2), time(23, 59))
params['ToDateTime'] = datetime.combine(date.today() - timedelta(days=1), time(23, 59))
if get_check_data(api, params) is False:
return None
fmt = StdoutFormatter("10s", "6s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s")
print("\n" + fmt.titles('Date', 'Time', 'Period', 'CCGT', 'Oil', 'Coal', 'Nuclear', 'Wind', 'PS', 'NPSHYD', 'OCGT',
'Other', 'Int Fr', 'Int Irl', 'Int Ned', 'Int E/W'))
for item in api.items:
print(fmt.row(item['date'].strftime("%Y-%m-%d"),
item['time'].strftime("%H:%M"),
item['settlementperiod'],
item['ccgt'],
item['oil'],
item['coal'],
item['nuclear'],
item['wind'],
item['ps'],
item['npshyd'],
item['ocgt'],
item['other'],
item['intfr'],
item['intirl'],
item['intned'],
item['intew'],
))
return api
def elexon_b1320(args):
""" Congestion Management Measures Countertrading """
if not check_api_key(args):
return None
print("This report has *VERY* sparse data.")
api = B1320(args.apikey)
if args.date is None:
print("You MUST supply a date for this report.")
return None
if args.period is None:
print("You MUST supply a period for this report, from 1 to 50")
return None
params = {'SettlementDate': args.date,
'Period': args.period}
if get_check_data(api, params) is False:
return None
fmt = StdoutFormatter("12s", "8s", "10.4f", "9s", "6s", "20s", "10s")
print("\n" + fmt.titles('Date', 'Period', 'Quantity', 'Direction', 'Active', 'Reason', 'Resolution'))
for item in api.items:
print(fmt.row(item['settlementdate'],
item['settlementperiod'],
item['quantity'],
item['flowdirection'],
str(item['activeflag']),
item['reasoncode'],
item['resolution']))
return api
def elexon_b1330(args):
""" Congestion Management Measures Costs of Congestion Management Service """
if args.apikey is None:
print("You MUST supply an API key to access Elexon data")
return None
if args.year is None:
print("You MUST supply a year for this report.")
return None
if args.month is None:
print("You MUST supply a month for this report.")
return None
MONTHS = [
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
]
api = B1330(args.apikey)
params = {'Year': args.year or 2016,
'Month': MONTHS[args.month - 1 or 8]}
if get_check_data(api, params) is False:
return None
fmt = StdoutFormatter("4d", "5s", "40s", "8s")
print("\n" + fmt.titles('Year', 'Mon', 'Document Id', 'Rev. Num'))
for item in api.items:
print(fmt.row(item['year'], item['month'], item['documentid'], item['documentrevnum']))
return api
def elexon_b1420(args):
""" Installed Generation Capacity per Unit """
if not check_api_key(args):
return None
api = B1420(args.apikey)
if not api.get_data(**{'Year': args.year or 2016}):
print("No data returned.")
return None
fmt = StdoutFormatter("30s", "8s", "10s", "6s", "10.1f", "20s")
print("\n" + fmt.titles('Resource Name', 'NGC Id', 'BM Unit Id', 'Active', 'Output', 'Type'))
for item in sorted(api.items, key=lambda xxx: xxx['ngcbmunitid']):
print(fmt.row(item['registeredresourcename'],
item['ngcbmunitid'],
item['bmunitid'],
str(item['activeflag']),
float(item['nominal']),
item.get('powersystemresourcetype', 'n/a')))
return api
def elexon_b1610(args):
""" Generated output by generator """
if not check_api_key(args):
return None
api = B1610(args.apikey)
if args.settlement_period is None:
print("A settlement period should be supplied using the --settlement-period flag (range 1 to 50)."
"Defaulting to 1")
if args.date is None:
print("A date should be supplied using the --date flag. Format is YYYY-MM-DD. Defaulting to today")
if not api.get_data(**{'SettlementDate': args.date or date.today().strftime("%Y-%m-%d"),
'Period': args.settlement_period or 1}):
print("No data returned.")
return None
fmt = StdoutFormatter("8s", "10s", "6s", "6s", "10.1f", "20s", "30s")
print("\n" + fmt.titles('NGC Unit', 'Date', 'Period', 'Active', 'Output', 'Type', 'Reference'))
for item in sorted(api.items, key=lambda xxx: xxx['ngcbmunitid']):
print(fmt.row(item['ngcbmunitid'],
item['settlementdate'],
str(item['settlementperiod']),
str(item['activeflag']),
float(item['quantity']),
item.get('powersystemresourcetype', 'n/a'),
item['documentid'] + " - " + item['documentrevnum']))
return api
def elexon_b1630(args):
""" Actual or forecast Wind & Solar Generation """
if not check_api_key(args):
return None
api = B1630(args.apikey)
if args.settlement_period is None:
print("A settlement period should be supplied using the --settlement-period flag (range 1 to 50)."
"Defaulting to 1")
if args.date is None:
print("A date should be supplied using the --date flag. Format is YYYY-MM-DD. Defaulting to today")
if not api.get_data(**{'SettlementDate': args.date or date.today().strftime("%Y-%m-%d"),
'Period': args.settlement_period or 1}):
print("No data returned.")
return None
fmt = StdoutFormatter("10s", "6s", "6s", "10.1f", "20s", "30s")
print("\n" + fmt.titles('Date', 'Period', 'Active', 'Output', 'Type', 'Reference'))
for item in sorted(api.items, key=lambda xxx: xxx['documentid']):
print(fmt.row(item['settlementdate'],
str(item['settlementperiod']),
str(item['activeflag']),
float(item['quantity']),
item.get('powersystemresourcetype', 'n/a'),
item['documentid'] + " - " + item['documentrevnum']))
return api
def elexon_sbp(args):
""" Derived System Prices from Elexon """
if not check_api_key(args):
return None
api = DERSYSDATA(args.apikey)
params = {
'FromSettlementDate': args.fromdate or date.today() - timedelta(days=1),
        'ToSettlementDate': args.todate or args.fromdate or (date.today() - timedelta(days=1))
}
if args.period is not None:
params['SettlementPeriod'] = args.period
if args.all_periods:
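        # '*' asks the API to return every settlement period in the date range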
params['SettlementPeriod'] = '*'
if get_check_data(api, params) is False:
return None
fmt = StdoutFormatter("15s", "^20d", "15.4f", "15.4f", "4s")
print("\nSystem adjustments are included in the figures shown below where '*' is shown.\n")
print("\n" + fmt.titles('Date', 'Settlement Period', 'Sell Price', 'Buy Price', 'Adj?'))
for item in api.items:
print(fmt.row(item['settlementdate'].strftime("%Y %b %d"),
item['settlementperiod'],
item['systemsellprice'] + item['sellpriceadjustment'],
item['systembuyprice'] + item['buypriceadjustment'],
"*" if item['sellpriceadjustment'] + item['buypriceadjustment'] > 0 else ''
))
return api
def elexon_bm_data(args):
""" Derived System Prices from Elexon """
if not check_api_key(args):
return None
bd = BalancingData(args.apikey)
params = {
'SettlementDate': args.date or date.today() - timedelta(days=1),
'SettlementPeriod': args.period or 1
}
if args.all_periods:
params['SettlementPeriod'] = '*'
if not bd.get_data(**params):
return None
fmt = StdoutFormatter('12s', '^7d', '16.4f', '16.4f', '18.4f', '18.4f', '12.4f', '12.4f')
print("\n" + fmt.titles('Unit Name', 'Period', 'Bid Volume', 'Offer Volume',
'Bid Cashflow', 'Offer Cashflow', 'Bid Rate', 'Offer Rate'))
for unit_name in sorted(bd.units):
unit = bd.units[unit_name]
for period in sorted(unit.periods):
pd = unit.periods[period]
print(fmt.row(unit.unit,
period,
pd.bid_volume,
pd.offer_volume,
pd.bid_cashflow,
pd.offer_cashflow,
pd.bid_rate,
pd.offer_rate))
return bd.api
def elexon_bm_unit(args):
""" Balancing Mechanism Unit information from Elexon """
if not check_api_key(args):
return None
api = BMUNITSEARCH(args.apikey)
params = {
'BMUnitType': args.unit_type or '*'
}
if not get_check_data(api, params):
return None
print("Total of {} units\n".format(len(api.items)))
fmt = StdoutFormatter('12s', '12s', '^8s', '30s', '50s')
print("\n" + fmt.titles('NGC ID', 'BM ID', 'Active ?', 'BM Type', 'Lead Party Name'))
for item in sorted(api.items, key=lambda x: x['ngcbmunitname']):
print(fmt.row(item['ngcbmunitname'],
item['bmunitid'],
'Y' if item['activeflag'] else 'N',
"{}, {}".format(item['bmunittype'], item['category']),
item['leadpartyname']))
return api
def elexon_uou2t52w(args):
""" Generator output for 52 weeks by unit and fueld type """
if not check_api_key(args):
return None
api = UOU2T52W(args.apikey)
params = {}
if not get_check_data(api, params):
return None
return api
|
unlicense
| 6,573,191,138,269,887,000 | 35.661491 | 122 | 0.55036 | false |
LLNL/spack
|
lib/spack/spack/cmd/configure.py
|
2
|
3097
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.install as inst
from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.cmake import CMakePackage
from spack.build_systems.qmake import QMakePackage
from spack.build_systems.waf import WafPackage
from spack.build_systems.perl import PerlPackage
from spack.build_systems.intel import IntelPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.sip import SIPPackage
description = 'DEPRECATED: stage and configure a package but do not install'
section = "build"
level = "long"
build_system_to_phase = {
AutotoolsPackage: 'configure',
CMakePackage: 'cmake',
QMakePackage: 'qmake',
WafPackage: 'configure',
PerlPackage: 'configure',
IntelPackage: 'configure',
MesonPackage: 'meson',
SIPPackage: 'configure',
}
def setup_parser(subparser):
subparser.add_argument(
'-v', '--verbose',
action='store_true',
help="print additional output during builds"
)
arguments.add_common_arguments(subparser, ['spec'])
def _stop_at_phase_during_install(args, calling_fn, phase_mapping):
if not args.package:
tty.die("configure requires at least one package argument")
# TODO: to be refactored with code in install
specs = spack.cmd.parse_specs(args.package, concretize=True)
if len(specs) != 1:
tty.error('only one spec can be installed at a time.')
spec = specs.pop()
pkg = spec.package
try:
key = [cls for cls in phase_mapping if isinstance(pkg, cls)].pop()
phase = phase_mapping[key]
# Install package dependencies if needed
parser = argparse.ArgumentParser()
inst.setup_parser(parser)
tty.msg('Checking dependencies for {0}'.format(args.spec[0]))
cli_args = ['-v'] if args.verbose else []
install_args = parser.parse_args(cli_args + ['--only=dependencies'])
install_args.spec = args.spec
inst.install(parser, install_args)
# Install package and stop at the given phase
cli_args = ['-v'] if args.verbose else []
install_args = parser.parse_args(cli_args + ['--only=package'])
install_args.spec = args.spec
inst.install(parser, install_args, stop_at=phase)
except IndexError:
tty.error(
'Package {0} has no {1} phase, or its {1} phase is not separated from install'.format( # NOQA: ignore=E501
spec.name, calling_fn.__name__)
)
def configure(parser, args):
tty.warn("This command is deprecated. Use `spack install --until` to"
" select an end phase instead. The `spack configure` command will"
" be removed in a future version of Spack.")
_stop_at_phase_during_install(args, configure, build_system_to_phase)
|
lgpl-2.1
| -8,002,407,861,356,429,000 | 35.435294 | 119 | 0.683565 | false |
qsnake/gpaw
|
gpaw/test/wannier_ethylene.py
|
1
|
2008
|
import os
from ase import Atom, Atoms
from gpaw import GPAW
from gpaw.test import equal
from gpaw.wannier import Wannier
import numpy as np
# GPAW wannier example for ethylene corresponding to the ASE Wannier
# tutorial.
a = 6.0 # Size of unit cell (Angstrom)
ethylene = Atoms([Atom('H', (-1.235,-0.936 , 0 )),
Atom('H', (-1.235, 0.936 , 0 )),
Atom('C', (-0.660, 0.000 , 0 )),
Atom('C', ( 0.660, 0.000 , 0 )),
Atom('H', ( 1.235,-0.936 , 0 )),
Atom('H', ( 1.235, 0.936 , 0 ))],
cell=(a, a, a), pbc=True)
ethylene.center()
calc = GPAW(nbands=8, gpts=(32, 32, 32), convergence={'eigenstates': 1e-6})
ethylene.set_calculator(calc)
e = ethylene.get_potential_energy()
niter = calc.get_number_of_iterations()
energy_tolerance = 0.00003
niter_tolerance = 0
equal(e, -33.3232491, energy_tolerance)
equal(niter, 25, niter_tolerance)
def check(calc):
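    # localize the Wannier functions and check the resulting centers against reference positions (Angstrom)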
wannier = Wannier(calc, nbands=6)
wannier.localize()
centers = wannier.get_centers()
print centers
expected = [[1.950, 2.376, 3.000],
[1.950, 3.624, 3.000],
[3.000, 3.000, 2.671],
[3.000, 3.000, 3.329],
[4.050, 2.376, 3.000],
[4.050, 3.624, 3.000]]
equal(13.7995, wannier.value, 0.016)
for center in centers:
i = 0
while np.sum((expected[i] - center)**2) > 0.01:
i += 1
if i == len(expected):
raise RuntimeError, 'Correct center not found'
expected.pop(i)
check(calc)
calc.write('ethylene.gpw', 'all')
check(GPAW('ethylene.gpw', txt=None))
## for i in range(6):
## wannier.write_cube(i, 'ethylene%s.cube' % i, real=True)
## from ASE.Visualization.PrimiPlotter import PrimiPlotter, X11Window
## ethylene.extend(wannier.get_centers_as_atoms())
## plot = PrimiPlotter(ethylene)
## plot.set_output(X11Window())
## plot.set_radii(.2)
## plot.set_rotation([15, 0, 0])
## plot.plot()
|
gpl-3.0
| -7,751,138,382,603,430,000 | 29.424242 | 75 | 0.575697 | false |
jpypi/dup-image-search
|
simple_hash/fast_simple_hash.py
|
1
|
1638
|
#!/usr/bin/env python2
"""
simple_hash.py
Generates a hash using the "simple" method outlined on:
http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html
:author: Brandon Arrendondo
:author: James Jenkins
:license: MIT
"""
import sys
import argparse
import numpy
import glob
from PIL import Image
from multiprocessing import Pool
def calculate_simple_hash(image):
"""
Calculates the simple hash of an image.
The basic steps (verbatim from hackerfactor, see heading):
1. Reduce size to 8x8
2. Reduce color to greyscale
3. Average the colors
4. Compute the 64 bits - 1 if above average, 0 if not
5. Construct the hash
"""
# reduce size to 8x8
image = image.resize((8, 8))
# convert to greyscale
image = image.convert("L")
# average the colors
imgdata = image.getdata()
average = numpy.mean(imgdata)
image_hash = 0
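    # build a 64-bit hash: bit i is set when pixel i is brighter than the mean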
for i in xrange(0, len(imgdata)):
image_hash |= (imgdata[i] > average) << i
return image_hash
def hash_directory(directory):
with open("simple_hashes.txt", "a") as f:
for filepath in glob.iglob("{0!s}/*".format(directory)):
try:
image = Image.open(filepath)
image_hash = calculate_simple_hash(image)
f.write("{0!s},{1!s}\n".format(image_hash, filepath))
except:
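                # skip files that cannot be opened or decoded as images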
pass
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("directory", help="directory to scan")
args = parser.parse_args()
hash_directory(args.directory)
|
mit
| 3,918,191,656,420,829,000 | 23.818182 | 79 | 0.617827 | false |
taget/node
|
scripts/password.py
|
1
|
2111
|
#!/usr/bin/python
# password.py - Copyright (C) 2010 Red Hat, Inc.
# Written by Joey Boggs <jboggs@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA. A copy of the GNU General Public License is
# also available at http://www.gnu.org/copyleft/gpl.html.
from ovirtnode.ovirtfunctions import *
import libuser
import random
import crypt
def cryptPassword(password):
    # build a SHA-512 ('$6$') crypt hash with a 16-character random salt
    saltlen = 16
    saltstr = '$6$'
for i in range(saltlen):
saltstr = saltstr + random.choice (string.letters +
string.digits + './')
return crypt.crypt (password, saltstr)
def set_password(password, user):
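    # replace the user's crypted password in /etc/shadow and persist the file with ovirt_store_config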
admin = libuser.admin()
root = admin.lookupUserByName(user)
passwd = cryptPassword(password)
unmount_config("/etc/shadow")
admin.setpassUser(root, passwd, "is_crypted")
ovirt_store_config("/etc/shadow")
return True
def check_ssh_password_auth():
    aug = augeas.Augeas("root=/")
    # return the configured value rather than the augeas handle itself
    return aug.get("/files/etc/ssh/sshd_config/PasswordAuthentication")
def toggle_ssh_access():
ssh_config = augeas.Augeas("root=/")
ssh_config.set("/files/etc/ssh/sshd_config", OVIRT_VARS["ssh_pass_enabled"])
ssh_config.save()
ovirt_store_config("/etc/ssh/sshd_config")
rc = system_closefds("service sshd reload")
return rc
def set_sasl_password(user, password):
system_closefds("saslpasswd2 -a libvirt -p %s") % user
|
gpl-2.0
| -1,600,541,113,615,766,800 | 35.396552 | 81 | 0.702037 | false |
gamingrobot/SpockBot
|
spockbot/plugins/helpers/entities.py
|
1
|
6663
|
"""
An entity tracker
"""
from spockbot.mcdata.utils import Info
from spockbot.plugins.base import PluginBase, pl_announce
class MCEntity(Info):
eid = 0
status = 0
nbt = None
metadata = None
class MovementEntity(MCEntity):
x = 0
y = 0
z = 0
yaw = 0
pitch = 0
on_ground = True
class PlayerEntity(MovementEntity):
uuid = 0
current_item = 0
metadata = None
class ObjectEntity(MovementEntity):
obj_type = 0
obj_data = 0
speed_x = 0
speed_y = 0
speed_z = 0
class MobEntity(MovementEntity):
mob_type = 0
head_pitch = 0
head_yaw = 0
velocity_x = 0
velocity_y = 0
velocity_z = 0
metadata = None
class PaintingEntity(MCEntity):
title = ""
location = {
'x': 0,
'y': 0,
'z': 0,
}
direction = 0
class ExpEntity(MCEntity):
x = 0
y = 0
z = 0
count = 0
class GlobalEntity(MCEntity):
global_type = 0
x = 0
y = 0
z = 0
class EntitiesCore(object):
def __init__(self):
self.client_player = MCEntity()
self.entities = {}
self.players = {}
self.mobs = {}
self.objects = {}
self.paintings = {}
self.exp_orbs = {}
self.global_entities = {}
@pl_announce('Entities')
class EntitiesPlugin(PluginBase):
requires = 'Event'
events = {
'PLAY<Join Game': 'handle_join_game',
'PLAY<Spawn Player': 'handle_spawn_player',
'PLAY<Spawn Object': 'handle_spawn_object',
'PLAY<Spawn Mob': 'handle_spawn_mob',
'PLAY<Spawn Painting': 'handle_spawn_painting',
'PLAY<Spawn Experience Orb': 'handle_spawn_experience_orb',
'PLAY<Destroy Entities': 'handle_destroy_entities',
'PLAY<Entity Equipment': 'handle_unhandled',
'PLAY<Entity Velocity': 'handle_velocity',
'PLAY<Entity Relative Move': 'handle_relative_move',
'PLAY<Entity Look': 'handle_set_dict',
'PLAY<Entity Look and Relative Move': 'handle_relative_move',
'PLAY<Entity Teleport': 'handle_set_dict',
'PLAY<Entity Head Look': 'handle_set_dict',
'PLAY<Entity Status': 'handle_set_dict',
'PLAY<Entity Metadata': 'handle_set_dict',
'PLAY<Entity Effect': 'handle_unhandled',
'PLAY<Remove Entity Effect': 'handle_unhandled',
'PLAY<Entity Properties': 'handle_unhandled',
'PLAY<Spawn Global Entity': 'handle_spawn_global_entity',
'PLAY<Update Entity NBT': 'handle_set_dict',
}
def __init__(self, ploader, settings):
super(EntitiesPlugin, self).__init__(ploader, settings)
self.ec = EntitiesCore()
ploader.provides('Entities', self.ec)
# TODO: Implement all these things
def handle_unhandled(self, event, packet):
pass
def handle_join_game(self, event, packet):
self.ec.client_player.set_dict(packet.data)
self.ec.entities[packet.data['eid']] = self.ec.client_player
def handle_spawn_player(self, event, packet):
entity = PlayerEntity()
entity.set_dict(packet.data)
self.ec.entities[packet.data['eid']] = entity
self.ec.players[packet.data['eid']] = entity
self.event.emit('entity_spawn', {'entity': entity})
self.event.emit('entity_player_spawn', entity)
def handle_spawn_object(self, event, packet):
entity = ObjectEntity()
entity.set_dict(packet.data)
self.ec.entities[packet.data['eid']] = entity
self.ec.objects[packet.data['eid']] = entity
self.event.emit('entity_spawn', {'entity': entity})
def handle_spawn_mob(self, event, packet):
entity = MobEntity()
entity.set_dict(packet.data)
self.ec.entities[packet.data['eid']] = entity
self.ec.mobs[packet.data['eid']] = entity
self.event.emit('entity_spawn', {'entity': entity})
self.event.emit('entity_mob_spawn', entity)
def handle_spawn_painting(self, event, packet):
entity = PaintingEntity()
entity.set_dict(packet.data)
self.ec.entities[packet.data['eid']] = entity
self.ec.paintings[packet.data['eid']] = entity
self.event.emit('entity_spawn', {'entity': entity})
def handle_spawn_experience_orb(self, event, packet):
entity = ExpEntity()
entity.set_dict(packet.data)
self.ec.entities[packet.data['eid']] = entity
self.ec.exp_orbs[packet.data['eid']] = entity
self.event.emit('entity_spawn', {'entity': entity})
def handle_spawn_global_entity(self, event, packet):
entity = GlobalEntity()
entity.set_dict(packet.data)
self.ec.entities[packet.data['eid']] = entity
self.ec.global_entities[packet.data['eid']] = entity
self.event.emit('entity_spawn', {'entity': entity})
def handle_destroy_entities(self, event, packet):
for eid in packet.data['eids']:
if eid in self.ec.entities:
entity = self.ec.entities[eid]
del self.ec.entities[eid]
if eid in self.ec.players:
del self.ec.players[eid]
elif eid in self.ec.objects:
del self.ec.objects[eid]
elif eid in self.ec.mobs:
del self.ec.mobs[eid]
elif eid in self.ec.paintings:
del self.ec.paintings[eid]
elif eid in self.ec.exp_orbs:
del self.ec.exp_orbs[eid]
elif eid in self.ec.global_entities:
del self.ec.global_entities[eid]
self.event.emit('entity_destroy', {'entity': entity})
def handle_relative_move(self, event, packet):
if packet.data['eid'] in self.ec.entities:
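            # apply the relative deltas on top of the entity's last known absolute position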
entity = self.ec.entities[packet.data['eid']]
old_pos = [entity.x, entity.y, entity.z]
entity.set_dict(packet.data)
entity.x = entity.x + packet.data['dx']
entity.y = entity.y + packet.data['dy']
entity.z = entity.z + packet.data['dz']
self.event.emit('entity_move',
{'entity': entity, 'old_pos': old_pos})
def handle_velocity(self, event, packet):
if packet.data['eid'] in self.ec.entities:
self.ec.entities[packet.data['eid']].set_dict(packet.data)
if packet.data['eid'] == self.ec.client_player.eid:
self.event.emit('entity_player_velocity', packet.data)
def handle_set_dict(self, event, packet):
if packet.data['eid'] in self.ec.entities:
self.ec.entities[packet.data['eid']].set_dict(packet.data)
|
mit
| -3,606,966,926,603,620,400 | 31.34466 | 70 | 0.585922 | false |
boundlessgeo/qgis-connect-plugin
|
boundlessconnect/connect.py
|
1
|
16927
|
# -*- coding: utf-8 -*-
from builtins import object
import os
import re
import json
import base64
import urllib2
import tempfile
from copy import copy
import webbrowser
from qgis.PyQt.QtGui import QIcon, QCursor
from qgis.PyQt.QtCore import Qt, QUrl, QFile, QEventLoop
from qgis.PyQt.QtWidgets import QMessageBox, QApplication
from qgis.PyQt.QtNetwork import QNetworkReply, QNetworkRequest
from qgis.gui import QgsMessageBar, QgsFileDownloader
from qgis.core import QgsNetworkAccessManager, QgsRasterLayer, QgsMapLayerRegistry
from qgis.utils import iface
from qgis import utils as qgsutils
import pyplugin_installer
from pyplugin_installer.installer_data import plugins
from qgiscommons2.network.networkaccessmanager import NetworkAccessManager
from qgiscommons2.gui.settings import pluginSetting
from qgiscommons2.files import tempFilenameInTempFolder
from qgiscommons2.network.oauth2 import (oauth2_supported,
get_oauth_authcfg
)
from boundlessconnect.gui.executor import execute
from boundlessconnect import utils
from boundlessconnect import basemaputils
pluginPath = os.path.dirname(__file__)
OPEN_ROLE = "open"
PUBLIC_ROLE = "public"
SUBSCRIBE_URL = "https://connect.boundlessgeo.com/Upgrade-Subscription"
LESSONS_PLUGIN_NAME = "lessons"
RESULTS_PER_PAGE = 20
class ConnectContent(object):
def __init__(self, url, name, description, roles = ["open"]):
self.url = url
self.name = name
self.description = description
self.roles = roles
def iconPath(self):
pass
def canOpen(self, roles):
matches = [role for role in roles if role in self.roles]
return bool(matches) or (OPEN_ROLE in self.roles) or (PUBLIC_ROLE in self.roles)
def open(self, roles):
if self.canOpen(roles):
self._open()
else:
webbrowser.open_new(SUBSCRIBE_URL)
def asHtmlEntry(self, roles):
canInstall = "Green" if self.canOpen(roles) else "Orange"
desc = self.description
if len(self.description) < 100:
desc = self.description + " " * (100-len(self.description))
s = """<div class="icon"><div class="icon-container">
<img src="{image}"></div></div>
<div class="description"><h2>{title}</h2><p>{description}</p>
<a class="btn{available}" href="{url}">OPEN</a>
</div>
""".format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
title=self.name,
description=desc,
available=canInstall,
url=self.url)
return s
class ConnectWebAdress(ConnectContent):
def _open(self):
webbrowser.open_new(self.url)
class ConnectVideo(ConnectWebAdress):
def typeName(self):
return "Video"
class ConnectLearning(ConnectWebAdress):
def typeName(self):
return "Learning"
def iconPath(self):
return os.path.join(pluginPath, "icons", "learning.svg")
class ConnectQA(ConnectWebAdress):
def typeName(self):
return "Q & A"
def iconPath(self):
return os.path.join(pluginPath, "icons", "qa.svg")
class ConnectBlog(ConnectWebAdress):
def typeName(self):
return "Blog"
def iconPath(self):
return os.path.join(pluginPath, "icons", "blog.svg")
class ConnectDocumentation(ConnectWebAdress):
def typeName(self):
return "Documentation"
def iconPath(self):
return os.path.join(pluginPath, "icons", "doc.svg")
class ConnectDiscussion(ConnectWebAdress):
def typeName(self):
return "Discussion"
class ConnectOther(ConnectWebAdress):
def typeName(self):
return "Other"
class ConnectLesson(ConnectContent):
def typeName(self):
return "Lesson"
def iconPath(self):
return os.path.join(pluginPath, "icons", "howto.svg")
def _open(self):
if LESSONS_PLUGIN_NAME not in qgsutils.available_plugins:
iface.messageBar().pushMessage(
"Cannot install lessons",
"Lessons plugin is not installed",
QgsMessageBar.WARNING)
elif LESSONS_PLUGIN_NAME not in qgsutils.active_plugins:
iface.messageBar().pushMessage(
"Cannot install lessons",
"Lessons plugin is not active",
QgsMessageBar.WARNING)
else:
self.downloadAndInstall()
def asHtmlEntry(self, roles):
canInstall = "Green" if self.canOpen(roles) else "Orange"
desc = self.description
if len(self.description) < 100:
desc = self.description + " " * (100-len(self.description))
s = """<div class="icon"><div class="icon-container">
<img src="{image}"></div></div>
<div class="description"><h2>{title}</h2><p>{description}</p>
<a class="btn{available}" href="{url}">INSTALL</a>
</div>
""".format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
title=self.name,
description=desc,
available=canInstall,
url=self.url)
return s
def downloadAndInstall(self):
QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
url = QUrl(self.url)
self.request = QNetworkRequest(url)
self.reply = QgsNetworkAccessManager.instance().get(self.request)
self.reply.finished.connect(self.requestFinished)
def requestFinished(self):
if self.reply.error() != QNetworkReply.NoError:
QApplication.restoreOverrideCursor()
iface.messageBar().pushMessage(
"Lessons could not be installed:\n",
self.reply.errorString(),
QgsMessageBar.WARNING)
self.reply.deleteLater()
return
f = QFile(tempFilenameInTempFolder(os.path.basename(self.url).split(".")[0]))
f.open(QFile.WriteOnly)
f.write(self.reply.readAll())
f.close()
self.reply.deleteLater()
from lessons import installLessonsFromZipFile
installLessonsFromZipFile(f.fileName())
QApplication.restoreOverrideCursor()
iface.messageBar().pushMessage(
"Completed",
"Lessons were correctly installed",
QgsMessageBar.INFO)
class ConnectPlugin(ConnectContent):
def __init__(self, plugin, roles):
self.plugin = plugin
self.name = plugin["name"]
self.description = re.sub("<p>This plugin is available.*?access</a></p>", "", plugin["description"])
self.url = plugin["download_url"]
self.roles = roles
def typeName(self):
return "Plugin"
def iconPath(self):
return os.path.join(pluginPath, "icons", "plugin.svg")
def asHtmlEntry(self, roles):
canInstall = "Green" if self.canOpen(roles) else "Orange"
s = """<div class="icon"><div class="icon-container">
<img src="{image}"></div></div>
<div class="description"><h2>{title}</h2><p>{description}</p>
<a class="btn{available}" href="{url}">INSTALL</a>
</div>
""".format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
title=self.name,
description=self.description,
available=canInstall,
url=self.url
)
return s
def _open(self):
if self.plugin["status"] == "upgradeable":
reply = QMessageBox.question(
iface.mainWindow(),
"Plugin",
"An older version of the plugin is already installed. Do you want to upgrade it?",
QMessageBox.Yes | QMessageBox.No)
if reply != QMessageBox.Yes:
return
elif self.plugin["status"] in ["not installed", "new"]:
pass
else:
reply = QMessageBox.question(
iface.mainWindow(),
"Plugin",
"The plugin is already installed. Do you want to reinstall it?",
QMessageBox.Yes | QMessageBox.No)
if reply != QMessageBox.Yes:
return
def _install():
installer = pyplugin_installer.instance()
installer.installPlugin(self.plugin["id"])
self.plugin["status"] = "installed"
execute(_install)
class ConnectBasemap(ConnectContent):
def __init__(self, url, name, description, json, roles=["open"]):
self.url = url
self.name = name
self.description = description
self.roles = roles
self.json = json
def typeName(self):
return "Basemap"
def iconPath(self):
return os.path.join(pluginPath, "icons", "map.svg")
def asHtmlEntry(self, roles):
canInstall = "Green" if self.canOpen(roles) else "Orange"
s = """<div class="icon"><div class="icon-container">
<img src="{image}"></div></div>
<div class="description"><h2>{title}</h2><p>{description}</p>
<a class="btn{available}" href="canvas{url}">ADD TO MAP</a>
<a class="btn{available}" href="project{url}">ADD TO DEFAULT PROJECT</a>
</div>
""".format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
title=self.name,
description=self.description,
available=canInstall,
url=self.url
)
return s
def addToCanvas(self, roles):
if self.canOpen(roles):
if not oauth2_supported:
iface.messageBar().pushMessage(
"Cannot load basemap",
"OAuth support is not available",
QgsMessageBar.WARNING)
else:
authcfg = get_oauth_authcfg()
if authcfg is None:
iface.messageBar().pushMessage(
"Cannot load basemap",
"Cannot find a valid authentication configuration",
QgsMessageBar.WARNING)
else:
authId = authcfg.id()
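                    # build an XYZ tile layer whose requests authenticate via the stored OAuth2 config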
layer = QgsRasterLayer('authcfg={authcfg}&type=xyz&url={url}'.format(url=urllib2.quote("{}?version={}".format(self.url, pluginSetting("apiVersion"))),
authcfg=authId), self.name, "wms")
if layer.isValid():
QgsMapLayerRegistry.instance().addMapLayer(layer)
else:
iface.messageBar().pushMessage(
"Cannot load basemap",
"Cannot create basemap layer",
QgsMessageBar.WARNING)
else:
webbrowser.open_new(SUBSCRIBE_URL)
def addToDefaultProject(self, roles):
if self.canOpen(roles):
if not oauth2_supported:
iface.messageBar().pushMessage(
"Cannot add basemap",
"OAuth support is not available",
QgsMessageBar.WARNING)
else:
authcfg = get_oauth_authcfg()
if authcfg is None:
iface.messageBar().pushMessage(
"Cannot add basemap",
"Cannot find a valid authentication configuration",
QgsMessageBar.WARNING)
else:
authId = authcfg.id()
if not basemaputils.createOrAddDefaultBasemap([self.json], [self.name], authId):
iface.messageBar().pushMessage(
"Cannot add basemap",
"Cannot update or create default project",
QgsMessageBar.WARNING)
else:
iface.messageBar().pushMessage(
"Base map added",
"Base map correctly added to default project.",
QgsMessageBar.INFO)
else:
webbrowser.open_new(SUBSCRIBE_URL)
categories = {"LC": (ConnectLearning, "Learning"),
"DOC": (ConnectDocumentation, "Documentation"),
"BLOG": (ConnectBlog, "Blog"),
"QA": (ConnectQA, "Q & A"),
"LESSON": (ConnectLesson, "Lesson")
}
_plugins = {}
def loadPlugins():
global _plugins
_plugins = {}
installer = pyplugin_installer.instance()
installer.fetchAvailablePlugins(True)
for name in plugins.all():
plugin = plugins.all()[name]
if utils.isBoundlessPlugin(plugin) and name not in ["boundlessconnect"]:
_plugins[plugin["name"]] = copy(plugin)
def search(text, category='', page=0, token=None):
if text != '':
text = '&q=' + text
searchUrl = "{}/search/?version={}".format(pluginSetting("connectEndpoint"), pluginSetting("apiVersion"))
else:
searchUrl = "{}/search/matchAll?version={}".format(pluginSetting("connectEndpoint"), pluginSetting("apiVersion"))
headers = {}
headers["Authorization"] = "Bearer {}".format(token)
nam = NetworkAccessManager()
if category == '':
res, content = nam.request("{}{}&si={}&c={}".format(searchUrl, text, int(page), RESULTS_PER_PAGE), headers=headers)
else:
res, content = nam.request("{}{}&cat={}&si={}&c={}".format(searchUrl, text, category, int(page), RESULTS_PER_PAGE), headers=headers)
j = json.loads(re.sub(r'[^\x00-\x7f]',r'', content))
results = []
for element in j["features"]:
props = element["properties"]
roles = props["role"].split(",")
category = props["category"]
if category != "PLUG":
title = props["title"] or props["description"].split(".")[0]
if category in categories:
results.append(categories[category][0](props["url"].replace("\n", ""),
title,
props["description"],
roles))
else:
plugin = _plugins.get(props["title"], None)
if plugin:
results.append(ConnectPlugin(plugin, roles))
return results
def findAll(text, category, token):
page = 0
results = []
data = search(text, category, page, token)
results = data
while len(data) == RESULTS_PER_PAGE:
page += 1
data = search(text, category, page, token)
results.extend(data)
return results
def searchBasemaps(text, token):
searchUrl = "{}/basemaps?version={}".format(pluginSetting("connectEndpoint"), pluginSetting("apiVersion"))
headers = {}
headers["Authorization"] = "Bearer {}".format(token)
nam = NetworkAccessManager()
res, content = nam.request(searchUrl, headers=headers)
try:
j = json.loads(content)
except:
raise Exception("Unable to parse server reply.")
maps = [l for l in j if basemaputils.isSupported(l)]
results = []
if text == '':
for item in maps:
results.append(
ConnectBasemap(item["endpoint"],
item["name"],
item["description"],
item,
item["accessList"]))
else:
for item in maps:
if text.lower() in item["name"].lower() or text.lower() in item["description"].lower():
results.append(
ConnectBasemap(item["endpoint"],
item["name"],
item["description"],
item,
item["accessList"]))
return results
token = None
def getToken(login, password):
global token
if token:
return token
token = None
payload = {"username": login,
"password": password}
headers = {}
headers["Content-Type"] = "application/json"
url = "{}/token?version={}".format(pluginSetting("connectEndpoint"), pluginSetting("apiVersion"))
nam = NetworkAccessManager()
try:
res, data = nam.request(url, method="POST", body=json.dumps(payload), headers=headers)
except Exception as e:
return token
try:
responce = json.loads(str(data))
token = responce["token"]
except:
pass
return token
def resetToken():
global token
token = None
|
gpl-2.0
| 3,266,682,857,822,858,000 | 33.19596 | 170 | 0.555858 | false |
xxn59/weat
|
app/views.py
|
1
|
14710
|
from flask import render_template, flash, redirect, session, url_for, request, g, make_response
from flask.ext.login import login_user, logout_user, current_user, login_required
from . import db, lm
from . import app
from .forms import LoginForm, SignupForm, FoodForm, ChangePasswordForm, AddFoodForm, EditForm
from datetime import datetime, date, time, timedelta
from .models import User, Food, Salad, Order
# food_list = []
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
@app.before_request
def before_request():
g.user = current_user
@app.errorhandler(404)
def not_found_error(error):
return render_template('404.html'), 404
@app.errorhandler(500)
def internal_error(error):
db.session.rollback()
return render_template('500.html'), 500
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
@app.route('/index/<int:page>', methods=['GET', 'POST'])
@login_required
def index(page=1):
user = g.user
orders = Order.query.filter().order_by(Order.timestamp.desc())
if request.method == 'POST':
# print request.form.values
copy_id = request.form.get('copy', None)
if copy_id is not None:
copy_salad = Salad.query.get(copy_id)
new_salad = Salad(foods=copy_salad.foods, price=copy_salad.price)
db.session.add(new_salad)
new_order = Order(cos_id=user.id, status=9, price=new_salad.price, timestamp=datetime.utcnow())
new_order.salads.append(new_salad)
db.session.add(new_order)
db.session.commit()
return redirect(url_for('order_review', source='copy'))
return render_template('index.html',
title='We eat together!',
user=user,
orders=orders)
@app.route('/signup', methods=['GET', 'POST'])
def signup():
form = SignupForm()
if form.validate_on_submit():
# session['remember_me'] = form.remember_me.data
# print 'on submit'
# print 'form.nickname:', form1.nickname
user = User.query.filter_by(nickname=form.nickname.data).first()
if user is None:
# print 'new nickname,adding to db'
user = User(nickname=form.nickname.data,
floor=form.floor.data,
group=form.group.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
return redirect(request.args.get('next') or url_for('index'))
else:
# print 'nickname exist:', user.nickname
            flash('User %s exists.' % form.nickname.data)
# return redirect(request.args.get('next') or url_for('index'))
# remember_me = False
# return oid.try_login(form.openid.data, ask_for=['nickname', 'email'])
return render_template('signup.html',
title='Sign Up for Weat!',
form=form)
@app.route('/login', methods=['GET', 'POST'])
def login():
if g.user is not None and g.user.is_authenticated:
# print 'user valid:', g.user
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
session['remember_me'] = form.remember_me.data
# print 'on submit'
# print 'form.nickname:',form.nickname
user = User.query.filter_by(nickname=form.nickname.data).first()
# print 'filtering nickname'
if user is None:
# print 'nickname none'
flash('The nickname is not registered.')
# return redirect(url_for('signup'))
# user = User(nickname=form.nickname.data, floor=form.floor.data)
# db.session.add(user)
# db.session.commit()
# return redirect(url_for('signup'))
else:
if user.is_admin():
pass
# flash('please enter the PASSWORD')
# return redirect(url_for('login_admin'))
# print 'nickname exist:', user.nickname
login_user(user, remember=session['remember_me'])
return redirect(request.args.get('next') or url_for('index'))
# remember_me = False
# return oid.try_login(form.openid.data, ask_for=['nickname', 'email'])
return render_template('login.html',
title='Sign In',
form=form)
@app.route('/login_admin', methods=['GET', 'POST'])
def login_admin():
form = LoginForm()
return render_template('login_admin.html',
title='Sign In',
form=form)
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
@app.route('/user/<nickname>')
@app.route('/user/<nickname>/<int:page>')
@login_required
def user(nickname, page=1):
user = User.query.filter_by(nickname=nickname).first()
if user is None:
# print 'user is none in /user/profile'
flash('User %s not found.' % nickname)
return redirect(url_for('index'))
# print 'user:', user.nickname
return render_template('user.html',
user=user)
from models import food_category
@app.route('/food_add', methods=['GET', 'POST'])
@login_required
def food_add():
user = g.user
if user.level < 3:
return redirect(url_for('index'))
form = FoodForm()
form.cat.choices = [(f, f) for f in food_category]
foods = Food.query.all()
if form.validate_on_submit():
# print 'food add commit'
food = Food.query.filter_by(name=form.name.data).first()
if food is None:
food = Food(name=form.name.data, price=form.price.data, cat=form.cat.data)
db.session.add(food)
db.session.commit()
flash('add food %s succeed!' % food.name)
# print 'food added:', food.name
return redirect(url_for('food_add'))
else:
# print 'food exists:', food.name
flash('this food is already included.')
return render_template('food_add.html',
title='Add new food',
form=form,
foods=foods)
@app.route('/order_add', methods=['GET', 'POST'])
@login_required
def order_add():
user = g.user
form = AddFoodForm()
foods = Food.query.filter(Food.cat != 'new_arrival').order_by(Food.price)
foods_new = Food.query.filter(Food.cat == 'new_arrival').order_by(Food.price)
if request.method == 'POST':
# print request.form.values
done = request.form.get('over', None)
# print done
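        # "7963" appears to be a sentinel value posted by the order form when the user finishes ordering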
if done == "7963":
# print 'yes,done=7963'
# meal = request.form.get('meal', None)
# if meal is None:
# flash('please choose which meal you want to order')
# return redirect(url_for('order_add'))
submit_order = Order.query.filter_by(cos_id=user.id, status=1).first()
if submit_order is None:
flash('no unconfirmed order to submit ')
return redirect(url_for('order_add'))
submit_salad = Salad.query.filter_by(order_id=submit_order.id, status=1).first()
if submit_salad is None:
flash('no incomplete salad to submit')
return redirect(url_for('order_add'))
for f in submit_salad.foods:
submit_salad.price = submit_salad.price + f.price
submit_salad.price += 4
if submit_salad.price < 25:
flash('price < 25, please add something more~')
return redirect(url_for('order_add'))
for s in submit_order.salads:
submit_order.price = submit_order.price + s.price
submit_order.status = 2
submit_salad.status = 2
submit_order.timestamp = datetime.utcnow()
# print 'db commit'
db.session.commit()
# user.add_order(new_order)
return redirect(url_for('order_review', source='new'))
click_id = request.form.get('add', None)
if click_id is None:
# print 'no click'
pass
else:
# print 'click_id:', click_id
new_order = Order.query.filter_by(cos_id=user.id, status=1).first()
if new_order is None:
new_order = Order(cos_id=user.id, status=1)
db.session.add(new_order)
# print 'added new order'
new_salad = Salad.query.filter_by(order_id=new_order.id, status=1).first()
if new_salad is None:
new_salad = Salad(order_id=new_order.id, status=1)
db.session.add(new_salad)
# print 'added new salad'
food = Food.query.get(click_id)
new_salad.foods.append(food)
db.session.commit()
resp = make_response('', 204)
return resp
# db.session.commit()
# print 'food_list:', food_list
# new_salad.add_food(food)
# db.session.commit()
if form.validate_on_submit():
print 'here'
# if form.remove_id.data is not None and form.remove_id.data != 9999:
# print 'remove id:', form.remove_id.data
# food1 = foods.query.filter_by(id=form.remove_id.data)
# if food1 is None:
# print 'delete error:', form.remove_id.data
# else:
# db.delete(food1)
# print 'food deleted:', food1.name
# db.commit()
#
return render_template('order_add.html',
title='add new order',
form=form,
foods=foods,
foods_new=foods_new)
@app.route('/order_review/<source>', methods=['GET', 'POST'])
@login_required
def order_review(source):
user = g.user
if source == 'copy':
new_orders = Order.query.filter_by(cos_id=user.id, status=9)
if source == 'new':
new_orders = Order.query.filter_by(cos_id=user.id, status=2)
# print 'this is from newing order'
if new_orders is not None:
# print new_order.id
if request.method == 'POST':
confirm = request.form.get('confirm', None)
remove = request.form.get('remove_order', None)
if confirm is not None:
meal = request.form.get('meal', None)
if meal is None:
flash('please choose which meal you want to order')
return redirect(url_for('order_review', source=source))
new_order = Order.query.get(confirm)
new_order.status = 3
new_order.timestamp = datetime.utcnow()
# # new_order.status = 3
new_order.meal = meal
db.session.commit()
return redirect(url_for('orders'))
if remove is not None:
remove_order = Order.query.get(remove)
db.session.delete(remove_order)
for s in remove_order.salads:
db.session.delete(s)
db.session.commit()
return redirect(url_for('order_review',source='new'))
return render_template('order_review.html',
title='add new order',
orders=new_orders)
@app.route('/orders', methods=['GET', 'POST'])
@login_required
def orders():
user = g.user
if user.level < 3:
orders = Order.query.filter_by(cos_id=user.id)
o = orders.first()
if o is None:
order_len = 0
else:
order_len = 1
print order_len
if request.method == 'POST':
btn = request.form.get('remove', None)
if btn is not None:
print btn
del_order = Order.query.get(btn)
print del_order.cos_id
user.del_order(del_order)
# db.session.remove(del_order)
db.session.commit()
return redirect(url_for('orders'))
else:
print 'btn is none'
return render_template('orders.html',
title='My Orders',
user=user,
orders=orders,
len=order_len)
return redirect(url_for('orders_all'))
@app.route('/orders_all', methods=['GET', 'POST'])
@login_required
def orders_all():
user = g.user
if user.level >= 3:
dateNow = datetime.utcnow().date()
# timeNow = datetime.utcnow().time()
time_z = time(0, 0)
# dinner_end = time(19, 0)
query_begin = datetime.combine(dateNow, time_z) - timedelta(days=1)
# query_end = datetime.combine(dateNow, time_z)
orders = Order.query.all()
orders_lunch = Order.query.filter(Order.status == 3, Order.meal == 'lunch')
orders_dinner = Order.query.filter(Order.timestamp.between(query_begin, datetime.utcnow()), Order.status == 3,
Order.meal == 'dinner')
return render_template('orders_all.html',
title='All Orders',
user=user,
orders_lunch=orders_lunch,
orders_dinner=orders_dinner
)
return redirect(url_for('orders'))
@app.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
form = ChangePasswordForm()
return render_template("change_password.html", form=form)
@app.route('/user_edit', methods=['GET', 'POST'])
@login_required
def user_edit():
user = g.user
form = EditForm()
    if form.validate_on_submit():
        user.floor = form.floor.data
        user.group = form.group.data
        db.session.commit()
        return redirect(url_for('user', nickname=user.nickname))
    # pre-fill the form with the user's current values (GET or failed validation)
    form.floor.data = user.floor
    form.group.data = user.group
return render_template("user_edit.html", user=user, form=form)
@app.route('/users', methods=['GET', 'POST'])
@login_required
def users():
user = g.user
if user.level < 5:
return redirect(url_for('index'))
users_all = User.query.all()
return render_template("users.html", users=users_all)
@app.route('/pay', methods=['GET', 'POST'])
def pay():
return render_template("pay_test.html")
@app.route('/about_me', methods=['GET', 'POST'])
def about_me():
return render_template("about_me.html")
|
mit
| -8,336,185,183,291,548,000 | 34.617433 | 118 | 0.547927 | false |
wmealem/VerbTrainer
|
languages/spanish.py
|
1
|
8117
|
# Spanish verb conjugations
from collections import namedtuple, OrderedDict
# Spanish has two forms of the sps familiar - 'tú' and 'vos'
SpanishCategory = namedtuple('SpanishCategory', 'fps sps spsv tps fpp spp tpp')
_PRONOUNS = SpanishCategory('yo', 'tú', 'vos', 'él/ella/usted',
'nosotros/nosotras', 'vosotros/vosotras',
'ellos/ellas/ustedes')
_STD_FORMAT = '{} {}'
_STD_CLOZE_FORMAT = '{0} {{{{c1::{1}::{2}, {3}}}}}'
_TENSES =\
[# tiempos simples
'presente',
'pretérito imperfecto',
'pretérito indefinido',
'futuro simple',
# tiempos compuestos
'pretérito perfecto',
'pretérito pluscuamperfecto',
'pretérito anterior',
'futuro compuesto',
# condicional
'condicional simple',
'condicional compuesto',
# imperativo
'imperativo positivo',
'imperativo negativo',
# subjuntivo - tiempos simples
'presente de subjuntivo',
'imperfecto de subjuntivo(-ra)',
 'imperfecto de subjuntivo(-se)',
 'futuro simple de subjuntivo',
# subjuntivo - timepos compuestos
'pretérito perfecto de subjuntivo',
'pluscuamperfecto de subjuntivo',
'futuro compuesto de subjuntivo'
]
# Endings for the simple tenses
_ENDINGS =\
{'ar':
{'presente': SpanishCategory('o', 'as', 'ás', 'a',
'amos', 'áis', 'an'),
'pretérito imperfecto': SpanishCategory('aba', 'abas', 'abas', 'aba',
'ábamos', 'abais', 'aban'),
'futuro simple': SpanishCategory('é', 'ás', 'ás', 'á',
'emos', 'éis', 'án'),
'pretérito indefinido': SpanishCategory('é', 'aste(s)', 'aste(s)', 'ó',
'amos', 'asteis', 'aron')
},
'er':
{'presente': SpanishCategory('o', 'es', 'és', 'e',
'emos', 'éis', 'en'),
'pretérito imperfecto': SpanishCategory('ía', 'ías', 'ías', 'ía',
'íamos', 'íais', 'ían'),
'futuro simple': SpanishCategory('é', 'ás', 'ás', 'á',
'emos', 'éis', 'án'),
'pretérito indefinido': SpanishCategory('í', 'iste(s)', 'iste(s)','ió',
'imos', 'isteis', 'ieron')
},
'ir':
{'presente': SpanishCategory('o', 'es', 'ís', 'e',
'imos', 'ís', 'en'),
'pretérito imperfecto': SpanishCategory('ía', 'ías', 'ías', 'ía',
'íamos', 'íais', 'ían'),
'futuro simple': SpanishCategory('é', 'ás', 'ás', 'á',
'emos', 'éis', 'án'),
'pretérito indefinido': SpanishCategory('í', 'iste(s)', 'iste(s)', 'ió',
'imos', 'isteis', 'ieron')
}
}
# logic for adjusting the stem of the verb for the case
_STEM_RULES =\
{'presente': (lambda x: x[:-2]),
'pretérito imperfecto': (lambda x: x[:-2]),
'futuro simple': (lambda x: x),
'pretérito indefinido': (lambda x: x[:-2])
}
def construct_stem_and_ending(infinitive, tense):
if tense in ['pretérito perfecto']:
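        # compound tense: conjugated auxiliary "haber" + past participle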
past_participle = _construct_past_participle(infinitive)
inflection = ['{} {}'.format(aux, past_participle)
for aux in AUX_VERB['haber']['presente']]
else:
stem = _STEM_RULES[tense](infinitive)
verb_type = infinitive[-2:]
endings = _ENDINGS[verb_type][tense]
inflection = [stem + end for end in endings]
return SpanishCategory._make(inflection)
def _construct_past_participle(infinitive):
'''
Given an infinitive, returns the past participle for
the given verb
'''
ending = infinitive[-2:]
stem = infinitive[:-2]
if ending == 'ar':
return stem + 'ado'
elif ending == 'er':
return stem + 'ido'
elif ending == 'ir':
return stem + 'ido'
else:
raise ValueError('parameter not a verb infinitive')
def construct_inflection(infinitive, tense):
'''
Given an infinitive and tense, constructs the combined
stem and ending, and then prepends the appropriate pronoun
'''
stem_and_ending = construct_stem_and_ending(infinitive, tense)
return SpanishCategory._make([item for item in zip(_PRONOUNS,
stem_and_ending)])
def output_normal_view(infinitive, tense, conj):
'''
Pretty-printing for the traditional two-column output
of a verb conjugation
'''
return ['{}, {}:'.format(infinitive, tense),
('⎯'*45), '{:<25}‖ {}'.format(_STD_FORMAT.format(*conj.fps),
_STD_FORMAT.format(*conj.fpp)),
'{:<25}‖ {}'.format(_STD_FORMAT.format(*conj.sps),
_STD_FORMAT.format(*conj.spp)),
'{:<25}‖'.format(_STD_FORMAT.format(*conj.spsv)),
'{:<25}‖ {}'.format(_STD_FORMAT.format(*conj.tps),
_STD_FORMAT. format(*conj.tpp))]
def output_cloze(infinitive, tense, conj):
'''
Combines the different parts of a verb conjugation with
Anki's required formatting to produce a form suitable
for a cloze-deletion card
'''
result = []
# TODO - make this pythonic, it's an ugly hack as it is
for i, item in enumerate(conj):
result.append(_STD_CLOZE_FORMAT.format(item[0], item[1],
infinitive, tense))
return SpanishCategory._make(result)
def output_cloze_import(infinitive, tense, translation, sound, conj):
'''
Combines the output of the output_cloze function with optional
translation and sound fields and combines them to produce the
format required for Anki's import function
'''
cloze = output_cloze(infinitive, tense, conj)
if translation:
add_trn = [cz + ('|{}'.format(trn)) for cz, trn in
zip(cloze, translation)]
else:
add_trn = [cz + '|' for cz in cloze]
if sound:
add_snd = [trn + ('|[sound:{}]'.format(snd)) for
trn, snd in zip(add_trn, sound)]
else:
add_snd = [trn + '|' for trn in add_trn]
add_tag = [snd + ('|{}'.format(infinitive)) for snd in add_snd]
return SpanishCategory._make(add_tag)
AUX_VERB = {'haber':
{'presente':
SpanishCategory._make(['he', 'has', 'has', 'ha',
'hemos', 'habéis', 'han']),
'pretérito imperfecto':
                 SpanishCategory._make(['había', 'habías', 'habías', 'había',
                                        'habíamos', 'habíais', 'habían']),
'pretérito indefinido':
SpanishCategory._make(['hube', 'hubiste(s)', 'hubiste(s)', 'hubo',
'hubimos', 'hubisteis', 'hubieron']),
'futuro simple':
SpanishCategory._make(['habré', 'habrás', 'habrás', 'habrá',
'habremos', 'habréis', 'habrán']),
'condicional simple':
SpanishCategory._make(['habría', 'habrías', 'habrías', 'habría',
'habríamos', 'habríais', 'habrían']),
'presente de subjuntivo':
SpanishCategory._make(['haya', 'hayas', 'hayas', 'haya',
'hayamos', 'hayáis', 'hayan']),
'imperfecto de subjuntivo(-ra)':
SpanishCategory._make(['hubiera', 'hubieras', 'hubieras', 'hubiera',
'hubiéramos', 'hubierais', 'hubieran']),
'imperfecto de subjuntivo(-se)':
SpanishCategory._make(['hubiese', 'hubieses', 'hubieses', 'hubiese',
'hubiésemos', 'hubieseis', 'hubiesen'])}}
|
mit
| 232,128,548,254,710,900 | 38.880597 | 81 | 0.513473 | false |
ajponte/yelpML
|
maps/tests/7.py
|
1
|
4726
|
test = {
'name': 'Problem 7',
'points': 3,
'suites': [
{
'cases': [
{
'answer': '7b94a2861b435311f9fceeb5e6f092c4',
'choices': [
'the restaurants in restaurants',
'the names of restaurants in restaurants',
'the extracted values for each restaurant in restaurants',
'the restaurants reviewed by user'
],
'hidden': False,
'locked': True,
'question': 'What does the list xs represent?'
},
{
'answer': '901ae86eb8ae688b7a7ca7c1f77cab35',
'choices': [
'the ratings for the restaurants reviewed by user',
'the ratings for the restaurants in restaurants',
'the names for the restaurants reviewed by user',
'the names for the restaurants in restaurants'
],
'hidden': False,
'locked': True,
'question': 'What does the list ys represent?'
}
],
'scored': False,
'type': 'concept'
},
{
'cases': [
{
'code': r"""
>>> user = make_user('John D.', [
... make_review('A', 1),
... make_review('B', 5),
... make_review('C', 2),
... make_review('D', 2.5),
... ])
>>> restaurant = make_restaurant('New', [-10, 2], [], 2, [
... make_review('New', 4),
... ])
>>> cluster = [
... make_restaurant('B', [4, 2], [], 1, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 2], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3),
... ]),
... ]
>>> pred, r_squared = find_predictor(user, cluster, restaurant_price)
>>> round(pred(restaurant), 5)
4.0
>>> round(r_squared, 5)
1.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> user = make_user('John D.', [
... make_review('A', 1),
... make_review('B', 5),
... make_review('C', 2),
... make_review('D', 2.5),
... ])
>>> restaurant = make_restaurant('New', [-10, 2], [], 2, [
... make_review('New', 4),
... ])
>>> cluster = [
... make_restaurant('B', [4, 2], [], 1, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 2], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3),
... ]),
... ]
>>> pred, r_squared = find_predictor(user, cluster, restaurant_mean_rating)
>>> round(pred(restaurant), 5)
3.9359
>>> round(r_squared, 5)
0.99256
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> user = make_user('John D.', [
... make_review('A', 1),
... make_review('B', 5),
... make_review('C', 2),
... make_review('D', 2.5),
... ])
>>> restaurant = make_restaurant('New', [-10, 2], [], 2, [
... make_review('New', 4),
... ])
>>> cluster = [
... make_restaurant('B', [4, 2], [], 1, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 2], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3),
... ]),
... ]
>>> pred, r_squared = find_predictor(user, cluster, restaurant_num_ratings)
>>> round(pred(restaurant), 5)
3.5
>>> round(r_squared, 5)
0.12903
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> import tests.test_functions as test
>>> import recommend
>>> test.swap_implementations(recommend)
>>> from recommend import *
""",
'teardown': r"""
>>> test.restore_implementations(recommend)
""",
'type': 'doctest'
}
]
}
|
mit
| -1,079,751,825,662,691,100 | 31.156463 | 85 | 0.371985 | false |
dayatz/taiga-back
|
tests/integration/resources_permissions/test_projects_choices_resources.py
|
1
|
96198
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2017 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2017 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2017 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# Copyright (C) 2014-2017 Anler Hernández <hello@anler.me>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from taiga.projects import choices as project_choices
from taiga.projects import serializers
from taiga.users.serializers import RoleSerializer
from taiga.permissions.choices import MEMBERS_PERMISSIONS
from tests import factories as f
from tests.utils import helper_test_http_method
import pytest
pytestmark = pytest.mark.django_db
@pytest.fixture
def data():
m = type("Models", (object,), {})
m.registered_user = f.UserFactory.create()
m.project_member_with_perms = f.UserFactory.create()
m.project_member_without_perms = f.UserFactory.create()
m.project_owner = f.UserFactory.create()
m.other_user = f.UserFactory.create()
m.superuser = f.UserFactory.create(is_superuser=True)
m.public_project = f.ProjectFactory(is_private=False,
anon_permissions=['view_project'],
public_permissions=['view_project'],
owner=m.project_owner,
tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])
m.private_project1 = f.ProjectFactory(is_private=True,
anon_permissions=['view_project'],
public_permissions=['view_project'],
owner=m.project_owner,
tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])
m.private_project2 = f.ProjectFactory(is_private=True,
anon_permissions=[],
public_permissions=[],
owner=m.project_owner,
tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])
m.blocked_project = f.ProjectFactory(is_private=True,
anon_permissions=[],
public_permissions=[],
owner=m.project_owner,
blocked_code=project_choices.BLOCKED_BY_STAFF,
tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])
m.public_membership = f.MembershipFactory(project=m.public_project,
user=m.project_member_with_perms,
email=m.project_member_with_perms.email,
role__project=m.public_project,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
m.private_membership1 = f.MembershipFactory(project=m.private_project1,
user=m.project_member_with_perms,
email=m.project_member_with_perms.email,
role__project=m.private_project1,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=m.private_project1,
user=m.project_member_without_perms,
email=m.project_member_without_perms.email,
role__project=m.private_project1,
role__permissions=[])
m.private_membership2 = f.MembershipFactory(project=m.private_project2,
user=m.project_member_with_perms,
email=m.project_member_with_perms.email,
role__project=m.private_project2,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=m.private_project2,
user=m.project_member_without_perms,
email=m.project_member_without_perms.email,
role__project=m.private_project2,
role__permissions=[])
m.blocked_membership = f.MembershipFactory(project=m.blocked_project,
user=m.project_member_with_perms,
role__project=m.blocked_project,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=m.blocked_project,
user=m.project_member_without_perms,
role__project=m.blocked_project,
role__permissions=[])
f.MembershipFactory(project=m.public_project,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.private_project1,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.private_project2,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.blocked_project,
user=m.project_owner,
is_admin=True)
m.public_epic_status = f.EpicStatusFactory(project=m.public_project)
m.private_epic_status1 = f.EpicStatusFactory(project=m.private_project1)
m.private_epic_status2 = f.EpicStatusFactory(project=m.private_project2)
m.blocked_epic_status = f.EpicStatusFactory(project=m.blocked_project)
m.public_points = f.PointsFactory(project=m.public_project)
m.private_points1 = f.PointsFactory(project=m.private_project1)
m.private_points2 = f.PointsFactory(project=m.private_project2)
m.blocked_points = f.PointsFactory(project=m.blocked_project)
m.public_user_story_status = f.UserStoryStatusFactory(project=m.public_project)
m.private_user_story_status1 = f.UserStoryStatusFactory(project=m.private_project1)
m.private_user_story_status2 = f.UserStoryStatusFactory(project=m.private_project2)
m.blocked_user_story_status = f.UserStoryStatusFactory(project=m.blocked_project)
m.public_task_status = f.TaskStatusFactory(project=m.public_project)
m.private_task_status1 = f.TaskStatusFactory(project=m.private_project1)
m.private_task_status2 = f.TaskStatusFactory(project=m.private_project2)
m.blocked_task_status = f.TaskStatusFactory(project=m.blocked_project)
m.public_issue_status = f.IssueStatusFactory(project=m.public_project)
m.private_issue_status1 = f.IssueStatusFactory(project=m.private_project1)
m.private_issue_status2 = f.IssueStatusFactory(project=m.private_project2)
m.blocked_issue_status = f.IssueStatusFactory(project=m.blocked_project)
m.public_issue_type = f.IssueTypeFactory(project=m.public_project)
m.private_issue_type1 = f.IssueTypeFactory(project=m.private_project1)
m.private_issue_type2 = f.IssueTypeFactory(project=m.private_project2)
m.blocked_issue_type = f.IssueTypeFactory(project=m.blocked_project)
m.public_priority = f.PriorityFactory(project=m.public_project)
m.private_priority1 = f.PriorityFactory(project=m.private_project1)
m.private_priority2 = f.PriorityFactory(project=m.private_project2)
m.blocked_priority = f.PriorityFactory(project=m.blocked_project)
m.public_severity = f.SeverityFactory(project=m.public_project)
m.private_severity1 = f.SeverityFactory(project=m.private_project1)
m.private_severity2 = f.SeverityFactory(project=m.private_project2)
m.blocked_severity = f.SeverityFactory(project=m.blocked_project)
m.project_template = m.public_project.creation_template
return m
#####################################################
# Roles
#####################################################
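# Each test below builds the detail URL for the public, private1, private2 and blocked
# projects and issues the request as five profiles, in this order: anonymous (None),
# registered user, project member without permissions, project member with permissions
# and project owner. helper_test_http_method returns one status code per profile, which
# is compared against the expected list.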
def test_roles_retrieve(client, data):
public_url = reverse('roles-detail', kwargs={"pk": data.public_project.roles.all()[0].pk})
private1_url = reverse('roles-detail', kwargs={"pk": data.private_project1.roles.all()[0].pk})
private2_url = reverse('roles-detail', kwargs={"pk": data.private_project2.roles.all()[0].pk})
blocked_url = reverse('roles-detail', kwargs={"pk": data.blocked_project.roles.all()[0].pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_roles_update(client, data):
public_url = reverse('roles-detail', kwargs={"pk": data.public_project.roles.all()[0].pk})
private1_url = reverse('roles-detail', kwargs={"pk": data.private_project1.roles.all()[0].pk})
private2_url = reverse('roles-detail', kwargs={"pk": data.private_project2.roles.all()[0].pk})
blocked_url = reverse('roles-detail', kwargs={"pk": data.blocked_project.roles.all()[0].pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
role_data = RoleSerializer(data.public_project.roles.all()[0]).data
role_data["name"] = "test"
role_data = json.dumps(role_data)
results = helper_test_http_method(client, 'put', public_url, role_data, users)
assert results == [401, 403, 403, 403, 200]
role_data = RoleSerializer(data.private_project1.roles.all()[0]).data
role_data["name"] = "test"
role_data = json.dumps(role_data)
results = helper_test_http_method(client, 'put', private1_url, role_data, users)
assert results == [401, 403, 403, 403, 200]
role_data = RoleSerializer(data.private_project2.roles.all()[0]).data
role_data["name"] = "test"
role_data = json.dumps(role_data)
results = helper_test_http_method(client, 'put', private2_url, role_data, users)
assert results == [401, 403, 403, 403, 200]
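    # Writes against the blocked project are rejected even for the owner (451 instead of 200).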
role_data = RoleSerializer(data.blocked_project.roles.all()[0]).data
role_data["name"] = "test"
role_data = json.dumps(role_data)
results = helper_test_http_method(client, 'put', blocked_url, role_data, users)
assert results == [401, 403, 403, 403, 451]
def test_roles_delete(client, data):
public_url = reverse('roles-detail', kwargs={"pk": data.public_project.roles.all()[0].pk})
private1_url = reverse('roles-detail', kwargs={"pk": data.private_project1.roles.all()[0].pk})
private2_url = reverse('roles-detail', kwargs={"pk": data.private_project2.roles.all()[0].pk})
blocked_url = reverse('roles-detail', kwargs={"pk": data.blocked_project.roles.all()[0].pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
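    # Only the owner may delete (204); the blocked project responds with 451.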
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_roles_list(client, data):
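    # Anonymous users and non-members only see roles from the projects they can browse
    # (the public project and private_project1); members with permissions and the owner
    # also see private_project2 and the blocked project.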
url = reverse('roles-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 3
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 3
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 3
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 7
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 7
assert response.status_code == 200
def test_roles_patch(client, data):
public_url = reverse('roles-detail', kwargs={"pk": data.public_project.roles.all()[0].pk})
private1_url = reverse('roles-detail', kwargs={"pk": data.private_project1.roles.all()[0].pk})
private2_url = reverse('roles-detail', kwargs={"pk": data.private_project2.roles.all()[0].pk})
blocked_url = reverse('roles-detail', kwargs={"pk": data.blocked_project.roles.all()[0].pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Epic Status
#####################################################
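# From here on, every per-project choice model (statuses, points, types, priorities,
# severities) gets the same retrieve/update/delete/list/patch battery as above, plus a
# bulk-update-order action that only the project owner may use.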
def test_epic_status_retrieve(client, data):
public_url = reverse('epic-statuses-detail', kwargs={"pk": data.public_epic_status.pk})
private1_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status1.pk})
private2_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status2.pk})
blocked_url = reverse('epic-statuses-detail', kwargs={"pk": data.blocked_epic_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epic_status_update(client, data):
public_url = reverse('epic-statuses-detail', kwargs={"pk": data.public_epic_status.pk})
private1_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status1.pk})
private2_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status2.pk})
blocked_url = reverse('epic-statuses-detail', kwargs={"pk": data.blocked_epic_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
epic_status_data = serializers.EpicStatusSerializer(data.public_epic_status).data
epic_status_data["name"] = "test"
epic_status_data = json.dumps(epic_status_data)
results = helper_test_http_method(client, 'put', public_url, epic_status_data, users)
assert results == [401, 403, 403, 403, 200]
epic_status_data = serializers.EpicStatusSerializer(data.private_epic_status1).data
epic_status_data["name"] = "test"
epic_status_data = json.dumps(epic_status_data)
results = helper_test_http_method(client, 'put', private1_url, epic_status_data, users)
assert results == [401, 403, 403, 403, 200]
epic_status_data = serializers.EpicStatusSerializer(data.private_epic_status2).data
epic_status_data["name"] = "test"
epic_status_data = json.dumps(epic_status_data)
results = helper_test_http_method(client, 'put', private2_url, epic_status_data, users)
assert results == [401, 403, 403, 403, 200]
epic_status_data = serializers.EpicStatusSerializer(data.blocked_epic_status).data
epic_status_data["name"] = "test"
epic_status_data = json.dumps(epic_status_data)
results = helper_test_http_method(client, 'put', blocked_url, epic_status_data, users)
assert results == [401, 403, 403, 403, 451]
def test_epic_status_delete(client, data):
public_url = reverse('epic-statuses-detail', kwargs={"pk": data.public_epic_status.pk})
private1_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status1.pk})
private2_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status2.pk})
blocked_url = reverse('epic-statuses-detail', kwargs={"pk": data.blocked_epic_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_epic_status_list(client, data):
url = reverse('epic-statuses-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_epic_status_patch(client, data):
public_url = reverse('epic-statuses-detail', kwargs={"pk": data.public_epic_status.pk})
private1_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status1.pk})
private2_url = reverse('epic-statuses-detail', kwargs={"pk": data.private_epic_status2.pk})
blocked_url = reverse('epic-statuses-detail', kwargs={"pk": data.blocked_epic_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_epic_status_action_bulk_update_order(client, data):
url = reverse('epic-statuses-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
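    # "bulk_epic_statuses" carries (id, order) pairs; (1, 2) is only a placeholder here,
    # since the test cares about which profile is allowed to reorder, not the data itself.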
post_data = json.dumps({
"bulk_epic_statuses": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_epic_statuses": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_epic_statuses": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_epic_statuses": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Points
#####################################################
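# "Points" are the per-project story point choices; the permission matrix is identical.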
def test_points_retrieve(client, data):
public_url = reverse('points-detail', kwargs={"pk": data.public_points.pk})
private1_url = reverse('points-detail', kwargs={"pk": data.private_points1.pk})
private2_url = reverse('points-detail', kwargs={"pk": data.private_points2.pk})
blocked_url = reverse('points-detail', kwargs={"pk": data.blocked_points.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_points_update(client, data):
public_url = reverse('points-detail', kwargs={"pk": data.public_points.pk})
private1_url = reverse('points-detail', kwargs={"pk": data.private_points1.pk})
private2_url = reverse('points-detail', kwargs={"pk": data.private_points2.pk})
blocked_url = reverse('points-detail', kwargs={"pk": data.blocked_points.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
points_data = serializers.PointsSerializer(data.public_points).data
points_data["name"] = "test"
points_data = json.dumps(points_data)
results = helper_test_http_method(client, 'put', public_url, points_data, users)
assert results == [401, 403, 403, 403, 200]
points_data = serializers.PointsSerializer(data.private_points1).data
points_data["name"] = "test"
points_data = json.dumps(points_data)
results = helper_test_http_method(client, 'put', private1_url, points_data, users)
assert results == [401, 403, 403, 403, 200]
points_data = serializers.PointsSerializer(data.private_points2).data
points_data["name"] = "test"
points_data = json.dumps(points_data)
results = helper_test_http_method(client, 'put', private2_url, points_data, users)
assert results == [401, 403, 403, 403, 200]
points_data = serializers.PointsSerializer(data.blocked_points).data
points_data["name"] = "test"
points_data = json.dumps(points_data)
results = helper_test_http_method(client, 'put', blocked_url, points_data, users)
assert results == [401, 403, 403, 403, 451]
def test_points_delete(client, data):
public_url = reverse('points-detail', kwargs={"pk": data.public_points.pk})
private1_url = reverse('points-detail', kwargs={"pk": data.private_points1.pk})
private2_url = reverse('points-detail', kwargs={"pk": data.private_points2.pk})
blocked_url = reverse('points-detail', kwargs={"pk": data.blocked_points.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_points_list(client, data):
url = reverse('points-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_points_patch(client, data):
public_url = reverse('points-detail', kwargs={"pk": data.public_points.pk})
private1_url = reverse('points-detail', kwargs={"pk": data.private_points1.pk})
private2_url = reverse('points-detail', kwargs={"pk": data.private_points2.pk})
blocked_url = reverse('points-detail', kwargs={"pk": data.blocked_points.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_points_action_bulk_update_order(client, data):
url = reverse('points-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_points": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_points": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_points": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_points": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# User Story Status
#####################################################
def test_user_story_status_retrieve(client, data):
public_url = reverse('userstory-statuses-detail', kwargs={"pk": data.public_user_story_status.pk})
private1_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status1.pk})
private2_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status2.pk})
blocked_url = reverse('userstory-statuses-detail', kwargs={"pk": data.blocked_user_story_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_user_story_status_update(client, data):
public_url = reverse('userstory-statuses-detail', kwargs={"pk": data.public_user_story_status.pk})
private1_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status1.pk})
private2_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status2.pk})
blocked_url = reverse('userstory-statuses-detail', kwargs={"pk": data.blocked_user_story_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
user_story_status_data = serializers.UserStoryStatusSerializer(data.public_user_story_status).data
user_story_status_data["name"] = "test"
user_story_status_data = json.dumps(user_story_status_data)
results = helper_test_http_method(client, 'put', public_url, user_story_status_data, users)
assert results == [401, 403, 403, 403, 200]
user_story_status_data = serializers.UserStoryStatusSerializer(data.private_user_story_status1).data
user_story_status_data["name"] = "test"
user_story_status_data = json.dumps(user_story_status_data)
results = helper_test_http_method(client, 'put', private1_url, user_story_status_data, users)
assert results == [401, 403, 403, 403, 200]
user_story_status_data = serializers.UserStoryStatusSerializer(data.private_user_story_status2).data
user_story_status_data["name"] = "test"
user_story_status_data = json.dumps(user_story_status_data)
results = helper_test_http_method(client, 'put', private2_url, user_story_status_data, users)
assert results == [401, 403, 403, 403, 200]
user_story_status_data = serializers.UserStoryStatusSerializer(data.blocked_user_story_status).data
user_story_status_data["name"] = "test"
user_story_status_data = json.dumps(user_story_status_data)
results = helper_test_http_method(client, 'put', blocked_url, user_story_status_data, users)
assert results == [401, 403, 403, 403, 451]
def test_user_story_status_delete(client, data):
public_url = reverse('userstory-statuses-detail', kwargs={"pk": data.public_user_story_status.pk})
private1_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status1.pk})
private2_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status2.pk})
blocked_url = reverse('userstory-statuses-detail', kwargs={"pk": data.blocked_user_story_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_user_story_status_list(client, data):
url = reverse('userstory-statuses-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_user_story_status_patch(client, data):
public_url = reverse('userstory-statuses-detail', kwargs={"pk": data.public_user_story_status.pk})
private1_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status1.pk})
private2_url = reverse('userstory-statuses-detail', kwargs={"pk": data.private_user_story_status2.pk})
blocked_url = reverse('userstory-statuses-detail', kwargs={"pk": data.blocked_user_story_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_user_story_status_action_bulk_update_order(client, data):
url = reverse('userstory-statuses-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_userstory_statuses": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_userstory_statuses": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_userstory_statuses": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_userstory_statuses": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Task Status
#####################################################
def test_task_status_retrieve(client, data):
public_url = reverse('task-statuses-detail', kwargs={"pk": data.public_task_status.pk})
private1_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status1.pk})
private2_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status2.pk})
blocked_url = reverse('task-statuses-detail', kwargs={"pk": data.blocked_task_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_task_status_update(client, data):
public_url = reverse('task-statuses-detail', kwargs={"pk": data.public_task_status.pk})
private1_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status1.pk})
private2_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status2.pk})
blocked_url = reverse('task-statuses-detail', kwargs={"pk": data.blocked_task_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
task_status_data = serializers.TaskStatusSerializer(data.public_task_status).data
task_status_data["name"] = "test"
task_status_data = json.dumps(task_status_data)
results = helper_test_http_method(client, 'put', public_url, task_status_data, users)
assert results == [401, 403, 403, 403, 200]
task_status_data = serializers.TaskStatusSerializer(data.private_task_status1).data
task_status_data["name"] = "test"
task_status_data = json.dumps(task_status_data)
results = helper_test_http_method(client, 'put', private1_url, task_status_data, users)
assert results == [401, 403, 403, 403, 200]
task_status_data = serializers.TaskStatusSerializer(data.private_task_status2).data
task_status_data["name"] = "test"
task_status_data = json.dumps(task_status_data)
results = helper_test_http_method(client, 'put', private2_url, task_status_data, users)
assert results == [401, 403, 403, 403, 200]
task_status_data = serializers.TaskStatusSerializer(data.blocked_task_status).data
task_status_data["name"] = "test"
task_status_data = json.dumps(task_status_data)
results = helper_test_http_method(client, 'put', blocked_url, task_status_data, users)
assert results == [401, 403, 403, 403, 451]
def test_task_status_delete(client, data):
public_url = reverse('task-statuses-detail', kwargs={"pk": data.public_task_status.pk})
private1_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status1.pk})
private2_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status2.pk})
blocked_url = reverse('task-statuses-detail', kwargs={"pk": data.blocked_task_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_task_status_list(client, data):
url = reverse('task-statuses-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_task_status_patch(client, data):
public_url = reverse('task-statuses-detail', kwargs={"pk": data.public_task_status.pk})
private1_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status1.pk})
private2_url = reverse('task-statuses-detail', kwargs={"pk": data.private_task_status2.pk})
blocked_url = reverse('task-statuses-detail', kwargs={"pk": data.blocked_task_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_task_status_action_bulk_update_order(client, data):
url = reverse('task-statuses-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_task_statuses": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_task_statuses": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_task_statuses": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_task_statuses": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Issue Status
#####################################################
def test_issue_status_retrieve(client, data):
public_url = reverse('issue-statuses-detail', kwargs={"pk": data.public_issue_status.pk})
private1_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status1.pk})
private2_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status2.pk})
blocked_url = reverse('issue-statuses-detail', kwargs={"pk": data.blocked_issue_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_issue_status_update(client, data):
public_url = reverse('issue-statuses-detail', kwargs={"pk": data.public_issue_status.pk})
private1_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status1.pk})
private2_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status2.pk})
blocked_url = reverse('issue-statuses-detail', kwargs={"pk": data.blocked_issue_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
issue_status_data = serializers.IssueStatusSerializer(data.public_issue_status).data
issue_status_data["name"] = "test"
issue_status_data = json.dumps(issue_status_data)
results = helper_test_http_method(client, 'put', public_url, issue_status_data, users)
assert results == [401, 403, 403, 403, 200]
issue_status_data = serializers.IssueStatusSerializer(data.private_issue_status1).data
issue_status_data["name"] = "test"
issue_status_data = json.dumps(issue_status_data)
results = helper_test_http_method(client, 'put', private1_url, issue_status_data, users)
assert results == [401, 403, 403, 403, 200]
issue_status_data = serializers.IssueStatusSerializer(data.private_issue_status2).data
issue_status_data["name"] = "test"
issue_status_data = json.dumps(issue_status_data)
results = helper_test_http_method(client, 'put', private2_url, issue_status_data, users)
assert results == [401, 403, 403, 403, 200]
issue_status_data = serializers.IssueStatusSerializer(data.blocked_issue_status).data
issue_status_data["name"] = "test"
issue_status_data = json.dumps(issue_status_data)
results = helper_test_http_method(client, 'put', blocked_url, issue_status_data, users)
assert results == [401, 403, 403, 403, 451]
def test_issue_status_delete(client, data):
public_url = reverse('issue-statuses-detail', kwargs={"pk": data.public_issue_status.pk})
private1_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status1.pk})
private2_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status2.pk})
blocked_url = reverse('issue-statuses-detail', kwargs={"pk": data.blocked_issue_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_issue_status_list(client, data):
url = reverse('issue-statuses-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_issue_status_patch(client, data):
public_url = reverse('issue-statuses-detail', kwargs={"pk": data.public_issue_status.pk})
private1_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status1.pk})
private2_url = reverse('issue-statuses-detail', kwargs={"pk": data.private_issue_status2.pk})
blocked_url = reverse('issue-statuses-detail', kwargs={"pk": data.blocked_issue_status.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_issue_status_action_bulk_update_order(client, data):
url = reverse('issue-statuses-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_issue_statuses": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_issue_statuses": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_issue_statuses": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_issue_statuses": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Issue Type
#####################################################
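# Issue types, and the priorities and severities that follow, repeat the identical matrix.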
def test_issue_type_retrieve(client, data):
public_url = reverse('issue-types-detail', kwargs={"pk": data.public_issue_type.pk})
private1_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type1.pk})
private2_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type2.pk})
blocked_url = reverse('issue-types-detail', kwargs={"pk": data.blocked_issue_type.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_issue_type_update(client, data):
public_url = reverse('issue-types-detail', kwargs={"pk": data.public_issue_type.pk})
private1_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type1.pk})
private2_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type2.pk})
blocked_url = reverse('issue-types-detail', kwargs={"pk": data.blocked_issue_type.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
issue_type_data = serializers.IssueTypeSerializer(data.public_issue_type).data
issue_type_data["name"] = "test"
issue_type_data = json.dumps(issue_type_data)
results = helper_test_http_method(client, 'put', public_url, issue_type_data, users)
assert results == [401, 403, 403, 403, 200]
issue_type_data = serializers.IssueTypeSerializer(data.private_issue_type1).data
issue_type_data["name"] = "test"
issue_type_data = json.dumps(issue_type_data)
results = helper_test_http_method(client, 'put', private1_url, issue_type_data, users)
assert results == [401, 403, 403, 403, 200]
issue_type_data = serializers.IssueTypeSerializer(data.private_issue_type2).data
issue_type_data["name"] = "test"
issue_type_data = json.dumps(issue_type_data)
results = helper_test_http_method(client, 'put', private2_url, issue_type_data, users)
assert results == [401, 403, 403, 403, 200]
issue_type_data = serializers.IssueTypeSerializer(data.blocked_issue_type).data
issue_type_data["name"] = "test"
issue_type_data = json.dumps(issue_type_data)
results = helper_test_http_method(client, 'put', blocked_url, issue_type_data, users)
assert results == [401, 403, 403, 403, 451]
def test_issue_type_delete(client, data):
public_url = reverse('issue-types-detail', kwargs={"pk": data.public_issue_type.pk})
private1_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type1.pk})
private2_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type2.pk})
blocked_url = reverse('issue-types-detail', kwargs={"pk": data.blocked_issue_type.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_issue_type_list(client, data):
url = reverse('issue-types-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_issue_type_patch(client, data):
public_url = reverse('issue-types-detail', kwargs={"pk": data.public_issue_type.pk})
private1_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type1.pk})
private2_url = reverse('issue-types-detail', kwargs={"pk": data.private_issue_type2.pk})
blocked_url = reverse('issue-types-detail', kwargs={"pk": data.blocked_issue_type.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_issue_type_action_bulk_update_order(client, data):
url = reverse('issue-types-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_issue_types": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_issue_types": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_issue_types": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_issue_types": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Priority
#####################################################
def test_priority_retrieve(client, data):
public_url = reverse('priorities-detail', kwargs={"pk": data.public_priority.pk})
private1_url = reverse('priorities-detail', kwargs={"pk": data.private_priority1.pk})
private2_url = reverse('priorities-detail', kwargs={"pk": data.private_priority2.pk})
blocked_url = reverse('priorities-detail', kwargs={"pk": data.blocked_priority.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_priority_update(client, data):
public_url = reverse('priorities-detail', kwargs={"pk": data.public_priority.pk})
private1_url = reverse('priorities-detail', kwargs={"pk": data.private_priority1.pk})
private2_url = reverse('priorities-detail', kwargs={"pk": data.private_priority2.pk})
blocked_url = reverse('priorities-detail', kwargs={"pk": data.blocked_priority.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
priority_data = serializers.PrioritySerializer(data.public_priority).data
priority_data["name"] = "test"
priority_data = json.dumps(priority_data)
results = helper_test_http_method(client, 'put', public_url, priority_data, users)
assert results == [401, 403, 403, 403, 200]
priority_data = serializers.PrioritySerializer(data.private_priority1).data
priority_data["name"] = "test"
priority_data = json.dumps(priority_data)
results = helper_test_http_method(client, 'put', private1_url, priority_data, users)
assert results == [401, 403, 403, 403, 200]
priority_data = serializers.PrioritySerializer(data.private_priority2).data
priority_data["name"] = "test"
priority_data = json.dumps(priority_data)
results = helper_test_http_method(client, 'put', private2_url, priority_data, users)
assert results == [401, 403, 403, 403, 200]
priority_data = serializers.PrioritySerializer(data.blocked_priority).data
priority_data["name"] = "test"
priority_data = json.dumps(priority_data)
results = helper_test_http_method(client, 'put', blocked_url, priority_data, users)
assert results == [401, 403, 403, 403, 451]
def test_priority_delete(client, data):
public_url = reverse('priorities-detail', kwargs={"pk": data.public_priority.pk})
private1_url = reverse('priorities-detail', kwargs={"pk": data.private_priority1.pk})
private2_url = reverse('priorities-detail', kwargs={"pk": data.private_priority2.pk})
blocked_url = reverse('priorities-detail', kwargs={"pk": data.blocked_priority.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_priority_list(client, data):
url = reverse('priorities-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_priority_patch(client, data):
public_url = reverse('priorities-detail', kwargs={"pk": data.public_priority.pk})
private1_url = reverse('priorities-detail', kwargs={"pk": data.private_priority1.pk})
private2_url = reverse('priorities-detail', kwargs={"pk": data.private_priority2.pk})
blocked_url = reverse('priorities-detail', kwargs={"pk": data.blocked_priority.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_priority_action_bulk_update_order(client, data):
url = reverse('priorities-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_priorities": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_priorities": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_priorities": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_priorities": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Severity
#####################################################
def test_severity_retrieve(client, data):
public_url = reverse('severities-detail', kwargs={"pk": data.public_severity.pk})
private1_url = reverse('severities-detail', kwargs={"pk": data.private_severity1.pk})
private2_url = reverse('severities-detail', kwargs={"pk": data.private_severity2.pk})
blocked_url = reverse('severities-detail', kwargs={"pk": data.blocked_severity.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_severity_update(client, data):
public_url = reverse('severities-detail', kwargs={"pk": data.public_severity.pk})
private1_url = reverse('severities-detail', kwargs={"pk": data.private_severity1.pk})
private2_url = reverse('severities-detail', kwargs={"pk": data.private_severity2.pk})
blocked_url = reverse('severities-detail', kwargs={"pk": data.blocked_severity.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
severity_data = serializers.SeveritySerializer(data.public_severity).data
severity_data["name"] = "test"
severity_data = json.dumps(severity_data)
results = helper_test_http_method(client, 'put', public_url, severity_data, users)
assert results == [401, 403, 403, 403, 200]
severity_data = serializers.SeveritySerializer(data.private_severity1).data
severity_data["name"] = "test"
severity_data = json.dumps(severity_data)
results = helper_test_http_method(client, 'put', private1_url, severity_data, users)
assert results == [401, 403, 403, 403, 200]
severity_data = serializers.SeveritySerializer(data.private_severity2).data
severity_data["name"] = "test"
severity_data = json.dumps(severity_data)
results = helper_test_http_method(client, 'put', private2_url, severity_data, users)
assert results == [401, 403, 403, 403, 200]
severity_data = serializers.SeveritySerializer(data.blocked_severity).data
severity_data["name"] = "test"
severity_data = json.dumps(severity_data)
results = helper_test_http_method(client, 'put', blocked_url, severity_data, users)
assert results == [401, 403, 403, 403, 451]
def test_severity_delete(client, data):
public_url = reverse('severities-detail', kwargs={"pk": data.public_severity.pk})
private1_url = reverse('severities-detail', kwargs={"pk": data.private_severity1.pk})
private2_url = reverse('severities-detail', kwargs={"pk": data.private_severity2.pk})
blocked_url = reverse('severities-detail', kwargs={"pk": data.blocked_severity.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_severity_list(client, data):
url = reverse('severities-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 4
assert response.status_code == 200
def test_severity_patch(client, data):
public_url = reverse('severities-detail', kwargs={"pk": data.public_severity.pk})
private1_url = reverse('severities-detail', kwargs={"pk": data.private_severity1.pk})
private2_url = reverse('severities-detail', kwargs={"pk": data.private_severity2.pk})
blocked_url = reverse('severities-detail', kwargs={"pk": data.blocked_severity.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_severity_action_bulk_update_order(client, data):
url = reverse('severities-bulk-update-order')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"bulk_severities": [(1, 2)],
"project": data.public_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_severities": [(1, 2)],
"project": data.private_project1.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_severities": [(1, 2)],
"project": data.private_project2.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 204]
post_data = json.dumps({
"bulk_severities": [(1, 2)],
"project": data.blocked_project.pk
})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 451]
#####################################################
# Memberships
#####################################################
def test_membership_retrieve(client, data):
public_url = reverse('memberships-detail', kwargs={"pk": data.public_membership.pk})
private1_url = reverse('memberships-detail', kwargs={"pk": data.private_membership1.pk})
private2_url = reverse('memberships-detail', kwargs={"pk": data.private_membership2.pk})
blocked_url = reverse('memberships-detail', kwargs={"pk": data.blocked_membership.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private1_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private2_url, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_membership_update(client, data):
public_url = reverse('memberships-detail', kwargs={"pk": data.public_membership.pk})
private1_url = reverse('memberships-detail', kwargs={"pk": data.private_membership1.pk})
private2_url = reverse('memberships-detail', kwargs={"pk": data.private_membership2.pk})
blocked_url = reverse('memberships-detail', kwargs={"pk": data.blocked_membership.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
membership_data = serializers.MembershipSerializer(data.public_membership).data
membership_data["token"] = "test"
membership_data["username"] = data.public_membership.user.email
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'put', public_url, membership_data, users)
assert results == [401, 403, 403, 403, 200]
membership_data = serializers.MembershipSerializer(data.private_membership1).data
membership_data["token"] = "test"
membership_data["username"] = data.private_membership1.user.email
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'put', private1_url, membership_data, users)
assert results == [401, 403, 403, 403, 200]
membership_data = serializers.MembershipSerializer(data.private_membership2).data
membership_data["token"] = "test"
membership_data["username"] = data.private_membership2.user.email
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'put', private2_url, membership_data, users)
assert results == [401, 403, 403, 403, 200]
membership_data = serializers.MembershipSerializer(data.blocked_membership).data
membership_data["token"] = "test"
membership_data["username"] = data.blocked_membership.user.email
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'put', blocked_url, membership_data, users)
assert results == [401, 403, 403, 403, 451]
def test_membership_delete(client, data):
public_url = reverse('memberships-detail', kwargs={"pk": data.public_membership.pk})
private1_url = reverse('memberships-detail', kwargs={"pk": data.private_membership1.pk})
private2_url = reverse('memberships-detail', kwargs={"pk": data.private_membership2.pk})
blocked_url = reverse('memberships-detail', kwargs={"pk": data.blocked_membership.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private2_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 403, 451]
def test_membership_list(client, data):
url = reverse('memberships-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 5
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 5
assert response.status_code == 200
client.login(data.project_member_without_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 5
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 11
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 11
assert response.status_code == 200
def test_membership_patch(client, data):
public_url = reverse('memberships-detail', kwargs={"pk": data.public_membership.pk})
private1_url = reverse('memberships-detail', kwargs={"pk": data.private_membership1.pk})
private2_url = reverse('memberships-detail', kwargs={"pk": data.private_membership2.pk})
blocked_url = reverse('memberships-detail', kwargs={"pk": data.blocked_membership.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'patch', public_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private1_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', private2_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 200]
results = helper_test_http_method(client, 'patch', blocked_url, '{"name": "Test"}', users)
assert results == [401, 403, 403, 403, 451]
def test_membership_create(client, data):
url = reverse('memberships-list')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
membership_data = serializers.MembershipSerializer(data.public_membership).data
del(membership_data["id"])
del(membership_data["user"])
membership_data["username"] = "test1@test.com"
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'post', url, membership_data, users)
assert results == [401, 403, 403, 403, 201]
membership_data = serializers.MembershipSerializer(data.private_membership1).data
del(membership_data["id"])
del(membership_data["user"])
membership_data["username"] = "test2@test.com"
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'post', url, membership_data, users)
assert results == [401, 403, 403, 403, 201]
membership_data = serializers.MembershipSerializer(data.private_membership2).data
del(membership_data["id"])
del(membership_data["user"])
membership_data["username"] = "test3@test.com"
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'post', url, membership_data, users)
assert results == [401, 403, 403, 403, 201]
membership_data = serializers.MembershipSerializer(data.blocked_membership).data
del(membership_data["id"])
del(membership_data["user"])
membership_data["username"] = "test4@test.com"
membership_data = json.dumps(membership_data)
results = helper_test_http_method(client, 'post', url, membership_data, users)
assert results == [401, 403, 403, 403, 451]
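# Note on the create payloads above: they are built from an existing membership via the
# serializer, with the "id" and "user" keys removed and a "username" key (an email address)
# added, which is how new members/invitations are created through the API in these tests.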
def test_membership_action_bulk_create(client, data):
url = reverse('memberships-bulk-create')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
bulk_data = {
"project_id": data.public_project.id,
"bulk_memberships": [
{"role_id": data.public_membership.role.pk, "username": "test1@test.com"},
{"role_id": data.public_membership.role.pk, "username": "test2@test.com"},
]
}
bulk_data = json.dumps(bulk_data)
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 403, 200]
bulk_data = {
"project_id": data.private_project1.id,
"bulk_memberships": [
{"role_id": data.private_membership1.role.pk, "username": "test1@test.com"},
{"role_id": data.private_membership1.role.pk, "username": "test2@test.com"},
]
}
bulk_data = json.dumps(bulk_data)
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 403, 200]
bulk_data = {
"project_id": data.private_project2.id,
"bulk_memberships": [
{"role_id": data.private_membership2.role.pk, "username": "test1@test.com"},
{"role_id": data.private_membership2.role.pk, "username": "test2@test.com"},
]
}
bulk_data = json.dumps(bulk_data)
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 403, 200]
bulk_data = {
"project_id": data.blocked_project.id,
"bulk_memberships": [
{"role_id": data.blocked_membership.role.pk, "username": "test1@test.com"},
{"role_id": data.blocked_membership.role.pk, "username": "test2@test.com"},
]
}
bulk_data = json.dumps(bulk_data)
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 403, 451]
def test_membership_action_resend_invitation(client, data):
public_invitation = f.InvitationFactory(project=data.public_project, role__project=data.public_project)
private_invitation1 = f.InvitationFactory(project=data.private_project1, role__project=data.private_project1)
private_invitation2 = f.InvitationFactory(project=data.private_project2, role__project=data.private_project2)
blocked_invitation = f.InvitationFactory(project=data.blocked_project, role__project=data.blocked_project)
public_url = reverse('memberships-resend-invitation', kwargs={"pk": public_invitation.pk})
private1_url = reverse('memberships-resend-invitation', kwargs={"pk": private_invitation1.pk})
private2_url = reverse('memberships-resend-invitation', kwargs={"pk": private_invitation2.pk})
blocked_url = reverse('memberships-resend-invitation', kwargs={"pk": blocked_invitation.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'post', public_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'post', private1_url, None, users)
assert results == [401, 403, 403, 403, 204]
results = helper_test_http_method(client, 'post', private2_url, None, users)
assert results == [404, 404, 404, 403, 204]
results = helper_test_http_method(client, 'post', blocked_url, None, users)
assert results == [404, 404, 404, 403, 451]
#####################################################
# Project Templates
#####################################################
def test_project_template_retrieve(client, data):
url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
users = [
None,
data.registered_user,
data.superuser,
]
results = helper_test_http_method(client, 'get', url, None, users)
assert results == [200, 200, 200]
def test_project_template_update(client, data):
url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
users = [
None,
data.registered_user,
data.superuser,
]
project_template_data = serializers.ProjectTemplateSerializer(data.project_template).data
project_template_data["default_owner_role"] = "test"
project_template_data = json.dumps(project_template_data)
results = helper_test_http_method(client, 'put', url, project_template_data, users)
assert results == [401, 403, 200]
def test_project_template_delete(client, data):
url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
users = [
None,
data.registered_user,
data.superuser,
]
results = helper_test_http_method(client, 'delete', url, None, users)
assert results == [401, 403, 204]
def test_project_template_list(client, data):
url = reverse('project-templates-list')
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 1
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 1
assert response.status_code == 200
client.login(data.superuser)
response = client.get(url)
projects_data = json.loads(response.content.decode('utf-8'))
assert len(projects_data) == 1
assert response.status_code == 200
def test_project_template_patch(client, data):
url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
users = [
None,
data.registered_user,
data.superuser,
]
results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
assert results == [401, 403, 200]
#####################################################
# Tags
#####################################################
def test_create_tag(client, data):
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"tag": "testtest",
"color": "#123123"
})
url = reverse('projects-create-tag', kwargs={"pk": data.public_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-create-tag', kwargs={"pk": data.private_project1.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-create-tag', kwargs={"pk": data.private_project2.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 200]
url = reverse('projects-create-tag', kwargs={"pk": data.blocked_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 451]
def test_edit_tag(client, data):
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"from_tag": "tag1",
"to_tag": "renamedtag1",
"color": "#123123"
})
url = reverse('projects-edit-tag', kwargs={"pk": data.public_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-edit-tag', kwargs={"pk": data.private_project1.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-edit-tag', kwargs={"pk": data.private_project2.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 200]
url = reverse('projects-edit-tag', kwargs={"pk": data.blocked_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 451]
def test_delete_tag(client, data):
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"tag": "tag2",
})
url = reverse('projects-delete-tag', kwargs={"pk": data.public_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-delete-tag', kwargs={"pk": data.private_project1.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-delete-tag', kwargs={"pk": data.private_project2.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 200]
url = reverse('projects-delete-tag', kwargs={"pk": data.blocked_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 451]
def test_mix_tags(client, data):
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
post_data = json.dumps({
"from_tags": ["tag1"],
"to_tag": "tag3"
})
url = reverse('projects-mix-tags', kwargs={"pk": data.public_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-mix-tags', kwargs={"pk": data.private_project1.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [401, 403, 403, 403, 200]
url = reverse('projects-mix-tags', kwargs={"pk": data.private_project2.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 200]
url = reverse('projects-mix-tags', kwargs={"pk": data.blocked_project.pk})
results = helper_test_http_method(client, 'post', url, post_data, users)
assert results == [404, 404, 404, 403, 451]
|
agpl-3.0
| 2,196,972,104,921,484,500 | 40.751302 | 120 | 0.642913 | false |
GoogleCloudPlatform/declarative-resource-client-library
|
python/services/iam/role.py
|
1
|
11094
|
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from connector import channel
from google3.cloud.graphite.mmv2.services.google.iam import role_pb2
from google3.cloud.graphite.mmv2.services.google.iam import role_pb2_grpc
from typing import List
class Role(object):
def __init__(
self,
name: str = None,
title: str = None,
description: str = None,
localized_values: dict = None,
lifecycle_phase: str = None,
group_name: str = None,
group_title: str = None,
included_permissions: list = None,
stage: str = None,
etag: str = None,
deleted: bool = None,
included_roles: list = None,
parent: str = None,
service_account_file: str = "",
):
channel.initialize()
self.name = name
self.title = title
self.description = description
self.localized_values = localized_values
self.lifecycle_phase = lifecycle_phase
self.group_name = group_name
self.group_title = group_title
self.included_permissions = included_permissions
self.stage = stage
self.etag = etag
self.deleted = deleted
self.included_roles = included_roles
self.parent = parent
self.service_account_file = service_account_file
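# apply() below builds an ApplyIamRoleRequest from the populated fields, sends it over the
# connector channel, and then refreshes this object's attributes from the response;
# delete() and list() follow the same request-building pattern.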
def apply(self):
stub = role_pb2_grpc.IamRoleServiceStub(channel.Channel())
request = role_pb2.ApplyIamRoleRequest()
if Primitive.to_proto(self.name):
request.resource.name = Primitive.to_proto(self.name)
if Primitive.to_proto(self.title):
request.resource.title = Primitive.to_proto(self.title)
if Primitive.to_proto(self.description):
request.resource.description = Primitive.to_proto(self.description)
if RoleLocalizedValues.to_proto(self.localized_values):
request.resource.localized_values.CopyFrom(
RoleLocalizedValues.to_proto(self.localized_values)
)
else:
request.resource.ClearField("localized_values")
if Primitive.to_proto(self.lifecycle_phase):
request.resource.lifecycle_phase = Primitive.to_proto(self.lifecycle_phase)
if Primitive.to_proto(self.group_name):
request.resource.group_name = Primitive.to_proto(self.group_name)
if Primitive.to_proto(self.group_title):
request.resource.group_title = Primitive.to_proto(self.group_title)
if Primitive.to_proto(self.included_permissions):
request.resource.included_permissions.extend(
Primitive.to_proto(self.included_permissions)
)
if RoleStageEnum.to_proto(self.stage):
request.resource.stage = RoleStageEnum.to_proto(self.stage)
if Primitive.to_proto(self.etag):
request.resource.etag = Primitive.to_proto(self.etag)
if Primitive.to_proto(self.deleted):
request.resource.deleted = Primitive.to_proto(self.deleted)
if Primitive.to_proto(self.included_roles):
request.resource.included_roles.extend(
Primitive.to_proto(self.included_roles)
)
if Primitive.to_proto(self.parent):
request.resource.parent = Primitive.to_proto(self.parent)
request.service_account_file = self.service_account_file
response = stub.ApplyIamRole(request)
self.name = Primitive.from_proto(response.name)
self.title = Primitive.from_proto(response.title)
self.description = Primitive.from_proto(response.description)
self.localized_values = RoleLocalizedValues.from_proto(
response.localized_values
)
self.lifecycle_phase = Primitive.from_proto(response.lifecycle_phase)
self.group_name = Primitive.from_proto(response.group_name)
self.group_title = Primitive.from_proto(response.group_title)
self.included_permissions = Primitive.from_proto(response.included_permissions)
self.stage = RoleStageEnum.from_proto(response.stage)
self.etag = Primitive.from_proto(response.etag)
self.deleted = Primitive.from_proto(response.deleted)
self.included_roles = Primitive.from_proto(response.included_roles)
self.parent = Primitive.from_proto(response.parent)
def delete(self):
stub = role_pb2_grpc.IamRoleServiceStub(channel.Channel())
request = role_pb2.DeleteIamRoleRequest()
request.service_account_file = self.service_account_file
if Primitive.to_proto(self.name):
request.resource.name = Primitive.to_proto(self.name)
if Primitive.to_proto(self.title):
request.resource.title = Primitive.to_proto(self.title)
if Primitive.to_proto(self.description):
request.resource.description = Primitive.to_proto(self.description)
if RoleLocalizedValues.to_proto(self.localized_values):
request.resource.localized_values.CopyFrom(
RoleLocalizedValues.to_proto(self.localized_values)
)
else:
request.resource.ClearField("localized_values")
if Primitive.to_proto(self.lifecycle_phase):
request.resource.lifecycle_phase = Primitive.to_proto(self.lifecycle_phase)
if Primitive.to_proto(self.group_name):
request.resource.group_name = Primitive.to_proto(self.group_name)
if Primitive.to_proto(self.group_title):
request.resource.group_title = Primitive.to_proto(self.group_title)
if Primitive.to_proto(self.included_permissions):
request.resource.included_permissions.extend(
Primitive.to_proto(self.included_permissions)
)
if RoleStageEnum.to_proto(self.stage):
request.resource.stage = RoleStageEnum.to_proto(self.stage)
if Primitive.to_proto(self.etag):
request.resource.etag = Primitive.to_proto(self.etag)
if Primitive.to_proto(self.deleted):
request.resource.deleted = Primitive.to_proto(self.deleted)
if Primitive.to_proto(self.included_roles):
request.resource.included_roles.extend(
Primitive.to_proto(self.included_roles)
)
if Primitive.to_proto(self.parent):
request.resource.parent = Primitive.to_proto(self.parent)
response = stub.DeleteIamRole(request)
@classmethod
def list(self, parent, service_account_file=""):
stub = role_pb2_grpc.IamRoleServiceStub(channel.Channel())
request = role_pb2.ListIamRoleRequest()
request.service_account_file = service_account_file
request.Parent = parent
return stub.ListIamRole(request).items
def to_proto(self):
resource = role_pb2.IamRole()
if Primitive.to_proto(self.name):
resource.name = Primitive.to_proto(self.name)
if Primitive.to_proto(self.title):
resource.title = Primitive.to_proto(self.title)
if Primitive.to_proto(self.description):
resource.description = Primitive.to_proto(self.description)
if RoleLocalizedValues.to_proto(self.localized_values):
resource.localized_values.CopyFrom(
RoleLocalizedValues.to_proto(self.localized_values)
)
else:
resource.ClearField("localized_values")
if Primitive.to_proto(self.lifecycle_phase):
resource.lifecycle_phase = Primitive.to_proto(self.lifecycle_phase)
if Primitive.to_proto(self.group_name):
resource.group_name = Primitive.to_proto(self.group_name)
if Primitive.to_proto(self.group_title):
resource.group_title = Primitive.to_proto(self.group_title)
if Primitive.to_proto(self.included_permissions):
resource.included_permissions.extend(
Primitive.to_proto(self.included_permissions)
)
if RoleStageEnum.to_proto(self.stage):
resource.stage = RoleStageEnum.to_proto(self.stage)
if Primitive.to_proto(self.etag):
resource.etag = Primitive.to_proto(self.etag)
if Primitive.to_proto(self.deleted):
resource.deleted = Primitive.to_proto(self.deleted)
if Primitive.to_proto(self.included_roles):
resource.included_roles.extend(Primitive.to_proto(self.included_roles))
if Primitive.to_proto(self.parent):
resource.parent = Primitive.to_proto(self.parent)
return resource
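# Illustrative usage sketch (not part of the original module; the role name, parent,
# stage and permission below are made-up values):
#
#     role = Role(
#         name="customLogViewer",
#         title="Custom Log Viewer",
#         description="Read-only access to log entries",
#         included_permissions=["logging.logEntries.list"],
#         stage="GA",
#         parent="projects/my-project",
#         service_account_file="/path/to/service_account.json",
#     )
#     role.apply()  # creates or updates the role and refreshes the local fields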
class RoleLocalizedValues(object):
def __init__(self, localized_title: str = None, localized_description: str = None):
self.localized_title = localized_title
self.localized_description = localized_description
@classmethod
def to_proto(self, resource):
if not resource:
return None
res = role_pb2.IamRoleLocalizedValues()
if Primitive.to_proto(resource.localized_title):
res.localized_title = Primitive.to_proto(resource.localized_title)
if Primitive.to_proto(resource.localized_description):
res.localized_description = Primitive.to_proto(
resource.localized_description
)
return res
@classmethod
def from_proto(self, resource):
if not resource:
return None
return RoleLocalizedValues(
localized_title=Primitive.from_proto(resource.localized_title),
localized_description=Primitive.from_proto(resource.localized_description),
)
class RoleLocalizedValuesArray(object):
@classmethod
def to_proto(self, resources):
if not resources:
return resources
return [RoleLocalizedValues.to_proto(i) for i in resources]
@classmethod
def from_proto(self, resources):
return [RoleLocalizedValues.from_proto(i) for i in resources]
class RoleStageEnum(object):
@classmethod
def to_proto(self, resource):
if not resource:
return resource
return role_pb2.IamRoleStageEnum.Value("IamRoleStageEnum%s" % resource)
@classmethod
def from_proto(self, resource):
if not resource:
return resource
return role_pb2.IamRoleStageEnum.Name(resource)[len("IamRoleStageEnum") :]
class Primitive(object):
@classmethod
def to_proto(self, s):
if not s:
return ""
return s
@classmethod
def from_proto(self, s):
return s
|
apache-2.0
| -2,879,384,759,754,092,000 | 38.201413 | 87 | 0.655129 | false |
zadarastorage/zadarapy
|
zadarapy/vpsaos/drives.py
|
1
|
2669
|
# Copyright 2019 Zadara Storage, Inc.
# Originally authored by Jeremy Brown - https://github.com/jwbrown77
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from zadarapy.validators import verify_start_limit, \
verify_volume_id
def get_all_drives(session, start=None, limit=None, return_type=None,
**kwargs):
"""
Retrieves details for all drives for the VPSAOS.
:type session: zadarapy.session.Session
:param session: A valid zadarapy.session.Session object. Required.
:type start: int
:param start: The offset to start displaying drives from. Optional.
    :type limit: int
:param limit: The maximum number of drives to return. Optional.
:type return_type: str
:param return_type: If this is set to the string 'json', this function
will return a JSON string. Otherwise, it will return a Python
dictionary. Optional (will return a Python dictionary by default).
:rtype: dict, str
:returns: A dictionary or JSON data set as a string depending on
return_type parameter.
"""
parameters = verify_start_limit(start, limit)
path = '/api/zios/drives.json'
return session.get_api(path=path, parameters=parameters,
return_type=return_type, **kwargs)
def get_one_drive(session, name, return_type=None, **kwargs):
"""
Retrieves details for a single drive for the VPSAOS.
:type session: zadarapy.session.Session
:param session: A valid zadarapy.session.Session object. Required.
:type name: str
:param name: The 'name' value as returned by
get_all_drives. Required.
:type return_type: str
:param return_type: If this is set to the string 'json', this function
will return a JSON string. Otherwise, it will return a Python
dictionary. Optional (will return a Python dictionary by default).
:rtype: dict, str
:returns: A dictionary or JSON data set as a string depending on
return_type parameter.
"""
verify_volume_id(name)
path = '/api/zios/drives/{0}.json'.format(name)
return session.get_api(path=path, return_type=return_type, **kwargs)
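# Illustrative usage sketch (not part of the original module; the session object and the
# drive name below are hypothetical):
#
#     drives = get_all_drives(session, start=0, limit=50)      # first 50 drives
#     drive = get_one_drive(session, 'volume-00000001')        # hypothetical drive name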
|
apache-2.0
| 2,293,885,315,494,486,800 | 36.069444 | 77 | 0.694642 | false |
sekikn/ambari
|
ambari-server/src/test/python/custom_actions/TestUpdateRepo.py
|
2
|
5282
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import os, sys
from mock.mock import patch
from mock.mock import MagicMock
from unittest import TestCase
from resource_management import *
from resource_management import Script
from ambari_commons.os_check import OSCheck
from update_repo import UpdateRepo
class TestUpdateRepo(TestCase):
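  # The patch decorators below stub out OS-family detection, repository file writes and
  # structured output, so actionexecute() can run without touching the system: the valid
  # case checks that /etc/yum.repos.d/HDP.repo is (re)written and a success message is
  # reported, while the empty "repositoryFile" case is expected to raise.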
@patch.object(OSCheck, "is_suse_family")
@patch.object(OSCheck, "is_ubuntu_family")
@patch.object(OSCheck, "is_redhat_family")
@patch.object(Script, 'get_config')
@patch("resource_management.libraries.providers.repository.File")
@patch("resource_management.libraries.script.Script.put_structured_out")
@patch.object(System, "os_family", new='redhat')
def testUpdateRepo(self, structured_out_mock, file_mock, mock_config, is_redhat_mock, is_ubuntu_mock, is_suse_mock):
###### valid case
is_suse_mock.return_value = False
is_ubuntu_mock.return_value = False
is_redhat_mock.return_value = True
updateRepo = UpdateRepo()
mock_config.return_value = { "configurations": {
"cluster-env": {
"repo_suse_rhel_template": "REPO_SUSE_RHEL_TEST_TEMPLATE",
"repo_ubuntu_template": "REPO_UBUNTU_TEMPLATE"
}
},
"repositoryFile": {
"resolved": True,
"repoVersion": "2.4.3.0-227",
"repositories": [
{
"mirrorsList": None,
"ambariManaged": True,
"baseUrl": "http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.4.3.0/",
"repoName": "HDP",
"components": None,
"osType": "redhat6",
"distribution": None,
"repoId": "HDP-2.4-repo-1"
},
{
"mirrorsList": None,
"ambariManaged": True,
"baseUrl": "http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6",
"repoName": "HDP-UTILS",
"components": None,
"osType": "redhat6",
"distribution": None,
"repoId": "HDP-UTILS-1.1.0.20-repo-1"
}
],
"feature": {
"m_isScoped": False,
"m_isPreInstalled": False
},
"stackName": "HDP",
"repoVersionId": 1
},
}
with Environment('/') as env:
updateRepo.actionexecute(None)
self.assertTrue(file_mock.called)
self.assertEquals(file_mock.call_args[0][0], "/etc/yum.repos.d/HDP.repo")
self.assertEquals(structured_out_mock.call_args[0][0], {'repo_update': {'message': 'Repository files successfully updated!', 'exit_code': 0}})
###### invalid repo info
file_mock.reset_mock()
failed = False
mock_config.return_value = { "configurations": {
"clugit ster-env": {
"repo_suse_rhel_template": "REPO_SUSE_RHEL_TEST_TEMPLATE",
"repo_ubuntu_template": "REPO_UBUNTU_TEMPLATE"
}
},
"repositoryFile": {}
}
try:
with Environment('/') as env:
updateRepo.actionexecute(None)
except Exception, exception:
failed = True
self.assertFalse(file_mock.called)
self.assertTrue(failed)
|
apache-2.0
| 2,976,111,629,241,852,400 | 44.543103 | 146 | 0.46933 | false |
luotao1/Paddle
|
python/paddle/dataset/cifar.py
|
1
|
5299
|
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
CIFAR dataset.
This module will download the dataset from https://dataset.bj.bcebos.com/cifar/cifar-10-python.tar.gz and https://dataset.bj.bcebos.com/cifar/cifar-100-python.tar.gz, and parse the
train/test sets into paddle reader creators.
The CIFAR-10 dataset consists of 60000 32x32 color images in 10 classes,
with 6000 images per class. There are 50000 training images and 10000 test
images.
The CIFAR-100 dataset is just like the CIFAR-10, except it has 100 classes
containing 600 images each. There are 500 training images and 100 testing
images per class.
"""
from __future__ import print_function
import itertools
import numpy
import paddle.dataset.common
import paddle.utils.deprecated as deprecated
import tarfile
import six
from six.moves import cPickle as pickle
__all__ = ['train100', 'test100', 'train10', 'test10']
URL_PREFIX = 'https://dataset.bj.bcebos.com/cifar/'
CIFAR10_URL = URL_PREFIX + 'cifar-10-python.tar.gz'
CIFAR10_MD5 = 'c58f30108f718f92721af3b95e74349a'
CIFAR100_URL = URL_PREFIX + 'cifar-100-python.tar.gz'
CIFAR100_MD5 = 'eb9058c3a382ffc7106e4002c42a8d85'
def reader_creator(filename, sub_name, cycle=False):
def read_batch(batch):
data = batch[six.b('data')]
labels = batch.get(
six.b('labels'), batch.get(six.b('fine_labels'), None))
assert labels is not None
for sample, label in six.moves.zip(data, labels):
yield (sample / 255.0).astype(numpy.float32), int(label)
def reader():
while True:
with tarfile.open(filename, mode='r') as f:
names = (each_item.name for each_item in f
if sub_name in each_item.name)
for name in names:
if six.PY2:
batch = pickle.load(f.extractfile(name))
else:
batch = pickle.load(
f.extractfile(name), encoding='bytes')
for item in read_batch(batch):
yield item
if not cycle:
break
return reader
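# Note: reader_creator() returns a generator function that scans the tar archive for
# members whose names contain ``sub_name`` (e.g. 'train', 'test_batch'), unpickles each
# batch and yields (image, label) pairs with pixel values scaled to [0, 1]; with
# cycle=True it keeps looping over the archive indefinitely.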
@deprecated(
since="2.0.0",
update_to="paddle.vision.datasets.Cifar100",
reason="Please use new dataset API which supports paddle.io.DataLoader")
def train100():
"""
CIFAR-100 training set creator.
It returns a reader creator, each sample in the reader is image pixels in
[0, 1] and label in [0, 99].
:return: Training reader creator
:rtype: callable
"""
return reader_creator(
paddle.dataset.common.download(CIFAR100_URL, 'cifar', CIFAR100_MD5),
'train')
@deprecated(
since="2.0.0",
update_to="paddle.vision.datasets.Cifar100",
reason="Please use new dataset API which supports paddle.io.DataLoader")
def test100():
"""
CIFAR-100 test set creator.
It returns a reader creator, each sample in the reader is image pixels in
[0, 1] and label in [0, 99].
:return: Test reader creator.
:rtype: callable
"""
return reader_creator(
paddle.dataset.common.download(CIFAR100_URL, 'cifar', CIFAR100_MD5),
'test')
@deprecated(
since="2.0.0",
update_to="paddle.vision.datasets.Cifar10",
reason="Please use new dataset API which supports paddle.io.DataLoader")
def train10(cycle=False):
"""
CIFAR-10 training set creator.
It returns a reader creator, each sample in the reader is image pixels in
[0, 1] and label in [0, 9].
:param cycle: whether to cycle through the dataset
:type cycle: bool
:return: Training reader creator
:rtype: callable
"""
return reader_creator(
paddle.dataset.common.download(CIFAR10_URL, 'cifar', CIFAR10_MD5),
'data_batch',
cycle=cycle)
@deprecated(
since="2.0.0",
update_to="paddle.vision.datasets.Cifar10",
reason="Please use new dataset API which supports paddle.io.DataLoader")
def test10(cycle=False):
"""
CIFAR-10 test set creator.
It returns a reader creator, each sample in the reader is image pixels in
[0, 1] and label in [0, 9].
:param cycle: whether to cycle through the dataset
:type cycle: bool
:return: Test reader creator.
:rtype: callable
"""
return reader_creator(
paddle.dataset.common.download(CIFAR10_URL, 'cifar', CIFAR10_MD5),
'test_batch',
cycle=cycle)
@deprecated(
since="2.0.0",
update_to="paddle.vision.datasets.Cifar10",
reason="Please use new dataset API which supports paddle.io.DataLoader")
def fetch():
paddle.dataset.common.download(CIFAR10_URL, 'cifar', CIFAR10_MD5)
paddle.dataset.common.download(CIFAR100_URL, 'cifar', CIFAR100_MD5)
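# Illustrative usage sketch (not part of the original module): iterate over a few CIFAR-10
# training samples; each sample is a float32 vector scaled to [0, 1] plus an integer label.
#
#     train_reader = train10()
#     for i, (image, label) in enumerate(train_reader()):
#         print(image.shape, label)
#         if i >= 2:
#             break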
|
apache-2.0
| 603,859,091,514,423,700 | 30.730539 | 188 | 0.66371 | false |
igordejanovic/textX
|
tests/functional/test_metamodel/test_model_params.py
|
1
|
5362
|
from __future__ import unicode_literals
from click.testing import CliRunner
import os.path
from pytest import raises
from textx import metamodel_from_str
from textx.cli import textx
from textx.exceptions import TextXError
from textx.generators import gen_file, get_output_filename
from textx import language, generator, register_language, register_generator
grammar = r"""
Model: 'MyModel' name=ID;
"""
text = r"""
MyModel test1
"""
def test_model_params():
mm = metamodel_from_str(grammar)
mm.model_param_defs.add(
"parameter1", "an example param (1)"
)
mm.model_param_defs.add(
"parameter2", "an example param (2)"
)
m = mm.model_from_str(text, parameter1='P1', parameter2='P2')
assert m.name == 'test1'
assert hasattr(m, '_tx_model_params')
assert len(m._tx_model_params) == 2
assert len(m._tx_model_params.used_keys) == 0
assert not m._tx_model_params.all_used
assert m._tx_model_params['parameter1'] == 'P1'
assert len(m._tx_model_params.used_keys) == 1
assert 'parameter1' in m._tx_model_params.used_keys
assert 'parameter2' not in m._tx_model_params.used_keys
assert not m._tx_model_params.all_used
assert m._tx_model_params['parameter2'] == 'P2'
assert len(m._tx_model_params.used_keys) == 2
assert 'parameter1' in m._tx_model_params.used_keys
assert 'parameter2' in m._tx_model_params.used_keys
assert m._tx_model_params.all_used
assert m._tx_model_params.get(
'missing_params', default='default value') == 'default value'
assert m._tx_model_params.get(
'parameter1', default='default value') == 'P1'
with raises(TextXError, match=".*unknown parameter myerror2.*"):
mm.model_from_str(text, parameter1='P1', myerror2='P2')
assert len(mm.model_param_defs) >= 2
assert 'parameter1' in mm.model_param_defs
assert 'parameter1' in mm.model_param_defs
assert mm.model_param_defs[
'parameter1'].description == "an example param (1)"
def test_model_params_empty():
mm = metamodel_from_str(grammar)
mm.model_param_defs.add(
"parameter1", "an example param (1)"
)
mm.model_param_defs.add(
"parameter2", "an example param (2)"
)
m = mm.model_from_str(text)
assert m.name == 'test1'
assert hasattr(m, '_tx_model_params')
assert len(m._tx_model_params) == 0
assert m._tx_model_params.all_used
def test_model_params_file_based():
mm = metamodel_from_str(grammar)
mm.model_param_defs.add(
"parameter1", "an example param (1)"
)
mm.model_param_defs.add(
"parameter2", "an example param (2)"
)
current_dir = os.path.dirname(__file__)
m = mm.model_from_file(
os.path.join(current_dir, 'test_model_params',
'model.txt'),
parameter1='P1', parameter2='P2')
assert m.name == 'file_based'
assert hasattr(m, '_tx_model_params')
assert len(m._tx_model_params) == 2
def test_model_params_generate_cli():
"""
Test that model parameters are passed through generate cli command.
"""
# register test language
@language('testlang', '*.mpt')
def model_param_test():
def processor(model, metamodel):
# Just to be sure that processor sees the model parameters
model.model_params = model._tx_model_params
mm = metamodel_from_str(grammar)
mm.model_param_defs.add('meaning_of_life', 'The Meaning of Life')
mm.register_model_processor(processor)
return mm
register_language(model_param_test)
# register language generator
@generator('testlang', 'testtarget')
def mytarget_generator(metamodel, model, output_path, overwrite,
debug=False, **custom_args):
# Dump custom args for testing
txt = '\n'.join(["{}={}".format(arg_name, arg_value)
for arg_name, arg_value in custom_args.items()])
# Dump model params processed by model processor for testing
txt += '\nModel params:'
txt += '\n'.join(["{}={}".format(param_name, param_value)
for param_name, param_value in model.model_params.items()])
output_file = get_output_filename(model._tx_filename, None, 'testtarget')
def gen_callback():
with open(output_file, 'w') as f:
f.write(txt)
gen_file(model._tx_filename, output_file, gen_callback, overwrite)
register_generator(mytarget_generator)
# Run generator from CLI
this_folder = os.path.abspath(os.path.dirname(__file__))
runner = CliRunner()
model_file = os.path.join(this_folder, 'model_param_generate_test.mpt')
result = runner.invoke(textx, ['generate',
'--language', 'testlang',
'--target', 'testtarget',
'--overwrite', model_file,
'--meaning_of_life', '42',
'--someparam', 'somevalue'])
assert result.exit_code == 0
output_file = os.path.join(this_folder, 'model_param_generate_test.testtarget')
with open(output_file, 'r') as f:
content = f.read()
assert 'someparam=somevalue' in content
assert 'Model params:meaning_of_life=42' in content
|
mit
| 5,578,914,488,214,486,000 | 31.49697 | 85 | 0.614323 | false |
mulkieran/justbytes
|
src/justbytes/_errors.py
|
1
|
3871
|
# Copyright (C) 2015 - 2019 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; If not, see <http://www.gnu.org/licenses/>.
#
# Red Hat Author(s): Anne Mulhern <amulhern@redhat.com>
""" Exception types used by the justbytes class. """
# isort: STDLIB
import abc
class RangeError(Exception, metaclass=abc.ABCMeta):
""" Generic Range error. """
class RangeValueError(RangeError):
"""
Raised when a parameter has an unacceptable value.
May also be raised when the parameter has an unacceptable type.
"""
_FMT_STR = "value '%s' for parameter %s is unacceptable"
def __init__(self, value, param, msg=None):
"""
Initializer.
:param object value: the value
:param str param: the parameter
:param str msg: an explanatory message
"""
# pylint: disable=super-init-not-called
self.value = value
self.param = param
self.msg = msg
def __str__(self):
if self.msg:
fmt_str = self._FMT_STR + ": %s"
return fmt_str % (self.value, self.param, self.msg)
return self._FMT_STR % (self.value, self.param)
class RangeUnsupportedOpError(RangeError, metaclass=abc.ABCMeta):
""" Error when executing unsupported operation on Range. """
class RangeNonsensicalOpError(RangeUnsupportedOpError, metaclass=abc.ABCMeta):
""" Error when requesting an operation that doesn't make sense. """
class RangeNonsensicalBinOpValueError(RangeNonsensicalOpError):
""" Error when requesting a binary operation with a nonsense value. """
_FMT_STR = "nonsensical value for for %s: '%s'"
def __init__(self, operator, other):
"""
Initializer.
:param str operator: the operator
:param object other: the other argument
"""
# pylint: disable=super-init-not-called
self._operator = operator
self._other = other
def __str__(self):
return self._FMT_STR % (self._operator, self._other)
class RangeNonsensicalBinOpError(RangeNonsensicalOpError):
""" Error when requesting a binary operation that doesn't make sense. """
_FMT_STR = "nonsensical operand types for %s: 'Range' and '%s'"
def __init__(self, operator, other):
"""
Initializer.
:param str operator: the operator
:param object other: the other argument
"""
# pylint: disable=super-init-not-called
self._operator = operator
self._other = other
def __str__(self):
return self._FMT_STR % (self._operator, type(self._other).__name__)
class RangeUnrepresentableResultError(RangeUnsupportedOpError, metaclass=abc.ABCMeta):
"""
Error when requesting an operation that yields units that cannot
be represented with Range, e.g., when multiplying a Range by a Range.
"""
class RangePowerResultError(RangeUnrepresentableResultError):
""" Error when requesting an operation that would yield a byte power. """
def __str__(self):
return "requested operation result requires non-unit power of bytes"
class RangeFractionalResultError(RangeUnrepresentableResultError):
""" Error when Range construction is strict. """
def __str__(self):
return "requested operation result has a fractional quantity of bytes"
|
gpl-2.0
| -550,328,828,070,604,900 | 30.471545 | 86 | 0.671144 | false |
edx/xblock-lti-consumer
|
lti_consumer/migrations/0002_ltiagslineitem.py
|
1
|
1256
|
# Generated by Django 2.2.16 on 2020-09-29 21:48
from django.db import migrations, models
import django.db.models.deletion
import opaque_keys.edx.django.models
class Migration(migrations.Migration):
dependencies = [
('lti_consumer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='LtiAgsLineItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('resource_id', models.CharField(blank=True, max_length=100)),
('resource_link_id', opaque_keys.edx.django.models.UsageKeyField(blank=True, db_index=True, max_length=255, null=True)),
('label', models.CharField(max_length=100)),
('score_maximum', models.IntegerField()),
('tag', models.CharField(blank=True, max_length=50)),
('start_date_time', models.DateTimeField(blank=True, null=True)),
('end_date_time', models.DateTimeField(blank=True, null=True)),
('lti_configuration', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lti_consumer.LtiConfiguration')),
],
),
]
|
agpl-3.0
| 6,771,993,912,979,698,000 | 42.310345 | 161 | 0.61465 | false |
Micronaet/micronaet-migration
|
base_accounting_program/accounting.py
|
1
|
14040
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class ProductProductExtraFields(orm.Model):
_inherit ='product.product'
_columns = {
'import': fields.boolean('Imported'),
'mexal_id': fields.char(
'Product mexal ID', size=20),
'q_x_pack': fields.float(
'Q. per collo', digits=(16, 3)),
'linear_length': fields.float(
'Lung. lineare', digits=(16, 3)),
'large_description': fields.text(
'Large Description', translate=True, help="For web publishing"),
}
class ProductPricelistExtraFields(orm.Model):
_inherit ='product.pricelist'
_columns = {
'import': fields.boolean('Imported', required=False),
'mexal_id': fields.char(
'Mexal Pricelist', size=9, required=False, readonly=False),
}
class PricelistVersionExtraFields(orm.Model):
_inherit ='product.pricelist.version'
_columns = {
'import': fields.boolean('Imported', required=False),
'mexal_id': fields.char(
'Mexal Pricelist version', size=9, required=False, readonly=False),
}
class PricelistItemExtraFields(orm.Model):
_inherit ='product.pricelist.item'
_columns = {
'mexal_id': fields.char(
'Mexal Pricelist item', size=9, required=False, readonly=False),
}
"""
# fiam_sale.py
Extra fields for objects used in sale orders.
Maybe these new objects are not necessary and will be replaced in the future.
TODO: Maybe the discount part is better moved to a single module.
"""
class SaleOrderBank(orm.Model):
_name = 'sale.order.bank'
_description = 'Sale oder bank'
_columns = {
'name': fields.char('Bank account', size=64),
'information': fields.text(
'Information', translate=True,
help="Account description, IBAN etc. linked in the offer"),
}
class SaleProductReturn(orm.Model):
    ''' List of text sentences for the return of the product; this list is
    shown in offer modules
'''
_name = 'sale.product.return'
_description = 'Sale product return'
_columns = {
'name': fields.char('Description', size=64),
'text': fields.text('Text', translate=True),
}
class SaleOrderExtraFields(orm.Model):
_inherit='sale.order'
_columns = {
'bank_id': fields.many2one('sale.order.bank', 'Conto bancario'),
'print_address': fields.boolean('Use extra address'),
'print_only_prices': fields.boolean('Only price offer'),
'has_master_header': fields.boolean(
'Header master table',
help="In 'only comunication offer' doesn't add header"),
'return_id': fields.many2one('sale.product.return', 'Product return'),
}
_defaults={
'has_master_header': lambda *a: True,
}
class SaleOrderLineExtraFields(orm.Model):
_inherit ='sale.order.line'
# TODO remove (put in correct module mx_discount_scale_order
# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
def create(self, cr, uid, vals, context=None):
""" Multi discount rate
"""
if not vals.get('discount', 0.0) and vals.get(
'multi_discount_rates', False):
res = self.on_change_multi_discount(
cr, uid, 0, vals.get('multi_discount_rates'))['value']
vals['discount'] = res.get('discount', '')
return super(SaleOrderLineExtraFields, self).create(
cr, uid, vals, context=context)
def write(self, cr, uid, ids, vals, context=None):
""" Multi discount rate
"""
if vals.get('multi_discount_rates', False):
res = self.on_change_multi_discount(
cr, uid, 0, vals.get('multi_discount_rates'))['value']
vals['discount'] = res.get('discount', '')
# TODO raise error when update (need restart server)
return super(SaleOrderLineExtraFields, self).write(
cr, uid, ids, vals, context=context)
def on_change_multi_discount(self, cr, uid, ids, multi_discount_rates,
context=None):
        ''' Compute the cumulative discount from the multi-rate string and return it
        together with a normalized version of the rates
'''
res = {}
if multi_discount_rates:
disc = multi_discount_rates.replace(' ', '')
disc = disc.replace(',', '.')
discount_list = disc.split('+')
if discount_list:
base_discount = float(100)
for aliquota in discount_list:
                    try:
                        # eval() tolerates entries such as "10/2"; anything that
                        # cannot be evaluated to a number falls back to 0
                        i = float(eval(aliquota))
                    except Exception:
                        i = 0.00
base_discount -= base_discount * i / 100.00
res['discount'] = 100 - base_discount
res['multi_discount_rates'] = '+ '.join(discount_list)
else:
res['discount'] = 0.0
res['multi_discount_rates'] = ''
else:
res['discount'] = 0.00
res['multi_discount_rates'] = ''
return {'value': res}
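    # Illustrative example (not part of the original module): for an input of
    # '30+10' the loop above compounds the rates, so base_discount goes
    # 100 -> 70 -> 63 and the method returns
    #   {'value': {'discount': 37.0, 'multi_discount_rates': '30+ 10'}}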
def _discount_rates_get(self, cr, uid, context=None):
if context is None:
context = {}
if context.get('partner_id'):
cr.execute("""
SELECT discount_rates, id
FROM res_partner
WHERE id = %d
""" % context['partner_id'])
res = cr.fetchall()
if res[0][0]:
return res[0][0]
else:
return False
else:
return False
def _discount_value_get(self, cr, uid, context=None):
if context is None:
context = {}
if context.get('partner_id', False):
cr.execute("""
SELECT discount_value, id
FROM res_partner
WHERE id = %d""" % context['partner_id'])
res = cr.fetchall()
if res[0][0]:
return res[0][0]
else:
return False
else:
return False
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
_columns = {
# TODO remove (put in correct module mx_discount_scale_order
# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
'multi_discount_rates': fields.char('Discount scale', size=30),
'price_use_manual': fields.boolean('Use manual net price',
help="If specificed use manual net price instead of "
"lord price - discount"),
'price_unit_manual': fields.float(
'Manual net price', digits_compute=dp.get_precision('Sale Price')),
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
'image_http': fields.boolean('Has image',
help="Has link for image on the web"),
'image_replace_name':fields.char('Override name',
size=30,
help="Usually the name is art. code + '.PNG', es. 400.PNG"
"if you want to change write the name in this field!"),
}
# TODO remove (put in correct module mx_discount_scale_order
# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
_defaults = {
'multi_discount_rates': _discount_rates_get,
'discount': _discount_value_get,
}
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
# fiam_partner.py
Add zone management. TODO: maybe better placed in a separate module
Add extra fields populated from accounting -> maybe better in a separate module
"""
# TODO move in new module!!!!
class ResPartnerZone(orm.Model):
_name = 'res.partner.zone'
_description = 'Partner Zone'
_order = 'type,name'
_columns = {
'name':fields.char('Zone', size=64, required=True),
'mexal_id': fields.integer('Mexal ID'),
'type': fields.selection([
('region', 'Region'),
('state', 'State'),
('area', 'Area'),
], 'Tipo', required=True),
}
_defaults = {
'type': lambda *a: 'state',
}
class ResPartnerExtraFields(orm.Model):
    _inherit = 'res.partner'
def _function_statistics_invoice(
self, cr, uid, ids, args, field_list, context=None):
'''
Calculate up or down of invoice:
        @param cr: the database cursor,
        @param uid: the current user's ID for security checks,
        @param context: a standard dictionary for contextual values
        @return: dictionary keyed by partner id with the invoice trend fields
'''
if context is None:
context = {}
res = {}
for partner in self.browse(cr, uid, ids, context=context):
if partner.invoiced_current_year == partner.invoiced_last_year:
segno = 'equal'
valore = 0.0
else:
if partner.invoiced_last_year:
valore = 100.0 * (
partner.invoiced_current_year -
partner.invoiced_last_year) / partner.invoiced_last_year
else:
valore = 100.0
if partner.invoiced_current_year < partner.invoiced_last_year:
segno = 'down'
else:
segno = 'up'
res[partner.id] = {}
res[partner.id]['invoice_trend'] = segno
res[partner.id]['invoice_trend_perc'] = valore
return res
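    # Illustrative example (values made up): with invoiced_last_year = 1000.0 and
    # invoiced_current_year = 1200.0 this returns invoice_trend = 'up' and
    # invoice_trend_perc = 20.0; if the values differ and last year is zero,
    # the percentage is reported as 100.0.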
_columns = {
'zone_id': fields.many2one('res.partner.zone', 'Zone'),
'mexal_province': fields.char('MX province', size=9),
# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
# TODO MOVE IN fido_management:
'fido_date': fields.date('FIDO Date'),
'fido_ko': fields.boolean('No FIDO'),
'fido_total': fields.float('Totale fido', digits=(16, 2)),
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
'mexal_note': fields.text('Mexal Note'),
'import': fields.char('ID import', size=10),
# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
# NO MORE USED:
'mexal_c': fields.char('Mexal cliente', size=9),
'mexal_s': fields.char('Mexal fornitore', size=9),
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
'fiscal_id_code': fields.char('Fiscal code', size=16),
'private': fields.boolean('Private'),
'type_cei': fields.char('Type CEI', size=1),
# TODO remove (put in correct module mx_discount_scale_order
# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
'discount_value': fields.float('Discount value', digits=(16, 2)),
'discount_rates':fields.char('Discount scale', size=30),
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Statistics values:
# TODO Override fields and calculate with internal data not MX data
# vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
'date_last_ddt': fields.datetime('Date last DDT'),
'day_left_ddt': fields.integer('Day left last DDT'),
'invoiced_current_year': fields.float(
'Current invoiced', digits=(16, 2)),
'invoiced_last_year': fields.float('Last invoiced', digits=(16, 2)),
'order_current_year': fields.float('Current order', digits=(16, 2)),
'order_last_year': fields.float('Last order', digits=(16, 2)),
'invoice_trend': fields.function(
_function_statistics_invoice, method=True, type='selection',
selection=[
('down','<'),
('equal','='),
('up','>'), ],
string='Invoice status', store=True, readonly=True,
multi='invoice_stat'),
'invoice_trend_perc': fields.function(
_function_statistics_invoice, method=True, type='float',
digits=(16,2), string='Invoice diff. %', store=True, readonly=True,
multi='invoice_stat'),
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
'type_id': fields.many2one(
'crm.tracking.campaign',
# NOTE ex: 'crm.case.resource.type',
'Campaign'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| 3,122,961,838,378,075,600 | 37.565934 | 86 | 0.557701 | false |
inertialsense/InertialSenseSDK
|
python/pylib/ISToolsDataSorted.py
|
1
|
51887
|
'''
Created on Feb 22, 2014
@author: waltj
'''
from numbers import Number
import numpy as np
import os
import glob
import sys
import simplekml
import ctypes as ct
import pylib.pose as pose
import pylib.filterTools as ft
# Profiling code
import time as systime
from numpy import uint8 as u8
from numpy import uint16 as u16
from numpy import uint32 as u32
from numpy import int32 as i32
from numpy import float32 as f32
from numpy import int64 as i64
from numpy import float64 as f64
import datetime
# Set Reference LLA (deg, deg, m) used for NED - Salem, UT
refLla = np.r_[40.0557114, -111.6585476, 1426.77]
gpsWeek = 0
showUtcTime = 0
# Set Reference latitude, longitude, height above ellipsoid (deg, deg, m) used for NED calculations
def setRefLla(lla):
global refLla
refLla = lla
def setShowUtcTime(show):
global showUtcTime
showUtcTime = show
WEEK_TIME = []
def setGpsWeek(week):
global gpsWeek
global WEEK_TIME
# Search for a valid GPS week
size = np.shape(week)
if size and size[0] > 1:
# if week[0]:
# week = week[0]
# else:
# week = week[-1]
week = np.max(week)
if week > gpsWeek:
gpsWeek = week
GPS_start_Time = datetime.datetime.strptime('6/Jan/1980', "%d/%b/%Y")
WEEK_TIME = GPS_start_Time + (datetime.timedelta(weeks=int(week)))
def getTimeFromTowMs(ms):
global WEEK_TIME
return [WEEK_TIME + datetime.timedelta(milliseconds=int(i)) for i in ms]
def getTimeFromTow(s):
global WEEK_TIME
return [WEEK_TIME + datetime.timedelta(seconds=float(i)) for i in s]
def getTimeFromGTime(gtime):
GPS_start_Time = datetime.datetime.strptime('1/Jan/1970', "%d/%b/%Y")
return [GPS_start_Time + datetime.timedelta(seconds=float(t['time'] + t['sec'])) for t in gtime]
# import time
# Default run behavior
# execfile("..\INS_logger\IsParseLoggerDat.py")
# def getdict(self):
# dict((f, getattr(self, f)) for f, _ in self._fields_)
# Empty class/dictionary
class cObj:
def __init__(self):
# self.res = []
return
class cDataType:
def __init__(self, name='', dtype=0):
self.name = name
self.dtype = dtype
def set(self, name, dtype):
self.name = name
self.dtype = dtype
# def nameID(self, did, name ):
# self.id = did
# self.name = name
#
# def dType(self, dtype):
# self.dtype = dtype
def vector3(_v, name):
return np.c_[_v[name + '[0]'].T, _v[name + '[1]'].T, _v[name + '[2]'].T]
def vector4(_v, name):
return np.c_[_v[name + '[0]'].T, _v[name + '[1]'].T, _v[name + '[2]'].T, _v[name + '[3]'].T]
# Raw GPS message type codes: plain integer constants, compared against the
# raw-data header 'type' field in parse_raw_gps() below.
RAW_DATA_OBS = 1
RAW_DATA_EPH = 2
RAW_DATA_GEPH = 3
RAW_DATA_SBAS = 4
RAW_DATA_STA = 5
RAW_DATA_RTK_SOL = 123
dtypeGpsRaw = np.dtype([
('dataSerNum', u32),
('receiverIndex', u8),
('type', u8),
('count', u8),
('reserved', u8)
])
dtypeGtime = np.dtype([
('time', i64),
('sec', f64)])
dtypeEph = np.dtype([
('sat', i32),
('iode', i32),
('iodc', i32),
('sva', i32),
('svh', i32),
('week', i32),
('code', i32),
('flag', i32),
('toe', dtypeGtime),
('toc', dtypeGtime),
('ttr', dtypeGtime),
('A', f64),
('e', f64),
('i0', f64),
('OMG0', f64),
('omg', f64),
('M0', f64),
('deln', f64),
('OMGd', f64),
('idot', f64),
('crc', f64),
('crs', f64),
('cuc', f64),
('cus', f64),
('cic', f64),
('cis', f64),
('toes', f64),
('fit', f64),
('f0', f64),
('f1', f64),
('f2', f64),
('tgd', (f64, 4)),
('Adot', f64),
('ndot', f64),
])
dtypeGEph = np.dtype([
('sat', i32),
('iode', i32),
('frq', i32),
('svh', i32),
('sva', i32),
('age', i32),
('toe', dtypeGtime),
('tof', dtypeGtime),
('pos', (f64, 3)),
('vel', (f64, 3)),
('acc', (f64, 3)),
('taun', f64),
('gamn', f64),
('dtaun', f64)
])
dtypeSbas = np.dtype([
('week', i32),
('tow', i32),
('prn', i32),
('msg', (u8, 29)),
('reserved', (u8, 3)),
])
dtypeSta = np.dtype([
('deltype', i32),
('pos', (f32, 3)),
('delta', (f32, 3)),
('hgt', f32),
('stationId', i32),
])
dtypeObsD = np.dtype([
('time', dtypeGtime),
('sat', u8),
('rcv', u8),
('SNR', u8),
('LLI', u8),
('code', u8),
('qualL', u8),
('qualP', u8),
('reserved', u8),
('L', f64),
('P', f64),
('D', f32)
])
class cDevice:
def __init__(self, index, directory, serialNumber, refIns=None):
global refLla
self.unknownDidDisplayed = {}
self.serialNumber = serialNumber
self.dtCnkHdr = np.dtype([
('marker', u32),
('version', u16),
('classification', u16),
('name', np.dtype((str, 4))),
('invName', np.dtype((str, 4))),
('dataSize', u32),
('invDataSize', u32),
('grpNum', u32),
('devSerialNum', u32),
('pHandle', u32),
('reserved', u32),
])
self.dtCnkSubHdr = np.dtype([
('dHdr', [('id', u32),
('size', u32),
('offset', u32), ]),
('dCount', u32),
])
# Data info
        self.DID_COUNT = 92  # must exceed the largest DID index used below (di[91])
self.di = [cDataType() for i in range(self.DID_COUNT)]
self.di[1].set('devInfo', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('reserved', u32),
('serialNumber', u32),
('hardwareVer', (u8, 4)),
('firmwareVer', (u8, 4)),
('build', u32),
('commVer', (u8, 4)),
('repoRevision', f32),
('manufacturer', np.dtype((str, 24))),
('buildDate', (u8, 4)),
('buildTime', (u8, 4)),
('addInfo', np.dtype((str, 24))),
]))
dtypeImu = np.dtype([
('pqr', (f32, 3)),
('acc', (f32, 3)),
])
# 2 'crashInfo'
self.di[3].set('preintegratedImu', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('time', f64),
('theta1', (f32, 3)),
('theta2', (f32, 3)),
('vel1', (f32, 3)),
('vel2', (f32, 3)),
('dt', f32),
]))
self.di[4].set('ins1', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('week', u32),
('tow', f64),
('iStatus', u32),
('hStatus', u32),
('euler', (f32, 3)),
('uvw', (f32, 3)),
('lla', (f64, 3)),
('ned', (f32, 3)),
]))
self.di[5].set('ins2', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('week', u32),
('tow', f64),
('iStatus', u32),
('hStatus', u32),
('q', (f32, 4)),
('uvw', (f32, 3)),
('lla', (f64, 3)),
]))
dtypeGpsPos = np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('week', u32),
('timeOfWeekMs', u32),
('status', u32),
('ecef', (f64, 3)),
('lla', (f64, 3)),
('hMSL', f32),
('hAcc', f32),
('vAcc', f32),
('pDop', f32),
('cnoMean', f32),
('towOffset', f64)
])
self.di[6].set('gps1UbxPos', dtypeGpsPos)
# 7 'config'
# 8 'asciiBCastPeriod'
dtStartVars = np.dtype([
('lla', (f64, 3)),
('uvw', (f32, 3)),
('q', (f32, 4)),
])
self.di[9].set('insMisc', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('tow', f64),
('towMs', u32),
('x', dtStartVars),
('theta', (f32, 3)),
('ned', (f32, 3)),
('dcm', (f32, 9)),
('pqr', (f32, 3)),
('acc', (f32, 3)),
('mag', (f32, 3)),
('mslBar', f32),
]))
self.di[10].set('sysParams', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('towMs', u32),
('iStatus', u32),
('hStatus', u32),
('imuTemp', f32),
('baroTemp', f32),
('mcuTemp', f32),
('reserved1', f32),
('sampleDtMs', u32),
('insDtMs', u32),
('reserved2', (f32, 4)),
('genFaultcode', u32),
]))
# 11 'sysSensors'
self.di[12].set('flashConfig', np.dtype([
('dataSerNum', u32), # Indicates serial order in ti
('size', u32),
('checksum', u32),
('key', u32),
('startupSampleDtMs', u32),
('startupNavDtMs', u32),
('ser0BaudRate', u32),
('ser1BaudRate', u32),
('insRotation', (f32, 3)),
('insOffset', (f32, 3)),
('gps1AntOffset', (f32, 3)),
('insDynModel', u32),
('sysCfgBits', u32),
('refLla', (f64, 3)),
('lastLla', (f64, 3)),
('lastLlaTimeOfWeekMs', u32),
('lastLlaWeek', u32),
('lastLlaUpdateDistance', f32),
('ioConfig', u32),
('cBrdConfig', u32),
('gps2AntOffset', (f32, 3)),
('zeroVelRotation', (f32, 3)),
('zeroVelOffset', (f32, 3)),
('magInclination', f32),
('magDeclination', f32),
('gpsTimeSyncPulsePeriodMs', u32),
('startupGPSDtMs', u32),
('RTKCfgBits', u32),
('reserved', u32),
('ser2BaudRate', u32),
]))
self.di[13].set('gps1Pos', dtypeGpsPos)
self.di[14].set('gps2Pos', dtypeGpsPos)
# 15 'gps1Cno'
# 16 'gps2Cno'
# 17 'gps2Version'
# 18 'gps2Version'
# 19 'magCal'
self.di[20].set('insResources', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('towMs', u32),
('x_dot', dtStartVars),
('magYawOffset', f32),
]))
self.di[21].set('gps1RtkPosRel', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('timeOfWeekMs', u32 ),
('differentialAge', f32 ),
('arRatio', f32 ),
('vectorToBase', (f32, 3)),
('distanceToBase', f32 ),
('headingToBase', f32 ),
]))
self.di[22].set('gps1RtkPosMisc', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('timeOfWeekMs', u32),
('accuracyPos', (f32, 3)),
('accuracyCov', (f32, 3)),
('arThreshold', f32),
('gDop', f32),
('hDop', f32),
('vDop', f32),
('baseLla', (f64, 3)),
('cycleSlipCount', u32),
('roverGpsObservationCount', u32),
('baseGpsObservationCount', u32),
('roverGlonassObservationCount', u32),
('baseGlonassObservationCount', u32),
('roverGalileoObservationCount', u32),
('baseGalileoObservationCount', u32),
('roverBeidouObservationCount', u32),
('baseBeidouObservationCount', u32),
('roverQzsObservationCount', u32),
('baseQzsObservationCount', u32),
('roverGpsEphemerisCount', u32),
('baseGpsEphemerisCount', u32),
('roverGlonassEphemerisCount', u32),
('baseGlonassEphemerisCount', u32),
('roverGalileoEphemerisCount', u32),
('baseGalileoEphemerisCount', u32),
('roverBeidouEphemerisCount', u32),
('baseBeidouEphemerisCount', u32),
('roverQzsEphemerisCount', u32),
('baseQzsEphemerisCount', u32),
('roverSbasCount', u32),
('baseSbasCount', u32),
('baseAntennaCount', u32),
('ionUtcAlmCount', u32)
]))
# 23 'Feature Bits'
dtypeSensorsMpuWTemp = np.dtype([
('pqr', (f32, 3)),
('acc', (f32, 3)),
('mag', (f32, 3)),
('temp', f32),
])
self.di[24].set('sensorsIs1', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('mpu', (dtypeSensorsMpuWTemp, 2)),
]))
# 25 'Sensor IS2'
# 26 'Sensor TC Bias'
self.di[27].set('sensorBias', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('towMs', u32),
('pqr', (f32, 3)),
('acc', (f32, 3)),
('mslBar', f32),
('magI', (f32, 3)),
('magB', (f32, 3)),
]))
# 28 'Sensor ADC'
# 29 'SCOMP'
dtypeGpsVel = np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('timeOfWeekMs', u32),
('velEcef', (f32, 3)),
('sAcc', f32)
])
self.di[30].set('gps1Vel', dtypeGpsVel)
self.di[31].set('gps2Vel', dtypeGpsVel)
# 32 'HDW params'
# 33-37 Flash
# 38 'RTOS Info'
self.di[39].set('debugArray', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('i', (i32, 9)),
('f', (f32, 9)),
('lf', (f64, 3)),
]))
self.di[47].set('insDev1', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('week', u32),
('tow', f64),
('iStatus', u32),
('hStatus', u32),
('euler', (f32, 3)),
('uvw', (f32, 3)),
('lla', (f64, 3)),
('ned', (f32, 3)),
('eulerErr', (f32, 3)),
('uvwErr', (f32, 3)),
('nedErr', (f32, 3)),
]))
self.di[48].set('ekfStates', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('time', f64),
('qe2b', (f32, 4)),
('ve', (f32, 3)),
('ecef', (f64, 3)),
('biasPqr', (f32, 3)),
('biasAcc', (f32, 3)),
('biasBaro', f32),
('magDec', f32),
('magInc', f32),
]))
# 49 'EKF Covariance'
# 50 'EKF Innovations'
# 51 'EKF Innovations Var'
self.di[52].set('magnetometer1', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('time', f64),
('mag', (f32, 3)),
]))
self.di[53].set('barometer', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('time', f64),
('bar', f32),
('mslBar', f32),
('barTemp', f32),
('humidity', f32),
]))
self.di[54].set('gps1RtkPos', dtypeGpsPos)
self.di[55].set('gps1RtkCmpRel', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('timeOfWeekMs', u32 ),
('differentialAge', f32 ),
('arRatio', f32 ),
('vectorToBase', (f32, 3)),
('distanceToBase', f32 ),
('headingToBase', f32 ),
]))
self.di[56].set('gpsVersion', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('swVersion', np.dtype((str, 30))),
('hwVersion', np.dtype((str, 10))),
('extension', np.dtype((str, 30))),
('reserved', (u32, 2)),
]))
# 57 'Communications Loopback'
self.di[58].set('dualImu', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('time', f64),
('I', (dtypeImu, 2)),
]))
self.di[59].set('inl2MagObs', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('towMs', u32),
('Ncal_samples', u32),
('ready', u32),
('calibrated', u32),
('auto-recal', u32),
('outlier', u32),
('magHeading', f32),
('insHeading', f32),
('magInsHdgDelta', f32),
('nis', f32),
('nis_threshold', f32),
('Wcal', (f32, 9)),
('activeCalSet', u32),
('magHeadingOffset', f32),
]))
# 60 - Raw GPS Ephemeris and Observation from Base
self.di[60].set('GPSBaseRaw', dtypeGpsRaw)
# 61 - RTK Options
# 62 - Internal User page Info
# 63 - Manufacturing Info
# 64 - Self Test
# 65 - INS - 3 - ECEF Position & Quaternions NED
# 66 - INS - 4 - ECEF Position & Quaternions ECEF
self.di[67].set('inl2Variance', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('towMs', u32),
('PxyxNED', (f32, 3)),
('PvelNED', (f32, 3)),
('PattNED', (f32, 3)),
('PABias', (f32, 3)),
('PWBias', (f32, 3)),
('PBaroBias', f32),
('PDeclination', f32),
]))
# 68 - Strobe input time
self.di[69].set('GPS1Raw', dtypeGpsRaw)
self.di[70].set('GPS2Raw', dtypeGpsRaw)
self.di[91].set('gps1RtkCmpMisc', np.dtype([
('dataSerNum', u32), # Indicates serial order in time
('timeOfWeekMs', u32),
('accuracyPos', (f32, 3)),
('accuracyCov', (f32, 3)),
('arThreshold', f32),
('gDop', f32),
('hDop', f32),
('vDop', f32),
('baseLla', (f64, 3)),
('cycleSlipCount', u32),
('roverGpsObservationCount', u32),
('baseGpsObservationCount', u32),
('roverGlonassObservationCount', u32),
('baseGlonassObservationCount', u32),
('roverGalileoObservationCount', u32),
('baseGalileoObservationCount', u32),
('roverBeidouObservationCount', u32),
('baseBeidouObservationCount', u32),
('roverQzsObservationCount', u32),
('baseQzsObservationCount', u32),
('roverGpsEphemerisCount', u32),
('baseGpsEphemerisCount', u32),
('roverGlonassEphemerisCount', u32),
('baseGlonassEphemerisCount', u32),
('roverGalileoEphemerisCount', u32),
('baseGalileoEphemerisCount', u32),
('roverBeidouEphemerisCount', u32),
('baseBeidouEphemerisCount', u32),
('roverQzsEphemerisCount', u32),
('baseQzsEphemerisCount', u32),
('roverSbasCount', u32),
('baseSbasCount', u32),
('baseAntennaCount', u32),
('ionUtcAlmCount', u32)
]))
# Profiling
timeStart = systime.time()
self.loadTime = 0
self.unknownId = {}
self.directory = directory
self.serialNumber = serialNumber
self.rdat = {} # Raw data in python list format
self.data = {} # data in numpy format
self.index = index # index in all serial numbers
self.refLla = refLla
# self.version = []
# self.units = []
if refIns is not None:
print("#%2d Opening: Ref INS %s" % (index, directory))
fileMask = "LOG_REF_INS*.dat"
# Use first file in directory if not defined
else:
print("#%2d Opening: %s %s" % (index, serialNumber, directory))
fileMask = "LOG_" + serialNumber + "*.sdat"
if not os.path.isdir(directory):
print("Directory doesn't exist!")
sys.exit()
os.chdir(directory)
self.fileNames = glob.glob(fileMask)
if not self.fileNames:
# print(" ***** Files not found! Check directory name and serial number. ***** ")
raise Exception('Load Error: .sdat files not found.')
self.parse()
self.clean()
# Profiling
self.loadTime = systime.time() - timeStart
print("Load time: %.2fs" % (self.loadTime))
def clean(self):
for key, item in self.data.iteritems():
if not isinstance(item, np.ndarray):
continue
for field in ['towMs', 'timeOfWeekMs', 'tow']:
if field in item.dtype.names:
if (np.diff(item[field].astype(np.int64)) < 0).any():
idx = np.argmin(np.diff(item[field].astype(np.int64)))
print("\033[93m" + "Time went backwards in ", key, r"!!!, removing all data " + ("before" if idx < len(item[field])/2.0 else "after") + "\033[0m")
if idx < len(item[field])/2.0:
self.data[key] = item[idx +1:]
else:
self.data[key] = item[:idx]
ms_multiplier = 1000.0 if 'Ms' in field else 1.0
if (np.diff(item[field]) > 3600 * ms_multiplier).any():
print("\033[93m" + "greater than 1 minute gap in ", key, " data, assuming GPS fix was acquired during data set, and chopping data"+ "\033[0m")
idx = np.argmax(np.diff(item[field])) + 1
self.data[key] = item[idx:]
def parse(self):
self.curTime = np.r_[0]
self.raw_gps_keys = []
# Iterate over files to concatenate data
self.fileNames.sort()
for fileName in self.fileNames:
print(fileName)
self.__parseFile(fileName)
# set the raw GPS dictionary as a datatype
for name in self.raw_gps_keys:
for key, item in self.data[name].iteritems():
self.data[name][key] = np.array(item)
if 'ins2' in self.data.keys():
setGpsWeek(self.data['ins2']['week'][0])
def parse_raw_gps(self, f, did, dati, sHdr, cHdr):
valid_types = [1, 2, 3, 4, 5, 6, 123]
valid_receiver_indexes = [1, 2, 3, 4]
if dati.name not in self.raw_gps_keys:
self.raw_gps_keys.append(dati.name)
buf = np.fromfile(f, np.uint8, count=cHdr['dataSize'])
for i in range(sHdr['dCount']):
pointer = 0
hdr_size = np.dtype(dtypeGpsRaw).itemsize
gps_raw_header = buf[pointer:pointer + hdr_size].view(dtypeGpsRaw)
pointer += hdr_size
# Pull in the header data
try:
type = gps_raw_header['type'][0]
count = gps_raw_header['count'][0]
receiverIndex = gps_raw_header['receiverIndex'][0]
assert (type in valid_types and receiverIndex in valid_receiver_indexes)
if dati.name not in self.data.keys():
self.data[dati.name] = {'dataSerNum': [gps_raw_header['dataSerNum'][0]],
'receiverIndex': [receiverIndex],
'type': [type],
'count': [count],
'corrupt_data': 0}
else:
self.data[dati.name]['dataSerNum'].append(gps_raw_header['dataSerNum'][0])
self.data[dati.name]['receiverIndex'].append(gps_raw_header['receiverIndex'][0])
self.data[dati.name]['type'].append(type)
self.data[dati.name]['count'].append(count)
except:
print("invalid raw gps header: type=", type, "count = ", count, "receiverIndex = ", receiverIndex)
self.data[dati.name]['corrupt_data'] += 1
continue
if type == RAW_DATA_OBS:
try:
bytes_in_payload = np.dtype(dtypeObsD).itemsize * count
obs = buf[pointer:pointer + bytes_in_payload].view(dtypeObsD)
pointer += bytes_in_payload
if 'obs' not in self.data[dati.name]:
self.data[dati.name]['obs'] = np.rec.array(obs)
else:
self.data[dati.name]['obs'] = np.hstack((self.data[dati.name]['obs'], np.rec.array(obs)))
except:
print("badly formed raw gps data - DID: %d type: Obs, count: %d, actual: %f" %
(did, count, (len(buf) - 8) / (float(np.dtype(dtypeObsD).itemsize))))
self.data[dati.name]['corrupt_data'] += 1
continue
def __parseFile(self, filename):
with open(filename, 'rb') as f:
while 1:
# Read and validate chunk header
cHdr = np.fromfile(f, dtype=self.dtCnkHdr, count=1)
count = cHdr['dataSize']
if np.shape(cHdr)[0] == 0 or cHdr['marker'][0] != 0xFC05EA32:
# print( "Done parsing data!" )
break
# Read chunk sub header
sHdr = np.fromfile(f, dtype=self.dtCnkSubHdr, count=1)
# Find ID
did = sHdr['dHdr']['id'][0]
dsize = sHdr['dHdr']['size'][0]
# if did == 6:
# print( "DID: ",did )
if did >= self.DID_COUNT:
if did not in self.unknownDidDisplayed.keys():
self.unknownDidDisplayed[did] = True
print("==============================================================================")
print(" - ERROR - Data ID " + str(did) + " out of range " + str(
self.DID_COUNT) + ". Please add missing DID definitions to ISToolsDataSorted.pyx.")
print("==============================================================================")
did = 0
self.unknownDidDisplayed[did] = True
systime.sleep(0.5)
dati = self.di[did]
if dati.dtype:
if dsize == (dati.dtype.itemsize - 4):
# Known data type
# print("Found id: ", did)
cDat = np.fromfile(f, dati.dtype, count=sHdr['dCount'])
if dati.name in self.data.keys():
# Append
# self.data[dati.name].append(cDat)
self.data[dati.name] = np.concatenate([self.data[dati.name], cDat])
else:
# Create
self.data[dati.name] = cDat
# Handle Raw data differently (because it changes sizes and carries multiple messages)
elif dati.dtype == dtypeGpsRaw:
self.parse_raw_gps(f, did, dati, sHdr, cHdr)
else:
# Mismatched data size
print("==============================================================================")
print(" - ERROR - Data ID", did, "(" + dati.name + ") mismatched size. Read", dsize, "expected", dati.dtype.itemsize - 4)
print("==============================================================================")
# systime.sleep(0.5)
# sys.exit()
cDat = np.fromfile(f, np.uint8, count=cHdr['dataSize'][0])
else:
# Unknown data type
if did not in self.unknownDidDisplayed.keys():
self.unknownDidDisplayed[did] = True
print("Undefined DID: ", did)
cDat = np.fromfile(f, np.uint8, count=cHdr['dataSize'][0])
class cDevices:
def __init__(self):
self.devices = []
self.loadTime = 0 # Profiling
# Load data to be viewed. If the "selection.txt" file is found, the line by line contents
# of selection.txt specify an additional subdirectory and list of serial numbers to be loaded.
# If serial numbers are not specified, either in selection.txt or in the loadData() parameter,
# then all serial numbers and files are read.
# directory Directory data is loaded from. If not specified, the current directory is used. If no data found, use latest data sub directory.
# serialNumbers Device serial numbers to load. If not specified, all serial numbers and files found are loaded.
# startDev First index of found devices (serial numbers) to load.
# devCount Number of devices (serial numbers) to load.
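    # Illustrative selection.txt (format inferred from the parsing below; the
    # directory name and serial numbers are made up):
    #     20180314_103000
    #     SN30123
    #     SN30124
    # Everything after the first blank line is ignored.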
def loadData(self, directory=None, serialNumbers=None, refIns=None, startDev=0, devCount=-1):
# Profiling
self.loadTime = 0
timeLoadStart = systime.time()
# We don't support reference INS right now
if refIns != None:
raise Exception('refIns not supported right now.')
if directory is not None:
# Convert backslash to forward slash (Windows to Linux)
directory = directory.replace('\\', '/')
if '~' in directory:
pass
# Automatically open logs specified in "selection.txt"
os.chdir(directory)
# Use selection file if it exists
selectionFileName = 'selection.txt'
if os.path.exists(selectionFileName):
with open(selectionFileName) as f:
lines = f.read().splitlines()
# Convert backslash to forward slash (Windows to Linux)
directory += lines[0].replace('\\', '/')
# Read serial numbers from selection.txt
serialNumbers = []
for serNum in lines[1:]:
# Stop if we find a blank line
if serNum == '':
break
serialNumbers.append(serNum)
# If current directory has NO data, use newest sub directory containing data.
files = os.listdir(directory)
if not any(".sdat" in s for s in files):
dirName = None
dirTime = 0
for fname in files:
# Has data log directory name format
if len(fname) >= 15 and fname[0:2] == '20' and fname[8:9] == '_':
dTime = int(fname[0:8] + fname[9:15])
# Is latest
if dTime > dirTime:
dirTime = dTime
dirName = fname
if dirName != None:
directory += dirName
# Print directory
print("Loading Data: ", directory)
# Add all devices in directory
if serialNumbers is None or serialNumbers == []:
# Find list of serial numbers from files in directory
files = os.listdir(directory)
serNums = []
for str in files:
if str.find('.sdat') != -1:
str = str.replace('.sdat', '')
if str.find('LOG_SN') != -1:
str = str[4:11]
if not str in serNums:
serNums.append(str)
elif str.find('LOG_PR') != -1:
str = str.replace('LOG_', '')
str = str[:str.find('_')]
if not str in serNums:
serNums.append(str)
serialNumbers = serNums
count = len(serialNumbers)
# Validate serial numbers
if count <= 0:
raise Exception('Load Error: .sdat files not found.')
# Find size and last index
if devCount > 0 and devCount < count:
count = devCount
endIndex = min(startDev + count, len(serialNumbers))
# print ("Start Index: ", startDev, " End Index: ", endIndex)
# Add devices
for i in range(startDev, endIndex):
device = cDevice(i, directory, serialNumbers[i], refIns)
self.devices.append(device)
# Profiling
self.loadTime = systime.time() - timeLoadStart
print("Total load time: %.2fs" % (self.loadTime))
def gpsTimeToUTC(gpsWeek, gpsSOW, leapSecs=14):
global showUtcTime
if showUtcTime == 0:
return gpsSOW
# Search for a valid GPS week
size = np.shape(gpsWeek)
if size and size[0] > 1:
# if gpsWeek[0] == 0:
# gpsWeek = gpsWeek[-1]
# Use the largest value for the week
gpsWeek = np.max(gpsWeek)
if gpsWeek == 0:
return gpsSOW
secsInWeek = 604800
# secsInDay = 86400
gpsEpoch = (1980, 1, 6, 0, 0, 0) # (year, month, day, hh, mm, ss)
# secFract = gpsSOW % 1
epochTuple = gpsEpoch + (-1, -1, 0)
t0 = systime.mktime(epochTuple) - systime.timezone # mktime is localtime, correct for UTC
tdiff = (gpsWeek * secsInWeek) + gpsSOW - leapSecs
t = t0 + tdiff
return t
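# Illustrative behavior: with showUtcTime enabled and a valid (non-zero) week, the
# result is t0 + gpsWeek*604800 + gpsSOW - leapSecs, where t0 is the Unix time of
# the GPS epoch (1980-01-06 00:00 UTC); when showUtcTime is 0 or the week is 0,
# gpsSOW is returned unchanged.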
def join_struct_arrays(arrays):
sizes = np.array([a.itemsize for a in arrays])
offsets = np.r_[0, sizes.cumsum()]
n = len(arrays[0])
joint = np.empty((n, offsets[-1]), dtype=np.uint8)
for a, size, offset in zip(arrays, sizes, offsets):
joint[:, offset:offset + size] = a.view(np.uint8).reshape(n, size)
dtype = sum((a.dtype.descr for a in arrays), [])
return joint.ravel().view(dtype)
# Join list of structured numpy arrays into one
def join_struct_arrays2(arrays):
newdtype = sum((a.dtype.descr for a in arrays), [])
newrecarray = np.empty(len(arrays[0]), dtype=newdtype)
for a in arrays:
for name in a.dtype.names:
newrecarray[name] = a[name]
return newrecarray
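# Illustrative (hypothetical) usage of join_struct_arrays2 with two equal-length
# structured arrays; the field names here are made up:
#   a = np.zeros(3, dtype=[('tow', np.float64)])
#   b = np.zeros(3, dtype=[('lla', (np.float64, 3))])
#   ab = join_struct_arrays2([a, b])   # ab.dtype.names == ('tow', 'lla')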
class cSIMPLE:
def __init__(self, _v):
self.v = _v
class cIMU:
def __init__(self, _v):
global gpsWeek
self.v = _v
self.__flt = cObj()
self.__flt.pqr = None
self.__flt.acc = None
self.__flt.pqrNoBias = None
self.__flt.accNoBias = None
self.__flt.barNoBias = None
self.cornerFreqHz = 60
# self.cornerFreqHz = 30
# self.cornerFreqHz = 15
self.time = gpsTimeToUTC(gpsWeek, self.v['time'])
self.i = [cObj(), cObj()]
for j in range(0, 2):
self.i[j].pqr = None
self.i[j].acc = None
# Dual IMU
if 'I' in self.v.dtype.names:
self.i[0].pqr = self.v['I']['pqr'][:, 0, :]
self.i[1].pqr = self.v['I']['pqr'][:, 1, :]
self.i[0].acc = self.v['I']['acc'][:, 0, :]
self.i[1].acc = self.v['I']['acc'][:, 1, :]
# Preintegrated IMU
if 'theta1' in self.v.dtype.names and 'theta2' in self.v.dtype.names:
divDt = 1.0 / self.v['dt']
self.i[0].pqr = self.v['theta1']
self.i[1].pqr = self.v['theta2']
self.i[0].acc = self.v['vel1']
self.i[1].acc = self.v['vel2']
for i in range(0, 2):
for a in range(0, 3):
self.i[i].pqr[:, a] *= divDt
self.i[i].acc[:, a] *= divDt
def fltAcc(self):
if self.__flt.acc is None:
self.__flt.acc = ft.lpfNoDelay(self.v['acc'], self.cornerFreqHz, time=self.v['time'])
return self.__flt.acc
def fltPqr(self):
if self.__flt.pqr is None:
self.__flt.pqr = ft.lpfNoDelay(self.v['pqr'], self.cornerFreqHz, time=self.v['time'])
return self.__flt.pqr
def fltPqrNoBias(self):
if 'pqrNoBias' in self.v.dtype.names and self.__flt.pqrNoBias is None:
self.__flt.pqrNoBias = ft.lpfNoDelay(self.v['pqrNoBias'], self.cornerFreqHz, time=self.v['time'])
return self.__flt.pqrNoBias
def fltAccNoBias(self):
if 'accNoBias' in self.v.dtype.names and self.__flt.accNoBias is None:
self.__flt.accNoBias = ft.lpfNoDelay(self.v['accNoBias'], self.cornerFreqHz, time=self.v['time'])
return self.__flt.accNoBias
def fltBarNoBias(self):
if 'mslBarNoBias' in self.v.dtype.names and self.__flt.barNoBias is None:
self.__flt.mslBarNoBias = ft.lpfNoDelay(self.v['mslBarNoBias'], self.cornerFreqHz, time=self.v['time'])
return self.__flt.mslBarNoBias
# self.mslBar = ft.smooth(self.v['mslBar']+72, delta=200)
# self.mslBarDot = ft.derivative(self.v['time'], self.mslBar, delta=10)
# self.mslBarDotLpf = ft.lpfNoDelay(self.mslBarDot, cornerFreqHz=0.5, time = self.v['time'])
class cINS:
def __init__(self, _v):
# self.v = _v
self.v = _v[:-1] # Throw out last element
self.__velNED = None
self.__course = None
self.__ecef = None
self.__ned = None
self.__istatus = None
self.__hstatus = None
self.__size = np.shape(self.v['tow'])[0]
self.time = gpsTimeToUTC(self.v['week'], self.v['tow'])
if not 'euler' in self.v.dtype.names and 'q' in self.v.dtype.names:
# self.v['euler'] = pose.quat2eulerArray(self.v['q'])
# self.euler = pose.quat2eulerArray(self.v['q'])
dtypeeuler = np.dtype([('euler', (np.float, 3))])
e = pose.quat2eulerArray(self.v['q'])
euler = np.ndarray(np.shape(e)[0], dtype=dtypeeuler, buffer=e)
self.v = join_struct_arrays2([self.v, euler])
if not 'q' in self.v.dtype.names and 'euler' in self.v.dtype.names:
# self.v['q'] = pose.euler2quatArray(self.v['euler'])
# self.q = pose.euler2quatArray(self.v['euler'])
dtypeq = np.dtype([('q', (np.float, 4))])
q = pose.euler2quatArray(self.v['euler'])
quat = np.ndarray(np.shape(q)[0], dtype=dtypeq, buffer=q)
self.v = join_struct_arrays2([self.v, quat])
# Velocity vector in inertial frame
def velNed(self):
if self.__velNED is None:
self.__velNED = np.zeros(np.shape(self.v['uvw']))
for i in range(0, self.__size):
DCM = pose.eulerDCM(self.v['euler'][i, :])
velNED = np.dot(DCM.T, self.v['uvw'][i, :]) # body to inertial frame
self.__velNED[i, :] = velNED
return self.__velNED
def course(self):
if self.__course is None:
            self.__course = np.arctan2(self.velNed()[:, 1], self.velNed()[:, 0])
return self.__course
def ned(self):
global refLla
if self.__ned is None:
self.__ned = pose.lla2ned(refLla, self.v['lla'])
return self.__ned
def ecef(self):
if self.__ecef is None:
self.__ecef = pose.lla2ecef(self.v['lla'])
return self.__ecef
def set(self, time):
self.time = time
def speed2D(self):
return np.sqrt(np.square(self.v['uvw'][:, 0]) +
np.square(self.v['uvw'][:, 1]))
def speed3D(self):
return np.sqrt(np.square(self.v['uvw'][:, 0]) +
np.square(self.v['uvw'][:, 1]) +
np.square(self.v['uvw'][:, 2]))
def iStatus(self):
if self.__istatus is None:
self.__istatus = insStatus(self.v['iStatus'])
return self.__istatus
def hStatus(self):
if self.__hstatus is None:
self.__hstatus = hdwStatus(self.v['hStatus'])
return self.__hstatus
class cRIMU:
def __init__(self, _v,
accBias=np.r_[0, 0, 0],
pqrBias=np.r_[0, 0, 0],
rotate=np.r_[0, 0, 0]):
self.v = _v
self.cornerFreqHz = 30
self.__flt = cObj()
self.__flt.pqr = None
self.__flt.acc = None
if accBias[0] != 0 or accBias[1] != 0 or accBias[2] != 0:
self.v['acc'] += accBias
if pqrBias[0] != 0 or pqrBias[1] != 0 or pqrBias[2] != 0:
self.v['pqr'] += pqrBias
if rotate[0] != 0 or rotate[1] != 0 or rotate[2] != 0:
self.v['acc'] = pose.vectorRotateInertialToBody2(self.v['acc'], rotate)
self.v['pqr'] = pose.vectorRotateInertialToBody2(self.v['pqr'], rotate)
def fltPqr(self):
if self.__flt.pqr is None:
self.__flt.pqr = ft.lpfNoDelay(self.v['pqr'], self.cornerFreqHz, time=self.v['time'])
return self.__flt.pqr
def fltAcc(self):
if self.__flt.acc is None:
self.__flt.acc = ft.lpfNoDelay(self.v['acc'], self.cornerFreqHz, time=self.v['time'])
return self.__flt.acc
class cRINS:
def __init__(self, _v, rotate=np.r_[0, 0, 0]):
global refLla
self.v = _v
self.__ned = None
self.__nedDotDot = None
self.__uvw = None
self.__rotate = rotate
# self.v['nedDot'] = ft.smooth(self.v['nedDot'], delta=10)
# self.v['euler'] = ft.smooth(self.v['euler'], delta=10)
if self.__rotate[0] != 0 or self.__rotate[1] != 0 or self.__rotate[2] != 0:
self.v['euler'][:, 0] += self.__rotate[0]
self.v['euler'][:, 1] += self.__rotate[1]
self.v['euler'][:, 2] += self.__rotate[2]
def ned(self):
if self.__ned is None:
self.__ned = pose.lla2ned(refLla, self.v['lla'])
return self.__ned
def nedDotDot(self):
if self.__nedDotDot is None:
self.__nedDotDot = ft.derivative(self.v['time'], self.v['nedDot'], delta=2)
self.__nedDotDot[:, 2] -= 9.80665
cornerFreqHz = 10
self.__nedDotDot = ft.lpfNoDelay(self.__nedDotDot, cornerFreqHz, time=self.v['time'])
return self.__nedDotDot
def uvw(self):
if self.__uvw is None:
self.__uvw = pose.vectorRotateInertialToBody(self.v['nedDot'], self.v['euler'])
if self.__rotate[0] != 0 or self.__rotate[1] != 0 or self.__rotate[2] != 0:
                self.__uvw = pose.vectorRotateInertialToBody2(self.__uvw, self.__rotate)
return self.__uvw
class cRGPS:
def __init__(self, _v):
global refLla
self.v = _v
self.__ned = None
self.__acc = cObj()
self.__acc.ned = None
def ned(self):
if self.__ned is None:
self.__ned = pose.lla2ned(refLla, self.v['lla'])
return self.__ned
def accNed(self):
if self.__acc.ned is None:
# Create Accelerations from GPS velocities
# self.__acc.ned = ft.meanDerivative(self.v['time'], self.v['vel.ned'], 5, 3)
self.__acc.ned = ft.meanDerivative(self.v['time'], self.v['vel.ned'], 2, 2)
return self.__acc.ned
class cGPS:
def __init__(self, _v):
        global refLla
        global gpsWeek
self.v = _v
self.time = gpsTimeToUTC(self.v['week'], (_v['timeOfWeekMs'] * 0.001))
self.ned = pose.lla2ned(refLla, _v['lla'])
self.satsUsed = (_v['status'] >> 0) & 0xFF
self.fixType = (_v['status'] >> 8) & 0xFF
self.rtkMode = (_v['status'] >> 20) & 0x01
# self.vectorToBase = _v['vectorToBase']
# self.distanceToBase = _v['distanceToBase']
class cGPSRaw:
def __init__(self, _v):
self.count = _v['count']
self.type = _v['type']
self.receiverIndex = _v['receiverIndex']
self.corruptCount = int(_v['corrupt_data'])
if 'obs' in _v.keys():
self.obs = _v['obs']
try:
self.obstime = np.array([np.datetime64(int(np.round((t['time'] + t['sec'])*1000000)), 'us') for t in _v['obs']['time']])
except OverflowError as e:
debug = 1
class cRTKMisc:
def __init__(self, _v):
self.v = _v
self.time = gpsTimeToUTC(_v['week'], (_v['timeOfWeekMs'] * 0.001))
self.slipCounter = _v['cycleSlipCount']
self.arThreshold = _v['arThreshold']
self.baseLla = _v['baseLla']
self.heading = _v['rtkCompassHeading']
class cGpsVel:
    def __init__(self, _v):
        global gpsWeek
        self.v = _v
        # cached acceleration and time vector (mirrors cGpsAcc); needed by acc()
        self.__acc = None
        self.time = gpsTimeToUTC(gpsWeek, (_v['timeOfWeekMs'] * 0.001))
def acc(self):
if self.__acc is None:
self.__acc = cObj()
self.__acc.time = self.time
# self.__acc.ned = ft.meanDerivative(self.vel.time, self.v['ned'], 5, 3)
self.__acc.ned = ft.meanDerivative(self.time, self.v['ned'], 2, 2)
return self.__acc
class cGpsAcc:
def __init__(self, _v):
self.v = _v
# self.time = _v['timeMs'] * 0.001
self.time = gpsTimeToUTC(self.v['week'], (_v['timeOfWeekMs'] * 0.001))
class cBias:
def __init__(self, _v):
global gpsWeek
self.v = _v
# self.time = _v['timeMs'] * 0.001
self.time = gpsTimeToUTC(gpsWeek, (_v['towMs'] * 0.001))
class cInsRes:
def __init__(self, _v):
global gpsWeek
self.v = _v
self.time = gpsTimeToUTC(gpsWeek, (_v['towMs'] * 0.001))
class cDevInfo:
def __init__(self, _v):
self.v = _v
class cSysParams:
def __init__(self, _v):
global gpsWeek
self.v = _v
self.__istatus = None
self.__hstatus = None
if 'tow' in _v.dtype.names:
# self.time = _v['time']
self.time = gpsTimeToUTC(gpsWeek, _v['tow'])
if 'towMs' in _v.dtype.names:
# self.time = (_v['timeMs']) * 0.001
self.time = gpsTimeToUTC(gpsWeek, (_v['towMs'] * 0.001))
def iStatus(self):
if self.__istatus is None:
self.__istatus = insStatus(self.v['iStatus'])
return self.__istatus
def hStatus(self):
        if self.__hstatus is None:
self.__hstatus = hdwStatus(self.v['hStatus'])
return self.__hstatus
class cObsParams:
def __init__(self, _v):
global refLla
self.v = _v
self.accNed = cObj()
self.velNed = cObj()
self.lla = cObj()
self.uvw = cObj()
# self.time = _v['timeMs'] * 0.001
# self.accNed.time = _v['accNed.timeMs'] * 0.001
# self.velNed.time = _v['velNed.timeMs'] * 0.001
# self.lla.time = _v['lla.timeMs'] * 0.001
# self.uvw.time = _v['uvw.timeMs'] * 0.001
self.time = gpsTimeToUTC(gpsWeek, (_v['towMs'] * 0.001))
self.accNed.time = gpsTimeToUTC(gpsWeek, (_v['accNed']['towMs'] * 0.001))
self.velNed.time = gpsTimeToUTC(gpsWeek, (_v['velNed']['towMs'] * 0.001))
self.lla.time = gpsTimeToUTC(gpsWeek, (_v['lla']['towMs'] * 0.001))
self.accNed.refHdg = np.arctan2(self.v['accNed']['ref'][:, 1], self.v['accNed']['ref'][:, 0])
self.accNed.insHdg = np.arctan2(self.v['accNed']['ins'][:, 1], self.v['accNed']['ins'][:, 0])
self.lla.refNed = pose.lla2ned(refLla, _v['lla']['ref'])
self.lla.insNed = pose.lla2ned(refLla, _v['lla']['ins'])
# self.v['mslBar'] += 86;
class cInsParams:
def __init__(self, _v):
global gpsWeek
self.v = _v
# self.time = _v['timeMs'] * 0.001
self.time = gpsTimeToUTC(gpsWeek, (_v['towMs'] * 0.001))
if 'magTowMs' in _v.dtype.names:
# self.magTime = (_v['magTowMs']) * 0.001
self.magTime = gpsTimeToUTC(gpsWeek, (_v['magTowMs'] * 0.001))
def lla2kml(time, lla, serialNumber, kmlFileName="log.kml", **kwargs):
kml = simplekml.Kml()
color = kwargs.pop('color', simplekml.Color.yellow)
altitudeMode = kwargs.pop('altitudeMode', simplekml.constants.AltitudeMode.absolute)
timeStep = kwargs.pop('timeStep', 0)
latLon = []
tNext = 0
lNext = 0
for i in range(0, np.shape(lla)[0]):
latLon.append((lla[i, 1], lla[i, 0], lla[i, 2]))
# Add timestamp
if timeStep:
# if timeStep == -1:
# pt = kml.newpoint(name="%.1f" % time[i], coords=[latLon[i]])
# pt.style.iconstyle.color = color
# pt.style.iconstyle.scale = 0.5
# pt.style.labelstyle.scale = 0.7
if time[i] >= tNext:
tNext += timeStep
# round(tNext, timeStep)
if time[i] >= lNext:
if timeStep > lNext:
lNext += timeStep
else:
lNext += 1
pt = kml.newpoint(name="%.2f" % time[i], coords=[latLon[i]])
else:
pt = kml.newpoint(coords=[latLon[i]])
pt.style.iconstyle.color = color
pt.style.iconstyle.scale = 0.4
pt.style.labelstyle.scale = 0.6
pt.altitudemode = altitudeMode
# Add path
ls = kml.newlinestring(name="Tracks", description=serialNumber + " tracks", coords=latLon)
# Style
ls.extrude = 1
ls.altitudemode = altitudeMode
ls.style.linestyle.width = 2
ls.style.linestyle.color = color
kml.save(kmlFileName)
return kmlFileName
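# Illustrative (hypothetical) call; the serial number, file name and coordinates
# are made up. The `lla` rows are (lat, lon, alt) as elsewhere in this module:
#   lla = np.array([[40.05, -111.65, 1426.0], [40.06, -111.66, 1430.0]])
#   t = np.array([0.0, 1.0])
#   lla2kml(t, lla, 'SN12345', kmlFileName='SN12345.kml', timeStep=1)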
##### INS Status #####
def insStatus(istatus):
result = cObj()
result.align = cObj()
result.align.coarse = cObj()
result.align.good = cObj()
result.align.fine = cObj()
# 0-3
result.align.coarse.att = (istatus >> 0) & 1
result.align.coarse.vel = (istatus >> 1) & 1
result.align.coarse.pos = (istatus >> 2) & 1
# 4-7
result.align.good.att = (istatus >> 4) & 1
result.align.good.vel = (istatus >> 5) & 1
result.align.good.pos = (istatus >> 6) & 1
result.align.fine.att = (istatus >> 7) & 1
# 8-11
result.usingGps = (istatus >> 8) & 1
result.usingMag = (istatus >> 11) & 1
# 12-15
result.navMode = (istatus >> 12) & 1
# 16-23
result.solutionStatus = (istatus >> 16) & 0x7
# 20-23
result.magActiveCalSet = (istatus >> 20) & 1
result.magRecalibrating = (istatus >> 22) & 1
result.magInterOrBadCal = ((istatus >> 23) & 1) != 1
# 24-27
# 28-31
result.rtosTaskPeriodOverrun = (istatus >> 29) & 1
result.generalFault = (istatus >> 31) & 1
return result
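# Illustrative decoding (value made up): insStatus(0x00001111) has bits 0, 4, 8
# and 12 set, so align.coarse.att, align.good.att, usingGps and navMode are all 1,
# while align.fine.att and solutionStatus remain 0.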
##### Hardware Status #####
def hdwStatus(hstatus):
result = cObj()
# 0-3
result.motionGyrSig = (hstatus >> 0) & 0x1
result.motionAccSig = (hstatus >> 1) & 0x1
result.motionGyrDev = (hstatus >> 2) & 0x1
result.motionAccDev = (hstatus >> 3) & 0x1
# 4-7
result.satellite_rx = (hstatus >> 4) & 0x1
# 8-11
result.saturationGyr = (hstatus >> 8) & 0x1
result.saturationAcc = (hstatus >> 9) & 0x1
result.saturationMag = (hstatus >> 10) & 0x1
result.saturationBaro = (hstatus >> 11) & 0x1
# 12-15
result.saturationHistory = (hstatus >> 12) & 0x1
# 16-19
result.errComTxLimited = (hstatus >> 16) & 0x1
result.errComRxOverrun = (hstatus >> 17) & 0x1
result.errGpsTxLimited = (hstatus >> 18) & 0x1
result.errGpsRxOverrun = (hstatus >> 19) & 0x1
# 20-23
result.comParseErrCount = (hstatus >> 20) & 0xF
# 24-27
result.selfTestFault = (hstatus >> 24) & 0x1
result.errTemperature = (hstatus >> 25) & 0x1
# 28-31
result.faultWatchdogReset = (hstatus >> 28) & 0x1
result.faultBODReset = (hstatus >> 29) & 0x1
result.faultPORReset = (hstatus >> 30) & 0x1
result.faultCPUErrReset = (hstatus >> 31) & 0x1
return result
|
mit
| -7,239,151,973,451,428,000 | 32.260897 | 170 | 0.488658 | false |
GPflow/GPflowOpt
|
doc/source/conf.py
|
1
|
5534
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# GPflowOpt documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 30 20:34:41 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
from gpflowopt import __version__
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'numpydoc',
'nbsphinx',
'IPython.sphinxext.ipython_console_highlighting'
]
numpydoc_show_class_members = True
numpydoc_show_inherited_class_members = True
numpydoc_class_members_toctree = False
#autoclass_content = 'both'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'GPflowOpt'
copyright = '2017, Joachim van der Herten'
author = 'Joachim van der Herten, Ivo Couckuyt'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GPflowOptdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'gpflowopt.tex', 'GPflowOpt Documentation',
'Joachim van der Herten', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'GPflowOpt', 'GPflowOpt Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GPflowOpt', 'GPflowOpt Documentation',
author, 'GPflowOpt', 'One line description of project.',
'Miscellaneous'),
]
|
apache-2.0
| -5,716,388,727,266,818,000 | 29.574586 | 98 | 0.68305 | false |
rsip22/101
|
Python/OO/cheapest_options/models/establishment.py
|
1
|
1339
|
import random
from constants import PRICE_TABLE
from enums import client
class Establishment:
"""
Class for an establishment in the chain.
Attributes:
name: (str) name of the establishment
stars: (int) establishment category
is_rewards: (bool) establishment client participates in the rewards program
"""
REGULAR = client.ClientType.REGULAR.value
REWARDS = client.ClientType.REWARDS.value
def __init__(self, stars, is_rewards=False):
self.is_rewards = is_rewards
self.stars = stars
self.name = self._get_establishment_name()
self.weekday_price = self._get_weekday_price()
self.weekend_price = self._get_weekend_price()
def _get_weekday_price(self):
if not self.is_rewards:
return PRICE_TABLE[self.stars]["weekday"][self.REGULAR]
return PRICE_TABLE[self.stars]["weekday"][self.REWARDS]
def _get_weekend_price(self):
if not self.is_rewards:
return PRICE_TABLE[self.stars]["weekend"][self.REGULAR]
return PRICE_TABLE[self.stars]["weekend"][self.REWARDS]
def _get_establishment_name(self):
establishment_for_category = PRICE_TABLE[self.stars]["establishment"]
return random.choice(establishment_for_category)
def __str__(self):
return self.name
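# Illustrative usage (results depend on the PRICE_TABLE and client enums defined
# elsewhere in this project):
#   e = Establishment(stars=3, is_rewards=True)
#   print(e, e.weekday_price, e.weekend_price)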
|
gpl-2.0
| 1,346,721,449,851,784,400 | 30.880952 | 83 | 0.659447 | false |
majek/rons
|
rons/parser.py
|
1
|
4184
|
'''
Recursive decoder for Redis protocol. The code is quite reasonable and
has no external dependencies whatsoever.
To test run:
$ python -m doctest parser.py -v
'''
import collections
import itertools
class ProtocolError(Exception): pass
EMPTY=0
BULK=1
MULTIBULK=2
_State = collections.namedtuple('State', ['s', 'l', 'r', 'state'])
def State(**kwargs):
x = {'s':None, 'l':None, 'r':None, 'state':None}
x.update(kwargs)
return _State(**x)
INITIAL_STATE=State(s=EMPTY)
def initial_state():
return INITIAL_STATE
def decode(buf, state):
    if state.s == EMPTY:
line, p, rest = buf.partition('\r\n')
if not p: return ( 0, None, INITIAL_STATE )
c, t, line_len = line[0], line[1:], len(line)+2
if c not in '+-:$*':
raise ProtocolError("Unexpected Redis response %r" % (line,))
if c in ('+', '-'):
return ( line_len, (c, t), INITIAL_STATE )
        elif c == ':':
            return ( line_len, (':', int(t)), INITIAL_STATE )
        no = int(t)
        if c == '$':
            if no == -1:
                return ( line_len, ('$', None), INITIAL_STATE )
            else:
                return ( line_len, None, State(s=BULK, l=no) )
        elif c == '*':
            if no == -1:
return ( line_len, ('*', None), INITIAL_STATE )
else:
return ( line_len, None, State(s=MULTIBULK, l=no, r=[], state=INITIAL_STATE) )
    elif state.s == BULK:
if len(buf) < state.l+2: return (0, None, state)
return ( state.l+2, ('$', buf[:state.l]), INITIAL_STATE )
    elif state.s == MULTIBULK:
        if state.l == 0:
return ( 0, ('*', state.r), INITIAL_STATE )
else:
(c, frame, new_s_state) = decode(buf, state.state)
state = state._replace(state=new_s_state)
if frame:
state = state._replace(r=state.r + [frame],
l=state.l - 1)
return (c, None, state)
def test_decode(buf):
r'''
>>> test_decode("$-1\r\n")
[('$', None)]
>>> test_decode("$6\r\nfoobar\r\n")
[('$', 'foobar')]
>>> test_decode("*0\r\n")
[('*', [])]
>>> test_decode("*-1\r\n")
[('*', None)]
>>> test_decode("*3\r\n$3\r\nfoo\r\n$-1\r\n$3\r\nbar\r\n")
[('*', [('$', 'foo'), ('$', None), ('$', 'bar')])]
>>> test_decode("*3\r\n$3\r\nSET\r\n$5\r\nmykey\r\n$7\r\nmyvalue\r\n")
[('*', [('$', 'SET'), ('$', 'mykey'), ('$', 'myvalue')])]
>>> test_decode("*4\r\n$3\r\nfoo\r\n$3\r\nbar\r\n$5\r\nHello\r\n$5\r\nWorld\r\n")
[('*', [('$', 'foo'), ('$', 'bar'), ('$', 'Hello'), ('$', 'World')])]
>>> # All at once
>>> test_decode("$-1\r\n$6\r\nfoobar\r\n*0\r\n*-1\r\n*3\r\n$3\r\nfoo\r\n$-1\r\n$3\r\nbar\r\n*3\r\n$3\r\nSET\r\$5\r\nmykey\r\n$7\r\nmyvalue\r\n*4\r\n$3\r\nfoo\r\n$3\r\nbar\r\n$5\r\nHello\r\n$5\r\nWorld\r\n")
[('$', None), ('$', 'foobar'), ('*', []), ('*', None), ('*', [('$', 'foo'), ('$', None), ('$', 'bar')]), ('*', [('$', 'SET'), ('$', 'mykey'), ('$', 'myvalue')]), ('*', [('$', 'foo'), ('$', 'bar'), ('$', 'Hello'), ('$', 'World')])]
>>> # Other things
>>> test_decode("r\r\n")
Traceback (most recent call last):
...
ProtocolError: Unexpected Redis response 'r'
>>> test_decode("+OK\r\n")
[('+', 'OK')]
>>> test_decode("-ERROR\r\n")
[('-', 'ERROR')]
>>> test_decode("$6\r\nfoo\r\n\r\r\n")
[('$', 'foo\r\n\r')]
'''
pos, state, results = 0, initial_state(), []
while True:
(consumed, frame, state) = decode(buf[pos:], state)
if frame:
results.append( frame )
elif not consumed:
break
pos += consumed
return results
def encode(arguments):
return ''.join(itertools.chain(
('*', str(len(arguments)), '\r\n'),
*(('$', str(len(a)), '\r\n', a, '\r\n') for a in arguments)))
def test_encode(arguments):
r'''
>>> test_encode(['SET', 'mykey', 'myvalue'])
'*3\r\n$3\r\nSET\r\n$5\r\nmykey\r\n$7\r\nmyvalue\r\n'
>>> test_encode(['SET'])
'*1\r\n$3\r\nSET\r\n'
>>> test_encode([])
'*0\r\n'
'''
return encode(arguments)
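# Illustrative helper (not part of the original module): shows how decode() is
# meant to be driven incrementally, carrying the parser state and any unconsumed
# bytes between reads from a socket.
def example_streaming_decode(chunks):
    r'''
    >>> example_streaming_decode(['+O', 'K\r\n:4', '2\r\n'])
    [('+', 'OK'), (':', 42)]
    '''
    buf, state, frames = '', initial_state(), []
    for chunk in chunks:
        buf += chunk
        while True:
            consumed, frame, state = decode(buf, state)
            if frame:
                frames.append(frame)
            elif not consumed:
                break
            buf = buf[consumed:]
    return frames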
|
mit
| 6,675,986,615,087,071,000 | 33.578512 | 234 | 0.48088 | false |
jameskyle/KExperiment
|
Scripts/create_task_list.py
|
1
|
1800
|
#!/opt/local/bin/python
import os
import sys
import re
import fnmatch
PROJECT_NAME = "KExperiment"
source_reg = re.compile(".*\.(cpp|h)$")
task_reg = re.compile("^\s*/+\s*(TODO|FIXME|BUG|NOTE|HACK):?\s*(.*)$", re.I)
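# Illustrative matches (example lines made up): the pattern above accepts
#   "// TODO: refactor this"      -> tag "TODO",  text "refactor this"
#   "  /// FIXME handle overflow" -> tag "FIXME", text "handle overflow"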
source_match = source_reg.match
task_match = task_reg.match
def main():
output = os.path.join(os.getcwd(), "{0}.tasks".format(PROJECT_NAME))
if len(sys.argv) < 2:
sys.stderr.write("You must provide a project root path\n")
exit(1)
if len(sys.argv) > 2:
output = os.path.abspath(sys.argv[2])
root = os.path.abspath(sys.argv[1])
matches = []
types = {
"todo": "err",
"fixme": "err",
"bug": "err",
"note": "info", # currently undefined
"hack": "warn"
}
for root, dirs, files in os.walk(root):
paths = [os.path.join(root, f) for f in filter(source_match, files)]
matches.extend(paths)
tasks = []
for source in matches:
with open(source, 'r') as f:
lines = f.readlines()
        # enumerate() keeps the reported line number correct even when the same
        # comment text appears more than once in a file.
        for line_number, line in enumerate(lines, 1):
            m = task_match(line)
            if m:
                base = os.path.relpath(source)
t = types.get(m.group(1).lower(), "info")
desc = "{0}: {1}".format(m.group(1), m.group(2))
task = "{base}\t{line}\t{type}\t{desc}"
tasks.append(task.format(base=base, line=line_number,
type=t, desc=desc))
with open(output, 'w') as f:
f.write("\n".join(tasks))
if __name__ == "__main__":
main()
|
gpl-3.0
| -2,010,988,968,146,286,000 | 28.52459 | 76 | 0.468333 | false |
energyPATHWAYS/energyPATHWAYS
|
energyPATHWAYS/dispatch_maintenance.py
|
1
|
7610
|
from pyomo.environ import *
import numpy as np
import util
import config as cfg
import pdb
import pandas as pd
import copy
import dispatch_budget
import logging
def surplus_capacity(model):
return model.surplus_capacity + model.peak_penalty * model.weight_on_peak_penalty
def define_penalty_to_preference_high_cost_gen_maint_during_peak(model):
# if forced to choose between having high cost or low cost gen be on maintenance when load is high, we'd rather high cost gen be doing maintenance
# this should lower production cost overall and make maintenance schedules less random
return model.peak_penalty == sum([sum([model.marginal_costs[g]*model.max_load_by_group[i]*model.scheduled_maintenance[i, g] for g in model.g])
for i in model.i])
def feasible_maintenance_constraint_0(model, i, g):
return model.scheduled_maintenance[i, g] >= 0
def feasible_maintenance_constraint_1(model, i, g):
return model.scheduled_maintenance[i, g] <= 1
def define_available_gen(model, i):
return model.available_gen[i] == sum([(1 - model.scheduled_maintenance[i, g]) * model.pmax[g] for g in model.g])
def meet_maintenance_constraint(model, g):
# average maintenance across the hours == annual maintenance rate
return sum([model.scheduled_maintenance[i, g] * model.group_lengths[i] for i in model.i]) == model.annual_maintenace_hours[g]
def define_surplus_capacity(model, i):
return model.surplus_capacity >= model.available_gen[i] - model.max_load_by_group[i]
def scale_load_to_system(load, pmaxs, typical_reserve=1.15):
max_load = load.max()
sum_cap = sum(pmaxs)
if (max_load * typical_reserve) > sum_cap:
assert max_load != 0
load2 = load * (sum_cap / (max_load * typical_reserve))
return load2
else:
return load
def schedule_generator_maintenance(load, pmaxs, annual_maintenance_rates, dispatch_periods, marginal_costs, print_opt=False):
# annual maintenance rates must be between zero and one
annual_maintenance_rates = np.clip(annual_maintenance_rates, 0, 1)
# gives the index for the change between dispatch_periods
group_cuts = list(np.where(np.diff(dispatch_periods) != 0)[0] + 1) if dispatch_periods is not None else None
group_lengths = np.array([group_cuts[0]] + list(np.diff(group_cuts)) + [len(load) - group_cuts[-1]])
num_groups = len(group_cuts) + 1
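    # Worked example (added for clarity): with dispatch_periods = [0, 0, 1, 1, 1, 2],
    # np.diff(...) is nonzero at indices 1 and 4, so group_cuts = [2, 5],
    # group_lengths = [2, 3, 1] and num_groups = 3 (one group per dispatch period).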
# necessary to scale load in some cases for the optimization to work. Basically, load shouldn't be > gen
load_scaled = scale_load_to_system(load, pmaxs)
max_load_by_group = np.array([np.max(ls) for ls in np.array_split(load_scaled, np.array(group_cuts))])
annual_maintenace_hours = annual_maintenance_rates*len(load)
pmaxs_zero = np.nonzero(pmaxs==0)[0]
pmaxs_not_zero = np.nonzero(pmaxs)[0]
estimated_peak_penalty = sum(sum(np.outer(marginal_costs[pmaxs_not_zero],max_load_by_group).T*annual_maintenance_rates[pmaxs_not_zero]))
estimated_surplus_capacity = (pmaxs.sum() - max_load_by_group.min())*(1-annual_maintenance_rates.mean())
weight_on_peak_penalty = estimated_surplus_capacity/estimated_peak_penalty/10.
model = ConcreteModel()
# INPUT PARAMS
model.i = RangeSet(0, num_groups - 1)
model.g = RangeSet(0, len(pmaxs_not_zero) - 1)
model.annual_maintenace_hours = Param(model.g, initialize=dict(zip(model.g.keys(), annual_maintenace_hours[pmaxs_not_zero])))
model.pmax = Param(model.g, initialize=dict(zip(model.g.keys(), pmaxs[pmaxs_not_zero])))
model.marginal_costs = Param(model.g, initialize=dict(zip(model.g.keys(), marginal_costs[pmaxs_not_zero])))
model.max_load_by_group = Param(model.i, initialize=dict(zip(model.i.keys(), max_load_by_group)))
model.group_lengths = Param(model.i, initialize=dict(zip(model.i.keys(), group_lengths)))
model.weight_on_peak_penalty = Param(default=weight_on_peak_penalty)
# DECISIONS VARIABLES
model.available_gen = Var(model.i, within=NonNegativeReals)
model.scheduled_maintenance = Var(model.i, model.g, within=NonNegativeReals)
model.surplus_capacity = Var(within=NonNegativeReals)
model.peak_penalty = Var(within=NonNegativeReals)
# CONSTRAINTS
model.define_available_gen = Constraint(model.i, rule=define_available_gen)
model.feasible_maintenance_constraint_0 = Constraint(model.i, model.g, rule=feasible_maintenance_constraint_0)
model.feasible_maintenance_constraint_1 = Constraint(model.i, model.g, rule=feasible_maintenance_constraint_1)
model.meet_maintenance_constraint = Constraint(model.g, rule=meet_maintenance_constraint)
model.define_surplus_capacity = Constraint(model.i, rule=define_surplus_capacity)
model.define_penalty_to_preference_high_cost_gen_maint_during_peak = Constraint(rule=define_penalty_to_preference_high_cost_gen_maint_during_peak)
# OBJECTIVE
model.objective = Objective(rule=surplus_capacity, sense=minimize)
# SOLVE AND EXPORT RESULTS
solver = SolverFactory(cfg.solver_name or "cbc") # use cbc by default for testing, when you import config in a test, solver_name is None
results = solver.solve(model, tee=print_opt)
model.solutions.load_from(results)
scheduled_maintenance = np.empty((num_groups, len(pmaxs)))
scheduled_maintenance[:, pmaxs_zero] = annual_maintenance_rates[pmaxs_zero]
scheduled_maintenance[:, pmaxs_not_zero] = np.array([[model.scheduled_maintenance[i, g].value for i in model.i.keys()] for g in model.g.keys()]).T
return scheduled_maintenance
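# Illustrative call (added sketch, not part of the original module): given an hourly load
# profile, per-generator pmaxs / annual_maintenance_rates / marginal_costs arrays and an
# hourly dispatch_periods label array, the optimization returns maintenance fractions with
# shape (number of dispatch periods, number of generators), e.g.
#   maint = schedule_generator_maintenance(load, pmaxs, rates, dispatch_periods, marginal_costs)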
def schedule_generator_maintenance_loop(load, pmaxs, annual_maintenance_rates, dispatch_periods, scheduling_order):
# if nothing else, better to schedule the large generators first
scheduling_order = np.argsort(-pmaxs) if scheduling_order is None else scheduling_order
# annual maintenance rates must be between zero and one
annual_maintenance_rates = np.clip(annual_maintenance_rates, 0, 1)
# gives the index for the change between dispatch_periods
group_cuts = list(np.where(np.diff(dispatch_periods) != 0)[0] + 1) if dispatch_periods is not None else None
group_lengths = np.array([group_cuts[0]] + list(np.diff(group_cuts)) + [len(load) - group_cuts[-1]])
num_groups = len(group_cuts) + 1
# necessary to scale load in some cases for the optimization to work. Basically, load shouldn't be > gen
load_scaled = scale_load_to_system(load, pmaxs)
load_scaled = np.concatenate([[np.max(ls)]*gl for gl, ls in zip(group_lengths, np.array_split(load_scaled, np.array(group_cuts)))])
pmaxs_clipped = copy.deepcopy(pmaxs)
pmaxs_clipped = np.clip(pmaxs_clipped, 1e-1, None)
maintenance_energy = annual_maintenance_rates*pmaxs_clipped*len(load)
scheduled_maintenance = np.zeros((num_groups, len(pmaxs)))
# loop through and schedule maintenance for each generator one at a time. Update the net load after each one.
for i in scheduling_order:
energy_allocation = dispatch_budget.dispatch_to_energy_budget(load_scaled, -maintenance_energy[i], pmins=0, pmaxs=pmaxs_clipped[i])
scheduled_maintenance[:, i] = np.clip(np.array([np.mean(ls) for ls in np.array_split(energy_allocation, np.array(group_cuts))])/pmaxs_clipped[i], 0, 1)
load_scaled += np.concatenate([[sm * pmaxs[i]]*gl for gl, sm in zip(group_lengths, scheduled_maintenance[:, i])])
if not all(np.isclose(annual_maintenance_rates, (scheduled_maintenance.T * group_lengths).sum(axis=1)/len(load))):
logging.warning("scheduled maintance rates don't all match the annual maintenance rates")
return scheduled_maintenance
|
mit
| 2,130,111,249,588,512,000 | 54.152174 | 159 | 0.719054 | false |
alisonken1/openlp-projector-2.0
|
openlp/projectors/projectormanager.py
|
1
|
8836
|
# -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=80 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2014 Raoul Snyman #
# Portions copyright (c) 2008-2014 Tim Bentley, Gerald Britton, Jonathan #
# Corwin, Samuel Findlay, Michael Gorven, Scott Guerrieri, Matthias Hub, #
# Meinert Jordan, Armin Köhler, Erik Lundin, Edwin Lunando, Brian T. Meyer. #
# Joshua Miller, Stevan Pettit, Andreas Preikschat, Mattias Põldaru, #
# Christian Richter, Philip Ridout, Simon Scudder, Jeffrey Smith, #
# Maikel Stuivenberg, Martin Thompson, Jon Tibble, Dave Warnock, #
# Frode Woldsund, Martin Zibricky, Ken Roberts #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
__version__ = '0.0.2'
__v = __version__.split('.')
__version_hex__ = int(__v[0]) << 24 | \
int(__v[1]) << 16 | \
int(__v[2]) << 8
__module = 'projectors'
import logging
log = logging.getLogger(__name__)
import os
from PyQt4 import QtCore, QtGui
from openlp.core.lib import OpenLPToolbar, Receiver, SettingsManager
from openlp.core.lib import build_icon, check_item_selected, check_directory_exists, translate
from openlp.core.lib.db import Manager
from openlp.core.lib.settings import Settings
from openlp.core.lib.ui import UiStrings, critical_error_message_box, create_widget_action
from openlp.core.utils import AppLocation
from openlp.core.projectors import ProjectorForm
from openlp.core.projectors.db import init_schema, Projector
class ProjectorManager(QtGui.QWidget):
"""
    Manages the projector connections window
"""
log.info('ProjectorManager loaded')
def __init__(self, mainwindow, parent=None):
super(ProjectorManager, self).__init__(parent)
self.mainwindow = mainwindow
self.settingsSection = u'projectors'
self.manager = Manager(plugin_name=u'projectors', init_schema=init_schema)
self.projectorForm = ProjectorForm(self)
# Layout section
self.layout = QtGui.QVBoxLayout(self)
self.layout.setSpacing(0)
self.layout.setMargin(0)
self.layout.setObjectName(u'layout')
self.toolbar = OpenLPToolbar(self)
self.toolbar.setObjectName(u'toolbar')
self.toolbar.addToolbarAction(u'newProjector',
text=UiStrings().NewTheme,
icon=u':/general/general_new.png',
tooltip=translate('OpenLP.Projector', 'Add a new projector.'),
triggers=self.onAddProjector)
self.toolbar.addToolbarAction(u'editProjector',
text=translate('OpenLP.Projector', 'Edit projector'),
icon=u':/general/general_edit.png',
tooltip=translate('OpenLP.Projector', 'Edit a projector.'),
triggers=self.onEditProjector)
self.deleteToolbarAction = self.toolbar.addToolbarAction(u'deleteProjector',
text=translate('OpenLP.ThemeManager', 'Delete selected projector'),
icon=u':/general/general_delete.png',
tooltip=translate('OpenLP.ThemeManager', 'Delete selected projector.'),
triggers=self.onDeleteProjector)
self.layout.addWidget(self.toolbar)
# Projector manager list
self.projectorWidget = QtGui.QWidgetAction(self.toolbar)
self.projectorWidget.setObjectName(u'projectorWidget')
self.projectorListWidget = QtGui.QListWidget(self)
self.projectorListWidget.setIconSize(QtCore.QSize(75, 50))
#self.projectorListWidget.setContextMenuPolicy(QtCore.Qt.CustomContextmenu)
self.projectorListWidget.setObjectName(u'projectorListWidget')
self.layout.addWidget(self.projectorListWidget)
QtCore.QObject.connect(self.projectorListWidget,
QtCore.SIGNAL('projectorContextMenu(QPoint)'),
self.contextMenu)
# build the context menu
self.menu = QtGui.QMenu()
self.editAction = create_widget_action(self.menu,
text=translate('OpenLP.ProjectorManager', '&Edit Projector'),
icon=u':/general/general_edit.png', triggers=self.onEditProjector)
self.deleteAction = create_widget_action(self.menu,
text=translate('OpenLP.ProjectorManager', '&Delete Projector'),
icon=u':/general/general_delete.png', triggers=self.onDeleteProjector)
# Signals
QtCore.QObject.connect(self.projectorListWidget, QtCore.SIGNAL(
u'currentItemChanged(QListWidgetItem *, QListWidgetItem *)'),
self.checkListState)
QtCore.QObject.connect(Receiver.get_receiver(),
QtCore.SIGNAL(u'config_updated'), self.configUpdated)
# Variables
self.projectorList = []
self.path = AppLocation.get_section_data_path(self.settingsSection)
log.debug('Setting data path location to %s' % self.path)
self.configUpdated()
def contextMenu(self, point):
"""
        Build the right-click context menu and set state depending on
        the selected projector.
        """
        log.debug(u'contextMenu(point=%s)' % point)
        item = self.projectorListWidget.itemAt(point)
        if item is None:
            return
        real_projector_name = unicode(item.data(QtCore.Qt.UserRole).toString())
        projector_name = unicode(item.text())
        visible = real_projector_name == projector_name
        self.deleteAction.setVisible(visible)
        self.editAction.setVisible(visible)
        self.menu.exec_(self.projectorListWidget.mapToGlobal(point))
def configUpdated(self):
# Configuration updated - see if we are enabled or disabled
enabled = Settings().value(
self.settingsSection + u'/enabled',
QtCore.QVariant(True)).toBool()
e = 'Enabling' if enabled else 'Disabling'
log.debug(u'configUpdated() - %s projector controls' % e)
self.setVisible(enabled)
if len(self.projectorList) >= 1:
# Call each projector instance and either stop or start
for p in self.projectorList:
e = 'Starting' if enabled else 'Stopping'
log.debug('%s projector %s' % (e, 'testing'))
                if enabled:
p.start()
else:
p.stop()
def checkListState(self, item):
log.debug(u'checkListState()')
if item is None:
return
def contextMenu(self, point):
        """
        Build the right-click context menu.
        """
        log.debug(u'contextMenu()')
item = self.projectorListWidget.itemAt(point)
if item is None:
return
        self.deleteAction.setVisible(True)
        self.editAction.setVisible(True)
self.menu.exec_(self.projectorListWidget.mapToGlobal(point))
def onAddProjector(self):
log.debug(u'onAddProjector()')
self.projectorForm.exec_()
def onEditProjector(self):
log.debug(u'onEditProjector()')
if check_item_selected(self.projectorListWidget, translate(
'OpenLP.ProjectorManager', 'You must select a projector to edit.')):
# Change this to index
item = self.projectorListWidget.currentRow()
            self.projectorForm.exec_(projector=self.projectorList[item])
def onDeleteProjector(self):
log.debug(u'onDeleteProjector()')
        # Delete projector from db
return
def loadProjectors(self):
log.debug(u'loadProjectors()')
return
|
gpl-2.0
| 7,618,840,756,520,493,000 | 44.307692 | 94 | 0.604935 | false |
blopker/PCLite
|
pclite/http/downloaders/__init__.py
|
1
|
1722
|
'''
The MIT License
Copyright (c) Bo Lopker, http://blopker.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
'''
Module to determine the correct downloader to use.
By @blopker
'''
from . import requests
from . import null
from . import wget
from ... import logger
log = logger.get(__name__)
# Check if this OS supports SSL
try:
import ssl
SSL = True
except ImportError:
SSL = False
def get():
if not SSL and wget.is_available():
log.debug('Using WGET downloader.')
return wget.WgetDownloader()
if SSL:
log.debug('Using Requests downloader.')
return requests.RequestsDownloader()
log.error('No suitable downloader found. Everything is terrible.')
return null.NullDownloader()
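# Added note: callers obtain a downloader via get(); the fallback order is wget (when SSL
# support is unavailable but a wget binary is present), the requests-based downloader when
# SSL is available, and a do-nothing NullDownloader otherwise.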
|
mit
| 3,695,211,512,528,499,700 | 33.44 | 77 | 0.752613 | false |
sergiusens/snapcraft
|
tests/unit/repo/test_deb.py
|
1
|
19745
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import apt
import os
from subprocess import CalledProcessError
from unittest.mock import ANY, DEFAULT, call, patch, MagicMock
from testtools.matchers import Contains, Equals, FileExists, Not
import snapcraft
from snapcraft.internal import repo
from snapcraft.internal.repo import errors
from tests import fixture_setup, unit
from . import RepoBaseTestCase
class UbuntuTestCase(RepoBaseTestCase):
def setUp(self):
super().setUp()
patcher = patch("snapcraft.repo._deb.apt.Cache")
self.mock_cache = patcher.start()
self.addCleanup(patcher.stop)
def _fetch_binary(download_dir, **kwargs):
path = os.path.join(download_dir, "fake-package.deb")
open(path, "w").close()
return path
self.mock_package = MagicMock()
self.mock_package.candidate.fetch_binary.side_effect = _fetch_binary
self.mock_cache.return_value.get_changes.return_value = [self.mock_package]
@patch("snapcraft.internal.repo._deb._AptCache.fetch_binary")
@patch("snapcraft.internal.repo._deb.apt.apt_pkg")
def test_cache_update_failed(self, mock_apt_pkg, mock_fetch_binary):
fake_package_path = os.path.join(self.path, "fake-package.deb")
open(fake_package_path, "w").close()
mock_fetch_binary.return_value = fake_package_path
self.mock_cache().is_virtual_package.return_value = False
self.mock_cache().update.side_effect = apt.cache.FetchFailedException()
project_options = snapcraft.ProjectOptions(use_geoip=False)
ubuntu = repo.Ubuntu(self.tempdir, project_options=project_options)
self.assertRaises(errors.CacheUpdateFailedError, ubuntu.get, ["fake-package"])
@patch("shutil.rmtree")
@patch("snapcraft.internal.repo._deb._AptCache.fetch_binary")
@patch("snapcraft.internal.repo._deb.apt.apt_pkg")
def test_cache_hashsum_mismatch(self, mock_apt_pkg, mock_fetch_binary, mock_rmtree):
fake_package_path = os.path.join(self.path, "fake-package.deb")
open(fake_package_path, "w").close()
mock_fetch_binary.return_value = fake_package_path
self.mock_cache().is_virtual_package.return_value = False
self.mock_cache().update.side_effect = [
apt.cache.FetchFailedException(
"E:Failed to fetch copy:foo Hash Sum mismatch"
),
DEFAULT,
]
project_options = snapcraft.ProjectOptions(use_geoip=False)
ubuntu = repo.Ubuntu(self.tempdir, project_options=project_options)
ubuntu.get(["fake-package"])
def test_get_pkg_name_parts_name_only(self):
name, version = repo.get_pkg_name_parts("hello")
self.assertThat(name, Equals("hello"))
self.assertThat(version, Equals(None))
def test_get_pkg_name_parts_all(self):
name, version = repo.get_pkg_name_parts("hello:i386=2.10-1")
self.assertThat(name, Equals("hello:i386"))
self.assertThat(version, Equals("2.10-1"))
def test_get_pkg_name_parts_no_arch(self):
name, version = repo.get_pkg_name_parts("hello=2.10-1")
self.assertThat(name, Equals("hello"))
self.assertThat(version, Equals("2.10-1"))
@patch("snapcraft.internal.repo._deb._AptCache.fetch_binary")
@patch("snapcraft.internal.repo._deb.apt.apt_pkg")
def test_get_package(self, mock_apt_pkg, mock_fetch_binary):
fake_package_path = os.path.join(self.path, "fake-package.deb")
open(fake_package_path, "w").close()
mock_fetch_binary.return_value = fake_package_path
self.mock_cache().is_virtual_package.return_value = False
project_options = snapcraft.ProjectOptions(use_geoip=False)
ubuntu = repo.Ubuntu(self.tempdir, project_options=project_options)
ubuntu.get(["fake-package"])
mock_apt_pkg.assert_has_calls(
[
call.config.set("Apt::Install-Recommends", "False"),
call.config.find_file("Dir::Etc::Trusted"),
call.config.set("Dir::Etc::Trusted", ANY),
call.config.find_file("Dir::Etc::TrustedParts"),
call.config.set("Dir::Etc::TrustedParts", ANY),
call.config.clear("APT::Update::Post-Invoke-Success"),
]
)
self.mock_cache.assert_has_calls(
[
call(memonly=True, rootdir=ANY),
call().update(fetch_progress=ANY, sources_list=ANY),
call().open(),
]
)
# __getitem__ is tricky
self.assertThat(
self.mock_cache.return_value.__getitem__.call_args_list,
Contains(call("fake-package")),
)
# Verify that the package was actually fetched and copied into the
# requested location.
self.assertThat(
os.path.join(self.tempdir, "download", "fake-package.deb"), FileExists()
)
@patch("snapcraft.internal.repo._deb._AptCache.fetch_binary")
@patch("snapcraft.internal.repo._deb.apt.apt_pkg")
def test_get_multiarch_package(self, mock_apt_pkg, mock_fetch_binary):
fake_package_path = os.path.join(self.path, "fake-package.deb")
open(fake_package_path, "w").close()
mock_fetch_binary.return_value = fake_package_path
self.mock_cache().is_virtual_package.return_value = False
project_options = snapcraft.ProjectOptions(use_geoip=False)
ubuntu = repo.Ubuntu(self.tempdir, project_options=project_options)
ubuntu.get(["fake-package:arch"])
mock_apt_pkg.assert_has_calls(
[
call.config.set("Apt::Install-Recommends", "False"),
call.config.find_file("Dir::Etc::Trusted"),
call.config.set("Dir::Etc::Trusted", ANY),
call.config.find_file("Dir::Etc::TrustedParts"),
call.config.set("Dir::Etc::TrustedParts", ANY),
call.config.clear("APT::Update::Post-Invoke-Success"),
]
)
self.mock_cache.assert_has_calls(
[
call(memonly=True, rootdir=ANY),
call().update(fetch_progress=ANY, sources_list=ANY),
call().open(),
]
)
# __getitem__ is tricky
self.assertThat(
self.mock_cache.return_value.__getitem__.call_args_list,
Contains(call("fake-package:arch")),
)
# Verify that the package was actually fetched and copied into the
# requested location.
self.assertThat(
os.path.join(self.tempdir, "download", "fake-package.deb"), FileExists()
)
@patch("snapcraft.repo._deb._get_geoip_country_code_prefix")
def test_sources_is_none_uses_default(self, mock_cc):
mock_cc.return_value = "ar"
self.maxDiff = None
sources_list = repo._deb._format_sources_list(
"", use_geoip=True, deb_arch="amd64"
)
expected_sources_list = """deb http://ar.archive.ubuntu.com/ubuntu/ xenial main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ xenial-updates main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ xenial universe
deb http://ar.archive.ubuntu.com/ubuntu/ xenial-updates universe
deb http://ar.archive.ubuntu.com/ubuntu/ xenial multiverse
deb http://ar.archive.ubuntu.com/ubuntu/ xenial-updates multiverse
deb http://security.ubuntu.com/ubuntu xenial-security main restricted
deb http://security.ubuntu.com/ubuntu xenial-security universe
deb http://security.ubuntu.com/ubuntu xenial-security multiverse
"""
self.assertThat(sources_list, Equals(expected_sources_list))
def test_no_geoip_uses_default_archive(self):
sources_list = repo._deb._format_sources_list(
repo._deb._DEFAULT_SOURCES, deb_arch="amd64", use_geoip=False
)
expected_sources_list = """deb http://archive.ubuntu.com/ubuntu/ xenial main restricted
deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted
deb http://archive.ubuntu.com/ubuntu/ xenial universe
deb http://archive.ubuntu.com/ubuntu/ xenial-updates universe
deb http://archive.ubuntu.com/ubuntu/ xenial multiverse
deb http://archive.ubuntu.com/ubuntu/ xenial-updates multiverse
deb http://security.ubuntu.com/ubuntu xenial-security main restricted
deb http://security.ubuntu.com/ubuntu xenial-security universe
deb http://security.ubuntu.com/ubuntu xenial-security multiverse
"""
self.assertThat(sources_list, Equals(expected_sources_list))
@patch("snapcraft.internal.repo._deb._get_geoip_country_code_prefix")
def test_sources_amd64_vivid(self, mock_cc):
self.maxDiff = None
mock_cc.return_value = "ar"
sources_list = repo._deb._format_sources_list(
repo._deb._DEFAULT_SOURCES,
deb_arch="amd64",
use_geoip=True,
release="vivid",
)
expected_sources_list = """deb http://ar.archive.ubuntu.com/ubuntu/ vivid main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ vivid-updates main restricted
deb http://ar.archive.ubuntu.com/ubuntu/ vivid universe
deb http://ar.archive.ubuntu.com/ubuntu/ vivid-updates universe
deb http://ar.archive.ubuntu.com/ubuntu/ vivid multiverse
deb http://ar.archive.ubuntu.com/ubuntu/ vivid-updates multiverse
deb http://security.ubuntu.com/ubuntu vivid-security main restricted
deb http://security.ubuntu.com/ubuntu vivid-security universe
deb http://security.ubuntu.com/ubuntu vivid-security multiverse
"""
self.assertThat(sources_list, Equals(expected_sources_list))
@patch("snapcraft.repo._deb._get_geoip_country_code_prefix")
def test_sources_armhf_trusty(self, mock_cc):
sources_list = repo._deb._format_sources_list(
repo._deb._DEFAULT_SOURCES, deb_arch="armhf", release="trusty"
)
expected_sources_list = """deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty multiverse
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates multiverse
deb http://ports.ubuntu.com/ubuntu-ports trusty-security main restricted
deb http://ports.ubuntu.com/ubuntu-ports trusty-security universe
deb http://ports.ubuntu.com/ubuntu-ports trusty-security multiverse
"""
self.assertThat(sources_list, Equals(expected_sources_list))
self.assertFalse(mock_cc.called)
class UbuntuTestCaseWithFakeAptCache(RepoBaseTestCase):
def setUp(self):
super().setUp()
self.fake_apt_cache = fixture_setup.FakeAptCache()
self.useFixture(self.fake_apt_cache)
def test_get_installed_packages(self):
for name, version, installed in (
("test-installed-package", "test-installed-package-version", True),
("test-not-installed-package", "dummy", False),
):
self.fake_apt_cache.add_package(
fixture_setup.FakeAptCachePackage(name, version, installed=installed)
)
self.assertThat(
repo.Repo.get_installed_packages(),
Equals(["test-installed-package=test-installed-package-version"]),
)
class AutokeepTestCase(RepoBaseTestCase):
def test_autokeep(self):
self.fake_apt_cache = fixture_setup.FakeAptCache()
self.useFixture(self.fake_apt_cache)
self.test_packages = (
"main-package",
"dependency",
"sub-dependency",
"conflicting-dependency",
)
self.fake_apt_cache.add_packages(self.test_packages)
self.fake_apt_cache.cache["main-package"].dependencies = [
[
fixture_setup.FakeAptBaseDependency(
"dependency", [self.fake_apt_cache.cache["dependency"]]
),
fixture_setup.FakeAptBaseDependency(
"conflicting-dependency",
[self.fake_apt_cache.cache["conflicting-dependency"]],
),
]
]
self.fake_apt_cache.cache["dependency"].dependencies = [
[
fixture_setup.FakeAptBaseDependency(
"sub-dependency", [self.fake_apt_cache.cache["sub-dependency"]]
)
]
]
self.fake_apt_cache.cache["conflicting-dependency"].conflicts = [
self.fake_apt_cache.cache["dependency"]
]
project_options = snapcraft.ProjectOptions()
ubuntu = repo.Ubuntu(self.tempdir, project_options=project_options)
ubuntu.get(["main-package", "conflicting-dependency"])
# Verify that the package was actually fetched and copied into the
# requested location.
self.assertThat(
os.path.join(self.tempdir, "download", "main-package.deb"), FileExists()
)
self.assertThat(
os.path.join(self.tempdir, "download", "conflicting-dependency.deb"),
FileExists(),
)
self.assertThat(
os.path.join(self.tempdir, "download", "dependency.deb"),
Not(FileExists()),
"Dependency should not have been fetched",
)
self.assertThat(
os.path.join(self.tempdir, "download", "sub-dependency.deb"),
Not(FileExists()),
"Sub-dependency should not have been fetched",
)
class BuildPackagesTestCase(unit.TestCase):
def setUp(self):
super().setUp()
self.fake_apt_cache = fixture_setup.FakeAptCache()
self.useFixture(self.fake_apt_cache)
self.test_packages = (
"package-not-installed",
"package-installed",
"another-uninstalled",
"another-installed",
"repeated-package",
"repeated-package",
"versioned-package=0.2",
"versioned-package",
)
self.fake_apt_cache.add_packages(self.test_packages)
self.fake_apt_cache.cache["package-installed"].installed = True
self.fake_apt_cache.cache["another-installed"].installed = True
self.fake_apt_cache.cache["versioned-package"].version = "0.1"
def get_installable_packages(self, packages):
return [
"package-not-installed",
"another-uninstalled",
"repeated-package",
"versioned-package=0.2",
]
@patch("os.environ")
def install_test_packages(self, test_pkgs, mock_env):
mock_env.copy.return_value = {}
repo.Ubuntu.install_build_packages(test_pkgs)
@patch("snapcraft.repo._deb.is_dumb_terminal")
@patch("subprocess.check_call")
def test_install_build_package(self, mock_check_call, mock_is_dumb_terminal):
mock_is_dumb_terminal.return_value = False
self.install_test_packages(self.test_packages)
installable = self.get_installable_packages(self.test_packages)
mock_check_call.assert_has_calls(
[
call(
"sudo apt-get --no-install-recommends -y "
"-o Dpkg::Progress-Fancy=1 install".split()
+ sorted(set(installable)),
env={
"DEBIAN_FRONTEND": "noninteractive",
"DEBCONF_NONINTERACTIVE_SEEN": "true",
},
)
]
)
@patch("snapcraft.repo._deb.is_dumb_terminal")
@patch("subprocess.check_call")
    def test_install_build_package_in_dumb_terminal(
self, mock_check_call, mock_is_dumb_terminal
):
mock_is_dumb_terminal.return_value = True
self.install_test_packages(self.test_packages)
installable = self.get_installable_packages(self.test_packages)
mock_check_call.assert_has_calls(
[
call(
"sudo apt-get --no-install-recommends -y install".split()
+ sorted(set(installable)),
env={
"DEBIAN_FRONTEND": "noninteractive",
"DEBCONF_NONINTERACTIVE_SEEN": "true",
},
)
]
)
@patch("subprocess.check_call")
    def test_install_build_package_marks_auto_installed(self, mock_check_call):
self.install_test_packages(self.test_packages)
installable = self.get_installable_packages(self.test_packages)
mock_check_call.assert_has_calls(
[
call(
"sudo apt-mark auto".split() + sorted(set(installable)),
env={
"DEBIAN_FRONTEND": "noninteractive",
"DEBCONF_NONINTERACTIVE_SEEN": "true",
},
)
]
)
@patch("subprocess.check_call")
def test_mark_installed_auto_error_is_not_fatal(self, mock_check_call):
error = CalledProcessError(101, "bad-cmd")
mock_check_call.side_effect = lambda c, env: error if "apt-mark" in c else None
self.install_test_packages(["package-not-installed"])
def test_invalid_package_requested(self):
self.assertRaises(
errors.BuildPackageNotFoundError,
repo.Ubuntu.install_build_packages,
["package-does-not-exist"],
)
@patch("subprocess.check_call")
def test_broken_package_requested(self, mock_check_call):
self.fake_apt_cache.add_packages(("package-not-installable",))
self.fake_apt_cache.cache["package-not-installable"].dependencies = [
[fixture_setup.FakeAptBaseDependency("broken-dependency", [])]
]
self.assertRaises(
errors.PackageBrokenError,
repo.Ubuntu.install_build_packages,
["package-not-installable"],
)
@patch("subprocess.check_call")
def test_broken_package_apt_install(self, mock_check_call):
mock_check_call.side_effect = CalledProcessError(100, "apt-get")
self.fake_apt_cache.add_packages(("package-not-installable",))
raised = self.assertRaises(
errors.BuildPackagesNotInstalledError,
repo.Ubuntu.install_build_packages,
["package-not-installable"],
)
self.assertThat(raised.packages, Equals("package-not-installable"))
@patch("subprocess.check_call")
    def test_refresh_build_packages(self, mock_check_call):
repo.Ubuntu.refresh_build_packages()
mock_check_call.assert_called_once_with(["sudo", "apt", "update"])
@patch(
"subprocess.check_call",
side_effect=CalledProcessError(returncode=1, cmd=["sudo", "apt", "update"]),
)
    def test_refresh_build_packages_fails(self, mock_check_call):
self.assertRaises(
errors.CacheUpdateFailedError, repo.Ubuntu.refresh_build_packages
)
mock_check_call.assert_called_once_with(["sudo", "apt", "update"])
|
gpl-3.0
| -2,596,548,642,460,194,300 | 39.879917 | 99 | 0.62669 | false |
ryfeus/lambda-packs
|
Selenium_PhantomJS/source/service.py
|
1
|
1299
|
#!/usr/bin/env python
import httplib2
import datetime
import time
import os
import selenium
import json
import boto3
import requests
from dateutil.parser import parse
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from apiclient.discovery import build
from oauth2client.client import GoogleCredentials
def handler(event, context):
# set user agent
user_agent = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36")
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = user_agent
dcap["phantomjs.page.settings.javascriptEnabled"] = True
browser = webdriver.PhantomJS(service_log_path=os.path.devnull, executable_path="/var/task/phantomjs", service_args=['--ignore-ssl-errors=true'], desired_capabilities=dcap)
browser.get('https://en.wikipedia.org/wiki/Special:Random')
line = browser.find_element_by_class_name('firstHeading').text
print(line)
body = {
"message": "Your lambda function executed successfully!",
"event": line
}
response = {
"statusCode": 200,
"body": json.dumps(body)
}
return response
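# Local smoke test (added sketch; in production this handler is invoked by AWS Lambda and
# assumes the bundled PhantomJS binary is reachable at /var/task/phantomjs):
if __name__ == '__main__':
    print(handler({}, None))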
|
mit
| 6,113,150,174,686,944,000 | 29.928571 | 173 | 0.774442 | false |
GermanRuizMarcos/Classical-Composer-Classification
|
code_10_1/classification.py
|
1
|
30838
|
'''
AUDIO CLASSICAL COMPOSER IDENTIFICATION BASED ON:
A SPECTRAL BANDWISE FEATURE-BASED SYSTEM
'''
import essentia
from essentia.standard import *
import glob
import numpy as np
import arff
from scipy import stats
import collections
import cv2
import matplotlib
import matplotlib.pyplot as plt
#### gabor filters
def build_filters():
filters = []
ksize = 31
for theta in np.arange(0, np.pi, np.pi / 16):
kern = cv2.getGaborKernel((ksize, ksize), 4.0, theta, 10.0, 0.5, 0, ktype=cv2.CV_32F)
kern /= 1.5*kern.sum()
filters.append(kern)
return filters
def process(img, filters):
accum = np.zeros_like(img)
for kern in filters:
fimg = cv2.filter2D(img, cv2.CV_8UC3, kern)
np.maximum(accum, fimg, accum)
return accum
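# Added note: build_filters() creates a bank of 16 Gabor kernels (orientations spaced pi/16
# apart) and process() keeps the per-pixel maximum response across the bank; the per-pixel
# channel means of that response are later summarised by their variance and standard
# deviation for each spectrogram image.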
###
# Dataset creation with specific attributes (spectral features) and a specific class (composer's name)
'''
Audio files are transformed into the frequency domain through a 1024-sample STFT with 50% overlap.
The spectrum is divided into 50 mel-spaced bands.
'''
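# Note (added comment): the 1024-sample STFT with 50% overlap corresponds to the
# FrameGenerator(frameSize = 1024, hopSize = 512) calls below, and the 50 mel-spaced bands
# to MelBands(numberBands = 50) below.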
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/bach/*.wav")
fft = FFT()
melbands = MelBands(numberBands = 50)
flatness = FlatnessDB()
rolloff = RollOff()
centroid = SpectralCentroidTime()
flux = Flux()
energy = EnergyBand()
zero = ZeroCrossingRate()
spectrum = Spectrum()
w = Windowing(type = 'hann')
mfcc = MFCC()
silence = SilenceRate(thresholds = [0.01])
f = open('definitive_train.txt', 'wb')
f.write('@RELATION "composer dataset"\n')
f.write('\n')
f.write('@ATTRIBUTE filename STRING\n')
f.write('@ATTRIBUTE MFCC-0 REAL\n')
f.write('@ATTRIBUTE MFCC-1 REAL\n')
f.write('@ATTRIBUTE MFCC-2 REAL\n')
f.write('@ATTRIBUTE MFCC-3 REAL\n')
f.write('@ATTRIBUTE MFCC-4 REAL\n')
f.write('@ATTRIBUTE MFCC-5 REAL\n')
f.write('@ATTRIBUTE MFCC-6 REAL\n')
f.write('@ATTRIBUTE MFCC-7 REAL\n')
f.write('@ATTRIBUTE MFCC-8 REAL\n')
f.write('@ATTRIBUTE MFCC-9 REAL\n')
f.write('@ATTRIBUTE MFCC-10 REAL\n')
f.write('@ATTRIBUTE MFCC-11 REAL\n')
f.write('@ATTRIBUTE MFCC-12 REAL\n')
f.write('@ATTRIBUTE flatness-mean REAL\n')
f.write('@ATTRIBUTE flatness-variance REAL\n')
f.write('@ATTRIBUTE rolloff-mean REAL\n')
f.write('@ATTRIBUTE rolloff-variance REAL\n')
f.write('@ATTRIBUTE centroid-mean REAL\n')
f.write('@ATTRIBUTE centroid-variance REAL\n')
f.write('@ATTRIBUTE flux-mean REAL\n')
f.write('@ATTRIBUTE flux-variance REAL\n')
f.write('@ATTRIBUTE energy-mean REAL\n')
f.write('@ATTRIBUTE energy-variance REAL\n')
f.write('@ATTRIBUTE ZCR-mean REAL\n')
f.write('@ATTRIBUTE ZCR-variance REAL\n')
f.write('@ATTRIBUTE flatness-std REAL\n')
f.write('@ATTRIBUTE flatness-hmean REAL\n')
f.write('@ATTRIBUTE silences REAL\n')
f.write('@ATTRIBUTE gaborfilter-mean REAL\n')
f.write('@ATTRIBUTE gaborfilter-variance REAL\n')
f.write('@ATTRIBUTE composer {bach, beethoven, chopin, haydn, liszt, mendelssohn, mozart, vivaldi}\n')
f.write('\n')
f.write('@DATA\n')
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/bach'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
	# Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'bach'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('bach')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
# 2
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/beethoven/*.wav")
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/beethoven'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
	# Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'beethoven'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('beethoven')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
# 3
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/chopin/*.wav")
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/chopin'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
	# Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'chopin'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('chopin')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
# 4
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/haydn/*.wav")
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/haydn'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
	# Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'haydn'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('haydn')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
# 5
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/liszt/*.wav")
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/liszt'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
	# Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'liszt'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('liszt')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
# 6
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/mendelssohn/*.wav")
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/mendelssohn'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
	# Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'mendelssohn'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('mendelssohn')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
# 7
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/mozart/*.wav")
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/mozart'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
	# Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'mozart'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('mozart')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
# 8
dirList = glob.glob("/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/datasets/vivaldi/*.wav")
dirimg = '/home/usuario/Escritorio/SMC/2 term/Music Information Retrieval/Classical Composer Identification/code_10/pictures/vivaldi'
dirname = str(dirimg) +'/*.png'
piclist = glob.glob(dirname)
counter = 0
for audio_file in dirList:
    # Selecting the spectrogram
for item in piclist:
if item.split('/')[-1].split('.')[0] == audio_file.split('/')[-1].split('.')[0]:
picname = str(dirimg)+'/'+str(audio_file.split('/')[-1].split('.')[0]) + '.png'
flat = []
rol = []
cen = []
flu = []
ene = []
zer = []
mfccs = []
stft = []
sil = []
mean_counter = []
# Loading audio
audio = MonoLoader(filename = audio_file)()
# Features extraction
for frame in FrameGenerator(audio, frameSize = 1024, hopSize = 512, startFromZero=True):
bands = melbands(spectrum(frame))
stft.append(fft(frame))
flat.append(flatness(bands))
rol.append(rolloff(bands))
cen.append(centroid(bands))
flu.append(flux(bands))
ene.append(energy(bands))
zer.append(zero(frame))
mfcc_bands, mfcc_coeffs = mfcc(spectrum(w(frame)))
mfccs.append(mfcc_coeffs)
sil.append(silence(frame))
rate = collections.Counter()
rate.update(sil)
rate = rate.most_common(1)
composer = 'vivaldi'
# Gabor filter analysis
if __name__ == '__main__':
import sys
print __doc__
try:
img_fn = sys.argv[1]
except:
img_fn = picname
img = cv2.imread(img_fn)
if img is None:
print 'Failed to load image file:', img_fn
sys.exit(1)
filters = build_filters()
res1 = process(img, filters)
for i in range(len(res1)-1):
for j in range(len(res1[i])-1):
mean_counter.append(np.mean(res1[i][j]))
f.write('%s' %audio_file.split('/')[-1].split('.')[0].split('vivaldi')[0])
f.write(',')
f.write('%r' %np.mean(mfccs[0]))
f.write(',')
f.write('%r' %np.mean(mfccs[1]))
f.write(',')
f.write('%r' %np.mean(mfccs[2]))
f.write(',')
f.write('%r' %np.mean(mfccs[3]))
f.write(',')
f.write('%r' %np.mean(mfccs[4]))
f.write(',')
f.write('%r' %np.mean(mfccs[5]))
f.write(',')
f.write('%r' %np.mean(mfccs[6]))
f.write(',')
f.write('%r' %np.mean(mfccs[7]))
f.write(',')
f.write('%r' %np.mean(mfccs[8]))
f.write(',')
f.write('%r' %np.mean(mfccs[9]))
f.write(',')
f.write('%r' %np.mean(mfccs[10]))
f.write(',')
f.write('%r' %np.mean(mfccs[11]))
f.write(',')
f.write('%r' %np.mean(mfccs[12]))
f.write(',')
f.write('%r' %np.mean(flat))
f.write(',')
f.write('%r' %np.var(flat))
f.write(',')
f.write('%r' %np.mean(rol))
f.write(',')
f.write('%r' %np.var(rol))
f.write(',')
f.write('%r' %np.mean(cen))
f.write(',')
f.write('%r' %np.var(cen))
f.write(',')
f.write('%r' %np.mean(flu))
f.write(',')
f.write('%r' %np.var(flu))
f.write(',')
f.write('%r' %np.mean(ene))
f.write(',')
f.write('%r' %np.var(ene))
f.write(',')
f.write('%r' %np.mean(zer))
f.write(',')
f.write('%r' %np.var(zer))
f.write(',')
f.write('%r' %np.std(flat))
f.write(',')
f.write('%r' %stats.hmean(flat))
f.write(',')
f.write('%r' %rate[0][1])
f.write(',')
f.write('%r' %np.var(mean_counter))
f.write(',')
f.write('%r' %np.std(mean_counter))
f.write(',')
f.write('%s' %composer)
f.write('\n')
counter += 1
f.write('%\n')
f.write('%\n')
f.write('%\n')
f.close()
|
gpl-3.0
| -2,955,891,104,601,257,000 | 25.267462 | 147 | 0.573027 | false |
facelessuser/backrefs
|
tests/test_wordbreak.py
|
1
|
2754
|
"""Test `Word Break`."""
import unittest
from backrefs import uniprops
import re
class TestWordBreak(unittest.TestCase):
"""Test `Word Break` access."""
def test_table_integrity(self):
"""Test that there is parity between Unicode and ASCII tables."""
re_key = re.compile(r'^\^?[a-z0-9./]+$')
keys1 = set(uniprops.unidata.unicode_word_break.keys())
keys2 = set(uniprops.unidata.ascii_word_break.keys())
# Ensure all keys are lowercase (only need to check Unicode as the ASCII keys must match the Unicode later)
for k in keys1:
self.assertTrue(re_key.match(k) is not None)
# Ensure the same keys are in both the Unicode table as the ASCII table
self.assertEqual(keys1, keys2)
# Ensure each positive key has an inverse key
for key in keys1:
if not key.startswith('^'):
self.assertTrue('^' + key in keys1)
def test_wordbreak(self):
"""Test `Word Break` properties."""
for k, v in uniprops.unidata.unicode_word_break.items():
result = uniprops.get_unicode_property('wordbreak', k)
self.assertEqual(result, v)
def test_wordbreak_ascii(self):
"""Test `Word Break` ASCII properties."""
for k, v in uniprops.unidata.ascii_word_break.items():
result = uniprops.get_unicode_property('wordbreak', k, mode=uniprops.MODE_NORMAL)
self.assertEqual(result, v)
def test_wordbreak_binary(self):
"""Test `Word Break` ASCII properties."""
for k, v in uniprops.unidata.ascii_word_break.items():
result = uniprops.get_unicode_property('wordbreak', k, mode=uniprops.MODE_ASCII)
self.assertEqual(result, uniprops.fmt_string(v, True))
def test_bad_wordbreak(self):
"""Test `Word Break` property with bad value."""
with self.assertRaises(ValueError):
uniprops.get_unicode_property('wordbreak', 'bad')
def test_alias(self):
"""Test aliases."""
alias = None
for k, v in uniprops.unidata.alias.unicode_alias['_'].items():
if v == 'wordbreak':
alias = k
break
self.assertTrue(alias is not None)
# Ensure alias works
for k, v in uniprops.unidata.unicode_word_break.items():
result = uniprops.get_unicode_property(alias, k)
self.assertEqual(result, v)
break
# Test aliases for values
for k, v in uniprops.unidata.alias.unicode_alias['wordbreak'].items():
result1 = uniprops.get_unicode_property(alias, k)
result2 = uniprops.get_unicode_property(alias, v)
self.assertEqual(result1, result2)
|
mit
| 3,749,934,796,719,694,000 | 34.307692 | 115 | 0.611111 | false |
luoshao23/ML_algorithm
|
Clustering/Pred_KNN.py
|
1
|
4903
|
from random import random, randint
import math
import numpy as np
import plotly.plotly as py
import plotly.graph_objs as go
weightdomain = [(0, 20)] * 4
def wineprice(rating, age):
peak_age = rating - 50
price = float(rating) / 2
if age > peak_age:
price = price * (5 - (age - peak_age))
else:
price = price * (5 * float(age + 1) / peak_age)
if price < 0:
price = 0.0
return price
def wineset1():
rows = []
for i in xrange(300):
rating = random() * 50 + 50
age = random() * 50
price = wineprice(rating, age)
price *= (random() * 0.2 + 0.9)
rows.append((rating, age, price))
rows = np.array(rows)
return rows
def wineset2():
rows = []
for i in xrange(300):
rating = random() * 50 + 50
age = random() * 50
aisle = float(randint(1, 20))
bottlesize = [375.0, 750.0, 1500.0, 3000.0][randint(0, 3)]
price = wineprice(rating, age)
price *= (bottlesize / 750)
price *= (random() * 0.2 + 0.9)
rows.append((rating, age, aisle, bottlesize, price))
rows = np.array(rows)
return rows
def wineset3():
rows = wineset1()
for row in rows:
if random() < 0.5:
row[-1] *= 0.5
return rows
def euclidean(v1, v2):
d = 0.0
for i in xrange(len(v1)):
d += (v1[i] - v2[i])**2
return math.sqrt(d)
def getdistances(data, vec1):
distancelist = []
for i in xrange(len(data)):
vec2 = data[i][:-1]
distancelist.append((euclidean(vec1, vec2), i))
distancelist.sort()
return distancelist
def knnestimate(data, vec1, k=5):
dlist = getdistances(data, vec1)
avg = 0.0
for i in xrange(k):
idx = dlist[i][1]
avg += data[idx][-1]
avg = avg / k
return avg
def inverseweight(dist, num=1.0, const=0.1):
return num / (dist + const)
def subtractweight(dist, const=1.0):
if dist > const:
return 0
else:
return const - dist
def gaussian(dist, sigma=5.0):
return math.exp(-dist**2 / (2 * sigma**2))
def weightedknn(data, vec1, k=5, weightf=gaussian):
dlist = getdistances(data, vec1)
avg = 0.0
totalweight = 0.0
for i in xrange(k):
dist = dlist[i][0]
idx = dlist[i][1]
weight = weightf(dist)
avg += weight * data[idx][-1]
totalweight += weight
if totalweight == 0:
return 0
avg = avg / totalweight
return avg
def dividedata(data, test=0.05):
trainset = []
testset = []
for row in data:
if random() < test:
testset.append(row)
else:
trainset.append(row)
return trainset, testset
def testalgorithm(algf, trainset, testset):
error = 0.0
for row in testset:
guess = algf(trainset, row[:-1])
error += (row[-1] - guess)**2
return error / len(testset)
def crossvalidate(algf, data, trials=100, test=0.05):
error = 0.0
for i in xrange(trials):
trainset, testset = dividedata(data, test)
error += testalgorithm(algf, trainset, testset)
return error / trials
def rescale(data, scale=None):
if scale is not None and len(scale) == data.shape[1] - 1:
scaleddata = data * (scale + [1])
else:
scaleddata = data / (np.mean(data, 0) + 0.0001)
scaleddata[:, -1] = data[:, -1]
return scaleddata
def createcostfunction(algf, data):
def costf(scale):
sdata = rescale(data, scale)
return crossvalidate(algf, sdata, trials=20)
return costf
def probguess(data, vec1, low, high, k=5, weightf=gaussian):
dlist = getdistances(data, vec1)
nweight = 0.0
tweight = 0.0
for i in xrange(k):
dist = dlist[i][0]
idx = dlist[i][1]
weight = weightf(dist)
v = data[idx][-1]
if v>=low and v<=high:
nweight += weight
tweight += weight
if tweight == 0:
return 0
return nweight/tweight
def cumulativegraph(data,vec1,high,k=5,weightf=gaussian):
t1 = np.arange(0.0, high, 0.1)
cprob = np.array([probguess(data, vec1, 0, v, k, weightf) for v in t1])
data = go.Scatter(x=t1, y=cprob)
fig = go.Figure(data=[data])
py.plot(fig, filename='wineguess')
def probabilitygraph(data, vec1, high, k=5, weightf=gaussian, ss=5.0):
t1 = np.arange(0.0, high, 0.1)
probs = np.array([probguess(data, vec1, v, v+0.1, k, weightf) for v in t1])
smoothed = []
for i in xrange(len(probs)):
sv = 0.0
for j in xrange(len(probs)):
dist = abs(i-j)*0.1
weight = gaussian(dist, sigma=ss)
sv += weight*probs[j]
smoothed.append(sv)
smoothed = np.array(smoothed)
data = go.Scatter(x=t1, y=smoothed)
fig = go.Figure(data=[data])
py.plot(fig, filename='wineguess_smoothed')
data = wineset1()
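# Minimal usage sketch (the rating/age values below are made up for
# illustration): estimate the price of a wine from the synthetic data set with
# plain and distance-weighted k-NN, then cross-validate the plain estimator.
if __name__ == '__main__':
    sample = [95.0, 8.0]  # [rating, age]
    print 'plain kNN estimate:', knnestimate(data, sample, k=5)
    print 'weighted kNN estimate:', weightedknn(data, sample, k=5)
    print 'cross-validated error (kNN):', crossvalidate(knnestimate, data, trials=10)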
|
mit
| 7,932,099,514,344,801,000 | 22.68599 | 79 | 0.564756 | false |
simonjbeaumont/planex
|
planex/init.py
|
1
|
1365
|
#!/usr/bin/env python
"""
Creates or regenerates a Makefile with special planex-init comments
"""
import os
import logging
MAKEFILE_PATH = "/usr/share/planex"
def create_makefile():
""" Checks if a Makefile exists with special planex-init comments in it.
If not, it creates or regenerates the Makefile while preserving its
existing contents.
"""
name = "Makefile"
firstline = "# Start generated by planex-init\n"
autogen = "include %s/Makefile.rules\n" % (MAKEFILE_PATH)
endline = "# End generated by planex-init\n"
if not os.path.exists(name):
logging.debug("Creating Makefile")
with open(name, 'w') as makefile:
makefile.write(firstline)
makefile.write(autogen)
makefile.write(endline)
return
with open(name, 'r') as makefile:
lines = makefile.readlines()
try:
start = lines.index(firstline)
end = lines.index(endline)
lines = lines[:start + 1] + [autogen] + lines[end:]
except ValueError:
logging.error("Couldn't find planex-init stanza in Makefile")
with open(name, 'w') as makefile:
makefile.writelines(lines)
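# For reference, a freshly created Makefile contains only the generated stanza
# (the include path comes from MAKEFILE_PATH above):
#
#   # Start generated by planex-init
#   include /usr/share/planex/Makefile.rules
#   # End generated by planex-init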
def main():
"""
Main entry point
"""
logging.basicConfig(format='%(message)s', level=logging.ERROR)
create_makefile()
if __name__ == "__main__":
main()
|
lgpl-2.1
| 3,865,575,972,869,228,500 | 23.818182 | 76 | 0.628571 | false |
jic-dtool/dtool-create
|
setup.py
|
1
|
1165
|
from setuptools import setup
url = "https://github.com/jic-dtool/dtool-create"
version = "0.23.4"
readme = open('README.rst').read()
setup(
name="dtool-create",
packages=["dtool_create"],
package_data={"dtool_create": ["templates/*"]},
version=version,
description="Dtool plugin for creating datasets",
long_description=readme,
include_package_data=True,
author="Tjelvar Olsson",
author_email="tjelvar.olsson@gmail.com",
url=url,
install_requires=[
"click",
"dtoolcore>=3.6",
"dtool_cli>=0.6.0",
"dtool_symlink>=0.2.0",
"dtool_http",
"ruamel.yaml",
],
entry_points={
"dtool.cli": [
"create=dtool_create.dataset:create",
"name=dtool_create.dataset:name",
"readme=dtool_create.dataset:readme",
"add=dtool_create.dataset:add",
"freeze=dtool_create.dataset:freeze",
"copy=dtool_create.dataset:copy",
"cp=dtool_create.dataset:cp",
"publish=dtool_create.publish:publish",
],
},
download_url="{}/tarball/{}".format(url, version),
license="MIT"
)
|
mit
| -483,211,250,469,324,000 | 28.125 | 54 | 0.582833 | false |
Flexget/Flexget
|
setup.py
|
1
|
2371
|
import sys
from pathlib import Path
from typing import List
from setuptools import find_packages, setup
long_description = Path('README.rst').read_text()
# Populates __version__ without importing the package
__version__ = None
with open('flexget/_version.py', encoding='utf-8') as ver_file:
exec(ver_file.read()) # pylint: disable=W0122
if not __version__:
print('Could not find __version__ from flexget/_version.py')
sys.exit(1)
def load_requirements(filename: str) -> List[str]:
return [
line.strip()
for line in Path(filename).read_text().splitlines()
if not line.startswith('#')
]
setup(
name='FlexGet',
version=__version__,
description='FlexGet is a program aimed to automate downloading or processing content (torrents, podcasts, etc.) '
'from different sources like RSS-feeds, html-pages, various sites and more.',
long_description=long_description,
long_description_content_type='text/x-rst',
author='Marko Koivusalo',
author_email='marko.koivusalo@gmail.com',
license='MIT',
url='https://flexget.com',
project_urls={
'Repository': 'https://github.com/Flexget/Flexget',
'Issue Tracker': 'https://github.com/Flexget/Flexget/issues',
'Forum': 'https://discuss.flexget.com',
},
packages=find_packages(exclude=['flexget.tests']),
include_package_data=True,
zip_safe=False,
install_requires=load_requirements('requirements.txt'),
tests_require=['pytest'],
extras_require={'dev': load_requirements('dev-requirements.txt')},
entry_points={
'console_scripts': ['flexget = flexget:main'],
'gui_scripts': [
'flexget-headless = flexget:main'
], # This is useful on Windows to avoid a cmd popup
},
python_requires='>=3.6',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
)
|
mit
| 3,086,470,307,157,315,600 | 34.38806 | 118 | 0.640658 | false |
katarzynamazur/esxi_scripts
|
copy_register_vms/lifesaver.py
|
1
|
1457
|
#!/usr/bin/env python3
import sys
def help():
    print("\nUsage:\n\t./{0} {1} {2} {3}\n\n".format(sys.argv[0], "change_from", "change_to", "snapshot_number"))
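# A hypothetical invocation (the VM names and snapshot number are only examples):
#   ./lifesaver.py debian-template debian-clone 1
# which rewrites the copied .vmdk/.vmx/.vmsn/.vmsd descriptors so that every
# reference to "debian-template" is replaced by "debian-clone".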
if __name__ == "__main__":
if len(sys.argv) != 4 :
help()
else:
chgfrom = str(sys.argv[1])
chgto = str(sys.argv[2])
snapnum = str(sys.argv[3])
infile = "%s_0.vmdk" % (chgfrom)
outfile = "%s_0.vmdk" % (chgto)
with open(infile, 'r',encoding='utf-8', errors='ignore') as inf, open(outfile, 'w') as outf :
try:
for line in inf :
                    line = line.replace('%s_0-flat.vmdk' % (chgfrom), '%s_0-flat.vmdk' % (chgto))
outf.write(line)
except Exception:
pass
infiles = ['%s-Snapshot%s.vmsn' % (chgfrom, snapnum), '%s.vmx' % (chgfrom), '%s_0-000001.vmdk' % (chgfrom), '%s_0.vmdk' % (chgfrom), '%s.vmsd' % (chgfrom)]
outfiles = ['%s-Snapshot%s.vmsn'% (chgto, snapnum), '%s.vmx'% (chgto), '%s_0-000001.vmdk'% (chgto), '%s_0.vmdk'% (chgto), '%s.vmsd'% (chgto)]
for infile, outfile in zip(infiles, outfiles) :
with open(infile, 'r',encoding='utf-8', errors='ignore') as inf, open(outfile, 'w') as outf :
try:
for line in inf :
line = line.replace('%s' % chgfrom, '%s' % chgto)
outf.write(line)
except Exception:
pass
|
gpl-3.0
| -6,288,058,962,950,913,000 | 34.536585 | 163 | 0.482498 | false |
crazy-canux/xplugin_nagios
|
plugin/plugins/exchange_2010/src/check_exchange_mounts.py
|
1
|
1410
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Copyright (C) Canux CHENG <canuxcheng@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Plugin that checks the Exchange Mailboxes servers MOUNTPOINTS."""
import plugin
from plugin.mounts import PluginXMLMounts
PluginXMLMounts(
version=plugin.version,
description='Check Mailboxes servers Mountpoints of Exchange 2010.'
).run()
|
gpl-2.0
| 1,217,782,461,142,924,300 | 44.483871 | 78 | 0.77234 | false |
vickyting0910/opengeocoding
|
2reinter.py
|
1
|
3991
|
import pandas as pd
import glob
import time
import numpy as num
inter=sorted(glob.glob('*****.csv'))
w='*****.xlsx'
table1=pd.read_excel(w, '*****', index_col=None, na_values=['NA']).fillna(0)
w='*****.csv'
tab=pd.read_csv(w).fillna(0)
tab.is_copy = False
pd.options.mode.chained_assignment = None
t1=time.time()
for i in range(len(tab)):
if tab["IBR"][i]=='9A' or tab["IBR"][i] == '9B' or tab["IBR"][i] == '09A' or tab["IBR"][i] == '09B':
tab["IBR"][i]='9'
if tab["IBR"][i]=='11A' or tab["IBR"][i] == '11B' or tab["IBR"][i]=='11C' or tab["IBR"][i] == '11D' or tab["IBR"][i]=='36B':
tab["IBR"][i]='11'
if tab["IBR"][i]=='36A' or tab["IBR"][i] == '36B':
tab["IBR"][i]='36'
if tab["IBR"][i]=='13A' or tab["IBR"][i] == '13B' or tab["IBR"][i] == '13C':
tab["IBR"][i]='13'
if tab["IBR"][i]=='23A' or tab["IBR"][i] == '23B' or tab["IBR"][i] == '23E' or tab["IBR"][i] == '23F' or tab["IBR"][i] == '23H':
tab["IBR"][i]='23'
if tab["IBR"][i]=='26A' or tab["IBR"][i] == '26B' or tab["IBR"][i] == '26C' or tab["IBR"][i] == '26D' or tab["IBR"][i] == '26E':
tab["IBR"][i]='26'
if tab["IBR"][i]=='35A' or tab["IBR"][i] == '35B':
tab["IBR"][i]='35'
if tab["IBR"][i]=='36A':
tab["IBR"][i]='36'
if tab["IBR"][i]=='39A' or tab["IBR"][i] == '39B' or tab["IBR"][i] == '39C' or tab["IBR"][i] == '39D':
tab["IBR"][i]='39'
if tab["IBR"][i]=='40A' or tab["IBR"][i] == '40B' or tab["IBR"][i] == '40C':
tab["IBR"][i]='40'
if tab["IBR"][i]=='64A' or tab["IBR"][i] == '64B':
tab["IBR"][i]='64'
if tab["IBR"][i]=='90A' or tab["IBR"][i] == '90B' or tab["IBR"][i] == '90C' or tab["IBR"][i] == '90H' or tab["IBR"][i] == '90F' or tab["IBR"][i] == '90G' or tab["IBR"][i]=='90J' or tab["IBR"][i]=='90Z':
tab["IBR"][i]='90'
#convert to string for the join
for i in range(len(table1)):
table1['IBR_code'][i]=str(table1['IBR_code'][i])
description=table1.set_index([ "IBR_code"])
t2=time.time()
print t2-t1
#index crime
tab["index"]=num.nan
for i in range(len(tab)): #convert to integer
tab["index"][i]=tab.index[i]+1
#join
tab=tab.join(description, on=["IBR"], sort=True, rsuffix='_1', how='outer').fillna(0)
tab=tab[(tab["Reported_address"] != 0)].reset_index(drop=True).fillna(0)
tab["IBR_description"]=tab["crime_des12"]
t3=time.time()
print t3-t2
tab=tab[["Global_ID","Reported_address","Incident_date","Incident_time","Report_date","Report_time","Latitude","Longitude","IBR","IBR_description","Police_Department_Code","PD_description","State_Statute_Literal","State_Statute_Number","flag_geocode",'Fdir_n1','Edir_n1','strname_n1','strtype_n1','Enum_n1','Fdir_n2','Edir_n2','strname_n2','strtype_n2','Enum_n2','comname','mroad1','mratio1','wcorr1','wratio1','mroad2','mratio2','wcorr2','wratio2','match']]
tab=tab.replace("",num.nan)
tab=tab.replace("0",num.nan)
tab=tab.replace("00",num.nan)
tab=tab.replace(0,num.nan)
tab.to_csv('*****.csv',index=False)
for i in range(len(tab)):
tab['Global_ID'][i]=str(tab['Global_ID'][i])
description=tab.set_index([ "Global_ID"])
name1=[i[i.find('inter'):i.rfind('C.csv')+1].replace('_matchgeo','') for i in inter]
for p, q in zip((inter), (name1)):
table1=pd.read_csv(p)
for i in range(len(table1)):
		table1['Global_ID'][i]=str(table1['Global_ID'][i])
table1=table1.join(description, on=["Global_ID"], sort=True, rsuffix='_1', how='outer').fillna(0)
table1=table1[(table1["Reported_address"] != 0)].reset_index(drop=True).fillna(0)
table1["IBR_description"]=table1["IBR_description_1"]
table1["IBR"]=table1["IBR_1"]
table1=table1[["Global_ID","Reported_address","Incident_date","Incident_time","Report_date","Report_time","Latitude","Longitude","IBR","IBR_description","Police_Department_Code","PD_description","State_Statute_Literal","State_Statute_Number","flag_geocode",'Fdir_n1','Edir_n1','strname_n1','strtype_n1','Enum_n1','Fdir_n2','Edir_n2','strname_n2','strtype_n2','Enum_n2','comname','mroad1','mratio1','wcorr1','wratio1','mroad2','mratio2','wcorr2','wratio2','match']]
table1.to_csv('*****.csv',index=False)
|
bsd-2-clause
| 1,550,327,785,720,675,800 | 41.457447 | 465 | 0.600601 | false |
farert/farert
|
db/scripts/distance_exp.py
|
1
|
4515
|
#!python3.0.1
# -*- coding: utf-8 -*-
"""
Get the operating distance (sales km) and the computed distance (calc km) from station 1 to station 2 on a given line.
"""
import sys
import os
import jrdb
import time
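# Illustrative invocation (the line and station names are examples taken from
# the comment block at the end of this file, not a fixed CLI contract):
#   python distance_exp.py 山陽線 新山口 門司
# argv[1] is resolved via jrdb.line_id(), argv[2]/argv[3] via jrdb.station_id(),
# and the resulting ids are bound to the SQL parameters ?1, ?2 and ?3.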
t0 = time.time()
sql = """
select
(select max(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3))-
(select min(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3)),
(select max(calc_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3))-
(select min(calc_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3)),
case when exists (select * from t_lines
where line_id=?1 and (lflg&(1<<21)!=0) and station_id=?2)
then -1 else
abs((select sales_km from t_lines
where line_id=?1 and (lflg&(1<<21)!=0)
and sales_km>(select min(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3))
and sales_km<(select max(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3)))-
(select sales_km from t_lines where line_id=?1 and station_id=?2)) end,
case when exists (select * from t_lines
where line_id=?1 and (lflg&(1<<21)!=0) and station_id=?3)
then -1 else
abs((select calc_km from t_lines
where line_id=?1 and (lflg&(1<<21)!=0)
and sales_km>(select min(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3))
and sales_km<(select max(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3)))-
(select calc_km from t_lines where line_id=?1 and station_id=?2)) end,
((select company_id from t_station where rowid=?2) + (65536 * (select company_id from t_station where rowid=?3))),
((select 2147483648*(1&(lflg>>23)) from t_lines where line_id=?1) +
(select sflg&8191 from t_station where rowid=?2) + (select sflg&8191 from t_station where rowid=?3) * 65536)
"""
# when s1 or s2 ...
# result list
for n in range(100):
for inf in jrdb.sqlexec(sql, [ jrdb.line_id(sys.argv[1]),
jrdb.station_id(sys.argv[2]), jrdb.station_id(sys.argv[3]) ] ):
if n == 0: print(inf[0], inf[1], inf[2], inf[3], inf[4], inf[5])
pass
print("lapse ", time.time() - t0)
# col1 : sales km between ?2 and ?3 on line ?1
# col2 : calc km between ?2 and ?3 on line ?1
# col3 : sales km from ?2 to the boundary station (-1 if ?2 itself is the boundary station; None if no boundary station lies between ?2 and ?3)
# col4 : calc km from ?2 to the boundary station (-1 if ?3 itself is the boundary station; None if no boundary station lies between ?2 and ?3)
# 2012-9-2
# 2012-12-21 the query above is the one actually used
print("----------------------------------------------------------------------------")
t0 = time.time()
sql = """
select (select max(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3))-
(select min(sales_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3)),
(select max(calc_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3))-
(select min(calc_km) from t_lines where line_id=?1 and (station_id=?2 or station_id=?3)),
abs((select sales_km from t_lines where line_id=?1 and (lflg&(1<<21))!=0)-
(select sales_km from t_lines where line_id=?1 and station_id=?2)),
abs((select calc_km from t_lines where line_id=?1 and (lflg&(1<<21))!=0)-
(select calc_km from t_lines where line_id=?1 and station_id=?2)),
((select company_id from t_station where rowid=?2) + (65536 * (select company_id from t_station where rowid=?3))),
((select 2147483648*(1&(lflg>>23)) from t_lines where line_id=?1) +
(select sflg&8191 from t_station where rowid=?2) + (select sflg&8191 from t_station where rowid=?3) * 65536)
"""
# sales_km, calc_km, sales_km (company section containing station1), calc_km (company section containing station1), company_id of station1, company_id of station2
# bit31: whether the line is a non-JR company line
# result list
for n in range(500):
for inf in jrdb.sqlexec(sql, [ jrdb.line_id(sys.argv[1]),
jrdb.station_id(sys.argv[2]), jrdb.station_id(sys.argv[3]) ] ):
if n == 0: print(inf[0], inf[1], inf[2], inf[3], inf[4], inf[5])
pass
print("lapse ", time.time() - t0)
"""
3167 新山口 (Shin-Yamaguchi)
3180 門司 (Moji) -> sales_km: 752, 689 up to 下関 (Shimonoseki)
141 山陽線 (Sanyo Line)
12 2
15 2
19 2
22 2
29 2
32 2
40 3
48 3 36 = (40-12) + (48-40)
create table lin(km, cid, f);
insert into lin values(12, 2, 0);
insert into lin values(15, 2, 0);
insert into lin values(19, 2, 0);
insert into lin values(22, 2, 0);
insert into lin values(29, 2, 0);
insert into lin values(32, 2, 0);
insert into lin values(40, 3, 1);
insert into lin values(48, 3, 0);
"""
|
gpl-3.0
| -2,113,714,231,040,063,200 | 28.711268 | 116 | 0.638017 | false |
appcelerator/entourage
|
components/services/pylons/appcelerator-module/setup.py
|
1
|
1264
|
from setuptools import setup, find_packages
setup(name='Appcelerator',
version='0.0.0',
description="Python version of the Appcelerator web application framework for building fast, dynamic, AJAX based web 2.0 applications.",
long_description="""
""",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'License :: OSI Approved :: GNU General Public License (GPL)',
],
keywords='wsgi web soa ria javascript',
author='Mark Luffel',
author_email='mluffel@appcelerator.com',
url='http://appcelerator.org',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
zip_safe=False,
install_requires=[
'beaker>=0.8.1',
'simplejson',
'elementtree',
'pastescript'
],
entry_points="""
[paste.app_factory]
service_broker = appcelerator.core:service_broker_factory
cross_domain_proxy = appcelerator.core:cross_domain_proxy_factory
"""
)
|
apache-2.0
| 3,221,890,254,682,057,700 | 31.410256 | 142 | 0.614715 | false |
mic4ael/indico
|
indico/modules/rb/notifications/blockings.py
|
1
|
1361
|
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import render_template
from indico.core.notifications import email_sender, make_email
@email_sender
def notify_request(owner, blocking, blocked_rooms):
"""
Notifies room owner about blockings he has to approve.
Expects only blockings for rooms owned by the specified owner
"""
subject = 'Confirm room blockings'
body = render_template('rb/emails/blockings/awaiting_confirmation_email_to_manager.txt',
owner=owner, blocking=blocking, blocked_rooms=blocked_rooms)
return make_email(owner.email, subject=subject, body=body)
@email_sender
def notify_request_response(blocked_room):
"""
Notifies blocking creator about approval/rejection of his
blocking request for a room
"""
to = blocked_room.blocking.created_by_user.email
verb = blocked_room.State(blocked_room.state).title.upper()
subject = 'Room blocking {}'.format(verb)
body = render_template('rb/emails/blockings/state_email_to_user.txt',
blocking=blocked_room.blocking, blocked_room=blocked_room, verb=verb)
return make_email(to, subject=subject, body=body)
|
mit
| 8,535,712,263,884,984,000 | 36.805556 | 96 | 0.709772 | false |
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_03_01/aio/operations/_ddos_custom_policies_operations.py
|
1
|
20462
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DdosCustomPoliciesOperations:
"""DdosCustomPoliciesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
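    # Typical access pattern (sketch only; the client class named below is an
    # assumption, it is not defined in this module): a generated management client
    # such as azure.mgmt.network.aio.NetworkManagementClient instantiates this
    # group and exposes it as ``client.ddos_custom_policies``, e.g.
    #   policy = await client.ddos_custom_policies.get(resource_group_name, ddos_custom_policy_name)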
async def _delete_initial(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified DDoS custom policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
ddos_custom_policy_name=ddos_custom_policy_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def get(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
**kwargs
) -> "_models.DdosCustomPolicy":
"""Gets information about the specified DDoS custom policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DdosCustomPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
parameters: "_models.DdosCustomPolicy",
**kwargs
) -> "_models.DdosCustomPolicy":
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DdosCustomPolicy')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
parameters: "_models.DdosCustomPolicy",
**kwargs
) -> AsyncLROPoller["_models.DdosCustomPolicy"]:
"""Creates or updates a DDoS custom policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:param parameters: Parameters supplied to the create or update operation.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either DdosCustomPolicy or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
ddos_custom_policy_name=ddos_custom_policy_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
async def update_tags(
self,
resource_group_name: str,
ddos_custom_policy_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> "_models.DdosCustomPolicy":
"""Update a DDoS custom policy tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_custom_policy_name: The name of the DDoS custom policy.
:type ddos_custom_policy_name: str
:param parameters: Parameters supplied to update DDoS custom policy resource tags.
:type parameters: ~azure.mgmt.network.v2020_03_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DdosCustomPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_03_01.models.DdosCustomPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosCustomPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosCustomPolicyName': self._serialize.url("ddos_custom_policy_name", ddos_custom_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosCustomPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}'} # type: ignore
|
mit
| 1,644,179,812,605,597,700 | 49.774194 | 204 | 0.657218 | false |
kjagoo/wger_stark
|
wger/__init__.py
|
1
|
1027
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:copyright: 2011, 2012 by OpenSlides team, see AUTHORS.
:license: GNU GPL, see LICENSE for more details.
"""
VERSION = (1, 8, 0, 'alpha', 3)
RELEASE = False
def get_version(version=None, release=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION
if release is None:
release = RELEASE
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
main_parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:main_parts])
if version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
sub = mapping[version[3]] + str(version[4])
else:
sub = ''
if not release:
sub += '-dev'
return main + sub
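# Worked example with the values defined above: VERSION = (1, 8, 0, 'alpha', 3)
# and RELEASE = False give main = '1.8' (the micro version 0 is dropped),
# sub = 'a3' plus the '-dev' suffix, so get_version() returns '1.8a3-dev'.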
|
agpl-3.0
| 6,015,827,220,061,252,000 | 26.026316 | 65 | 0.56962 | false |
irmen/Pyro5
|
tests/test_echoserver.py
|
1
|
2734
|
"""
Tests for the built-in test echo server.
Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net).
"""
import time
import pytest
from threading import Thread, Event
import Pyro5.client
import Pyro5.errors
import Pyro5.utils.echoserver as echoserver
from Pyro5 import config
class EchoServerThread(Thread):
def __init__(self):
super(EchoServerThread, self).__init__()
self.setDaemon(True)
self.started = Event()
self.echodaemon = self.echoserver = self.uri = None
def run(self):
self.echodaemon, self.echoserver, self.uri = echoserver.main(args=["-q"], returnWithoutLooping=True)
self.started.set()
self.echodaemon.requestLoop(loopCondition=lambda: not self.echoserver._must_shutdown)
class TestEchoserver:
def setup_method(self):
self.echoserverthread = EchoServerThread()
self.echoserverthread.start()
self.echoserverthread.started.wait()
self.uri = self.echoserverthread.uri
def teardown_method(self):
self.echoserverthread.echodaemon.shutdown()
time.sleep(0.02)
self.echoserverthread.join()
config.SERVERTYPE = "thread"
def testExposed(self):
e = Pyro5.utils.echoserver.EchoServer()
assert hasattr(e, "_pyroExposed")
def testEcho(self):
with Pyro5.client.Proxy(self.uri) as echo:
try:
assert echo.echo("hello") == "hello"
assert echo.echo(None) is None
assert echo.echo([1,2,3]) == [1,2,3]
finally:
echo.shutdown()
def testError(self):
with Pyro5.client.Proxy(self.uri) as echo:
with pytest.raises(Exception) as x:
echo.error()
tb = "".join(Pyro5.errors.get_pyro_traceback(x.type, x.value, x.tb))
assert "Remote traceback" in tb
assert "ValueError" in tb
assert str(x.value) == "this is the generated error from echoserver echo() method"
with pytest.raises(Exception) as x:
echo.error_with_text()
tb = "".join(Pyro5.errors.get_pyro_traceback(x.type, x.value, x.tb))
assert "Remote traceback" in tb
assert "ValueError" in tb
assert str(x.value) == "the message of the error"
def testGenerator(self):
with Pyro5.client.Proxy(self.uri) as echo:
remotegenerator = echo.generator()
assert isinstance(remotegenerator, Pyro5.client._StreamResultIterator)
next(remotegenerator)
next(remotegenerator)
next(remotegenerator)
with pytest.raises(StopIteration):
next(remotegenerator)
|
mit
| -4,350,941,326,029,614,000 | 33.607595 | 108 | 0.622165 | false |
normanmaurer/autobahntestsuite-maven-plugin
|
src/main/resources/twisted/trial/runner.py
|
1
|
26192
|
# -*- test-case-name: twisted.trial.test.test_runner -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A miscellany of code used to run Trial tests.
Maintainer: Jonathan Lange
"""
__all__ = [
'TestSuite',
'DestructiveTestSuite', 'DryRunVisitor', 'ErrorHolder', 'LoggedSuite',
'TestHolder', 'TestLoader', 'TrialRunner', 'TrialSuite',
'filenameToModule', 'isPackage', 'isPackageDirectory', 'isTestCase',
'name', 'samefile', 'NOT_IN_TEST',
]
import os, types, warnings, sys, inspect, imp
import doctest, time
from twisted.python import reflect, log, failure, modules, filepath
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.python.versions import Version
from twisted.internet import defer
from twisted.trial import util, unittest
from twisted.trial.itrial import ITestCase
from twisted.trial.reporter import UncleanWarningsReporterWrapper
# These are imported so that they remain in the public API for t.trial.runner
from twisted.trial.unittest import TestSuite
from zope.interface import implements
pyunit = __import__('unittest')
def isPackage(module):
"""Given an object return True if the object looks like a package"""
if not isinstance(module, types.ModuleType):
return False
basename = os.path.splitext(os.path.basename(module.__file__))[0]
return basename == '__init__'
def isPackageDirectory(dirname):
"""Is the directory at path 'dirname' a Python package directory?
Returns the name of the __init__ file (it may have a weird extension)
if dirname is a package directory. Otherwise, returns False"""
for ext in zip(*imp.get_suffixes())[0]:
initFile = '__init__' + ext
if os.path.exists(os.path.join(dirname, initFile)):
return initFile
return False
def samefile(filename1, filename2):
"""
A hacky implementation of C{os.path.samefile}. Used by L{filenameToModule}
when the platform doesn't provide C{os.path.samefile}. Do not use this.
"""
return os.path.abspath(filename1) == os.path.abspath(filename2)
def filenameToModule(fn):
"""
Given a filename, do whatever possible to return a module object matching
that file.
If the file in question is a module in Python path, properly import and
return that module. Otherwise, load the source manually.
@param fn: A filename.
@return: A module object.
@raise ValueError: If C{fn} does not exist.
"""
if not os.path.exists(fn):
raise ValueError("%r doesn't exist" % (fn,))
try:
ret = reflect.namedAny(reflect.filenameToModuleName(fn))
except (ValueError, AttributeError):
# Couldn't find module. The file 'fn' is not in PYTHONPATH
return _importFromFile(fn)
# ensure that the loaded module matches the file
retFile = os.path.splitext(ret.__file__)[0] + '.py'
# not all platforms (e.g. win32) have os.path.samefile
same = getattr(os.path, 'samefile', samefile)
if os.path.isfile(fn) and not same(fn, retFile):
del sys.modules[ret.__name__]
ret = _importFromFile(fn)
return ret
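# Example (the path is illustrative): filenameToModule('twisted/trial/runner.py')
# imports and returns the twisted.trial.runner module when the file is importable
# from PYTHONPATH; otherwise the module is loaded directly from the source file.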
def _importFromFile(fn, moduleName=None):
fn = _resolveDirectory(fn)
if not moduleName:
moduleName = os.path.splitext(os.path.split(fn)[-1])[0]
if moduleName in sys.modules:
return sys.modules[moduleName]
fd = open(fn, 'r')
try:
module = imp.load_source(moduleName, fn, fd)
finally:
fd.close()
return module
def _resolveDirectory(fn):
if os.path.isdir(fn):
initFile = isPackageDirectory(fn)
if initFile:
fn = os.path.join(fn, initFile)
else:
raise ValueError('%r is not a package directory' % (fn,))
return fn
def _getMethodNameInClass(method):
"""
Find the attribute name on the method's class which refers to the method.
For some methods, notably decorators which have not had __name__ set correctly:
getattr(method.im_class, method.__name__) != method
"""
if getattr(method.im_class, method.__name__, object()) != method:
for alias in dir(method.im_class):
if getattr(method.im_class, alias, object()) == method:
return alias
return method.__name__
class DestructiveTestSuite(TestSuite):
"""
A test suite which remove the tests once run, to minimize memory usage.
"""
def run(self, result):
"""
Almost the same as L{TestSuite.run}, but with C{self._tests} being
empty at the end.
"""
while self._tests:
if result.shouldStop:
break
test = self._tests.pop(0)
test(result)
return result
# When an error occurs outside of any test, the user will see this string
# in place of a test's name.
NOT_IN_TEST = "<not in test>"
class LoggedSuite(TestSuite):
"""
Any errors logged in this suite will be reported to the L{TestResult}
object.
"""
def run(self, result):
"""
Run the suite, storing all errors in C{result}. If an error is logged
while no tests are running, then it will be added as an error to
C{result}.
@param result: A L{TestResult} object.
"""
observer = unittest._logObserver
observer._add()
super(LoggedSuite, self).run(result)
observer._remove()
for error in observer.getErrors():
result.addError(TestHolder(NOT_IN_TEST), error)
observer.flushErrors()
class TrialSuite(TestSuite):
"""
Suite to wrap around every single test in a C{trial} run. Used internally
by Trial to set up things necessary for Trial tests to work, regardless of
what context they are run in.
"""
def __init__(self, tests=(), forceGarbageCollection=False):
if forceGarbageCollection:
newTests = []
for test in tests:
test = unittest.decorate(
test, unittest._ForceGarbageCollectionDecorator)
newTests.append(test)
tests = newTests
suite = LoggedSuite(tests)
super(TrialSuite, self).__init__([suite])
def _bail(self):
from twisted.internet import reactor
d = defer.Deferred()
reactor.addSystemEventTrigger('after', 'shutdown',
lambda: d.callback(None))
reactor.fireSystemEvent('shutdown') # radix's suggestion
# As long as TestCase does crap stuff with the reactor we need to
# manually shutdown the reactor here, and that requires util.wait
# :(
# so that the shutdown event completes
unittest.TestCase('mktemp')._wait(d)
def run(self, result):
try:
TestSuite.run(self, result)
finally:
self._bail()
def name(thing):
"""
@param thing: an object from modules (instance of PythonModule,
PythonAttribute), a TestCase subclass, or an instance of a TestCase.
"""
if isTestCase(thing):
# TestCase subclass
theName = reflect.qual(thing)
else:
# thing from trial, or thing from modules.
# this monstrosity exists so that modules' objects do not have to
# implement id(). -jml
try:
theName = thing.id()
except AttributeError:
theName = thing.name
return theName
def isTestCase(obj):
"""
@return: C{True} if C{obj} is a class that contains test cases, C{False}
otherwise. Used to find all the tests in a module.
"""
try:
return issubclass(obj, pyunit.TestCase)
except TypeError:
return False
class TestHolder(object):
"""
Placeholder for a L{TestCase} inside a reporter. As far as a L{TestResult}
is concerned, this looks exactly like a unit test.
"""
implements(ITestCase)
failureException = None
def __init__(self, description):
"""
        @param description: A string to be displayed by the L{TestResult}.
"""
self.description = description
def __call__(self, result):
return self.run(result)
def id(self):
return self.description
def countTestCases(self):
return 0
def run(self, result):
"""
This test is just a placeholder. Run the test successfully.
@param result: The C{TestResult} to store the results in.
@type result: L{twisted.trial.itrial.IReporter}.
"""
result.startTest(self)
result.addSuccess(self)
result.stopTest(self)
def shortDescription(self):
return self.description
class ErrorHolder(TestHolder):
"""
Used to insert arbitrary errors into a test suite run. Provides enough
methods to look like a C{TestCase}, however, when it is run, it simply adds
an error to the C{TestResult}. The most common use-case is for when a
module fails to import.
"""
def __init__(self, description, error):
"""
@param description: A string used by C{TestResult}s to identify this
error. Generally, this is the name of a module that failed to import.
@param error: The error to be added to the result. Can be an `exc_info`
tuple or a L{twisted.python.failure.Failure}.
"""
super(ErrorHolder, self).__init__(description)
self.error = util.excInfoOrFailureToExcInfo(error)
def __repr__(self):
return "<ErrorHolder description=%r error=%s%s>" % (
# Format the exception type and arguments explicitly, as exception
# objects do not have nice looking string formats on Python 2.4.
self.description, self.error[0].__name__, self.error[1].args)
def run(self, result):
"""
Run the test, reporting the error.
@param result: The C{TestResult} to store the results in.
@type result: L{twisted.trial.itrial.IReporter}.
"""
result.startTest(self)
result.addError(self, self.error)
result.stopTest(self)
class TestLoader(object):
"""
I find tests inside function, modules, files -- whatever -- then return
them wrapped inside a Test (either a L{TestSuite} or a L{TestCase}).
@ivar methodPrefix: A string prefix. C{TestLoader} will assume that all the
methods in a class that begin with C{methodPrefix} are test cases.
@ivar modulePrefix: A string prefix. Every module in a package that begins
with C{modulePrefix} is considered a module full of tests.
@ivar forceGarbageCollection: A flag applied to each C{TestCase} loaded.
See L{unittest.TestCase} for more information.
@ivar sorter: A key function used to sort C{TestCase}s, test classes,
modules and packages.
@ivar suiteFactory: A callable which is passed a list of tests (which
themselves may be suites of tests). Must return a test suite.
"""
methodPrefix = 'test'
modulePrefix = 'test_'
def __init__(self):
self.suiteFactory = TestSuite
self.sorter = name
self._importErrors = []
def sort(self, xs):
"""
Sort the given things using L{sorter}.
@param xs: A list of test cases, class or modules.
"""
return sorted(xs, key=self.sorter)
def findTestClasses(self, module):
"""Given a module, return all Trial test classes"""
classes = []
for name, val in inspect.getmembers(module):
if isTestCase(val):
classes.append(val)
return self.sort(classes)
def findByName(self, name):
"""
Return a Python object given a string describing it.
@param name: a string which may be either a filename or a
fully-qualified Python name.
@return: If C{name} is a filename, return the module. If C{name} is a
fully-qualified Python name, return the object it refers to.
"""
if os.path.exists(name):
return filenameToModule(name)
return reflect.namedAny(name)
def loadModule(self, module):
"""
Return a test suite with all the tests from a module.
Included are TestCase subclasses and doctests listed in the module's
__doctests__ module. If that's not good for you, put a function named
either C{testSuite} or C{test_suite} in your module that returns a
TestSuite, and I'll use the results of that instead.
If C{testSuite} and C{test_suite} are both present, then I'll use
C{testSuite}.
"""
## XXX - should I add an optional parameter to disable the check for
## a custom suite.
## OR, should I add another method
if not isinstance(module, types.ModuleType):
raise TypeError("%r is not a module" % (module,))
if hasattr(module, 'testSuite'):
return module.testSuite()
elif hasattr(module, 'test_suite'):
return module.test_suite()
suite = self.suiteFactory()
for testClass in self.findTestClasses(module):
suite.addTest(self.loadClass(testClass))
if not hasattr(module, '__doctests__'):
return suite
docSuite = self.suiteFactory()
for doctest in module.__doctests__:
docSuite.addTest(self.loadDoctests(doctest))
return self.suiteFactory([suite, docSuite])
loadTestsFromModule = loadModule
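    # A minimal sketch of the hooks honoured above (module, package and class
    # names are hypothetical): a test module can take over collection by defining
    # ``testSuite`` (or ``test_suite``), and can point at doctests via
    # ``__doctests__``:
    #
    #     # mypkg/test_custom.py
    #     import unittest
    #
    #     __doctests__ = ['mypkg.textutils']
    #
    #     def testSuite():
    #         suite = unittest.TestSuite()
    #         suite.addTest(SpecialTests('test_one'))
    #         return suite
    #
    # When ``testSuite`` is present it is returned directly, so the
    # ``__doctests__`` list is only consulted by the default collection path.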
def loadClass(self, klass):
"""
Given a class which contains test cases, return a sorted list of
C{TestCase} instances.
"""
if not (isinstance(klass, type) or isinstance(klass, types.ClassType)):
raise TypeError("%r is not a class" % (klass,))
if not isTestCase(klass):
raise ValueError("%r is not a test case" % (klass,))
names = self.getTestCaseNames(klass)
tests = self.sort([self._makeCase(klass, self.methodPrefix+name)
for name in names])
return self.suiteFactory(tests)
loadTestsFromTestCase = loadClass
def getTestCaseNames(self, klass):
"""
Given a class that contains C{TestCase}s, return a list of names of
methods that probably contain tests.
"""
return reflect.prefixedMethodNames(klass, self.methodPrefix)
def loadMethod(self, method):
"""
Given a method of a C{TestCase} that represents a test, return a
C{TestCase} instance for that test.
"""
if not isinstance(method, types.MethodType):
raise TypeError("%r not a method" % (method,))
return self._makeCase(method.im_class, _getMethodNameInClass(method))
def _makeCase(self, klass, methodName):
return klass(methodName)
def loadPackage(self, package, recurse=False):
"""
Load tests from a module object representing a package, and return a
TestSuite containing those tests.
Tests are only loaded from modules whose name begins with 'test_'
(or whatever C{modulePrefix} is set to).
        @param package: a types.ModuleType object (or reasonable facsimile
obtained by importing) which may contain tests.
@param recurse: A boolean. If True, inspect modules within packages
within the given package (and so on), otherwise, only inspect modules
in the package itself.
@raise: TypeError if 'package' is not a package.
@return: a TestSuite created with my suiteFactory, containing all the
tests.
"""
if not isPackage(package):
raise TypeError("%r is not a package" % (package,))
pkgobj = modules.getModule(package.__name__)
if recurse:
discovery = pkgobj.walkModules()
else:
discovery = pkgobj.iterModules()
discovered = []
for disco in discovery:
if disco.name.split(".")[-1].startswith(self.modulePrefix):
discovered.append(disco)
suite = self.suiteFactory()
for modinfo in self.sort(discovered):
try:
module = modinfo.load()
except:
thingToAdd = ErrorHolder(modinfo.name, failure.Failure())
else:
thingToAdd = self.loadModule(module)
suite.addTest(thingToAdd)
return suite
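    # A minimal usage sketch (the package name is hypothetical):
    #
    #     import myproject.test
    #     suite = TestLoader().loadPackage(myproject.test, recurse=True)
    #
    # Modules that fail to import are not fatal: each failure is wrapped in an
    # ErrorHolder and reported alongside the other results.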
def loadDoctests(self, module):
"""
Return a suite of tests for all the doctests defined in C{module}.
@param module: A module object or a module name.
"""
if isinstance(module, str):
try:
module = reflect.namedAny(module)
except:
return ErrorHolder(module, failure.Failure())
if not inspect.ismodule(module):
warnings.warn("trial only supports doctesting modules")
return
extraArgs = {}
if sys.version_info > (2, 4):
# Work around Python issue2604: DocTestCase.tearDown clobbers globs
def saveGlobals(test):
"""
Save C{test.globs} and replace it with a copy so that if
necessary, the original will be available for the next test
run.
"""
test._savedGlobals = getattr(test, '_savedGlobals', test.globs)
test.globs = test._savedGlobals.copy()
extraArgs['setUp'] = saveGlobals
return doctest.DocTestSuite(module, **extraArgs)
def loadAnything(self, thing, recurse=False):
"""
Given a Python object, return whatever tests that are in it. Whatever
'in' might mean.
@param thing: A Python object. A module, method, class or package.
@param recurse: Whether or not to look in subpackages of packages.
Defaults to False.
@return: A C{TestCase} or C{TestSuite}.
"""
if isinstance(thing, types.ModuleType):
if isPackage(thing):
return self.loadPackage(thing, recurse)
return self.loadModule(thing)
elif isinstance(thing, types.ClassType):
return self.loadClass(thing)
elif isinstance(thing, type):
return self.loadClass(thing)
elif isinstance(thing, types.MethodType):
return self.loadMethod(thing)
raise TypeError("No loader for %r. Unrecognized type" % (thing,))
def loadByName(self, name, recurse=False):
"""
Given a string representing a Python object, return whatever tests
are in that object.
If C{name} is somehow inaccessible (e.g. the module can't be imported,
there is no Python object with that name etc) then return an
L{ErrorHolder}.
@param name: The fully-qualified name of a Python object.
"""
try:
thing = self.findByName(name)
except:
return ErrorHolder(name, failure.Failure())
return self.loadAnything(thing, recurse)
loadTestsFromName = loadByName
def loadByNames(self, names, recurse=False):
"""
Construct a TestSuite containing all the tests found in 'names', where
names is a list of fully qualified python names and/or filenames. The
suite returned will have no duplicate tests, even if the same object
is named twice.
"""
things = []
errors = []
for name in names:
try:
things.append(self.findByName(name))
except:
errors.append(ErrorHolder(name, failure.Failure()))
suites = [self.loadAnything(thing, recurse)
for thing in self._uniqueTests(things)]
suites.extend(errors)
return self.suiteFactory(suites)
def _uniqueTests(self, things):
"""
Gather unique suite objects from loaded things. This will guarantee
uniqueness of inherited methods on TestCases which would otherwise hash
to same value and collapse to one test unexpectedly if using simpler
means: e.g. set().
"""
seen = set()
for thing in things:
if isinstance(thing, types.MethodType):
thing = (thing, thing.im_class)
else:
thing = (thing,)
if thing not in seen:
yield thing[0]
seen.add(thing)
class DryRunVisitor(object):
"""
A visitor that makes a reporter think that every test visited has run
successfully.
"""
deprecatedModuleAttribute(
Version("Twisted", 13, 0, 0),
"Trial no longer has support for visitors",
"twisted.trial.runner", "DryRunVisitor")
def __init__(self, reporter):
"""
@param reporter: A C{TestResult} object.
"""
self.reporter = reporter
def markSuccessful(self, testCase):
"""
Convince the reporter that this test has been run successfully.
"""
self.reporter.startTest(testCase)
self.reporter.addSuccess(testCase)
self.reporter.stopTest(testCase)
class TrialRunner(object):
"""
A specialised runner that the trial front end uses.
"""
DEBUG = 'debug'
DRY_RUN = 'dry-run'
def _setUpTestdir(self):
self._tearDownLogFile()
currentDir = os.getcwd()
base = filepath.FilePath(self.workingDirectory)
testdir, self._testDirLock = util._unusedTestDirectory(base)
os.chdir(testdir.path)
return currentDir
def _tearDownTestdir(self, oldDir):
os.chdir(oldDir)
self._testDirLock.unlock()
_log = log
def _makeResult(self):
reporter = self.reporterFactory(self.stream, self.tbformat,
self.rterrors, self._log)
if self.uncleanWarnings:
reporter = UncleanWarningsReporterWrapper(reporter)
return reporter
def __init__(self, reporterFactory,
mode=None,
logfile='test.log',
stream=sys.stdout,
profile=False,
tracebackFormat='default',
realTimeErrors=False,
uncleanWarnings=False,
workingDirectory=None,
forceGarbageCollection=False,
debugger=None):
self.reporterFactory = reporterFactory
self.logfile = logfile
self.mode = mode
self.stream = stream
self.tbformat = tracebackFormat
self.rterrors = realTimeErrors
self.uncleanWarnings = uncleanWarnings
self._result = None
self.workingDirectory = workingDirectory or '_trial_temp'
self._logFileObserver = None
self._logFileObject = None
self._forceGarbageCollection = forceGarbageCollection
self.debugger = debugger
if profile:
self.run = util.profiled(self.run, 'profile.data')
def _tearDownLogFile(self):
if self._logFileObserver is not None:
log.removeObserver(self._logFileObserver.emit)
self._logFileObserver = None
if self._logFileObject is not None:
self._logFileObject.close()
self._logFileObject = None
def _setUpLogFile(self):
self._tearDownLogFile()
if self.logfile == '-':
logFile = sys.stdout
else:
logFile = file(self.logfile, 'a')
self._logFileObject = logFile
self._logFileObserver = log.FileLogObserver(logFile)
log.startLoggingWithObserver(self._logFileObserver.emit, 0)
def run(self, test):
"""
Run the test or suite and return a result object.
"""
test = unittest.decorate(test, ITestCase)
return self._runWithoutDecoration(test, self._forceGarbageCollection)
def _runWithoutDecoration(self, test, forceGarbageCollection=False):
"""
Private helper that runs the given test but doesn't decorate it.
"""
result = self._makeResult()
# decorate the suite with reactor cleanup and log starting
# This should move out of the runner and be presumed to be
# present
suite = TrialSuite([test], forceGarbageCollection)
startTime = time.time()
if self.mode == self.DRY_RUN:
for single in unittest._iterateTests(suite):
result.startTest(single)
result.addSuccess(single)
result.stopTest(single)
else:
if self.mode == self.DEBUG:
run = lambda: self.debugger.runcall(suite.run, result)
else:
run = lambda: suite.run(result)
oldDir = self._setUpTestdir()
try:
self._setUpLogFile()
run()
finally:
self._tearDownLogFile()
self._tearDownTestdir(oldDir)
endTime = time.time()
done = getattr(result, 'done', None)
if done is None:
warnings.warn(
"%s should implement done() but doesn't. Falling back to "
"printErrors() and friends." % reflect.qual(result.__class__),
category=DeprecationWarning, stacklevel=3)
result.printErrors()
result.writeln(result.separator)
result.writeln('Ran %d tests in %.3fs', result.testsRun,
endTime - startTime)
result.write('\n')
result.printSummary()
else:
result.done()
return result
def runUntilFailure(self, test):
"""
Repeatedly run C{test} until it fails.
"""
count = 0
while True:
count += 1
self.stream.write("Test Pass %d\n" % (count,))
if count == 1:
result = self.run(test)
else:
result = self._runWithoutDecoration(test)
if result.testsRun == 0:
break
if not result.wasSuccessful():
break
return result
|
apache-2.0
| 4,596,502,648,089,220,000 | 32.029004 | 83 | 0.608048 | false |
rvbelefonte/Rockfish2
|
rockfish2/extensions/cps/model.py
|
1
|
3390
|
"""
Tools for working with Computer Programs in Seismology velocity models
"""
import os
import numpy as np
import datetime
import pandas as pd
from scipy.interpolate import interp1d
import matplotlib.pyplot as plt
from rockfish2 import logging
from rockfish2.models.profile import Profile
class CPSModel1d(Profile):
def __init__(self, *args, **kwargs):
self.NAME = kwargs.pop('name', '1D model')
self.UNITS = kwargs.pop('units', 'KGS')
self.ISOTROPY = kwargs.pop('isotropy', 'ISOTROPIC')
self.SHAPE = kwargs.pop('shape', 'FLAT EARTH')
self.DIM = kwargs.pop('dim', '1-D')
Profile.__init__(self, *args, **kwargs)
def __str__(self):
return self.write()
def write(self, path_or_buf=None, float_format='%10.6f', **kwargs):
"""
Write profile to the Computer Programs in Seismology model format
Parameters
----------
path_or_buf : string or file handle, default None
File path or object, if None is provided the result is returned as
a string.
"""
model = self.model.copy()
col = ['hr'] + [k for k in model if k != 'hr']
model['hr'] = np.concatenate((np.diff(np.asarray(model.index)), [0.0]))
model.index = np.arange(len(model))
#model = model[0:len(model) - 1]
sng = "MODEL\n"
sng += "{:}\n".format(self.NAME)
sng += "{:}\n".format(self.ISOTROPY)
sng += "{:}\n".format(self.UNITS)
sng += "{:}\n".format(self.SHAPE)
sng += "{:}\n".format(self.DIM)
sng += "CONSTANT VELOCITY\n"
sng += "#\n"
sng += "Created by: {:}{:}\n"\
.format(self.__module__, self.__class__.__name__)
sng += "Created on: {:}\n".format(datetime.datetime.now())
sng += "#\n"
sng += model[col].to_csv(sep='\t', index=False,
float_format=float_format, **kwargs)
if path_or_buf is None:
return sng
if hasattr(path_or_buf, 'write'):
path_or_buf.write(sng)
else:
f = open(path_or_buf, 'w')
f.write(sng)
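    # A rough sketch of what write() emits (column names and layer values are
    # assumed; they depend on what the profile holds): the header lines built
    # above, a comment block, then one tab-separated row per layer with the
    # thickness column ('hr') rebuilt from the depth index:
    #
    #     MODEL
    #     1D model
    #     ISOTROPIC
    #     KGS
    #     FLAT EARTH
    #     1-D
    #     CONSTANT VELOCITY
    #     #
    #     Created by: ...
    #     Created on: ...
    #     #
    #     hr      vp      vs      rho
    #     2.000000        1.500000        0.000000        1.030000
    #
    # The final row is written with hr = 0 to mark the underlying halfspace.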
def read(self, filename, sep='\t'):
"""
Write profile from the Computer Programs in Seismology model format
"""
f = open(filename, 'rb')
kind = f.readline().replace('\n', '')
assert kind.startswith('MODEL'),\
'File does not appear to be CPS format'
self.NAME = f.readline().replace('\n', '')
self.ISOTROPY = f.readline().replace('\n', '')
self.UNITS = f.readline().replace('\n', '')
self.SHAPE = f.readline().replace('\n', '')
self.DIM = f.readline().replace('\n', '')
_ = f.readline().replace('\n', '')
_ = f.readline().replace('\n', '')
_ = f.readline().replace('\n', '')
_ = f.readline().replace('\n', '')
_ = f.readline().replace('\n', '')
cols = f.readline().replace('\n', '').split()
self.model = pd.read_csv(filename, sep=sep, skiprows=11,
index_col=0)
try:
dz = self.model.index[:]
z = np.cumsum(np.asarray(dz)) - dz[0]
if z[-1] == 0:
z[-1] = dz[-2]
self.model.index = z
self.model.index.name = 'depth'
except:
pass
|
gpl-2.0
| -763,694,622,314,815,400 | 31.596154 | 79 | 0.515929 | false |
Metronote/metronotesd-alpha
|
lib/api.py
|
1
|
26200
|
#! /usr/bin/python3
import sys
import os
import threading
import decimal
import time
import json
import re
import requests
import collections
import logging
from logging import handlers as logging_handlers
D = decimal.Decimal
import apsw
import flask
from flask.ext.httpauth import HTTPBasicAuth
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
import jsonrpc
from jsonrpc import dispatcher
from . import (config, bitcoin, exceptions, util)
from . import (send, order, btcpay, issuance, broadcast, bet, dividend, burn, cancel, callback, rps, rpsresolve, publish)
API_TABLES = ['balances', 'credits', 'debits', 'bets', 'bet_matches',
'broadcasts', 'btcpays', 'burns', 'callbacks', 'cancels',
'dividends', 'issuances', 'orders', 'order_matches', 'sends',
'bet_expirations', 'order_expirations', 'bet_match_expirations',
'order_match_expirations', 'bet_match_resolutions', 'rps',
'rpsresolves', 'rps_matches', 'rps_expirations', 'rps_match_expirations',
'mempool']
API_TRANSACTIONS = ['bet', 'broadcast', 'btcpay', 'burn', 'cancel',
'callback', 'dividend', 'issuance', 'order', 'send',
'rps', 'rpsresolve', 'publish']
COMMONS_ARGS = ['encoding', 'fee_per_kb', 'regular_dust_size',
'multisig_dust_size', 'op_return_value', 'pubkey',
'allow_unconfirmed_inputs', 'fee', 'fee_provided']
API_MAX_LOG_SIZE = 10 * 1024 * 1024 #max log size of 10 MB before rotation (make configurable later)
API_MAX_LOG_COUNT = 10
current_api_status_code = None #is updated by the APIStatusPoller
current_api_status_response_json = None #is updated by the APIStatusPoller
# TODO: ALL queries EVERYWHERE should be done with these methods
def db_query(db, statement, bindings=(), callback=None, **callback_args):
cursor = db.cursor()
if hasattr(callback, '__call__'):
cursor.execute(statement, bindings)
for row in cursor:
callback(row, **callback_args)
results = None
else:
results = list(cursor.execute(statement, bindings))
cursor.close()
return results
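# A minimal usage sketch: db_query() either returns the full result list or streams
# rows through a callback; rows are dict-like (as used elsewhere in this module),
# and extra keyword arguments are forwarded to the callback. The address value is
# hypothetical:
#
#     rows = db_query(db, 'SELECT * FROM balances WHERE address = ?', (address,))
#
#     def tally(row, totals):
#         totals[row['asset']] = totals.get(row['asset'], 0) + row['quantity']
#     totals = {}
#     db_query(db, 'SELECT * FROM credits', callback=tally, totals=totals)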
def get_rows(db, table, filters=[], filterop='AND', order_by=None, order_dir=None, start_block=None, end_block=None,
status=None, limit=1000, offset=0, show_expired=True):
"""Filters results based on a filter data structure (as used by the API)"""
def value_to_marker(value):
# if value is an array place holder is (?,?,?,..)
if isinstance(value, list):
return '''({})'''.format(','.join(['?' for e in range(0,len(value))]))
else:
return '''?'''
# TODO: Document that op can be anything that SQLite3 accepts.
if not table or table.lower() not in API_TABLES:
raise Exception('Unknown table')
if filterop and filterop.upper() not in ['OR', 'AND']:
raise Exception('Invalid filter operator (OR, AND)')
if order_dir and order_dir.upper() not in ['ASC', 'DESC']:
raise Exception('Invalid order direction (ASC, DESC)')
if not isinstance(limit, int):
raise Exception('Invalid limit')
elif limit > 1000:
raise Exception('Limit should be lower or equal to 1000')
if not isinstance(offset, int):
raise Exception('Invalid offset')
# TODO: accept an object: {'field1':'ASC', 'field2': 'DESC'}
if order_by and not re.compile('^[a-z0-9_]+$').match(order_by):
raise Exception('Invalid order_by, must be a field name')
if isinstance(filters, dict): #single filter entry, convert to a one entry list
filters = [filters,]
elif not isinstance(filters, list):
filters = []
# TODO: Document this! (Each filter can be an ordered list.)
new_filters = []
for filter_ in filters:
if type(filter_) in (list, tuple) and len(filter_) in [3, 4]:
new_filter = {'field': filter_[0], 'op': filter_[1], 'value': filter_[2]}
if len(filter_) == 4: new_filter['case_sensitive'] = filter_[3]
new_filters.append(new_filter)
elif type(filter_) == dict:
new_filters.append(filter_)
else:
raise Exception('Unknown filter type')
filters = new_filters
# validate filter(s)
for filter_ in filters:
for field in ['field', 'op', 'value']: #should have all fields
if field not in filter_:
raise Exception("A specified filter is missing the '%s' field" % field)
if not isinstance(filter_['value'], (str, int, float, list)):
raise Exception("Invalid value for the field '%s'" % filter_['field'])
if isinstance(filter_['value'], list) and filter_['op'].upper() not in ['IN', 'NOT IN']:
raise Exception("Invalid value for the field '%s'" % filter_['field'])
if filter_['op'].upper() not in ['=', '==', '!=', '>', '<', '>=', '<=', 'IN', 'LIKE', 'NOT IN', 'NOT LIKE']:
raise Exception("Invalid operator for the field '%s'" % filter_['field'])
if 'case_sensitive' in filter_ and not isinstance(filter_['case_sensitive'], bool):
raise Exception("case_sensitive must be a boolean")
# SELECT
statement = '''SELECT * FROM {}'''.format(table)
# WHERE
bindings = []
conditions = []
for filter_ in filters:
case_sensitive = False if 'case_sensitive' not in filter_ else filter_['case_sensitive']
if filter_['op'] == 'LIKE' and case_sensitive == False:
filter_['field'] = '''UPPER({})'''.format(filter_['field'])
filter_['value'] = filter_['value'].upper()
marker = value_to_marker(filter_['value'])
conditions.append('''{} {} {}'''.format(filter_['field'], filter_['op'], marker))
if isinstance(filter_['value'], list):
bindings += filter_['value']
else:
bindings.append(filter_['value'])
# AND filters
more_conditions = []
if table not in ['balances', 'order_matches', 'bet_matches']:
if start_block != None:
more_conditions.append('''block_index >= ?''')
bindings.append(start_block)
if end_block != None:
more_conditions.append('''block_index <= ?''')
bindings.append(end_block)
elif table in ['order_matches', 'bet_matches']:
if start_block != None:
more_conditions.append('''tx0_block_index >= ?''')
bindings.append(start_block)
if end_block != None:
more_conditions.append('''tx1_block_index <= ?''')
bindings.append(end_block)
# status
if isinstance(status, list) and len(status) > 0:
more_conditions.append('''status IN {}'''.format(value_to_marker(status)))
bindings += status
elif isinstance(status, str) and status != '':
more_conditions.append('''status == ?''')
bindings.append(status)
# legacy filters
if not show_expired and table == 'orders':
#Ignore BTC orders one block early.
expire_index = util.last_block(db)['block_index'] + 1
more_conditions.append('''((give_asset == ? AND expire_index > ?) OR give_asset != ?)''')
bindings += [config.BTC, expire_index, config.BTC]
if (len(conditions) + len(more_conditions)) > 0:
statement += ''' WHERE'''
all_conditions = []
if len(conditions) > 0:
all_conditions.append('''({})'''.format(''' {} '''.format(filterop.upper()).join(conditions)))
if len(more_conditions) > 0:
all_conditions.append('''({})'''.format(''' AND '''.join(more_conditions)))
statement += ''' {}'''.format(''' AND '''.join(all_conditions))
# ORDER BY
if order_by != None:
statement += ''' ORDER BY {}'''.format(order_by)
if order_dir != None:
statement += ''' {}'''.format(order_dir.upper())
# LIMIT
if limit:
statement += ''' LIMIT {}'''.format(limit)
if offset:
statement += ''' OFFSET {}'''.format(offset)
return db_query(db, statement, tuple(bindings))
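# A minimal usage sketch: the ``filters`` argument above accepts a single dict, a
# list of dicts, or 3/4-element tuples of (field, op, value[, case_sensitive]).
# For example:
#
#     get_rows(db, 'balances',
#              filters=[{'field': 'asset', 'op': '==', 'value': 'XMN'},
#                       ('quantity', '>', 0)],
#              filterop='AND', order_by='quantity', order_dir='DESC', limit=10)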
def compose_transaction(db, name, params,
encoding='auto',
fee_per_kb=config.DEFAULT_FEE_PER_KB,
regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE,
multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE,
op_return_value=config.DEFAULT_OP_RETURN_VALUE,
pubkey=None,
allow_unconfirmed_inputs=False,
fee=None,
fee_provided=0):
tx_info = sys.modules['lib.{}'.format(name)].compose(db, **params)
return bitcoin.transaction(tx_info, encoding=encoding,
fee_per_kb=fee_per_kb,
regular_dust_size=regular_dust_size,
multisig_dust_size=multisig_dust_size,
op_return_value=op_return_value,
public_key_hex=pubkey,
allow_unconfirmed_inputs=allow_unconfirmed_inputs,
exact_fee=fee,
fee_provided=fee_provided)
def sign_transaction(unsigned_tx_hex, private_key_wif=None):
return bitcoin.sign_tx(unsigned_tx_hex, private_key_wif=private_key_wif)
def broadcast_transaction(signed_tx_hex):
if not config.TESTNET and config.BROADCAST_TX_MAINNET in ['bci', 'bci-failover']:
url = "https://blockchain.info/pushtx"
params = {'tx': signed_tx_hex}
response = requests.post(url, data=params)
if response.text.lower() != 'transaction submitted' or response.status_code != 200:
if config.BROADCAST_TX_MAINNET == 'bci-failover':
return bitcoin.broadcast_tx(signed_tx_hex)
else:
raise Exception(response.text)
return response.text
else:
return bitcoin.broadcast_tx(signed_tx_hex)
def do_transaction(db, name, params, private_key_wif=None, **kwargs):
unsigned_tx = compose_transaction(db, name, params, **kwargs)
signed_tx = sign_transaction(unsigned_tx, private_key_wif=private_key_wif)
return broadcast_transaction(signed_tx)
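# A minimal sketch of the flow that do_transaction() wraps (parameter values are
# hypothetical): the three helpers above chained by hand would look like this:
#
#     unsigned = compose_transaction(db, 'send',
#                                    {'source': source_address,
#                                     'destination': destination_address,
#                                     'asset': 'XMN',
#                                     'quantity': 100000000})
#     signed = sign_transaction(unsigned, private_key_wif=wif)
#     result = broadcast_transaction(signed)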
class APIStatusPoller(threading.Thread):
"""Poll every few seconds for the length of time since the last version check, as well as the bitcoin status"""
def __init__(self):
self.last_version_check = 0
self.last_database_check = 0
threading.Thread.__init__(self)
def run(self):
global current_api_status_code, current_api_status_response_json
db = util.connect_to_db(flags='SQLITE_OPEN_READONLY')
while True:
try:
# Check version.
                if time.time() - self.last_version_check >= 10: # Ten seconds since last check.
code = 10
util.version_check(db)
self.last_version_check = time.time()
# Check that bitcoind is running, communicable, and caught up with the blockchain.
# Check that the database has caught up with bitcoind.
if time.time() - self.last_database_check > 10 * 60: # Ten minutes since last check.
code = 11
bitcoin.bitcoind_check(db)
code = 12
util.database_check(db, bitcoin.get_block_count()) # TODO: If not reparse or rollback, once those use API.
self.last_database_check = time.time()
except Exception as e:
exception_name = e.__class__.__name__
exception_text = str(e)
jsonrpc_response = jsonrpc.exceptions.JSONRPCServerError(message=exception_name, data=exception_text)
current_api_status_code = code
current_api_status_response_json = jsonrpc_response.json.encode()
else:
current_api_status_code = None
current_api_status_response_json = None
time.sleep(2)
class APIServer(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
db = util.connect_to_db(flags='SQLITE_OPEN_READONLY')
app = flask.Flask(__name__)
auth = HTTPBasicAuth()
@auth.get_password
def get_pw(username):
if username == config.RPC_USER:
return config.RPC_PASSWORD
return None
######################
#READ API
# Generate dynamically get_{table} methods
def generate_get_method(table):
def get_method(**kwargs):
return get_rows(db, table=table, **kwargs)
return get_method
for table in API_TABLES:
new_method = generate_get_method(table)
new_method.__name__ = 'get_{}'.format(table)
dispatcher.add_method(new_method)
@dispatcher.add_method
def sql(query, bindings=[]):
return db_query(db, query, tuple(bindings))
######################
#WRITE/ACTION API
# Generate dynamically create_{transaction} and do_{transaction} methods
def generate_create_method(transaction):
def split_params(**kwargs):
transaction_args = {}
common_args = {}
private_key_wif = None
for key in kwargs:
if key in COMMONS_ARGS:
common_args[key] = kwargs[key]
elif key == 'privkey':
private_key_wif = kwargs[key]
else:
transaction_args[key] = kwargs[key]
return transaction_args, common_args, private_key_wif
def create_method(**kwargs):
transaction_args, common_args, private_key_wif = split_params(**kwargs)
return compose_transaction(db, name=transaction, params=transaction_args, **common_args)
def do_method(**kwargs):
transaction_args, common_args, private_key_wif = split_params(**kwargs)
return do_transaction(db, name=transaction, params=transaction_args, private_key_wif=private_key_wif, **common_args)
return create_method, do_method
for transaction in API_TRANSACTIONS:
create_method, do_method = generate_create_method(transaction)
create_method.__name__ = 'create_{}'.format(transaction)
do_method.__name__ = 'do_{}'.format(transaction)
dispatcher.add_method(create_method)
dispatcher.add_method(do_method)
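        # A minimal sketch of how the generated methods are used: each one is
        # exposed over JSON-RPC 2.0, so a client POSTs something like this
        # (addresses are hypothetical):
        #
        #     {"jsonrpc": "2.0", "id": 0, "method": "create_send",
        #      "params": {"source": "1Source...", "destination": "1Dest...",
        #                 "asset": "XMN", "quantity": 100000000}}
        #
        # Common arguments such as "encoding" or "fee_per_kb" are split off by
        # split_params() and passed through to compose_transaction().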
@dispatcher.add_method
def sign_tx(unsigned_tx_hex, privkey=None):
return sign_transaction(unsigned_tx_hex, private_key_wif=privkey)
@dispatcher.add_method
def broadcast_tx(signed_tx_hex):
return broadcast_transaction(signed_tx_hex)
@dispatcher.add_method
def get_messages(block_index):
if not isinstance(block_index, int):
raise Exception("block_index must be an integer.")
cursor = db.cursor()
cursor.execute('select * from messages where block_index = ? order by message_index asc', (block_index,))
messages = cursor.fetchall()
cursor.close()
return messages
@dispatcher.add_method
def get_messages_by_index(message_indexes):
"""Get specific messages from the feed, based on the message_index.
            @param message_indexes: A single index, or a list of one or more message indexes to retrieve.
"""
if not isinstance(message_indexes, list):
message_indexes = [message_indexes,]
for idx in message_indexes: #make sure the data is clean
if not isinstance(idx, int):
raise Exception("All items in message_indexes are not integers")
cursor = db.cursor()
cursor.execute('SELECT * FROM messages WHERE message_index IN (%s) ORDER BY message_index ASC'
% (','.join([str(x) for x in message_indexes]),))
messages = cursor.fetchall()
cursor.close()
return messages
@dispatcher.add_method
def get_xmn_supply():
return util.xmn_supply(db)
@dispatcher.add_method
def get_asset_info(assets):
if not isinstance(assets, list):
raise Exception("assets must be a list of asset names, even if it just contains one entry")
assetsInfo = []
for asset in assets:
# BTC and XMN.
if asset in [config.BTC, config.XMN]:
if asset == config.BTC:
supply = bitcoin.get_btc_supply(normalize=False)
else:
supply = util.xmn_supply(db)
assetsInfo.append({
'asset': asset,
'owner': None,
'divisible': True,
'locked': False,
'supply': supply,
'callable': False,
'call_date': None,
'call_price': None,
'description': '',
'issuer': None
})
continue
# User‐created asset.
cursor = db.cursor()
issuances = list(cursor.execute('''SELECT * FROM issuances WHERE (status = ? AND asset = ?) ORDER BY block_index ASC''', ('valid', asset)))
cursor.close()
if not issuances: break #asset not found, most likely
else: last_issuance = issuances[-1]
supply = 0
locked = False
for e in issuances:
if e['locked']: locked = True
supply += e['quantity']
assetsInfo.append({
'asset': asset,
'owner': last_issuance['issuer'],
'divisible': bool(last_issuance['divisible']),
'locked': locked,
'supply': supply,
'callable': bool(last_issuance['callable']),
'call_date': last_issuance['call_date'],
'call_price': last_issuance['call_price'],
'description': last_issuance['description'],
'issuer': last_issuance['issuer']})
return assetsInfo
@dispatcher.add_method
def get_block_info(block_index):
assert isinstance(block_index, int)
cursor = db.cursor()
cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,))
try:
blocks = list(cursor)
assert len(blocks) == 1
block = blocks[0]
except IndexError:
raise exceptions.DatabaseError('No blocks found.')
cursor.close()
return block
@dispatcher.add_method
def get_blocks(block_indexes):
"""fetches block info and messages for the specified block indexes"""
if not isinstance(block_indexes, (list, tuple)):
raise Exception("block_indexes must be a list of integers.")
if len(block_indexes) >= 250:
raise Exception("can only specify up to 250 indexes at a time.")
block_indexes_str = ','.join([str(x) for x in block_indexes])
cursor = db.cursor()
cursor.execute('SELECT * FROM blocks WHERE block_index IN (%s) ORDER BY block_index ASC'
% (block_indexes_str,))
blocks = cursor.fetchall()
cursor.execute('SELECT * FROM messages WHERE block_index IN (%s) ORDER BY block_index ASC, message_index ASC'
% (block_indexes_str,))
messages = collections.deque(cursor.fetchall())
for block in blocks:
messages_in_block = []
block['_messages'] = []
while len(messages) and messages[0]['block_index'] == block['block_index']:
block['_messages'].append(messages.popleft())
assert not len(messages) #should have been cleared out
cursor.close()
return blocks
@dispatcher.add_method
def get_running_info():
latestBlockIndex = bitcoin.get_block_count()
try:
util.database_check(db, latestBlockIndex)
except exceptions.DatabaseError as e:
caught_up = False
else:
caught_up = True
try:
last_block = util.last_block(db)
except:
last_block = {'block_index': None, 'block_hash': None, 'block_time': None}
try:
last_message = util.last_message(db)
except:
last_message = None
return {
'db_caught_up': caught_up,
'bitcoin_block_count': latestBlockIndex,
'last_block': last_block,
'last_message_index': last_message['message_index'] if last_message else -1,
'running_testnet': config.TESTNET,
'running_testcoin': config.TESTCOIN,
'version_major': config.VERSION_MAJOR,
'version_minor': config.VERSION_MINOR,
'version_revision': config.VERSION_REVISION
}
@dispatcher.add_method
def get_element_counts():
counts = {}
cursor = db.cursor()
for element in ['transactions', 'blocks', 'debits', 'credits', 'balances', 'sends', 'orders',
'order_matches', 'btcpays', 'issuances', 'broadcasts', 'bets', 'bet_matches', 'dividends',
'burns', 'cancels', 'callbacks', 'order_expirations', 'bet_expirations', 'order_match_expirations',
'bet_match_expirations', 'messages']:
cursor.execute("SELECT COUNT(*) AS count FROM %s" % element)
count_list = cursor.fetchall()
assert len(count_list) == 1
counts[element] = count_list[0]['count']
cursor.close()
return counts
@dispatcher.add_method
def get_asset_names():
cursor = db.cursor()
names = [row['asset'] for row in cursor.execute("SELECT DISTINCT asset FROM issuances WHERE status = 'valid' ORDER BY asset ASC")]
cursor.close()
return names
def _set_cors_headers(response):
if config.RPC_ALLOW_CORS:
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS'
response.headers['Access-Control-Allow-Headers'] = 'DNT,X-Mx-ReqToken,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
@app.route('/', methods=["OPTIONS",])
@app.route('/api/', methods=["OPTIONS",])
def handle_options():
response = flask.Response('', 204)
_set_cors_headers(response)
return response
@app.route('/', methods=["POST",])
@app.route('/api/', methods=["POST",])
@auth.login_required
def handle_post():
try:
request_json = flask.request.get_data().decode('utf-8')
request_data = json.loads(request_json)
assert 'id' in request_data and request_data['jsonrpc'] == "2.0" and request_data['method']
# params may be omitted
except:
obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest(data="Invalid JSON-RPC 2.0 request format")
return flask.Response(obj_error.json.encode(), 200, mimetype='application/json')
#only arguments passed as a dict are supported
if request_data.get('params', None) and not isinstance(request_data['params'], dict):
obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest(
data='Arguments must be passed as a JSON object (list of unnamed arguments not supported)')
return flask.Response(obj_error.json.encode(), 200, mimetype='application/json')
#return an error if API fails checks
if not config.FORCE and current_api_status_code:
return flask.Response(current_api_status_response_json, 200, mimetype='application/json')
jsonrpc_response = jsonrpc.JSONRPCResponseManager.handle(request_json, dispatcher)
response = flask.Response(jsonrpc_response.json.encode(), 200, mimetype='application/json')
_set_cors_headers(response)
return response
if not config.UNITTEST: #skip setting up logs when for the test suite
api_logger = logging.getLogger("tornado")
h = logging_handlers.RotatingFileHandler(os.path.join(config.DATA_DIR, "api.access.log"), 'a', API_MAX_LOG_SIZE, API_MAX_LOG_COUNT)
api_logger.setLevel(logging.INFO)
api_logger.addHandler(h)
api_logger.propagate = False
http_server = HTTPServer(WSGIContainer(app), xheaders=True)
try:
http_server.listen(config.RPC_PORT, address=config.RPC_HOST)
IOLoop.instance().start()
except OSError:
raise Exception("Cannot start the API subsystem. Is {} already running, or is something else listening on port {}?".format(config.XMN_CLIENT, config.RPC_PORT))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
mit
| 844,158,280,509,889,700 | 43.328257 | 171 | 0.558554 | false |
madirish/hector
|
app/scripts/screenshot_scan/screenshot_scan.py
|
1
|
4015
|
#!/usr/bin/python
"""
This script is part of HECTOR.
by Josh Bauer <joshbauer3@gmail.com>
Modified by: Justin C. Klein Keane <jukeane@sas.upenn.edu>
Last modified: 31 July, 2014
This script requires python 2.5 or higher.
This script is a threaded screenshot scan
using phantomjs to render screenshots for urls
in Hector's url table. Files are stored in the
"app/screenshots" directory. This script is
called by "screenshot_scan.php".
"""
import Queue
import threading
import MySQLdb
import time
import ConfigParser
import urllib2
import subprocess
import logging
import sys, os
# appPath - for example /opt/hector/app
appPath = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../")
sys.path.append(appPath + "/lib/pylib")
from pull_config import Configurator
DEBUG = False
# Credentials used for the database connection
configr = Configurator()
DB = configr.get_var('db')
HOST = configr.get_var('db_host')
USERNAME = configr.get_var('db_user')
PASSWORD = configr.get_var('db_pass')
PHANTOMJS = configr.get_var('phantomjs_exec_path')
if PHANTOMJS == '/no/such/path' :
raise Exception('phantomJS not configured, please update your config.ini with the proper path')
#logging set up
logger = logging.getLogger('screenshot scan')
hdlr = logging.FileHandler(appPath + '/logs/message_log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.WARNING)
if DEBUG : logger.setLevel(logging.DEBUG)
logger.info('screenshot_scan.py is starting in Python')
class ScreenShotThread(threading.Thread):
"""Threaded Screenshot Grab"""
def __init__(self, urls):
threading.Thread.__init__(self)
self.urls = urls
def run(self):
while True:
#grabs url from queue
self.url = self.urls.get() #url for database purposes
self.full_url='' #url for phantomjs/urllib2
if not self.url.startswith('http'):
self.full_url = 'http://'
self.full_url += self.url
try :
response = urllib2.urlopen(self.full_url,timeout=10)
response = response.getcode()
except :
response = 'failed'
logger.debug(self.name + " " + self.full_url + ' gave response: ' + str(response))
if response != 'failed': self.take_snapshot()
#signals to queue job is done
self.urls.task_done()
def take_snapshot(self):
"""calls phantomjs to capture screenshot and updates the database"""
filter=['/','.',':',';']
filename = self.full_url
for c in filter : filename=filename.replace(c, '_')
filename += '_' + str(int(time.time())) + '.png'
command = PHANTOMJS + ' /opt/hector/app/scripts/snapshot.js \'' + self.full_url +'\' \'' + filename +'\''
logger.debug(self.name + " command: " + command + " start")
proc = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
logger.debug(self.name + " command: "+ command + "\n\toutput: " + out)
if out.count('Status: success')>0 :
conn = MySQLdb.connect(host=HOST,
user=USERNAME,
passwd=PASSWORD,
db=DB)
cursor = conn.cursor()
cursor.execute('update url set url_screenshot=%s where url_url=%s',(filename,self.url))
conn.commit()
conn = MySQLdb.connect(host=HOST,
user=USERNAME,
passwd=PASSWORD,
db=DB)
cursor = conn.cursor()
cursor.execute('select url_url from url')
results = cursor.fetchall()
conn.close()
urls=Queue.Queue()
#initialize threads
for i in range(10):
t = ScreenShotThread(urls)
t.setDaemon(True)
t.start()
#populate the queue
for result in results: urls.put(result[0])
#wait for the queue to be emptied
urls.join()
|
gpl-3.0
| 7,748,538,781,216,978,000 | 33.62069 | 113 | 0.630386 | false |
neeasade/qutebrowser
|
tests/unit/completion/test_models.py
|
1
|
15355
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016 Ryan Roden-Corrent (rcorre) <ryan@rcorre.net>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for completion models."""
import collections
from datetime import datetime
import pytest
from PyQt5.QtCore import QUrl
from PyQt5.QtWidgets import QTreeView
from qutebrowser.completion.models import miscmodels, urlmodel, configmodel
from qutebrowser.browser.webkit import history
from qutebrowser.config import sections, value
def _get_completions(model):
"""Collect all the completion entries of a model, organized by category.
The result is a list of form:
[
(CategoryName: [(name, desc, misc), ...]),
(CategoryName: [(name, desc, misc), ...]),
...
]
"""
completions = []
for i in range(0, model.rowCount()):
category = model.item(i)
entries = []
for j in range(0, category.rowCount()):
name = category.child(j, 0)
desc = category.child(j, 1)
misc = category.child(j, 2)
entries.append((name.text(), desc.text(), misc.text()))
completions.append((category.text(), entries))
return completions
def _patch_cmdutils(monkeypatch, stubs, symbol):
"""Patch the cmdutils module to provide fake commands."""
cmd_utils = stubs.FakeCmdUtils({
'stop': stubs.FakeCommand(name='stop', desc='stop qutebrowser'),
'drop': stubs.FakeCommand(name='drop', desc='drop all user data'),
'roll': stubs.FakeCommand(name='roll', desc='never gonna give you up'),
'hide': stubs.FakeCommand(name='hide', hide=True),
'depr': stubs.FakeCommand(name='depr', deprecated=True),
})
monkeypatch.setattr(symbol, cmd_utils)
def _patch_configdata(monkeypatch, stubs, symbol):
"""Patch the configdata module to provide fake data."""
data = collections.OrderedDict([
('general', sections.KeyValue(
('time',
value.SettingValue(stubs.FakeConfigType('fast', 'slow'),
default='slow'),
'Is an illusion.\n\nLunchtime doubly so.'),
('volume',
value.SettingValue(stubs.FakeConfigType('0', '11'),
default='11'),
'Goes to 11'))),
('ui', sections.KeyValue(
('gesture',
value.SettingValue(stubs.FakeConfigType(('on', 'off')),
default='off'),
'Waggle your hands to control qutebrowser'),
('mind',
value.SettingValue(stubs.FakeConfigType(('on', 'off')),
default='off'),
'Enable mind-control ui (experimental)'),
('voice',
value.SettingValue(stubs.FakeConfigType(('on', 'off')),
default='off'),
'Whether to respond to voice commands'))),
])
monkeypatch.setattr(symbol, data)
def _patch_config_section_desc(monkeypatch, stubs, symbol):
"""Patch the configdata module to provide fake SECTION_DESC."""
section_desc = {
'general': 'General/miscellaneous options.',
'ui': 'General options related to the user interface.',
}
monkeypatch.setattr(symbol, section_desc)
def _mock_view_index(model, category_idx, child_idx, qtbot):
"""Create a tree view from a model and set the current index.
Args:
model: model to create a fake view for.
category_idx: index of the category to select.
child_idx: index of the child item under that category to select.
"""
view = QTreeView()
qtbot.add_widget(view)
view.setModel(model)
idx = model.indexFromItem(model.item(category_idx).child(child_idx))
view.setCurrentIndex(idx)
return view
@pytest.fixture
def quickmarks(quickmark_manager_stub):
"""Pre-populate the quickmark-manager stub with some quickmarks."""
quickmark_manager_stub.marks = collections.OrderedDict([
('aw', 'https://wiki.archlinux.org'),
('ddg', 'https://duckduckgo.com'),
('wiki', 'https://wikipedia.org'),
])
return quickmark_manager_stub
@pytest.fixture
def bookmarks(bookmark_manager_stub):
"""Pre-populate the bookmark-manager stub with some quickmarks."""
bookmark_manager_stub.marks = collections.OrderedDict([
('https://github.com', 'GitHub'),
('https://python.org', 'Welcome to Python.org'),
('http://qutebrowser.org', 'qutebrowser | qutebrowser'),
])
return bookmark_manager_stub
@pytest.fixture
def web_history(stubs, web_history_stub):
"""Pre-populate the web-history stub with some history entries."""
web_history_stub.history_dict = collections.OrderedDict([
('http://qutebrowser.org', history.Entry(
datetime(2015, 9, 5).timestamp(),
QUrl('http://qutebrowser.org'), 'qutebrowser | qutebrowser')),
('https://python.org', history.Entry(
datetime(2016, 3, 8).timestamp(),
QUrl('https://python.org'), 'Welcome to Python.org')),
('https://github.com', history.Entry(
datetime(2016, 5, 1).timestamp(),
QUrl('https://github.com'), 'GitHub')),
])
return web_history_stub
def test_command_completion(monkeypatch, stubs, config_stub, key_config_stub):
"""Test the results of command completion.
Validates that:
- only non-hidden and non-deprecated commands are included
- commands are sorted by name
- the command description is shown in the desc column
- the binding (if any) is shown in the misc column
- aliases are included
"""
_patch_cmdutils(monkeypatch, stubs,
'qutebrowser.completion.models.miscmodels.cmdutils')
config_stub.data['aliases'] = {'rock': 'roll'}
key_config_stub.set_bindings_for('normal', {'s': 'stop', 'rr': 'roll'})
actual = _get_completions(miscmodels.CommandCompletionModel())
assert actual == [
("Commands", [
('drop', 'drop all user data', ''),
('rock', "Alias for 'roll'", ''),
('roll', 'never gonna give you up', 'rr'),
('stop', 'stop qutebrowser', 's')
])
]
def test_help_completion(monkeypatch, stubs):
"""Test the results of command completion.
Validates that:
- only non-hidden and non-deprecated commands are included
- commands are sorted by name
- the command description is shown in the desc column
- the binding (if any) is shown in the misc column
- aliases are included
- only the first line of a multiline description is shown
"""
module = 'qutebrowser.completion.models.miscmodels'
_patch_cmdutils(monkeypatch, stubs, module + '.cmdutils')
_patch_configdata(monkeypatch, stubs, module + '.configdata.DATA')
actual = _get_completions(miscmodels.HelpCompletionModel())
assert actual == [
("Commands", [
(':drop', 'drop all user data', ''),
(':roll', 'never gonna give you up', ''),
(':stop', 'stop qutebrowser', '')
]),
("Settings", [
('general->time', 'Is an illusion.', ''),
('general->volume', 'Goes to 11', ''),
('ui->gesture', 'Waggle your hands to control qutebrowser', ''),
('ui->mind', 'Enable mind-control ui (experimental)', ''),
('ui->voice', 'Whether to respond to voice commands', ''),
])
]
def test_quickmark_completion(quickmarks):
"""Test the results of quickmark completion."""
actual = _get_completions(miscmodels.QuickmarkCompletionModel())
assert actual == [
("Quickmarks", [
('aw', 'https://wiki.archlinux.org', ''),
('ddg', 'https://duckduckgo.com', ''),
('wiki', 'https://wikipedia.org', ''),
])
]
def test_bookmark_completion(bookmarks):
"""Test the results of bookmark completion."""
actual = _get_completions(miscmodels.BookmarkCompletionModel())
assert actual == [
("Bookmarks", [
('https://github.com', 'GitHub', ''),
('https://python.org', 'Welcome to Python.org', ''),
('http://qutebrowser.org', 'qutebrowser | qutebrowser', ''),
])
]
def test_url_completion(config_stub, web_history, quickmarks, bookmarks):
"""Test the results of url completion.
Verify that:
- quickmarks, bookmarks, and urls are included
- no more than 'web-history-max-items' history entries are included
- the most recent entries are included
"""
config_stub.data['completion'] = {'timestamp-format': '%Y-%m-%d',
'web-history-max-items': 2}
actual = _get_completions(urlmodel.UrlCompletionModel())
assert actual == [
("Quickmarks", [
('https://wiki.archlinux.org', 'aw', ''),
('https://duckduckgo.com', 'ddg', ''),
('https://wikipedia.org', 'wiki', ''),
]),
("Bookmarks", [
('https://github.com', 'GitHub', ''),
('https://python.org', 'Welcome to Python.org', ''),
('http://qutebrowser.org', 'qutebrowser | qutebrowser', ''),
]),
("History", [
('https://python.org', 'Welcome to Python.org', '2016-03-08'),
('https://github.com', 'GitHub', '2016-05-01'),
]),
]
def test_url_completion_delete_bookmark(config_stub, web_history, quickmarks,
bookmarks, qtbot):
"""Test deleting a bookmark from the url completion model."""
config_stub.data['completion'] = {'timestamp-format': '%Y-%m-%d',
'web-history-max-items': 2}
model = urlmodel.UrlCompletionModel()
# delete item (1, 0) -> (bookmarks, 'https://github.com' )
view = _mock_view_index(model, 1, 0, qtbot)
model.delete_cur_item(view)
assert 'https://github.com' not in bookmarks.marks
assert 'https://python.org' in bookmarks.marks
assert 'http://qutebrowser.org' in bookmarks.marks
def test_url_completion_delete_quickmark(config_stub, web_history, quickmarks,
bookmarks, qtbot):
"""Test deleting a bookmark from the url completion model."""
config_stub.data['completion'] = {'timestamp-format': '%Y-%m-%d',
'web-history-max-items': 2}
model = urlmodel.UrlCompletionModel()
# delete item (0, 1) -> (quickmarks, 'ddg' )
view = _mock_view_index(model, 0, 1, qtbot)
model.delete_cur_item(view)
assert 'aw' in quickmarks.marks
assert 'ddg' not in quickmarks.marks
assert 'wiki' in quickmarks.marks
def test_session_completion(session_manager_stub):
session_manager_stub.sessions = ['default', '1', '2']
actual = _get_completions(miscmodels.SessionCompletionModel())
assert actual == [
("Sessions", [('default', '', ''), ('1', '', ''), ('2', '', '')])
]
def test_tab_completion(fake_web_tab, app_stub, win_registry,
tabbed_browser_stubs):
tabbed_browser_stubs[0].tabs = [
fake_web_tab(QUrl('https://github.com'), 'GitHub', 0),
fake_web_tab(QUrl('https://wikipedia.org'), 'Wikipedia', 1),
fake_web_tab(QUrl('https://duckduckgo.com'), 'DuckDuckGo', 2),
]
tabbed_browser_stubs[1].tabs = [
fake_web_tab(QUrl('https://wiki.archlinux.org'), 'ArchWiki', 0),
]
actual = _get_completions(miscmodels.TabCompletionModel())
assert actual == [
('0', [
('0/1', 'https://github.com', 'GitHub'),
('0/2', 'https://wikipedia.org', 'Wikipedia'),
('0/3', 'https://duckduckgo.com', 'DuckDuckGo')
]),
('1', [
('1/1', 'https://wiki.archlinux.org', 'ArchWiki'),
])
]
def test_tab_completion_delete(fake_web_tab, qtbot, app_stub, win_registry,
tabbed_browser_stubs):
"""Verify closing a tab by deleting it from the completion widget."""
tabbed_browser_stubs[0].tabs = [
fake_web_tab(QUrl('https://github.com'), 'GitHub', 0),
fake_web_tab(QUrl('https://wikipedia.org'), 'Wikipedia', 1),
fake_web_tab(QUrl('https://duckduckgo.com'), 'DuckDuckGo', 2)
]
tabbed_browser_stubs[1].tabs = [
fake_web_tab(QUrl('https://wiki.archlinux.org'), 'ArchWiki', 0),
]
model = miscmodels.TabCompletionModel()
view = _mock_view_index(model, 0, 1, qtbot)
qtbot.add_widget(view)
model.delete_cur_item(view)
actual = [tab.url() for tab in tabbed_browser_stubs[0].tabs]
assert actual == [QUrl('https://github.com'),
QUrl('https://duckduckgo.com')]
def test_setting_section_completion(monkeypatch, stubs):
module = 'qutebrowser.completion.models.configmodel'
_patch_configdata(monkeypatch, stubs, module + '.configdata.DATA')
_patch_config_section_desc(monkeypatch, stubs,
module + '.configdata.SECTION_DESC')
actual = _get_completions(configmodel.SettingSectionCompletionModel())
assert actual == [
("Sections", [
('general', 'General/miscellaneous options.', ''),
('ui', 'General options related to the user interface.', ''),
])
]
def test_setting_option_completion(monkeypatch, stubs, config_stub):
module = 'qutebrowser.completion.models.configmodel'
_patch_configdata(monkeypatch, stubs, module + '.configdata.DATA')
config_stub.data = {'ui': {'gesture': 'off',
'mind': 'on',
'voice': 'sometimes'}}
actual = _get_completions(configmodel.SettingOptionCompletionModel('ui'))
assert actual == [
("ui", [
('gesture', 'Waggle your hands to control qutebrowser', 'off'),
('mind', 'Enable mind-control ui (experimental)', 'on'),
('voice', 'Whether to respond to voice commands', 'sometimes'),
])
]
def test_setting_value_completion(monkeypatch, stubs, config_stub):
module = 'qutebrowser.completion.models.configmodel'
_patch_configdata(monkeypatch, stubs, module + '.configdata.DATA')
config_stub.data = {'general': {'volume': '0'}}
model = configmodel.SettingValueCompletionModel('general', 'volume')
actual = _get_completions(model)
assert actual == [
("Current/Default", [
('0', 'Current value', ''),
('11', 'Default value', ''),
]),
("Completions", [
('0', '', ''),
('11', '', ''),
])
]
|
gpl-3.0
| -80,187,015,943,444,350 | 37.873418 | 79 | 0.590231 | false |
mariodebian/jclic-browser
|
jclic_downloader.py
|
1
|
14268
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# By Mario Izquierdo Rodríguez
#
# JclicDownloader downloads the activities that match the filter it is given
#
import sgmllib
import sys
import urllib
import os
from time import sleep
import getopt
import zipfile
from xml.dom import minidom
# if test is True, previously downloaded files are used
# otherwise everything is fetched from the web (on a slow connection this can take ages)
test=False
debug=False
# the links are read from this URL
install_url="http://clic.xtec.net/jnlp/jclic/install.jnlp"
MAX=1000
# all activities
todas_url="http://clic.xtec.es/db/listact_es.jsp?lang=es&ordre=0&desc=1&from=1&area=*&idioma=*&nivell=*&text_titol=&text_aut=&text_desc=&num=1000"
# URL format for each activity
act_url="http://clic.xtec.es/db/act_es.jsp?id="
#http://clic.xtec.net/projects/sis2x2/jclic/sis2x2.jclic.inst
proy_url="http://clic.xtec.net/projects"
zips_dir="/var/lib/jclic_browser/zips"
img_dirs="/var/lib/jclic_browser/imgs"
class HTMLParser(sgmllib.SGMLParser):
def __init__(self, mycod=None):
sgmllib.SGMLParser.__init__(self)
self.insideTag = 0
self.links = []
self.links_with_name = {}
self.project_names = {}
self.mycod=mycod
def parse(self, data):
self.feed(data)
self.close()
#return self.links
def start_a(self, args):
for key, value in args:
if key.lower() == 'href':
self.insideTag = 1
self.lastHref = value
def handle_data(self, data):
if self.insideTag:
self.hrefText = data
def end_a(self):
#self.links.append( [self.lastHref, self.hrefText] )
self.links.append( self.lastHref )
self.insideTag = 0
cod_act=self.get_cod_act(self.lastHref)
if cod_act != "":
#print "cod_act=%s nombre=%s" %(cod_act, self.hrefText)
self.links_with_name[ cod_act ]=self.hrefText
if self.mycod != None:
project=self.read_project(self.lastHref)
if project != "":
self.project_names[ self.mycod ] = project
def get_cod_act(self, params):
if not "id=" in params: return ""
params=params.split("=")
return params[1]
def get_act_names(self):
return self.links_with_name
def get_act_name(self, id_act):
for link in self.links_with_name:
if link == id_act :
return self.links_with_name[id_act]
def read_project(self, params):
if not "argument=" in params: return ""
params=params.split("=")
return params[1]
def get_project_names(self, params):
return self.project_names
def get_project_name(self, id_act):
        #print "searching for id_act=%s" %(id_act)
for project in self.project_names:
if project == id_act:
return self.project_names[id_act]
def get_hyperlinks(self):
return self.links
class JclicDownloader:
def __init__(self):
        if debug: print "__init__()"
self.todas=None
def get_todas(self):
if debug: print "get_todas()"
if not test: f=urllib.urlopen(todas_url)
if test: f=open("lista.html","r")
self.todas = f.read()
f.close()
return self.todas
def get_proy_inst(self, id_act):
        #if not test: print "Reading project num: %s" %(id_act)
if not test: f=urllib.urlopen(act_url + id_act)
if test: f=open("proy.html", "r")
myparser2 = HTMLParser(id_act)
myparser2.parse( f.read() )
enlaces =myparser2.get_hyperlinks()
        f.close()
return myparser2.get_project_name(id_act)
def crea_directorio(self, dir_name):
if os.path.isdir(dir_name):
return
path_completo="/"
for path in dir_name.split("/"):
if path != "":
path_completo+=path + "/"
if not os.path.isdir(path_completo):
os.mkdir(path_completo)
def get_proy_filelist(self, url):
#check if file exists
file_name=url.split('/')[-1]
proy_dir=zips_dir + "/" + url.split('/')[-3]
if os.path.isfile(proy_dir + "/" + file_name):
            #if debug: print "%s found, not downloading it again..." %(file_name)
f=open(proy_dir + "/" + file_name, "r")
else:
            if debug: print "Downloading %s" %(url)
f=urllib.urlopen(url)
# read
file_src=[]
folder=None
name=None
data=f.readlines()
        f.close()
for line in data:
if "file src" in line:
file_src.append( line.split('"')[1] )
if "folder" in line:
folder=line.split('folder=')[1]
folder=folder.split('"')[1]
if "title" in line:
name=line.split('title=')[1]
name=name.split('"')[1]
if not os.path.isfile(proy_dir + "/" + file_name):
# save inst file
proy_dir=zips_dir + "/" + folder
if not os.path.isdir(proy_dir):
                print "Creating directory %s" %(proy_dir)
self.crea_directorio(proy_dir)
            #if debug: print "Saving project to: " + proy_dir + "/" + file_name
f=open(proy_dir + "/" + file_name, "w")
f.write("".join(data))
f.close()
if folder!= None:
# return data
return [folder, name, file_src]
def get_todas_id(self):
if debug: print "get_todas_id()"
self.get_todas()
myparser = HTMLParser()
if debug: print "parsing...%d" %len(self.todas)
myparser.parse(self.todas)
enlaces=myparser.get_hyperlinks()
self.id_todas=myparser.get_act_names()
self.actividades={}
parametros={}
cod_act=None
counter=0
for enlace in enlaces:
if counter > max_files: continue
#print enlace
cod_act=None
if not "?" in enlace: continue
enlace = enlace.split('?',1)[1]
if not "id=" in enlace: continue
enlace=enlace.split('=')
self.actividades[enlace[1]]=[ ]
counter+=1
        if debug: print "Found %d activities." %len(self.actividades)
return self.actividades
def get_file_list(self):
if debug: print "get_file_list()"
        if debug: print "Reading project information... (this takes a while)"
counter=0
for act in self.actividades:
if counter > max_files: continue
inst_file=self.get_proy_inst(act)
self.actividades[act].append( inst_file )
if inst_file != None:
tmp=self.get_proy_filelist(inst_file)
folder=tmp[0]
name=tmp[1]
src=tmp[2]
else:
folder=None
name=None
src=[]
self.actividades[act].append( folder )
self.actividades[act].append( name )
self.actividades[act].append( src )
counter+=1
#print self.actividades
#sys.exit(1)
def download_file(self, url, destino):
#if debug: print "download_file(%s, %s)" %(url, destino)
        print ":::>>> Downloading %s" %(url.split("/")[-1])
basedir="/".join(destino.split("/")[:-1])
self.crea_directorio(basedir)
f=urllib.urlopen(url)
data=f.read()
        f.close()
f=open(destino, "w")
f.write(data)
f.close()
def get_zips(self):
if debug: print "get_zips()"
counter=1
for id_act in self.actividades:
if counter > max_files : continue
if len(self.actividades[id_act]) == 0: continue
#print self.actividades[id_act]
if not self.actividades[id_act][1]: continue
proy_dir=zips_dir + "/" + self.actividades[id_act][1]
files=self.actividades[id_act][3]
#check for files
for _file in files:
if not os.path.isfile(proy_dir + "/" + _file):
#http://clic.xtec.net/projects/sis2x2/jclic/sis2x2.jclic.inst
url=proy_url + "/" + self.actividades[id_act][1] + "/jclic/" + _file
#print url
self.download_file(url, proy_dir + "/" + _file)
else:
                    print "The file %s already exists" %(_file)
counter+=1
def read_jclic_xml(self, data):
parsed={}
import StringIO
xmldoc = minidom.parse(StringIO.StringIO(str(data)))
try:
parsed["title"]=xmldoc.firstChild.childNodes[1].childNodes[1].firstChild.nodeValue
except:
pass
try:
parsed["revision_date"]=xmldoc.firstChild.childNodes[1].childNodes[3].getAttribute("date")
except:
pass
try:
parsed["revision_description"]=xmldoc.firstChild.childNodes[1].childNodes[3].getAttribute("description")
except:
pass
try:
parsed["author_mail"]=xmldoc.firstChild.childNodes[1].childNodes[7].getAttribute("mail")
except:
pass
try:
parsed["author_name"]=xmldoc.firstChild.childNodes[1].childNodes[7].getAttribute("name")
except:
pass
try:
parsed["language"]=xmldoc.firstChild.childNodes[1].childNodes[9].firstChild.nodeValue
except:
pass
try:
parsed["description"]=xmldoc.firstChild.childNodes[1].childNodes[11].childNodes[1].firstChild.toxml()
except:
pass
try:
parsed["descriptors"]=xmldoc.firstChild.childNodes[1].childNodes[13].firstChild.nodeValue
except:
pass
try:
parsed["descriptors_area"]=xmldoc.firstChild.childNodes[1].childNodes[13].getAttribute("area")
except:
pass
try:
parsed["descriptors_level"]= xmldoc.firstChild.childNodes[1].childNodes[13].getAttribute("level")
except:
pass
return parsed
def read_jclic_from_zip(self, zip_file):
#print "Reading ZIP %s" %(zip_file)
"""
z = zipfile.ZipFile(zip_file, "r")
for filename in z.namelist():
if filename.split(".")[-1] != "jclic" : continue
print "Parseando %s" %filename
bytes=z.read(filename)
data=self.read_jclic_xml(bytes)
print data
return data
"""
try:
z = zipfile.ZipFile(zip_file, "r")
for filename in z.namelist():
if filename.split(".")[-1] != "jclic" : continue
                print "Parsing %s" %filename
bytes=z.read(filename)
data=self.read_jclic_xml(bytes)
print data
return data
except:
pass
#print "Error reading ZIP file %s" %(zip_file.split("/")[-1] )
def read_zips(self):
counter=0
self.zip_files={}
for id_act in self.actividades:
if counter > max_files: continue
if len(self.actividades[id_act]) == 0: continue
#print self.actividades[id_act]
if not self.actividades[id_act][1]: continue
proy_dir=zips_dir + "/" + self.actividades[id_act][1]
files=self.actividades[id_act][3]
for _file in files:
if _file.split(".")[-1] == "zip":
self.zip_files[id_act]=[zips_dir + "/" + self.actividades[id_act][1] + "/" + _file]
self.read_jclic_from_zip(zips_dir + "/" + self.actividades[id_act][1] + "/" + _file)
#print self.zip_files
##########################################
def usage():
print ""
print "jclic_downloader"
print " Usage:"
print " --help (this help)"
print " --debug (show verbose text)"
print ""
print " --update-inst (update/download all inst files)"
print " --update-zips (parse ints files and get jclic.zip files)"
print " --update-imgs (get all image activities)"
print ""
print " --read-zips (read all zips info)"
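# Illustrative invocations (a sketch only; run from the project directory and make sure
# zips_dir is writable -- the flags below come from the usage() listing above):
#   python jclic_downloader.py --update-inst --max=10
#   python jclic_downloader.py --update-zips --debug
#   python jclic_downloader.py --read-zips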
# startup parameters
options=["help", "debug", "update-inst", "update-zips", "update-imgs", "max=", "read-zips"]
try:
opts, args = getopt.getopt(sys.argv[1:], ":hd", options)
except getopt.error, msg:
print msg
print "for command line options use jclic_downloader --help"
sys.exit(2)
mode=0
max_files=MAX
# process options
for o, a in opts:
#print o
#print a
#print "-----"
if o in ("-d", "--debug"):
print "DEBUG ACTIVE"
debug = True
if o == "--update-inst": mode=1
if o == "--update-zips": mode=2
if o == "--update-imgs": mode=3
if o == "--read-zips": mode=4
if o == "--max":
max_files=int(a)
if o in ("-h", "--help"):
usage()
sys.exit()
##########################################
# self.actividades is a dictionary
# * the key is the activity id
# * the value is a list
#       list[0] = the jclic.inst file
#       list[1] = the download directory
#       list[2] = the activity name (taken from the jclic.inst)
#       list[3] = another list with the files it contains
if __name__ == "__main__":
    if max_files != MAX: print "Maximum number of activities=%d" %(max_files)
app = JclicDownloader()
app.get_todas_id()
app.get_file_list()
if mode == 2: app.get_zips()
    if mode == 3: app.get_imgs()  # note: get_imgs() is not defined in JclicDownloader above
if mode == 4: app.read_zips()
sys.exit(0)
|
gpl-2.0
| 1,716,028,012,631,787,000 | 30.4163 | 146 | 0.523242 | false |
adamnew123456/myweb
|
myweb/frontend/cli.py
|
1
|
9534
|
"""
A command-line based frontend for interacting with myweb.
"""
from myweb.backend import config, db, query, utils
import argparse
import os
import re
import sys
import tempfile
HELP = """myweb-cli - A command-line interface to myweb.
Usage:
myweb-cli <command> <args>
Commands:
search QUERY
Searches the database using the given query.
print URL
Shows the content of the URL, including backlinks, to stdout.
view [--no-backlinks] URL
Dumps the content of the given URL to stdout.
view-backlinks URL
Dumps the list of backlinks for the given URL to stdout.
view-tags URL
Dumps the list of tags for the given URL to stdout.
create URL [TAG...]
Creates a new article with the given tag, and with the contents of
the article coming from stdin.
update URL
Updates the content of the URL from the contents of stdin.
edit URL
Invokes $VISUAL (or $EDITOR) on the content of the given URL, and then
saves the result back into the database.
set-tags URL [TAG...]
Updates the list of tags for the URL.
    delete URL
Removes the given URL from the database.
help
Show a complete help page.
"""
def load_config():
"""
Loads a configuration object.
"""
return config.load_config({})
def init_db(config_opts):
"""
Loads the database.
"""
db.load_database(config_opts['myweb']['db'])
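# Illustrative invocations (a sketch only, mirroring the commands listed in HELP above;
# the URL and tag names are made-up examples):
#   myweb-cli create http://example.com/article python notes < article.txt
#   myweb-cli view http://example.com/article
#   myweb-cli set-tags http://example.com/article python web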
def main():
"Parses the command line and initializes the given action."
arg_parser = argparse.ArgumentParser()
sub_args = arg_parser.add_subparsers(help='Commands', dest='command')
help_parser = sub_args.add_parser('help',
help='Shows a complete help page')
search_parser = sub_args.add_parser('search',
help='Search the database, printing out a list of matching URLs')
search_parser.add_argument('QUERY',
help='A well-formed myweb query')
print_parser = sub_args.add_parser('print',
help='Prints the article for the URL, plus backlinks, to stdout.')
print_parser.add_argument('URL',
help='A URL which exists in the database')
view_parser = sub_args.add_parser('view',
help='Dump the article for the URL to stdout')
view_parser.add_argument('URL',
help='A URL which exists in the database')
view_backlinks_parser = sub_args.add_parser('view-backlinks',
help='Dumps the backlinks of the given article to stdout')
view_backlinks_parser.add_argument('URL',
help='A URL which exists in the database')
view_tags_parser = sub_args.add_parser('view-tags',
help='Dumps the tags of the given article to stdout')
view_tags_parser.add_argument('URL',
help='A URL which exists in the database')
create_parser = sub_args.add_parser('create',
help='Adds the article for the URL by reading stdin')
create_parser.add_argument('URL',
help='A URL which does not exist in the database')
create_parser.add_argument('TAGS', nargs='+',
help='The tags to give to the new article')
update_parser = sub_args.add_parser('update',
help='Replaces the article for the URL by reading stdin')
update_parser.add_argument('URL',
help='A URL which exists in the database')
edit_parser = sub_args.add_parser('edit',
help='Invokes $VISUAL (or $EDITOR) to edit an article')
edit_parser.add_argument('URL',
help='A URL which exists in the database')
set_tags_parser = sub_args.add_parser('set-tags',
help='Sets the list of tags on an article')
set_tags_parser.add_argument('URL',
help='A URL which exists in the database')
set_tags_parser.add_argument('TAGS', nargs='+',
help='The tags to give to the article')
delete_parser = sub_args.add_parser('delete',
help='Removes an article from the database')
delete_parser.add_argument('URL',
help='A URL which exists in the database')
arg_context = arg_parser.parse_args(sys.argv[1:])
if arg_context.command is None:
# We weren't provided with a command, so show the short help listing
arg_parser.print_usage()
return 1
elif arg_context.command == 'help':
arg_parser.print_help()
elif arg_context.command == 'search':
config_opts = load_config()
init_db(config_opts)
try:
            parsed = query.parse_query(arg_context.QUERY)
            results = db.execute_query(parsed)
            for result in results:
                print(result)
except (IndexError, SyntaxError) as ex:
print('Invalid query string "{}"'.format(arg_context.QUERY),
file=sys.stderr)
print('\t' + str(ex), file=sys.stderr)
return 1
elif arg_context.command == 'print':
config_opts = load_config()
init_db(config_opts)
try:
article = db.get_article(arg_context.URL)
except KeyError:
print('No article exists for', arg_context.URL,
file=sys.stderr)
return 1
print(article.content)
print('\n----- Backlinks -----')
for backlink in article.backlinks:
print(' - ', backlink)
print('\n----- Tags -----')
for tag in article.tags:
print(' - ', tag)
elif arg_context.command == 'view':
config_opts = load_config()
init_db(config_opts)
try:
article = db.get_article(arg_context.URL)
except KeyError:
print('No article exists for', arg_context.URL,
file=sys.stderr)
return 1
print(article.content)
elif arg_context.command == 'view-backlinks':
config_opts = load_config()
init_db(config_opts)
try:
article = db.get_article(arg_context.URL)
except KeyError:
print('No article exists for', arg_context.URL,
file=sys.stderr)
return 1
for backlink in article.backlinks:
print(backlink)
elif arg_context.command == 'view-tags':
config_opts = load_config()
init_db(config_opts)
try:
article = db.get_article(arg_context.URL)
except KeyError:
print('No article exists for', arg_context.URL,
file=sys.stderr)
return 1
for tag in article.tags:
print(tag)
elif arg_context.command == 'create':
config_opts = load_config()
init_db(config_opts)
article = sys.stdin.read()
tags = set(arg_context.TAGS)
links = utils.get_links(article)
try:
db.create_article(arg_context.URL, article, links, tags)
except KeyError:
print('Article for', arg_context.URL, 'already exists',
file=sys.stderr)
return 1
elif arg_context.command == 'update':
config_opts = load_config()
init_db(config_opts)
article = sys.stdin.read()
try:
old_article = db.get_article(arg_context.URL)
links = utils.get_links(article)
db.update_article(arg_context.URL, article, links,
old_article.tags)
except KeyError:
print('Article for', arg_context.URL, 'does not exist',
file=sys.stderr)
return 1
elif arg_context.command == 'edit':
config_opts = load_config()
init_db(config_opts)
if not os.environ.get('VISUAL', ''):
if not os.environ.get('EDITOR', ''):
print('No setting for $VISUAL or $EDITOR', file=sys.stderr)
return 1
else:
editor = os.environ['EDITOR']
else:
editor = os.environ['VISUAL']
try:
article = db.get_article(arg_context.URL)
# Dump the article to a temp file, so that the editor has
# something to edit (we *could* pass the text in via stdin, but
# if the user screwed up, they would have no original copy to
# work from - you can't run :e! in Vim on stdin, for example).
with tempfile.NamedTemporaryFile(mode='w+') as article_file:
article_file.write(article.content)
article_file.flush()
os.system(editor + ' ' + article_file.name)
article_file.seek(0)
new_article_text = article_file.read()
links = utils.get_links(new_article_text)
db.update_article(arg_context.URL, new_article_text, links,
article.tags)
except KeyError:
print('Article for', arg_context.URL, 'does not exist',
file=sys.stderr)
return 1
elif arg_context.command == 'set-tags':
config_opts = load_config()
init_db(config_opts)
try:
old_article = db.get_article(arg_context.URL)
tags = set(arg_context.TAGS)
db.update_article(arg_context.URL, old_article.content,
old_article.links, tags)
except KeyError:
print('Article for', arg_context.URL, 'does not exist',
file=sys.stderr)
return 1
elif arg_context.command == 'delete':
config_opts = load_config()
init_db(config_opts)
db.delete_article(arg_context.URL)
|
bsd-2-clause
| -6,819,532,572,932,881,000 | 33.05 | 78 | 0.588316 | false |
F5Networks/f5-common-python
|
f5/bigip/tm/util/clientssl_ciphers.py
|
1
|
1459
|
# coding=utf-8
#
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® utility module
REST URI
``http://localhost/mgmt/tm/util/clientssl-ciphers``
GUI Path
N/A
REST Kind
``tm:util:clientssl-ciphers:*``
"""
from f5.bigip.mixins import CommandExecutionMixin
from f5.bigip.resource import UnnamedResource
class Clientssl_Ciphers(UnnamedResource, CommandExecutionMixin):
"""BIG-IP® utility command
.. note::
This is an unnamed resource so it has no ~Partition~Name pattern
at the end of its URI.
"""
def __init__(self, util):
super(Clientssl_Ciphers, self).__init__(util)
self._meta_data['required_command_parameters'].update(('utilCmdArgs',))
self._meta_data['required_json_kind'] =\
'tm:util:clientssl-ciphers:runstate'
self._meta_data['allowed_commands'].append('run')
self._meta_data['minimum_version'] = '12.1.0'
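# A minimal usage sketch (not part of this module; the management session, the attribute
# path and the cipher string below are assumptions for illustration only):
#   from f5.bigip import ManagementRoot
#   mgmt = ManagementRoot('bigip.example.com', 'admin', 'secret')
#   ciphers = mgmt.tm.util.clientssl_ciphers
#   ciphers.exec_cmd('run', utilCmdArgs='DEFAULT')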
|
apache-2.0
| -7,911,479,951,064,563,000 | 29.354167 | 79 | 0.695264 | false |
Lamaw/Newzer
|
lemonde_extractor.py
|
1
|
6762
|
# -*- coding: utf-8 -*-
"""
This page defines the Implementation of an analyzer for "www.lemonde.fr"
"""
from HTMLParser import HTMLParser
from Isite_extractor import ISiteExtractor
class LeMondeExtractor(ISiteExtractor):
"""
    This class implements the Page analyzer interface for the "lemonde.fr" website
"""
def __init__(self):
self.base_url = "http://www.lemonde.fr"
def get_news_feed(self):
"""
        Get the news feed with newly published articles from the website's home URL
:return type: str
:return: the url on the news feed webpage
"""
try:
news_feed_url = self.base_url + "/actualite-en-continu/"
except:
news_feed_url = None
return news_feed_url
def get_article_webpage_list(self, news_feed_webpage):
"""
Get the article webpage list from the webpage containing all the newly added articles.
:type news_feed_webpage: str
:param news_feed_webpage: the html page where articles' urls are
:return type: list()
:return: the list of urls for each article webpage
"""
url_list = list()
# Use HTML parser to extract appropriates urls
lemonde_parser = LeMondeHTMLParser()
lemonde_parser.feed(news_feed_webpage)
partial_url_list = lemonde_parser.links
# add the base url of the website if not present in the article url
for url in partial_url_list:
            if 'http' not in url:
url_list.append(self.base_url + url)
else:
url_list.append(url)
return url_list
def get_article_text(self, article_webpage):
"""
Extract the text of the article from the raw webpage
:type article_webpage: str
:param article_webpage: The webpage containing the article to extract
:return type: str
:return: the text from the article on a web page
"""
lemonde_parser = LeMondeHTMLParser()
lemonde_parser.feed(article_webpage)
return lemonde_parser.article_data
def get_article_category(self, article_webpage):
"""
Extract the category of the article from the raw webpage
:type article_webpage: str
:param article_webpage: The webpage containing the article to extract
:return type: str
:return: the category from the article on a web page (e.g: sport, economy, politics, etc...)
"""
lemonde_parser = LeMondeHTMLParser()
lemonde_parser.feed(article_webpage)
return lemonde_parser.category
def get_article_author(self, article_webpage):
"""
Extract the author of the article from the raw webpage
:type article_webpage: str
:param article_webpage: The webpage containing the article to extract
:return type: str
:return: the author name from the article on a web page
"""
pass
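# Illustrative usage sketch (a sketch only; the urllib2 fetches are assumptions and not
# part of this module):
#   import urllib2
#   extractor = LeMondeExtractor()
#   feed_html = urllib2.urlopen(extractor.get_news_feed()).read()
#   for url in extractor.get_article_webpage_list(feed_html):
#       article_html = urllib2.urlopen(url).read()
#       print extractor.get_article_text(article_html)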
class LeMondeHTMLParser(HTMLParser):
"""
Class implementating some methods of the HTMLParser pytho lib, in order to acquire specific data for Lemonde website
"""
def __init__(self):
HTMLParser.__init__(self) # Parents constructor
self.links = list() # The list of links from the news feed
self.article_section = False # Flag for news feed parsing
self.article_body = False # Flag for article text acquisition
self.suspend_acquisition = False # flag to suspend data aqcuisition in the article body
        self.div_open_in_article_body = 0 # Number of open divs inside the main article div
self.article_data = "" # store the text from the article
self.category = "" # store the category of the article
def handle_starttag(self, tag, attrs):
"""
        Method that manages tag openings in the HTML source code, to retrieve the article content
"""
try:
if tag == "article": # Set flag for news feed parsing to true
for name, value in attrs:
if name == 'class' and 'grid_12 alpha enrichi' in value:
self.article_section = True
elif tag == "a" and self.article_section == True: # get a link from the news feed
for name, value in attrs:
if name == "href":
if value not in self.links and "/journaliste/" not in value:
self.links.append(value)
elif tag == "div" and not self.article_body: # Set flag from article body to true
for name, value in attrs:
if name == 'id' and value == 'articleBody':
self.article_body = True
elif tag == 'div' and self.article_body: # Increment number of open div in the main div of article (used to determine when the main article div is closed)
self.div_open_in_article_body += 1
            elif tag == 'p' and self.article_body: # Suspend acquisition for the "lire aussi" section
                for name, value in attrs:
                    if name == 'class' and value == 'lire':
                        self.suspend_acquisition = True
            elif tag == 'section' and self.article_body:
                self.suspend_acquisition = True
            elif tag == 'iframe' and self.article_body:
                self.suspend_acquisition = True
elif tag == 'body':
for name, value in attrs:
if name == "class":
self.category = value
except:
pass
def handle_endtag(self, tag):
"""
        Method that manages tag endings, in order to determine when parsing leaves the relevant sections
"""
try:
if tag == "article":
self.article_section = False
elif tag == "div" and self.article_body and self.div_open_in_article_body == 0:
self.article_body = False
elif tag == 'div' and self.article_body and self.div_open_in_article_body > 0:
self.div_open_in_article_body -= 1
            elif tag == 'p' and self.suspend_acquisition == True:
                self.suspend_acquisition = False
            elif tag == 'section' and self.suspend_acquisition == True:
                self.suspend_acquisition = False
            elif tag == 'iframe' and self.suspend_acquisition == True:
                self.suspend_acquisition = False
except:
pass
def handle_data(self, data):
"""
Store data when in right section of parsing
"""
if self.article_body:
if not self.suspend_acquisition:
self.article_data += data
|
mit
| 5,026,453,896,365,185,000 | 39.740964 | 166 | 0.587844 | false |
murat1985/bagpipe-bgp
|
bagpipe/bgp/engine/worker.py
|
2
|
5205
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# encoding: utf-8
# Copyright 2014 Orange
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import traceback
from Queue import Queue
from threading import Event
from bagpipe.bgp.engine import RouteEntry, RouteEvent, \
Subscription, Unsubscription
from bagpipe.bgp.common.looking_glass import LookingGlass, LGMap
log = logging.getLogger(__name__)
class Worker(LookingGlass):
"""This is the base class for objects that interact with the route table
manager to produce and consume events related to BGP routes.
These objects will:
* use _subscribe(...) and _unsubscribe(...) to subscribe to routing events
* will specialize _onEvent(event) to react to received events
* use _pushEvent(event) to publish routing events
"""
stopEvent = object()
def __init__(self, bgpManager, workerName):
self.bgpManager = bgpManager
self._queue = Queue()
self._pleaseStop = Event()
log.debug("Setting worker name to %s", workerName)
self.name = workerName
assert(self.name is not None)
log.debug("Instantiated %s worker", self.name)
def stop(self):
"""
Stop this worker.
Set the _pleaseStop internal event to stop the event processor loop
and indicate to the route table manager that this worker is stopped.
Then call _stopped() to let a subclass implement any further work.
"""
self._pleaseStop.set()
self._queue.put(Worker.stopEvent)
self.bgpManager.cleanup(self)
self._stopped()
def _stopped(self):
"""
Hook for subclasses to react when Worker is stopped (NoOp in base
Worker class)
"""
def _eventQueueProcessorLoop(self):
"""
Main loop where the worker consumes events.
"""
while not self._pleaseStop.isSet():
# log.debug("%s worker waiting on queue",self.name )
event = self._dequeue()
if (event == Worker.stopEvent):
log.debug("StopEvent, breaking queue processor loop")
self._pleaseStop.set()
break
# log.debug("%s worker calling _onEvent for %s",self.name,event)
try:
self._onEvent(event)
except Exception as e:
log.error("Exception raised on subclass._onEvent: %s", e)
log.error("%s", traceback.format_exc())
def run(self):
self._eventQueueProcessorLoop()
def _onEvent(self, event):
"""
This method is implemented by subclasses to react to routing events.
"""
log.debug("Worker %s _onEvent: %s", self.name, event)
raise NotImplementedError
def _dequeue(self):
return self._queue.get()
def enqueue(self, event):
# TODO(tmmorin): replace Queue by a PriorityQueue and use a higher
# priority for ReInit event
self._queue.put(event)
def _subscribe(self, afi, safi, rt=None):
subobj = Subscription(afi, safi, rt, self)
log.info("Subscribe: %s ", subobj)
self.bgpManager.routeEventSubUnsub(subobj)
def _unsubscribe(self, afi, safi, rt=None):
subobj = Unsubscription(afi, safi, rt, self)
log.info("Unsubscribe: %s ", subobj)
self.bgpManager.routeEventSubUnsub(subobj)
def getWorkerSubscriptions(self):
return self.bgpManager.routeTableManager.getWorkerSubscriptions(self)
def getWorkerRouteEntries(self):
return self.bgpManager.routeTableManager.getWorkerRouteEntries(self)
def _pushEvent(self, routeEvent):
assert(isinstance(routeEvent, RouteEvent))
log.debug("Pushing route event to BGPManager")
if routeEvent.source is None:
routeEvent.source = self
self.bgpManager._pushEvent(routeEvent)
def _newRouteEntry(self, afi, safi, rts, nlri, attributes):
return RouteEntry(afi, safi, rts, nlri, attributes, self)
def __repr__(self):
return "Worker %s" % (self.name)
# Looking glass ###
def getLookingGlassLocalInfo(self, pathPrefix):
return {
"name": self.name,
"internals": {
"event queue length": self._queue.qsize(),
"subscriptions":
[repr(sub) for sub in self.getWorkerSubscriptions()],
}
}
def getLGMap(self):
return {
"routes": (LGMap.SUBTREE, self.getLGRoutes)
}
def getLGRoutes(self, pathPrefix):
return [route.getLookingGlassInfo(pathPrefix) for route in
self.getWorkerRouteEntries()]
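# Illustrative sketch of a concrete worker (not part of this module; the AFI/SAFI values
# and the worker name are assumptions for illustration only):
#   class LoggingWorker(Worker):
#       def __init__(self, bgpManager):
#           Worker.__init__(self, bgpManager, "LoggingWorker")
#           self._subscribe(afi=1, safi=1)  # e.g. IPv4 unicast
#       def _onEvent(self, event):
#           log.info("received route event: %s", event)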
|
apache-2.0
| 6,153,653,472,413,614,000 | 30.737805 | 78 | 0.634006 | false |
Balannen/LSMASOMM
|
atom3/Kernel/Layout/RandomLayout.py
|
1
|
2169
|
"""
RandomLayout.py
Generates a random layout by moving all the nodes' positions randomly in
a 640x480 pixel box. The connections are then optimized for the new layout.
Guaranteed to hit an aesthetic layout at infinity, not recognize it, and
keep on going for another infinity :p
Created Summer 2004, Denis Dube
"""
from random import randint
from Utilities import selectAllVisibleObjects, optimizeLinks
from ModelSpecificCode import isEntityNode
def applyLayout(self):
for nodetype in self.ASGroot.nodeTypes:
for node in self.ASGroot.listNodes[nodetype]:
if( isEntityNode( node.graphObject_ ) ):
# Move the nodes around
currPos = node.graphObject_.getCenterCoord()
newPos = [ randint(0,640), randint(0,480) ]
node.graphObject_.Move( -currPos[0],-currPos[1], False) # Go back to the origin
node.graphObject_.Move( newPos[0], newPos[1], False) # Move to random location
else:
# Move the links around
currPos= node.graphObject_.getCenterCoord()
newPos = [ randint(0,640), randint(0,480) ]
node.graphObject_.Move( -currPos[0],-currPos[1]) # Go back to the origin
node.graphObject_.Move( newPos[0], newPos[1]) # Move to random location
selectAllVisibleObjects( self )
optimizeLinks( self.cb )
"""
# This code fragment can spill all the co-ordinates making up an edge
for nodetype in core.ASGroot.nodeTypes:
for node in core.ASGroot.listNodes[nodetype]:
size = node.graphObject_.getSize()
if( size[0] == 0 ):
print "Size is 0", node, node.graphObject_.getCenterCoord(), "<--conns"
node.graphObject_.Move(20,20)
else:
if( node.graphObject_.getConnectionCoordinates( "OUT", node.graphObject_) != None ):
coords = node.graphObject_.getConnectionCoordinates( "OUT", node.graphObject_)[0]
middlePos = [coords[2],coords[3] ]
print node,middlePos, "<--getConn"
"""
|
gpl-3.0
| 2,312,483,921,526,542,000 | 34.79661 | 94 | 0.612725 | false |
Ell/goonauth
|
profiles/migrations/0012_auto__add_oauthapplication.py
|
1
|
12056
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'OAuthApplication'
db.create_table(u'profiles_oauthapplication', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('client', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['oauth2_provider.Application'])),
('description', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'profiles', ['OAuthApplication'])
def backwards(self, orm):
# Deleting model 'OAuthApplication'
db.delete_table(u'profiles_oauthapplication')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'oauth2_provider.application': {
'Meta': {'object_name': 'Application'},
'authorization_grant_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'client_id': ('django.db.models.fields.CharField', [], {'default': "u'k=YN_eI9uae.;!dF2yJQau=ItrEFxgedxm8Py5-2'", 'unique': 'True', 'max_length': '100'}),
'client_secret': ('django.db.models.fields.CharField', [], {'default': "u'CLLYJQfFmPtCTaIlwbPpi4I!@AiwpphwdW6lxQ:;qeb.o!3HfvPjMGm@pG:hP;aahfZI:lK;IgmJ;WeqZv!oCQ9paPT0e9V83=Us5T-oW4YFtbjNBDU=_BL5UIf0MWu9'", 'max_length': '255', 'blank': 'True'}),
'client_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'redirect_uris': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'profiles.battlefieldfourprofile': {
'Meta': {'object_name': 'BattlefieldFourProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.blizzardprofile': {
'Meta': {'object_name': 'BlizzardProfile'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'realid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.eveonlineprofile': {
'Meta': {'object_name': 'EveOnlineProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.leagueoflegendsprofile': {
'Meta': {'object_name': 'LeagueOfLegendsProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.minecraftprofile': {
'Meta': {'object_name': 'MinecraftProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.nintendoprofile': {
'Meta': {'object_name': 'NintendoProfile'},
'friendcode': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'profiles.oauthapplication': {
'Meta': {'object_name': 'OAuthApplication'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oauth2_provider.Application']"}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'profiles.playstationnetworkprofile': {
'Meta': {'object_name': 'PlaystationNetworkProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.somethingawfulprofile': {
'Meta': {'object_name': 'SomethingAwfulProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'postcount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'regdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'userid': ('django.db.models.fields.TextField', [], {}),
'username': ('django.db.models.fields.TextField', [], {})
},
u'profiles.steamprofile': {
'Meta': {'object_name': 'SteamProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'userid': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'bf4': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.BattlefieldFourProfile']", 'null': 'True', 'blank': 'True'}),
'blizzard': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.BlizzardProfile']", 'null': 'True', 'blank': 'True'}),
'eveonline': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.EveOnlineProfile']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'leagueoflegends': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.LeagueOfLegendsProfile']", 'null': 'True', 'blank': 'True'}),
'minecraft': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.MinecraftProfile']", 'null': 'True', 'blank': 'True'}),
'nintendo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.NintendoProfile']", 'null': 'True', 'blank': 'True'}),
'psn': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.PlaystationNetworkProfile']", 'null': 'True', 'blank': 'True'}),
'somethingawful': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['profiles.SomethingAwfulProfile']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'steam': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.SteamProfile']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'}),
'verification_code': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'worldoftanks': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.WorldOfTanksProfile']", 'null': 'True', 'blank': 'True'}),
'xbl': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.XboxLiveProfile']", 'null': 'True', 'blank': 'True'})
},
u'profiles.worldoftanksprofile': {
'Meta': {'object_name': 'WorldOfTanksProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'profiles.xboxliveprofile': {
'Meta': {'object_name': 'XboxLiveProfile'},
'gamertag': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['profiles']
|
mit
| -8,942,637,767,845,839,000 | 73.425926 | 257 | 0.560883 | false |
BorgERP/borg-erp-6of3
|
verticals/garage61/acy_mrp_operator/mrp_operator.py
|
1
|
10435
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Acysos S.L. (http://acysos.com) All Rights Reserved.
# Ignacio Ibeas <ignacio@acysos.com>
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime
from osv import osv, fields
from tools.translate import _
import netsvc
import time
import tools
class mrp_operator_registry(osv.osv):
_description = 'MRP Operator Registry'
_name = 'mrp.operator.registry'
_columns = {
'name': fields.char('Reference', size=64, required=True, states={'draft':[('readonly',False)]}, readonly=True),
'date': fields.date('Date', required=True, select=True, states={'draft':[('readonly',False)]}, readonly=True),
'operator_id': fields.many2one('hr.employee', 'Operator', required=True, states={'draft':[('readonly',False)]}, readonly=True),
'workcenter_lines': fields.one2many('mrp.workcenter.registry', 'operator_registry_id', 'MRP Workcenter Registry', states={'draft':[('readonly',False)]}, readonly=True),
'state': fields.selection([('draft','Draft'),('confirmed','Confirmed'),('cancel','Cancelled')],'State', readonly=True),
}
_defaults = {
'name':lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'operator_registry'),
'date': lambda *a: time.strftime('%Y-%m-%d'),
'state': lambda *a: 'draft',
}
def action_confirm(self, cr, uid, ids, context=None):
registry = self.browse(cr,uid,ids,context)[0]
for workcenter_line in registry.workcenter_lines:
if workcenter_line.production_id.id:
sql = "SELECT MAX(sequence) FROM mrp_production_workcenter_line WHERE production_id = %s" % (workcenter_line.production_id.id)
cr.execute(sql)
sequence = cr.fetchone()[0]
prod_obj = self.pool.get('mrp.production')
stock_obj = self.pool.get('stock.move')
if workcenter_line.production_id.state in ['draft','picking_except','cancel','done']:
raise osv.except_osv(_('Error'), _("Can't make production if the Manufacturing order is %s") % (workcenter_line.production_id.state))
if workcenter_line.product_id:
if not workcenter_line.workcenter_line_id:
raise osv.except_osv(_('Error'), _("Can't produce a product without Workcenter %s") % (workcenter_line.product_id.name))
if workcenter_line.workcenter_line_id:
if not workcenter_line.product_id:
raise osv.except_osv(_('Error'), _("Can't use a workcenter without product %s") % (workcenter_line.workcenter_line_id.name))
prod_obj.action_in_production(cr,uid,workcenter_line.production_id.id)
if sequence == workcenter_line.workcenter_line_id.sequence:
if workcenter_line.go_product_qty > 0:
prod_obj.action_produce(cr, uid,workcenter_line.production_id.id,workcenter_line.go_product_qty,'consume_produce',context)
for workcenter_line2 in registry.workcenter_lines:
if workcenter_line.production_id.id == workcenter_line2.production_id.id:
if workcenter_line2.workcenter_line_id.sequence <= workcenter_line.workcenter_line_id.sequence:
if workcenter_line.de_product_qty > 0:
#mrp_routing_ids = self.pool.get('mrp.routing.workcenter').search(cr,uid,[('routing_id','=',workcenter_line2.production_id.routing_id.id)], order='sequence', context=context)
#for mrp_routing_id in mrp_routing_ids:
#product_line_id = self.pool.get('mrp.production.product.line').search(cr, uid, [('production_id','=',workcenter_line2.production_id.id),('consumed_on','=',mrp_routing_id)], context=context)
#print product_line_id
#if len(product_line_id) == 1:
#break
mrp_routing_id = self.pool.get('mrp.routing.workcenter').search(cr,uid,[('routing_id','=',workcenter_line2.production_id.routing_id.id),('workcenter_id','=',workcenter_line2.workcenter_id.id)], context=context)
product_line_id = self.pool.get('mrp.production.product.line').search(cr, uid, [('production_id','=',workcenter_line2.production_id.id),('consumed_on','=',mrp_routing_id[0])], context=context)
if len(product_line_id) > 0:
product_line = self.pool.get('mrp.production.product.line').browse(cr, uid, product_line_id, context)[0]
move_name = 'PROD:'+workcenter_line2.production_id.name
stock_move_id = stock_obj.search(cr,uid,[('product_id','=',product_line.product_id.id),('state','=','assigned'),('name','=',move_name)],context=context)
bom_id = self.pool.get('mrp.bom').search(cr, uid, [('bom_id','=',workcenter_line2.production_id.bom_id.id),('product_id','=',product_line.product_id.id),('consumed_on','=',mrp_routing_id[0])], context=context)
bom = self.pool.get('mrp.bom').browse(cr, uid, bom_id, context)[0]
defective_qty = bom.product_qty*bom.product_efficiency*workcenter_line.de_product_qty
context = {'operator_registry':1,'location_src':workcenter_line2.production_id.location_src_id.id}
stock_obj.action_scrap(cr, uid,stock_move_id,defective_qty,4,context)
self.write(cr, uid, ids, {'state': 'confirmed'})
return True
def action_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'cancel'})
return True
def action_cancel_draft(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'draft'})
return True
mrp_operator_registry()
class mrp_production_workcenter_line(osv.osv):
_inherit = 'mrp.production.workcenter.line'
def _number_get(self,cr,uid,ids,name,arg,context={}):
res={}
for line in self.browse(cr,uid,ids,context):
res[line.id] = line.production_id.name +'-'+ str(line.sequence)
return res
_columns = {
'number': fields.function(_number_get, method=True, store=True, type='char', size=64, string='Number', readonly=True),
}
_rec_name = "number"
mrp_production_workcenter_line()
class mrp_workcenter_registry_key(osv.osv):
_name = 'mrp.workcenter.registry.key'
_description = 'MRP Workcenter Registry Key'
_columns = {
'name': fields.char('Name', required=True, size=46, translate=True),
}
mrp_workcenter_registry_key()
class mrp_workcenter_registry(osv.osv):
_description = 'MRP Workcenter Registry'
_name = 'mrp.workcenter.registry'
_columns = {
'key': fields.many2one('mrp.workcenter.registry.key','Key'),
'workcenter_line_id': fields.many2one('mrp.production.workcenter.line', 'Workcenter'),
'product_id': fields.many2one('product.product', 'Product'),
'name': fields.char('Operation Code', size=64, required=True),
'workcenter_id': fields.many2one('mrp.workcenter', 'Resource'),
'de_product_qty': fields.float('Defective Product Qty'),
'go_product_qty': fields.float('Good Product Qty'),
'date_start': fields.date('Date start'),
'time_start': fields.time('Time start'),
'date_stop': fields.date('Date stop'),
'time_stop': fields.time('Time stop'),
'note': fields.text('Notes'),
'operator_registry_id': fields.many2one('mrp.operator.registry', 'Operator registry', ondelete='cascade'),
'production_id': fields.many2one('mrp.production', 'Manufacturing Order', ondelete='set null'),
'operator_id': fields.related('operator_registry_id', 'operator_id', type='many2one', relation='hr.employee', string='Operator'),
}
_defaults = {
'name':'/',
'date_start': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'date_stop': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
}
def workcenter_line_change(self, cr, uid, ids,workcenter_line_id,context={}):
if (workcenter_line_id):
workcenter_line = self.pool.get('mrp.production.workcenter.line').browse(cr, uid, [workcenter_line_id], context)[0]
return {'value': {'workcenter_line_id': workcenter_line.id,'product_id':workcenter_line.production_id.product_id.id,'name':workcenter_line.name,'workcenter_id':workcenter_line.workcenter_id.id,'production_id':workcenter_line.production_id.id}}
mrp_workcenter_registry()
class mrp_production(osv.osv):
_inherit = 'mrp.production'
_columns = {
'operator_ids': fields.one2many('mrp.workcenter.registry', 'production_id', 'Operator Registry'),
}
mrp_production()
class mrp_routing_workcenter(osv.osv):
_inherit = 'mrp.routing.workcenter'
_sql_constraints = [
('sequence_routing_uniq', 'unique (sequence,routing_id)', 'The sequence must be unique per routing !')
]
mrp_routing_workcenter()
|
agpl-3.0
| -5,955,302,422,474,086,000 | 54.216931 | 255 | 0.589459 | false |
merlinmarek/ICTScan
|
Python/perspective_transformation/perspective_transformation.py
|
1
|
2001
|
#!/usr/bin/env python
import sys
import cv2
import numpy as np
point_count = 0;
y = [];
x = [];
def run_perspective_transform():
global src
src_quad = np.array([(x[0], y[0]), (x[1], y[1]), (x[2], y[2]), (x[3], y[3])], np.float32);
dst_quad = np.array([(0.0, 0.0), (1032.0, 0.0), (1032.0, 581.0), (0.0, 581.0)], np.float32);
transf_matr = cv2.getPerspectiveTransform(src_quad, dst_quad); # src, dst,
transf_img = cv2.warpPerspective(src, transf_matr, (1032, 581));
print transf_matr
cv2.imwrite('pers_t.jpg', transf_img);
cv2.namedWindow("Transformiert", cv2.WINDOW_AUTOSIZE);
cv2.imshow("Transformiert", transf_img);
grau = cv2.cvtColor(transf_img, cv2.COLOR_BGR2GRAY);
cannyImg = cv2.Canny(grau, 50, 150, 3);
cv2.namedWindow("Canny", cv2.WINDOW_AUTOSIZE);
cv2.imshow("Canny", cannyImg);
pass
#par1 = 0 -> Mouse move
#par1 = 1 -> Mouse down
#par1 = 4 -> Mouse up
#par 2 = x-coord
#par3 = y-coord
#par4 = ?
#par5 = userdata
def callback_onMouse(par1, par2, par3, par4, par5):
global point_count;
global src;
if par1 == 1:
point_count = point_count + 1;
print("Point{2}: X:{0}; Y:{1}".format(par2, par3,point_count));
x.append(par2);
y.append(par3);
if point_count == 4:
#cv2.line(src, (x[0], y[0]), (x[1], y[1]), (0, 0, 255), 1);
#cv2.line(src, (x[1], y[1]), (x[2], y[2]), (0, 0, 255), 1);
#cv2.line(src, (x[2], y[2]), (x[3], y[3]), (0, 0, 255), 1);
#cv2.line(src, (x[3], y[3]), (x[0], y[0]), (0, 0, 255), 1);
            run_perspective_transform()
cv2.imshow("Quelle", src);
pass
pass
pass
help_message = "USAGE: perspective_transform.py [<image>]\nSelect 4 Points in following order:\nupper-left, upper-right, bottom-right, bottom-left\nClose with 'Esc'\n"
try: fn = sys.argv[1]
except:
print help_message
exit()
src = cv2.imread(fn, True);
cv2.namedWindow("Quelle", cv2.WINDOW_NORMAL);
cv2.imshow("Quelle", src);
cv2.setMouseCallback("Quelle", callback_onMouse, "Hello World!");
c = 0;
while c != 1048603:  # 1048603 corresponds to the 'Esc' key in this OpenCV build
c = cv2.waitKey(0)
print(c);
pass
|
bsd-2-clause
| -1,209,146,062,546,825,700 | 27.183099 | 167 | 0.623188 | false |
EBI-Metagenomics/emgapi
|
emgcli/routers.py
|
1
|
1165
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2017 EMBL - European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rest_framework import routers
class ApiBrowserView(routers.APIRootView):
"""MGnify API provides programmatic access to the data for cross-database complex queries. For more details review the documentation.""" # noqa
pass
class DefaultRouter(routers.DefaultRouter):
"""
Custom default router extends the rest_framework DefaultRouter and
adds in a default API root view
"""
APIRootView = ApiBrowserView
def extend(self, router):
self.registry.extend(router.registry)
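# Illustrative usage sketch (the router instances below are assumptions, not part of
# this module):
#   router = DefaultRouter(trailing_slash=False)
#   router.extend(some_app_router)  # merge registrations from another router
#   urlpatterns = router.urls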
|
apache-2.0
| 6,991,314,538,226,056,000 | 32.285714 | 148 | 0.747639 | false |
rzr/synapse
|
synapse/handlers/__init__.py
|
1
|
2767
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.appservice.scheduler import AppServiceScheduler
from synapse.appservice.api import ApplicationServiceApi
from .register import RegistrationHandler
from .room import (
RoomCreationHandler, RoomMemberHandler, RoomListHandler
)
from .message import MessageHandler
from .events import EventStreamHandler, EventHandler
from .federation import FederationHandler
from .profile import ProfileHandler
from .presence import PresenceHandler
from .directory import DirectoryHandler
from .typing import TypingNotificationHandler
from .admin import AdminHandler
from .appservice import ApplicationServicesHandler
from .sync import SyncHandler
from .auth import AuthHandler
from .identity import IdentityHandler
from .receipts import ReceiptsHandler
class Handlers(object):
""" A collection of all the event handlers.
There's no need to lazily create these; we'll just make them all eagerly
at construction time.
"""
def __init__(self, hs):
self.registration_handler = RegistrationHandler(hs)
self.message_handler = MessageHandler(hs)
self.room_creation_handler = RoomCreationHandler(hs)
self.room_member_handler = RoomMemberHandler(hs)
self.event_stream_handler = EventStreamHandler(hs)
self.event_handler = EventHandler(hs)
self.federation_handler = FederationHandler(hs)
self.profile_handler = ProfileHandler(hs)
self.presence_handler = PresenceHandler(hs)
self.room_list_handler = RoomListHandler(hs)
self.directory_handler = DirectoryHandler(hs)
self.typing_notification_handler = TypingNotificationHandler(hs)
self.admin_handler = AdminHandler(hs)
self.receipts_handler = ReceiptsHandler(hs)
asapi = ApplicationServiceApi(hs)
self.appservice_handler = ApplicationServicesHandler(
hs, asapi, AppServiceScheduler(
clock=hs.get_clock(),
store=hs.get_datastore(),
as_api=asapi
)
)
self.sync_handler = SyncHandler(hs)
self.auth_handler = AuthHandler(hs)
self.identity_handler = IdentityHandler(hs)
|
apache-2.0
| -1,623,106,301,995,591,200 | 38.528571 | 76 | 0.731117 | false |
rob-smallshire/asq
|
asq/test/test_single.py
|
1
|
1353
|
import unittest
from asq.queryables import Queryable
__author__ = "Sixty North"
class TestSingle(unittest.TestCase):
def test_single(self):
a = [5]
b = Queryable(a).single()
self.assertEqual(b, 5)
def test_single_empty(self):
a = []
self.assertRaises(ValueError, lambda: Queryable(a).single())
def test_single_multiple(self):
a = [4, 7]
self.assertRaises(ValueError, lambda: Queryable(a).single())
def test_single_predicate(self):
a = ["Aardvark", "Cat", "Dog", "Elephant"]
b = Queryable(a).single(lambda x: x.startswith('D'))
self.assertEqual(b, "Dog")
def test_single_predicate_not_callable(self):
a = ["Aardvark", "Cat", "Dog", "Elephant"]
self.assertRaises(TypeError, lambda: Queryable(a).single("not callable"))
def test_single_predicate_empty(self):
a = []
self.assertRaises(ValueError, lambda: Queryable(a).single(lambda x: x.startswith('D')))
def test_single_predicate_multiple(self):
a = ["Aardvark", "Cat", "Dog", "Elephant", "Dolphin"]
self.assertRaises(ValueError, lambda: Queryable(a).single(lambda x: x.startswith('D')))
def test_single_closed(self):
a = [5]
b = Queryable(a)
b.close()
self.assertRaises(ValueError, lambda: b.single())
|
mit
| 4,489,519,764,823,837,700 | 30.465116 | 95 | 0.610495 | false |
muratcansahin/itucsdb1626
|
foodle/controllers/post_comments_controller.py
|
1
|
3956
|
#!/usr/bin/env python3
import foodle
import psycopg2
from psycopg2.extras import DictCursor
from flask import Blueprint, render_template, current_app, request, redirect, make_response, g
from foodle.utils.auth_hook import auth_hook_functor
post_comments_controller = Blueprint('post_comments_controller', __name__)
@post_comments_controller.route('/', methods=['GET'])
def index():
limit = request.args.get('limit') or 20
offset = request.args.get('offset') or 0
with psycopg2.connect(foodle.app.config['dsn']) as conn:
with conn.cursor(cursor_factory=DictCursor) as curs:
curs.execute(
"""
SELECT pc.id, u.username, pc.post_id, pc.body
FROM post_comments AS pc
INNER JOIN users AS u ON pc.user_id = u.id
LIMIT %s
OFFSET %s
""",
[limit, offset])
post_comments = curs.fetchall()
curs.execute(
"""
SELECT count(id)
FROM post_comments
""")
count = curs.fetchone()[0]
return render_template('/post_comments/index.html', post_comments=post_comments, count=count)
@post_comments_controller.route('/<int:id>', methods=['GET'])
def show(id):
with psycopg2.connect(foodle.app.config['dsn']) as conn:
with conn.cursor(cursor_factory=DictCursor) as curs:
curs.execute(
"""
SELECT *
FROM post_comments
WHERE id = %s
""",
[id])
post_comment = curs.fetchone()
if post_comment is not None:
return render_template('/post_comments/show.html', post_comment=post_comment)
else:
return "Entity not found.", 404
@post_comments_controller.route('/<int:post_id>/comments/', methods=['POST'])
@auth_hook_functor
def create(post_id):
user_id = g.current_user['id']
body = request.json['body']
if not isinstance(body, str) or not isinstance(user_id, int):
return "Request body is unprocessable", 422
with psycopg2.connect(foodle.app.config['dsn']) as conn:
with conn.cursor(cursor_factory=DictCursor) as curs:
curs.execute(
"""
INSERT INTO post_comments
(user_id, post_id, body)
VALUES (%s, %s, %s)
RETURNING id
""",
[user_id, post_id, body])
post_comment = curs.fetchone()
resp = make_response()
resp.headers['location'] = '/post_comments/' + str(post_comment['id'])
return resp, 201
@post_comments_controller.route('/<int:id>', methods=['PUT', 'PATCH'])
def update(id):
if request.json.get('id') is not None or not isinstance(request.json.get('body'), str):
return "Request is unprocessable.", 422
request.json['id'] = id
with psycopg2.connect(foodle.app.config['dsn']) as conn:
with conn.cursor(cursor_factory=DictCursor) as curs:
curs.execute(
"""
UPDATE post_comments
SET body = %(body)s
WHERE id = %(id)s
""", request.json)
            if curs.rowcount != 0:
resp = make_response()
resp.headers['location'] = '/post_comments/' + str(id)
return resp, 200
else:
return "Entity not found.", 404
@post_comments_controller.route('/<int:post_id>/comments/<int:id>/', methods=['DELETE'])
def delete(post_id, id):
with psycopg2.connect(foodle.app.config['dsn']) as conn:
with conn.cursor(cursor_factory=DictCursor) as curs:
curs.execute(
"""
DELETE FROM post_comments
WHERE id = %s
""",
[id])
            if curs.rowcount != 0:
return "", 204
else:
return "Entity not found.", 404
|
gpl-3.0
| 8,332,786,629,607,195,000 | 30.149606 | 105 | 0.555106 | false |
StackStorm/python-mistralclient
|
mistralclient/tests/unit/v2/test_cli_actions.py
|
1
|
6033
|
# Copyright 2014 Mirantis, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
import six
from mistralclient.api.v2 import actions
from mistralclient.commands.v2 import actions as action_cmd
from mistralclient.commands.v2 import base as cmd_base
from mistralclient.tests.unit import base
ACTION_DICT = {
'id': '1234-4567-7894-7895',
'name': 'a',
'is_system': True,
'input': "param1",
'description': 'My cool action',
'tags': ['test'],
'created_at': '1',
'updated_at': '1'
}
ACTION_DEF = """
---
version: '2.0'
base: std.echo
base-parameters:
output: "<% $.str1 %><% $.str2 %>"
output: "<% $ %><% $ %>"
"""
ACTION_WITH_DEF_DICT = ACTION_DICT.copy()
ACTION_WITH_DEF_DICT.update({'definition': ACTION_DEF})
ACTION = actions.Action(mock, ACTION_DICT)
ACTION_WITH_DEF = actions.Action(mock, ACTION_WITH_DEF_DICT)
class TestCLIActionsV2(base.BaseCommandTest):
@mock.patch('argparse.open', create=True)
def test_create(self, mock_open):
self.client.actions.create.return_value = [ACTION]
result = self.call(action_cmd.Create, app_args=['1.txt'])
self.assertEqual(
[('1234-4567-7894-7895', 'a', True, "param1",
'My cool action', 'test', '1', '1')],
result[1]
)
@mock.patch('argparse.open', create=True)
def test_create_public(self, mock_open):
self.client.actions.create.return_value = [ACTION]
result = self.call(
action_cmd.Create,
app_args=['1.txt', '--public']
)
self.assertEqual(
[('1234-4567-7894-7895', 'a', True, "param1",
'My cool action', 'test', '1', '1')],
result[1]
)
self.assertEqual(
'public',
self.client.actions.create.call_args[1]['scope']
)
@mock.patch('argparse.open', create=True)
def test_create_long_input(self, mock_open):
action_long_input_dict = ACTION_DICT.copy()
long_input = ', '.join(
['var%s' % i for i in six.moves.xrange(10)]
)
action_long_input_dict['input'] = long_input
workflow_long_input = actions.Action(
mock.Mock(),
action_long_input_dict
)
self.client.actions.create.return_value = [workflow_long_input]
result = self.call(action_cmd.Create, app_args=['1.txt'])
self.assertEqual(
[('1234-4567-7894-7895', 'a', True, cmd_base.cut(long_input),
'My cool action', 'test', '1', '1')],
result[1]
)
@mock.patch('argparse.open', create=True)
def test_update(self, mock_open):
self.client.actions.update.return_value = [ACTION]
result = self.call(action_cmd.Update, app_args=['my_action.yaml'])
self.assertEqual(
[('1234-4567-7894-7895', 'a', True, "param1",
'My cool action', 'test', '1', '1')],
result[1]
)
@mock.patch('argparse.open', create=True)
def test_update_public(self, mock_open):
self.client.actions.update.return_value = [ACTION]
result = self.call(
action_cmd.Update,
app_args=['my_action.yaml', '--public']
)
self.assertEqual(
[('1234-4567-7894-7895', 'a', True, "param1",
'My cool action', 'test', '1', '1')],
result[1]
)
self.assertEqual(
'public',
self.client.actions.update.call_args[1]['scope']
)
def test_list(self):
self.client.actions.list.return_value = [ACTION]
result = self.call(action_cmd.List)
self.assertEqual(
[('1234-4567-7894-7895', 'a', True, "param1",
'My cool action', 'test', '1', '1')],
result[1]
)
def test_get(self):
self.client.actions.get.return_value = ACTION
result = self.call(action_cmd.Get, app_args=['name'])
self.assertEqual(
('1234-4567-7894-7895', 'a', True, "param1",
'My cool action', 'test', '1', '1'),
result[1]
)
def test_delete(self):
self.call(action_cmd.Delete, app_args=['name'])
self.client.actions.delete.assert_called_once_with('name')
def test_delete_with_multi_names(self):
self.call(action_cmd.Delete, app_args=['name1', 'name2'])
self.assertEqual(2, self.client.actions.delete.call_count)
self.assertEqual(
[mock.call('name1'), mock.call('name2')],
self.client.actions.delete.call_args_list
)
def test_get_definition(self):
self.client.actions.get.return_value = ACTION_WITH_DEF
self.call(action_cmd.GetDefinition, app_args=['name'])
self.app.stdout.write.assert_called_with(ACTION_DEF)
@mock.patch('argparse.open', create=True)
def test_validate(self, mock_open):
self.client.actions.validate.return_value = {'valid': True}
result = self.call(action_cmd.Validate, app_args=['action.yaml'])
self.assertEqual((True, None), result[1])
@mock.patch('argparse.open', create=True)
def test_validate_failed(self, mock_open):
self.client.actions.validate.return_value = {
'valid': False,
'error': 'Invalid DSL...'
}
result = self.call(action_cmd.Validate, app_args=['action.yaml'])
self.assertEqual((False, 'Invalid DSL...'), result[1])
|
apache-2.0
| -2,008,417,211,071,417,300 | 29.316583 | 78 | 0.582132 | false |
lfblogs/aiopy
|
aiopy/required/aiohttp/protocol.py
|
1
|
27699
|
"""Http related parsers and protocol."""
import collections
import functools
import http.server
import itertools
import re
import string
import sys
import zlib
from wsgiref.handlers import format_date_time
from aiopy.required import aiohttp
from aiopy.required.aiohttp import hdrs, errors
from .multidict import CIMultiDict
from .log import internal_logger
__all__ = ('HttpMessage', 'Request', 'Response',
'HttpVersion', 'HttpVersion10', 'HttpVersion11',
'RawRequestMessage', 'RawResponseMessage',
'HttpPrefixParser', 'HttpRequestParser', 'HttpResponseParser',
'HttpPayloadParser')
ASCIISET = set(string.printable)
METHRE = re.compile('[A-Z0-9$-_.]+')
VERSRE = re.compile('HTTP/(\d+)\.(\d+)')
HDRRE = re.compile('[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]')
CONTINUATION = (' ', '\t')
EOF_MARKER = object()
EOL_MARKER = object()
STATUS_LINE_READY = object()
RESPONSES = http.server.BaseHTTPRequestHandler.responses
HttpVersion = collections.namedtuple(
'HttpVersion', ['major', 'minor'])
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
RawStatusLineMessage = collections.namedtuple(
'RawStatusLineMessage', ['method', 'path', 'version'])
RawRequestMessage = collections.namedtuple(
'RawRequestMessage',
['method', 'path', 'version', 'headers', 'should_close', 'compression'])
RawResponseMessage = collections.namedtuple(
'RawResponseMessage',
['version', 'code', 'reason', 'headers', 'should_close', 'compression'])
class HttpParser:
def __init__(self, max_line_size=8190, max_headers=32768,
max_field_size=8190):
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
def parse_headers(self, lines):
"""Parses RFC2822 headers from a stream.
        Line continuations are supported. Header names are upper-cased.
        Returns a (headers, close_connection, content_encoding) tuple,
        where headers is a CIMultiDict.
"""
close_conn = None
encoding = None
headers = CIMultiDict()
lines_idx = 1
line = lines[1]
while line:
header_length = len(line)
# Parse initial header name : value pair.
try:
name, value = line.split(':', 1)
except ValueError:
raise errors.InvalidHeader(line) from None
name = name.strip(' \t').upper()
if HDRRE.search(name):
raise errors.InvalidHeader(name)
# next line
lines_idx += 1
line = lines[lines_idx]
# consume continuation lines
continuation = line and line[0] in CONTINUATION
if continuation:
value = [value]
while continuation:
header_length += len(line)
if header_length > self.max_field_size:
raise errors.LineTooLong(
'limit request headers fields size')
value.append(line)
# next line
lines_idx += 1
line = lines[lines_idx]
continuation = line[0] in CONTINUATION
value = '\r\n'.join(value)
else:
if header_length > self.max_field_size:
raise errors.LineTooLong(
'limit request headers fields size')
value = value.strip()
# keep-alive and encoding
if name == hdrs.CONNECTION:
v = value.lower()
if v == 'close':
close_conn = True
elif v == 'keep-alive':
close_conn = False
elif name == hdrs.CONTENT_ENCODING:
enc = value.lower()
if enc in ('gzip', 'deflate'):
encoding = enc
headers.add(name, value)
return headers, close_conn, encoding
class HttpPrefixParser:
"""Waits for 'HTTP' prefix (non destructive)"""
def __init__(self, allowed_methods=()):
self.allowed_methods = [m.upper() for m in allowed_methods]
def __call__(self, out, buf):
raw_data = yield from buf.waituntil(b' ', 12)
method = raw_data.decode('ascii', 'surrogateescape').strip()
# method
method = method.upper()
if not METHRE.match(method):
raise errors.BadStatusLine(method)
# allowed method
if self.allowed_methods and method not in self.allowed_methods:
raise errors.HttpMethodNotAllowed(message=method)
out.feed_data(method, len(method))
out.feed_eof()
class HttpRequestParser(HttpParser):
"""Read request status line. Exception errors.BadStatusLine
could be raised in case of any errors in status line.
Returns RawRequestMessage.
"""
def __call__(self, out, buf):
# read http message (request line + headers)
try:
raw_data = yield from buf.readuntil(
b'\r\n\r\n', self.max_headers)
except errors.LineLimitExceededParserError as exc:
raise errors.LineTooLong(exc.limit) from None
lines = raw_data.decode(
'utf-8', 'surrogateescape').split('\r\n')
# request line
line = lines[0]
try:
method, path, version = line.split(None, 2)
except ValueError:
raise errors.BadStatusLine(line) from None
# method
method = method.upper()
if not METHRE.match(method):
raise errors.BadStatusLine(method)
# version
try:
if version.startswith('HTTP/'):
n1, n2 = version[5:].split('.', 1)
version = HttpVersion(int(n1), int(n2))
else:
raise errors.BadStatusLine(version)
except:
raise errors.BadStatusLine(version)
# read headers
headers, close, compression = self.parse_headers(lines)
        if close is None:  # no Connection header was present in the request
            if version <= HttpVersion10:  # HTTP/1.0 defaults to close; must ask to keep alive
                close = True
            else:  # HTTP/1.1 defaults to keep-alive; must ask to close
                close = False
out.feed_data(
RawRequestMessage(
method, path, version, headers, close, compression),
len(raw_data))
out.feed_eof()
class HttpResponseParser(HttpParser):
"""Read response status line and headers.
BadStatusLine could be raised in case of any errors in status line.
Returns RawResponseMessage"""
def __call__(self, out, buf):
# read http message (response line + headers)
try:
raw_data = yield from buf.readuntil(
b'\r\n\r\n', self.max_line_size + self.max_headers)
except errors.LineLimitExceededParserError as exc:
raise errors.LineTooLong(exc.limit) from None
lines = raw_data.decode(
'utf-8', 'surrogateescape').split('\r\n')
line = lines[0]
try:
version, status = line.split(None, 1)
except ValueError:
raise errors.BadStatusLine(line) from None
else:
try:
status, reason = status.split(None, 1)
except ValueError:
reason = ''
# version
match = VERSRE.match(version)
if match is None:
raise errors.BadStatusLine(line)
version = HttpVersion(int(match.group(1)), int(match.group(2)))
# The status code is a three-digit number
try:
status = int(status)
except ValueError:
raise errors.BadStatusLine(line) from None
if status < 100 or status > 999:
raise errors.BadStatusLine(line)
# read headers
headers, close, compression = self.parse_headers(lines)
if close is None:
close = version <= HttpVersion10
out.feed_data(
RawResponseMessage(
version, status, reason.strip(),
headers, close, compression),
len(raw_data))
out.feed_eof()
class HttpPayloadParser:
def __init__(self, message, length=None, compression=True,
readall=False, response_with_body=True):
self.message = message
self.length = length
self.compression = compression
self.readall = readall
self.response_with_body = response_with_body
def __call__(self, out, buf):
# payload params
length = self.message.headers.get(hdrs.CONTENT_LENGTH, self.length)
if hdrs.SEC_WEBSOCKET_KEY1 in self.message.headers:
length = 8
# payload decompression wrapper
if self.compression and self.message.compression:
out = DeflateBuffer(out, self.message.compression)
# payload parser
if not self.response_with_body:
# don't parse payload if it's not expected to be received
pass
elif 'chunked' in self.message.headers.get(
hdrs.TRANSFER_ENCODING, ''):
yield from self.parse_chunked_payload(out, buf)
elif length is not None:
try:
length = int(length)
except ValueError:
raise errors.InvalidHeader(hdrs.CONTENT_LENGTH) from None
if length < 0:
raise errors.InvalidHeader(hdrs.CONTENT_LENGTH)
elif length > 0:
yield from self.parse_length_payload(out, buf, length)
else:
if self.readall and getattr(self.message, 'code', 0) != 204:
yield from self.parse_eof_payload(out, buf)
elif getattr(self.message, 'method', None) in ('PUT', 'POST'):
internal_logger.warning( # pragma: no cover
'Content-Length or Transfer-Encoding header is required')
out.feed_eof()
def parse_chunked_payload(self, out, buf):
"""Chunked transfer encoding parser."""
while True:
# read next chunk size
line = yield from buf.readuntil(b'\r\n', 8192)
i = line.find(b';')
if i >= 0:
line = line[:i] # strip chunk-extensions
else:
line = line.strip()
try:
size = int(line, 16)
except ValueError:
raise errors.TransferEncodingError(line) from None
if size == 0: # eof marker
break
# read chunk and feed buffer
while size:
chunk = yield from buf.readsome(size)
out.feed_data(chunk, len(chunk))
size = size - len(chunk)
# toss the CRLF at the end of the chunk
yield from buf.skip(2)
# read and discard trailer up to the CRLF terminator
yield from buf.skipuntil(b'\r\n')
def parse_length_payload(self, out, buf, length=0):
"""Read specified amount of bytes."""
required = length
while required:
chunk = yield from buf.readsome(required)
out.feed_data(chunk, len(chunk))
required -= len(chunk)
def parse_eof_payload(self, out, buf):
"""Read all bytes until eof."""
try:
while True:
chunk = yield from buf.readsome()
out.feed_data(chunk, len(chunk))
except aiohttp.EofStream:
pass
class DeflateBuffer:
"""DeflateStream decompress stream and feed data into specified stream."""
def __init__(self, out, encoding):
self.out = out
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
self.zlib = zlib.decompressobj(wbits=zlib_mode)
def feed_data(self, chunk, size):
try:
chunk = self.zlib.decompress(chunk)
except Exception:
raise errors.ContentEncodingError('deflate')
if chunk:
self.out.feed_data(chunk, len(chunk))
def feed_eof(self):
chunk = self.zlib.flush()
self.out.feed_data(chunk, len(chunk))
if not self.zlib.eof:
raise errors.ContentEncodingError('deflate')
self.out.feed_eof()
def wrap_payload_filter(func):
"""Wraps payload filter and piped filters.
    A filter is a generator that accepts arbitrary chunks of data,
    modifies them and emits a new stream of data.
    For example, given a stream of chunks ['1', '2', '3', '4', '5'],
    we can apply a chunking filter to it:
['1', '2', '3', '4', '5']
|
response.add_chunking_filter(2)
|
['12', '34', '5']
It is possible to use different filters at the same time.
    For example, to compress an incoming stream with 'deflate' encoding
    and then split it into chunks of 8192 bytes:
>>> response.add_compression_filter('deflate')
>>> response.add_chunking_filter(8192)
Filters do not alter transfer encoding.
    A filter can receive two types of data: bytes objects or EOF_MARKER.
    1. If the filter receives a bytes object, it should process the data,
       yield the processed data and then yield an EOL_MARKER object.
    2. If the filter receives EOF_MARKER, it should yield any remaining
       (buffered) data and then yield EOF_MARKER.
"""
@functools.wraps(func)
def wrapper(self, *args, **kw):
new_filter = func(self, *args, **kw)
filter = self.filter
if filter is not None:
next(new_filter)
self.filter = filter_pipe(filter, new_filter)
else:
self.filter = new_filter
next(self.filter)
return wrapper
def filter_pipe(filter, filter2, *,
EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
"""Creates pipe between two filters.
    filter_pipe() feeds the first filter with incoming data, then sends the
    values yielded by the first filter into filter2 and emits filter2's results.
    1. If filter_pipe receives a bytes object, it sends it to the first filter.
    2. It reads the values yielded by the first filter until it receives
       EOF_MARKER or EOL_MARKER.
    3. Each of these values is sent to the second filter.
    4. It reads the values yielded by the second filter until it receives
       EOF_MARKER or EOL_MARKER. Each of these values is yielded to the writer.
"""
chunk = yield
while True:
eof = chunk is EOF_MARKER
chunk = filter.send(chunk)
while chunk is not EOL_MARKER:
chunk = filter2.send(chunk)
while chunk not in (EOF_MARKER, EOL_MARKER):
yield chunk
chunk = next(filter2)
if chunk is not EOF_MARKER:
if eof:
chunk = EOF_MARKER
else:
chunk = next(filter)
else:
break
chunk = yield EOL_MARKER
class HttpMessage:
"""HttpMessage allows to write headers and payload to a stream.
    For example, let's say we want to read a file, compress it with deflate
    compression and then send it with chunked transfer encoding; the code may
    look like this:
>>> response = aiohttp.Response(transport, 200)
We have to use deflate compression first:
>>> response.add_compression_filter('deflate')
Then we want to split output stream into chunks of 1024 bytes size:
>>> response.add_chunking_filter(1024)
We can add headers to response with add_headers() method. add_headers()
does not send data to transport, send_headers() sends request/response
line and then sends headers:
>>> response.add_headers(
... ('Content-Disposition', 'attachment; filename="..."'))
>>> response.send_headers()
    Now we can use the chunked writer to write the stream to the network.
    The first call to the write() method sends the response status line and
    headers; add_header() and add_headers() are unavailable at this stage:
    >>> with open('...', 'rb') as fp:
... chunk = fp.read(8192)
... while chunk:
... response.write(chunk)
... chunk = fp.read(8192)
>>> response.write_eof()
"""
writer = None
    # 'filter' is used for altering write() behaviour:
    # add_compression_filter adds deflate/gzip compression and
    # add_chunking_filter splits incoming data into chunks.
filter = None
HOP_HEADERS = None # Must be set by subclass.
SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
sys.version_info, aiohttp.__version__)
status = None
status_line = b''
upgrade = False # Connection: UPGRADE
websocket = False # Upgrade: WEBSOCKET
has_chunked_hdr = False # Transfer-encoding: chunked
# subclass can enable auto sending headers with write() call,
# this is useful for wsgi's start_response implementation.
_send_headers = False
def __init__(self, transport, version, close):
self.transport = transport
self.version = version
self.closing = close
self.keepalive = None
self.chunked = False
self.length = None
self.headers = CIMultiDict()
self.headers_sent = False
self.output_length = 0
self._output_size = 0
def force_close(self):
self.closing = True
self.keepalive = False
def enable_chunked_encoding(self):
self.chunked = True
def keep_alive(self):
if self.keepalive is None:
if self.version < HttpVersion10:
# keep alive not supported at all
return False
if self.version == HttpVersion10:
if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
return True
else: # no headers means we close for Http 1.0
return False
else:
return not self.closing
else:
return self.keepalive
def is_headers_sent(self):
return self.headers_sent
def add_header(self, name, value):
"""Analyze headers. Calculate content length,
removes hop headers, etc."""
assert not self.headers_sent, 'headers have been sent already'
assert isinstance(name, str), \
'Header name should be a string, got {!r}'.format(name)
assert set(name).issubset(ASCIISET), \
'Header name should contain ASCII chars, got {!r}'.format(name)
assert isinstance(value, str), \
'Header {!r} should have string value, got {!r}'.format(
name, value)
name = name.strip().upper()
value = value.strip()
if name == hdrs.CONTENT_LENGTH:
self.length = int(value)
if name == hdrs.TRANSFER_ENCODING:
self.has_chunked_hdr = value.lower().strip() == 'chunked'
if name == hdrs.CONNECTION:
val = value.lower()
# handle websocket
if 'upgrade' in val:
self.upgrade = True
# connection keep-alive
elif 'close' in val:
self.keepalive = False
elif 'keep-alive' in val:
self.keepalive = True
elif name == hdrs.UPGRADE:
if 'websocket' in value.lower():
self.websocket = True
self.headers[name] = value
elif name not in self.HOP_HEADERS:
# ignore hop-by-hop headers
self.headers.add(name, value)
def add_headers(self, *headers):
"""Adds headers to a http message."""
for name, value in headers:
self.add_header(name, value)
def send_headers(self, _sep=': ', _end='\r\n'):
"""Writes headers to a stream. Constructs payload writer."""
        # A chunked response is only used for HTTP/1.1 clients or newer,
        # and only when no Content-Length header is set.
# Do not use chunked responses when the response is guaranteed to
# not have a response body (304, 204).
assert not self.headers_sent, 'headers have been sent already'
self.headers_sent = True
if self.chunked or (self.length is None and
self.version >= HttpVersion11 and
self.status not in (304, 204)):
self.writer = self._write_chunked_payload()
self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
elif self.length is not None:
self.writer = self._write_length_payload(self.length)
else:
self.writer = self._write_eof_payload()
next(self.writer)
self._add_default_headers()
# status + headers
headers = ''.join(itertools.chain(
(self.status_line,),
*((k, _sep, v, _end) for k, v in self.headers.items())))
headers = headers.encode('utf-8') + b'\r\n'
self.output_length += len(headers)
self.transport.write(headers)
def _add_default_headers(self):
# set the connection header
if self.upgrade:
connection = 'upgrade'
elif not self.closing if self.keepalive is None else self.keepalive:
connection = 'keep-alive'
else:
connection = 'close'
self.headers[hdrs.CONNECTION] = connection
def write(self, chunk, *,
drain=False, EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
"""Writes chunk of data to a stream by using different writers.
writer uses filter to modify chunk of data.
write_eof() indicates end of stream.
writer can't be used after write_eof() method being called.
write() return drain future.
"""
assert (isinstance(chunk, (bytes, bytearray)) or
chunk is EOF_MARKER), chunk
size = self.output_length
if self._send_headers and not self.headers_sent:
self.send_headers()
assert self.writer is not None, 'send_headers() is not called.'
if self.filter:
chunk = self.filter.send(chunk)
while chunk not in (EOF_MARKER, EOL_MARKER):
self.writer.send(chunk)
chunk = next(self.filter)
else:
if chunk is not EOF_MARKER:
self.writer.send(chunk)
self._output_size += self.output_length - size
if self._output_size > 64 * 1024:
if drain:
self._output_size = 0
return self.transport.drain()
return ()
def write_eof(self):
self.write(EOF_MARKER)
try:
self.writer.throw(aiohttp.EofStream())
except StopIteration:
pass
return self.transport.drain()
def _write_chunked_payload(self):
"""Write data in chunked transfer encoding."""
while True:
try:
chunk = yield
except aiohttp.EofStream:
self.transport.write(b'0\r\n\r\n')
self.output_length += 5
break
chunk = bytes(chunk)
chunk_len = '{:x}\r\n'.format(len(chunk)).encode('ascii')
self.transport.write(chunk_len)
self.transport.write(chunk)
self.transport.write(b'\r\n')
self.output_length += len(chunk_len) + len(chunk) + 2
def _write_length_payload(self, length):
"""Write specified number of bytes to a stream."""
while True:
try:
chunk = yield
except aiohttp.EofStream:
break
if length:
l = len(chunk)
if length >= l:
self.transport.write(chunk)
self.output_length += l
length = length-l
else:
self.transport.write(chunk[:length])
self.output_length += length
length = 0
def _write_eof_payload(self):
while True:
try:
chunk = yield
except aiohttp.EofStream:
break
self.transport.write(chunk)
self.output_length += len(chunk)
@wrap_payload_filter
def add_chunking_filter(self, chunk_size=16*1024, *,
EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
"""Split incoming stream into chunks."""
buf = bytearray()
chunk = yield
while True:
if chunk is EOF_MARKER:
if buf:
yield buf
yield EOF_MARKER
else:
buf.extend(chunk)
while len(buf) >= chunk_size:
chunk = bytes(buf[:chunk_size])
del buf[:chunk_size]
yield chunk
chunk = yield EOL_MARKER
@wrap_payload_filter
def add_compression_filter(self, encoding='deflate', *,
EOF_MARKER=EOF_MARKER, EOL_MARKER=EOL_MARKER):
"""Compress incoming stream with deflate or gzip encoding."""
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
zcomp = zlib.compressobj(wbits=zlib_mode)
chunk = yield
while True:
if chunk is EOF_MARKER:
yield zcomp.flush()
chunk = yield EOF_MARKER
else:
yield zcomp.compress(chunk)
chunk = yield EOL_MARKER
class Response(HttpMessage):
"""Create http response message.
    Transport is a socket stream transport. status is a response status code;
    it has to be an integer value. http_version is a tuple that represents the
    HTTP version: (1, 0) stands for HTTP/1.0 and (1, 1) for HTTP/1.1.
"""
HOP_HEADERS = ()
@staticmethod
def calc_reason(status):
record = RESPONSES.get(status)
if record is not None:
reason = record[0]
else:
reason = str(status)
return reason
def __init__(self, transport, status,
http_version=HttpVersion11, close=False, reason=None):
super().__init__(transport, http_version, close)
self.status = status
if reason is None:
reason = self.calc_reason(status)
self.reason = reason
self.status_line = 'HTTP/{}.{} {} {}\r\n'.format(
http_version[0], http_version[1], status, reason)
def _add_default_headers(self):
super()._add_default_headers()
if hdrs.DATE not in self.headers:
# format_date_time(None) is quite expensive
self.headers.setdefault(hdrs.DATE, format_date_time(None))
self.headers.setdefault(hdrs.SERVER, self.SERVER_SOFTWARE)
class Request(HttpMessage):
HOP_HEADERS = ()
def __init__(self, transport, method, path,
http_version=HttpVersion11, close=False):
# set the default for HTTP 1.0 to be different
# will only be overwritten with keep-alive header
if http_version < HttpVersion11:
close = True
super().__init__(transport, http_version, close)
self.method = method
self.path = path
self.status_line = '{0} {1} HTTP/{2[0]}.{2[1]}\r\n'.format(
method, path, http_version)
def _add_default_headers(self):
super()._add_default_headers()
self.headers.setdefault(hdrs.USER_AGENT, self.SERVER_SOFTWARE)
|
gpl-3.0
| 7,311,727,650,136,133,000 | 31.021965 | 79 | 0.567205 | false |
catmaid/catpy
|
tests/test_catmaid_client_application.py
|
1
|
1603
|
from __future__ import absolute_import
try:
import mock
except ImportError:
from unittest import mock
import pytest
from catpy.client import CatmaidClient
from catpy.applications.base import CatmaidClientApplication
PROJECT_ID = 10
BASE_URL = "http://not-catmaid.org"
@pytest.fixture
def catmaid_mock():
catmaid = mock.Mock()
catmaid.project_id = PROJECT_ID
catmaid.base_url = BASE_URL
return catmaid
@pytest.fixture
def ConcreteApp():
class Subclass(CatmaidClientApplication):
pass
return Subclass
def test_property_passthrough(catmaid_mock, ConcreteApp):
app = ConcreteApp(catmaid_mock)
assert app.project_id == catmaid_mock.project_id == PROJECT_ID
assert app.base_url == catmaid_mock.base_url == BASE_URL
def test_get_post_call_fetch(catmaid_mock, ConcreteApp):
app = ConcreteApp(catmaid_mock)
rel_url = "potato"
app.get(rel_url, params=None, raw=False)
catmaid_mock.fetch.assert_called_with(rel_url, method="GET", data=None, raw=False)
app.post(rel_url, data=None, raw=False)
catmaid_mock.fetch.assert_called_with(rel_url, method="POST", data=None, raw=False)
def test_fetch_passthrough(catmaid_mock, ConcreteApp):
app = ConcreteApp(catmaid_mock)
args = (1, 2)
kwargs = {"a": 1}
app.fetch(*args, **kwargs)
catmaid_mock.fetch.assert_called_with(*args, **kwargs)
def test_from_json(ConcreteApp):
cred_path = "cred/path.json"
with mock.patch.object(CatmaidClient, "from_json") as from_json:
ConcreteApp.from_json(cred_path)
from_json.assert_called_with(cred_path)
|
mit
| 3,556,959,279,604,480,500 | 24.046875 | 87 | 0.704304 | false |
devsar/ae-people
|
apps/stats/views.py
|
1
|
1376
|
"""
Stats views
"""
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import simplejson
from django.utils.translation import ugettext_lazy as _
from google.appengine.api import taskqueue
from google.appengine.ext.deferred import deferred
from country.models import Country, COUNTRIES_CODE
from users.models import Developer
from country.models import Country
from stats.models import DeveloperStats, TagStats
def update_stats(request):
"""
Update stats trigger view
"""
DeveloperStats.update()
return HttpResponse("")
def view_stats(request):
"""
Show AppEngine general stats
"""
countries = Country.all().filter("total >", 0).order("-total").fetch(250)
#Get last stats
stats = DeveloperStats.all().order("-timestamp").get()
tags = TagStats.all().filter("developer_stats =", stats)
tags = tags.order("-total").fetch(20)
return render_to_response("stats/stats.html",
{'stats': stats,
'countries': countries,
'tags': tags},
RequestContext(request))
|
apache-2.0
| -5,724,563,752,425,288,000 | 27.081633 | 77 | 0.655523 | false |
dc3-plaso/plaso
|
tests/engine/engine.py
|
1
|
4086
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests the engine."""
import unittest
try:
from guppy import hpy
except ImportError:
hpy = None
from dfvfs.helpers import fake_file_system_builder
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.path import path_spec
from dfvfs.resolver import context
from dfvfs.vfs import file_system
from plaso.engine import engine
from tests import test_lib as shared_test_lib
class TestEngine(engine.BaseEngine):
"""Class that defines the processing engine for testing."""
def __init__(self):
"""Initialize a test engine object."""
file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()
test_file_path = shared_test_lib.GetTestFilePath([u'SOFTWARE'])
file_system_builder.AddFileReadData(
u'/Windows/System32/config/SOFTWARE', test_file_path)
test_file_path = shared_test_lib.GetTestFilePath([u'SYSTEM'])
file_system_builder.AddFileReadData(
u'/Windows/System32/config/SYSTEM', test_file_path)
super(TestEngine, self).__init__()
self._file_system = file_system_builder.file_system
self._mount_point = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_FAKE, location=u'/')
def GetSourceFileSystem(self, source_path_spec, resolver_context=None):
"""Retrieves the file system of the source.
Args:
source_path_spec (dfvfs.PathSpec): path specifications of the sources
to process.
resolver_context (dfvfs.Context): resolver context.
Returns:
tuple: containing:
dfvfs.FileSystem: file system
path.PathSpec: mount point path specification. The mount point path
specification refers to either a directory or a volume on a storage
media device or image. It is needed by the dfVFS file system
searcher (FileSystemSearcher) to indicate the base location of
the file system
"""
self._file_system.Open(self._mount_point)
return self._file_system, self._mount_point
class BaseEngineTest(shared_test_lib.BaseTestCase):
"""Tests for the engine object."""
# pylint: disable=protected-access
@shared_test_lib.skipUnlessHasTestFile([u'ímynd.dd'])
def testGetSourceFileSystem(self):
"""Tests the GetSourceFileSystem function."""
test_engine = engine.BaseEngine()
source_path = self._GetTestFilePath([u'ímynd.dd'])
os_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
source_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
parent=os_path_spec)
resolver_context = context.Context()
test_file_system, test_mount_point = test_engine.GetSourceFileSystem(
source_path_spec, resolver_context=resolver_context)
self.assertIsNotNone(test_file_system)
self.assertIsInstance(test_file_system, file_system.FileSystem)
self.assertIsNotNone(test_mount_point)
self.assertIsInstance(test_mount_point, path_spec.PathSpec)
test_file_system.Close()
with self.assertRaises(RuntimeError):
test_engine.GetSourceFileSystem(None)
@shared_test_lib.skipUnlessHasTestFile([u'SOFTWARE'])
@shared_test_lib.skipUnlessHasTestFile([u'SYSTEM'])
def testPreprocessSources(self):
"""Tests the PreprocessSources function."""
test_engine = TestEngine()
source_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_FAKE, location=u'/')
test_engine.PreprocessSources([source_path_spec])
self.assertEqual(test_engine.knowledge_base.platform, u'Windows')
test_engine.PreprocessSources([None])
def testSupportsMemoryProfiling(self):
"""Tests the SupportsMemoryProfiling function."""
test_engine = engine.BaseEngine()
expected_result = hpy is not None
result = test_engine.SupportsMemoryProfiling()
self.assertEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| -3,996,707,161,918,341,600 | 32.752066 | 79 | 0.718413 | false |
svensaeger/Hatschi
|
wetterstation2.py
|
1
|
1364
|
from tkinter import *
import serial
import time
def ausgeben(daten):
datenEinzeln = daten.split()
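    # NOTE: the values split from the serial line (datenEinzeln) are not yet
    # wired to the labels below; the '0' placeholders are displayed instead.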
root = Tk()
root.attributes("-fullscreen", True)
root.config(bg = "light green")
frameo = Frame(root)
frameu = Frame(root, bg = "light green")
temp = "0"
luftfeu = "0"
luftdruck = "0"
gas = "0"
regen = "0"
Label(frameo,
text="Die Daten der Hatschi Wetterstation:",
fg = "green",
bg = "dark blue",
font = "Times 50").pack()
Label(frameu,
text="Temperatur: " + temp,
fg = "green",
bg = "yellow",
font = "Times 50").pack()
Label(frameu,
text="Luftfeuchtigkeit: " + luftfeu,
fg = "green",
bg = "red",
font = "Times 50").pack()
Label(frameu,
text="Luftdruck: " + luftdruck,
fg = "green",
bg = "light blue",
font = "Times 50").pack()
Label(frameu,
text="Gas: " + gas,
fg = "green",
bg = "blue",
font = "Times 50").pack()
Label(frameu,
text="Regen: " + regen,
fg = "green",
bg = "yellow",
font = "Times 50").pack()
frameo.pack()
frameu.pack(pady = 20)
root.mainloop()
s = serial.Serial('/dev/ttyACM0', 9600)
try:
s.open()
except serial.serialutil.SerialException:
s.close()
s.open()
time.sleep(5)
try:
while True:
response = s.readline()
ausgeben(response)
except KeyboardInterrupt:
s.close()
|
gpl-3.0
| -2,927,763,637,541,461,000 | 14.678161 | 47 | 0.57478 | false |
MrHamdulay/daily-problems
|
yaseen/problem-3-binary-search.py
|
1
|
1162
|
class Node:
def __init__(self, value):
self.value = float(value)
self.parent = None
self.children = []
def addChild(self, child):
self.children.append(child)
def __repr__(self):
return '<value: %s children: (%s)>' % (self.value, len(self.children))
@property
def right(self):
return self.children[1] if len(self.children) >= 2 else None
@property
def left(self):
return self.children[0] if len(self.children) >= 1 else None
input = '''
1 0
1 2
0 -10
2 1.5
'''
nodes = {'1': Node('1')}
for line in input.split('\n'):
if line:
parent, value = line.split()
if value in nodes:
node = nodes[value]
else:
node = Node(value)
nodes[value] = node
node.parent = nodes[parent]
nodes[parent].addChild(node)
def isBinarySearchTree(node, left=-1e10, right=1e10):
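    # Each node's value must lie within [left, right]; recursing left tightens
    # the upper bound to the node's value, recursing right tightens the lower bound.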
if node.value < left or node.value > right:
return False
if node.left and not isBinarySearchTree(node.left, left, min(node.value, right)):
return False
if node.right and not isBinarySearchTree(node.right, max(node.value, left), right):
return False
return True
print isBinarySearchTree(nodes['1'])
|
mit
| 3,047,727,185,043,517,000 | 20.924528 | 85 | 0.641997 | false |
beihaiguaishou/HackRunningGo-SC
|
HaRunGo.py
|
1
|
10445
|
import fileinput
import requests
import json
import base64
import random
import datetime
import re
import time
import hashlib
from Phone import *
import uuid
import codecs
# Globle Var
file1 = open('route.data')
routes = file1.readlines()
file1.close()
file2 = codecs.open('tp.data', 'r', 'utf-8')
tps = file2.readlines()
file2.close()
#tots = []
#for route in routes:
# times = re.findall(r'\\\"totalTime\\\"\:(\d+)', route)
# t = times[len(times) - 1]
# tots.append(int(t))
# print tots
tot_cnt = len(routes)
def base16encode(username):
return str(base64.b16encode(username))
def base64encode(username, pwd):
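    # Builds an HTTP Basic auth header value, e.g. (illustrative)
    # base64encode('alice', 'secret') -> 'Basic ' + b64encode('alice:secret').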
list = [username, pwd]
sign = ':'
strr = sign.join(list)
return "Basic " + str(base64.b64encode(strr))
#def virtualDevicedId(username):
# fi = base16encode(username)
# la = username[1:]
# id = fi + la
# res = "%s-%s-%s-%s-%s" % (id[0:8], id[8:12], id[12:16], id[16:20], id[20:])
# return res
#def virtualCustomDeviceId(username):
# return virtualDevicedId(username) + "_iOS_sportsWorld_campus"
def selectRoute():
return int(random.uniform(0, tot_cnt - 1))
def datetime_to_timestamp_in_milliseconds(d):
return int(time.mktime(d.timetuple()) * 1000)
#def format(data, totTime):
# data = str(data)
# res = re.findall(r'\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}', data)
# startTime = res[0]
# startDate = startTime[0:10]
# dateToday = datetime.date.today()
# newData = data.replace(startDate, str(dateToday))
# startTimeDtObj = datetime.datetime.now() + datetime.timedelta(seconds = -int(totTime))
# endTimeDtObj = startTimeDtObj + datetime.timedelta(seconds = int(totTime))
# startTimeDtObj = datetime.datetime.strptime(startTime, "%Y-%m-%d %H:%M:%S")
# startTimeTiObj = time.strptime(startTime, "%Y-%m-%d %H:%M:%S")
#st = datetime_to_timestamp_in_milliseconds(startTimeDtObj)
#et = datetime_to_timestamp_in_milliseconds(endTimeDtObj)
# newData = data.replace(str(dataDate), str(data_today))
#res = re.findall(r'\d{13}', newData)
#newData = newData.replace(res[0], str(st))
# print("new data: " + newData)
# print("totTime: " + str(totTime))
# print("start: " + str(st))
# print("end: " + str(et))
#return str(newData), int(st), int(et)
def login(username, pwd):
url = 'http://gxapp.iydsj.com/api/v9/login'
headers = {
"Host": "gxapp.iydsj.com",
"Accept": "application/json",
"Authorization": base64encode(username, pwd),
"osType": "0",
"Content-Type": "application/json",
"DeviceId": getDeviceId(),
"CustomDeviceId": getCustomDeviceId(),
"User-Agent": "Dalvik/2.1.0 (Linux; U; Android 5.0; SM-N9002 Build/LRX21V)",
"appVersion": "1.3.10",
"timeStamp": str(int(time.time()*1000))
}
Session = requests.Session()
data = {
"device_model":getDeviceModel(),
"imei":getImei(),
"loginType":1,
"mac_address":getMacAdress(),
"os_version":"0"
}
Request = Session.post(url, headers = headers, data = json.dumps(data))
reqData = Request.content
print ('login response: ' + reqData)
dicData = json.loads(reqData)
return dicData['data']
def dataUpload(userInfo):
url = 'http://gxapp.iydsj.com/api/v10/runnings/add_record'
timeStamp = str(int(time.time()*1000))
dic = {
'uid':userInfo['uid'],
'token':userInfo['token'],
'timeStamp':timeStamp
}
headers = {
"Host": "gxapp.iydsj.com",
"Accept": "application/json",
"osType": "0",
"Content-Type": "application/json",
"deviceName": getDeviceModel(),
"osType": "0",
"osVersion": "1.3.10",
"DeviceId": getDeviceId(),
"CustomDeviceId": getCustomDeviceId(),
"User-Agent": "Dalvik/2.1.0 (Linux; U; Android 5.0; SM-N9002 Build/LRX21V)",
"appVersion":"1.3.10",
"uid":str(dic['uid']),
"token":dic['token'],
"tokenSign":digestDict(dic),
"timeStamp":dic['timeStamp']
}
#index = 0
#while index == 0:
index = selectRoute()
print ("Use " + str(index) + " data")
alllocjson = json.loads(routes[index])
fivepointjson = json.loads(tps[index])
allloc = json.loads(alllocjson['allLocJson'])
fivepoint = json.loads(fivepointjson['fivePointJson'])
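    # Shift the recorded route so it appears to have just finished: newflag is
    # the new start time (now minus the recorded duration) and timedelta below
    # offsets every point's gainTime by the same amount.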
oldflag = allloc[0]['flag']
totaltime = allloc[len(allloc)-1]['totalTime']
newflag = int(time.time()*1000) - totaltime*1000
delta = newflag-oldflag
timedelta = datetime.timedelta(days = int(delta/86400000), seconds = int(delta/1000)%86400, microseconds = delta%1000)
speedid = int(random.uniform(0, 250))
stepid = int(random.uniform(0, 250))
currentdis = 0.0
currenttime = newflag
allstep = []
allspeed = []
for i in fivepoint:
i['flag'] = newflag
#i['pointName'] = 'gogogo'
for i in allloc:
i['flag'] = newflag
oldtime = datetime.datetime.strptime(i['gainTime'],'%Y-%m-%d %H:%M:%S')
newtime = oldtime + timedelta
#print newtime
endtime = datetime_to_timestamp_in_milliseconds(newtime)
distance = float(i['totalDis']) - currentdis
currentdis = float(i['totalDis'])
i['gainTime'] = newtime.strftime('%Y-%m-%d %H:%M:%S')
step = {
"avgDiff": random.uniform(12, 14),
"beginTime": currenttime,
"endTime": endtime,
"flag": newflag,
"id": stepid,
"maxDiff": random.uniform(15, 20),
"minDiff": random.uniform(8, 10),
"stepsNum": int(distance/0.8)
}
allstep.append(step)
speed = {
"beginTime": currenttime,
"distance": distance,
"endTime": endtime,
"flag": newflag,
"id": speedid
}
allspeed.append(speed)
currenttime = endtime
speedid += 1
stepid += 1
# thisdata, st, et = format(routes[index], tots[index])
# print thisdata
# totDisA = re.findall(r'\\\"totalDis\\\"\:\\\"(\d+.\d+)\\\"', thisdata)
# totDis = float(totDisA[len(totDisA) - 1]) / 1000
# print totDis, tots[index]
# speed = random.uniform(5, 7)
# print speed
# speed_str = "%.2f" % (speed)
# totDis_str = "%.2f" % (totDis)
# print speed_str
# print totDis_str
alllocjson['allLocJson'] = json.dumps(allloc)
fivepointjson['fivePointJson'] = json.dumps(fivepoint, ensure_ascii=False)
postjson = {
"allLocJson": json.dumps(alllocjson),
"sportType": 1,
"totalTime": totaltime,
"totalDis": int(currentdis),
"speed": int(1000/(currentdis/totaltime)/60*1000),
"startTime": newflag,
"stopTime": currenttime,
"fivePointJson": json.dumps(fivepointjson, ensure_ascii=False),
"complete": True,
"selDistance": 1,
"unCompleteReason": 0,
"getPrize": False,
"status": 0,
"uid": userInfo['uid'],
"avgStepFreq": int(currentdis/1.2/totaltime*60),
"totalSteps": int(currentdis/1.2),
"selectedUnid": userInfo['unid'],
"uuid": str(uuid.uuid1())
}
signature = digestDict(postjson)
postjson['signature'] = signature
postjson['isUpload'] = False
postjson['more'] = True
postjson['roomId'] = 0
postjson['speedPerTenSec'] = allspeed
postjson['stepsPerTenSec'] = allstep
# print json.dumps(postjson)
# print signature
Session = requests.Session()
Request = Session.post(url, headers = headers, data=json.dumps(postjson))
print ('upload response: ' + Request.content)
def logout(userInfo):
url = 'http://gxapp.iydsj.com/api/v6/user/logout'
timeStamp = str(int(time.time()*1000))
dic = {
'uid':userInfo['uid'],
'token':userInfo['token'],
'timeStamp':timeStamp
}
headers = {
"Host": "gxapp.iydsj.com",
"Accept": "application/json",
"osType": "0",
"Content-Type": "application/json",
"DeviceId": getDeviceId(),
"CustomDeviceId": getCustomDeviceId(),
"User-Agent": "Dalvik/2.1.0 (Linux; U; Android 5.0; SM-N9002 Build/LRX21V)",
"appVersion":"1.3.10",
"uid":str(dic['uid']),
"token":dic['token'],
"tokenSign":digestDict(dic),
"timeStamp":dic['timeStamp']
}
# print headers
Session = requests.Session()
Request = Session.post(url, headers = headers)
print ('logout response: ' + Request.content)
def digestDict(dic):
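    # Builds the request signature: sort the keys, concatenate 'key=value&'
    # pairs (booleans rendered as 'true'/'false'), append the salt
    # 'wh2016_swcampus', then MD5 the string character by character, keeping
    # only the low byte of any non-ASCII code point.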
keys = dic.keys()
keys.sort()
digeststr = u''
for key in keys:
if not isinstance(dic[key],bool):
digeststr = digeststr+unicode(key)+u'='+unicode(dic[key])+u'&'
else:
if dic[key]:
digeststr = digeststr+unicode(key)+u'='+u'true'+u'&'
else:
digeststr = digeststr+unicode(key)+u'='+u'false'+u'&'
digeststr+=u'wh2016_swcampus'
md5 = hashlib.md5()
#digeststr = digeststr.encode('utf-8')
length = len(digeststr)
count = 0
while count<length:
if not ord(digeststr[count])<=0x7F:
#md5.update(digeststr[count+2])
codepoint = ord(digeststr[count])
lowbyte = codepoint - ((codepoint >>8 ) << 8)
md5.update(chr(lowbyte))
count+=1
else:
md5.update(digeststr[count])
count+=1
return md5.hexdigest()
# charArray.append
def writeByData():
file = open('user.data', 'r')
# line = file.readlines()
line = []
for l in open('user.data'):
l = file.readline()
if l != '\n':
line.append(l.strip('\n'))
# print line
# for l in line:
# user, pwd = l.split(' ')
# print (base64encode(user, pwd))
print line
file.close()
return line
def main():
users = writeByData()
# index = selectRoute()
# format(routes[index], 100)
for u in users:
username, password = u.split(' ')
print username, password
print "Start : %s" % time.ctime()
userInfo = login(username, password)
try:
dataUpload(userInfo)
finally:
logout(userInfo)
sleeptime = random.randint(20, 120)
print "Sleep %d seconds" % sleeptime
time.sleep(sleeptime)
if __name__== '__main__':
main()
|
mit
| 6,853,569,064,474,251,000 | 29.811209 | 122 | 0.578076 | false |
wangjiaxi/django-dynamic-forms
|
dynamic_forms/models.py
|
2
|
8474
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from collections import OrderedDict
from django.core.urlresolvers import reverse
from django.db import models
from django.db.transaction import atomic
from django.template.defaultfilters import slugify
from django.utils.crypto import get_random_string
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import format_html, format_html_join
from django.utils.translation import ugettext_lazy as _
from dynamic_forms.actions import action_registry
from dynamic_forms.conf import settings
from dynamic_forms.fields import TextMultiSelectField
from dynamic_forms.formfields import formfield_registry
@python_2_unicode_compatible
class FormModel(models.Model):
name = models.CharField(_('Name'), max_length=50, unique=True)
submit_url = models.CharField(_('Submit URL'), max_length=100, unique=True,
help_text=_('The full URL path to the form. It should start '
'and end with a forward slash (<code>/</code>).'))
success_url = models.CharField(_('Success URL'), max_length=100,
help_text=_('The full URL path where the user will be '
'redirected after successfully sending the form. It should start '
'and end with a forward slash (<code>/</code>). If empty, the '
'success URL is generated by appending <code>done/</code> to the '
'“Submit URL”.'), blank=True, default='')
actions = TextMultiSelectField(_('Actions'), default='',
choices=action_registry.get_as_choices())
form_template = models.CharField(_('Form template path'), max_length=100,
default='dynamic_forms/form.html',
choices=settings.DYNAMIC_FORMS_FORM_TEMPLATES)
success_template = models.CharField(_('Success template path'),
max_length=100, default='dynamic_forms/form_success.html',
choices=settings.DYNAMIC_FORMS_SUCCESS_TEMPLATES)
allow_display = models.BooleanField(_('Allow display'), default=False,
help_text=_('Allow a user to view the input at a later time. This '
'requires the “Store in database” action to be active. The sender '
'will be given a unique URL to recall the data.'))
recipient_email = models.EmailField(_('Recipient email'), blank=True,
null=True, help_text=_('Email address to send form data.'))
class Meta:
ordering = ['name']
verbose_name = _('Dynamic form')
verbose_name_plural = _('Dynamic forms')
def __str__(self):
return self.name
def get_fields_as_dict(self):
"""
        Returns an ``OrderedDict`` (a ``SortedDict`` when ``OrderedDict`` is not
        available) with all fields associated with this form, where their name
is the key and their label is the value.
"""
return OrderedDict(self.fields.values_list('name', 'label').all())
def save(self, *args, **kwargs):
"""
Makes sure that the ``submit_url`` and -- if defined the
``success_url`` -- end with a forward slash (``'/'``).
"""
if not self.submit_url.endswith('/'):
self.submit_url = self.submit_url + '/'
if self.success_url:
if not self.success_url.endswith('/'):
self.success_url = self.success_url + '/'
else:
self.success_url = self.submit_url + 'done/'
super(FormModel, self).save(*args, **kwargs)
@python_2_unicode_compatible
class FormFieldModel(models.Model):
parent_form = models.ForeignKey(FormModel, on_delete=models.CASCADE,
related_name='fields')
field_type = models.CharField(_('Type'), max_length=255,
choices=formfield_registry.get_as_choices())
label = models.CharField(_('Label'), max_length=255)
name = models.SlugField(_('Name'), max_length=50, blank=True)
_options = models.TextField(_('Options'), blank=True, null=True)
position = models.SmallIntegerField(_('Position'), blank=True, default=0)
class Meta:
ordering = ['parent_form', 'position']
unique_together = ("parent_form", "name",)
verbose_name = _('Form field')
verbose_name_plural = _('Form fields')
def __str__(self):
return _('Field “%(field_name)s” in form “%(form_name)s”') % {
'field_name': self.label,
'form_name': self.parent_form.name,
}
def generate_form_field(self, form):
field_type_cls = formfield_registry.get(self.field_type)
field = field_type_cls(**self.get_form_field_kwargs())
field.contribute_to_form(form)
return field
def get_form_field_kwargs(self):
kwargs = self.options
kwargs.update({
'name': self.name,
'label': self.label,
})
return kwargs
@property
def options(self):
"""Options passed to the form field during construction."""
if not hasattr(self, '_options_cached'):
self._options_cached = {}
if self._options:
try:
self._options_cached = json.loads(self._options)
except ValueError:
pass
return self._options_cached
@options.setter
def options(self, opts):
if hasattr(self, '_options_cached'):
del self._options_cached
self._options = json.dumps(opts)
def save(self, *args, **kwargs):
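        # Derive a missing name from the label and drop any stored options that
        # the selected field type does not accept.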
if not self.name:
self.name = slugify(self.label)
given_options = self.options
field_type_cls = formfield_registry.get(self.field_type)
invalid = set(self.options.keys()) - set(field_type_cls._meta.keys())
if invalid:
for key in invalid:
del given_options[key]
self.options = given_options
super(FormFieldModel, self).save(*args, **kwargs)
@python_2_unicode_compatible
class FormModelData(models.Model):
form = models.ForeignKey(FormModel, on_delete=models.SET_NULL,
related_name='data', null=True)
value = models.TextField(_('Form data'), blank=True, default='')
submitted = models.DateTimeField(_('Submitted on'), auto_now_add=True)
display_key = models.CharField(_('Display key'), max_length=24, null=True,
blank=True, db_index=True, default=None, unique=True,
help_text=_('A unique identifier that is used to allow users to view '
'their sent data. Unique over all stored data sets.'))
class Meta:
verbose_name = _('Form data')
verbose_name_plural = _('Form data')
def __str__(self):
return _('Form: “%(form)s” on %(date)s') % {
'form': self.form,
'date': self.submitted,
}
def save(self, *args, **kwargs):
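        # Generate a unique random 24-character display key on first save when
        # the parent form allows public display of submitted data.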
with atomic():
if self.form.allow_display and not self.display_key:
dk = get_random_string(24)
while FormModelData.objects.filter(display_key=dk).exists():
dk = get_random_string(24)
self.display_key = dk
super(FormModelData, self).save(*args, **kwargs)
@property
def json_value(self):
return OrderedDict(sorted(json.loads(self.value).items()))
def pretty_value(self):
try:
value = format_html_join('',
'<dt>{0}</dt><dd>{1}</dd>',
(
(force_text(k), force_text(v))
for k, v in self.json_value.items()
)
)
return format_html('<dl>{0}</dl>', value)
except ValueError:
return self.value
pretty_value.allow_tags = True
@property
def show_url(self):
"""
If the form this data set belongs to has
:attr:`~FormModel.allow_display` ``== True``, return the permanent URL.
If displaying is not allowed, return an empty string.
"""
if self.form.allow_display:
return reverse('dynamic_forms:data-set-detail',
kwargs={'display_key': self.display_key})
return ''
@property
def show_url_link(self):
"""
Similar to :attr:`show_url` but wraps the display key in an `<a>`-tag
linking to the permanent URL.
"""
if self.form.allow_display:
return format_html('<a href="{0}">{1}</a>', self.show_url, self.display_key)
return ''
|
bsd-3-clause
| -1,659,677,495,397,635,600 | 37.60274 | 88 | 0.602792 | false |
google/makani
|
config/m600/tether.py
|
1
|
1922
|
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tether parameters."""
from makani.config import mconfig
from makani.control import system_types
@mconfig.Config(deps={
'flight_plan': 'common.flight_plan',
'gs_model': 'base_station.gs_model',
})
def MakeParams(params):
if (params['gs_model'] == system_types.kGroundStationModelGSv2
and params['flight_plan'] == system_types.kFlightPlanHoverInPlace):
length = 80.0
else:
length = 425.8
# The following properties pertain to tether FC1-02 installed
# for RPX-08. See b/70513834 for references.
return {
# Tether length [m] under zero load.
'length': length,
# Linear density [kg/m].
'linear_density': 0.917,
# Tether outer diameter [m].
'outer_diameter': 0.0294,
# Tensile stiffness, EA, [N] of the tether core.
'tensile_stiffness': 18e6,
# Bending stiffness, EI, [N*m**2] of the tether.
'bending_stiffness': 35.0,
# Cross-sectional drag coefficient [#].
'section_drag_coeff': 0.7,
# Distance [m] from the GSG elevation pin to the tether termination pin
# on the top hat.
#
# Eli notes that the distance from the elevation pin to the nose of the
# GSG is more like 1.2m, which is roughly the point at which the tether
# can start bending.
'gsg_ele_to_termination': 0.712,
}
|
apache-2.0
| -8,597,662,665,080,379,000 | 31.576271 | 77 | 0.680021 | false |
fenceFoil/canopto
|
tween_test_pygame.py
|
1
|
4093
|
#!/usr/bin/python
# M.E.Farmer 2013
# demo for tween library
# showing integration with PyGame
# moves text from random points using various tweens
# changes from random color to random color using the same tween
# Mouse click rotates through tweens and ESC closes demo
import sys
import pygame
import random
import tween
# a higher number means slower transitions
# stall/fps = seconds per transition
stall=offset = 60
FPS = 60
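# Editorial note (not part of the original script): with stall == offset == 60
# and FPS == 60, each transition lasts 60 / 60 = 1.0 second.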
BACKGROUND_COLOR = (0,0,0)
size = width, height = (800,600)
text_pos = (0,0)
text_color = (0,128,0)
tweens = [
(tween.easeLinear,"easeLinear"),
(tween.easeInQuad,"easeInQuad"),
(tween.easeInOutQuad,"easeInOutQuad"),
(tween.easeOutQuad,"easeOutQuad"),
(tween.easeInCubic,"easeInCubic"),
(tween.easeInOutCubic,"easeInOutCubic"),
(tween.easeOutCubic,"easeOutCubic"),
(tween.easeInQuartic,"easeInQuartic"),
(tween.easeInOutQuartic,"easeInOutQuartic"),
(tween.easeOutQuartic,"easeOutQuartic"),
(tween.easeInQuintic,"easeInQuintic"),
(tween.easeInOutQuintic,"easeInOutQuintic"),
(tween.easeOutQuintic,"easeOutQuintic"),
(tween.easeInSine,"easeInSine"),
(tween.easeInOutSine,"easeInOutSine"),
(tween.easeOutSine,"easeOutSine"),
(tween.easeInExpo,"easeInExpo"),
(tween.easeInOutExpo,"easeInOutExpo"),
(tween.easeOutExpo,"easeOutExpo"),
(tween.easeInCirc,"easeInCirc"),
(tween.easeInOutCirc,"easeInOutCirc"),
(tween.easeOutCirc,"easeOutCirc"),
(tween.easeInElasticBig,"easeInElasticBig"),
(tween.easeOutElasticBig,"easeOutElasticBig"),
(tween.easeInElasticSmall,"easeInElasticSmall"),
(tween.easeOutElasticSmall,"easeOutElasticSmall"),
(tween.easeLoop,"easeLoop"),
(tween.easeInchWorm,"easeInchWorm"),
(tween.customTween(
"b+c*(26.65*tc*ts + -91.5925*ts*ts + 115.285*tc + -62.89*ts + 13.5475*t)"),
"customTween")
]
# setup the intial tween
tween_index = 0
ease_func,text_displayed = tweens[tween_index]
pygame.init()
screen = pygame.display.set_mode(size,pygame.FULLSCREEN)
FPSTICKER = pygame.time.Clock()
font = pygame.font.SysFont("comicsansms",65)
text = font.render(text_displayed, True, text_color)
while True:
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
if tween_index == len(tweens)-1:
tween_index=0
else:
tween_index+=1
ease_func,text_displayed = tweens[tween_index]
# set our stall counter to change the tween on next check
stall = offset
elif event.type == pygame.QUIT or (event.type == pygame.KEYDOWN
and event.key == pygame.K_ESCAPE):
sys.exit()
screen.fill(BACKGROUND_COLOR)
# the pygame clock runs faster than we want to update
# our tweens so we just stall for a few cycles then
# update and reset our counter
stall+=1
if stall >= offset:
stall=0
old_pos = text_pos
text_pos = (random.randint(1,width),random.randint(1,height))
# set a new tween function for the coordinates
xy_out = tween.xyTween(ease_func,old_pos,text_pos,offset,False,True)
##x_out = tween.tween(tween.easeLoop,old_pos[0],text_pos[0],offset,False,True)
##y_out = tween.tween(tween.easeInElasticSmall,old_pos[1],text_pos[1],offset,False,True)
old_color = text_color
text_color = (random.randint(1,255),random.randint(1,255),random.randint(1,255))
# set a new tween function for the text colors
color_out = tween.colorTween(ease_func,old_color,text_color,offset,False,True)
# every frame we just call .next() and the tween does the work
text = font.render(text_displayed, True, (color_out.next()))
screen.blit(text, xy_out.next())
##screen.blit(text, (x_out.next(),y_out.next()))
pygame.display.flip()
FPSTICKER.tick(FPS)
|
bsd-3-clause
| -5,670,639,753,451,498,000 | 38.737864 | 96 | 0.638651 | false |
alexakarpov/python-practice
|
primes.py
|
1
|
1815
|
#!/usr/bin/env python3
from math import sqrt, ceil
import unittest
DEFAULT_NUM_OF_PRIMES = 10
def get_n_primes(n=DEFAULT_NUM_OF_PRIMES):
    def is_prime(num):
        # 1 is not prime; 2 is the smallest prime.
        if num < 2:
            return False
        if num == 2:
            return True
        for i in range(2, ceil(sqrt(num))+1):
            if num % i == 0:
                return False
        return True
result = []
candidate = 2
while len(result) < n:
if is_prime(candidate):
result.append(candidate)
candidate += 1
return result
def print_multiplication_table(top, side):
# how wide is the largest number in the table
digits = len(str(top[-1] * side[-1]))
# how wide should the side (left) column be?
side_width = len(str(side[-1]))
# build and print the table header
head_str = " " * (side_width+1)
for n in top:
head_str += str(n).rjust(digits+1)
print(head_str)
print(" " * side_width + "_" * len(head_str))
# now build and print every row
for i in range(0, len(side)): # i is the row index
# takes care of the side 'prefix'
row_string = ("%d" % (side[i],)).rjust(side_width) + "|"
for j in range(0, len(top)):
row_string += str(top[j]*side[i]).rjust(digits+1)
print(row_string)
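# Editorial sketch (not part of the original script): for example,
# print_multiplication_table([2, 3], [5, 7]) prints a table like
#        2  3
#     ________
#    5| 10 15
#    7| 14 21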
class InterviewProblemsTest(unittest.TestCase):
def test_get_n_primes(self):
assert([2, 3, 5, 7, 11, 13, 17, 19, 23, 29] == get_n_primes())
    # not really proper tests, other than making sure we handle the edge case and don't crash
def test_print_table_single(self):
col = row = get_n_primes(1)
print_multiplication_table(row, col)
def test_print_table(self):
col = [1,2,3,4,5]
row = [6,7,8]
print_multiplication_table(row, col)
if __name__ == '__main__':
unittest.main()
|
mit
| 1,045,455,945,428,647,200 | 26.923077 | 93 | 0.571901 | false |
ianjuma/ubuntu-git-notify
|
app/github-notif.py
|
1
|
2513
|
#!/usr/bin/python
import sys
import pynotify
capabilities = {'actions': False,
'body': False,
'body-hyperlinks': False,
'body-images': False,
'body-markup': False,
'icon-multi': False,
'icon-static': False,
'sound': False,
'image/svg+xml': False,
'private-synchronous': False,
'append': False,
'private-icon-only': False}
def mainWindow():
pass
def notificWin():
pass
def initCaps():
caps = pynotify.get_server_caps()
if caps is None:
print "Failed to receive server caps."
sys.exit(1)
for cap in caps:
capabilities[cap] = True
def printCaps ():
info = pynotify.get_server_info ()
print "Name: " + info["name"]
print "Vendor: " + info["vendor"]
print "Version: " + info["version"]
print "Spec. Version: " + info["spec-version"]
caps = pynotify.get_server_caps ()
if caps is None:
print "Failed to receive server caps."
sys.exit(1)
print "Supported capabilities/hints:"
    if capabilities['actions']:
        print "\tactions"
    if capabilities['body']:
        print "\tbody"
    if capabilities['body-hyperlinks']:
        print "\tbody-hyperlinks"
    if capabilities['body-images']:
        print "\tbody-images"
    if capabilities['body-markup']:
        print "\tbody-markup"
    if capabilities['icon-multi']:
        print "\ticon-multi"
    if capabilities['icon-static']:
        print "\ticon-static"
    if capabilities['sound']:
        print "\tsound"
    if capabilities['image/svg+xml']:
        print "\timage/svg+xml"
    if capabilities['private-synchronous']:
        print "\tprivate-synchronous"
    if capabilities['append']:
        print "\tappend"
    if capabilities['private-icon-only']:
        print "\tprivate-icon-only"
    print "Notes:"
    if info["name"] == "notify-osd":
        print "\tx- and y-coordinates hints are ignored"
        print "\texpire-timeout is ignored"
        print "\tbody-markup is accepted but filtered"
    else:
        print "\tnone"
if __name__ == '__main__':
if not pynotify.init ("icon-summary"):
sys.exit(1)
    # call this so we can safely use capabilities dictionary later
initCaps ()
# show what's supported
printCaps ()
# try the icon-summary case
n = pynotify.Notification ("WiFi connection lost",
"",
"notification-network-wireless-disconnected")
n.show ()
|
mit
| 5,087,499,934,471,996,000 | 25.177083 | 66 | 0.585356 | false |
blaiseli/p4-phylogenetics
|
p4/tree_optsim.py
|
1
|
19918
|
import sys
import string
import types
import cStringIO
import math
import copy
import os
import func
import time
import glob
from var import var
from p4exceptions import P4Error
from node import Node, NodeBranch, NodePart, NodeBranchPart
import nexustoken
from distancematrix import DistanceMatrix
import numpy
import pf
from model import Model
from data import Data
from alignment import Part
import random
if True:
def __del__(self, freeTree=pf.p4_freeTree, freeNode=pf.p4_freeNode, mysys=sys):
#mysys.stdout.write('Tree.__del__() here.\n')
# mysys.stdout.flush()
# Refers to nodes, which causes grief.
if hasattr(self, "splitKeyHash"):
del(self.splitKeyHash)
self._data = None
# self._model = None # model is needed for freeNode()
# If this is not here, then nodes tend to hang around forever ...
if 1:
for n in self.nodes:
n.wipe()
for n in self.nodes:
if n.cNode:
#mysys.stdout.write(' Tree.__del__(), freeing node %i\n' % n.nodeNum)
# mysys.stdout.flush()
freeNode(n.cNode)
n.cNode = None
for n in self.nodes:
del(n)
self.root = None
self.nodes = None
if self.cTree:
if self.doDataPart:
dp_freeTree(self.cTree)
else:
freeTree(self.cTree)
self.cTree = None
#mysys.stdout.write('Tree.__del__() finished.\n')
# mysys.stdout.flush()
def deleteCStuff(self):
"""Deletes c-pointers from nodes, self, and model, but not the data."""
# print 'Tree.deleteCStuff() here.'
for n in self.nodes:
if n.cNode:
# print ' about to free node %i, cNode %s' % (n.nodeNum,
# n.cNode)
pf.p4_freeNode(n.cNode)
n.cNode = 0
if self.cTree:
# print ' about to free cTree'
pf.p4_freeTree(self.cTree)
self.cTree = 0
# I need to delay deleting the cModel until after deleting the
# self.cStuff, because free-ing self.cStuff (eg nodes)
# requires the cModel.
if self.model and self.model.cModel:
# print ' about to free cModel'
pf.p4_freeModel(self.model.cModel)
self.model.cModel = 0
def _allocCStuff(self, resetEmpiricalComps=True):
"""Allocate c-memory for self and its nodes."""
gm = ['Tree._allocCStuff()']
# Make sure the nodeNums go from zero to N-1
for i in range(len(self.nodes)):
if self.nodes[i].nodeNum != i:
gm.append(
"Programming error: Problem with node number %i." % i)
gm.append("Nodes should be numbered consecutively from zero.")
raise P4Error(gm)
self.modelSanityCheck(resetEmpiricalComps=resetEmpiricalComps)
if not self.data.cData:
self.data._setCStuff()
if not self.model.cModel:
self.model.allocCStuff()
if var.doDataPart:
# print 'about to dp_newTree'
self.cTree = pf.dp_newTree(len(self.nodes), self.preOrder,
self.postOrder, self.data.cData, self.model.cModel)
self.doDataPart = 1
if not self.cTree:
gm.append("Unable to allocate a cTree")
raise P4Error(gm)
for n in self.nodes:
n.doDataPart = 1
# print 'about to dp_newNode (%i)' % n.nodeNum
cNode = pf.dp_newNode(
n.nodeNum, self.cTree, n.seqNum, n.isLeaf)
if not cNode:
gm.append("Unable to allocate a cNode.")
raise P4Error(gm)
n.cNode = cNode
else:
nLeaves = 0
for n in self.nodes:
if n.isLeaf:
nLeaves += 1
self.partLikes = numpy.zeros(self.model.nParts, numpy.float)
self.cTree = pf.p4_newTree(len(self.nodes), nLeaves, self.preOrder,
self.postOrder, self.partLikes, self.data.cData, self.model.cModel)
if not self.cTree:
gm.append("Unable to allocate a cTree")
raise P4Error(gm)
for i in range(len(self.nodes)):
n = self.nodes[i]
if i in self.preOrder:
inTree = 1
else:
inTree = 0
# We include the inTree as a flag for whether the node
# is in the tree or not. If the inTree flag is 0,
# then the node is not actually part of the tree, and so
# clNeedsUpdating is turned off.
n.cNode = pf.p4_newNode(
n.nodeNum, self.cTree, n.seqNum, n.isLeaf, inTree)
if not n.cNode:
gm.append("Unable to allocate a cNode")
raise P4Error(gm)
# print "finished Tree._allocCStuff()"
def setCStuff(self):
"""Transfer info about self to c-language stuff.
Transfer relationships among nodes, the root position, branch
lengths, model usage info (ie what model attributes apply to what
nodes), and pre- and post-order."""
#gm = ['Tree.setCStuff()']
# Set node relations, br.len, root, node modelNums, preOrder?,
# postOrder
# Set relations- parent, leftChild, sibling. Here's the code for
# pf.p4_setRelative(int theCNode, int relation, int relNum)
# parent- relation = 0, leftChild- relation = 1, sibling- relation
# = 2
for n in self.nodes:
if n.parent:
pf.p4_setNodeRelation(n.cNode, 0, n.parent.nodeNum)
else:
pf.p4_setNodeRelation(n.cNode, 0, -1) # "-1" gives NULL
if n.leftChild:
pf.p4_setNodeRelation(n.cNode, 1, n.leftChild.nodeNum)
else:
pf.p4_setNodeRelation(n.cNode, 1, -1)
if n.sibling:
pf.p4_setNodeRelation(n.cNode, 2, n.sibling.nodeNum)
else:
pf.p4_setNodeRelation(n.cNode, 2, -1)
# Root
pf.p4_setTreeRoot(self.cTree, self.root.cNode)
# br.lens
for n in self.iterNodesNoRoot():
#pf.p4_setBrLen(n.cNode, n.br.len, n.br.lenChanged)
pf.p4_setBrLen(n.cNode, n.br.len)
# Model usage info
if self.model.isHet:
for pNum in range(self.model.nParts):
if self.model.parts[pNum].isHet:
# print "setCStuff(). about to setCompNum"
for n in self.nodes:
pf.p4_setCompNum(n.cNode, pNum, n.parts[pNum].compNum)
if n != self.root:
pf.p4_setRMatrixNum(
n.cNode, pNum, n.br.parts[pNum].rMatrixNum)
pf.p4_setGdasrvNum(
n.cNode, pNum, n.br.parts[pNum].gdasrvNum)
# pre- and postOrder
if not self.preAndPostOrderAreValid:
self.setPreAndPostOrder()
# for i in range(len(self.nodes)):
# pf.p4_setPreAndPostOrder(self.cTree, i, self.preOrder[i],
# self.postOrder[i]) # no longer needed
# print "finished Tree.setCStuff()"
def _commonCStuff(self, resetEmpiricalComps=True):
"""Allocate and set c-stuff, and setPrams."""
if not self.data:
if self.name:
gm = ["Tree %s (_commonCStuff)" % self.name]
else:
gm = ["Tree (_commonCStuff)"]
gm.append(
"This tree has no data attached. Before doing an optimization, likelihood")
gm.append(
"calculation, or simulation, you need to do something like this:")
gm.append(" theTree.data = theData")
raise P4Error(gm)
# print "self.cTree = %s" % self.cTree
if not self.cTree:
# This calls self.modelSanityCheck(), which calls
# self.setEmpiricalComps()
self._allocCStuff(resetEmpiricalComps=resetEmpiricalComps)
# print "About to self.model.setCStuff()"
self.model.setCStuff()
# print "About to self.setCStuff()"
self.setCStuff()
# print "about to p4_setPrams()..."
pf.p4_setPrams(self.cTree, -1) # "-1" means do all parts
def calcLogLike(self, verbose=1, resetEmpiricalComps=True):
"""Calculate the likelihood of the tree, without optimization."""
self._commonCStuff(resetEmpiricalComps=resetEmpiricalComps)
# print "about to p4_treeLogLike()..."
# second arg is getSiteLikes
self.logLike = pf.p4_treeLogLike(self.cTree, 0)
if verbose:
print "Tree.calcLogLike(). %f" % self.logLike
def optLogLike(self, verbose=1, newtAndBrentPowell=1, allBrentPowell=0, simplex=0):
"""Calculate the likelihood of the tree, with optimization.
There are 3 optimization methods-- choose one. I've made
'newtAndBrentPowell' the default, as it is fast and seems to be
working. The 'allBrentPowell' optimizer used to be the default,
as it seems to be the most robust, although it is slow. It would
be good for checking important calculations. The simplex
optimizer is the slowest, and will sometimes find better optima
for difficult data, but often fails to optimize (with no
warning)."""
if verbose:
theStartTime = time.clock()
self._commonCStuff()
# We want only one opt method.
if newtAndBrentPowell:
newtAndBrentPowell = 1
if allBrentPowell:
allBrentPowell = 1
if simplex:
simplex = 1
if (newtAndBrentPowell + allBrentPowell + simplex) != 1:
gm = ['Tree.optLogLike()']
gm.append("Choose 1 opt method.")
raise P4Error(gm)
# Do the opt.
if allBrentPowell:
pf.p4_allBrentPowellOptimize(self.cTree)
elif simplex:
pf.p4_simplexOptimize(self.cTree, self, Tree.simplexDump)
else:
pf.p4_newtSetup(self.cTree)
pf.p4_newtAndBrentPowellOpt(self.cTree)
# second arg is getSiteLikes
self.logLike = pf.p4_treeLogLike(self.cTree, 0)
# get the brLens
brLens = pf.p4_getBrLens(self.cTree)
for n in self.iterNodesNoRoot():
n.br.len = brLens[n.nodeNum]
# get the other free prams
prams = pf.p4_getFreePrams(self.cTree)
self.model.restoreFreePrams(prams)
if verbose:
print "optLogLike = %f" % self.logLike
theEndTime = time.clock()
print "cpu time %s seconds." % (theEndTime - theStartTime)
def optTest(self):
self._commonCStuff()
theStartTime = time.clock()
doXfer = 0
for i in range(1):
if doXfer:
self.model.setCStuff()
self.setCStuff()
pf.p4_setPrams(self.cTree, -1)
self.logLike = pf.p4_treeLogLike(self.cTree, 0)
if doXfer:
# get the brLens
brLens = pf.p4_getBrLens(self.cTree)
for i in range(len(self.nodes)):
n = self.nodes[i]
if n != self.root:
n.br.len = brLens[i]
# get the other free prams
prams = pf.p4_getFreePrams(self.cTree)
self.model.restoreFreePrams(prams)
print "time %s seconds." % (time.clock() - theStartTime)
def simulate(self, calculatePatterns=True, resetSequences=True, resetNexusSetsConstantMask=True, refTree=None):
"""Simulate into the attached data.
The tree self needs to have a data and model attached.
        Currently, generation of random numbers uses the C language random
function, which is in stdlib on Linux. It will use the same
series of random numbers over and over, unless you tell it
otherwise. That means that (unless you tell it otherwise) it will
generate the same simulated data if you run it twice. To reset
the randomizer, you can use func.reseedCRandomizer(), eg
func.reseedCRandomizer(os.getpid())
        The usual way to simulate does not use reference data. An unusual way to
simulate comes from (inspired by?) PhyloBayes, where the simulation is
conditional on the original data. It uses conditional likelihoods of
that reference data at the root. To turn that on, set refTree to the
tree+model+data that you would like to use. Calculate a likelihood with
that refTree before using it, so that conditional likelihoods are set.
The tree and model for refTree should be identical to the tree and model
for self.
Args:
calculatePatterns (bool): True by default. Whether to "compress" the
newly simulated data to facilitate a faster likelihood
calculation.
resetSequences (bool): True by default. whether to bring the
simulated sequences in C back into Python
resetNexusSetsConstantMask (bool): True by default. When
simulations are made, the constant mask in any associated nexus
sets will get out of sync. Setting this to True makes a new
mask and sets it.
refTree (Tree): None by default. If supplied, a tree+model+data
which has had its likelihood calculated, where the tree+model is
identical to self.
"""
if refTree:
from tree import Tree
assert isinstance(refTree, Tree)
assert refTree.model
assert refTree.data
if not refTree.cTree:
refTree.calcLogLike(verbose=False)
assert refTree.model.cModel
assert refTree.data.cData
self._commonCStuff()
if refTree:
assert refTree.data.cData != self.data.cData
assert refTree.data.nParts == self.data.nParts
assert refTree.data.nTax == self.data.nTax
for i in range(self.data.nTax):
assert refTree.data.taxNames[i] == self.data.taxNames[i]
assert len(refTree.data.alignments) == len(self.data.alignments)
assert refTree.logLike, "Do a likelihood calculation with the refTree before using it here."
# could have some more checks ...
# If there is a NexusSets object attached to any of the alignments
# in the Data, the constant sites mask at least will become out of sync, but we can't just
# delete the whole nexusSets object, as they define what the parts are.
# for a in self.data.alignments:
#
# if a.nexusSets:
# a.nexusSets = None
# Probably better to do something like this
# a.nexusSets.constant.mask = self.constantMask()
# at the end.
# print "About to pf.p4_simulate(self.cTree)"
if refTree:
pf.p4_simulate(self.cTree, refTree.cTree)
else:
pf.p4_simulate(self.cTree, 0)
if calculatePatterns:
for p in self.data.parts:
pf.makePatterns(p.cPart)
pf.setGlobalInvarSitesVec(p.cPart)
if resetSequences:
self.data.resetSequencesFromParts()
if resetNexusSetsConstantMask:
for a in self.data.alignments:
if a.nexusSets:
a.nexusSets.constant.mask = a.constantMask()
else:
if resetNexusSetsConstantMask:
gm = ['Tree.simulate().']
gm.append(
"resetSequences is not set, but resetNexusSetsConstantMask is set,")
gm.append("which is probably not going to work as you want.")
raise P4Error(gm)
def getSiteLikes(self):
"""Likelihoods, not log likes. Placed in self.siteLikes, a list."""
self._commonCStuff()
# second arg is getSiteLikes
self.logLike = pf.p4_treeLogLike(self.cTree, 1)
self.siteLikes = []
for p in self.data.parts:
self.siteLikes += pf.getSiteLikes(p.cPart)
# def getWinningGammaCats(self):
def getSiteRates(self):
"""Get posterior mean site rate, and gamma category.
This says two things --
1. The posterior mean site rate, calculated like PAML
2. Which GDASRV category contributes most to the likelihood.
The posterior mean site rate calculation requires that there be
only one gdasrv over the tree, which will usually be the case.
For placement in categories, if its a tie score, then it is placed
in the first one.
The list of site rates, and the list of categories, both with one
value for each site, are put into separate numpy arrays, returned
as a list, ie [siteRatesArray, categoriesArray]
There is one of these lists for each data partition, and the results as a
whole are returned as a list. So if you only have one data
partition, then you get a 1-item list, and that single item is a list with 2
numpy arrays. Ie [[siteRatesArray, categoriesArray]]
If nGammaCat for a partition is 1, it will give that partition an
array of ones for the site rates and zeros for the categories.
"""
self._commonCStuff()
# second arg is getSiteLikes
self.logLike = pf.p4_treeLogLike(self.cTree, 0)
#self.winningGammaCats = []
# for p in self.data.parts:
# self.winningGammaCats += pf.getWinningGammaCats(p.cPart)
results = []
for partNum in range(len(self.data.parts)):
if len(self.model.parts[partNum].gdasrvs) > 1:
gm = ['Tree.getSiteRates()']
gm.append("Part %i has %i gdasrvs. Maximum 1 allowed." % (
partNum, len(self.model.parts[partNum].gdasrvs)))
raise P4Error(gm)
for partNum in range(len(self.data.parts)):
p = self.data.parts[partNum]
if self.model.parts[partNum].nGammaCat == 1:
siteRates = numpy.ones(p.nChar, numpy.float)
gammaCats = numpy.zeros(p.nChar, numpy.int32)
elif self.model.parts[partNum].nGammaCat > 1:
siteRates = numpy.zeros(p.nChar, numpy.float)
gammaCats = numpy.zeros(p.nChar, numpy.int32)
work = numpy.zeros(
self.model.parts[partNum].nGammaCat, numpy.float)
for charNum in range(p.nChar):
gammaCats[charNum] = -1
#pf.getWinningGammaCats(self.cTree, p.cPart, i, gammaCats, work)
pf.getSiteRates(
self.cTree, p.cPart, partNum, siteRates, gammaCats, work)
# print siteRates
# print gammaCats
# print work
if 0:
counts = numpy.zeros(
self.model.parts[partNum].nGammaCat, numpy.int32)
for charNum in range(p.nChar):
counts[winningGammaCats[charNum]] += 1
print counts
else:
raise P4Error("This should not happen.")
results.append([siteRates, gammaCats])
return results
|
gpl-2.0
| 7,518,539,616,332,160,000 | 38.441584 | 115 | 0.566523 | false |
jkettleb/iris
|
lib/iris/aux_factory.py
|
1
|
70417
|
# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Definitions of derived coordinates.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
from abc import ABCMeta, abstractmethod, abstractproperty
import warnings
import zlib
import numpy as np
from iris._cube_coord_common import CFVariableMixin
import iris.coords
import iris.unit
import iris.util
class LazyArray(object):
"""
Represents a simplified NumPy array which is only computed on demand.
It provides the :meth:`view()` and :meth:`reshape()` methods so it
can be used in place of a standard NumPy array under some
circumstances.
The first use of either of these methods causes the array to be
computed and cached for any subsequent access.
"""
def __init__(self, shape, func, dtype=None):
"""
Args:
* shape (tuple):
The shape of the array which will be created.
* func:
The function which will be called to supply the real array.
Kwargs:
* dtype (np.dtype):
The numpy dtype of the array which will be created.
Defaults to None to signify the dtype is unknown.
"""
self.shape = tuple(shape)
self._func = func
self.dtype = dtype
self._array = None
def __repr__(self):
return '<LazyArray(shape={}, dtype={!r})>'.format(self.shape,
self.dtype)
def _cached_array(self):
if self._array is None:
self._array = np.asarray(self._func())
del self._func
return self._array
def reshape(self, *args, **kwargs):
"""
Returns a view of this array with the given shape.
See :meth:`numpy.ndarray.reshape()` for argument details.
"""
return self._cached_array().reshape(*args, **kwargs)
def to_xml_attr(self):
"""
Returns a string describing this array, suitable for use in CML.
"""
crc = zlib.crc32(np.array(self._cached_array(), order='C'))
crc &= 0xffffffff
return 'LazyArray(shape={}, checksum=0x{:08x})'.format(self.shape, crc)
def view(self, *args, **kwargs):
"""
Returns a view of this array.
See :meth:`numpy.ndarray.view()` for argument details.
"""
return self._cached_array().view(*args, **kwargs)
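# Editorial usage sketch (not part of the original module): a LazyArray defers
# the call to ``func`` until the array is first viewed or reshaped, then caches
# the result for subsequent access. The helper name below is editorial only.
def _lazy_array_example():
    lazy = LazyArray((3,), lambda: np.arange(3.0), dtype=np.float64)
    first = lazy.view()         # first access calls the function and caches it
    again = lazy.reshape(3, 1)  # reuses the cached array, no recomputation
    return first, again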
class AuxCoordFactory(CFVariableMixin):
"""
Represents a "factory" which can manufacture an additional auxiliary
coordinate on demand, by combining the values of other coordinates.
Each concrete subclass represents a specific formula for deriving
values from other coordinates.
The `standard_name`, `long_name`, `var_name`, `units`, `attributes` and
`coord_system` of the factory are used to set the corresponding
properties of the resulting auxiliary coordinates.
"""
__metaclass__ = ABCMeta
def __init__(self):
#: Descriptive name of the coordinate made by the factory
self.long_name = None
#: CF variable name of the coordinate made by the factory
self.var_name = None
#: Coordinate system (if any) of the coordinate made by the factory
self.coord_system = None
@abstractproperty
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
def _as_defn(self):
defn = iris.coords.CoordDefn(self.standard_name, self.long_name,
self.var_name, self.units,
self.attributes, self.coord_system)
return defn
@abstractmethod
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this
factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate.
See :meth:`iris.cube.Cube.coord_dims()`.
"""
@abstractmethod
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of a removal/replacement of a dependency.
Args:
* old_coord:
The dependency coordinate to be removed/replaced.
* new_coord:
If None, the dependency using old_coord is removed, otherwise
the dependency is updated to use new_coord.
"""
def __repr__(self):
def arg_text(item):
key, coord = item
return '{}={}'.format(key, str(coord and repr(coord.name())))
items = sorted(self.dependencies.items(), key=lambda item: item[0])
args = map(arg_text, items)
return '<{}({})>'.format(type(self).__name__, ', '.join(args))
def derived_dims(self, coord_dims_func):
"""
Returns the virtual dim-mapping for the derived coordinate.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate.
See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Which dimensions are relevant?
# e.g. If sigma -> [1] and orog -> [2, 3] then result = [1, 2, 3]
derived_dims = set()
for coord in self.dependencies.itervalues():
if coord:
derived_dims.update(coord_dims_func(coord))
# Apply a fixed order so we know how to map dependency dims to
# our own dims (and so the Cube can map them to Cube dims).
derived_dims = tuple(sorted(derived_dims))
return derived_dims
def updated(self, new_coord_mapping):
"""
Creates a new instance of this factory where the dependencies
are replaced according to the given mapping.
Args:
* new_coord_mapping:
A dictionary mapping from the object IDs potentially used
by this factory, to the coordinate objects that should be
used instead.
"""
new_dependencies = {}
for key, coord in self.dependencies.iteritems():
if coord:
coord = new_coord_mapping[id(coord)]
new_dependencies[key] = coord
return type(self)(**new_dependencies)
def xml_element(self, doc):
"""
Returns a DOM element describing this coordinate factory.
"""
element = doc.createElement('coordFactory')
for key, coord in self.dependencies.iteritems():
element.setAttribute(key, coord._xml_id())
element.appendChild(self.make_coord().xml_element(doc))
return element
def _dependency_dims(self, coord_dims_func):
dependency_dims = {}
for key, coord in self.dependencies.iteritems():
if coord:
dependency_dims[key] = coord_dims_func(coord)
return dependency_dims
def _nd_bounds(self, coord, dims, ndim):
"""
Returns the coord's bounds in Cube-orientation and
broadcastable to N dimensions.
Example:
coord.shape == (70,)
coord.nbounds = 2
dims == [3]
ndim == 5
results in:
nd_bounds.shape == (1, 1, 1, 70, 1, 2)
"""
# Transpose to be consistent with the Cube.
sorted_pairs = sorted(enumerate(dims), key=lambda pair: pair[1])
transpose_order = [pair[0] for pair in sorted_pairs] + [len(dims)]
bounds = coord.bounds
if dims:
bounds = bounds.transpose(transpose_order)
# Figure out the n-dimensional shape.
nd_shape = [1] * ndim + [coord.nbounds]
for dim, size in zip(dims, coord.shape):
nd_shape[dim] = size
bounds.shape = tuple(nd_shape)
return bounds
@staticmethod
def _nd_points(coord, dims, ndim):
"""
Returns the coord's points in Cube-orientation and
broadcastable to N dimensions.
Example:
coord.shape == (4, 3)
dims == [3, 2]
ndim == 5
results in:
nd_points.shape == (1, 1, 3, 4, 1)
"""
# Transpose to be consistent with the Cube.
sorted_pairs = sorted(enumerate(dims), key=lambda pair: pair[1])
transpose_order = [pair[0] for pair in sorted_pairs]
points = coord._points
if dims and transpose_order != list(range(len(dims))):
points = points.transpose(transpose_order)
# Expand dimensionality to be consistent with the Cube.
if dims:
keys = [None] * ndim
for dim, size in zip(dims, coord.shape):
keys[dim] = slice(None)
points = points[tuple(keys)]
else:
# Scalar coordinates have one dimensional points despite
# mapping to zero dimensions, so we only need to add N-1
# new dimensions.
keys = (None,) * (ndim - 1)
points = points[keys]
return points
def _remap(self, dependency_dims, derived_dims):
if derived_dims:
ndim = max(derived_dims) + 1
else:
ndim = 1
nd_points_by_key = {}
for key, coord in self.dependencies.iteritems():
if coord:
# Get the points as consistent with the Cube.
nd_points = self._nd_points(coord, dependency_dims[key], ndim)
# Restrict to just the dimensions relevant to the
# derived coord. NB. These are always in Cube-order, so
# no transpose is needed.
if derived_dims:
keys = tuple(slice(None) if dim in derived_dims else 0 for
dim in range(ndim))
nd_points = nd_points[keys]
else:
# If no coord, treat value as zero.
# Use a float16 to provide `shape` attribute and avoid
# promoting other arguments to a higher precision.
nd_points = np.float16(0)
nd_points_by_key[key] = nd_points
return nd_points_by_key
def _remap_with_bounds(self, dependency_dims, derived_dims):
if derived_dims:
ndim = max(derived_dims) + 1
else:
ndim = 1
nd_values_by_key = {}
for key, coord in self.dependencies.iteritems():
if coord:
# Get the bounds or points as consistent with the Cube.
if coord.nbounds:
nd_values = self._nd_bounds(coord, dependency_dims[key],
ndim)
else:
nd_values = self._nd_points(coord, dependency_dims[key],
ndim)
# Restrict to just the dimensions relevant to the
# derived coord. NB. These are always in Cube-order, so
# no transpose is needed.
shape = []
for dim in derived_dims:
shape.append(nd_values.shape[dim])
# Ensure the array always has at least one dimension to be
# compatible with normal coordinates.
if not derived_dims:
shape.append(1)
# Add on the N-bounds dimension
if coord.nbounds:
shape.append(nd_values.shape[-1])
else:
# NB. For a non-bounded coordinate we still need an
# extra dimension to make the shape compatible, so
# we just add an extra 1.
shape.append(1)
nd_values = np.array(nd_values)
nd_values.shape = shape
else:
# If no coord, treat value as zero.
# Use a float16 to provide `shape` attribute and avoid
# promoting other arguments to a higher precision.
nd_values = np.float16(0)
nd_values_by_key[key] = nd_values
return nd_values_by_key
def _shape(self, nd_values_by_key):
nd_values = sorted(nd_values_by_key.values(),
key=lambda value: value.ndim)
shape = list(nd_values.pop().shape)
for array in nd_values:
for i, size in enumerate(array.shape):
if size > 1:
# NB. If there's an inconsistency it can only come
# from a mismatch in the number of bounds (the Cube
# ensures the other dimensions must match).
# But we can't afford to raise an error now - it'd
# break Cube.derived_coords. Instead, we let the
# error happen when the derived coordinate's bounds
# are accessed.
shape[i] = size
return shape
def _dtype(self, arrays_by_key, **other_args):
dummy_args = {}
for key, array in arrays_by_key.iteritems():
dummy_args[key] = np.zeros(1, dtype=array.dtype)
dummy_args.update(other_args)
dummy_data = self._derive(**dummy_args)
return dummy_data.dtype
class HybridHeightFactory(AuxCoordFactory):
"""
Defines a hybrid-height coordinate factory with the formula:
z = a + b * orog
"""
def __init__(self, delta=None, sigma=None, orography=None):
"""
Creates a hybrid-height coordinate factory with the formula:
z = a + b * orog
At least one of `delta` or `orography` must be provided.
Args:
* delta: Coord
The coordinate providing the `a` term.
* sigma: Coord
The coordinate providing the `b` term.
* orography: Coord
The coordinate providing the `orog` term.
"""
super(HybridHeightFactory, self).__init__()
if delta and delta.nbounds not in (0, 2):
raise ValueError('Invalid delta coordinate: must have either 0 or'
' 2 bounds.')
if sigma and sigma.nbounds not in (0, 2):
raise ValueError('Invalid sigma coordinate: must have either 0 or'
' 2 bounds.')
if orography and orography.nbounds:
msg = 'Orography coordinate {!r} has bounds.' \
' These will be disregarded.'.format(orography.name())
warnings.warn(msg, UserWarning, stacklevel=2)
self.delta = delta
self.sigma = sigma
self.orography = orography
self.standard_name = 'altitude'
if delta is None and orography is None:
raise ValueError('Unable to determine units: no delta or orography'
' available.')
if delta and orography and delta.units != orography.units:
raise ValueError('Incompatible units: delta and orography must'
' have the same units.')
self.units = (delta and delta.units) or orography.units
if not self.units.is_convertible('m'):
raise ValueError('Invalid units: delta and/or orography'
' must be expressed in length units.')
self.attributes = {'positive': 'up'}
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return {'delta': self.delta, 'sigma': self.sigma,
'orography': self.orography}
def _derive(self, delta, sigma, orography):
temp = delta + sigma * orography
return temp
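    # Editorial worked example (not part of the original module): with
    # delta = 100.0 m, sigma = 0.5 and orography = 200.0 m the derived
    # altitude is 100.0 + 0.5 * 200.0 = 200.0 m.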
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this
factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate.
See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Which dimensions are relevant?
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['delta'],
nd_points_by_key['sigma'],
nd_points_by_key['orography'])
shape = self._shape(nd_points_by_key)
dtype = self._dtype(nd_points_by_key)
points = LazyArray(shape, calc_points, dtype)
bounds = None
if ((self.delta and self.delta.nbounds) or
(self.sigma and self.sigma.nbounds)):
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
# Define the function here to obtain a closure.
def calc_bounds():
delta = nd_values_by_key['delta']
sigma = nd_values_by_key['sigma']
orography = nd_values_by_key['orography']
ok_bound_shapes = [(), (1,), (2,)]
if delta.shape[-1:] not in ok_bound_shapes:
raise ValueError('Invalid delta coordinate bounds.')
if sigma.shape[-1:] not in ok_bound_shapes:
raise ValueError('Invalid sigma coordinate bounds.')
if orography.shape[-1:] not in [(), (1,)]:
warnings.warn('Orography coordinate has bounds. '
'These are being disregarded.',
UserWarning, stacklevel=2)
orography_pts = nd_points_by_key['orography']
orography_pts_shape = list(orography_pts.shape)
                    # NB. list.append() returns None, so build the new
                    # shape by concatenation instead.
                    orography = orography_pts.reshape(
                        orography_pts_shape + [1])
return self._derive(delta, sigma, orography)
b_shape = self._shape(nd_values_by_key)
b_dtype = self._dtype(nd_values_by_key)
bounds = LazyArray(b_shape, calc_bounds, b_dtype)
hybrid_height = iris.coords.AuxCoord(points,
standard_name=self.standard_name,
long_name=self.long_name,
var_name=self.var_name,
units=self.units,
bounds=bounds,
attributes=self.attributes,
coord_system=self.coord_system)
return hybrid_height
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of the removal/replacement of a coordinate
which might be a dependency.
Args:
* old_coord:
The coordinate to be removed/replaced.
* new_coord:
If None, any dependency using old_coord is removed, otherwise
any dependency using old_coord is updated to use new_coord.
"""
if self.delta is old_coord:
if new_coord and new_coord.nbounds not in (0, 2):
raise ValueError('Invalid delta coordinate:'
' must have either 0 or 2 bounds.')
self.delta = new_coord
elif self.sigma is old_coord:
if new_coord and new_coord.nbounds not in (0, 2):
raise ValueError('Invalid sigma coordinate:'
' must have either 0 or 2 bounds.')
self.sigma = new_coord
elif self.orography is old_coord:
if new_coord and new_coord.nbounds:
msg = 'Orography coordinate {!r} has bounds.' \
' These will be disregarded.'.format(new_coord.name())
warnings.warn(msg, UserWarning, stacklevel=2)
self.orography = new_coord
class HybridPressureFactory(AuxCoordFactory):
"""
Defines a hybrid-pressure coordinate factory with the formula:
p = ap + b * ps
"""
def __init__(self, delta=None, sigma=None, surface_air_pressure=None):
"""
        Creates a hybrid-pressure coordinate factory with the formula:
p = ap + b * ps
At least one of `delta` or `surface_air_pressure` must be provided.
Args:
* delta: Coord
The coordinate providing the `ap` term.
* sigma: Coord
The coordinate providing the `b` term.
* surface_air_pressure: Coord
The coordinate providing the `ps` term.
"""
super(HybridPressureFactory, self).__init__()
# Check that provided coords meet necessary conditions.
self._check_dependencies(delta, sigma, surface_air_pressure)
self.delta = delta
self.sigma = sigma
self.surface_air_pressure = surface_air_pressure
self.standard_name = 'air_pressure'
self.attributes = {}
@property
def units(self):
if self.delta is not None:
units = self.delta.units
else:
units = self.surface_air_pressure.units
return units
@staticmethod
def _check_dependencies(delta, sigma,
surface_air_pressure):
# Check for sufficient coordinates.
if (delta is None and (sigma is None or
surface_air_pressure is None)):
            msg = 'Unable to construct hybrid pressure coordinate factory ' \
                  'due to insufficient source coordinates.'
raise ValueError(msg)
# Check bounds.
if delta and delta.nbounds not in (0, 2):
raise ValueError('Invalid delta coordinate: must have either 0 or'
' 2 bounds.')
if sigma and sigma.nbounds not in (0, 2):
raise ValueError('Invalid sigma coordinate: must have either 0 or'
' 2 bounds.')
if surface_air_pressure and surface_air_pressure.nbounds:
msg = 'Surface pressure coordinate {!r} has bounds. These will' \
' be disregarded.'.format(surface_air_pressure.name())
warnings.warn(msg, UserWarning, stacklevel=2)
# Check units.
if sigma is not None and not sigma.units.is_dimensionless():
raise ValueError('Invalid units: sigma must be dimensionless.')
if delta is not None and surface_air_pressure is not None and \
delta.units != surface_air_pressure.units:
msg = 'Incompatible units: delta and ' \
'surface_air_pressure must have the same units.'
raise ValueError(msg)
if delta is not None:
units = delta.units
else:
units = surface_air_pressure.units
if not units.is_convertible('Pa'):
msg = 'Invalid units: delta and ' \
'surface_air_pressure must have units of pressure.'
raise ValueError(msg)
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return {'delta': self.delta, 'sigma': self.sigma,
'surface_air_pressure': self.surface_air_pressure}
def _derive(self, delta, sigma, surface_air_pressure):
temp = delta + sigma * surface_air_pressure
return temp
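    # Editorial worked example (not part of the original module): with
    # delta = 500.0 Pa, sigma = 0.9 and surface_air_pressure = 100000.0 Pa
    # the derived pressure is 500.0 + 0.9 * 100000.0 = 90500.0 Pa.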
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this
factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate.
See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Which dimensions are relevant?
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['delta'],
nd_points_by_key['sigma'],
nd_points_by_key['surface_air_pressure'])
shape = self._shape(nd_points_by_key)
dtype = self._dtype(nd_points_by_key)
points = LazyArray(shape, calc_points, dtype)
bounds = None
if ((self.delta and self.delta.nbounds) or
(self.sigma and self.sigma.nbounds)):
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
# Define the function here to obtain a closure.
def calc_bounds():
delta = nd_values_by_key['delta']
sigma = nd_values_by_key['sigma']
surface_air_pressure = nd_values_by_key['surface_air_pressure']
ok_bound_shapes = [(), (1,), (2,)]
if delta.shape[-1:] not in ok_bound_shapes:
raise ValueError('Invalid delta coordinate bounds.')
if sigma.shape[-1:] not in ok_bound_shapes:
raise ValueError('Invalid sigma coordinate bounds.')
if surface_air_pressure.shape[-1:] not in [(), (1,)]:
warnings.warn('Surface pressure coordinate has bounds. '
'These are being disregarded.')
surface_air_pressure_pts = nd_points_by_key[
'surface_air_pressure']
surface_air_pressure_pts_shape = list(
surface_air_pressure_pts.shape)
                    # NB. list.append() returns None, so build the new
                    # shape by concatenation instead.
                    surface_air_pressure = surface_air_pressure_pts.reshape(
                        surface_air_pressure_pts_shape + [1])
return self._derive(delta, sigma, surface_air_pressure)
b_shape = self._shape(nd_values_by_key)
b_dtype = self._dtype(nd_values_by_key)
bounds = LazyArray(b_shape, calc_bounds, b_dtype)
hybrid_pressure = iris.coords.AuxCoord(
points, standard_name=self.standard_name, long_name=self.long_name,
var_name=self.var_name, units=self.units, bounds=bounds,
attributes=self.attributes, coord_system=self.coord_system)
return hybrid_pressure
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of the removal/replacement of a coordinate
which might be a dependency.
Args:
* old_coord:
The coordinate to be removed/replaced.
* new_coord:
If None, any dependency using old_coord is removed, otherwise
any dependency using old_coord is updated to use new_coord.
"""
new_dependencies = self.dependencies
for name, coord in self.dependencies.items():
if old_coord is coord:
new_dependencies[name] = new_coord
try:
self._check_dependencies(**new_dependencies)
except ValueError as e:
msg = 'Failed to update dependencies. ' + e.message
raise ValueError(msg)
else:
setattr(self, name, new_coord)
break
class OceanSigmaZFactory(AuxCoordFactory):
"""Defines an ocean sigma over z coordinate factory."""
def __init__(self, sigma=None, eta=None, depth=None,
depth_c=None, nsigma=None, zlev=None):
"""
Creates a ocean sigma over z coordinate factory with the formula:
if k < nsigma:
z(n, k, j, i) = eta(n, j, i) + sigma(k) *
(min(depth_c, depth(j, i)) + eta(n, j, i))
if k >= nsigma:
z(n, k, j, i) = zlev(k)
        The `zlev` and `nsigma` coordinates must be provided, and at least
        either `eta`, or `sigma` and `depth` and `depth_c` coordinates.
"""
super(OceanSigmaZFactory, self).__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev)
self.sigma = sigma
self.eta = eta
self.depth = depth
self.depth_c = depth_c
self.nsigma = nsigma
self.zlev = zlev
self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
self.attributes = {'positive': 'up'}
@property
def units(self):
return self.zlev.units
@staticmethod
def _check_dependencies(sigma, eta, depth, depth_c, nsigma, zlev):
# Check for sufficient factory coordinates.
if zlev is None:
raise ValueError('Unable to determine units: '
'no zlev coordinate available.')
if nsigma is None:
raise ValueError('Missing nsigma coordinate.')
if eta is None and (sigma is None or depth_c is None or
depth is None):
msg = 'Unable to construct ocean sigma over z coordinate ' \
'factory due to insufficient source coordinates.'
raise ValueError(msg)
# Check bounds and shape.
for coord, term in ((sigma, 'sigma'), (zlev, 'zlev')):
if coord is not None and coord.nbounds not in (0, 2):
msg = 'Invalid {} coordinate {!r}: must have either ' \
'0 or 2 bounds.'.format(term, coord.name())
raise ValueError(msg)
if sigma and sigma.nbounds != zlev.nbounds:
msg = 'The sigma coordinate {!r} and zlev coordinate {!r} ' \
'must be equally bounded.'.format(sigma.name(), zlev.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'),
(depth_c, 'depth_c'), (nsigma, 'nsigma'))
for coord, term in coords:
if coord is not None and coord.nbounds:
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(term, coord.name())
warnings.warn(msg, UserWarning, stacklevel=2)
for coord, term in ((depth_c, 'depth_c'), (nsigma, 'nsigma')):
if coord is not None and coord.shape != (1,):
msg = 'Expected scalar {} coordinate {!r}: ' \
'got shape {!r}.'.format(term, coord.name(), coord.shape)
raise ValueError(msg)
# Check units.
if not zlev.units.is_convertible('m'):
msg = 'Invalid units: zlev coordinate {!r} ' \
'must have units of distance.'.format(zlev.name())
raise ValueError(msg)
if sigma is not None and not sigma.units.is_dimensionless():
msg = 'Invalid units: sigma coordinate {!r} ' \
'must be dimensionless.'.format(sigma.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth_c, 'depth_c'), (depth, 'depth'))
for coord, term in coords:
if coord is not None and coord.units != zlev.units:
msg = 'Incompatible units: {} coordinate {!r} and zlev ' \
'coordinate {!r} must have ' \
'the same units.'.format(term, coord.name(), zlev.name())
raise ValueError(msg)
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return dict(sigma=self.sigma, eta=self.eta, depth=self.depth,
depth_c=self.depth_c, nsigma=self.nsigma, zlev=self.zlev)
def _derive(self, sigma, eta, depth, depth_c,
nsigma, zlev, shape, nsigma_slice):
# Perform the ocean sigma over z coordinate nsigma slice.
if eta.ndim:
eta = eta[nsigma_slice]
if sigma.ndim:
sigma = sigma[nsigma_slice]
if depth.ndim:
depth = depth[nsigma_slice]
# Note that, this performs a point-wise minimum.
temp = eta + sigma * (np.minimum(depth_c, depth) + eta)
# Calculate the final derived result.
result = np.ones(shape, dtype=temp.dtype) * zlev
result[nsigma_slice] = temp
return result
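    # Editorial worked example (not part of the original module): for a level
    # below nsigma with eta = 0.0, sigma = -0.5, depth_c = 100.0 and
    # depth = 1000.0, z = 0.0 + (-0.5) * (min(100.0, 1000.0) + 0.0) = -50.0;
    # levels at or beyond nsigma simply take the corresponding zlev value.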
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.
Args:
* coord_dims_func:
            A callable which can return the list of dimensions relevant
to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Determine the relevant dimensions.
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
points_shape = self._shape(nd_points_by_key)
points_dtype = self._dtype(nd_points_by_key, shape=(), nsigma_slice=())
# Calculate the nsigma slice.
nsigma_slice = [slice(None)] * len(derived_dims)
dim, = dependency_dims['zlev']
index = derived_dims.index(dim)
nsigma_slice[index] = slice(0, int(nd_points_by_key['nsigma']))
nsigma_slice = tuple(nsigma_slice)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['sigma'],
nd_points_by_key['eta'],
nd_points_by_key['depth'],
nd_points_by_key['depth_c'],
nd_points_by_key['nsigma'],
nd_points_by_key['zlev'],
points_shape,
nsigma_slice)
points = LazyArray(points_shape, calc_points, points_dtype)
bounds = None
if self.zlev.nbounds or (self.sigma and self.sigma.nbounds):
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
bounds_shape = self._shape(nd_values_by_key)
bounds_dtype = self._dtype(nd_values_by_key, shape=(),
nsigma_slice=())
nsigma_slice_bounds = nsigma_slice + (slice(None),)
# Define the function here to obtain a closure.
def calc_bounds():
valid_shapes = [(), (1,), (2,)]
for key in ('sigma', 'zlev'):
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'Invalid bounds for {} ' \
'coordinate {!r}.'.format(key, name)
raise ValueError(msg)
valid_shapes.pop()
for key in ('eta', 'depth', 'depth_c', 'nsigma'):
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(key, name)
warnings.warn(msg, UserWarning, stacklevel=2)
# Swap bounds with points.
shape = list(nd_points_by_key[key].shape)
                        # NB. list.append() returns None; use concatenation.
                        bounds = nd_points_by_key[key].reshape(shape + [1])
nd_values_by_key[key] = bounds
return self._derive(nd_values_by_key['sigma'],
nd_values_by_key['eta'],
nd_values_by_key['depth'],
nd_values_by_key['depth_c'],
nd_values_by_key['nsigma'],
nd_values_by_key['zlev'],
bounds_shape,
nsigma_slice_bounds)
bounds = LazyArray(bounds_shape, calc_bounds, bounds_dtype)
coord = iris.coords.AuxCoord(points,
standard_name=self.standard_name,
long_name=self.long_name,
var_name=self.var_name,
units=self.units,
bounds=bounds,
attributes=self.attributes,
coord_system=self.coord_system)
return coord
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of the removal/replacement of a coordinate
which might be a dependency.
Args:
* old_coord:
The coordinate to be removed/replaced.
* new_coord:
If None, any dependency using old_coord is removed, otherwise
any dependency using old_coord is updated to use new_coord.
"""
new_dependencies = self.dependencies
for name, coord in self.dependencies.items():
if old_coord is coord:
new_dependencies[name] = new_coord
try:
self._check_dependencies(**new_dependencies)
except ValueError as e:
msg = 'Failed to update dependencies. ' + e.message
raise ValueError(msg)
else:
setattr(self, name, new_coord)
break
class OceanSigmaFactory(AuxCoordFactory):
"""Defines an ocean sigma coordinate factory."""
def __init__(self, sigma=None, eta=None, depth=None):
"""
Creates an ocean sigma coordinate factory with the formula:
z(n, k, j, i) = eta(n, j, i) + sigma(k) *
(depth(j, i) + eta(n, j, i))
"""
super(OceanSigmaFactory, self).__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(sigma, eta, depth)
self.sigma = sigma
self.eta = eta
self.depth = depth
self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
self.attributes = {'positive': 'up'}
@property
def units(self):
return self.depth.units
@staticmethod
def _check_dependencies(sigma, eta, depth):
# Check for sufficient factory coordinates.
if eta is None or sigma is None or depth is None:
msg = 'Unable to construct ocean sigma coordinate ' \
'factory due to insufficient source coordinates.'
raise ValueError(msg)
# Check bounds and shape.
coord, term = (sigma, 'sigma')
if coord is not None and coord.nbounds not in (0, 2):
msg = 'Invalid {} coordinate {!r}: must have either ' \
'0 or 2 bounds.'.format(term, coord.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'))
for coord, term in coords:
if coord is not None and coord.nbounds:
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(term, coord.name())
warnings.warn(msg, UserWarning, stacklevel=2)
# Check units.
if sigma is not None and not sigma.units.is_dimensionless():
msg = 'Invalid units: sigma coordinate {!r} ' \
'must be dimensionless.'.format(sigma.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'))
for coord, term in coords:
if coord is not None and coord.units != depth.units:
msg = 'Incompatible units: {} coordinate {!r} and depth ' \
'coordinate {!r} must have ' \
'the same units.'.format(term, coord.name(), depth.name())
raise ValueError(msg)
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return dict(sigma=self.sigma, eta=self.eta, depth=self.depth)
def _derive(self, sigma, eta, depth):
result = eta + sigma * (depth + eta)
return result
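    # Editorial worked example (not part of the original module): with
    # eta = 2.0, sigma = -0.5 and depth = 100.0,
    # z = 2.0 + (-0.5) * (100.0 + 2.0) = -49.0.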
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Determine the relevant dimensions.
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
points_shape = self._shape(nd_points_by_key)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['sigma'],
nd_points_by_key['eta'],
nd_points_by_key['depth'])
points = LazyArray(points_shape, calc_points)
bounds = None
if self.sigma and self.sigma.nbounds:
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
bounds_shape = self._shape(nd_values_by_key)
# Define the function here to obtain a closure.
def calc_bounds():
valid_shapes = [(), (1,), (2,)]
key = 'sigma'
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'Invalid bounds for {} ' \
'coordinate {!r}.'.format(key, name)
raise ValueError(msg)
valid_shapes.pop()
for key in ('eta', 'depth'):
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(key, name)
warnings.warn(msg, UserWarning, stacklevel=2)
# Swap bounds with points.
shape = list(nd_points_by_key[key].shape)
                        # NB. list.append() returns None; use concatenation.
                        bounds = nd_points_by_key[key].reshape(shape + [1])
nd_values_by_key[key] = bounds
                return self._derive(nd_values_by_key['sigma'],
                                    nd_values_by_key['eta'],
                                    nd_values_by_key['depth'])
bounds = LazyArray(bounds_shape, calc_bounds)
coord = iris.coords.AuxCoord(points,
standard_name=self.standard_name,
long_name=self.long_name,
var_name=self.var_name,
units=self.units,
bounds=bounds,
attributes=self.attributes,
coord_system=self.coord_system)
return coord
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of the removal/replacement of a coordinate
which might be a dependency.
Args:
* old_coord:
The coordinate to be removed/replaced.
* new_coord:
If None, any dependency using old_coord is removed, otherwise
any dependency using old_coord is updated to use new_coord.
"""
new_dependencies = self.dependencies
for name, coord in self.dependencies.items():
if old_coord is coord:
new_dependencies[name] = new_coord
try:
self._check_dependencies(**new_dependencies)
except ValueError as e:
msg = 'Failed to update dependencies. ' + e.message
raise ValueError(msg)
else:
setattr(self, name, new_coord)
break
class OceanSg1Factory(AuxCoordFactory):
"""Defines an Ocean s-coordinate, generic form 1 factory."""
def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
"""
Creates an Ocean s-coordinate, generic form 1 factory with the formula:
z(n,k,j,i) = S(k,j,i) + eta(n,j,i) * (1 + S(k,j,i) / depth(j,i))
where:
S(k,j,i) = depth_c * s(k) + (depth(j,i) - depth_c) * C(k)
"""
super(OceanSg1Factory, self).__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(s, c, eta, depth, depth_c)
self.s = s
self.c = c
self.eta = eta
self.depth = depth
self.depth_c = depth_c
self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
self.attributes = {'positive': 'up'}
@property
def units(self):
return self.depth.units
@staticmethod
def _check_dependencies(s, c, eta, depth, depth_c):
# Check for sufficient factory coordinates.
if (eta is None or s is None or c is None or
depth is None or depth_c is None):
msg = 'Unable to construct Ocean s-coordinate, generic form 1 ' \
'factory due to insufficient source coordinates.'
raise ValueError(msg)
# Check bounds and shape.
coords = ((s, 's'), (c, 'c'))
for coord, term in coords:
if coord is not None and coord.nbounds not in (0, 2):
msg = 'Invalid {} coordinate {!r}: must have either ' \
'0 or 2 bounds.'.format(term, coord.name())
raise ValueError(msg)
if s and s.nbounds != c.nbounds:
msg = 'The s coordinate {!r} and c coordinate {!r} ' \
'must be equally bounded.'.format(s.name(), c.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'))
for coord, term in coords:
if coord is not None and coord.nbounds:
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(term, coord.name())
warnings.warn(msg, UserWarning, stacklevel=2)
if depth_c is not None and depth_c.shape != (1,):
            msg = 'Expected scalar depth_c coordinate {!r}: ' \
                  'got shape {!r}.'.format(depth_c.name(), depth_c.shape)
raise ValueError(msg)
# Check units.
coords = ((s, 's'), (c, 'c'))
for coord, term in coords:
if coord is not None and not coord.units.is_dimensionless():
msg = 'Invalid units: {} coordinate {!r} ' \
'must be dimensionless.'.format(term, coord.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'), (depth_c, 'depth_c'))
for coord, term in coords:
if coord is not None and coord.units != depth.units:
msg = 'Incompatible units: {} coordinate {!r} and depth ' \
'coordinate {!r} must have ' \
'the same units.'.format(term, coord.name(), depth.name())
raise ValueError(msg)
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return dict(s=self.s, c=self.c, eta=self.eta, depth=self.depth,
depth_c=self.depth_c)
def _derive(self, s, c, eta, depth, depth_c):
S = depth_c * s + (depth - depth_c) * c
result = S + eta * (1 + S / depth)
return result
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Determine the relevant dimensions.
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
points_shape = self._shape(nd_points_by_key)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['s'],
nd_points_by_key['c'],
nd_points_by_key['eta'],
nd_points_by_key['depth'],
nd_points_by_key['depth_c'])
points = LazyArray(points_shape, calc_points)
bounds = None
if self.s.nbounds or (self.c and self.c.nbounds):
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
bounds_shape = self._shape(nd_values_by_key)
# Define the function here to obtain a closure.
def calc_bounds():
valid_shapes = [(), (1,), (2,)]
key = 's'
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'Invalid bounds for {} ' \
'coordinate {!r}.'.format(key, name)
raise ValueError(msg)
valid_shapes.pop()
for key in ('eta', 'depth', 'depth_c'):
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(key, name)
warnings.warn(msg, UserWarning, stacklevel=2)
# Swap bounds with points.
shape = list(nd_points_by_key[key].shape)
                    bounds = nd_points_by_key[key].reshape(shape + [1])
nd_values_by_key[key] = bounds
return self._derive(nd_values_by_key['s'],
nd_values_by_key['c'],
nd_values_by_key['eta'],
nd_values_by_key['depth'],
nd_values_by_key['depth_c'],
bounds_shape)
bounds = LazyArray(bounds_shape, calc_bounds)
coord = iris.coords.AuxCoord(points,
standard_name=self.standard_name,
long_name=self.long_name,
var_name=self.var_name,
units=self.units,
bounds=bounds,
attributes=self.attributes,
coord_system=self.coord_system)
return coord
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of the removal/replacement of a coordinate
which might be a dependency.
Args:
* old_coord:
The coordinate to be removed/replaced.
* new_coord:
If None, any dependency using old_coord is removed, otherwise
any dependency using old_coord is updated to use new_coord.
"""
new_dependencies = self.dependencies
for name, coord in self.dependencies.items():
if old_coord is coord:
new_dependencies[name] = new_coord
try:
self._check_dependencies(**new_dependencies)
except ValueError as e:
msg = 'Failed to update dependencies. ' + e.message
raise ValueError(msg)
else:
setattr(self, name, new_coord)
break
class OceanSFactory(AuxCoordFactory):
"""Defines an Ocean s-coordinate factory."""
def __init__(self, s=None, eta=None, depth=None, a=None, b=None,
depth_c=None):
"""
Creates an Ocean s-coordinate factory with the formula:
z(n,k,j,i) = eta(n,j,i)*(1+s(k)) + depth_c*s(k) +
(depth(j,i)-depth_c)*C(k)
where:
C(k) = (1-b) * sinh(a*s(k)) / sinh(a) +
b * [tanh(a * (s(k) + 0.5)) / (2 * tanh(0.5*a)) - 0.5]
"""
super(OceanSFactory, self).__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(s, eta, depth, a, b, depth_c)
self.s = s
self.eta = eta
self.depth = depth
self.a = a
self.b = b
self.depth_c = depth_c
self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
self.attributes = {'positive': 'up'}
@property
def units(self):
return self.depth.units
@staticmethod
def _check_dependencies(s, eta, depth, a, b, depth_c):
# Check for sufficient factory coordinates.
if (eta is None or s is None or depth is None or
a is None or b is None or depth_c is None):
msg = 'Unable to construct Ocean s-coordinate ' \
'factory due to insufficient source coordinates.'
raise ValueError(msg)
# Check bounds and shape.
if s is not None and s.nbounds not in (0, 2):
msg = 'Invalid s coordinate {!r}: must have either ' \
'0 or 2 bounds.'.format(s.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'))
for coord, term in coords:
if coord is not None and coord.nbounds:
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(term, coord.name())
warnings.warn(msg, UserWarning, stacklevel=2)
coords = ((a, 'a'), (b, 'b'), (depth_c, 'depth_c'))
for coord, term in coords:
if coord is not None and coord.shape != (1,):
msg = 'Expected scalar {} coordinate {!r}: ' \
'got shape {!r}.'.format(term, coord.name(), coord.shape)
raise ValueError(msg)
# Check units.
if s is not None and not s.units.is_dimensionless():
msg = 'Invalid units: s coordinate {!r} ' \
'must be dimensionless.'.format(s.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'), (depth_c, 'depth_c'))
for coord, term in coords:
if coord is not None and coord.units != depth.units:
msg = 'Incompatible units: {} coordinate {!r} and depth ' \
'coordinate {!r} must have ' \
'the same units.'.format(term, coord.name(), depth.name())
raise ValueError(msg)
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return dict(s=self.s, eta=self.eta, depth=self.depth, a=self.a,
b=self.b, depth_c=self.depth_c)
def _derive(self, s, eta, depth, a, b, depth_c):
c = ((1 - b) * np.sinh(a * s) / np.sinh(a) + b *
(np.tanh(a * (s + 0.5)) / (2 * np.tanh(0.5 * a)) - 0.5))
result = eta * (1 + s) + depth_c * s + (depth - depth_c) * c
return result
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Determine the relevant dimensions.
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
points_shape = self._shape(nd_points_by_key)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['s'],
nd_points_by_key['eta'],
nd_points_by_key['depth'],
nd_points_by_key['a'],
nd_points_by_key['b'],
nd_points_by_key['depth_c'])
points = LazyArray(points_shape, calc_points)
bounds = None
if self.s.nbounds:
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
bounds_shape = self._shape(nd_values_by_key)
# Define the function here to obtain a closure.
def calc_bounds():
valid_shapes = [(), (1,), (2,)]
key = 's'
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'Invalid bounds for {} ' \
'coordinate {!r}.'.format(key, name)
raise ValueError(msg)
valid_shapes.pop()
for key in ('eta', 'depth', 'a', 'b', 'depth_c'):
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(key, name)
warnings.warn(msg, UserWarning, stacklevel=2)
# Swap bounds with points.
shape = list(nd_points_by_key[key].shape)
                    bounds = nd_points_by_key[key].reshape(shape + [1])
nd_values_by_key[key] = bounds
return self._derive(nd_values_by_key['s'],
nd_values_by_key['eta'],
nd_values_by_key['depth'],
nd_values_by_key['a'],
nd_values_by_key['b'],
nd_values_by_key['depth_c'],
bounds_shape)
bounds = LazyArray(bounds_shape, calc_bounds)
coord = iris.coords.AuxCoord(points,
standard_name=self.standard_name,
long_name=self.long_name,
var_name=self.var_name,
units=self.units,
bounds=bounds,
attributes=self.attributes,
coord_system=self.coord_system)
return coord
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of the removal/replacement of a coordinate
which might be a dependency.
Args:
* old_coord:
The coordinate to be removed/replaced.
* new_coord:
If None, any dependency using old_coord is removed, otherwise
any dependency using old_coord is updated to use new_coord.
"""
new_dependencies = self.dependencies
for name, coord in self.dependencies.items():
if old_coord is coord:
new_dependencies[name] = new_coord
try:
self._check_dependencies(**new_dependencies)
except ValueError as e:
msg = 'Failed to update dependencies. ' + e.message
raise ValueError(msg)
else:
setattr(self, name, new_coord)
break
class OceanSg2Factory(AuxCoordFactory):
"""Defines an Ocean s-coordinate, generic form 2 factory."""
def __init__(self, s=None, c=None, eta=None, depth=None, depth_c=None):
"""
Creates an Ocean s-coordinate, generic form 2 factory with the formula:
z(n,k,j,i) = eta(n,j,i) + (eta(n,j,i) + depth(j,i)) * S(k,j,i)
where:
S(k,j,i) = (depth_c * s(k) + depth(j,i) * C(k)) /
(depth_c + depth(j,i))
"""
super(OceanSg2Factory, self).__init__()
# Check that provided coordinates meet necessary conditions.
self._check_dependencies(s, c, eta, depth, depth_c)
self.s = s
self.c = c
self.eta = eta
self.depth = depth
self.depth_c = depth_c
self.standard_name = 'sea_surface_height_above_reference_ellipsoid'
self.attributes = {'positive': 'up'}
@property
def units(self):
return self.depth.units
@staticmethod
def _check_dependencies(s, c, eta, depth, depth_c):
# Check for sufficient factory coordinates.
if (eta is None or s is None or c is None or
depth is None or depth_c is None):
msg = 'Unable to construct Ocean s-coordinate, generic form 2 ' \
'factory due to insufficient source coordinates.'
raise ValueError(msg)
# Check bounds and shape.
coords = ((s, 's'), (c, 'c'))
for coord, term in coords:
if coord is not None and coord.nbounds not in (0, 2):
msg = 'Invalid {} coordinate {!r}: must have either ' \
'0 or 2 bounds.'.format(term, coord.name())
raise ValueError(msg)
if s and s.nbounds != c.nbounds:
msg = 'The s coordinate {!r} and c coordinate {!r} ' \
'must be equally bounded.'.format(s.name(), c.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'))
for coord, term in coords:
if coord is not None and coord.nbounds:
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(term, coord.name())
warnings.warn(msg, UserWarning, stacklevel=2)
if depth_c is not None and depth_c.shape != (1,):
msg = 'Expected scalar depth_c coordinate {!r}: ' \
'got shape {!r}.'.format(depth_c.name(), depth_c.shape)
raise ValueError(msg)
# Check units.
coords = ((s, 's'), (c, 'c'))
for coord, term in coords:
if coord is not None and not coord.units.is_dimensionless():
msg = 'Invalid units: {} coordinate {!r} ' \
'must be dimensionless.'.format(term, coord.name())
raise ValueError(msg)
coords = ((eta, 'eta'), (depth, 'depth'), (depth_c, 'depth_c'))
for coord, term in coords:
if coord is not None and coord.units != depth.units:
msg = 'Incompatible units: {} coordinate {!r} and depth ' \
'coordinate {!r} must have ' \
'the same units.'.format(term, coord.name(), depth.name())
raise ValueError(msg)
@property
def dependencies(self):
"""
Returns a dictionary mapping from constructor argument names to
the corresponding coordinates.
"""
return dict(s=self.s, c=self.c, eta=self.eta, depth=self.depth,
depth_c=self.depth_c)
def _derive(self, s, c, eta, depth, depth_c):
S = (depth_c * s + depth * c) / (depth_c + depth)
result = eta + (eta + depth) * S
return result
def make_coord(self, coord_dims_func):
"""
Returns a new :class:`iris.coords.AuxCoord` as defined by this factory.
Args:
* coord_dims_func:
A callable which can return the list of dimensions relevant
to a given coordinate. See :meth:`iris.cube.Cube.coord_dims()`.
"""
# Determine the relevant dimensions.
derived_dims = self.derived_dims(coord_dims_func)
dependency_dims = self._dependency_dims(coord_dims_func)
# Build a "lazy" points array.
nd_points_by_key = self._remap(dependency_dims, derived_dims)
points_shape = self._shape(nd_points_by_key)
# Define the function here to obtain a closure.
def calc_points():
return self._derive(nd_points_by_key['s'],
nd_points_by_key['c'],
nd_points_by_key['eta'],
nd_points_by_key['depth'],
nd_points_by_key['depth_c'])
points = LazyArray(points_shape, calc_points)
bounds = None
if self.s.nbounds or (self.c and self.c.nbounds):
# Build a "lazy" bounds array.
nd_values_by_key = self._remap_with_bounds(dependency_dims,
derived_dims)
bounds_shape = self._shape(nd_values_by_key)
# Define the function here to obtain a closure.
def calc_bounds():
valid_shapes = [(), (1,), (2,)]
key = 's'
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'Invalid bounds for {} ' \
'coordinate {!r}.'.format(key, name)
raise ValueError(msg)
valid_shapes.pop()
for key in ('eta', 'depth', 'depth_c'):
if nd_values_by_key[key].shape[-1:] not in valid_shapes:
name = self.dependencies[key].name()
msg = 'The {} coordinate {!r} has bounds. ' \
'These are being disregarded.'.format(key, name)
warnings.warn(msg, UserWarning, stacklevel=2)
# Swap bounds with points.
shape = list(nd_points_by_key[key].shape)
                    bounds = nd_points_by_key[key].reshape(shape + [1])
nd_values_by_key[key] = bounds
return self._derive(nd_values_by_key['s'],
nd_values_by_key['c'],
nd_values_by_key['eta'],
nd_values_by_key['depth'],
nd_values_by_key['depth_c'],
bounds_shape)
bounds = LazyArray(bounds_shape, calc_bounds)
coord = iris.coords.AuxCoord(points,
standard_name=self.standard_name,
long_name=self.long_name,
var_name=self.var_name,
units=self.units,
bounds=bounds,
attributes=self.attributes,
coord_system=self.coord_system)
return coord
def update(self, old_coord, new_coord=None):
"""
Notifies the factory of the removal/replacement of a coordinate
which might be a dependency.
Args:
* old_coord:
The coordinate to be removed/replaced.
* new_coord:
If None, any dependency using old_coord is removed, otherwise
any dependency using old_coord is updated to use new_coord.
"""
new_dependencies = self.dependencies
for name, coord in self.dependencies.items():
if old_coord is coord:
new_dependencies[name] = new_coord
try:
self._check_dependencies(**new_dependencies)
except ValueError as e:
msg = 'Failed to update dependencies. ' + e.message
raise ValueError(msg)
else:
setattr(self, name, new_coord)
break
|
lgpl-3.0
| 7,830,086,257,781,286,000 | 38.427212 | 79 | 0.525384 | false |
hirobert/svgwrite
|
svgwrite/utils.py
|
1
|
6106
|
#!/usr/bin/env python
#coding:utf-8
# Author: mozman
# Purpose: svg util functions and classes
# Created: 08.09.2010
# Copyright (C) 2010, Manfred Moitzi
# License: GPLv3
"""
.. autofunction:: rgb
.. autofunction:: iterflatlist
.. autofunction:: strlist
.. autofunction:: get_unit
.. autofunction:: split_coordinate
.. autofunction:: split_angle
.. autofunction:: rect_top_left_corner
"""
import sys
PYTHON3 = sys.version_info[0] > 2
# Python 3 adaption
if PYTHON3:
to_unicode = str
basestring = str
else:
to_unicode = unicode
# Python 3 adaption
def is_string(value):
return isinstance(value, basestring)
from svgwrite.data import pattern
def rgb(r=0, g=0, b=0, mode='RGB'):
"""
Convert **r**, **g**, **b** values to a `string`.
:param r: red part
:param g: green part
:param b: blue part
:param string mode: ``'RGB | %'``
:rtype: string
========= =============================================================
mode Description
========= =============================================================
``'RGB'`` returns a rgb-string format: ``'rgb(r, g, b)'``
``'%'`` returns percent-values as rgb-string format: ``'rgb(r%, g%, b%)'``
========= =============================================================
"""
def percent(value):
value = int(value)
if value < 0:
value = 0
if value > 100:
value = 100
return value
if mode.upper() == 'RGB':
return "rgb(%d,%d,%d)" % (int(r) & 255, int(g) & 255, int(b) & 255)
elif mode == "%":
# see http://www.w3.org/TR/SVG11/types.html#DataTypeColor
# percentage is an 'integer' value
return "rgb(%d%%,%d%%,%d%%)" % (percent(r), percent(g), percent(b))
else:
raise ValueError("Invalid mode '%s'" % mode)
def iterflatlist(values):
"""
Flatten nested *values*, returns an `iterator`.
"""
for element in values:
if hasattr(element, "__iter__") and not is_string(element):
for item in iterflatlist(element):
yield item
else:
yield element
def strlist(values, seperator=","):
"""
    Concatenate **values** with **seperator**; `None` values will be excluded.
:param values: `iterable` object
:returns: `string`
"""
if is_string(values):
return values
else:
return seperator.join([str(value) for value in iterflatlist(values) if value is not None])
def get_unit(coordinate):
"""
Get the `unit` identifier of **coordinate**, if **coordinate** has a valid
`unit` identifier appended, else returns `None`.
"""
if isinstance(coordinate, (int, float)):
return None
result = pattern.coordinate.match(coordinate)
if result:
return result.group(3)
else:
raise ValueError("Invalid format: '%s'" % coordinate)
def split_coordinate(coordinate):
"""
    Split coordinate into `<number>` and `unit` identifier.
:returns: <2-tuple> (number, unit-identifier) or (number, None) if no unit-identifier
is present or coordinate is an int or float.
"""
if isinstance(coordinate, (int, float)):
return (float(coordinate), None)
result = pattern.coordinate.match(coordinate)
if result:
return (float(result.group(1)), result.group(3))
else:
raise ValueError("Invalid format: '%s'" % coordinate)
def split_angle(angle):
"""
Split angle into `<number>` and `<angle>` identifier.
:returns: <2-tuple> (number, angle-identifier) or (number, None) if no angle-identifier
is present or angle is an int or float.
"""
if isinstance(angle, (int, float)):
return (float(angle), None)
result = pattern.angle.match(angle)
if result:
return (float(result.group(1)), result.group(3))
else:
raise ValueError("Invalid format: '%s'" % angle)
def rect_top_left_corner(insert, size, pos='top-left'):
"""
Calculate top-left corner of a rectangle.
**insert** and **size** must have the same units.
:param 2-tuple insert: insert point
:param 2-tuple size: (width, height)
:param string pos: insert position ``'vert-horiz'``
:return: ``'top-left'`` corner of the rect
:rtype: 2-tuple
========== ==============================
pos valid values
========== ==============================
**vert** ``'top | middle | bottom'``
**horiz** ``'left'|'center'|'right'``
========== ==============================
"""
vert, horiz = pos.lower().split('-')
x, xunit = split_coordinate(insert[0])
y, yunit = split_coordinate(insert[1])
width, wunit = split_coordinate(size[0])
height, hunit = split_coordinate(size[1])
if xunit != wunit:
raise ValueError("x-coordinate and width has to have the same unit")
if yunit != hunit:
raise ValueError("y-coordinate and height has to have the same unit")
if horiz == 'center':
x = x - width / 2.
elif horiz == 'right':
x = x - width
elif horiz != 'left':
raise ValueError("Invalid horizontal position: '%s'" % horiz)
if vert == 'middle':
y = y - height / 2.
elif vert == 'bottom':
y = y - height
elif vert != 'top':
raise ValueError("Invalid vertical position: '%s'" % vert)
if xunit:
x = "%s%s" %(x, xunit)
if yunit:
y = "%s%s" %(y, yunit)
return (x, y)
class AutoID(object):
_nextid = 1
def __init__(self, value=None):
self._set_value(value)
@classmethod
def _set_value(cls, value=None):
if value is not None:
cls._nextid = value
@classmethod
def next_id(cls, value=None):
cls._set_value(value)
retval = "id%d" % cls._nextid
cls._nextid += 1
return retval
|
gpl-3.0
| -841,790,652,813,798,900 | 26.138249 | 98 | 0.533082 | false |
AlvinPH/StockTool
|
StockTool/core.py
|
1
|
7480
|
from . import helpers
import pandas as pd
import numpy as np
from pandas import DataFrame, Series
from pandas_datareader import data
from datetime import datetime, timedelta
import re
import os
import requests
import time
import logging
class StockInfo():
def __init__(self, StockNumber):
if isinstance(StockNumber, str) is False:
print('StockNumber must be string')
self.__StockNumber = '2330.TW'
else:
self.__StockNumber = StockNumber+'.TW'
def get_StockNumber(self):
return self.__StockNumber
def fetch_StockPrice(self, StartTime, EndTime):
# self.__StockPrice = data.DataReader(self.__StockNumber,
# 'yahoo',StartTime, EndTime)
self.__StockPrice = data.DataReader(self.__StockNumber,
'yahoo',StartTime, EndTime)
def get_StockPrice(self):
return self.__StockPrice
def fetch_StockActions(self, StartTime, EndTime):
self.__StockActions = data.DataReader(self.__StockNumber,
'yahoo-actions',StartTime, EndTime)
def get_StockActions(self):
return self.__StockActions
class Crawler():
def __init__(self, prefix='data'):
if not os.path.isdir(prefix):
os.mkdir(prefix)
self.prefix = prefix
# pass
def get_tse_one_day(self, spec_date):
date_str = '{0}{1:02d}{2:02d}'.format(spec_date.year, spec_date.month, spec_date.day)
url = 'http://www.twse.com.tw/exchangeReport/MI_INDEX'
query_params = {
'date': date_str,
'response': 'json',
'type': 'ALL',
'_': str(round(time.time() * 1000) - 500)
}
# Get json data
page = requests.get(url, params=query_params)
if not page.ok:
logging.error("Can not get TSE data at {}".format(date_str))
content = page.json()
# print(content)
# key = 'Nodata'
isoffday = True
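        # The MI_INDEX response bundles several tables; a real trading day is detected by finding one whose rows carry 16 fields.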
for key in content.keys():
if isinstance(content[key], list):
if len(content[key][0]) == 16:
isoffday = False
break
if isoffday:
print('No data at this day %4d/%02d/%02d'%
(spec_date.year,spec_date.month, spec_date.day))
return -1
# For compatible with original data
# date_str_mingguo = '{0}/{1:02d}/{2:02d}'.format(spec_date.year - 1911,\
# spec_date.month, spec_date.day)
data_df = DataFrame(data=content[key],
columns=['code','name','volume','transaction','turnover',
'open','high','low','close','UD','difference',
'last_buy', 'last_buy_volume',
'last_sell','last_sell_volume','PE_ratio'])
data_df = data_df.applymap(lambda x: re.sub(",","",x))# clear comma
data_df.replace({'UD':{'<p style= color:red>+</p>':'+',
'<p style= color:green>-</p>':'-'}},
inplace=True)
return data_df
def get_otc_one_day(self, spec_date):
date_str = '{0}/{1:02d}/{2:02d}'.format(spec_date.year-1911, spec_date.month, spec_date.day)
ttime = str(int(time.time()*100))
url = 'http://www.tpex.org.tw/web/stock/aftertrading/daily_close_quotes/stk_quote_result.php?l=zh-tw&d={}&_={}'.format(date_str, ttime)
page = requests.get(url)
if not page.ok:
logging.error("Can not get OTC data at {}".format(date_str))
# print(page.content)
content = page.json()
# print(content)
# key = 'Nodata'
if (len(content['aaData']) + len(content['mmData'])) == 0:
print('No data at this day ' + date_str)
return -1
data_df = DataFrame(data=content['aaData'] + content['mmData'],
columns=['code','name','close','difference','open',
'high','low','avg','volume','turnover',
'transaction','last_buy',
'last_sell','NumOfShare','NextRefPrice',
'NextUpperPrice', 'NextLowerPrice'])
data_df = data_df.applymap(lambda x: re.sub(",","",x))# clear comma
return data_df
def check_all_tse_data(self):
Filelist = os.listdir(self.prefix)
if 'offday.xlsx' in Filelist:
offday_ser = pd.read_excel(self.prefix + '/offday.xlsx')
offday_ser = offday_ser['date'].copy()
else:
offday_ser = Series(name='date', data='First')
offday_update = False
lastday_update = False
Now = datetime.now()
Nowdate = datetime(Now.year, Now.month, Now.day)
if 'lastday.txt' in Filelist:
with open(self.prefix + '/lastday.txt', 'r') as f:
read_data = f.read()
f.close()
Startdate = datetime(int(read_data[0:4]),
int(read_data[4:6]),
int(read_data[6:8]))
else:
#Start from 2004(093)/02/11
Startdate = datetime(2004, 2, 11)
datediff = timedelta(days=1)
while Startdate <= Nowdate:
date_str = '{0}{1:02d}{2:02d}'.\
format(Startdate.year-1911,Startdate.month, Startdate.day)
print('Read ' + date_str)
if ('%s.xlsx' %(date_str)) not in Filelist:# not in FileList
if (offday_ser != date_str).all():# not a offday
lastday_update = True
data_df = self.get_tse_one_day(Startdate) # collect data
if isinstance(data_df, DataFrame):# success
data_df.to_excel('{0}/{1}.xlsx'.format(self.prefix,date_str))# save data
else:# is an offday, update offday series
offday_ser.set_value( len(offday_ser), date_str)
offday_update = True
                    print(date_str + ' is an offday')
else:
print(date_str + ' is known as an offday')
else:
print(date_str + ' is in FileList')
Startdate = Startdate + datediff
if offday_update:
offday_ser.to_excel(self.prefix + '/offday.xlsx')
if lastday_update:
with open(self.prefix + '/lastday.txt', 'w') as f:
# Nowdate += timedelta(days=-1)
date_str = '{0}{1:02d}{2:02d}'.\
format(Nowdate.year,Nowdate.month, Nowdate.day)
f.write(date_str)
f.close()
def check_all_otc_data(self):
Filelist = os.listdir(self.prefix)
if 'offdayOTC.xlsx' in Filelist:
offday_ser = pd.read_excel(self.prefix + '/offdayOTC.xlsx')
offday_ser = offday_ser['date'].copy()
else:
offday_ser = Series(name='date', data='First')
offday_update = False
lastday_update = False
Now = datetime.now()
Nowdate = datetime(Now.year, Now.month, Now.day)
if 'lastdayOTC.txt' in Filelist:
with open(self.prefix + '/lastdayOTC.txt', 'r') as f:
read_data = f.read()
f.close()
Startdate = datetime(int(read_data[0:4]),
int(read_data[4:6]),
int(read_data[6:8]))
else:
#Start from 2007(096)/04/23
Startdate = datetime(2007, 4, 23)
# Startdate = datetime(2008, 2, 28)
datediff = timedelta(days=1)
while Startdate <= Nowdate:
date_str = '{0}{1:02d}{2:02d}'.\
format(Startdate.year-1911,Startdate.month, Startdate.day)
print('Read ' + date_str + ' OTC')
if ('%sOTC.xlsx' %(date_str)) not in Filelist:# not in FileList
if (offday_ser != date_str).all():# not a offday
lastday_update = True
time.sleep(np.random.random())
data_df = self.get_otc_one_day(Startdate) # collect data
if isinstance(data_df, DataFrame):# success
data_df.to_excel('{0}/{1}OTC.xlsx'.format(self.prefix,date_str))# save data
else:# is an offday, update offday series
offday_ser.set_value( len(offday_ser), date_str)
offday_update = True
                    print(date_str + ' is an offday')
else:
print(date_str + ' is known as an offday')
else:
print(date_str + ' is in FileList')
Startdate = Startdate + datediff
if offday_update:
offday_ser.to_excel(self.prefix + '/offdayOTC.xlsx')
if lastday_update:
with open(self.prefix + '/lastdayOTC.txt', 'w') as f:
# Nowdate += timedelta(days=-1)
date_str = '{0}{1:02d}{2:02d}'.\
format(Nowdate.year,Nowdate.month, Nowdate.day)
f.write(date_str)
f.close()
|
bsd-2-clause
| -3,977,140,881,011,436,500 | 27.226415 | 137 | 0.629278 | false |
dtroyer/python-openstacksdk
|
openstack/tests/unit/clustering/v1/test_event.py
|
1
|
2273
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.unit import base
from openstack.clustering.v1 import event
FAKE = {
'action': 'NODE_CREATE',
'cluster_id': None,
'id': 'ffaed25e-46f5-4089-8e20-b3b4722fd597',
'level': '20',
'oid': 'efff1c11-2ada-47da-bedd-2c9af4fd099a',
'oname': 'node_create_b4a49016',
'otype': 'NODEACTION',
'project': '42d9e9663331431f97b75e25136307ff',
'status': 'START',
'status_reason': 'The action was abandoned.',
'timestamp': '2016-10-10T12:46:36.000000',
'user': '5e5bf8027826429c96af157f68dc9072'
}
class TestEvent(base.TestCase):
def setUp(self):
super(TestEvent, self).setUp()
def test_basic(self):
sot = event.Event()
self.assertEqual('event', sot.resource_key)
self.assertEqual('events', sot.resources_key)
self.assertEqual('/events', sot.base_path)
self.assertEqual('clustering', sot.service.service_type)
self.assertTrue(sot.allow_get)
self.assertTrue(sot.allow_list)
def test_instantiate(self):
sot = event.Event(**FAKE)
self.assertEqual(FAKE['id'], sot.id)
self.assertEqual(FAKE['action'], sot.action)
self.assertEqual(FAKE['cluster_id'], sot.cluster_id)
self.assertEqual(FAKE['level'], sot.level)
self.assertEqual(FAKE['oid'], sot.obj_id)
self.assertEqual(FAKE['oname'], sot.obj_name)
self.assertEqual(FAKE['otype'], sot.obj_type)
self.assertEqual(FAKE['project'], sot.project_id)
self.assertEqual(FAKE['status'], sot.status)
self.assertEqual(FAKE['status_reason'], sot.status_reason)
self.assertEqual(FAKE['timestamp'], sot.generated_at)
self.assertEqual(FAKE['user'], sot.user_id)
|
apache-2.0
| -3,078,022,408,368,148,500 | 36.262295 | 75 | 0.673119 | false |
grycap/clues
|
cluesplugins/kubernetes.py
|
1
|
13204
|
#!/usr/bin/env python
#
# CLUES - Cluster Energy Saving System
# Copyright (C) 2015 - GRyCAP - Universitat Politecnica de Valencia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
import requests
import base64
import json
import cpyutils.config
import clueslib.helpers as Helpers
from cpyutils.evaluate import TypedNumber, TypedClass, TypedList
from cpyutils.log import Log
from clueslib.node import NodeInfo
from clueslib.platform import LRMS
from clueslib.request import Request, ResourcesNeeded, JobInfo
_LOGGER = Log("PLUGIN-KUBERNETES")
class lrms(LRMS):
def _get_auth_header(self, auth):
"""
Generate the auth header needed to contact with the Kubernetes API server.
"""
auth_header = {}
if 'username' in auth and 'password' in auth:
passwd = auth['password']
user = auth['username']
auth_header = {'Authorization': 'Basic ' +
(base64.encodestring((user + ':' + passwd).encode('utf-8'))).strip().decode('utf-8')}
elif 'token' in auth:
token = auth['token']
auth_header = {'Authorization': 'Bearer ' + token}
return auth_header
def _create_request(self, method, url, auth_data, headers=None, body=None):
try:
if headers is None:
headers = {}
auth_header = self._get_auth_header(auth_data)
if auth_header:
headers.update(auth_header)
url = "%s%s" % (self._server_url, url)
resp = requests.request(method, url, verify=False, headers=headers, data=body)
if resp.status_code == 200:
return resp.json()
else:
_LOGGER.error("Error contacting Kubernetes API: %s - %s" % (resp.status_code, resp.text))
return None
except Exception as ex:
_LOGGER.error("Error contacting Kubernetes API: %s" % str(ex))
return None
def __init__(self, KUBERNETES_SERVER=None, KUBERNETES_PODS_API_URL_PATH=None,
KUBERNETES_NODES_API_URL_PATH=None, KUBERNETES_TOKEN=None, KUBERNETES_NODE_MEMORY=None,
KUBERNETES_NODE_SLOTS=None, KUBERNETES_NODE_PODS=None):
config_kube = cpyutils.config.Configuration(
"KUBERNETES",
{
"KUBERNETES_SERVER": "http://localhost:8080",
"KUBERNETES_PODS_API_URL_PATH": "/api/v1/pods",
"KUBERNETES_NODES_API_URL_PATH": "/api/v1/nodes",
"KUBERNETES_TOKEN": None,
"KUBERNETES_NODE_MEMORY": 1073741824,
"KUBERNETES_NODE_SLOTS": 1,
"KUBERNETES_NODE_PODS": 110,
}
)
self._server_url = Helpers.val_default(KUBERNETES_SERVER, config_kube.KUBERNETES_SERVER)
self._pods_api_url_path = Helpers.val_default(KUBERNETES_PODS_API_URL_PATH,
config_kube.KUBERNETES_PODS_API_URL_PATH)
self._nodes_api_url_path = Helpers.val_default(KUBERNETES_NODES_API_URL_PATH,
config_kube.KUBERNETES_NODES_API_URL_PATH)
token = Helpers.val_default(KUBERNETES_TOKEN, config_kube.KUBERNETES_TOKEN)
self._node_memory = Helpers.val_default(KUBERNETES_NODE_MEMORY, config_kube.KUBERNETES_NODE_MEMORY)
self._node_slots = Helpers.val_default(KUBERNETES_NODE_SLOTS, config_kube.KUBERNETES_NODE_SLOTS)
self._node_pods = Helpers.val_default(KUBERNETES_NODE_PODS, config_kube.KUBERNETES_NODE_PODS)
if token:
self.auth_data = {"token": token}
else:
self.auth_data = {}
LRMS.__init__(self, "KUBERNETES_%s" % self._server_url)
def _get_memory_in_bytes(self, str_memory):
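        # Convert a Kubernetes quantity string such as '512Mi' or '2Gi' (or a plain byte count) to bytes, treating every suffix as a power-of-1024 multiplier.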
str_memory = str_memory.lower()
if str_memory.strip()[-2:] in ['mi', 'gi', 'ki', 'ti']:
unit = str_memory.strip()[-2:][0]
memory = int(str_memory.strip()[:-2])
elif str_memory.strip()[-1:] in ['m', 'g', 'k', 't']:
unit = str_memory.strip()[-1:]
memory = int(str_memory.strip()[:-1])
else:
return int(str_memory)
if unit == 'k':
memory *= 1024
elif unit == 'm':
memory *= 1024 * 1024
elif unit == 'g':
memory *= 1024 * 1024 * 1024
elif unit == 't':
memory *= 1024 * 1024 * 1024 * 1024
return memory
def _get_node_used_resources(self, nodename, pods_data):
used_mem = 0
used_cpus = 0.0
used_pods = 0
system_pods = 0
if pods_data:
for pod in pods_data["items"]:
if "nodeName" in pod["spec"] and nodename == pod["spec"]["nodeName"]:
# do not count the number of pods in case finished jobs
if pod["status"]["phase"] not in ["Succeeded", "Failed"]:
# do not count the number of pods in case of system ones
if pod["metadata"]["namespace"] == "kube-system":
system_pods += 1
used_pods += 1
cpus, memory = self._get_pod_cpus_and_memory(pod)
used_mem += memory
used_cpus += cpus
return used_mem, used_cpus, used_pods, system_pods
def get_nodeinfolist(self):
nodeinfolist = collections.OrderedDict()
nodes_data = self._create_request('GET', self._nodes_api_url_path, self.auth_data)
if nodes_data:
pods_data = self._create_request('GET', self._pods_api_url_path, self.auth_data)
if not pods_data:
_LOGGER.error("Error getting Kubernetes pod list. Node usage will not be obtained.")
for node in nodes_data["items"]:
name = node["metadata"]["name"]
memory_total = self._get_memory_in_bytes(node["status"]["allocatable"]["memory"])
slots_total = int(node["status"]["allocatable"]["cpu"])
pods_total = int(node["status"]["allocatable"]["pods"])
skip_node = False
# Get Taints
if 'taints' in node["spec"] and node["spec"]['taints']:
for taint in node["spec"]['taints']:
if taint['effect'] in ["NoSchedule", "PreferNoSchedule", "NoExecute"]:
skip_node = True
_LOGGER.debug("Node %s is tainted with %s, skiping." % (name, taint['effect']))
if not skip_node:
used_mem, used_cpus, used_pods, system_pods = self._get_node_used_resources(name, pods_data)
memory_free = memory_total - used_mem
slots_free = slots_total - used_cpus
pods_free = pods_total - used_pods
is_ready = True
for conditions in node["status"]["conditions"]:
if conditions['type'] == "Ready":
if conditions['status'] != "True":
is_ready = False
keywords = {'pods_free': TypedNumber(pods_free),
'nodeName': TypedClass(name, TypedClass.STRING)}
# Add labels as keywords
for key, value in list(node["metadata"]["labels"].items()):
keywords[key] = TypedClass(value, TypedClass.STRING)
nodeinfolist[name] = NodeInfo(name, slots_total, slots_free, memory_total, memory_free, keywords)
if is_ready:
nodeinfolist[name].state = NodeInfo.IDLE
if (used_pods - system_pods) > 0:
nodeinfolist[name].state = NodeInfo.USED
else:
nodeinfolist[name].state = NodeInfo.OFF
else:
_LOGGER.error("Error getting Kubernetes node list.")
# Add the "virtual" nodes
try:
vnodes = json.load(open('/etc/clues2/kubernetes_vnodes.info', 'r'))
for vnode in vnodes:
name = vnode["name"]
if name not in nodeinfolist:
keywords = {'pods_free': TypedNumber(self._node_pods),
'nodeName': TypedClass(name, TypedClass.STRING)}
cpus = self._node_slots
if "cpu" in vnode:
cpus = int(vnode["cpu"])
memory = self._node_memory
if "memory" in vnode:
memory = self._get_memory_in_bytes(vnode["memory"])
if "queues" in vnode:
queues = vnode["queues"].split(",")
if queues:
keywords['queues'] = TypedList([TypedClass.auto(q) for q in queues])
if "keywords" in vnode:
for keypair in vnode["keywords"].split(','):
parts = keypair.split('=')
keywords[parts[0].strip()] = TypedClass(parts[1].strip(), TypedClass.STRING)
nodeinfolist[name] = NodeInfo(name, cpus, cpus, memory, memory, keywords)
nodeinfolist[name].state = NodeInfo.OFF
except Exception as ex:
_LOGGER.error("Error processing file /etc/clues2/kubernetes_vnodes.info: %s" % str(ex))
return nodeinfolist
def _get_cpu_float(self, cpu_info):
if cpu_info.strip()[-1:] == "m":
return float(cpu_info.strip()[:-1]) / 1000.0
else:
return float(cpu_info)
def _get_pod_cpus_and_memory(self, pod):
cpus = 0.0
memory = 0
for cont in pod["spec"]["containers"]:
if "resources" in cont:
if "requests" in cont["resources"]:
if "cpu" in cont["resources"]["requests"]:
cpus += self._get_cpu_float(cont["resources"]["requests"]["cpu"])
if "memory" in cont["resources"]["requests"]:
memory += self._get_memory_in_bytes(cont["resources"]["requests"]["memory"])
return cpus, memory
def get_jobinfolist(self):
'''Method in charge of monitoring the job queue of Mesos plus Marathon
The Mesos info about jobs has to be obtained from frameworks and not from tasks,
because if there are not available resources to execute new tasks, Mesos
do not create them but frameworks are created
'''
jobinfolist = []
pods_data = self._create_request('GET', self._pods_api_url_path, self.auth_data)
if pods_data:
for pod in pods_data["items"]:
if pod["metadata"]["namespace"] != "kube-system":
job_id = pod["metadata"]["uid"]
state = pod["status"]["phase"] # Pending, Running, Succeeded, Failed or Unknown
hostIP = None
if "hostIP" in pod["status"]:
hostIP = pod["status"]["hostIP"] # Pending, Running, Succeeded, Failed or Unknown
job_state = Request.UNKNOWN
if state == "Pending":
job_state = Request.PENDING
if hostIP:
job_state = Request.SERVED
elif state in ["Running", "Succeeded", "Failed"]:
job_state = Request.SERVED
cpus, memory = self._get_pod_cpus_and_memory(pod)
req_str = '(pods_free > 0)'
if 'nodeName' in pod["spec"] and pod["spec"]["nodeName"]:
req_str += ' && (nodeName = "%s")' % pod["spec"]["nodeName"]
# Add node selector labels
if 'nodeSelector' in pod['spec'] and pod['spec']['nodeSelector']:
for key, value in list(pod['spec']['nodeSelector'].items()):
req_str += ' && (%s == "%s")' % (key, value)
resources = ResourcesNeeded(cpus, memory, [req_str], 1)
job_info = JobInfo(resources, job_id, 1)
job_info.set_state(job_state)
jobinfolist.append(job_info)
else:
_LOGGER.error("Error getting Kubernetes pod list")
return jobinfolist
if __name__ == '__main__':
pass
|
gpl-3.0
| -3,341,491,502,030,024,000 | 43.160535 | 117 | 0.529612 | false |
Dhole/miniBoy
|
helpers/gen.py
|
1
|
1027
|
regs = ["B", "C", "D", "E", "H", "L", "(HL)", "A"]
d = 0x80
for i in range(0, 8):
print('\tSET_OP(0x%02X, "ADD A,%s", op_add, A, %s, 4);' % (i+d, regs[i % 8], regs[i % 8]));
for i in range(8, 16):
print('\tSET_OP(0x%02X, "ADC A,%s", op_adc, A, %s, 4);' % (i+d, regs[i % 8], regs[i % 8]));
d = 0x90
for i in range(0, 8):
print('\tSET_OP(0x%02X, "SUB %s", op_sub, %s, NULL, 4);' % (i+d, regs[i % 8], regs[i % 8]));
for i in range(8, 16):
print('\tSET_OP(0x%02X, "SBC A,%s", op_sbc, A, %s, 4);' % (i+d, regs[i % 8], regs[i % 8]));
d = 0xA0
for i in range(0, 8):
print('\tSET_OP(0x%02X, "AND %s", op_and, %s, NULL, 4);' % (i+d, regs[i % 8], regs[i % 8]));
for i in range(8, 16):
print('\tSET_OP(0x%02X, "XOR %s", op_xor, %s, NULL, 4);' % (i+d, regs[i % 8], regs[i % 8]));
d = 0xB0
for i in range(0, 8):
print('\tSET_OP(0x%02X, "OR %s", op_or, %s, NULL, 4);' % (i+d, regs[i % 8], regs[i % 8]));
for i in range(8, 16):
print('\tSET_OP(0x%02X, "CP %s", op_cp, %s, NULL, 4);' % (i+d, regs[i % 8], regs[i % 8]));
|
bsd-3-clause
| -4,138,096,646,816,942,000 | 34.448276 | 93 | 0.478092 | false |
kirsty-tortoise/mathsmap
|
mathsmap/new_map.py
|
1
|
2417
|
"""
Contains classes controlling wizards for making new maps
"""
import tkinter as tk
import mathsmap.colours as colours
class Wizard:
"""
A base class for all wizards in this project
"""
def clear(self):
"""
Remove all current widgets from top level of wizard
"""
for child in self.top.winfo_children():
child.destroy()
class NewMap(Wizard):
"""
Class for any new map
"""
def __init__(self, controller):
"""
Set up NewMap wizard
"""
self.controller = controller
self.top = tk.Toplevel()
self.top.title("Make a new map")
self.welcome_screen()
def welcome_screen(self):
"""
Sets up first screen of wizard
"""
self.clear()
self.scheme = colours.BLUESCHEME
self.background = self.scheme.lighter
self.top.configure(background=self.background)
self.title = tk.Label(self.top, text="Let's make a new map!", font=(None, 20),
background=self.background)
self.title.grid(row=0, column=0, columnspan=2)
self.text = tk.Label(self.top,
text=("When do you need to make your mathsmap? " +
"Is it right now, possibly in a rush before exams, " +
"or over time, while you attend lectures and learn?"),
wraplength=400, background=self.background
)
self.text.grid(row=1, column=0, columnspan=2)
buttons_needed = [("Right now!", 0),
("Over time!", 1)]
for text, column in buttons_needed:
button = tk.Button(self.top, text=text, width=15, height=3,
background=self.scheme.darkest, activebackground=self.scheme.darker,
foreground="white", font=(None, 15))
button.grid(row=2, column=column, pady=5)
def clear(self):
"""
Remove all current widgets from top level
"""
for child in self.top.winfo_children():
child.destroy()
class NewFutureMap(Wizard):
"""
Class for new maps to be added to slowly in the future
"""
pass
class NewNowMap(Wizard):
"""
Class for new maps to be added to and completed right now
"""
pass
|
mit
| 4,506,016,835,669,896,000 | 31.226667 | 99 | 0.541167 | false |
tiagocoutinho/bliss
|
bliss/controllers/emulators/keithley.py
|
1
|
2009
|
# -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
import time
import random
import gevent
from bliss.comm.scpi import Commands
from bliss.controllers.keithley_scpi_mapping import COMMANDS, MODEL_COMMANDS
from .scpi import SCPI
# 'KEITHLEY INSTRUMENTS INC.,MODEL 6485,1008577,B03 Sep 25 2002 10:53:29/A02 /E'
class BaseKeithley(SCPI):
Manufacturer = 'KEITHLEY INSTRUMENTS INC.'
Version = '1008577'
Firmware = 'B03 Sep 25 2002 10:53:29/A02 /E'
IDNFieldSep = ','
def __init__(self, *args, **kwargs):
super(BaseKeithley, self).__init__(*args, **kwargs)
self.start_time = time.time()
def syst_ver(self):
return self.Version
class Keithley6485(BaseKeithley):
Model = 'MODEL 6485'
PLC = 50 # 50Hz
NPLC = 5.0
FormElem = 'READ',
def __init__(self, *args, **kwargs):
kwargs['commands'] = Commands(COMMANDS, MODEL_COMMANDS['6485'])
super(Keithley6485, self).__init__(*args, **kwargs)
# def curr_nplc(self, is_query, value=None):
# if is_query:
# return self.NPLC
# self.NPLC = float(value)
def curr_rang(self):
return 123.456
def form_elem(self, is_query, value=None):
if is_query:
return ','.join(self.FormElem)
self.FormElem = tuple(map(str.upper, value.split(',')))
def read(self):
# assumptions: reading from sensor and result in SCI notation
# emulate read time
gevent.sleep(self.NPLC * 1./self.PLC)
result = []
for i in self.FormElem:
if i == 'READ':
result.append('%EA' % (random.random()*(20E-3 - 2E-9) + 2E-9))
elif i == 'TIME':
ts = (time.time() - self.start_time) % 99999.99
result.append('%E' % ts)
return ','.join(result)
def meas(self):
return self.read()
|
lgpl-3.0
| -4,398,936,189,385,790,500 | 25.786667 | 83 | 0.592832 | false |
r-rathi/error-control-coding
|
perf/plot-pegd.py
|
1
|
1496
|
import numpy as np
import matplotlib.pyplot as plt
from errsim import *
def label(d, pe, pb, n):
if pb is None:
pb = pe
label = 'd={} pe={} n={} BSC'.format(d, pe, n)
else:
label = 'd={} pe={} n={} pb={}'.format(d, pe, n, pb)
return label
def plot(pe, fpath=None):
fig, ax = plt.subplots(nrows=1, ncols=1, figsize=plt.figaspect(1/2))
r = np.arange(8, 65)
pWL = jointpmf5(pe, pe, 128)
ax.plot(r, r_vs_pegd(pWL, 3, r) , 'g--', lw=2, label=label(3, pe, None, 128))
ax.plot(r, r_vs_pegd(pWL, 6, r) , 'g-', lw=2, label=label(6, pe, None, 128))
pWL = jointpmf5(pe, .1, 128)
ax.plot(r, r_vs_pegd(pWL, 3, r) , 'b--', lw=2, label=label(3, pe, .1, 128))
ax.plot(r, r_vs_pegd(pWL, 6, r) , 'b-', lw=2, label=label(6, pe, .1, 128))
pWL = jointpmf5(pe, .5, 128)
ax.plot(r, r_vs_pegd(pWL, 3, r) , 'r--', lw=2, label=label(3, pe, .5, 128))
ax.plot(r, r_vs_pegd(pWL, 6, r) , 'r-', lw=2, label=label(6, pe, .5, 128))
ax.set_yscale('log')
ax.set_xticks(r[::8])
ax.set_xlim(r[0], r[-1])
#ax.set_ylim(1e-30, 1e-1)
ax.set_xlabel('Burst error correction capability, $r$')
ax.set_ylabel('$P_{egd}$')
    ax.set_title('Probability of Exceeding Guaranteed Error Detection Capability')
ax.legend(loc='lower right')
ax.grid(True)
#plt.tight_layout()
if fpath:
fig.savefig(fpath)
plt.show()
plt.close('all')
plot(1e-15, 'plots/pegd-pe=1e15.png')
plot(1e-6, 'plots/pegd-pe=1e6.png')
|
mit
| -2,650,719,119,440,430,600 | 29.530612 | 82 | 0.561497 | false |
grimli/life-game
|
life_thread.py
|
1
|
3242
|
#!/usr/bin/python3
import random
from tkinter import *
import threading
import queue
class Evolver( threading.Thread ):
""" this thread calculate next status
The result is maid available to the shower trough a queue """
def __init__(self, queue):
threading.Thread.__init__(self)
self.queue = queue
def run(self):
while True:
print("time: %d\n" % board.time)
board.evolve()
self.queue.put(1)
class CellularAutoma:
def __init__(self, lenght=6, scale=10):
"""genera la tabella iniziale quadrata e di dimensione iniziale lenght"""
self.board = [[]]
self.scale = scale
line = []
random.seed()
self.time = 0
for a in range( lenght ):
line = []
for b in range( lenght ):
tmp = random.randint( -1, 1 )
if tmp > 0:
line.append( 1 )
else:
line.append( 0 )
self.board.append( line )
self.board.remove([])
#init GUI
self.master = Tk()
self.master.title('life game')
self.w = Canvas(self.master, width=lenght*self.scale, height=lenght*self.scale)
self.w.pack()
def evolve( self ):
"""esegue lo step di evoluzione del gioco life su una tabella sferica"""
rows = len(self.board)
columns = len(self.board[0])
board2 = [[0 for j in range(rows)] for i in range(columns)]
for i in range( 0, rows ):
for j in range( 0, columns ):
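                # Sum the 3x3 neighbourhood (the cell itself included) with modulo indexing so the board wraps at the edges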
totale = self.board[(i-1)%(rows)][(j-1)%(columns)]+self.board[(i-1)%(rows)][j%(columns)]+self.board[(i-1)%(rows)][(j+1)%(columns)]+self.board[i%(rows)][(j-1)%(columns)]+self.board[i%(rows)][j%(columns)]+self.board[i%(rows)][(j+1)%(columns)]+self.board[(i+1)%(rows)][(j-1)%(columns)]+self.board[(i+1)%(rows)][j%(columns)]+self.board[(i+1)%(rows)][(j+1)%(columns)]
if self.board[i][j] == 0:
if totale == 3:
board2[i][j]=1
else:
board2[i][j]=0
if self.board[i][j] == 1:
if totale <= 2:
board2[i][j]=0
elif totale <= 4:
board2[i][j]=1
else:
board2[i][j]=0
self.board = board2
self.time = self.time + 1
def show( self ):
"""Gives a graphical representation of the data"""
self.w.delete(ALL)
for i,v in enumerate(self.board):
for j,w in enumerate( self.board[i] ):
if (self.board[i][j] == 0):
self.w.create_rectangle(i*self.scale, j*self.scale, i*self.scale+self.scale, j*self.scale+self.scale, fill="blue")
else:
self.w.create_rectangle(i*self.scale, j*self.scale, i*self.scale+self.scale, j*self.scale+self.scale, fill="yellow")
if __name__ == '__main__':
dim = input( "Inserisci la dimensione della board: ")
board = CellularAutoma( lenght=int(dim), scale=5)
queue = queue.Queue( maxsize=1 )
t1 = Evolver(queue)
t1.start()
    # Tkinter cannot be executed on a separate thread
while True:
flag = queue.get()
board2 = board
queue.task_done()
board2.show()
board.master.update()
|
gpl-3.0
| -5,946,927,809,734,057,000 | 32.42268 | 374 | 0.541641 | false |
RamonGuiuGou/l10n-spain
|
l10n_es_account_balance_report/__openerp__.py
|
1
|
1064
|
# -*- coding: utf-8 -*-
# Copyright 2004-2011 Pexego Sistemas Informáticos
# Copyright 2013 Zikzakmedia
# Copyright 2014 Juanjo Algaz
# Copyright 2014 Joaquín Gutierrez <joaquing.pedrosa@gmail.com>
# Copyright 2014-2016 Tecnativa - Pedro M. Baeza
# Copyright 2016 Tecnativa - Vicent Cubells
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl-3.0).
{
"name": "Informes de cuentas anuales españoles",
"version": "9.0.1.0.0",
"author": "Pexego, "
"Tecnativa,"
"Zikzakmedia,"
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"website": "http://www.pexego.es",
"category": "Localisation/Accounting",
"depends": [
'l10n_es',
'account_balance_reporting',
],
"data": [
'data/balance_pymes.xml',
'data/pyg_pymes.xml',
'data/balance_abreviado.xml',
'data/pyg_abreviado.xml',
'data/balance_normal.xml',
'data/pyg_normal.xml',
'data/estado_ingresos_gastos_normal.xml',
],
'installable': True,
}
|
agpl-3.0
| -6,034,689,401,956,224,000 | 30.205882 | 67 | 0.606975 | false |