| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5–92 | stringlengths 4–232 | stringclasses 19 values | stringlengths 4–7 | stringlengths 721–1.04M | stringclasses 15 values | int64 -9,223,277,421,539,062,000 – 9,223,102,107B | float64 6.51–99.9 | int64 15–997 | float64 0.25–0.97 | bool (1 class) |
insiderr/insiderr-app | app/widgets/items/comment.py | copies: 1 | size: 3202 |

from .placeholder import Placeholder
from .template import TemplateItem, ItemCallback
from widgets.layoutint import GridLayoutInt
from kivy.properties import StringProperty, ObjectProperty, NumericProperty, AliasProperty, OptionProperty
from utilities.formatting import get_formatted_when
from kivy.logger import Logger
class Comment(Placeholder):
key = None
theme = 'unknown'
icon = 'unknown'
time = None
upvote_count = 0
downvote_count = 0
def __init__(self, data, factor, target, **kwargs):
self.key = data.get('key', None)
self._take_essentials(data)
super(Comment, self).__init__(data, factor, target, **kwargs)
def _take_essentials(self, data):
if not self.key and data:
self.key = data.get('key', None)
if data:
self.time = data.get('time', None)
self.theme = data.get('theme', 'unknown')
self.icon = data.get('icon', 'unknown')
self.upvote_count = data.get('upvote_count', 0)
self.downvote_count = data.get('downvote_count', 0)
def assign(self, widget, scrollview_container=None):
self.key = widget.key
self.theme = widget.theme
self.icon = widget.icon
self.time = widget.time
self.upvote_count = widget.upvote_count
self.downvote_count = widget.downvote_count
super(Comment, self).assign(widget, scrollview_container)
def update_widget(self, data=None, ignore_old_data=False, **kwargs):
ret = super(Comment, self).update_widget(data, ignore_old_data, **kwargs)
self._take_essentials(self.data)
return ret
def update_widget_data(self, data, retries=0, ignore_old_data=False):
ret = super(Comment, self).update_widget_data(data, retries, ignore_old_data)
self._take_essentials(self.data)
return ret
class CommentTemplate(TemplateItem, GridLayoutInt):
icon_widget = ObjectProperty()
icon = StringProperty()
icon_color = StringProperty('FFFFFFFF')
content = StringProperty()
role = StringProperty()
role_text = StringProperty()
time = NumericProperty()
theme = StringProperty()
upvote_count = NumericProperty(0)
downvote_count = NumericProperty(0)
attitude = OptionProperty('none', options=('like', 'dislike', 'none'))
item_like = ObjectProperty()
item_dislike = ObjectProperty()
_hotspots = None
def get_formatted_time(self):
return get_formatted_when(self.time, absolute_time=False)
formatted_time = AliasProperty(get_formatted_time, None, bind=('time',))
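    # AliasProperty with bind=('time',) makes formatted_time a derived,
    # read-only property: Kivy re-evaluates it whenever `time` changes.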
def __init__(self, **kwargs):
super(CommentTemplate, self).__init__(**kwargs)
def get_hotspots(self):
if not self._hotspots:
self._hotspots = self.collect_hotspots(ItemCallback)
return self._hotspots
def set_load_callback(self, cb, *args):
return False
def dispose(self):
self.time = 0
self.icon = ''
self.content = ''
self.role = ''
self.theme = ''
self.upvote_count = 0
self.downvote_count = 0
self.attitude = 'none'
super(CommentTemplate, self).dispose()
| license: gpl-3.0 | hash: -1,765,818,321,988,416,800 | line_mean: 32.705263 | line_max: 106 | alpha_frac: 0.639913 | autogenerated: false |

shobhitmishra/CodingProblems | LeetCode/Session3/SerializeDeserializeBST.py | copies: 1 | size: 1582 |

import sys
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
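# Strategy of the codec below: serialize() emits a preorder traversal and
# reverses it so deserialize() can pop() the next preorder value from the end
# of the list in O(1); the BST bounds (leftLimit/rightLimit) tell each
# recursive call where its subtree ends, so no null markers are needed.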
class Codec:
def serialize(self, root):
result = []
self.serializeHelper(root, result)
result.reverse()
return ",".join(result)
def serializeHelper(self, root, result):
if root:
result.append(str(root.val))
self.serializeHelper(root.left, result)
self.serializeHelper(root.right, result)
def deserialize(self, data):
if not data:
return None
dataq = [int(num) for num in data.split(",")]
return self.deserializeHelper(dataq, -sys.maxsize, sys.maxsize)
def deserializeHelper(self, data, leftLimit, rightLimit):
if not data:
return None
if data[-1] < leftLimit or data[-1] > rightLimit:
return None
node = TreeNode(data.pop())
node.left = self.deserializeHelper(data, leftLimit, node.val)
node.right = self.deserializeHelper(data, node.val, rightLimit)
return node
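# Ad-hoc demo: a sample BST is built below, but the round trip is exercised
# with an empty tree (serialize(None) yields "" and deserialize("") is None).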
root = TreeNode(10)
root.left = TreeNode(8)
root.left.left = TreeNode(7)
root.left.right = TreeNode(9)
root.right = TreeNode(12)
root.right.left = TreeNode(11)
root.right.right = TreeNode(15)
root.right.right.right = TreeNode(17)
root.right.right.right.right = TreeNode(18)
root.right.right.left = TreeNode(13)
root.right.right.left.right = TreeNode(14)
ob = Codec()
data = ob.serialize(None)
print(data)
node = ob.deserialize(data)
print(node)
| license: mit | hash: 1,336,546,976,220,845,000 | line_mean: 25.383333 | line_max: 71 | alpha_frac: 0.616941 | autogenerated: false |

agoose77/hivesystem | manual/movingpanda/panda-11d.py | copies: 1 | size: 6687 |

import dragonfly
import dragonfly.pandahive
import bee
from bee import connect
import math, functools
from panda3d.core import NodePath
import dragonfly.scene.unbound, dragonfly.scene.bound
import dragonfly.std
import dragonfly.io
import dragonfly.time
import dragonfly.event
import dragonfly.sys
import dragonfly.canvas
import Spyder
# ## random matrix generator
from random import random
def random_matrix_generator():
while 1:
a = Spyder.AxisSystem()
a.rotateZ(360 * random())
a.origin = Spyder.Coordinate(15 * random() - 7.5, 15 * random() - 7.5, 0)
yield dragonfly.scene.matrix(a, "AxisSystem")
def id_generator():
n = 0
while 1:
n += 1
yield "spawnedpanda" + str(n)
from dragonfly.canvas import box2d
from bee.mstr import mstr
class parameters: pass
class myscene(dragonfly.pandahive.spyderframe):
a = Spyder.AxisSystem()
a *= 0.25
a.origin += (-8, 42, 0)
env = Spyder.Model3D("models/environment", "egg", a)
a = Spyder.AxisSystem()
a *= 0.005
mypanda = Spyder.Actor3D("models/panda-model", "egg", [("walk", "models/panda-walk4", "egg")], a,
entityname="mypanda")
a = Spyder.AxisSystem()
a *= 0.005
pandaclass = Spyder.ActorClass3D("models/panda-model", "egg", [("walk", "models/panda-walk4", "egg")], a,
actorclassname="pandaclass")
box = Spyder.Box2D(50, 470, 96, 96)
icon = Spyder.Icon("pandaicon.png", "pandaicon", box, transparency=True)
camcenter = Spyder.Entity3D(
"camcenter",
(
Spyder.NewMaterial("white", color=(255, 255, 255)),
Spyder.Block3D((1, 1, 1), material="white"),
)
)
del a, box
class pandawalkhive(bee.inithive):
animation = dragonfly.scene.bound.animation()
walk = dragonfly.std.variable("str")("walk")
connect(walk, animation.animation_name)
key_w = dragonfly.io.keyboardsensor_trigger("W")
connect(key_w, animation.loop)
key_s = dragonfly.io.keyboardsensor_trigger("S")
connect(key_s, animation.stop)
setPos = dragonfly.scene.bound.setPos()
setHpr = dragonfly.scene.bound.setHpr()
interval = dragonfly.time.interval_time(18)
connect(key_w, interval.start)
connect(key_s, interval.pause)
sequence = dragonfly.time.sequence(4)(8, 1, 8, 1)
connect(interval.value, sequence.inp)
ip1 = dragonfly.time.interpolation("Coordinate")((0, 0, 0), (0, -10, 0))
connect(sequence.outp1, ip1)
connect(ip1, setPos)
connect(key_w, ip1.start)
connect(key_s, ip1.stop)
ip2 = dragonfly.time.interpolation("Coordinate")((0, 0, 0), (180, 0, 0))
connect(sequence.outp2, ip2)
connect(ip2, setHpr)
connect(key_w, ip2.start)
connect(key_s, ip2.stop)
ip3 = dragonfly.time.interpolation("Coordinate")((0, -10, 0), (0, 0, 0))
connect(sequence.outp3, ip3)
connect(ip3, setPos)
connect(key_w, ip3.start)
connect(key_s, ip3.stop)
ip4 = dragonfly.time.interpolation("Coordinate")((180, 0, 0), (0, 0, 0))
connect(sequence.outp4, ip4)
connect(ip4, setHpr)
connect(key_w, ip4.start)
connect(key_s, ip4.stop)
connect(ip4.reach_end, interval.start)
from bee.staticbind import staticbind_baseclass
class pandawalkbind(staticbind_baseclass,
dragonfly.event.bind,
dragonfly.io.bind,
dragonfly.sys.bind,
dragonfly.scene.bind,
dragonfly.time.bind):
hive = pandawalkhive
class camerabindhive(bee.inithive):
interval = dragonfly.time.interval_time(30)
sequence = dragonfly.time.sequence(2)(1, 1)
connect(interval.value, sequence.inp)
startsensor = dragonfly.sys.startsensor()
ip1 = dragonfly.time.interpolation("Coordinate")((180, -20, 0), (360, -20, 0))
ip2 = dragonfly.time.interpolation("Coordinate")((0, -20, 0), (180, -20, 0))
connect(sequence.outp1, ip1.inp)
connect(sequence.outp2, ip2.inp)
connect(startsensor, interval.start)
connect(startsensor, ip1.start)
connect(ip1.reach_end, ip1.stop)
connect(ip1.reach_end, ip2.start)
connect(ip2.reach_end, ip2.stop)
connect(ip2.reach_end, ip1.start)
connect(ip2.reach_end, interval.start)
sethpr = dragonfly.scene.bound.setHpr()
connect(ip1, sethpr)
connect(ip2, sethpr)
class camerabind(staticbind_baseclass,
dragonfly.event.bind,
dragonfly.io.bind,
dragonfly.sys.bind,
dragonfly.scene.bind,
dragonfly.time.bind):
hive = camerabindhive
class myhive(dragonfly.pandahive.pandahive):
pandaname = "mypanda"
pandaname_ = bee.attribute("pandaname")
pandaclassname = "pandaclass"
pandaclassname_ = bee.attribute("pandaclassname")
canvas = dragonfly.pandahive.pandacanvas()
mousearea = dragonfly.canvas.mousearea()
raiser = bee.raiser()
connect("evexc", raiser)
z_pandawalk = pandawalkbind().worker()
pandaid = dragonfly.std.variable("id")(pandaname_)
connect(pandaid, z_pandawalk.bindname)
camerabind = camerabind().worker()
camcenter = dragonfly.std.variable("id")("camcenter")
connect(camcenter, camerabind.bindname)
startsensor = dragonfly.sys.startsensor()
cam = dragonfly.scene.get_camera()
camparent = dragonfly.scene.unbound.parent()
connect(cam, camparent.entityname)
connect(camcenter, camparent.entityparentname)
connect(startsensor, camparent)
cphide = dragonfly.scene.unbound.hide()
connect(camcenter, cphide)
connect(startsensor, cphide)
pandaspawn = dragonfly.scene.spawn_actor()
v_panda = dragonfly.std.variable("id")(pandaclassname_)
connect(v_panda, pandaspawn)
panda_id = dragonfly.std.generator("id", id_generator)()
random_matrix = dragonfly.std.generator(("object", "matrix"), random_matrix_generator)()
w_spawn = dragonfly.std.weaver(("id", ("object", "matrix")))()
connect(panda_id, w_spawn.inp1)
connect(random_matrix, w_spawn.inp2)
do_spawn = dragonfly.std.transistor(("id", ("object", "matrix")))()
connect(w_spawn, do_spawn)
connect(do_spawn, pandaspawn.spawn_matrix)
key_z = dragonfly.io.keyboardsensor_trigger("Z")
connect(key_z, do_spawn)
pandaicon_click = dragonfly.io.mouseareasensor("pandaicon")
connect(pandaicon_click, do_spawn)
myscene = myscene(
scene="scene",
canvas=canvas,
mousearea=mousearea,
)
wininit = bee.init("window")
wininit.camera.setPos(0, 45, 25)
wininit.camera.setHpr(180, -20, 0)
main = myhive().getinstance()
main.build("main")
main.place()
main.close()
main.init()
main.run()
| license: bsd-2-clause | hash: 210,151,778,070,107,100 | line_mean: 27.948052 | line_max: 109 | alpha_frac: 0.649469 | autogenerated: false |

Liuchang0812/COC_dashboard | src/app.py | copies: 1 | size: 1216 |

__author__ = 'liuchang'
from tornado.web import RequestHandler, Application
import tornado
from tornado import gen
from tornado.options import define
import tornado.options
import os
from core import clan
class IndexHandler(RequestHandler):
def get(self, *args, **kwargs):
return self.render('index.html')
@gen.coroutine
def post(self, *args, **kwargs):
        kw = self.get_argument("kw", None)
        res = yield clan.service.search(kw)
        self.render('index.html', clans=res)
class ClanApplication(Application):
def __init__(self):
handlers = [
(r'/', IndexHandler),
(r'/clan', IndexHandler)
]
settings = dict(
template_path = os.path.join(os.path.dirname(__file__), "templates"),
static_path = os.path.join(os.path.dirname(__file__), "static"),
debug = True,
title = 'Clan Of CCCCC'
)
super(ClanApplication, self).__init__(handlers, **settings)
def main():
tornado.options.parse_command_line()
app = ClanApplication()
app.listen(8088)
tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main()
| license: apache-2.0 | hash: -5,759,482,561,828,794,000 | line_mean: 25.456522 | line_max: 81 | alpha_frac: 0.603618 | autogenerated: false |

awacha/cct | cct/processing/mainwindow/projectdialog/projectdialog.py | copies: 1 | size: 3004 |

import os
import appdirs
from PyQt5 import QtWidgets, QtCore, QtGui
from .projectdialog_ui import Ui_Form
class ProjectDialog(QtWidgets.QWidget, Ui_Form):
projectSelected = QtCore.pyqtSignal(str)
def __init__(self, mainwindow: QtWidgets.QWidget):
self.mainwindow = mainwindow
super().__init__()
self.projectname = None
self.setupUi(self)
def setupUi(self, Form):
super().setupUi(Form)
self.updateRecentList()
self.recentsListWidget.itemActivated.connect(self.onRecentSelected)
self.newPushButton.clicked.connect(self.onNewProject)
self.openPushButton.clicked.connect(self.onOpenProject)
self.quitPushButton.clicked.connect(self.onQuit)
self.openSelectedPushButton.clicked.connect(self.onRecentSelected)
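        # Centre this dialog on the main window: compute the main window's
        # centre point, then offset by half of this dialog's own size.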
mainpos=self.mainwindow.pos()
cx=mainpos.x()+self.mainwindow.width()*0.5
cy=mainpos.y()+self.mainwindow.height()*0.5
self.adjustSize()
self.move(cx-self.width()*0.5, cy-self.height()*0.5)
def closeEvent(self, event: QtGui.QCloseEvent):
if not self.projectname:
self.onQuit()
event.accept()
def updateRecentList(self):
recentsfile = os.path.join(appdirs.user_config_dir('cpt', 'CREDO', roaming=True), 'projecthistory')
try:
with open(recentsfile, 'rt') as f:
for l in f:
l = l.strip()
if os.path.exists(l) and l.lower().endswith('.cpt'):
self.recentsListWidget.addItem(QtWidgets.QListWidgetItem(l))
else:
pass
except FileNotFoundError:
return
def onRecentSelected(self, item: QtWidgets.QListWidgetItem=None):
if not isinstance(item, QtWidgets.QListWidgetItem):
item = self.recentsListWidget.currentItem()
print(item)
if item is None:
return
self.projectname = item.text()
self.projectSelected.emit(item.text())
self.close()
def onNewProject(self):
filename, lastfilter = QtWidgets.QFileDialog.getSaveFileName(
self, 'Select a file name for the new project...', '',
'CPT projects (*.cpt);;All files (*)', 'CPT projects (*.cpt)'
)
if not filename:
return
if not filename.endswith('.cpt'):
filename=filename+'.cpt'
self.projectname = filename
self.projectSelected.emit(filename)
self.close()
def onOpenProject(self):
filename, lastfilter = QtWidgets.QFileDialog.getOpenFileName(
self, 'Open a processing project...', '',
'CPT projects (*.cpt);;All files (*)', 'CPT projects (*.cpt)'
)
if not filename:
return
self.projectname = filename
self.projectSelected.emit(filename)
self.close()
def onQuit(self):
self.mainwindow.close()
self.close()
| license: bsd-3-clause | hash: -4,091,614,986,864,855,600 | line_mean: 33.930233 | line_max: 107 | alpha_frac: 0.601198 | autogenerated: false |

crossgovernmentservices/sue-my-brother | tests/test_authentication.py | copies: 1 | size: 3591 |

import mock
import pytest
import time
from datetime import datetime
from flask import url_for
from mock import Mock, patch
from app.main.models import User, Suit
from app.main.views import authenticated_within
max_age = 50
mock_time_more_than_max_age = Mock()
mock_time_more_than_max_age.return_value = (
time.mktime(datetime(2011, 6, 21, 10, 10, 0).timetuple()))
mock_time_less_than_max_age = Mock()
mock_time_less_than_max_age.return_value = (
time.mktime(datetime(2011, 6, 21, 10, 0, max_age - 5).timetuple()))
mock_session = {
'iat': time.mktime(datetime(2011, 6, 21, 10, 0, 0).timetuple())}
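# 'iat' (issued-at) records when the session last authenticated. The two
# time.time() mocks above land 600 s and (max_age - 5) s after this instant,
# driving authenticated_within(max_age) to False and True respectively.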
mock_openid_config = Mock()
mock_openid_config.return_value = {
'authorization_endpoint':
'http://dex.example.com:5556/auth',
'discovery_url':
'http://dex.example.com:5556',
'client_id': None}
@pytest.yield_fixture
def mock_views_current_app_config():
with mock.patch("app.main.views.current_app") as current_app:
current_app.config.get.return_value = max_age
yield
@pytest.yield_fixture
def mock_notify():
with mock.patch("app.main.views.notify") as notify:
notify.send_email.return_value = ""
yield
@pytest.fixture
def test_suit(db_session):
suit = Suit(
plaintiff=User(
email='plaintiff@example.com', name='Plaintiff Test', active=True
),
defendant=User(
email='defendant@example.com', name='Defendant Test', active=True
)
)
db_session.add(suit)
db_session.commit()
return suit
@pytest.fixture
def post_accept_suit(client, test_suit):
return client.post(url_for('main.accept', suit=test_suit.id),
follow_redirects=False)
class WhenTimeSinceLastAuthenticatedIsMoreThanMaxAge(object):
@patch.dict('app.main.views.session', mock_session)
@patch('time.time', mock_time_more_than_max_age)
def it_returns_false(self, client):
assert authenticated_within(max_age) is False
class WhenTimeSinceLastAuthenticatedIsLessThanMaxAge(object):
@patch.dict('app.main.views.session', mock_session)
@patch('time.time', mock_time_less_than_max_age)
def it_returns_true(self, client):
assert authenticated_within(max_age) is True
class WhenAcceptingASuitWithinAuthenticatedTime(object):
@patch('time.time', mock_time_less_than_max_age)
def it_redirects_to_admin(self, test_admin_user, client, test_suit,
mock_notify, mock_views_current_app_config):
with client.session_transaction() as session:
session['user_id'] = test_admin_user.id
session['_fresh'] = False
session['iat'] = mock_session['iat']
response = client.post(url_for('main.accept', suit=test_suit.id))
assert response.status_code == 302
assert "/admin" in response.headers["Location"]
@pytest.mark.xfail(reason='reauthentication not yet implemented')
class WhenAcceptingASuitOutsideAuthenticatedTime(object):
@patch('time.time', mock_time_more_than_max_age)
def it_redirects_to_identity_broker(
self, test_admin_user, client, test_suit,
mock_views_current_app_config):
with client.session_transaction() as session:
session['user_id'] = test_admin_user.id
session['_fresh'] = False
session['iat'] = mock_session['iat']
response = client.post(url_for('main.accept', suit=test_suit.id))
assert response.status_code == 302
assert "prompt=login" in response.headers["Location"]
| license: mit | hash: -7,666,764,198,254,559,000 | line_mean: 28.677686 | line_max: 77 | alpha_frac: 0.661376 | autogenerated: false |

osamja/local_sockets | app/main.py | copies: 1 | size: 4410 |

from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from Tkinter import *
from tkFileDialog import askopenfilenames
from ip import get_ip_addr
import time
from threading import Thread
""" Display landing webpage at IP_ADDR:PORT and allow download of PATH file. """
class MyHandler(BaseHTTPRequestHandler):
def do_HEAD(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
    def do_GET(s):
        """Respond to a GET request."""
print("Path: ", filepaths)
for file in filepaths:
if (s.path == file): # retrieve uploaded file
print("Sending download...")
s.send_response(200)
s.send_header("Content-type", "multipart/form-data")
s.end_headers()
file = open(file, 'rb')
l = file.read(1024)
while(l):
s.wfile.write(l)
l = file.read(1024)
file.close()
return
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<h3>Click on any of the files below to download them. </h3>")
s.wfile.write("<ul>")
for file in filepaths:
s.wfile.write("<li><a href='{0}'>{0}</a></li>".format(file))
s.wfile.write("</ul>")
""" The main GUI app. Initializes GUI window and button events. """
class App:
def __init__(self, master):
self.frame = Frame(master, width=5000, height=5000)
self.serve_counter = 0
self.ip_addr = ip_addr
self.port = 8080
self.url = "Others on the same WiFi can type " + str(self.ip_addr) \
+ ':' + str(self.port) + " into their browser to download the uploaded file. "
self.url_label = Label(self.frame, text="%s" % self.url)
self.filenames = 'N/A'
self.frame.pack(fill=X, padx=100, pady=100,)
self.upload = Button(master, text="Choose file", command=self.chooseFile).pack()
self.serve = Button(master, text="Upload file", command=self.threadServer).pack()
self.servedFilenames = Label(self.frame, text="")
self.servedFilenames.pack()
self.t1 = None # server thread
""" Update the GUI to display the file to be uploaded. """
def showUploadedFile(self):
self.servedFilenames.configure(text="%s" % str(self.filenames))
""" Use another thread to serve files since the GUI runs on main thread. """
def threadServer(self):
print("Serve Counter: ", self.serve_counter)
if (self.serve_counter == 0):
self.t1 = Thread(target=self.uploadFile)
self.t1.start()
self.serve_counter += 1
else:
self.serve_counter += 1
print("Serve counter: ", self.serve_counter)
self.t1.run()
""" Upload PATH to IP_ADDR at PORT to the built-in http server. """
def uploadFile(self):
HOST_NAME, PORT_NUMBER = self.ip_addr, self.port
self.httpd = HTTPServer((HOST_NAME, PORT_NUMBER), MyHandler)
self.httpd.allow_reuse_address = True
self.url_label.pack()
print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
try:
self.httpd.serve_forever()
except KeyboardInterrupt:
pass
self.httpd.server_close()
print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
""" Set PATH to chosen uploaded destination. """
def chooseFile(self):
while True:
uploadedfilenames = askopenfilenames(multiple=True)
if uploadedfilenames == '':
return
uploadedfiles = root.tk.splitlist(uploadedfilenames)
self.filenames = uploadedfilenames
self.showUploadedFile()
global filepaths
filepaths = uploadedfiles
return
""" User closed window. Shutdown GUI and server. """
def on_closing(self):
if (self.serve_counter > 0):
print("Closed server")
self.httpd.server_close()
root.destroy()
filepaths = None # path for each file to be uploaded
ip_addr = get_ip_addr()
root = Tk()
root.wm_title("Local File Share")
app = App(root)
root.protocol("WM_DELETE_WINDOW", app.on_closing)
root.mainloop()
| license: mit | hash: 5,952,390,639,927,118,000 | line_mean: 38.026549 | line_max: 91 | alpha_frac: 0.582313 | autogenerated: false |

josenavas/labman | labman/db/equipment.py | copies: 1 | size: 6002 |

# ----------------------------------------------------------------------------
# Copyright (c) 2017-, labman development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from . import base
from . import sql_connection
from . import exceptions
class Equipment(base.LabmanObject):
"""Equipment object
Attributes
----------
id
external_id
equipment_type
notes
"""
_table = 'qiita.equipment'
_id_column = 'equipment_id'
@staticmethod
def list_equipment(equipment_type=None):
"""Generates a list of equipment
Parameters
----------
equipment_type: str, optional
If provided, limit the equipment list to the given type
Returns
-------
list of dicts
The list of equipment information with the structure:
[{'equipment_id': int, 'external_id': string}]
"""
with sql_connection.TRN as TRN:
sql_where = ('WHERE description = %s'
if equipment_type is not None else '')
sql = """SELECT equipment_id, external_id
FROM qiita.equipment
JOIN qiita.equipment_type USING (equipment_type_id)
{}
ORDER BY equipment_id""".format(sql_where)
            # Only bind the parameter when the WHERE clause (and its
            # placeholder) is actually present in the query.
            TRN.add(sql, [equipment_type] if equipment_type is not None else None)
return [dict(r) for r in TRN.execute_fetchindex()]
@staticmethod
def list_equipment_types():
"""Generates a list of equipment types
Returns
-------
list of str
The list of equipment type strings
"""
with sql_connection.TRN as TRN:
sql = """SELECT description
FROM qiita.equipment_type
ORDER BY equipment_type_id"""
TRN.add(sql)
return TRN.execute_fetchflatten()
@classmethod
def create_type(cls, description):
"""Creates a new equipment type in the system
Parameters
----------
description : str
The description of the new type
Raises
------
LabmanDuplicateError
If the given type already exists
"""
with sql_connection.TRN as TRN:
# Check if the equipment type already exists
sql = """SELECT EXISTS(SELECT 1 FROM qiita.equipment_type
WHERE description = %s)"""
TRN.add(sql, [description])
if TRN.execute_fetchlast():
raise exceptions.LabmanDuplicateError(
'Equipment type', [('description', description)])
# Proceed to create the new type
sql = "INSERT INTO qiita.equipment_type (description) VALUES (%s)"
TRN.add(sql, [description])
TRN.execute()
@classmethod
def create(cls, equipment_type, external_id, notes=None):
"""Creates a new equipment item in the system
Parameters
----------
equipment_type : str
The equipment type
external_id : str
The equipment's external id
notes : str, optional
Equipments notes
Returns
-------
Equipment
The newly created equipment
Raises
------
LabmanUnknownIdError
If the equipment_type is not recognized
LabmanDuplicateError
If an equipment with the given external id already exists
"""
with sql_connection.TRN as TRN:
# Check if the equipment type exists by getting his id
sql = """SELECT equipment_type_id
FROM qiita.equipment_type
WHERE description = %s"""
TRN.add(sql, [equipment_type])
res = TRN.execute_fetchindex()
if res:
# Fetchindex returns a list of results. If the previous call
# didn't return anything the list would be empty, and accessing
# to this values would've generated and IndexError. By the DB
# constraints, the above query can at most return one result
# with a single value, hence the [0][0]
equipment_type_id = res[0][0]
else:
raise exceptions.LabmanUnknownIdError(
'Equipment type', equipment_type)
# Check if there is already an equipment with the external id
if cls._attr_exists('external_id', external_id):
raise exceptions.LabmanDuplicateError(
'Equipment', [('external id', external_id)])
# Proceed to create the new quipment
sql = """INSERT INTO qiita.equipment
(external_id, equipment_type_id, notes)
VALUES (%s, %s, %s)
RETURNING equipment_id"""
TRN.add(sql, [external_id, equipment_type_id, notes])
return cls(TRN.execute_fetchlast())
@property
def external_id(self):
"""The equipment's external identifier"""
return self._get_attr('external_id')
@property
def equipment_type(self):
"""The type of the equipment"""
with sql_connection.TRN as TRN:
sql = """SELECT description
FROM qiita.equipment_type
JOIN qiita.equipment USING (equipment_type_id)
WHERE equipment_id = %s"""
TRN.add(sql, [self.id])
return TRN.execute_fetchlast()
@property
def notes(self):
"""The equipment notes"""
return self._get_attr('notes')
@notes.setter
def notes(self, value):
"""Set the new value for the notes attribute"""
self._set_attr('notes', value)
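# A minimal usage sketch (assumes a configured qiita DB connection; the type
# and ids below are illustrative, not part of the schema):
#
#     Equipment.create_type('miseq')
#     eq = Equipment.create('miseq', 'KL-MiSeq-01', notes='room 2')
#     print(eq.external_id, eq.equipment_type)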
| license: bsd-3-clause | hash: -6,006,356,591,627,091,000 | line_mean: 32.909605 | line_max: 79 | alpha_frac: 0.53099 | autogenerated: false |

irl/gajim | src/session.py | copies: 1 | size: 21949 |

# -*- coding:utf-8 -*-
## src/session.py
##
## Copyright (C) 2008-2014 Yann Leboulanger <asterix AT lagaule.org>
## Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
## Jonathan Schleifer <js-gajim AT webkeks.org>
## Stephan Erb <steve-e AT h3c.de>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##
from common import helpers
from common import exceptions
from common import gajim
from common import stanza_session
from common import contacts
from common import ged
from common.connection_handlers_events import ChatstateReceivedEvent, \
InformationEvent
import message_control
import notify
import dialogs
import negotiation
class ChatControlSession(stanza_session.EncryptedStanzaSession):
def __init__(self, conn, jid, thread_id, type_='chat'):
stanza_session.EncryptedStanzaSession.__init__(self, conn, jid, thread_id,
type_='chat')
gajim.ged.register_event_handler('decrypted-message-received', ged.GUI1,
self._nec_decrypted_message_received)
self.control = None
def detach_from_control(self):
if self.control:
self.control.set_session(None)
def acknowledge_termination(self):
self.detach_from_control()
stanza_session.EncryptedStanzaSession.acknowledge_termination(self)
def terminate(self, send_termination = True):
stanza_session.EncryptedStanzaSession.terminate(self, send_termination)
self.detach_from_control()
def _nec_decrypted_message_received(self, obj):
"""
Dispatch a received <message> stanza
"""
if obj.session != self:
return
if self.resource != obj.resource:
self.resource = obj.resource
if self.control and self.control.resource:
self.control.change_resource(self.resource)
if obj.mtype == 'chat':
if not obj.stanza.getTag('body') and obj.chatstate is None:
return
log_type = 'chat_msg'
else:
log_type = 'single_msg'
end = '_recv'
if obj.forwarded and obj.sent:
end = '_sent'
log_type += end
if self.is_loggable() and obj.msgtxt:
try:
if obj.xhtml and gajim.config.get('log_xhtml_messages'):
msg_to_log = obj.xhtml
else:
msg_to_log = obj.msgtxt
obj.msg_id = gajim.logger.write(log_type, obj.fjid,
msg_to_log, tim=obj.timestamp, subject=obj.subject)
except exceptions.PysqliteOperationalError as e:
gajim.nec.push_incoming_event(InformationEvent(None,
conn=self.conn, level='error', pri_txt=_('Disk Write Error'),
sec_txt=str(e)))
except exceptions.DatabaseMalformed:
pritext = _('Database Error')
sectext = _('The database file (%s) cannot be read. Try to '
'repair it (see http://trac.gajim.org/wiki/DatabaseBackup) '
'or remove it (all history will be lost).') % \
gajim.logger.LOG_DB_PATH
gajim.nec.push_incoming_event(InformationEvent(None,
conn=self.conn, level='error', pri_txt=pritext,
sec_txt=sectext))
treat_as = gajim.config.get('treat_incoming_messages')
if treat_as:
obj.mtype = treat_as
pm = False
if obj.gc_control and obj.resource:
# It's a Private message
pm = True
obj.mtype = 'pm'
# Handle chat states
contact = gajim.contacts.get_contact(self.conn.name, obj.jid,
obj.resource)
if contact and (not obj.forwarded or not obj.sent):
if self.control and self.control.type_id == \
message_control.TYPE_CHAT:
if obj.chatstate is not None:
# other peer sent us reply, so he supports jep85 or jep22
contact.chatstate = obj.chatstate
if contact.our_chatstate == 'ask': # we were jep85 disco?
contact.our_chatstate = 'active' # no more
gajim.nec.push_incoming_event(ChatstateReceivedEvent(None,
conn=obj.conn, msg_obj=obj))
elif contact.chatstate != 'active':
# got no valid jep85 answer, peer does not support it
contact.chatstate = False
elif obj.chatstate == 'active':
# Brand new message, incoming.
contact.our_chatstate = obj.chatstate
contact.chatstate = obj.chatstate
if obj.msg_id: # Do not overwrite an existing msg_id with None
contact.msg_id = obj.msg_id
# THIS MUST BE AFTER chatstates handling
# AND BEFORE playsound (else we ear sounding on chatstates!)
if not obj.msgtxt: # empty message text
return True
if gajim.config.get_per('accounts', self.conn.name,
'ignore_unknown_contacts') and not gajim.contacts.get_contacts(
self.conn.name, obj.jid) and not pm:
return True
highest_contact = gajim.contacts.get_contact_with_highest_priority(
self.conn.name, obj.jid)
# does this resource have the highest priority of any available?
is_highest = not highest_contact or not highest_contact.resource or \
obj.resource == highest_contact.resource or highest_contact.show ==\
'offline'
if not self.control:
ctrl = gajim.interface.msg_win_mgr.search_control(obj.jid,
obj.conn.name, obj.resource)
if ctrl:
self.control = ctrl
self.control.set_session(self)
self.control.contact = contact
if not pm:
self.roster_message2(obj)
if gajim.interface.remote_ctrl:
gajim.interface.remote_ctrl.raise_signal('NewMessage', (
self.conn.name, [obj.fjid, obj.msgtxt, obj.timestamp,
obj.encrypted, obj.mtype, obj.subject, obj.chatstate,
obj.msg_id, obj.user_nick, obj.xhtml, obj.form_node]))
def roster_message2(self, obj):
"""
Display the message or show notification in the roster
"""
contact = None
jid = obj.jid
resource = obj.resource
fjid = jid
# Try to catch the contact with correct resource
if resource:
fjid = jid + '/' + resource
contact = gajim.contacts.get_contact(obj.conn.name, jid, resource)
highest_contact = gajim.contacts.get_contact_with_highest_priority(
obj.conn.name, jid)
if not contact:
# If there is another resource, it may be a message from an
# invisible resource
lcontact = gajim.contacts.get_contacts(obj.conn.name, jid)
if (len(lcontact) > 1 or (lcontact and lcontact[0].resource and \
lcontact[0].show != 'offline')) and jid.find('@') > 0:
contact = gajim.contacts.copy_contact(highest_contact)
contact.resource = resource
contact.priority = 0
contact.show = 'offline'
contact.status = ''
gajim.contacts.add_contact(obj.conn.name, contact)
else:
# Default to highest prio
fjid = jid
contact = highest_contact
if not contact:
# contact is not in roster
contact = gajim.interface.roster.add_to_not_in_the_roster(
obj.conn.name, jid, obj.user_nick)
if not self.control:
ctrl = gajim.interface.msg_win_mgr.search_control(obj.jid,
obj.conn.name, obj.resource)
if ctrl:
self.control = ctrl
self.control.set_session(self)
else:
fjid = jid
obj.popup = helpers.allow_popup_window(self.conn.name)
type_ = 'chat'
event_type = 'message_received'
if obj.mtype == 'normal':
type_ = 'normal'
event_type = 'single_message_received'
if self.control and obj.mtype != 'normal':
obj.show_in_roster = False
obj.show_in_systray = False
else:
obj.show_in_roster = notify.get_show_in_roster(event_type,
self.conn.name, contact, self)
obj.show_in_systray = notify.get_show_in_systray(event_type,
self.conn.name, contact)
if (not self.control and obj.mtype != 'normal') or \
(obj.mtype == 'normal' and not obj.popup):
event = gajim.events.create_event(type_, (obj.msgtxt, obj.subject,
obj.mtype, obj.timestamp, obj.encrypted, obj.resource,
obj.msg_id, obj.xhtml, self, obj.form_node, obj.displaymarking,
obj.forwarded and obj.sent),
show_in_roster=obj.show_in_roster,
show_in_systray=obj.show_in_systray)
gajim.events.add_event(self.conn.name, fjid, event)
def roster_message(self, jid, msg, tim, encrypted=False, msg_type='',
subject=None, resource='', msg_id=None, user_nick='', xhtml=None,
form_node=None, displaymarking=None):
"""
Display the message or show notification in the roster
"""
contact = None
fjid = jid
# Try to catch the contact with correct resource
if resource:
fjid = jid + '/' + resource
contact = gajim.contacts.get_contact(self.conn.name, jid, resource)
highest_contact = gajim.contacts.get_contact_with_highest_priority(
self.conn.name, jid)
if not contact:
# If there is another resource, it may be a message from an invisible
# resource
lcontact = gajim.contacts.get_contacts(self.conn.name, jid)
if (len(lcontact) > 1 or (lcontact and lcontact[0].resource and \
lcontact[0].show != 'offline')) and jid.find('@') > 0:
contact = gajim.contacts.copy_contact(highest_contact)
contact.resource = resource
if resource:
fjid = jid + '/' + resource
contact.priority = 0
contact.show = 'offline'
contact.status = ''
gajim.contacts.add_contact(self.conn.name, contact)
else:
# Default to highest prio
fjid = jid
contact = highest_contact
if not contact:
# contact is not in roster
contact = gajim.interface.roster.add_to_not_in_the_roster(
self.conn.name, jid, user_nick)
if not self.control:
ctrl = gajim.interface.msg_win_mgr.get_control(fjid, self.conn.name)
if ctrl:
self.control = ctrl
self.control.set_session(self)
else:
fjid = jid
# Do we have a queue?
no_queue = len(gajim.events.get_events(self.conn.name, fjid)) == 0
popup = helpers.allow_popup_window(self.conn.name)
if msg_type == 'normal' and popup: # it's single message to be autopopuped
dialogs.SingleMessageWindow(self.conn.name, contact.jid,
action='receive', from_whom=jid, subject=subject, message=msg,
resource=resource, session=self, form_node=form_node)
return
# We print if window is opened and it's not a single message
if self.control and msg_type != 'normal':
typ = ''
if msg_type == 'error':
typ = 'error'
self.control.print_conversation(msg, typ, tim=tim, encrypted=encrypted,
subject=subject, xhtml=xhtml, displaymarking=displaymarking)
if msg_id:
gajim.logger.set_read_messages([msg_id])
return
# We save it in a queue
type_ = 'chat'
event_type = 'message_received'
if msg_type == 'normal':
type_ = 'normal'
event_type = 'single_message_received'
show_in_roster = notify.get_show_in_roster(event_type, self.conn.name,
contact, self)
show_in_systray = notify.get_show_in_systray(event_type, self.conn.name,
contact)
event = gajim.events.create_event(type_, (msg, subject, msg_type, tim,
encrypted, resource, msg_id, xhtml, self, form_node, displaymarking,
False), show_in_roster=show_in_roster,
show_in_systray=show_in_systray)
gajim.events.add_event(self.conn.name, fjid, event)
if popup:
if not self.control:
self.control = gajim.interface.new_chat(contact,
self.conn.name, session=self)
if len(gajim.events.get_events(self.conn.name, fjid)):
self.control.read_queue()
else:
if no_queue: # We didn't have a queue: we change icons
gajim.interface.roster.draw_contact(jid, self.conn.name)
gajim.interface.roster.show_title() # we show the * or [n]
# Select the big brother contact in roster, it's visible because it has
# events.
family = gajim.contacts.get_metacontacts_family(self.conn.name, jid)
if family:
nearby_family, bb_jid, bb_account = \
gajim.contacts.get_nearby_family_and_big_brother(family,
self.conn.name)
else:
bb_jid, bb_account = jid, self.conn.name
gajim.interface.roster.select_contact(bb_jid, bb_account)
# ---- ESessions stuff ---
def handle_negotiation(self, form):
if form.getField('accept') and not form['accept'] in ('1', 'true'):
self.cancelled_negotiation()
return
# encrypted session states. these are described in stanza_session.py
try:
if form.getType() == 'form' and 'security' in form.asDict():
security_options = [x[1] for x in form.getField('security').\
getOptions()]
if security_options == ['none']:
self.respond_archiving(form)
else:
# bob responds
# we don't support 3-message negotiation as the responder
if 'dhkeys' in form.asDict():
self.fail_bad_negotiation('3 message negotiation not '
'supported when responding', ('dhkeys',))
return
negotiated, not_acceptable, ask_user = \
self.verify_options_bob(form)
if ask_user:
def accept_nondefault_options(is_checked):
self.dialog.destroy()
negotiated.update(ask_user)
self.respond_e2e_bob(form, negotiated,
not_acceptable)
def reject_nondefault_options():
self.dialog.destroy()
for key in ask_user.keys():
not_acceptable.append(key)
self.respond_e2e_bob(form, negotiated,
not_acceptable)
self.dialog = dialogs.YesNoDialog(_('Confirm these '
'session options'),
_('The remote client wants to negotiate a session '
'with these features:\n\n%s\n\nAre these options '
'acceptable?''') % (
negotiation.describe_features(ask_user)),
on_response_yes=accept_nondefault_options,
on_response_no=reject_nondefault_options,
transient_for=self.control.parent_win.window)
else:
self.respond_e2e_bob(form, negotiated, not_acceptable)
return
elif self.status == 'requested-archiving' and form.getType() == \
'submit':
try:
self.archiving_accepted(form)
except exceptions.NegotiationError as details:
self.fail_bad_negotiation(details)
return
# alice accepts
elif self.status == 'requested-e2e' and form.getType() == 'submit':
negotiated, not_acceptable, ask_user = self.verify_options_alice(
form)
if ask_user:
def accept_nondefault_options(is_checked):
if dialog:
dialog.destroy()
if is_checked:
allow_no_log_for = gajim.config.get_per(
'accounts', self.conn.name,
'allow_no_log_for').split()
jid = str(self.jid)
if jid not in allow_no_log_for:
allow_no_log_for.append(jid)
gajim.config.set_per('accounts', self.conn.name,
'allow_no_log_for', ' '.join(allow_no_log_for))
negotiated.update(ask_user)
try:
self.accept_e2e_alice(form, negotiated)
except exceptions.NegotiationError as details:
self.fail_bad_negotiation(details)
def reject_nondefault_options():
self.reject_negotiation()
dialog.destroy()
allow_no_log_for = gajim.config.get_per('accounts',
self.conn.name, 'allow_no_log_for').split()
if str(self.jid) in allow_no_log_for:
dialog = None
accept_nondefault_options(False)
else:
dialog = dialogs.YesNoDialog(_('Confirm these session '
'options'),
_('The remote client selected these options:\n\n%s'
'\n\nContinue with the session?') % (
negotiation.describe_features(ask_user)),
_('Always accept for this contact'),
on_response_yes = accept_nondefault_options,
on_response_no = reject_nondefault_options,
transient_for=self.control.parent_win.window)
else:
try:
self.accept_e2e_alice(form, negotiated)
except exceptions.NegotiationError as details:
self.fail_bad_negotiation(details)
return
elif self.status == 'responded-archiving' and form.getType() == \
'result':
try:
self.we_accept_archiving(form)
except exceptions.NegotiationError as details:
self.fail_bad_negotiation(details)
return
elif self.status == 'responded-e2e' and form.getType() == 'result':
try:
self.accept_e2e_bob(form)
except exceptions.NegotiationError as details:
self.fail_bad_negotiation(details)
return
elif self.status == 'identified-alice' and form.getType() == 'result':
try:
self.final_steps_alice(form)
except exceptions.NegotiationError as details:
self.fail_bad_negotiation(details)
return
except exceptions.Cancelled:
# user cancelled the negotiation
self.reject_negotiation()
return
if form.getField('terminate') and\
form.getField('terminate').getValue() in ('1', 'true'):
self.acknowledge_termination()
self.conn.delete_session(str(self.jid), self.thread_id)
return
# non-esession negotiation. this isn't very useful, but i'm keeping it
# around to test my test suite.
if form.getType() == 'form':
if not self.control:
jid, resource = gajim.get_room_and_nick_from_fjid(str(self.jid))
account = self.conn.name
contact = gajim.contacts.get_contact(account, str(self.jid),
resource)
if not contact:
contact = gajim.contacts.create_contact(jid=jid, account=account,
resource=resource, show=self.conn.get_status())
gajim.interface.new_chat(contact, account, resource=resource,
session=self)
negotiation.FeatureNegotiationWindow(account, str(self.jid), self,
form)
| license: gpl-3.0 | hash: 8,008,322,647,868,001,000 | line_mean: 39.49631 | line_max: 85 | alpha_frac: 0.537245 | autogenerated: false |

pooler/electrum-ltc | electrum_ltc/gui/qt/qrcodewidget.py | copies: 1 | size: 4704 |

import qrcode
from PyQt5.QtGui import QColor, QPen
import PyQt5.QtGui as QtGui
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (
QApplication, QVBoxLayout, QTextEdit, QHBoxLayout, QPushButton, QWidget,
QFileDialog,
)
from electrum_ltc.i18n import _
from electrum_ltc.simple_config import SimpleConfig
from .util import WindowModalDialog, WWLabel, getSaveFileName
class QRCodeWidget(QWidget):
def __init__(self, data = None, fixedSize=False):
QWidget.__init__(self)
self.data = None
self.qr = None
self.fixedSize=fixedSize
if fixedSize:
self.setFixedSize(fixedSize, fixedSize)
self.setData(data)
def setData(self, data):
if self.data != data:
self.data = data
if self.data:
self.qr = qrcode.QRCode(
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=10,
border=0,
)
self.qr.add_data(self.data)
if not self.fixedSize:
k = len(self.qr.get_matrix())
self.setMinimumSize(k*5,k*5)
else:
self.qr = None
self.update()
def paintEvent(self, e):
if not self.data:
return
black = QColor(0, 0, 0, 255)
white = QColor(255, 255, 255, 255)
black_pen = QPen(black)
black_pen.setJoinStyle(Qt.MiterJoin)
if not self.qr:
qp = QtGui.QPainter()
qp.begin(self)
qp.setBrush(white)
qp.setPen(white)
r = qp.viewport()
qp.drawRect(0, 0, r.width(), r.height())
qp.end()
return
matrix = self.qr.get_matrix()
k = len(matrix)
qp = QtGui.QPainter()
qp.begin(self)
r = qp.viewport()
margin = 10
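        # Fit k QR modules into the square frame minus a 10 px margin on each
        # side; the integer-division remainder is absorbed by centring the
        # code via left/top below.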
framesize = min(r.width(), r.height())
boxsize = int((framesize - 2*margin)/k)
size = k*boxsize
left = (framesize - size)/2
top = (framesize - size)/2
# Draw white background with margin
qp.setBrush(white)
qp.setPen(white)
qp.drawRect(0, 0, framesize, framesize)
# Draw qr code
qp.setBrush(black)
qp.setPen(black_pen)
for r in range(k):
for c in range(k):
if matrix[r][c]:
qp.drawRect(int(left+c*boxsize), int(top+r*boxsize),
boxsize - 1, boxsize - 1)
qp.end()
class QRDialog(WindowModalDialog):
def __init__(
self,
*,
data,
parent=None,
title="",
show_text=False,
help_text=None,
show_copy_text_btn=False,
config: SimpleConfig,
):
WindowModalDialog.__init__(self, parent, title)
self.config = config
vbox = QVBoxLayout()
qrw = QRCodeWidget(data)
qr_hbox = QHBoxLayout()
qr_hbox.addWidget(qrw)
qr_hbox.addStretch(1)
vbox.addLayout(qr_hbox)
help_text = data if show_text else help_text
if help_text:
qr_hbox.setContentsMargins(0, 0, 0, 44)
text_label = WWLabel()
text_label.setText(help_text)
vbox.addWidget(text_label)
hbox = QHBoxLayout()
hbox.addStretch(1)
def print_qr():
filename = getSaveFileName(
parent=self,
title=_("Select where to save file"),
filename="qrcode.png",
config=self.config,
)
if not filename:
return
p = qrw.grab()
p.save(filename, 'png')
self.show_message(_("QR code saved to file") + " " + filename)
def copy_image_to_clipboard():
p = qrw.grab()
QApplication.clipboard().setPixmap(p)
self.show_message(_("QR code copied to clipboard"))
def copy_text_to_clipboard():
QApplication.clipboard().setText(data)
self.show_message(_("Text copied to clipboard"))
b = QPushButton(_("Copy Image"))
hbox.addWidget(b)
b.clicked.connect(copy_image_to_clipboard)
if show_copy_text_btn:
b = QPushButton(_("Copy Text"))
hbox.addWidget(b)
b.clicked.connect(copy_text_to_clipboard)
b = QPushButton(_("Save"))
hbox.addWidget(b)
b.clicked.connect(print_qr)
b = QPushButton(_("Close"))
hbox.addWidget(b)
b.clicked.connect(self.accept)
b.setDefault(True)
vbox.addLayout(hbox)
self.setLayout(vbox)
| license: mit | hash: 2,888,548,823,002,609,700 | line_mean: 26.83432 | line_max: 76 | alpha_frac: 0.528061 | autogenerated: false |

shinrisama/Jp | JP.py | copies: 1 | size: 262490 |

# coding: utf-8
import wx
import dataset
import xlsxwriter
from xlsxwriter.workbook import Workbook
import xlwt
import tagtool
import codecs
import csv
import csvkit
from stuf import stuf
# import the newly created GUI file
import JukuPlanner
import subprocess
import os
import sys
import glob
import funzioni
import calendar
import ConfigParser
import hashlib
from time import gmtime, strftime, sleep, localtime
from datetime import datetime
parser = ConfigParser.SafeConfigParser()
parser.read('./cfg.ini')
giorno=''
#Config = ConfigParser.SafeConfigParser()
#Config.read('./cfg.ini')
#pathdatabase = Config.get('Paths','databasepath')
#percorsoDatabase='sqlite:///'+pathdatabase
Freq = 1500  # beep frequency in hertz
Dur = 250  # beep duration in milliseconds
db = dataset.connect('sqlite:///users.db', row_type=stuf)
dbins = dataset.connect('sqlite:///teacher.db', row_type=stuf)
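# Two SQLite databases opened through `dataset` with row_type=stuf, so result
# rows expose columns as attributes (e.g. row.name instead of row['name']).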
#db = dataset.connect(percorsoDatabase, row_type=stuf)
tabella = db["users"]
tabellaTempo = db['timeTable']
tabellaGiorni = db['giorni']
tabellaCalcoli = db['calcoli']
tabellaIns = dbins['insegnanti']
tabellaTempoIns = dbins['timeTable']
tabellaDateIns = dbins['datePersonalizzate']
contaPrivate = 0
#settingsdb = dataset.connect('sqlite:///settings.db', row_type=stuf)
#tabellaSettings = settingsdb['settaggi']
colonna = 0
riga = 0
rigaSelezionata = 0
colonnaSelezionata = 0
rigaMaterie = 0
colonnaMaterie = 0
rigaMaterie1 = 0
colonnaMaterie1 = 0
idSelezionato = 0
idDatePersonalizzate = 0
idGiorni=0
idCalcoli = 0
stanza = 0
datavecchia= ''
percorso = ''
materia = ''
materieArray=[]
materieTesto = []
switchmaterie=0
switchmaterieOriginale=0
mostraInfoStud = False
datiInfoStudente = ''
copia1 = ' '
coordinateCopia1 = []
copia2 = ' '
coordinateCopia2 = []
copia1m = ' '
coordinateCopia1m = []
copia2m = ' '
coordinateCopia2m = []
copia1Colore = ''
copia2Colore = ''
copia1Kojin = False
copia2Kojin = False
# importing * : to enable writing sin(13) instead of math.sin(13)
from math import *
class UTF8Recoder:
"""
Iterator that reads an encoded stream and reencodes the input to UTF-8
"""
def __init__(self, f, encoding):
self.reader = codecs.getreader(encoding)(f)
def __iter__(self):
return self
def next(self):
return self.reader.next().encode("utf-8")
def unicode_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs):
# csv.py doesn't do Unicode; encode temporarily as UTF-8:
csv_reader = csv.reader(utf_8_encodermultiplo(unicode_csv_data),
dialect=dialect, **kwargs)
dizionario = {}
for row in csv_reader:
# decode UTF-8 back to Unicode, cell by cell:
yield [unicode(cell, 'utf-8') for cell in row]
def utf_8_encoder(unicode_csv_data):
return unicode_csv_data.encode('utf-8')
def utf_8_encodermultiplo(unicode_csv_data):
for line in unicode_csv_data:
yield line.encode('utf-8')
class UnicodeReader:
"""
A CSV reader which will iterate over lines in the CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
f = UTF8Recoder(f, encoding)
self.reader = csv.reader(f, dialect=dialect, **kwds)
def next(self):
row = self.reader.next()
return [unicode(s, "utf-8") for s in row]
def __iter__(self):
return self
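# The helpers above work around Python 2's csv module, which cannot read
# unicode directly: input is re-encoded to UTF-8 bytes for csv, then every
# cell is decoded back to unicode on the way out.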
# inherit from the MainFrame created in wxFowmBuilder and create CalcFrame
class CalcFrame(JukuPlanner.FramePrincipale):
    # constructor
def __init__(self, parent):
# initialize parent class
Config = ConfigParser.ConfigParser()
Config.read('./cfg.ini')
colorestanza1 = Config.get('Colori stanze','colorestanza1')
colorestanza2 = Config.get('Colori stanze', 'colorestanza2')
colorestanza3 = Config.get('Colori stanze', 'colorestanza3')
colorestanza4 = Config.get('Colori stanze', 'colorestanza4')
percorsocsv = Config.get('Paths','csvpath')
colore1 = funzioni.coonvertiStringaInColore(colorestanza1)
colore2 = funzioni.coonvertiStringaInColore(colorestanza2)
colore3 = funzioni.coonvertiStringaInColore(colorestanza3)
colore4 = funzioni.coonvertiStringaInColore(colorestanza4)
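        # The four room colours come from cfg.ini as strings, are converted to
        # RGBA tuples, and are applied below to the teacher rows (0, 5, 10, 15)
        # of the schedule grid.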
JukuPlanner.FramePrincipale.__init__(self, parent)
insegnante = u"先生"
studente = u"生徒"
materia = u"科目"
room1 = u'部屋1'
room2 = u'部屋2'
room3 = u'部屋3'
room4 = u'部屋4'
global datavecchia
datavecchia = str(self.calendario.Date)
self.griglia.SetColLabelValue(0, "9:10 - 10:20")
self.griglia.SetColLabelValue(1, "10:30 - 11:40")
self.griglia.SetColLabelValue(2, "11:50 - 13:00")
self.griglia.SetColLabelValue(3, "13:40 - 14:50")
self.griglia.SetColLabelValue(4, "15:00 - 16:10")
self.griglia.SetColLabelValue(5, "16:40 - 17:50")
self.griglia.SetColLabelValue(6, "18:00 - 19:10")
self.griglia.SetColLabelValue(7, "19:20 - 20:30")
self.griglia.SetColLabelValue(8, "20:40 - 21:50")
self.griglia.SetRowLabelValue(0, insegnante)
self.griglia.SetRowLabelValue(1, studente)
self.griglia.SetRowLabelValue(2, studente)
self.griglia.SetRowLabelValue(3, studente)
self.griglia.SetRowLabelValue(4, studente)
self.griglia.SetRowLabelValue(5, insegnante)
self.griglia.SetRowLabelValue(6, studente)
self.griglia.SetRowLabelValue(7, studente)
self.griglia.SetRowLabelValue(8, studente)
self.griglia.SetRowLabelValue(9, studente)
self.griglia.SetRowLabelValue(10, insegnante)
self.griglia.SetRowLabelValue(11, studente)
self.griglia.SetRowLabelValue(12, studente)
self.griglia.SetRowLabelValue(13, studente)
self.griglia.SetRowLabelValue(14, studente)
self.griglia.SetRowLabelValue(15, insegnante)
self.griglia.SetRowLabelValue(16, studente)
self.griglia.SetRowLabelValue(17, studente)
self.griglia.SetRowLabelValue(18, studente)
self.griglia.SetRowLabelValue(19, studente)
for i in range(0, 9, 1):
#self.griglia.SetCellBackgroundColour(0, i, wx.GREEN)
self.griglia.SetCellBackgroundColour(0, i,wx.Colour(int(colore1[0]), int(colore1[1]), int(colore1[2]), int(colore1[3])))
self.griglia.SetCellBackgroundColour(5, i, wx.Colour(int(colore2[0]), int(colore2[1]), int(colore2[2]), int(colore2[3])))
self.griglia.SetCellBackgroundColour(10, i,wx.Colour(int(colore3[0]), int(colore3[1]), int(colore3[2]), int(colore3[3])))
self.griglia.SetCellBackgroundColour(15, i, wx.Colour(int(colore4[0]), int(colore4[1]), int(colore4[2]), int(colore4[3])))
self.griglia.SetColSize(0, 100)
self.griglia.SetColSize(1, 100)
self.griglia.SetColSize(2, 100)
self.griglia.SetColSize(3, 100)
self.griglia.SetColSize(4, 100)
self.griglia.SetColSize(5, 100)
self.griglia.SetColSize(6, 100)
self.griglia.SetColSize(7, 100)
self.griglia.SetColSize(8, 100)
popolaInsegnanti = tabella.find(teacher='1')
popolaStudenti = tabella.find(student='1',)
listaMaterie = [u'国語', u'英語', u'数学', u'理科', u'社会', u'特別']
#for i in popolaStudenti:
# self.listaStudenti.Append(i.name)
for i in popolaInsegnanti:
self.listaInsegnanti.Append(i.name)
for i in listaMaterie:
self.listaMaterie.Append(i)
nomeFile = str(self.calendario.Date)
nomeFile = nomeFile.replace('/', '-')
nomeFile = nomeFile.replace(' 00:00:00', '')
anno = '20' + nomeFile[-2:]
global percorso
percorso = percorsocsv+'/' + anno + '/' + nomeFile[:2] + '/' + nomeFile + '.csv'
if not os.path.exists(os.path.dirname(percorso)):
try:
os.makedirs(os.path.dirname(percorso))
except OSError as exc: # Guard against race condition
pass
print percorso
controllaPercorso = os.path.exists(percorso)
if controllaPercorso == True:
with open(percorso, 'rb') as f:
reader = csv.DictReader(f)
contarighe = 0
converti = csvkit.unicsv.UnicodeCSVDictReader(f=f, encoding='utf-8')
for i in converti:
print i, 'i', type(i)
self.griglia.SetCellValue(contarighe, 0, i['9:10 - 10:20'])
self.griglia.SetCellValue(contarighe, 1, i['10:30 - 11:40'])
self.griglia.SetCellValue(contarighe, 2, i['11:50 - 13:00'])
self.griglia.SetCellValue(contarighe, 3, i['13:40 - 14:50'])
self.griglia.SetCellValue(contarighe, 4, i['15:00 - 16:10'])
self.griglia.SetCellValue(contarighe, 5, i['16:40 - 17:50'])
self.griglia.SetCellValue(contarighe, 6, i['18:00 - 19:10'])
self.griglia.SetCellValue(contarighe, 7, i['19:20 - 20:30'])
self.griglia.SetCellValue(contarighe, 8, i['20:40 - 21:50'])
contarighe = contarighe + 1
def inserimentoAutomatico( self, event ):
lista = []
nonInseriti = []
dizionario = dict()
for i in self.studentiDelGiorno.Items:
risultati = self.preparativiInserimentoAutomatico(i)
#lista.append(risultati)
dizionario[i]=risultati
print dizionario, 'risultati'
for dizio in dizionario:
soloIndividuali = False
studente = tabella.find_one(name=dizio, student=1)
if studente.individual == True and studente.shared == False:
soloIndividuali = True
if soloIndividuali == True:
print dizio, 'supporta solo lezioni singole'
print 'cerco stanze disponibili'
print dizionario[dizio]
contaore = 0
for diz in dizionario[dizio]:
for i in range(0, self.griglia.NumberRows):
if dizio in self.griglia.GetCellValue(i,contaore):
inserito= True
break
else:
#print 'Elemento da inserire', dizio
inserito = False
if diz != u'':
if self.griglia.GetCellValue(1,contaore) == '' and inserito == False:
self.griglia.SetCellValue(1, contaore,
'(K)' + unicode(dizio) + u' ' + u'(' + unicode(diz.strip('K')) + u')')
self.griglia.SetCellValue(2,contaore,'(K)')
self.griglia.SetCellValue(3, contaore,
'(K)')
self.griglia.SetCellValue(4, contaore,
'(K)' )
#inserito == True
elif self.griglia.GetCellValue(6 ,contaore) == '' and inserito == False:
self.griglia.SetCellValue(6,contaore,'(K)'+unicode(dizio)+u' '+u'('+unicode(diz.strip('K'))+u')')
self.griglia.SetCellValue(7, contaore, '(K)')
self.griglia.SetCellValue(8, contaore,
'(K)')
self.griglia.SetCellValue(9, contaore,
'(K)')
#inserito == True
elif self.griglia.GetCellValue(11 ,contaore) == '' and inserito == False:
self.griglia.SetCellValue(11,contaore,'(K)'+unicode(dizio)+u' '+u'('+unicode(diz.strip('K'))+u')')
self.griglia.SetCellValue(12, contaore, '(K)')
self.griglia.SetCellValue(13, contaore,
'(K)')
self.griglia.SetCellValue(14, contaore,
'(K)')
#inserito == True
elif self.griglia.GetCellValue(16 ,contaore) == '' and inserito == False:
self.griglia.SetCellValue(16,contaore,'(K)'+unicode(dizio)+u' '+u'('+unicode(diz.strip('K'))+u')')
self.griglia.SetCellValue(17, contaore, '(K)')
self.griglia.SetCellValue(18, contaore,
'(K)')
self.griglia.SetCellValue(19, contaore,
'(K)')
elif self.griglia.GetCellValue(16 ,contaore) != '' and inserito == False:
if contaore == 0:
nonInseriti.append(dizio + ' ' + '9:10')
elif contaore == 1:
nonInseriti.append(dizio + ' ' + '10:30')
elif contaore == 2:
nonInseriti.append(dizio + ' ' + '11:50')
elif contaore == 3:
nonInseriti.append(dizio + ' ' + '13:40')
elif contaore == 4:
nonInseriti.append(dizio + ' ' + '15:00')
elif contaore == 5:
nonInseriti.append(dizio + ' ' + '16:40')
elif contaore == 6:
nonInseriti.append(dizio + ' ' + '18:00')
elif contaore == 7:
nonInseriti.append(dizio + ' ' + '19:20')
elif contaore == 8:
nonInseriti.append(dizio + ' ' + '20:40')
#inserito == True
contaore = contaore + 1
if soloIndividuali == False:
print dizio, 'supporta lezioni di gruppo'
print 'cerco stanze disponibili'
print dizionario[dizio]
                contaore = 0
                orari = ['9:10', '10:30', '11:50', '13:40', '15:00', '16:40', '18:00', '19:20', '20:40']
                for diz in dizionario[dizio]:
                    # Skip this time slot if the student already appears anywhere in its column.
                    for i in range(0, self.griglia.NumberRows):
                        if dizio in self.griglia.GetCellValue(i, contaore):
                            inserito = True
                            break
                        else:
                            inserito = False
                    if u'K' in diz:
                        # Individual (K) request: occupy a whole room, as in the
                        # individual-only branch above.
                        for base in (1, 6, 11, 16):
                            if self.griglia.GetCellValue(base, contaore) == '' and inserito == False:
                                self.griglia.SetCellValue(base, contaore,
                                                          unicode(dizio) + u' ' + u'(' + unicode(diz) + u')')
                                self.griglia.SetCellValue(base + 1, contaore, '(K)')
                                self.griglia.SetCellValue(base + 2, contaore, '(K)')
                                self.griglia.SetCellValue(base + 3, contaore, '(K)')
                                break
                        else:
                            if inserito == False:
                                nonInseriti.append(dizio + ' ' + orari[contaore])
else:
                        if diz != u'':
                            # Group request: take the first free seat among the 16
                            # student rows, scanning room by room.
                            for r in (1, 2, 3, 4, 6, 7, 8, 9, 11, 12, 13, 14, 16, 17, 18, 19):
                                if self.griglia.GetCellValue(r, contaore) == '' and inserito == False:
                                    self.griglia.SetCellValue(r, contaore,
                                                              unicode(dizio) + u' ' + u'(' + unicode(diz) + u')')
                                    break
                            else:
                                if inserito == False:
                                    nonInseriti.append(dizio + ' ' + orari[contaore])
                    contaore = contaore + 1
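        # Show a single dialog listing every student/time-slot pair that could
        # not be placed in any room.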
stringa = u'この生徒記入出来ません'
for i in nonInseriti:
stringa = unicode(stringa) + u' ' +unicode(i)
print stringa
d = wx.MessageDialog(None, stringa, '', wx.OK | wx.ICON_QUESTION)
d.ShowModal()
d.Destroy()
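    # Grid layout used throughout this frame (as the code above and below
    # implies): columns 0-8 are the nine daily time slots from 9:10 to 20:40;
    # rows 0, 5, 10 and 15 hold the teacher of each of the four rooms, and the
    # four rows beneath each of them hold up to four students. A 'K ' / '(K)'
    # marker flags an individual (kojin) lesson that blocks the whole room.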
def roomChange( self, event ):
global colonnaSelezionata
global rigaSelezionata
global copia1
global copia2
global coordinateCopia1
global coordinateCopia2
global copia1m
global copia2m
global coordinateCopia1m
global coordinateCopia2m
global copia1Colore
global copia2Colore
global copia1Kojin
global copia2Kojin
colonnaSelezionata = event.GetCol()
rigaSelezionata = event.GetRow()
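        # Two-click swap: the first click on a cell buffers its value (and, for
        # student rows, its background colour); the second click exchanges the
        # two cells. Teacher rows (0, 5, 10, 15) use the copia*m buffers,
        # student rows use copia*.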
if rigaSelezionata == 0 or rigaSelezionata == 5 or rigaSelezionata == 10 or rigaSelezionata == 15:
if copia1m != ' ' and copia2m == ' ':
#copia2Colore = self.griglia.GetCellBackgroundColour(rigaSelezionata, colonnaSelezionata)
copia2m = self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata)
coordinateCopia2m = [rigaSelezionata, colonnaSelezionata]
self.griglia.SetCellValue(rigaSelezionata, colonnaSelezionata, copia1m)
self.griglia.SetCellValue(coordinateCopia1m[0], coordinateCopia1m[1], copia2m)
copia1m = ' '
copia2m = ' '
coordinateCopia1m = []
coordinateCopia2m = []
if copia1m == ' ' and copia2m == ' ':
copia1m = self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata)
coordinateCopia1m = [rigaSelezionata, colonnaSelezionata]
        else:
if copia1 !=' ' and copia2 == ' ':
copia2Colore = self.griglia.GetCellBackgroundColour(rigaSelezionata,colonnaSelezionata)
copia2 = self.griglia.GetCellValue(rigaSelezionata,colonnaSelezionata)
coordinateCopia2 = [rigaSelezionata, colonnaSelezionata]
self.griglia.SetCellValue(rigaSelezionata,colonnaSelezionata,copia1)
self.griglia.SetCellValue(coordinateCopia1[0],coordinateCopia1[1],copia2)
if self.griglia.GetCellValue(rigaSelezionata + 1, colonnaSelezionata) == u'(K)':
copia2Kojin = True
if copia1 == ' ' and copia2 == ' ':
copia1 = self.griglia.GetCellValue(rigaSelezionata,colonnaSelezionata)
coordinateCopia1 = [rigaSelezionata,colonnaSelezionata]
copia1Colore = self.griglia.GetCellBackgroundColour(rigaSelezionata,colonnaSelezionata)
#self.griglia.SetCellBackgroundColour(rigaSelezionata,colonnaSelezionata,wx.BLUE)
if self.griglia.GetCellValue(rigaSelezionata+1,colonnaSelezionata) == u'(K)':
copia1Kojin = True
            if copia1 != ' ' and copia2 != ' ':
                # Swap the background colours captured when each cell was selected.
                self.griglia.SetCellBackgroundColour(coordinateCopia1[0],coordinateCopia1[1],copia2Colore)
                self.griglia.SetCellBackgroundColour(rigaSelezionata,colonnaSelezionata, copia1Colore)
copia1 = ' '
copia2 = ' '
coordinateCopia1 = []
coordinateCopia2 = []
copia1Colore = ''
copia2Colore = ''
    def aggiornaEtichetteMaterie( self, numeroOra, acceso ):
        # Shared body of the nine per-hour checkbox handlers below: when the
        # box is ticked, fill the subject labels with the values returned by
        # funzioni.cercaMaterie for that hour; when it is cleared, blank them.
        global giorno
        print acceso, 'acceso'
        if acceso == 1:
            materie = funzioni.cercaMaterie(numeroOra, giorno, tabella, tabellaTempo)
            self.kokugol.LabelText = str(materie[0])
            self.eigol.LabelText = str(materie[1])
            self.suugakul.LabelText = str(materie[2])
            self.rikal.LabelText = str(materie[3])
            self.shakail.LabelText = str(materie[4])
            self.tokubetsul.LabelText = str(materie[5])
        if acceso == 0:
            self.kokugol.LabelText = ''
            self.eigol.LabelText = ''
            self.suugakul.LabelText = ''
            self.rikal.LabelText = ''
            self.shakail.LabelText = ''
            self.tokubetsul.LabelText = ''
    def primaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(1, self.primaCheck.IsChecked())
    def secondaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(2, self.secondaCheck.IsChecked())
    def terzaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(3, self.terzaCheck.IsChecked())
    def quartaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(4, self.quartaCheck.IsChecked())
    def quintaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(5, self.quintaCheck.IsChecked())
    def sestaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(6, self.sestaCheck.IsChecked())
    def settimaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(7, self.settimaCheck.IsChecked())
    def ottavaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(8, self.ottavaCheck.IsChecked())
    def nonaOraCheck( self, event ):
        self.aggiornaEtichetteMaterie(9, self.nonaCheck.IsChecked())
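    # Manual check-out: stores a midnight timestamp for the student in the
    # selected cell in tabellaTempo, which the check-in colouring in caricaCSV
    # appears to read via funzioni.controlloCheckIn.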
def manualCheckOut( self, event ):
global rigaSelezionata
global colonnaSelezionata
data = funzioni.aggiungizeri(self.calendario.Date.Year,self.calendario.Date.Month+1,self.calendario.Date.Day)
ora = '00:00:00'
tempo = data+' '+ora
nomeVero = funzioni.ripuliscinome(self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata))
if nomeVero is not None:
tabellaTempo.insert(dict(name=nomeVero, time=tempo, giorno=data, ora=ora))
def lezioniAggiuntive( self, event ):
global colonnaSelezionata
global rigaSelezionata
studentedaElaborare = self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata)
aggiungiAsterisco = funzioni.aggiungiAsterisco(self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata))
if aggiungiAsterisco == True:
self.griglia.SetCellValue(rigaSelezionata, colonnaSelezionata, studentedaElaborare.strip('*'))
if aggiungiAsterisco == False:
self.griglia.SetCellValue(rigaSelezionata, colonnaSelezionata, '*'+studentedaElaborare)
def cancellaCelle( self, event ):
global colonnaSelezionata
global rigaSelezionata
colonnaSelezionata = event.GetCol()
rigaSelezionata = event.GetRow()
if rigaSelezionata >= 0 and rigaSelezionata <= 4:
indiceriga = 0
if rigaSelezionata >=5 and rigaSelezionata <= 9:
indiceriga = 5
if rigaSelezionata >= 10 and rigaSelezionata <= 14:
indiceriga = 10
if rigaSelezionata >= 15 and rigaSelezionata <= 19:
indiceriga = 15
dlg = wx.MessageDialog(None, u"データ削除しますか", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if result == wx.ID_YES:
for i in range(0, 5):
self.griglia.SetCellValue(indiceriga + i, colonnaSelezionata, '')
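    # Seats the students currently selected in listaStudenti in the room that
    # contains the selected cell, but only after funzioni.controlloDuplicatiStudenti
    # confirms none of them is already placed elsewhere in the same column.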
def aggiungiStudentiAllaTabella(self,riga,colonna):
global colonnaSelezionata
global rigaSelezionata
global stanza
if rigaSelezionata >= 0 and rigaSelezionata <= 4:
stanza = 0
if rigaSelezionata >= 5 and rigaSelezionata <= 9:
stanza = 5
if rigaSelezionata >= 10 and rigaSelezionata <= 14:
stanza = 10
if rigaSelezionata >= 15 and rigaSelezionata <= 19:
stanza = 15
        listaDaMandare = []
        # Collect the students already seated in the other three rooms (all four
        # seats of each) so the duplicate check can reject names that are
        # already placed in this column.
        for base in (1, 6, 11, 16):
            if base == stanza + 1:
                continue
            for i in range(base, base + 4):
                listaDaMandare.append(self.griglia.GetCellValue(i, colonnaSelezionata))
print listaDaMandare, 'lista da mandare'
if rigaSelezionata >= 0 + stanza and rigaSelezionata <= 4 + stanza:
# print 'stanza 1'
# print self.listaStudenti.GetSelections(), type(self.listaStudenti.GetSelections())
elementoget = self.listaStudenti.GetSelections()
# print elementoget, 'elementoget'
valoriStudenti = []
print 'valore stanza', stanza
if len(self.listaStudenti.GetSelections()) == 1:
valoriStudenti.append(self.listaStudenti.Items[elementoget[0]])
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True :
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[0]])
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, u'')
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, u'')
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, u'')
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.OK | wx.ICON_QUESTION)
result = dlg.ShowModal()
if len(self.listaStudenti.GetSelections()) == 4:
valoriStudenti.append(self.listaStudenti.Items[elementoget[0]])
valoriStudenti.append(self.listaStudenti.Items[elementoget[1]])
valoriStudenti.append(self.listaStudenti.Items[elementoget[2]])
valoriStudenti.append(self.listaStudenti.Items[elementoget[3]])
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True:
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[0]])
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[1]])
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[2]])
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[3]])
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if len(self.listaStudenti.GetSelections()) == 2:
valoriStudenti.append(self.listaStudenti.Items[elementoget[0]])
valoriStudenti.append(self.listaStudenti.Items[elementoget[1]])
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True:
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[0]])
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, '')
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[1]])
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, '')
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if len(self.listaStudenti.GetSelections()) == 3:
valoriStudenti.append(self.listaStudenti.Items[elementoget[0]])
valoriStudenti.append(self.listaStudenti.Items[elementoget[1]])
valoriStudenti.append(self.listaStudenti.Items[elementoget[2]])
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True:
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[0]])
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[1]])
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, self.listaStudenti.Items[elementoget[2]])
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, '')
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
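    # Variant of aggiungiStudentiAllaTabella where the up-to-four student names
    # are passed in explicitly (used by the room buttons) instead of being read
    # from the listaStudenti selection.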
def aggiungiStudentiAllaTabellaPerStanze(self,riga,colonna,elemento1,elemento2,elemento3,elemento4,nelementi):
global colonnaSelezionata
global rigaSelezionata
global stanza
if rigaSelezionata >= 0 and rigaSelezionata <= 4:
stanza = 0
if rigaSelezionata >= 5 and rigaSelezionata <= 9:
stanza = 5
if rigaSelezionata >= 10 and rigaSelezionata <= 14:
stanza = 10
if rigaSelezionata >= 15 and rigaSelezionata <= 19:
stanza = 15
        listaDaMandare = []
        # Collect the students already seated in the other three rooms (all four
        # seats of each) so the duplicate check can reject names that are
        # already placed in this column.
        for base in (1, 6, 11, 16):
            if base == stanza + 1:
                continue
            for i in range(base, base + 4):
                listaDaMandare.append(self.griglia.GetCellValue(i, colonnaSelezionata))
print listaDaMandare, 'lista da mandare'
if rigaSelezionata >= 0 + stanza and rigaSelezionata <= 4 + stanza:
# print 'stanza 1'
# print self.listaStudenti.GetSelections(), type(self.listaStudenti.GetSelections())
elementoget = self.listaStudenti.GetSelections()
# print elementoget, 'elementoget'
valoriStudenti = []
print 'valore stanza', stanza
# print self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata)
#elf.griglia.SetCellValue(rigaSelezionata, colonnaSelezionata, pulisciStudente)
if nelementi == 1:
valoriStudenti.append(elemento1)
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True:
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata, elemento1)
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, u'')
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, u'')
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, u'')
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.OK | wx.ICON_QUESTION)
result = dlg.ShowModal()
if nelementi == 4:
valoriStudenti.append(elemento1)
valoriStudenti.append(elemento2)
valoriStudenti.append(elemento3)
valoriStudenti.append(elemento4)
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True:
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata,elemento1)
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, elemento2)
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, elemento3)
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, elemento4)
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if nelementi == 2:
valoriStudenti.append(elemento1)
valoriStudenti.append(elemento2)
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True:
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata, elemento1)
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, elemento2)
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, '')
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, '')
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if nelementi == 3:
valoriStudenti.append(elemento1)
valoriStudenti.append(elemento2)
valoriStudenti.append(elemento3)
controlloDuplicati = funzioni.controlloDuplicatiStudenti(colonnaSelezionata, rigaSelezionata,
valoriStudenti, listaDaMandare)
if controlloDuplicati == True:
self.griglia.SetCellValue(1 + stanza, colonnaSelezionata, elemento1)
self.griglia.SetCellValue(2 + stanza, colonnaSelezionata, elemento2)
self.griglia.SetCellValue(3 + stanza, colonnaSelezionata, elemento3)
self.griglia.SetCellValue(4 + stanza, colonnaSelezionata, '')
if controlloDuplicati == False:
dlg = wx.MessageDialog(None, u"生徒入力済み", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
def selezionaStudenti(self,event):
global colonnaSelezionata
global rigaSelezionata
global stanza
self.aggiungiStudentiAllaTabella(rigaSelezionata,colonnaSelezionata)
def kojinsettei( self, event ):
global rigaSelezionata
global colonnaSelezionata
        if self.griglia.GetCellValue(rigaSelezionata,colonnaSelezionata) != u'':
            stringaDaRipulire = self.griglia.GetCellValue(rigaSelezionata,colonnaSelezionata)
            if stringaDaRipulire.startswith('K '):
                # Toggle off: drop only the leading 'K ' marker (str.strip('K ')
                # would also eat leading 'K' letters of the name itself).
                self.griglia.SetCellValue(rigaSelezionata, colonnaSelezionata, stringaDaRipulire[2:])
            else:
                self.griglia.SetCellValue(rigaSelezionata, colonnaSelezionata, 'K ' + stringaDaRipulire)
    def mandaAStanza( self, rigaBase ):
        # Shared body of the four room buttons: rebuild the seat list of the
        # room whose first seat row is rigaBase (1, 6, 11 or 16), append the
        # selected student with the chosen subject, and hand the result to
        # aggiungiStudentiAllaTabellaPerStanze.
        global colonnaSelezionata
        global rigaSelezionata
        rigaSelezionata = rigaBase
        colonnaSelezionata = self.ore.Selections[0]
        materiaConParentesi = ' (' + self.materieVere.StringSelection + ')'
        listafittizia = []
        listaStudenti = []
        for i in range(rigaBase, rigaBase + 4):
            listafittizia.append(self.griglia.GetCellValue(i, colonnaSelezionata))
        print listafittizia, 'listafit'
        for i in listafittizia:
            if i == u'':
                listaStudenti.append(self.studentiDelGiorno.StringSelection + materiaConParentesi)
                break
            if 'K ' in i:
                break
            else:
                listaStudenti.append(i)
        print len(listaStudenti)
        # Pad the list back to four entries so the positional call below works.
        for i in range(0, 4 - len(listaStudenti)):
            listaStudenti.append(u'')
        print listaStudenti, 'listastudenti'
        contaelementibuoni = 0
        for i in listaStudenti:
            if i != u'':
                contaelementibuoni = contaelementibuoni + 1
        self.aggiungiStudentiAllaTabellaPerStanze(rigaSelezionata, colonnaSelezionata,
                                                  listaStudenti[0], listaStudenti[1],
                                                  listaStudenti[2], listaStudenti[3],
                                                  contaelementibuoni)
    def mandaAStanza1( self, event ):
        self.mandaAStanza(1)
    def mandaAStanza2( self, event ):
        self.mandaAStanza(6)
    def mandaAStanza3( self, event ):
        self.mandaAStanza(11)
    def mandaAStanza4( self, event ):
        self.mandaAStanza(16)
def aggiungiMateria( self, event ):
global colonnaSelezionata
global rigaSelezionata
#print self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata)
materiaConParentesi = ' ('+self.listaMaterie.StringSelection+')'
pulisciStudente = funzioni.puliscinome(self.griglia.GetCellValue(rigaSelezionata, colonnaSelezionata),materiaConParentesi)
self.griglia.SetCellValue(rigaSelezionata, colonnaSelezionata, pulisciStudente)
self.listaMaterie.DeselectAll()
def scremaMateria(self, event):
self.listaStudenti.Clear()
popolaStudenti = tabella.find(student='1')
global colonnaSelezionata
# colonnaSelezionata = self.griglia.wxGridSelectCells
listaAggiornataStudenti = funzioni.elaboraOraStudenti(tabella, colonnaSelezionata,
self.listaMaterie.StringSelection)
for i in listaAggiornataStudenti:
self.listaStudenti.Append(i)
def scremaGente(self, event):
popolaInsegnanti = tabellaIns.find(teacher='1')
popolaStudenti = tabella.find(student='1')
ottieniColonna = event.GetCol()
global colonnaSelezionata
colonnaSelezionata = event.GetCol()
global rigaSelezionata
rigaSelezionata = event.GetRow()
print rigaSelezionata, 'riga selezionata', colonnaSelezionata, ' Colonna selezionata'
ottieniRiga = event.GetRow()
self.contenutoCella.LabelText=self.griglia.GetCellValue(rigaSelezionata,colonnaSelezionata)
dataComposta = funzioni.aggiungizeri(self.calendario.Date.Year, self.calendario.Date.Month + 1,
self.calendario.Date.Day)
listaAggiornataInsegnanti = funzioni.elaboraOra(ottieniColonna, popolaInsegnanti, tabella,tabellaIns, tabellaTempoIns,dataComposta,tabellaDateIns, str(self.calendario.Date))
#listaAggiornataStudenti=funzioni.elaboraOraStudenti(ottieniColonna,popolaStudenti,tabella,tabellaTempo,str(self.calendario.Date))
self.listaInsegnanti.Clear()
#self.listaStudenti.Clear()
for i in listaAggiornataInsegnanti:
self.listaInsegnanti.Append(i)
#for i in listaAggiornataStudenti:
# self.listaStudenti.Append(i)
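    # Reloads the day's grid from ./csv/<year>/<month>/<date>.csv (one CSV
    # column per time slot), rebuilds the studentiDelGiorno list for the chosen
    # date, and recolours every seat according to its check-in state.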
def caricaCSV(self):
self.listaStudenti.Clear()
dataComposta = funzioni.aggiungizeri(self.calendario.Date.Year, self.calendario.Date.Month + 1,
self.calendario.Date.Day)
studentiElaborati = funzioni.elaboraOraStudenti(colonnaSelezionata, tabella, tabellaTempo, dataComposta,
self.listaMaterie.StringSelection)
for i in studentiElaborati:
controlloDuplicati = funzioni.controlloNomiDuplicati(i, self.listaStudenti.Items)
if controlloDuplicati == True:
self.listaStudenti.Append(i)
print colonnaSelezionata, rigaSelezionata
self.studentiDelGiorno.Clear()
calendario = calendar
global giorno
giornoDelMeseCorrente = str(self.calendario.Date)
dataDatetime = datetime.strptime(giornoDelMeseCorrente, '%m/%d/%y %H:%M:%S')
lungezzaMese = calendario.monthrange(dataDatetime.year, dataDatetime.month)
dataComp = str(self.calendario.Date.Year) + '/' + str(
self.calendario.Date.Month + 1) + '/' + str(self.calendario.Date.Day)
dataComposta = funzioni.aggiungizeri(self.calendario.Date.Year, self.calendario.Date.Month + 1,
self.calendario.Date.Day)
studentiPerData = tabellaTempo.find(uscite=dataComposta)
# self.kokugoCheck.SetValue(0)
# self.eigoCheck.SetValue(0)
# self.suugakuCheck.SetValue(0)
# self.rikaCheck.SetValue(0)
# self.shakaiCheck.SetValue(0)
# self.tokubetsuCheck.SetValue(0)
self.dataText.LabelText = dataComposta
giorno = dataComposta
primaConta=0
secondaConta = 0
terzaConta = 0
quartaConta = 0
quintaConta = 0
sestaConta = 0
settimaConta = 0
ottavaConta = 0
nonaConta = 0
kokugoConta = 0
eigoConta = 0
suugakuConta = 0
rikaConta = 0
shakaiConta = 0
tokubetsuConta = 0
cercaorariStudente = tabella.find(student='1')
for i in cercaorariStudente:
cercaStudentiDelGiorno = tabellaTempo.find_one(name=i.name, uscite =dataComposta)
#print cercaStudentiDelGiorno.name
if cercaStudentiDelGiorno is not None:
self.studentiDelGiorno.Append(cercaStudentiDelGiorno.name)
for i in studentiPerData:
prima = tabella.count(name=i.name, primaOra=1)
if prima == 1:
primaConta = primaConta+prima
seconda = tabella.count(name=i.name, secondaOra=1)
if seconda == 1:
secondaConta=secondaConta + seconda
terza = tabella.count(name=i.name, terzaOra=1)
if terza == 1:
terzaConta = terzaConta + terza
quarta = tabella.count(name=i.name, quartaOra=1)
if quarta == 1:
quartaConta = quartaConta + quarta
quinta = tabella.count(name=i.name, quintaOra=1)
if quinta == 1:
quintaConta = quintaConta + quinta
sesta = tabella.count(name=i.name, sestaOra=1)
if sesta == 1:
sestaConta = sestaConta + sesta
settima = tabella.count(name=i.name, settimaOra=1)
if settima == 1:
settimaConta = settimaConta + settima
ottava = tabella.count(name=i.name, ottavaOra=1)
if ottava == 1:
ottavaConta = ottavaConta + ottava
nona = tabella.count(name=i.name, nonaOra=1)
if nona == 1:
nonaConta = nonaConta + nona
for i in studentiPerData:
kokugo = tabella.count(name=i.name, kokugo=1)
if kokugo == 1:
kokugoConta = kokugoConta+kokugo
eigo = tabella.count(name=i.name, eigo=1)
if eigo == 1:
eigoConta = eigoConta + eigo
suugaku = tabella.count(name=i.name, suugaku=1)
if suugaku == 1:
suugakuConta = suugakuConta + suugaku
rika = tabella.count(name=i.name, rika=1)
if rika == 1:
rikaConta = rikaConta + rika
shakai = tabella.count(name=i.name, shakai=1)
if shakai == 1:
shakaiConta = shakaiConta + shakai
tokubetsu = tabella.count(name=i.name, tokubetsu=1)
if tokubetsu == 1:
tokubetsuConta = tokubetsuConta + tokubetsu
# self.prima.LabelText = str(primaConta)
# self.seconda.LabelText = str(secondaConta)
# self.terza.LabelText = str(terzaConta)
# self.quarta.LabelText = str(quartaConta)
# self.quinta.LabelText = str(quintaConta)
# self.sesta.LabelText = str(sestaConta)
# self.settima.LabelText = str(settimaConta)
# self.ottava.LabelText = str(ottavaConta)
# self.nona.LabelText = str(nonaConta)
nomeFile = str(self.calendario.Date)
nomeFile = nomeFile.replace('/', '-')
nomeFile = nomeFile.replace(' 00:00:00', '')
anno = '20' + nomeFile[-2:]
global percorso
percorso = './csv/' + anno + '/' + nomeFile[:2] + '/' + nomeFile + '.csv'
if not os.path.exists(os.path.dirname(percorso)):
            try:
                os.makedirs(os.path.dirname(percorso))
            except OSError as exc:  # guard against a concurrent mkdir of the same path
                import errno
                if exc.errno != errno.EEXIST:
                    raise
print percorso
controllaPercorso = os.path.exists(percorso)
if controllaPercorso == True:
with open(percorso, 'rb') as f:
reader = csv.DictReader(f)
contarighe = 0
converti = csvkit.unicsv.UnicodeCSVDictReader(f=f, encoding='utf-8')
for i in converti:
self.griglia.SetCellValue(contarighe, 0, i['9:10 - 10:20'])
self.griglia.SetCellValue(contarighe, 1, i['10:30 - 11:40'])
self.griglia.SetCellValue(contarighe, 2, i['11:50 - 13:00'])
self.griglia.SetCellValue(contarighe, 3, i['13:40 - 14:50'])
self.griglia.SetCellValue(contarighe, 4, i['15:00 - 16:10'])
self.griglia.SetCellValue(contarighe, 5, i['16:40 - 17:50'])
self.griglia.SetCellValue(contarighe, 6, i['18:00 - 19:10'])
self.griglia.SetCellValue(contarighe, 7, i['19:20 - 20:30'])
self.griglia.SetCellValue(contarighe, 8, i['20:40 - 21:50'])
contarighe = contarighe + 1
        if controllaPercorso == False:
            # No CSV exists for this date yet: blank the whole 20x9 grid.
            for r in range(0, 20):
                for c in range(0, 9):
                    self.griglia.SetCellValue(r, c, '')
        # Reset the student rows of all four rooms to a white background.
        for base in (1, 6, 11, 16):
            for r in range(base, base + 4):
                for c in range(0, 9):
                    self.griglia.SetCellBackgroundColour(r, c, wx.WHITE)
        # Colour each seat by check-in state: green = checked out, red = still
        # checked in, white = no record for the day.
        dataComposta = funzioni.aggiungizeri(self.calendario.Date.Year,
                                             self.calendario.Date.Month + 1,
                                             self.calendario.Date.Day)
        for base in (1, 6, 11, 16):
            for r in range(base, base + 4):
                for c in range(0, 9):
                    controlloCheckIn = funzioni.controlloCheckIn(self.griglia.GetCellValue(r, c), tabellaTempo, dataComposta)
                    if controlloCheckIn == 'OUT':
                        self.griglia.SetCellBackgroundColour(r, c, wx.GREEN)
                    if controlloCheckIn == 'IN':
                        self.griglia.SetCellBackgroundColour(r, c, wx.RED)
                    if controlloCheckIn == 'NON':
                        self.griglia.SetCellBackgroundColour(r, c, wx.WHITE)
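    # Loads the given student's weekly timetable from ./StudentsData/ and
    # returns the nine cell values of today's weekday column.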
def preparativiInserimentoAutomatico(self,studente):
valoriDaRestituire = []
global colonna
global riga
self.materieVere.Clear()
materieNecessarie = []
colonna = 0
riga = 0
cercaStudente = studente
studente = tabella.find_one(name=cercaStudente, student=1)
# self.kokugoCheck.SetValue(studente.kokugo)
# self.eigoCheck.SetValue(studente.eigo)
# self.suugakuCheck.SetValue(studente.suugaku)
# self.rikaCheck.SetValue(studente.rika)
# self.shakaiCheck.SetValue(studente.shakai)
# self.tokubetsuCheck.SetValue(studente.tokubetsu)
self.individualCheck.SetValue(studente.individual)
self.groupCheck.SetValue(studente.shared)
popolastudenti = tabella.find_one(name=cercaStudente, student='1')
percorsoStudenti = './StudentsData/' + popolastudenti.name + popolastudenti.telephone + '.txt'
controllaPercorso = os.path.exists(percorsoStudenti)
if controllaPercorso == True:
with open(percorsoStudenti, 'rb') as f:
reader = csv.DictReader(f)
contarighe = 0
converti = csvkit.unicsv.UnicodeCSVDictReader(f=f, encoding='utf-8')
for i in converti:
self.oreMaterie.SetCellValue(contarighe, 0, i[u'月曜日'])
self.oreMaterie.SetCellValue(contarighe, 1, i[u'火曜日'])
self.oreMaterie.SetCellValue(contarighe, 2, i[u'水曜日'])
self.oreMaterie.SetCellValue(contarighe, 3, i[u'木曜日'])
self.oreMaterie.SetCellValue(contarighe, 4, i[u'金曜日'])
self.oreMaterie.SetCellValue(contarighe, 5, i[u'土曜日'])
self.oreMaterie.SetCellValue(contarighe, 6, i[u'日曜日'])
contarighe = contarighe + 1
if controllaPercorso == False:
self.oreMaterie.SelectAll()
self.oreMaterie.ClearSelection()
self.oreMaterie.ClearGrid()
self.oreMaterie.Refresh()
self.oreMaterie1.SelectAll()
self.oreMaterie1.ClearSelection()
self.oreMaterie1.ClearGrid()
self.oreMaterie1.Refresh()
        # wx.DateTime.WeekDay is 0 for Sunday; timetable columns run Monday..Sunday.
        colonna = (self.calendario.Date.WeekDay + 6) % 7
for i in range(0, 9, 1):
valoriDaRestituire.append(self.oreMaterie.GetCellValue(i, colonna))
        print valoriDaRestituire, 'valoridarestituire'
        return valoriDaRestituire
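    # Same timetable loading as preparativiInserimentoAutomatico, driven by the
    # selection in studentiDelGiorno; additionally fills materieVere with the
    # distinct subjects scheduled in today's column.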
def mostraStudentiDelGiorno( self, event ):
global colonna
global riga
self.materieVere.Clear()
materieNecessarie = []
colonna=0
riga = 0
cercaStudente=self.studentiDelGiorno.StringSelection
studente = tabella.find_one(name=cercaStudente, student=1)
# self.kokugoCheck.SetValue(studente.kokugo)
# self.eigoCheck.SetValue(studente.eigo)
# self.suugakuCheck.SetValue(studente.suugaku)
# self.rikaCheck.SetValue(studente.rika)
# self.shakaiCheck.SetValue(studente.shakai)
# self.tokubetsuCheck.SetValue(studente.tokubetsu)
self.individualCheck.SetValue(studente.individual)
self.groupCheck.SetValue(studente.shared)
popolastudenti = tabella.find_one(name=self.studentiDelGiorno.StringSelection, student='1')
percorsoStudenti = './StudentsData/' +popolastudenti.name + popolastudenti.telephone + '.txt'
controllaPercorso = os.path.exists(percorsoStudenti)
if controllaPercorso == True:
with open(percorsoStudenti, 'rb') as f:
reader = csv.DictReader(f)
contarighe = 0
converti = csvkit.unicsv.UnicodeCSVDictReader(f=f, encoding='utf-8')
for i in converti:
self.oreMaterie.SetCellValue(contarighe, 0, i[u'月曜日'])
self.oreMaterie.SetCellValue(contarighe, 1, i[u'火曜日'])
self.oreMaterie.SetCellValue(contarighe, 2, i[u'水曜日'])
self.oreMaterie.SetCellValue(contarighe, 3, i[u'木曜日'])
self.oreMaterie.SetCellValue(contarighe, 4, i[u'金曜日'])
self.oreMaterie.SetCellValue(contarighe, 5, i[u'土曜日'])
self.oreMaterie.SetCellValue(contarighe, 6, i[u'日曜日'])
contarighe = contarighe + 1
if controllaPercorso == False:
self.oreMaterie.SelectAll()
self.oreMaterie.ClearSelection()
self.oreMaterie.ClearGrid()
self.oreMaterie.Refresh()
self.oreMaterie1.SelectAll()
self.oreMaterie1.ClearSelection()
self.oreMaterie1.ClearGrid()
self.oreMaterie1.Refresh()
        # wx.DateTime.WeekDay is 0 for Sunday; timetable columns run Monday..Sunday.
        colonna = (self.calendario.Date.WeekDay + 6) % 7
for i in range (0,9,1):
if self.oreMaterie.GetCellValue(i,colonna)!= '':
materieNecessarie.append(self.oreMaterie.GetCellValue(i,colonna))
materieUniche = set(materieNecessarie)
for i in materieUniche:
self.materieVere.Append(i)
percorsoStudenti1 = './StudentsData/' + popolastudenti.name + popolastudenti.telephone + 'tokubetsu.txt'
controllaPercorso1 = os.path.exists(percorsoStudenti1)
if controllaPercorso1 == True:
with open(percorsoStudenti1, 'rb') as f1:
reader1 = csv.DictReader(f1)
contarighe1 = 0
converti1 = csvkit.unicsv.UnicodeCSVDictReader(f=f1, encoding='utf-8')
for i in converti1:
self.oreMaterie1.SetCellValue(contarighe1, 0, i[u'月曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 1, i[u'火曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 2, i[u'水曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 3, i[u'木曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 4, i[u'金曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 5, i[u'土曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 6, i[u'日曜日'])
contarighe1 = contarighe1 + 1
def materieGiuste( self, event ):
stanza1 = []
stanza2 = []
stanza3 = []
stanza4 = []
self.stanza1.Enabled = False
self.stanza2.Enabled = False
self.stanza3.Enabled = False
self.stanza4.Enabled = False
postiLiberi1 = 0
postiLiberi2 = 0
postiLiberi3 = 0
postiLiberi4 = 0
self.ore.DeselectAll()
global colonna
global riga
global colonnaSelezionata
global rigaSelezionata
for i in range (0,9,1):
if self.materieVere.StringSelection == self.oreMaterie.GetCellValue(i,colonna):
self.ore.Select(i)
if self.override.Value == True:
self.listaMaterie.Clear()
self.listaMaterie.Append(self.materieVere.StringSelection)
colonnaSelezionata = self.ore.Selections[0]
#[1, 2, 3, 4, 6, 7, 8, 9, 11, 12, 13, 14, 16, 17, 18, 19]
for i in range(0,self.griglia.NumberRows):
if self.studentiDelGiorno.StringSelection in self.griglia.GetCellValue(i,colonnaSelezionata):
self.stanza1.Enabled = False
self.stanza2.Enabled = False
self.stanza3.Enabled = False
self.stanza4.Enabled = False
stanza1 = []
stanza2 = []
stanza3 = []
stanza4 = []
print 'sonouscito'
break
if i >=1 and i <=4:
stanza1.append(self.griglia.GetCellValue(i,colonnaSelezionata))
if i >=6 and i <=9:
stanza2.append(self.griglia.GetCellValue(i,colonnaSelezionata))
if i >=11 and i <=14:
stanza3.append(self.griglia.GetCellValue(i,colonnaSelezionata))
if i >=16 and i <=19:
stanza4.append(self.griglia.GetCellValue(i,colonnaSelezionata))
for i in stanza1:
if i== unicode(''):
postiLiberi1 = postiLiberi1+1
for i in stanza2:
if i== u'':
postiLiberi2 = postiLiberi2+1
for i in stanza3:
if i== u'':
postiLiberi3 = postiLiberi3+1
for i in stanza4:
if i== u'':
postiLiberi4 = postiLiberi4+1
print postiLiberi1,postiLiberi2,postiLiberi3,postiLiberi4
        self.stanza1.Enabled = postiLiberi1 >= 1
        self.stanza2.Enabled = postiLiberi2 >= 1
        self.stanza3.Enabled = postiLiberi3 >= 1
        self.stanza4.Enabled = postiLiberi4 >= 1
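        # A 'K'-prefixed cell seems to mark a private lesson that blocks the
        # whole room; note that materiePrivate() prepends 'K' with no trailing
        # space, so the 'K ' test below may never match as written.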
for i in stanza1:
if 'K ' in i:
self.stanza1.Enabled = False
for i in stanza2:
if 'K ' in i:
self.stanza2.Enabled = False
for i in stanza3:
if 'K ' in i:
self.stanza3.Enabled = False
for i in stanza4:
if 'K ' in i:
self.stanza4.Enabled = False
def materieSettateOverride( self, event ):
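        # With override checked, restrict the subject list to the selected
        # subject only; unchecked, restore the full six-subject list.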
if self.override.Value == True:
self.listaMaterie.Clear()
self.listaMaterie.Append(self.materieVere.StringSelection)
if self.override.Value == False:
self.listaMaterie.Clear()
listaMaterie = [u'国語', u'英語', u'数学', u'理科', u'社会', u'特別']
for i in listaMaterie:
self.listaMaterie.Append(i)
def inserisciInsegnante(self, event):
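        # Write the selected teacher into the header row (0/5/10/15) of the
        # room block containing the clicked cell, but only after
        # funzioni.controlloDuplicati confirms the teacher is not already
        # booked in that column.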
global colonnaSelezionata
global rigaSelezionata
global stanza
global materia
print rigaSelezionata, type(rigaSelezionata)
print self.griglia.GetCellValue(colonnaSelezionata, 0)
controlloDuplicati = funzioni.controlloDuplicati(colonnaSelezionata, rigaSelezionata,
self.listaInsegnanti.StringSelection,
self.griglia.GetCellValue(0, colonnaSelezionata),
self.griglia.GetCellValue(5, colonnaSelezionata),
self.griglia.GetCellValue(10, colonnaSelezionata),
self.griglia.GetCellValue(15, colonnaSelezionata))
if controlloDuplicati == True:
            # select rows and rooms
if rigaSelezionata >= 0 and rigaSelezionata <= 4:
stanza = 0
self.griglia.SetCellValue(0, colonnaSelezionata, self.listaInsegnanti.StringSelection)
#self.griglia.SetCellValue(1, colonnaSelezionata, self.listaMaterie.StringSelection)
#self.selezionaStudenti(self.listaMaterie.StringSelection,stanza)
if rigaSelezionata >= 5 and rigaSelezionata <= 9:
stanza = 5
self.griglia.SetCellValue(5, colonnaSelezionata, self.listaInsegnanti.StringSelection)
#self.griglia.SetCellValue(7, colonnaSelezionata, self.listaMaterie.StringSelection)
#self.selezionaStudenti(self.listaMaterie.StringSelection,stanza)
if rigaSelezionata >= 10 and rigaSelezionata <= 14:
stanza = 10
self.griglia.SetCellValue(10, colonnaSelezionata, self.listaInsegnanti.StringSelection)
#self.griglia.SetCellValue(13, colonnaSelezionata, self.listaMaterie.StringSelection)
#self.selezionaStudenti(self.listaMaterie.StringSelection,stanza)
if rigaSelezionata >= 15 and rigaSelezionata <= 19:
stanza = 15
self.griglia.SetCellValue(15, colonnaSelezionata, self.listaInsegnanti.StringSelection)
#self.griglia.SetCellValue(19, colonnaSelezionata, self.listaMaterie.StringSelection)
#self.selezionaStudenti(self.listaMaterie.StringSelection,stanza)
if controlloDuplicati == False:
pass
def selezionaStudentiEMaterie( self, event ):
pass
def selezionaMaterie(self, event):
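        # Repopulate the subject list with the subjects this teacher teaches.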
self.listaMaterie.Clear()
materieSelezionateInsegnanti = funzioni.materieInsegnanti(self.listaInsegnanti.StringSelection, tabella)
for i in materieSelezionateInsegnanti:
self.listaMaterie.Append(i)
def selezioneCalendario(self, event):
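        # Dump the day grid (rows 0-22) to the CSV at `percorso` before the
        # calendar moves to another day.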
with open(percorso, 'wb') as f:
fieldnames = ['9:10 - 10:20', '10:30 - 11:40', '11:50 - 13:00', '13:40 - 14:50', '15:00 - 16:10',
'16:40 - 17:50', '18:00 - 19:10', '19:20 - 20:30', '20:40 - 21:50']
writer = csv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
writer.writeheader()
for i in range(0, 23, 1):
#print i
ciao = utf_8_encoder(self.griglia.GetCellValue(i, 0))
#print ciao, 'ciao'
                writer.writerow(dict(zip(fieldnames,
                    [utf_8_encoder(self.griglia.GetCellValue(i, c)) for c in range(9)])))
def controlloGiornaliero( self, event ):
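        # Daily check: paint yellow every occupied cell that still carries a
        # subject tag such as u' (国語)', one room block at a time.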
Config = ConfigParser.ConfigParser()
Config.read('./cfg.ini')
colorestanza1 = Config.get('Colori stanze', 'colorestanza1')
colorestanza2 = Config.get('Colori stanze', 'colorestanza2')
colorestanza3 = Config.get('Colori stanze', 'colorestanza3')
colorestanza4 = Config.get('Colori stanze', 'colorestanza4')
listaMaterie = [u' (国語)', u' (英語)', u' (数学)', u' (理科)', u' (社会)', u' (特別)']
        # highlight any occupied cell whose text carries a subject tag; the
        # original `in ... != -1` chained comparison only worked by accident
        # and is replaced by a plain membership test
        for inizio, fine in ((1, 5), (6, 10), (11, 15), (16, 20)):
            for r in range(inizio, fine):
                for c in range(0, 9):
                    if self.griglia.GetCellValue(r, c) != '':
                        for i in listaMaterie:
                            if i in self.griglia.GetCellValue(r, c):
                                self.griglia.SetCellBackgroundColour(r, c, wx.YELLOW)
self.griglia.Refresh()
        print 'wait'
def salvaCSV(self, event):
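        # Save the day grid to ./csv/<year>/<month>/<date>.csv using the date
        # shown before the calendar changed (datavecchia), then reload the
        # grid for the newly selected date.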
# self.primaCheck.SetValue(0)
# self.secondaCheck.SetValue(0)
# self.terzaCheck.SetValue(0)
# self.quartaCheck.SetValue(0)
# self.quintaCheck.SetValue(0)
# self.sestaCheck.SetValue(0)
# self.settimaCheck.SetValue(0)
# self.ottavaCheck.SetValue(0)
# self.nonaCheck.SetValue(0)
# self.kokugol.LabelText=''
# self.eigol.LabelText=''
# self.suugakul.LabelText=''
# self.rikal.LabelText=''
# self.shakail.LabelText=''
# self.tokubetsul.LabelText=''
global percorso
#self.caricaCSV()
global datavecchia
nomeFile = datavecchia
nomeFile = nomeFile.replace('/', '-')
nomeFile = nomeFile.replace(' 00:00:00', '')
anno = '20' + nomeFile[-2:]
percorso = './csv/' + anno + '/' + nomeFile[:2] + '/' + nomeFile + '.csv'
with open(percorso, 'wb') as f:
fieldnames = ['9:10 - 10:20', '10:30 - 11:40', '11:50 - 13:00', '13:40 - 14:50', '15:00 - 16:10',
'16:40 - 17:50', '18:00 - 19:10', '19:20 - 20:30', '20:40 - 21:50']
writer = csv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
writer.writeheader()
for i in range(0, 20, 1):
#print i
ciao = utf_8_encoder(self.griglia.GetCellValue(i, 0))
                writer.writerow(dict(zip(fieldnames,
                    [utf_8_encoder(self.griglia.GetCellValue(i, c)) for c in range(9)])))
#print datavecchia, 'datavecchiasalvacsv'
datavecchia = str(self.calendario.Date)
self.caricaCSV()
def salvaDatiCSV(self, percorso):
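        # Same CSV dump as selezioneCalendario, but to a caller-supplied path.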
global datavecchia
with open(percorso, 'wb') as f:
fieldnames = ['9:10 - 10:20', '10:30 - 11:40', '11:50 - 13:00', '13:40 - 14:50', '15:00 - 16:10',
'16:40 - 17:50', '18:00 - 19:10', '19:20 - 20:30', '20:40 - 21:50']
writer = csv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
writer.writeheader()
for i in range(0, 23, 1):
#print i
ciao = utf_8_encoder(self.griglia.GetCellValue(i, 0))
#print ciao, 'ciao'
                writer.writerow(dict(zip(fieldnames,
                    [utf_8_encoder(self.griglia.GetCellValue(i, c)) for c in range(9)])))
#print datavecchia, 'datavecchiasalvaDATIcsv'
datavecchia = str(self.calendario.Date)
# put a blank string in text when 'Clear' is clicked
def clearFunc(self, event):
self.text.SetValue(str(''))
def FunzioneUserMenu(self, event):
self.finestrautenti = finestraUtenti(None)
self.finestrautenti.Show(True)
def funzioneOpzioni( self, event ):
self.finestraopzioni = finestraOpzioni(None)
self.finestraopzioni.Show(True)
def shiftinsegnanti(self, event):
self.shiftInsegnanti = shiftinsegnanti(None)
self.shiftInsegnanti.Show(True)
def shiftstudenti(self, event):
self.shiftStudenti = shiftstudenti(None)
self.shiftStudenti.Show(True)
def gestioneStudenti(self, event):
self.finestrastudenti = finestraStudenti(None)
self.finestrastudenti.Show(True)
def mostraSalva( self, event ):
self.salvadialog = saveDialog(None)
self.salvadialog.Show(True)
def mostraInfoStudente( self, event ):
global mostraInfoStud
global datiInfoStudente
mostraInfoStud= True
datiInfoStudente = self.studentiDelGiorno.StringSelection
self.finestrastudenti = finestraStudenti(None)
self.finestrastudenti.Show(True)
datiInfoStudente = ''
class saveDialog(JukuPlanner.SaveDialog):
def __init__(self, parent):
# initialize parent class
JukuPlanner.SaveDialog.__init__(self, parent)
class shiftstudenti(JukuPlanner.shiftGakusei):
def __init__(self, parent):
JukuPlanner.shiftGakusei.__init__(self, parent)
popolastudenti = tabella.find(student='1')
popolaInsegnanti = tabellaIns.find(teacher='1')
for i in popolastudenti:
self.listaSt.Append(unicode(i.name))
def creaLoShift( self, event ):
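        # Build the shift sheet for the selected student in the chosen folder.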
popolaInsegnanti = tabellaIns.find(teacher='1')
#funzioni.generaShiftStudenti(self.listaSt.StringSelection,self.selezioneCartella.TextCtrlValue)
funzioni.creashiftStudente(self.selezioneCartella.TextCtrlValue,self.listaSt.StringSelection,tabellaIns)
def sendToOne( self, event ):
popolastudenti = tabella.find_one(name=self.listaSt.StringSelection)
funzioni.mandaShiftStudenti(popolastudenti.name,popolastudenti.email)
class finestraOpzioni(JukuPlanner.Opzioni):
def __init__(self, parent):
# initialize parent class
JukuPlanner.Opzioni.__init__(self, parent)
Config = ConfigParser.SafeConfigParser()
Config.read('./cfg.ini')
colorestanza1 = Config.get('Colori stanze', 'colorestanza1')
colorestanza2 = Config.get('Colori stanze', 'colorestanza2')
colorestanza3 = Config.get('Colori stanze', 'colorestanza3')
colorestanza4 = Config.get('Colori stanze', 'colorestanza4')
percorsoDB= Config.get('Paths','databasepath')
percorsoCSV = Config.get('Paths', 'csvpath')
percorsoStudenti = Config.get('Paths', 'studentpath')
colore1 = funzioni.coonvertiStringaInColore(colorestanza1)
colore2 = funzioni.coonvertiStringaInColore(colorestanza2)
colore3 = funzioni.coonvertiStringaInColore(colorestanza3)
colore4 = funzioni.coonvertiStringaInColore(colorestanza4)
self.pickerstanza1.SetColour(wx.Colour(int(colore1[0]), int(colore1[1]), int(colore1[2]), int(colore1[3])))
self.pickerstanza2.SetColour(wx.Colour(int(colore2[0]), int(colore2[1]), int(colore2[2]), int(colore2[3])))
self.pickerstanza3.SetColour(wx.Colour(int(colore3[0]), int(colore3[1]), int(colore3[2]), int(colore3[3])))
self.pickerstanza4.SetColour(wx.Colour(int(colore4[0]), int(colore4[1]), int(colore4[2]), int(colore4[3])))
print percorsoDB
self.percorsoDatabase.SetPath(percorsoDB)
self.percorsoCSV.SetPath(percorsoCSV)
self.percorsoStudenti.SetPath(percorsoStudenti)
#self.mailLogin.Value = Config.get('Paths','databasePath')
def settaColori( self, event ):
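        # Rebuild cfg.ini from scratch with the dialog's current values; any
        # setting not represented in this dialog is lost on save.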
Config = ConfigParser.ConfigParser()
Config.add_section('Colori stanze')
cfgfile = open("cfg.ini", 'w')
Config.set('Colori stanze', 'ColoreStanza1', self.pickerstanza1.Colour)
Config.set('Colori stanze', 'ColoreStanza2', self.pickerstanza2.Colour)
Config.set('Colori stanze', 'ColoreStanza3', self.pickerstanza3.Colour)
Config.set('Colori stanze', 'ColoreStanza4', self.pickerstanza4.Colour)
Config.add_section('Paths')
Config.set('Paths','databasePath',self.percorsoDatabase.TextCtrlValue)
Config.set('Paths', 'csvpath', self.percorsoCSV.TextCtrlValue)
Config.set('Paths', 'studentpath', self.percorsoStudenti.TextCtrlValue)
Config.add_section('MailSetting')
Config.set('MailSetting', 'login',self.mailLogin.Value)
Config.set('MailSetting', 'password', self.mailPassword.Value)
Config.set('MailSetting', 'server', self.mailServer.Value)
Config.write(cfgfile)
cfgfile.close()
def caricaColori( self, event ):
pass
class shiftinsegnanti(JukuPlanner.shiftSensei):
def __init__(self, parent):
JukuPlanner.shiftSensei.__init__(self,parent)
popolaInsegnanti = tabellaIns.find(teacher='1')
for i in popolaInsegnanti:
self.listaIns.Append(unicode(i.name))
print i.name
def creaLoShift( self, event ):
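        # Build the shift sheet for the selected teacher in the chosen folder.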
        mailInsegnante = tabellaIns.find_one(teacher='1', name=self.listaIns.StringSelection)
cartellaSelezionata = self.selezioneCartella.TextCtrlValue
funzioni.creashift(cartellaSelezionata,self.listaIns.StringSelection)
def sendToOne( self, event ):
cartellaSelezionata = self.selezioneCartella.TextCtrlValue
mailInsegnante = tabellaIns.find_one(teacher='1', name=self.listaIns.StringSelection)
print mailInsegnante
funzioni.mandaShift(mailInsegnante.name, mailInsegnante.email, self.linkDrive.Value)
class finestraStudenti(JukuPlanner.gakuseiFrame):
def __init__(self, parent):
# initialize parent class
JukuPlanner.gakuseiFrame.__init__(self, parent)
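        # Populate the student list, zero the totals grid and, when opened
        # from mostraInfoStudente, restrict the list to that one student.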
popolaStudenti = tabella.find(student='1')
for i in popolaStudenti:
self.listaStudenti.Append(unicode(i.name))
self.oreMaterie.SetColMinimalWidth(0,30)
        for c in range(0, 6):
            self.grigliaTotali.SetCellValue(0, c, '0')
            self.grigliaTotali.SetCellValue(2, c, '0')
self.nuovo.Enabled = False
self.cancella.Enabled = False
self.aggiorna.Enabled = False
global contaPrivate
global mostraInfoStud
global datiInfoStudente
if datiInfoStudente != '':
self.listaStudenti.Clear()
self.listaStudenti.Append(datiInfoStudente)
contaPrivate = 0
def cardDelete( self, event ):
self.cardid.LabelText= ''
def funzioneInvio(self, event):
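        # Validate the form (name and furigana are required), then insert the
        # new student, the weekday preferences and the month's lesson totals
        # into their respective tables.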
global contaPrivate
contaPrivate = 0
orario = {}
orario1 = {}
for creaorariofasullo in range(0, 9, 1):
orario[creaorariofasullo] = False
for creaorariofasullo in range(0, 9, 1):
orario1[creaorariofasullo] = False
#print orario[creaorariofasullo]
cercaNome = tabella.find_one(name=self.casellaNome.Value)
#print self.tabellaOre.Selections
for i in self.tabellaOre.Selections:
#print len(self.tabellaOre.Items)
orario[i] = True
#print orario[i]
for i in self.tabellaOre1.Selections:
#print len(self.tabellaOre.Items)
orario1[i] = True
#print orario[i]
caselleDaCompletare = u''
if self.casellaNome.Value == '':
caselleDaCompletare = unicode(caselleDaCompletare)+ u'お名前入力してください. '
if self.furigana.Value == '':
caselleDaCompletare = unicode(caselleDaCompletare) + u'振り仮名入力してください. '
if caselleDaCompletare != '':
dlg = wx.MessageDialog(None, caselleDaCompletare, '', wx.ICON_QUESTION)
result = dlg.ShowModal()
if cercaNome is not None:
            self.errorCheck.LabelText = 'Name already in the database'
if cercaNome is None and caselleDaCompletare == u'':
self.errorCheck.LabelText = u'データ保存されました'
tabellaGiorni.insert(dict(name=self.casellaNome.Value, lunedi=self.lunedi.Value,
martedi=self.martedi.Value, mercoledi=self.mercoledi.Value,
giovedi=self.giovedi.Value, venerdi=self.venerdi.Value,
sabato=self.sabato.Value, domenica=self.domenica.Value,
lunedi1=self.lunedi1.Value,
martedi1=self.martedi1.Value, mercoledi1=self.mercoledi1.Value,
giovedi1=self.giovedi1.Value, venerdi1=self.venerdi1.Value,
sabato1=self.sabato1.Value, domenica1=self.domenica1.Value))
tabella.insert(
dict(name=self.casellaNome.Value, cardID=self.cardid.Label, telephone=self.casellaTelefono.Value,furigana = self.furigana.Value ,
email=self.casellaEmail.Value, parentMail=self.mailGenitori.Value, scuola=self.casellaScuola.Value,
maschio=self.danseiBox.Value, femmina=self.joseiBox.Value, student=1, sonota=self.sonota.Value,
teacher=0, kokugo=self.kokugo.Value, eigo=self.eigo.Value, suugaku=self.suugaku.Value,
rika=self.rika.Value, shakai=self.shakai.Value, tokubetsu=self.tokubetsu.Value,
primaOra=orario[0], secondaOra=orario[1], terzaOra=orario[2], quartaOra=orario[3],
quintaOra=orario[4], sestaOra=orario[5], settimaOra=orario[6], ottavaOra=orario[7],
nonaOra=orario[8],individual=self.individual.Value, shared=self.shared.Value,
kokugo1=self.kokugo1.Value, eigo1=self.eigo1.Value, suugaku1=self.suugaku1.Value,
rika1=self.rika1.Value, shakai1=self.shakai1.Value, tokubetsu1=self.tokubetsu1.Value,
primaOra1=orario1[0], secondaOra1=orario1[1], terzaOra1=orario1[2], quartaOra1=orario1[3],
quintaOra1=orario1[4], sestaOra1=orario1[5], settimaOra1=orario1[6], ottavaOra1=orario1[7],
nonaOra1=orario1[8],))
tabellaCalcoli.insert(dict(name=self.casellaNome.Value,
anno=self.calendarioStudenti.Date.Year ,
mese=self.calendarioStudenti.Date.Month,
normaleigo=self.grigliaTotali.GetCellValue(0,0),
normalsuugaku=self.grigliaTotali.GetCellValue(0,1),
normalkokugo=self.grigliaTotali.GetCellValue(0,2),
normalrika=self.grigliaTotali.GetCellValue(0, 3),
normalshakai=self.grigliaTotali.GetCellValue(0,4),
normaltokubetsu=self.grigliaTotali.GetCellValue(0,5,),
tsuikaeigo=self.grigliaTotali.GetCellValue(2,0),
tsuikasuugaku=self.grigliaTotali.GetCellValue(2,1),
tsuikakokugo=self.grigliaTotali.GetCellValue(2,2),
tsuikarika=self.grigliaTotali.GetCellValue(2, 3),
tsuikashakai=self.grigliaTotali.GetCellValue(2,4),
tsuikatokubetsu=self.grigliaTotali.GetCellValue(2,5,)
))
for i in self.usciteStudenti.Items:
tabellaTempo.insert(dict(name=self.casellaNome.Value, uscite=i))
self.listaStudenti.Clear()
popolaStudenti = tabella.find(student='1')
for i in popolaStudenti:
self.listaStudenti.Append(unicode(i.name))
self.oreMaterie.SetColMinimalWidth(0, 30)
self.invio.Enabled = False
#self.errorCheck.LabelText = u'データ保存されました'
def materiePrivate( self, event ):
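        # Toggle the 'K' (private lesson) prefix on the selected subject cell.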
global rigaMaterie
global colonnaMaterie
if self.oreMaterie.GetCellValue(rigaMaterie,colonnaMaterie)!= '':
if self.oreMaterie.GetCellValue(rigaMaterie,colonnaMaterie)[0] == 'K':
prestrippata = self.oreMaterie.GetCellValue(rigaMaterie,colonnaMaterie)
                strippata = prestrippata[1:]  # drop only the single leading 'K'
self.oreMaterie.SetCellValue(rigaMaterie, colonnaMaterie,
strippata)
else:
self.oreMaterie.SetCellValue(rigaMaterie,colonnaMaterie, u'K'+ self.oreMaterie.GetCellValue(rigaMaterie,colonnaMaterie))
def cancellaMaterie( self, event ):
global rigaMaterie
global colonnaMaterie
rigaMaterie = event.GetRow()
colonnaMaterie = event.GetCol()
self.oreMaterie.SetCellValue(rigaMaterie, colonnaMaterie, '')
def inviaShift( self, event ):
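        # Count the selected student's booked slots across every CSV of the
        # displayed month; the shift file opened below is never written in
        # this version.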
random_data = os.urandom(128)
nomerandom = hashlib.md5(random_data).hexdigest()[:16]
shiftTemp = open('./shift/'+nomerandom+'.txt','w')
datavecchia = str(self.calendarioStudenti.Date)
nomeFile = datavecchia
lezioniPrivate = 0
nomeFile = nomeFile.replace('/', '-')
nomeFile = nomeFile.replace(' 00:00:00', '')
anno = '20' + nomeFile[-2:]
percorso = './csv/' + anno + '/' + nomeFile[:2] + '/'
files = os.listdir(percorso)
files_txt = [i for i in files if i.endswith('.csv')]
print files_txt
for i in files_txt:
with open(percorso + i, 'rb') as f:
reader = csv.DictReader(f)
contarighe = 0
converti = csvkit.unicsv.UnicodeCSVDictReader(f=f, encoding='utf-8')
                print converti
                for i in converti:
                    # the original checked each slot twice (double-counting)
                    # and never checked 16:40 - 17:50, both apparently
                    # copy-paste slips; count every slot once.  9:10 - 10:20
                    # stays excluded, as the original skipped it with an
                    # explicit `pass`.
                    for slot in ('10:30 - 11:40', '11:50 - 13:00',
                                 '13:40 - 14:50', '15:00 - 16:10',
                                 '16:40 - 17:50', '18:00 - 19:10',
                                 '19:20 - 20:30', '20:40 - 21:50'):
                        if i[slot] == self.listaStudenti.StringSelection:
                            lezioniPrivate = lezioniPrivate + 1
        print lezioniPrivate, 'private lessons'
def materieGiorno( self, event ):
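        # Copy the selected subject into the last-clicked cell of the first
        # timetable grid, then clear the selection.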
global rigaMaterie
global colonnaMaterie
print rigaMaterie,colonnaMaterie
        if len(self.materieGiorni.Items) >= 1:
            self.oreMaterie.SetCellValue(rigaMaterie, colonnaMaterie, self.materieGiorni.StringSelection)
            self.materieGiorni.DeselectAll()
def materieGiorno1( self, event ):
global rigaMaterie1
global colonnaMaterie1
#print rigaMaterie,colonnaMaterie
        if len(self.materieGiorni1.Items) >= 1:
            self.oreMaterie1.SetCellValue(rigaMaterie1, colonnaMaterie1, self.materieGiorni1.StringSelection)
            self.materieGiorni1.DeselectAll()
def nuovoStudente( self, event ):
self.grigliaLezioniSingole.SelectAll()
self.grigliaLezioniSingole.ClearSelection()
self.grigliaLezioniSingole.ClearGrid()
self.grigliaLezioniSingole.Refresh()
self.grigliaTotali.SelectAll()
self.grigliaTotali.ClearSelection()
self.grigliaTotali.ClearGrid()
self.grigliaTotali.Refresh()
for i in range(0,6,1):
self.grigliaTotali.SetCellBackgroundColour(3,i,wx.WHITE)
self.invio.Enabled=True
self.aggiorna.Enabled=False
self.cancella.Enabled=False
self.casellaNome.Clear()
self.casellaTelefono.Clear()
self.furigana.Clear()
self.danseiBox.Value = False
self.joseiBox.Value = False
self.sonota.Value = False
self.casellaEmail.Clear()
self.mailGenitori.Clear()
self.casellaScuola.Clear()
self.tabellaOre.DeselectAll()
self.usciteStudenti.Clear()
self.materieGiorni.Clear()
self.kokugo.Value = False
self.eigo.Value = False
self.suugaku.Value = False
self.rika.Value = False
self.tokubetsu.Value = False
self.shakai.Value = False
self.oreMaterie.ClearGrid()
self.lunedi.Value = False
self.martedi.Value = False
self.mercoledi.Value = False
self.giovedi.Value = False
self.venerdi.Value = False
self.sabato.Value = False
self.domenica.Value = False
self.tabellaOre1.DeselectAll()
self.materieGiorni1.Clear()
self.kokugo1.Value = False
self.eigo1.Value = False
self.suugaku1.Value = False
self.rika1.Value = False
self.tokubetsu1.Value = False
self.shakai1.Value = False
self.oreMaterie1.ClearGrid()
self.lunedi1.Value = False
self.martedi1.Value = False
self.mercoledi1.Value = False
self.giovedi1.Value = False
self.venerdi1.Value = False
self.sabato1.Value = False
self.domenica1.Value = False
self.individual.Value = False
self.shared.Value = False
self.cardid.LabelText=''
self.cardcancel.Enabled = False
self.CardRegistration.Enabled = True
        # reset both subject grids (rows 0-8 x cols 0-8) to a white background
        for r in range(0, 9):
            for c in range(0, 9):
                self.oreMaterie1.SetCellBackgroundColour(r, c, wx.WHITE)
                self.oreMaterie.SetCellBackgroundColour(r, c, wx.WHITE)
self.listaStudenti.DeselectAll()
def femmina( self, event ):
self.danseiBox.Value = False
self.sonota.Value = False
def maschio( self, event ):
self.joseiBox.Value = False
self.sonota.Value = False
def mostraMeseCorrente( self, event ):
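        # With the checkbox on, filter the attendance list down to the month
        # shown on the student calendar; off, reload the full list.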
listadate = []
dataComp = str(self.calendarioStudenti.Date.Year) + '/' + str(self.calendarioStudenti.Date.Month + 1) + '/'
dataComposta = funzioni.aggiungizeriSenzaGiorno(self.calendarioStudenti.Date.Year, self.calendarioStudenti.Date.Month + 1)
dataunicode = unicode(dataComposta)
contaitem = 0
popolaDate = tabellaTempo.find(name=self.listaStudenti.StringSelection)
if self.meseCorrente.Value == True:
for i in self.usciteStudenti.Items:
if dataunicode in i :
listadate.append(i)
self.usciteStudenti.Clear()
for i in listadate:
self.usciteStudenti.Append(i)
if self.meseCorrente.Value == False:
self.usciteStudenti.Clear()
for i in popolaDate:
if len((str(i.uscite))) >= 5:
self.usciteStudenti.Append(str(i.uscite))
def lgbt( self, event ):
self.joseiBox.Value = False
self.danseiBox.Value = False
    def selezionaCellaMateria( self, event ):
        global rigaMaterie
        global colonnaMaterie
        rigaMaterie = event.GetRow()
        colonnaMaterie = event.GetCol()
        # the original wrote into oreMaterie1 using the other grid's globals,
        # an apparent copy-paste slip; write the selection into this grid
        self.oreMaterie.SetCellValue(rigaMaterie, colonnaMaterie, self.materieGiorni.StringSelection)
def calcoliDiFineMese( self, event ):
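        # End-of-month processing: after confirmation, insert or update the
        # totals and carried-over balances (row 33 of the single-lesson grid)
        # in tabellaCalcoli for the selected student, year and month.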
dlg = wx.MessageDialog(None, u"選択された生徒月末処理しますか", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if result == wx.ID_YES:
popolaCalcoli = tabellaCalcoli.find_one(name=self.listaStudenti.StringSelection,
anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month)
if popolaCalcoli is not None:
datiCalcoli = (
dict(id=popolaCalcoli.id, name=self.casellaNome.Value, mese=self.calendarioStudenti.Date.Month,
anno=self.calendarioStudenti.Date.Year, normaleigo=self.grigliaTotali.GetCellValue(0, 0),
normalsuugaku=self.grigliaTotali.GetCellValue(0, 1),
normalkokugo=self.grigliaTotali.GetCellValue(0, 2),
normalrika=self.grigliaTotali.GetCellValue(0, 3),
normalshakai=self.grigliaTotali.GetCellValue(0, 4),
normaltokubetsu=self.grigliaTotali.GetCellValue(0, 5),
tsuikaeigo=self.grigliaTotali.GetCellValue(2, 0),
tsuikasuugaku=self.grigliaTotali.GetCellValue(2, 1),
tsuikakokugo=self.grigliaTotali.GetCellValue(2, 2),
tsuikarika=self.grigliaTotali.GetCellValue(2, 3),
tsuikashakai=self.grigliaTotali.GetCellValue(2, 4),
tsuikatokubetsu=self.grigliaTotali.GetCellValue(2, 5),
balanceeigo=self.grigliaLezioniSingole.GetCellValue(33, 0),
balancesuugaku=self.grigliaLezioniSingole.GetCellValue(33, 1),
balancekokugo=self.grigliaLezioniSingole.GetCellValue(33, 2),
balancerika=self.grigliaLezioniSingole.GetCellValue(33, 3),
balanceshakai=self.grigliaLezioniSingole.GetCellValue(33, 4),
balancetokubetu=self.grigliaLezioniSingole.GetCellValue(33, 5)))
tabellaCalcoli.update(datiCalcoli, ['id'])
if popolaCalcoli is None:
tabellaCalcoli.insert(dict(name=self.casellaNome.Value, anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month,
normaleigo=self.grigliaTotali.GetCellValue(0, 0),
normalsuugaku=self.grigliaTotali.GetCellValue(0, 1),
normalkokugo=self.grigliaTotali.GetCellValue(0, 2),
normalrika=self.grigliaTotali.GetCellValue(0, 3),
normalshakai=self.grigliaTotali.GetCellValue(0, 4),
normaltokubetsu=self.grigliaTotali.GetCellValue(0, 5),
tsuikaeigo=self.grigliaTotali.GetCellValue(2, 0),
tsuikasuugaku=self.grigliaTotali.GetCellValue(2, 1),
tsuikakokugo=self.grigliaTotali.GetCellValue(2, 2),
tsuikarika=self.grigliaTotali.GetCellValue(2, 3),
tsuikashakai=self.grigliaTotali.GetCellValue(2, 4),
tsuikatokubetsu=self.grigliaTotali.GetCellValue(2, 5),
balanceeigo=self.grigliaLezioniSingole.GetCellValue(33, 0),
balancesuugaku=self.grigliaLezioniSingole.GetCellValue(33, 1),
balancekokugo=self.grigliaLezioniSingole.GetCellValue(33, 2),
balancerika=self.grigliaLezioniSingole.GetCellValue(33, 3),
balanceshakai=self.grigliaLezioniSingole.GetCellValue(33, 4),
balancetokubetu=self.grigliaLezioniSingole.GetCellValue(33, 5)))
def aggiornaCalcoli( self, event ):
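        # Recompute the month's lesson counts: clear the grids, reload stored
        # totals and last month's balances from tabellaCalcoli, re-scan the
        # month's CSV files, then insert or update the refreshed totals.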
global contaPrivate
self.grigliaLezioniSingole.ClearGrid()
        for r in range(0, 31):
            for c in range(0, 6):
                self.grigliaLezioniSingole.SetCellBackgroundColour(r, c, wx.WHITE)
        for c in range(0, 6):
            self.grigliaLezioniSingole.SetCellValue(31, c, '0')
            self.grigliaTotali.SetCellValue(0, c, '0')
            self.grigliaTotali.SetCellValue(1, c, '0')
            self.grigliaTotali.SetCellValue(2, c, '0')
datavecchia = str(self.calendarioStudenti.Date)
nomeFile = datavecchia
lezioniPrivate = 0
nomeFile = nomeFile.replace('/', '-')
nomeFile = nomeFile.replace(' 00:00:00', '')
anno = '20' + nomeFile[-2:]
percorso = './csv/' + anno + '/' + nomeFile[:2] + '/'
if not os.path.exists(percorso):
os.makedirs(percorso)
print percorso
files = os.listdir(percorso)
files_txt = [i for i in files if i.endswith('.csv')]
# print files_txt
# contaPrivate = 0
# for files in files_txt:
# self.riempiTabella(percorso, files)
# print files_txt
print files_txt
popolaCalcoli = tabellaCalcoli.find_one(name=self.listaStudenti.StringSelection, anno=self.calendarioStudenti.Date.Year, mese=self.calendarioStudenti.Date.Month)
popolaCalcoliMesePassato = tabellaCalcoli.find_one(name=self.listaStudenti.StringSelection,
anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month-1)
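        # note: Month - 1 does not wrap across the year boundary, so in
        # January no previous-month balance is found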
print self.calendarioStudenti.Date.Month, 'self.calendarioStudenti.Date.Month'
if popolaCalcoli is not None:
self.grigliaTotali.SetCellValue(0,0,popolaCalcoli.normaleigo)
self.grigliaTotali.SetCellValue(0, 1, popolaCalcoli.normalsuugaku)
self.grigliaTotali.SetCellValue(0, 2, popolaCalcoli.normalkokugo)
self.grigliaTotali.SetCellValue(0, 3, popolaCalcoli.normalrika)
self.grigliaTotali.SetCellValue(0, 4, popolaCalcoli.normalshakai)
self.grigliaTotali.SetCellValue(0, 5, popolaCalcoli.normaltokubetsu)
# self.grigliaTotali.SetCellValue(2, 0, popolaCalcoli.tsuikaeigo)
# self.grigliaTotali.SetCellValue(2, 1, popolaCalcoli.tsuikasuugaku)
# self.grigliaTotali.SetCellValue(2, 2, popolaCalcoli.tsuikakokugo)
# self.grigliaTotali.SetCellValue(2, 3, popolaCalcoli.tsuikarika)
# self.grigliaTotali.SetCellValue(2, 4, popolaCalcoli.tsuikashakai)
# self.grigliaTotali.SetCellValue(2, 5, popolaCalcoli.tsuikatokubetsu)
if popolaCalcoliMesePassato is not None and popolaCalcoliMesePassato.balanceeigo is not None:
self.grigliaTotali.SetCellValue(1, 0, popolaCalcoliMesePassato.balanceeigo)
self.grigliaTotali.SetCellValue(1, 1, popolaCalcoliMesePassato.balancesuugaku)
self.grigliaTotali.SetCellValue(1, 2, popolaCalcoliMesePassato.balancekokugo)
self.grigliaTotali.SetCellValue(1, 3, popolaCalcoliMesePassato.balancerika)
self.grigliaTotali.SetCellValue(1, 4, popolaCalcoliMesePassato.balanceshakai)
self.grigliaTotali.SetCellValue(1, 5, popolaCalcoliMesePassato.balancetokubetu)
if popolaCalcoliMesePassato is None:
self.grigliaTotali.SetCellValue(1, 0, '0')
self.grigliaTotali.SetCellValue(1, 1, '0')
self.grigliaTotali.SetCellValue(1, 2, '0')
self.grigliaTotali.SetCellValue(1, 3, '0')
self.grigliaTotali.SetCellValue(1, 4, '0')
self.grigliaTotali.SetCellValue(1, 5, '0')
if popolaCalcoli is None:
self.grigliaTotali.SetCellValue(0,0,'0')
self.grigliaTotali.SetCellValue(0, 1, '0')
self.grigliaTotali.SetCellValue(0, 2,'0')
self.grigliaTotali.SetCellValue(0, 3, '0')
self.grigliaTotali.SetCellValue(0, 4,'0')
self.grigliaTotali.SetCellValue(0, 5, '0')
# self.grigliaTotali.SetCellValue(2, 0, '0')
# self.grigliaTotali.SetCellValue(2, 1, '0')
# self.grigliaTotali.SetCellValue(2, 2,'0')
# self.grigliaTotali.SetCellValue(2, 3, '0')
# self.grigliaTotali.SetCellValue(2, 4, '0')
# self.grigliaTotali.SetCellValue(2, 5, '0')
        if files_txt:  # the list comprehension never yields None; test emptiness
            contaPrivate = 0
            for files in files_txt:
                self.riempiTabella(percorso, files)
        else:
            self.grigliaLezioniSingole.ClearGrid()
if popolaCalcoli is not None:
datiCalcoli = (
dict(id=popolaCalcoli.id, name=self.casellaNome.Value, mese=self.calendarioStudenti.Date.Month,
anno=self.calendarioStudenti.Date.Year, normaleigo=self.grigliaTotali.GetCellValue(0, 0),
normalsuugaku=self.grigliaTotali.GetCellValue(0, 1),
normalkokugo=self.grigliaTotali.GetCellValue(0, 2),
normalrika=self.grigliaTotali.GetCellValue(0, 3), normalshakai=self.grigliaTotali.GetCellValue(0, 4),
normaltokubetsu=self.grigliaTotali.GetCellValue(0, 5),
# tsuikaeigo=self.grigliaTotali.GetCellValue(2, 0),
# tsuikasuugaku=self.grigliaTotali.GetCellValue(2, 1),
# tsuikakokugo=self.grigliaTotali.GetCellValue(2, 2),
# tsuikarika=self.grigliaTotali.GetCellValue(2, 3),
# tsuikashakai=self.grigliaTotali.GetCellValue(2, 4),
# tsuikatokubetsu=self.grigliaTotali.GetCellValue(2, 5, ),
#balanceeigo = self.grigliaLezioniSingole.GetCellValue(33, 0),
#balancesuugaku = self.grigliaLezioniSingole.GetCellValue(33, 1),
#balancekokugo = self.grigliaLezioniSingole.GetCellValue(33, 2),
#balancerika = self.grigliaLezioniSingole.GetCellValue(33, 3),
#balanceshakai = self.grigliaLezioniSingole.GetCellValue(33, 4),
#balancetokubetu = self.grigliaLezioniSingole.GetCellValue(33, 5)
))
tabellaCalcoli.update(datiCalcoli, ['id'])
if popolaCalcoli is None:
tabellaCalcoli.insert(dict(name=self.casellaNome.Value, anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month,
normaleigo=self.grigliaTotali.GetCellValue(0, 0),
normalsuugaku=self.grigliaTotali.GetCellValue(0, 1),
normalkokugo=self.grigliaTotali.GetCellValue(0, 2),
normalrika=self.grigliaTotali.GetCellValue(0, 3),
normalshakai=self.grigliaTotali.GetCellValue(0, 4),
normaltokubetsu=self.grigliaTotali.GetCellValue(0, 5),
# tsuikaeigo=self.grigliaTotali.GetCellValue(2, 0),
# tsuikasuugaku=self.grigliaTotali.GetCellValue(2, 1),
# tsuikakokugo=self.grigliaTotali.GetCellValue(2, 2),
# tsuikarika=self.grigliaTotali.GetCellValue(2, 3),
# tsuikashakai=self.grigliaTotali.GetCellValue(2, 4),
# tsuikatokubetsu=self.grigliaTotali.GetCellValue(2, 5),
# balanceeigo= self.grigliaLezioniSingole.GetCellValue(33,0),
# balancesuugaku = self.grigliaLezioniSingole.GetCellValue(33,1),
# balancekokugo=self.grigliaLezioniSingole.GetCellValue(33, 2),
# balancerika=self.grigliaLezioniSingole.GetCellValue(33, 3),
# balanceshakai=self.grigliaLezioniSingole.GetCellValue(33, 4),
# balancetokubetu=self.grigliaLezioniSingole.GetCellValue(33, 5)
))
totaleeigo = int(self.grigliaTotali.GetCellValue(0, 0))+int(self.grigliaTotali.GetCellValue(1, 0))+int(self.grigliaTotali.GetCellValue(2, 0))
totalesuugaku = int(self.grigliaTotali.GetCellValue(0, 1)) + int(self.grigliaTotali.GetCellValue(1, 1))+ int(self.grigliaTotali.GetCellValue(2, 1))
totalekokugo = int(self.grigliaTotali.GetCellValue(0, 2))+int(self.grigliaTotali.GetCellValue(1, 2))+int(self.grigliaTotali.GetCellValue(2, 2))
totalerika = int(self.grigliaTotali.GetCellValue(0, 3)) + int(self.grigliaTotali.GetCellValue(1, 3))+ int(self.grigliaTotali.GetCellValue(2,3))
totaleshakai = int(self.grigliaTotali.GetCellValue(0, 4))+int(self.grigliaTotali.GetCellValue(1, 4))+int(self.grigliaTotali.GetCellValue(2,4))
totaletokubetsu = int(self.grigliaTotali.GetCellValue(0, 5)) + int(self.grigliaTotali.GetCellValue(1, 5))+ int(self.grigliaTotali.GetCellValue(2, 5))
self.grigliaTotali.SetCellValue(3, 0,str(totaleeigo))
self.grigliaTotali.SetCellValue(3, 1, str(totalesuugaku))
self.grigliaTotali.SetCellValue(3, 2, str(totalekokugo))
self.grigliaTotali.SetCellValue(3, 3, str(totalerika))
self.grigliaTotali.SetCellValue(3, 4, str(totaleshakai))
self.grigliaTotali.SetCellValue(3, 5, str(totaletokubetsu))
nokorieigo = int(self.grigliaTotali.GetCellValue(3, 0)) - int(self.grigliaLezioniSingole.GetCellValue(31, 0))
nokorisuugaku = int(self.grigliaTotali.GetCellValue(3, 1)) - int(self.grigliaLezioniSingole.GetCellValue(31, 1))
nokorikokugo = int(self.grigliaTotali.GetCellValue(3, 2)) - int(self.grigliaLezioniSingole.GetCellValue(31, 2))
nokoririka = int(self.grigliaTotali.GetCellValue(3, 3)) - int(self.grigliaLezioniSingole.GetCellValue(31, 3))
nokorishakai = int(self.grigliaTotali.GetCellValue(3, 4)) - int(self.grigliaLezioniSingole.GetCellValue(31,4))
nokoritokubetsu = int(self.grigliaTotali.GetCellValue(3, 5)) - int(self.grigliaLezioniSingole.GetCellValue(31, 5))
# self.grigliaLezioniSingole.SetCellValue(32,0,str(nokorieigo))
# self.grigliaLezioniSingole.SetCellValue(32, 1, str(nokorisuugaku))
# self.grigliaLezioniSingole.SetCellValue(32, 2, str(nokorikokugo))
# self.grigliaLezioniSingole.SetCellValue(32, 3, str(nokoririka))
# self.grigliaLezioniSingole.SetCellValue(32, 4, str(nokorishakai))
# self.grigliaLezioniSingole.SetCellValue(32, 5, str(nokoritokubetsu))
def selezionaCellaMateria1( self, event ):
global rigaMaterie1
global colonnaMaterie1
rigaMaterie1 = event.GetRow()
colonnaMaterie1 = event.GetCol()
self.oreMaterie1.SetCellValue(rigaMaterie1, colonnaMaterie1, self.materieGiorni1.StringSelection)
def caricaDate(self, event):
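        # Load the selected student into the form: personal data, subject
        # flags, hour preferences, attendance dates, weekly patterns, saved
        # timetables and the month's lesson totals.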
global materieArray
global materieTesto
global contaPrivate
contaPrivate = 0
self.aggiorna.Enabled = True
self.cancella.Enabled = True
self.nuovo.Enabled = True
self.materieGiorni.Clear()
self.materieGiorni1.Clear()
self.grigliaLezioniSingole.ClearGrid()
self.grigliaTotali.ClearGrid()
self.errorCheck.LabelText='-------------------------------------------------------------------------------------------------------------------------------------------------'
self.usciteStudenti.Clear()
self.invio.Enabled= False
self.oreMaterie.ClearGrid()
self.oreMaterie1.ClearGrid()
        for r in range(0, 31):
            for c in range(0, 6):
                self.grigliaLezioniSingole.SetCellBackgroundColour(r, c, wx.WHITE)
        # reset both subject grids to a white background
        for r in range(0, 9):
            for c in range(0, 9):
                self.oreMaterie1.SetCellBackgroundColour(r, c, wx.WHITE)
                self.oreMaterie.SetCellBackgroundColour(r, c, wx.WHITE)
popolaGiorni = tabellaGiorni.find(name=self.listaStudenti.StringSelection)
popolaDate = tabellaTempo.find(name=self.listaStudenti.StringSelection)
popolastudenti = tabella.find(name=self.listaStudenti.StringSelection, student='1')
global idSelezionato
global idGiorni
global idCalcoli
materieTesto = []
popolaCalcoli = tabellaCalcoli.find_one(name=self.listaStudenti.StringSelection,
anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month)
if popolaCalcoli is not None:
self.grigliaTotali.SetCellValue(0, 0, popolaCalcoli.normaleigo)
self.grigliaTotali.SetCellValue(0, 1, popolaCalcoli.normalsuugaku)
self.grigliaTotali.SetCellValue(0, 2, popolaCalcoli.normalkokugo)
self.grigliaTotali.SetCellValue(0, 3, popolaCalcoli.normalrika)
self.grigliaTotali.SetCellValue(0, 4, popolaCalcoli.normalshakai)
self.grigliaTotali.SetCellValue(0, 5, popolaCalcoli.normaltokubetsu)
# self.grigliaTotali.SetCellValue(2, 0, popolaCalcoli.tsuikaeigo)
# self.grigliaTotali.SetCellValue(2, 1, popolaCalcoli.tsuikasuugaku)
# self.grigliaTotali.SetCellValue(2, 2, popolaCalcoli.tsuikakokugo)
# self.grigliaTotali.SetCellValue(2, 3, popolaCalcoli.tsuikarika)
# self.grigliaTotali.SetCellValue(2, 4, popolaCalcoli.tsuikashakai)
# self.grigliaTotali.SetCellValue(2, 5, popolaCalcoli.tsuikatokubetsu)
if popolaCalcoli is None:
self.grigliaTotali.SetCellValue(0, 0, '0')
self.grigliaTotali.SetCellValue(0, 1, '0')
self.grigliaTotali.SetCellValue(0, 2, '0')
self.grigliaTotali.SetCellValue(0, 3, '0')
self.grigliaTotali.SetCellValue(0, 4, '0')
self.grigliaTotali.SetCellValue(0, 5, '0')
# self.grigliaTotali.SetCellValue(2, 0, '0')
# self.grigliaTotali.SetCellValue(2, 1, '0')
# self.grigliaTotali.SetCellValue(2, 2, '0')
# self.grigliaTotali.SetCellValue(2, 3, '0')
# self.grigliaTotali.SetCellValue(2, 4, '0')
# self.grigliaTotali.SetCellValue(2, 5, '0')
popolaCalcoliMesePassato = tabellaCalcoli.find_one(name=self.listaStudenti.StringSelection,
anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month - 1)
if popolaCalcoliMesePassato is not None and popolaCalcoliMesePassato.balanceeigo is not None:
self.grigliaTotali.SetCellValue(1, 0, popolaCalcoliMesePassato.balanceeigo)
self.grigliaTotali.SetCellValue(1, 1, popolaCalcoliMesePassato.balancesuugaku)
self.grigliaTotali.SetCellValue(1, 2, popolaCalcoliMesePassato.balancekokugo)
self.grigliaTotali.SetCellValue(1, 3, popolaCalcoliMesePassato.balancerika)
self.grigliaTotali.SetCellValue(1, 4, popolaCalcoliMesePassato.balanceshakai)
self.grigliaTotali.SetCellValue(1, 5, popolaCalcoliMesePassato.balancetokubetu)
if popolaCalcoliMesePassato is not None and popolaCalcoliMesePassato.balanceeigo is None:
self.grigliaTotali.SetCellValue(1, 0, '0')
self.grigliaTotali.SetCellValue(1, 1, '0')
self.grigliaTotali.SetCellValue(1, 2, '0')
self.grigliaTotali.SetCellValue(1, 3, '0')
self.grigliaTotali.SetCellValue(1, 4, '0')
self.grigliaTotali.SetCellValue(1, 5, '0')
if popolaCalcoliMesePassato is None:
self.grigliaTotali.SetCellValue(1, 0, '0')
self.grigliaTotali.SetCellValue(1, 1, '0')
self.grigliaTotali.SetCellValue(1, 2, '0')
self.grigliaTotali.SetCellValue(1, 3, '0')
self.grigliaTotali.SetCellValue(1, 4, '0')
self.grigliaTotali.SetCellValue(1, 5, '0')
#self.grigliaTotali.SetCellValue(0,0,i.)
        # reset both subject grids to white again before repopulating
        for r in range(0, 9):
            for c in range(0, 9):
                self.oreMaterie.SetCellBackgroundColour(r, c, wx.WHITE)
                self.oreMaterie1.SetCellBackgroundColour(r, c, wx.WHITE)
for i in popolastudenti:
idSelezionato = i.id
self.furigana.Value = i.furigana
self.joseiBox.Value =i.femmina
self.danseiBox.Value = i.maschio
self.sonota.Value = i.sonota
self.casellaScuola.LabelText = i.scuola
self.casellaNome.LabelText = i.name
self.casellaEmail.LabelText = i.email
self.mailGenitori.LabelText = i.parentMail
self.casellaTelefono.LabelText = i.telephone
self.eigo.Value = i.eigo
self.rika.Value = i.rika
self.shakai.Value = i.shakai
self.suugaku.Value = i.suugaku
self.tokubetsu.Value = i.tokubetsu
self.kokugo.Value = i.kokugo
self.eigo1.Value = i.eigo1
self.rika1.Value = i.rika1
self.shakai1.Value = i.shakai1
self.suugaku1.Value = i.suugaku1
self.tokubetsu1.Value = i.tokubetsu1
self.kokugo1.Value = i.kokugo1
self.individual.Value = i.individual
self.shared.Value = i.shared
            materieArray=[i.eigo,i.rika,i.shakai,i.suugaku,i.tokubetsu,i.kokugo]
            # assumption: the second array should mirror the second course's
            # subject flags; the original copied the first course's fields
            materieArray1 = [i.eigo1, i.rika1, i.shakai1, i.suugaku1, i.tokubetsu1, i.kokugo1]
if self.eigo.Value == True:
self.materieGiorni.Append(u'英語')
if self.rika.Value == True:
self.materieGiorni.Append(u'理科')
if self.shakai.Value == True:
self.materieGiorni.Append(u'社会')
if self.suugaku.Value == True:
self.materieGiorni.Append(u'数学')
if self.tokubetsu.Value == True:
self.materieGiorni.Append(u'特別')
if self.kokugo.Value == True:
self.materieGiorni.Append(u'国語')
if self.eigo1.Value == True:
self.materieGiorni1.Append(u'英語')
if self.rika1.Value == True:
self.materieGiorni1.Append(u'理科')
if self.shakai1.Value == True:
self.materieGiorni1.Append(u'社会')
if self.suugaku1.Value == True:
self.materieGiorni1.Append(u'数学')
if self.tokubetsu1.Value == True:
self.materieGiorni1.Append(u'特別')
if self.kokugo1.Value == True:
self.materieGiorni1.Append(u'国語')
if i.cardID == '' or i.cardID == u"カード未登録です、登録してください" :
self.cardid.LabelText = u"カード未登録です、登録してください"
self.cardcancel.Enabled = False
self.CardRegistration.Enabled= True
else:
self.cardid.LabelText = i.cardID
self.cardcancel.Enabled = True
self.CardRegistration.Enabled= False
arrayore = [i.primaOra, i.secondaOra, i.terzaOra, i.quartaOra, i.quintaOra, i.sestaOra, i.settimaOra,
i.ottavaOra, i.nonaOra]
arrayore1 = [i.primaOra1, i.secondaOra1, i.terzaOra1, i.quartaOra1, i.quintaOra1, i.sestaOra1, i.settimaOra1,
i.ottavaOra1, i.nonaOra1]
#print arrayore
for settaOre in range(0, 9, 1):
self.tabellaOre.SetSelection(settaOre, select=arrayore[settaOre])
for settaOre in range(0, 9, 1):
self.tabellaOre1.SetSelection(settaOre, select=arrayore1[settaOre])
for i in popolaDate:
if len((str(i.uscite)))>=5:
self.usciteStudenti.Append(str(i.uscite))
for i in popolaGiorni:
idGiorni = i.id
self.lunedi.Value = i.lunedi
self.martedi.Value = i.martedi
self.mercoledi.Value = i.mercoledi
self.giovedi.Value = i.giovedi
self.venerdi.Value = i.venerdi
self.sabato.Value = i.sabato
self.domenica.Value = i.domenica
self.lunedi1.Value = i.lunedi1
self.martedi1.Value = i.martedi1
self.mercoledi1.Value = i.mercoledi1
self.giovedi1.Value = i.giovedi1
self.venerdi1.Value = i.venerdi1
self.sabato1.Value = i.sabato1
self.domenica1.Value = i.domenica1
datavecchia = str(self.calendarioStudenti.Date)
nomeFile = datavecchia
lezioniPrivate = 0
nomeFile = nomeFile.replace('/', '-')
nomeFile = nomeFile.replace(' 00:00:00', '')
anno = '20' + nomeFile[-2:]
percorso = './csv/' + anno + '/' + nomeFile[:2] + '/'
listashift = []
listashift1 = []
if self.lunedi.Value == True:
listashift.append(0)
if self.martedi.Value == True:
listashift.append(1)
if self.mercoledi.Value == True:
listashift.append(2)
if self.giovedi.Value == True:
listashift.append(3)
if self.venerdi.Value == True:
listashift.append(4)
if self.sabato.Value == True:
listashift.append(5)
if self.domenica.Value == True:
listashift.append(6)
if self.lunedi1.Value == True:
listashift1.append(0)
if self.martedi1.Value == True:
listashift1.append(1)
if self.mercoledi1.Value == True:
listashift1.append(2)
if self.giovedi1.Value == True:
listashift1.append(3)
if self.venerdi1.Value == True:
listashift1.append(4)
if self.sabato1.Value == True:
listashift1.append(5)
if self.domenica1.Value == True:
listashift1.append(6)
self.grigliaLezioniSingole.SetColFormatNumber(0)
self.grigliaLezioniSingole.SetColFormatNumber(1)
self.grigliaLezioniSingole.SetColFormatNumber(2)
self.grigliaLezioniSingole.SetColFormatNumber(3)
self.grigliaLezioniSingole.SetColFormatNumber(4)
self.grigliaLezioniSingole.SetColFormatNumber(5)
contagiri = 0
contagiri1 = 0
lunghezzaShift = len(listashift)
lunghezzaShift1 = len(listashift1)
for i in range(0,9,1):
if lunghezzaShift >=1:
for giorni in listashift:
if arrayore[contagiri] == True:
self.oreMaterie.SetCellBackgroundColour(contagiri, giorni, wx.GREEN)
contagiri = contagiri+1
self.oreMaterie.Refresh()
# percorsoStudentimemo = './StudentsData/' + self.casellaNome.Value + self.casellaTelefono.Value + 'memo.txt'
# controllaPercorso = os.path.exists(percorsoStudentimemo)
#
# if controllaPercorso == True:
# with open(percorsoStudentimemo, 'rb') as f:
# self.memo.Value = f
percorsoStudenti = './StudentsData/'+self.casellaNome.Value+self.casellaTelefono.Value+'.txt'
controllaPercorso = os.path.exists(percorsoStudenti)
if controllaPercorso == True:
with open(percorsoStudenti, 'rb') as f:
reader = csv.DictReader(f)
contarighe = 0
converti = csvkit.unicsv.UnicodeCSVDictReader(f=f, encoding='utf-8')
for i in converti:
self.oreMaterie.SetCellValue(contarighe, 0, i[u'月曜日'])
self.oreMaterie.SetCellValue(contarighe, 1, i[u'火曜日'])
self.oreMaterie.SetCellValue(contarighe, 2, i[u'水曜日'])
self.oreMaterie.SetCellValue(contarighe, 3, i[u'木曜日'])
self.oreMaterie.SetCellValue(contarighe, 4, i[u'金曜日'])
self.oreMaterie.SetCellValue(contarighe, 5, i[u'土曜日'])
self.oreMaterie.SetCellValue(contarighe, 6, i[u'日曜日'])
contarighe = contarighe + 1
for i in range(0,9,1):
if lunghezzaShift1 >=1:
for giorni in listashift1:
if arrayore1[contagiri1] == True:
self.oreMaterie1.SetCellBackgroundColour(contagiri1, giorni, wx.RED)
contagiri1 = contagiri1+1
self.oreMaterie1.Refresh()
percorsoStudenti1 = './StudentsData/'+self.casellaNome.Value+self.casellaTelefono.Value+'tokubetsu.txt'
controllaPercorso1 = os.path.exists(percorsoStudenti1)
if controllaPercorso1 == True:
with open(percorsoStudenti1, 'rb') as f1:
reader1 = csv.DictReader(f1)
contarighe1 = 0
converti1 = csvkit.unicsv.UnicodeCSVDictReader(f=f1, encoding='utf-8')
for i in converti1:
self.oreMaterie1.SetCellValue(contarighe1, 0, i[u'月曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 1, i[u'火曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 2, i[u'水曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 3, i[u'木曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 4, i[u'金曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 5, i[u'土曜日'])
self.oreMaterie1.SetCellValue(contarighe1, 6, i[u'日曜日'])
contarighe1 = contarighe1 + 1
files = os.listdir(percorso)
files_txt = [i for i in files if i.endswith('.csv')]
#print files_txt
for files in files_txt:
self.riempiTabella(percorso,files)
totaleeigo = int(self.grigliaTotali.GetCellValue(0, 0)) + int(self.grigliaTotali.GetCellValue(1, 0)) + int(
self.grigliaTotali.GetCellValue(2, 0))
totalesuugaku = int(self.grigliaTotali.GetCellValue(0, 1)) + int(self.grigliaTotali.GetCellValue(1, 1)) + int(
self.grigliaTotali.GetCellValue(2, 1))
totalekokugo = int(self.grigliaTotali.GetCellValue(0, 2)) + int(self.grigliaTotali.GetCellValue(1, 2)) + int(
self.grigliaTotali.GetCellValue(2, 2))
totalerika = int(self.grigliaTotali.GetCellValue(0, 3)) + int(self.grigliaTotali.GetCellValue(1, 3)) + int(
self.grigliaTotali.GetCellValue(2, 3))
totaleshakai = int(self.grigliaTotali.GetCellValue(0, 4)) + int(self.grigliaTotali.GetCellValue(1, 4)) + int(
self.grigliaTotali.GetCellValue(2, 4))
totaletokubetsu = int(self.grigliaTotali.GetCellValue(0, 5)) + int(self.grigliaTotali.GetCellValue(1, 5)) + int(
self.grigliaTotali.GetCellValue(2, 5))
self.grigliaTotali.SetCellValue(3, 0, str(totaleeigo))
self.grigliaTotali.SetCellValue(3, 1, str(totalesuugaku))
self.grigliaTotali.SetCellValue(3, 2, str(totalekokugo))
self.grigliaTotali.SetCellValue(3, 3, str(totalerika))
self.grigliaTotali.SetCellValue(3, 4, str(totaleshakai))
self.grigliaTotali.SetCellValue(3, 5, str(totaletokubetsu))
        # colour each total: GREEN when it matches the booked count (row 34
        # of the single-lesson grid), RED when short of it, YELLOW when over
        totali = (totaleeigo, totalesuugaku, totalekokugo,
                  totalerika, totaleshakai, totaletokubetsu)
        for c in range(0, 6):
            prenotate = int(self.grigliaLezioniSingole.GetCellValue(34, c))
            if totali[c] == prenotate:
                self.grigliaTotali.SetCellBackgroundColour(3, c, wx.GREEN)
            elif totali[c] < prenotate:
                self.grigliaTotali.SetCellBackgroundColour(3, c, wx.RED)
            else:
                self.grigliaTotali.SetCellBackgroundColour(3, c, wx.YELLOW)
        # Remaining lessons per subject; the results are currently not
        # written back to any grid.
        nokorieigo = int(self.grigliaLezioniSingole.GetCellValue(31, 0)) - int(self.grigliaTotali.GetCellValue(2, 0))
        nokorisuugaku = int(self.grigliaLezioniSingole.GetCellValue(31, 1)) - int(self.grigliaTotali.GetCellValue(2, 1))
        nokorikokugo = int(self.grigliaLezioniSingole.GetCellValue(31, 2)) - int(self.grigliaTotali.GetCellValue(2, 2))
        nokoririka = int(self.grigliaLezioniSingole.GetCellValue(31, 3)) - int(self.grigliaTotali.GetCellValue(2, 3))
        nokorishakai = int(self.grigliaLezioniSingole.GetCellValue(31, 4)) - int(self.grigliaTotali.GetCellValue(2, 4))
        nokoritokubetsu = int(self.grigliaLezioniSingole.GetCellValue(31, 5)) - int(self.grigliaTotali.GetCellValue(2, 5))
self.invio.Enabled = False
    def riempiTabella(self, percorso, files):
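        """Fill the per-day lesson grid from one day's timetable CSV.

        percorso is the folder holding the daily CSV files; files is a single
        file name, whose characters [3:5] apparently encode the day of the
        month used as the grid row. Counts lessons per subject and time slot
        for the student selected in listaStudenti.
        """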
global contaPrivate
with open(percorso + files, 'rb') as f:
sommainglese = 0
sommakokugo = 0
sommashakai = 0
sommarika = 0
sommatokubetsu = 0
sommasuugaku = 0
converti = csvkit.unicsv.UnicodeCSVDictReader(f=f, encoding='utf-8')
kokugotemp = 0
suugakutemp = 0
eigotemp = 0
rikatemp = 0
shakaitemp = 0
tokubetsutemp = 0
kokugotemp1 = 0
suugakutemp1 = 0
eigotemp1 = 0
rikatemp1 = 0
shakaitemp1 = 0
tokubetsutemp1 = 0
dataComposta = funzioni.trasformaNomefileInOra(f.name)
controlloCheckIn = funzioni.controlloCheckIn(self.listaStudenti.StringSelection, tabellaTempo, dataComposta)
            # Time slots of the day; each CSV column header is one slot.
            fasce = ['9:10 - 10:20', '10:30 - 11:40', '11:50 - 13:00',
                     '13:40 - 14:50', '15:00 - 16:10', '16:40 - 17:50',
                     '18:00 - 19:10', '19:20 - 20:30', '20:40 - 21:50']
            # Subject code from funzioni.contalezioni -> grid column:
            # 1=kokugo(2), 2=eigo(0), 3=suugaku(1), 4=rika(3), 5=shakai(4), 6=tokubetsu(5)
            colonne = {1: 2, 2: 0, 3: 1, 4: 3, 5: 4, 6: 5}
            conteggi = {1: kokugotemp1, 2: eigotemp1, 3: suugakutemp1,
                        4: rikatemp1, 5: shakaitemp1, 6: tokubetsutemp1}
            conteggiCheckIn = {1: kokugotemp, 2: eigotemp, 3: suugakutemp,
                               4: rikatemp, 5: shakaitemp, 6: tokubetsutemp}
            riga = int(files[3:5]) - 1
            for i in converti:
                for fascia in fasce:
                    if self.listaStudenti.StringSelection in i[fascia]:
                        materia = funzioni.contalezioni(i[fascia])
                        if materia in colonne:
                            if controlloCheckIn == 'NON':
                                # Booked lesson without a check-in: plain count.
                                conteggi[materia] += 1
                                self.grigliaLezioniSingole.SetCellValue(
                                    riga, colonne[materia], str(conteggi[materia]))
                            if controlloCheckIn == 'OUT' or controlloCheckIn == 'IN':
                                # Lesson backed by a check-in/out record: count it
                                # and highlight the cell in green.
                                conteggiCheckIn[materia] += 1
                                self.grigliaLezioniSingole.SetCellValue(
                                    riga, colonne[materia], str(conteggiCheckIn[materia]))
                                self.grigliaLezioniSingole.SetCellBackgroundColour(
                                    riga, colonne[materia], wx.GREEN)
                    # Private lessons are stored with a 'K ' prefix before the name.
                    if u'K ' + self.listaStudenti.StringSelection in i[fascia]:
                        contaPrivate = contaPrivate + 1
        # Sum the daily counts per subject over the month (rows 0-30).
        # Green cells are lessons backed by a check-in; white cells are
        # lessons that were only scheduled.
        sommeVerdi = [0, 0, 0, 0, 0, 0]
        sommeBianche = [0, 0, 0, 0, 0, 0]
        for colonna in range(0, 6):
            for riga in range(0, 31):
                valore = self.grigliaLezioniSingole.GetCellValue(riga, colonna)
                colore = self.grigliaLezioniSingole.GetCellBackgroundColour(riga, colonna)
                if valore != u'' and colore == wx.GREEN:
                    sommeVerdi[colonna] = sommeVerdi[colonna] + int(valore)
                if valore != u'' and colore == wx.WHITE:
                    sommeBianche[colonna] = sommeBianche[colonna] + int(valore)
        sommainglese, sommasuugaku, sommakokugo, sommarika, sommashakai, sommatokubetsu = sommeVerdi
        sommainglese1, sommasuugaku1, sommakokugo1, sommarika1, sommashakai1, sommatokubetsu1 = sommeBianche
sommarika1 = sommarika1 + int(sommarika)
sommakokugo1 = sommakokugo1 + int(sommakokugo)
sommasuugaku1 = sommasuugaku1 + int(sommasuugaku)
sommainglese1 = sommainglese1 + int(sommainglese)
sommashakai1 = sommashakai1 + int(sommashakai)
sommatokubetsu1 = sommatokubetsu1 + int(sommatokubetsu)
        self.grigliaLezioniSingole.SetCellValue(32, 0, str(contaPrivate))
self.grigliaLezioniSingole.SetCellValue(31, 0, str(sommainglese))
self.grigliaLezioniSingole.SetCellValue(31, 1, str(sommasuugaku))
self.grigliaLezioniSingole.SetCellValue(31, 2, str(sommakokugo))
self.grigliaLezioniSingole.SetCellValue(31, 3, str(sommarika))
self.grigliaLezioniSingole.SetCellValue(31, 4, str(sommashakai))
self.grigliaLezioniSingole.SetCellValue(31, 5, str(sommatokubetsu))
self.grigliaLezioniSingole.SetCellValue(34, 0, str(sommainglese1))
self.grigliaLezioniSingole.SetCellValue(34, 1, str(sommasuugaku1))
self.grigliaLezioniSingole.SetCellValue(34, 2, str(sommakokugo1))
self.grigliaLezioniSingole.SetCellValue(34, 3, str(sommarika1))
self.grigliaLezioniSingole.SetCellValue(34, 4, str(sommashakai1))
self.grigliaLezioniSingole.SetCellValue(34, 5, str(sommatokubetsu1))
balanceEigo = int(self.grigliaLezioniSingole.GetCellValue(34, 0)) - sommainglese
balancesuugaku = int(self.grigliaLezioniSingole.GetCellValue(34, 1)) - sommasuugaku
balancekokugo = int(self.grigliaLezioniSingole.GetCellValue(34, 2))- sommakokugo
balancerika = int(self.grigliaLezioniSingole.GetCellValue(34, 3)) - sommarika
balanceshakai = int(self.grigliaLezioniSingole.GetCellValue(34, 4)) - sommashakai
balancetokubetu = int(self.grigliaLezioniSingole.GetCellValue(34, 5)) - sommatokubetsu
self.grigliaLezioniSingole.SetCellValue(33, 0, str(balanceEigo))
self.grigliaLezioniSingole.SetCellValue(33, 1, str(balancesuugaku))
self.grigliaLezioniSingole.SetCellValue(33, 2, str(balancekokugo))
self.grigliaLezioniSingole.SetCellValue(33, 3, str(balancerika))
self.grigliaLezioniSingole.SetCellValue(33, 4, str(balanceshakai))
self.grigliaLezioniSingole.SetCellValue(33, 5, str(balancetokubetu))
        # Default empty totals to '0' before doing arithmetic on them.
        for colonna in range(0, 6):
            if self.grigliaTotali.GetCellValue(0, colonna) == '':
                self.grigliaTotali.SetCellValue(0, colonna, '0')
        tsuikaeigo = int(self.grigliaLezioniSingole.GetCellValue(34, 0)) - int(self.grigliaTotali.GetCellValue(0, 0)) - int(self.grigliaTotali.GetCellValue(1, 0))
        tsuikakokugo = int(self.grigliaLezioniSingole.GetCellValue(34, 2)) - int(self.grigliaTotali.GetCellValue(0, 2)) - int(self.grigliaTotali.GetCellValue(1, 2))
        tsuikasuugaku = int(self.grigliaLezioniSingole.GetCellValue(34, 1)) - int(self.grigliaTotali.GetCellValue(0, 1)) - int(self.grigliaTotali.GetCellValue(1, 1))
        tsuikarika = int(self.grigliaLezioniSingole.GetCellValue(34, 3)) - int(self.grigliaTotali.GetCellValue(0, 3)) - int(self.grigliaTotali.GetCellValue(1, 3))
        tsuikashakai = int(self.grigliaLezioniSingole.GetCellValue(34, 4)) - int(self.grigliaTotali.GetCellValue(0, 4)) - int(self.grigliaTotali.GetCellValue(1, 4))
        tsuikatokubetsu = int(self.grigliaLezioniSingole.GetCellValue(34, 5)) - int(self.grigliaTotali.GetCellValue(0, 5)) - int(self.grigliaTotali.GetCellValue(1, 5))
        # Extra (tsuika) lessons cannot be negative: clamp at zero.
        tsuikaPerColonna = [tsuikaeigo, tsuikasuugaku, tsuikakokugo,
                            tsuikarika, tsuikashakai, tsuikatokubetsu]
        for colonna, tsuika in enumerate(tsuikaPerColonna):
            self.grigliaTotali.SetCellValue(2, colonna, str(max(tsuika, 0)))
def aggiornaDati(self, event):
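        """Persist the selected student: personal data, hour selections and
        weekday flags, the monthly totals (tabellaCalcoli), the two weekly
        timetables as CSV files, and any newly added dates."""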
        # One boolean per time slot (0-8); True where the slot is selected.
        orario = {}
        orario1 = {}
        for slot in range(0, 9):
            orario[slot] = False
            orario1[slot] = False
        for i in self.tabellaOre.Selections:
            orario[i] = True
        for i in self.tabellaOre1.Selections:
            orario1[i] = True
dati = dict(id=idSelezionato, name=self.casellaNome.Value, cardID=self.cardid.Label,
telephone=self.casellaTelefono.Value,
email=self.casellaEmail.Value, student=1,scuola=self.casellaScuola.Value,
maschio=self.danseiBox.Value, sonota=self.sonota.Value, femmina=self.joseiBox.Value,
furigana=self.furigana.Value,
parentMail=self.mailGenitori.Value,
teacher=0, kokugo=self.kokugo.Value, eigo=self.eigo.Value, suugaku=self.suugaku.Value,
rika=self.rika.Value, shakai=self.shakai.Value, tokubetsu=self.tokubetsu.Value,
primaOra=orario[0], secondaOra=orario[1], terzaOra=orario[2], quartaOra=orario[3],
quintaOra=orario[4], sestaOra=orario[5], settimaOra=orario[6], ottavaOra=orario[7],
nonaOra=orario[8],individual=self.individual.Value, shared=self.shared.Value,
kokugo1=self.kokugo1.Value, eigo1=self.eigo1.Value, suugaku1=self.suugaku1.Value,
rika1=self.rika1.Value, shakai1=self.shakai1.Value, tokubetsu1=self.tokubetsu1.Value,
primaOra1=orario1[0], secondaOra1=orario1[1], terzaOra1=orario1[2], quartaOra1=orario1[3],
quintaOra1=orario1[4], sestaOra1=orario1[5], settimaOra1=orario1[6], ottavaOra1=orario1[7],
nonaOra1=orario1[8])
tabella.update(dati, ['id'])
datigiorni = (dict(id = idGiorni, name=self.casellaNome.Value, lunedi=self.lunedi.Value,
martedi=self.martedi.Value, mercoledi=self.mercoledi.Value,
giovedi=self.giovedi.Value, venerdi=self.venerdi.Value,
sabato=self.sabato.Value, domenica=self.domenica.Value,lunedi1=self.lunedi1.Value,
martedi1=self.martedi1.Value, mercoledi1=self.mercoledi1.Value,
giovedi1=self.giovedi1.Value, venerdi1=self.venerdi1.Value,
sabato1=self.sabato1.Value, domenica1=self.domenica1.Value))
tabellaGiorni.update(datigiorni,['id'])
popolaCalcoli = tabellaCalcoli.find_one(name=self.listaStudenti.StringSelection,
anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month)
if popolaCalcoli is not None:
            datiCalcoli = (dict(id=popolaCalcoli.id, name=self.casellaNome.Value,
                               mese=self.calendarioStudenti.Date.Month,
                               anno=self.calendarioStudenti.Date.Year,
                               normaleigo=self.grigliaTotali.GetCellValue(0, 0),
normalsuugaku=self.grigliaTotali.GetCellValue(0,1),normalkokugo=self.grigliaTotali.GetCellValue(0,2),
normalrika=self.grigliaTotali.GetCellValue(0, 3),normalshakai=self.grigliaTotali.GetCellValue(0,4),
normaltokubetsu=self.grigliaTotali.GetCellValue(0,5),
tsuikaeigo=self.grigliaTotali.GetCellValue(2, 0),
tsuikasuugaku=self.grigliaTotali.GetCellValue(2, 1),
tsuikakokugo=self.grigliaTotali.GetCellValue(2, 2),
tsuikarika=self.grigliaTotali.GetCellValue(2, 3),
tsuikashakai=self.grigliaTotali.GetCellValue(2, 4),
tsuikatokubetsu=self.grigliaTotali.GetCellValue(2, 5),
                               ))
tabellaCalcoli.update(datiCalcoli,['id'])
if popolaCalcoli is None:
tabellaCalcoli.insert(dict(name=self.casellaNome.Value, anno=self.calendarioStudenti.Date.Year,
mese=self.calendarioStudenti.Date.Month,
normaleigo=self.grigliaTotali.GetCellValue(0, 0),
normalsuugaku=self.grigliaTotali.GetCellValue(0, 1),
normalkokugo=self.grigliaTotali.GetCellValue(0, 2),
normalrika=self.grigliaTotali.GetCellValue(0, 3),
normalshakai=self.grigliaTotali.GetCellValue(0, 4),
normaltokubetsu=self.grigliaTotali.GetCellValue(0, 5),
tsuikaeigo=self.grigliaTotali.GetCellValue(2, 0),
tsuikasuugaku=self.grigliaTotali.GetCellValue(2, 1),
tsuikakokugo=self.grigliaTotali.GetCellValue(2, 2),
tsuikarika=self.grigliaTotali.GetCellValue(2, 3),
tsuikashakai=self.grigliaTotali.GetCellValue(2, 4),
tsuikatokubetsu=self.grigliaTotali.GetCellValue(2, 5),
                                       ))
nomefile = './StudentsData/'+self.casellaNome.Value+self.casellaTelefono.Value+'.txt'
nomefile1 = './StudentsData/' + self.casellaNome.Value + self.casellaTelefono.Value+'tokubetsu.txt'
        # Export both weekly timetables (regular grid and the tokubetsu grid)
        # to CSV: one column per weekday, nine rows of time slots.
        fieldnames = ['月曜日', '火曜日', '水曜日', '木曜日', '金曜日', '土曜日', '日曜日']
        for nome, griglia in ((nomefile, self.oreMaterie), (nomefile1, self.oreMaterie1)):
            with open(nome, 'wb') as f:
                writer = csv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
                writer.writeheader()
                for i in range(0, 9):
                    writer.writerow(dict(
                        (giorno, utf_8_encoder(griglia.GetCellValue(i, colonna)))
                        for colonna, giorno in enumerate(fieldnames)))
for i in self.usciteStudenti.Items:
            if tabellaTempo.find_one(name=self.casellaNome.Value, uscite=i) is None:
tabellaTempo.insert(dict(name=self.casellaNome.Value, uscite=i))
self.listaStudenti.Clear()
popolaStudenti = tabella.find(student='1')
for i in popolaStudenti:
self.listaStudenti.Append(unicode(i.name))
def abilitaNuovo(self, event):
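        """Re-enable the submit button and clear the card ID field."""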
self.invio.Enabled = True
self.cardid.LabelText = ''
def cancellaDati( self, event ):
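        """Ask for confirmation, then delete the student from every table
        and refresh the student list."""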
dlg = wx.MessageDialog(None, u"データ削除しますか", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if result == wx.ID_YES:
tabella.delete(name=self.casellaNome.Value)
tabellaTempo.delete(name=self.casellaNome.Value)
tabellaGiorni.delete(name=self.casellaNome.Value)
tabellaCalcoli.delete(name=self.casellaNome.Value)
self.listaStudenti.Clear()
popolaStudenti = tabella.find(student='1')
for i in popolaStudenti:
self.listaStudenti.Append(unicode(i.name))
def aggiungiData(self, event):
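        """Append the calendar-selected date to the student's date list,
        rejecting duplicates (wx months are 0-based, hence Month + 1)."""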
        dataComposta = funzioni.aggiungizeri(self.calendarioStudenti.Date.Year,
                                             self.calendarioStudenti.Date.Month + 1,
                                             self.calendarioStudenti.Date.Day)
        controllaDate = funzioni.controlloDateDuplicate(dataComposta, self.usciteStudenti.Items)
        if controllaDate == True:
            self.usciteStudenti.Append(dataComposta)
        else:
            self.errorCheck.LabelText = u'すでに追加されました'
def cancellaLezioni(self, event):
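        """Delete the selected dates from the listbox and from
        tabellaTempo."""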
        # Delete in reverse order so earlier deletions do not shift the
        # remaining indices; drop each removed date from tabellaTempo too.
        selections = sorted(self.usciteStudenti.GetSelections(), reverse=True)
        for index in selections:
            datadaCancellare = self.usciteStudenti.GetString(index)
            self.usciteStudenti.Delete(index)
            tabellaTempo.delete(name=self.listaStudenti.StringSelection, uscite=datadaCancellare)
def readCard( self, event ):
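        """Scan a card and attach its ID to the form, refusing cards that
        are already registered in the database."""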
idcarta = cardScan()
cercacarta = tabella.find_one(cardID=idcarta)
if cercacarta is not None:
self.errorCheck.LabelText = 'Card Already on database'
else:
self.errorCheck.LabelText = 'Card reading successfully'
self.cardid.Label = idcarta
def aggiuntaGiorni( self, event ):
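        """Add every date of the displayed month that falls on a checked
        weekday (first timetable's checkboxes) to the student's date list."""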
        giornoDelMeseCorrente = str(self.calendarioStudenti.Date)
        dataDatetime = datetime.strptime(giornoDelMeseCorrente, '%m/%d/%y %H:%M:%S')
        lungezzaMese = calendar.monthrange(dataDatetime.year, dataDatetime.month)
        # Weekday checkboxes in funzioni.giorniSettimana order:
        # Monday (0) ... Sunday (6).
        caselle = [self.lunedi, self.martedi, self.mercoledi, self.giovedi,
                   self.venerdi, self.sabato, self.domenica]
        for giornoSettimana, casella in enumerate(caselle):
            if casella.Value == True:
                tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year, dataDatetime.month,
                                                       giornoSettimana, lungezzaMese[1])
                for i in tuplaGiorni:
                    if funzioni.controlloDateDuplicate(i, self.usciteStudenti.Items) == True:
                        self.usciteStudenti.Append(i)
def aggiuntaGiorni1( self, event ):
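        """Same as aggiuntaGiorni, but driven by the second timetable's
        weekday checkboxes (lunedi1 ... domenica1)."""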
        giornoDelMeseCorrente = str(self.calendarioStudenti.Date)
        dataDatetime = datetime.strptime(giornoDelMeseCorrente, '%m/%d/%y %H:%M:%S')
        lungezzaMese = calendar.monthrange(dataDatetime.year, dataDatetime.month)
        # Second set of weekday checkboxes, Monday (0) ... Sunday (6).
        caselle = [self.lunedi1, self.martedi1, self.mercoledi1, self.giovedi1,
                   self.venerdi1, self.sabato1, self.domenica1]
        for giornoSettimana, casella in enumerate(caselle):
            if casella.Value == True:
                tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year, dataDatetime.month,
                                                       giornoSettimana, lungezzaMese[1])
                for i in tuplaGiorni:
                    if funzioni.controlloDateDuplicate(i, self.usciteStudenti.Items) == True:
                        self.usciteStudenti.Append(i)
class finestraUtenti(JukuPlanner.addUser):
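    """Teacher management window: personal data, default and per-date hour
    selections, weekday availability and rest days (riposi)."""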
# constructor
def __init__(self, parent):
# initialize parent class
JukuPlanner.addUser.__init__(self, parent)
popolaInsegnanti = tabellaIns.find(teacher='1')
for i in popolaInsegnanti:
self.listaInsegnanti.Append(unicode(i.name))
self.invio.Enabled = True
self.aggiorna.Enabled = False
self.cancella.Enabled = False
def mostraMeseCorrente( self, event ):
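        """When 'current month' is ticked, filter the rest-day list down to
        dates of the month shown in the calendar; otherwise reload all the
        teacher's saved rest days."""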
        listadate = []
        dataComposta = funzioni.aggiungizeriSenzaGiorno(self.calendarioStudenti.Date.Year,
                                                        self.calendarioStudenti.Date.Month + 1)
        dataunicode = unicode(dataComposta)
        popolaDate = tabellaTempoIns.find(name=self.listaInsegnanti.StringSelection)
        if self.meseCorrente.Value == True:
            for i in self.riposiInsegnanti.Items:
                if dataunicode in i:
                    listadate.append(i)
            self.riposiInsegnanti.Clear()
            for i in listadate:
                self.riposiInsegnanti.Append(i)
        if self.meseCorrente.Value == False:
            self.riposiInsegnanti.Clear()
            for i in popolaDate:
                if len(str(i.riposi)) >= 5:
                    self.riposiInsegnanti.Append(str(i.riposi))
def cancellaRiposi(self, event):
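        """Remove the selected rest days from the list and drop the
        teacher's saved rest days from tabellaTempoIns."""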
        # Delete selected entries in reverse order so indices stay valid.
        selections = sorted(self.riposiInsegnanti.GetSelections(), reverse=True)
        for index in selections:
            self.riposiInsegnanti.Delete(index)
        tabellaTempoIns.delete(name=self.casellaNome.LabelText)
def nuovoInsegnante( self, event ):
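        """Reset the form so a new teacher can be entered."""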
self.invio.Enabled = True
self.aggiorna.Enabled = False
self.cancella.Enabled = False
self.casellaNome.Clear()
self.casellaTelefono.Clear()
self.casellaEmail.Clear()
self.tabellaOre.DeselectAll()
self.tabellaOre1.DeselectAll()
self.riposiInsegnanti.Clear()
self.furigana.Clear()
self.lunedi.Value = False
self.martedi.Value = False
self.mercoledi.Value = False
self.giovedi.Value = False
self.venerdi.Value = False
self.sabato.Value = False
self.domenica.Value = False
self.listaInsegnanti.Clear()
popolaStudenti = tabellaIns.find(teacher='1')
for i in popolaStudenti:
self.listaInsegnanti.Append(unicode(i.name))
def orePersonalizzate( self, event ):
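        """Show the hour selection for the date picked in the calendar: the
        per-date override if one exists, otherwise the teacher's defaults."""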
popolaDateIns = tabellaDateIns.find_one(name=self.casellaNome.Value, data=unicode(self.calendarioStudenti.Date))
popolainsegnanti = tabellaIns.find_one(name=self.listaInsegnanti.StringSelection)
if popolaDateIns is not None:
            arrayore = [popolaDateIns.primaOra, popolaDateIns.secondaOra, popolaDateIns.terzaOra,
                        popolaDateIns.quartaOra, popolaDateIns.quintaOra, popolaDateIns.sestaOra,
                        popolaDateIns.settimaOra, popolaDateIns.ottavaOra, popolaDateIns.nonaOra]
for settaOre in range(0, 9, 1):
self.tabellaOre1.SetSelection(settaOre, select=arrayore[settaOre])
if popolaDateIns is None:
arrayore = [popolainsegnanti.primaOra, popolainsegnanti.secondaOra, popolainsegnanti.terzaOra, popolainsegnanti.quartaOra, popolainsegnanti.quintaOra, popolainsegnanti.sestaOra, popolainsegnanti.settimaOra,
popolainsegnanti.ottavaOra, popolainsegnanti.nonaOra]
for settaOre in range(0, 9, 1):
self.tabellaOre1.SetSelection(settaOre, select=arrayore[settaOre])
def caricaDate(self, event):
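        """Load the selected teacher into the form: personal data, card ID,
        weekday flags, default hours and saved rest days."""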
self.errorCheck.LabelText='-------------------------------------------------------------------------------------------------------------------------------------------------'
self.riposiInsegnanti.Clear()
self.aggiorna.Enabled = True
self.cancella.Enabled = True
self.invio.Enabled = False
popolaDateIns = tabellaDateIns.find_one(name=self.casellaNome.Value, data=str(self.calendarioStudenti.Date))
popolaDate = tabellaTempoIns.find(name=self.listaInsegnanti.StringSelection)
popolainsegnanti = tabellaIns.find(name=self.listaInsegnanti.StringSelection, teacher='1')
global idSelezionato
global idDatePersonalizzate
if popolaDateIns is not None:
idDatePersonalizzate = popolaDateIns.id
for i in popolainsegnanti:
idSelezionato = i.id
self.lunedi.Value = i.lunedi
self.martedi.Value = i.martedi
self.mercoledi.Value = i.mercoledi
self.giovedi.Value = i.giovedi
self.venerdi.Value = i.venerdi
self.sabato.Value = i.sabato
self.domenica.Value = i.domenica
self.casellaNome.LabelText = i.name
self.casellaEmail.LabelText = i.email
self.furigana.Value = i.furigana
self.casellaTelefono.LabelText = i.telephone
if i.cardID == '':
self.cardid.LabelText = u"カード未登録です、登録してください"
self.CardRegistration.Enabled=True
self.cardcancel.Enabled = False
else:
self.cardid.LabelText = i.cardID
self.CardRegistration.Enabled=False
self.cardcancel.Enabled=True
arrayore = [i.primaOra, i.secondaOra, i.terzaOra, i.quartaOra, i.quintaOra, i.sestaOra, i.settimaOra,
i.ottavaOra, i.nonaOra]
for settaOre in range(0, 9, 1):
self.tabellaOre.SetSelection(settaOre, select=arrayore[settaOre])
for settaOre in range(0, 9, 1):
self.tabellaOre1.SetSelection(settaOre, select=arrayore[settaOre])
for i in popolaDate:
self.riposiInsegnanti.Append(str(i.riposi))
self.invio.Enabled = False
def cancellaDati(self, event):
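        """Ask for confirmation, then delete the teacher from every table
        and refresh the teacher list."""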
dlg = wx.MessageDialog(None, u"データ削除しますか", '', wx.YES_NO | wx.ICON_QUESTION)
result = dlg.ShowModal()
if result == wx.ID_YES:
tabellaIns = dbins['insegnanti']
tabellaTempoIns = dbins['timeTable']
tabellaDateIns = dbins['datePersonalizzate']
tabellaIns.delete(name=self.casellaNome.Value)
tabellaTempoIns.delete(name=self.casellaNome.Value)
tabellaDateIns.delete(name=self.casellaNome.Value)
self.listaInsegnanti.Clear()
popolaStudenti = tabellaIns.find(teacher='1')
for i in popolaStudenti:
self.listaInsegnanti.Append(unicode(i.name))
else:
pass
def aggiornaDati(self, event):
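        """Update the teacher's base record; when the second hour list
        differs from the default, store it as a per-date override for the
        selected date. Finally persist any new rest days."""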
global idSelezionato
global idDatePersonalizzate
popolaDateIns = tabellaDateIns.find_one(name=self.casellaNome.Value, data=str(self.calendarioStudenti.Date))
if popolaDateIns is not None:
idDatePersonalizzate = popolaDateIns.id
        orario = {}
        orario1 = {}
        for slot in range(0, 9):
            orario[slot] = False
            orario1[slot] = False
        for i in self.tabellaOre.Selections:
            orario[i] = True
        for i in self.tabellaOre1.Selections:
            orario1[i] = True
        # The base record is updated in every case.
        dati = dict(id=idSelezionato, name=self.casellaNome.Value, cardID=self.cardid.Label,
                    telephone=self.casellaTelefono.Value,
                    email=self.casellaEmail.Value, student=0,
                    teacher=1, furigana=self.furigana.Value,
                    primaOra=orario[0], secondaOra=orario[1], terzaOra=orario[2], quartaOra=orario[3],
                    quintaOra=orario[4], sestaOra=orario[5], settimaOra=orario[6], ottavaOra=orario[7],
                    nonaOra=orario[8], lunedi=self.lunedi.Value,
                    martedi=self.martedi.Value, mercoledi=self.mercoledi.Value,
                    giovedi=self.giovedi.Value, venerdi=self.venerdi.Value,
                    sabato=self.sabato.Value, domenica=self.domenica.Value)
        tabellaIns.update(dati, ['id'])
        if orario != orario1:
            # The second hour list differs from the default: keep it as a
            # per-date override for the date selected in the calendar.
            if popolaDateIns is None:
                tabellaDateIns.insert(
                    dict(name=self.casellaNome.Value, data=str(self.calendarioStudenti.Date),
                         primaOra=orario1[0], secondaOra=orario1[1], terzaOra=orario1[2], quartaOra=orario1[3],
                         quintaOra=orario1[4], sestaOra=orario1[5], settimaOra=orario1[6], ottavaOra=orario1[7],
                         nonaOra=orario1[8]))
            else:
                dati1 = dict(id=idDatePersonalizzate, name=self.casellaNome.Value,
                             primaOra=orario1[0], secondaOra=orario1[1], terzaOra=orario1[2], quartaOra=orario1[3],
                             quintaOra=orario1[4], sestaOra=orario1[5], settimaOra=orario1[6], ottavaOra=orario1[7],
                             nonaOra=orario1[8])
                tabellaDateIns.update(dati1, ['id'])
        for i in self.riposiInsegnanti.Items:
            if tabellaTempoIns.find_one(name=self.casellaNome.Value, riposi=i) is None:
                tabellaTempoIns.insert(dict(name=self.casellaNome.Value, riposi=i))
def cardDelete(self, event):
self.cardid.Label=''
orario = {}
for creaorariofasullo in range(0, 9, 1):
orario[creaorariofasullo] = False
print orario[creaorariofasullo]
for i in self.tabellaOre.Selections:
print len(self.tabellaOre.Items)
orario[i] = True
print 'orarioi', orario[i]
dati = dict(id=idSelezionato, name=self.casellaNome.Value, cardID=self.cardid.Label,
telephone=self.casellaTelefono.Value,
email=self.casellaEmail.Value,furigana = self.furigana.Value,
teacher=1, kokugo=self.kokugo.Value, eigo=self.eigo.Value, suugaku=self.suugaku.Value,
rika=self.rika.Value, shakai=self.shakai.Value, tokubetsu=self.tokubetsu.Value,
primaOra=orario[0], secondaOra=orario[1], terzaOra=orario[2], quartaOra=orario[3],
quintaOra=orario[4], sestaOra=orario[5], settimaOra=orario[6], ottavaOra=orario[7],
nonaOra=orario[8])
tabellaIns.update(dati, ['id'])
for i in self.riposiInsegnanti.Items:
if tabellaTempoIns.find_one(name=self.casellaNome.Value, riposi=i) is None:
tabellaTempoIns.insert(dict(name=self.casellaNome.Value, riposi=i))
def abilitaNuovo(self, event):
self.invio.Enabled = True
self.cardid.LabelText = ''
def aggiuntaGiorni( self, event ):
giornoDelMeseCorrente = str(self.calendarioStudenti.Date)
dataDatetime = datetime.strptime(giornoDelMeseCorrente, '%m/%d/%y %H:%M:%S')
print dataDatetime
calendario = calendar
print calendario.month(dataDatetime.year,dataDatetime.month)
print calendario.monthrange(dataDatetime.year,dataDatetime.month), type (calendario.monthrange(dataDatetime.year,dataDatetime.month))
lungezzaMese = calendario.monthrange(dataDatetime.year,dataDatetime.month)
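        # calendar.monthrange() returns (weekday of the 1st, days in month),
        # so lungezzaMese[1] used below is the length of the month.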
if self.lunedi.Value == True:
tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year,dataDatetime.month,0,lungezzaMese[1])
for i in tuplaGiorni:
controllaDate = funzioni.controlloDateDuplicate(i, self.riposiInsegnanti.Items)
print type(self.calendarioStudenti.Date), type(str(self.calendarioStudenti.Date)), str(
self.calendarioStudenti.Date)
if controllaDate == True:
self.riposiInsegnanti.Append(i)
if self.martedi.Value == True:
tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year,dataDatetime.month,1,lungezzaMese[1])
for i in tuplaGiorni:
controllaDate = funzioni.controlloDateDuplicate(i, self.riposiInsegnanti.Items)
print type(self.calendarioStudenti.Date), type(str(self.calendarioStudenti.Date)), str(
self.calendarioStudenti.Date)
if controllaDate == True:
self.riposiInsegnanti.Append(i)
if self.mercoledi.Value == True:
tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year,dataDatetime.month,2,lungezzaMese[1])
for i in tuplaGiorni:
controllaDate = funzioni.controlloDateDuplicate(i, self.riposiInsegnanti.Items)
print type(self.calendarioStudenti.Date), type(str(self.calendarioStudenti.Date)), str(
self.calendarioStudenti.Date)
if controllaDate == True:
self.riposiInsegnanti.Append(i)
if self.giovedi.Value == True:
tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year,dataDatetime.month,3,lungezzaMese[1])
for i in tuplaGiorni:
controllaDate = funzioni.controlloDateDuplicate(i, self.riposiInsegnanti.Items)
print type(self.calendarioStudenti.Date), type(str(self.calendarioStudenti.Date)), str(
self.calendarioStudenti.Date)
if controllaDate == True:
self.riposiInsegnanti.Append(i)
if self.venerdi.Value == True:
tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year,dataDatetime.month,4,lungezzaMese[1])
for i in tuplaGiorni:
controllaDate = funzioni.controlloDateDuplicate(i, self.riposiInsegnanti.Items)
print type(self.calendarioStudenti.Date), type(str(self.calendarioStudenti.Date)), str(
self.calendarioStudenti.Date)
if controllaDate == True:
self.riposiInsegnanti.Append(i)
if self.sabato.Value == True:
tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year,dataDatetime.month,5,lungezzaMese[1])
for i in tuplaGiorni:
controllaDate = funzioni.controlloDateDuplicate(i, self.riposiInsegnanti.Items)
print type(self.calendarioStudenti.Date), type(str(self.calendarioStudenti.Date)), str(
self.calendarioStudenti.Date)
if controllaDate == True:
self.riposiInsegnanti.Append(i)
if self.domenica.Value == True:
tuplaGiorni = funzioni.giorniSettimana(dataDatetime.year,dataDatetime.month,6,lungezzaMese[1])
for i in tuplaGiorni:
controllaDate = funzioni.controlloDateDuplicate(i, self.riposiInsegnanti.Items)
print type(self.calendarioStudenti.Date), type(str(self.calendarioStudenti.Date)), str(
self.calendarioStudenti.Date)
if controllaDate == True:
self.riposiInsegnanti.Append(i)
def aggiungiData(self, event):
calendario = calendar
giornoDelMeseCorrente = str(self.calendarioStudenti.Date)
dataDatetime = datetime.strptime(giornoDelMeseCorrente, '%m/%d/%y %H:%M:%S')
lungezzaMese = calendario.monthrange(dataDatetime.year, dataDatetime.month)
dataComposta = funzioni.aggiungizeri(self.calendarioStudenti.Date.Year, self.calendarioStudenti.Date.Month + 1,
self.calendarioStudenti.Date.Day)
controllaDate = funzioni.controlloDateDuplicate(dataComposta, self.riposiInsegnanti.Items)
if controllaDate == True:
self.riposiInsegnanti.Append(dataComposta)
else:
            self.errorCheck.LabelText = u'すでに追加されました'  # "Already added"
def selezioneCalendario(self, event):
        controllaDate = funzioni.controlloDateDuplicate(self.calendarioStudenti.Date, self.riposiInsegnanti.Items)  # second argument restored to match the other call sites
self.text.SetValue(str(self.calendarioStudenti.Date))
# put a blank string in text when 'Clear' is clicked
def clearFunc(self, event):
self.text.SetValue(str(''))
def funzioneInvio(self, event):
orario = {}
for creaorariofasullo in range(0, 9, 1):
orario[creaorariofasullo] = False
print orario[creaorariofasullo]
cercaNome = tabella.find_one(name=self.casellaNome.Value)
print self.tabellaOre.Selections
for i in self.tabellaOre.Selections:
print len(self.tabellaOre.Items)
orario[i] = True
print orario[i]
        if cercaNome is not None:
            self.errorCheck.LabelText = 'Name Already on database'
        elif not self.casellaNome.Value:
            # an empty wx.TextCtrl holds '' rather than None, so test for falsiness
            self.errorCheck.LabelText = 'Please fill the name'
        else:
tabellaIns.insert(
dict(name=self.casellaNome.Value, cardID=self.cardid.Label, telephone=self.casellaTelefono.Value,
email=self.casellaEmail.Value, student=0,furigana = self.furigana.Value,
teacher=1,lunedi=self.lunedi.Value,
martedi=self.martedi.Value, mercoledi=self.mercoledi.Value,
giovedi=self.giovedi.Value, venerdi=self.venerdi.Value,
sabato=self.sabato.Value, domenica=self.domenica.Value,
primaOra=orario[0], secondaOra=orario[1], terzaOra=orario[2], quartaOra=orario[3],
quintaOra=orario[4], sestaOra=orario[5], settimaOra=orario[6], ottavaOra=orario[7],
nonaOra=orario[8]))
for i in self.riposiInsegnanti.Items:
tabellaTempoIns.insert(dict(name=self.casellaNome.Value, riposi=i))
print tabella
self.errorCheck.LabelText = 'Data has been saved!'
def noMaster(self, event):
self.teacherCheckBox.Value = 0
def noStudent(self, event):
self.studentCheckBok.Value = 0
def readCard(self, event):
idcarta = cardScan()
cercacarta = tabellaIns.find_one(cardID=idcarta)
if cercacarta is not None:
self.errorCheck.LabelText = 'Card Already on database'
else:
self.errorCheck.LabelText = 'Card reading successfully'
self.cardid.Label = idcarta
def cardScan():
cmd = ['python', 'tagtool.py']
subprocess.Popen(cmd).wait()
while True:
quantiTxt = glob.glob("tag.txt")
if len(quantiTxt) >= 1:
            filenfc = open('tag.txt', 'r')
            linea = filenfc.readlines()
            filenfc.close()  # close the handle once the tag has been read
            tagnfc = linea[0]
            # print tagnfc
            print ('Card reading complete')
            print tagnfc, 'Tagnfc'
            return tagnfc
else:
time.sleep(1)
# mandatory in wx, create an app, False stands for not deteriction stdin/stdout
# refer manual for details
app = wx.App(False)
# create an object of CalcFrame
frame = CalcFrame(None)
# show the frame
frame.Show(True)
# start the applications
app.MainLoop()
|
gpl-3.0
| -7,033,408,689,322,820,000 | 54.919572 | 237 | 0.568567 | false |
MaxTyutyunnikov/lino
|
obsolete/voc/makesql.py
|
1
|
3136
|
#coding: utf-8
## Copyright 2008-2009 Luc Saffre.
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify it
## under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful, but WITHOUT
## ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
## or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
## License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, write to the Free Software Foundation,
## Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import csv
import codecs
def unicode_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs):
# csv.py doesn't do Unicode; encode temporarily as UTF-8:
csv_reader = csv.reader(utf_8_encoder(unicode_csv_data),
dialect=dialect, **kwargs)
for row in csv_reader:
# decode UTF-8 back to Unicode, cell by cell:
yield [unicode(cell, 'utf-8') for cell in row]
def utf_8_encoder(unicode_csv_data):
for line in unicode_csv_data:
yield line.encode('utf-8')
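# Usage sketch (mirrors the call further down; this is the csv module's own
# Unicode-input recipe for Python 2):
#   for row in unicode_csv_reader(codecs.open('voc.csv', 'r', 'utf-8')):
#       ...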
#~ UNIT_DATA = (
#~ ("1", u"Leçon 1"),
#~ ("2", u"Leçon 2"),
#~ ("3", u"Leçon 3"),
#~ ("4", u"Leçon 4"),
#~ ("5", u"Leçon 5"),
#~ ("6", u"Leçon 6"),
#~ )
#~ UNIT_STMT = "INSERT INTO voc_unit (name, title) VALUES %s;\n"
#~ ENTRY_DATA = (
#~ (1, "je", "", "mina" ),
#~ (1, "tu", "", "sina" ),
#~ (1, "il", "", "tema" ),
#~ (1, "pullover", "nm", "kampsun" ),
#~ )
#ENTRY_STMT = "INSERT INTO voc_entry (unit_id, question,question_type,answer) VALUES %s;\n"
ENTRY_STMT = "INSERT INTO voc_entry (%s) VALUES (%s);\n"
#~ def assql(x):
#~ if type(x) == int:
#~ return str(x)
#~ if type(x) == tuple:
#~ return "(" + ",".join([assql(y) for y in x]) + ")"
#~ return '"' + x.encode('utf-8') + '"'
#~ f=file("sql/unit.sql","w")
#~ for e in UNIT_DATA:
#~ f.write(UNIT_STMT % assql(e))
#~ f.close()
#~ f=file("sql/entry.sql","w")
#~ for e in ENTRY_DATA:
#~ f.write(ENTRY_STMT % assql(e))
def int2sql(x):
#print repr(x)
return str(int(x))
def str2sql(x):
#return '"' + x.encode('utf-8') + '"'
return '"' + x + '"'
fieldmap = dict(
id=int2sql,
word1=str2sql,
word2=str2sql,
word1_suffix=str2sql)
r = unicode_csv_reader(codecs.open('voc.csv','r',"utf-8"))
titles = r.next()
fields=[]
i=0
for fieldname in titles:
converter=fieldmap.get(fieldname,None)
if converter is not None:
fields.append( (fieldname,converter,i) )
i+=1
for name,cv,i in fields:
print i,name
sqlcolumns = ",".join([fld[0] for fld in fields])
n=1
f=file("sql/entry.sql","w")
for row in r:
n+=1
try:
sqlvalues=",".join([cv(row[i]) for fn,cv,i in fields])
except ValueError,e:
print n,row,e
else:
#print sqlvalues
stmt=ENTRY_STMT % (sqlcolumns,sqlvalues)
#print stmt
f.write(stmt.encode("utf-8"))
|
gpl-3.0
| 2,935,300,112,210,750,000 | 26.699115 | 91 | 0.601917 | false |
jandom/rdkit
|
rdkit/Chem/Draw/qtCanvas.py
|
1
|
3513
|
# $Id$
#
# Copyright (C) 2014 Seiji Matsuoka
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
from rdkit.Chem.Draw.canvasbase import CanvasBase
from PySide import QtGui, QtCore
class Canvas(CanvasBase):
def __init__(self, size):
self.size = size
self.qsize = QtCore.QSize(*size)
self.pixmap = QtGui.QPixmap(self.qsize)
self.painter = QtGui.QPainter(self.pixmap)
self.painter.setRenderHint(QtGui.QPainter.Antialiasing, True)
self.painter.setRenderHint(QtGui.QPainter.SmoothPixmapTransform, True)
self.painter.fillRect(0, 0, size[0], size[1], QtCore.Qt.white)
def addCanvasLine(self, p1, p2, color=(0, 0, 0), color2=None, **kwargs):
if 'dash' in kwargs:
line_type = QtCore.Qt.DashLine
else:
line_type = QtCore.Qt.SolidLine
qp1 = QtCore.QPointF(*p1)
qp2 = QtCore.QPointF(*p2)
qpm = QtCore.QPointF((p1[0] + p2[0]) / 2, (p1[1] + p2[1]) / 2)
if color2 and color2 != color:
rgb = [int(c * 255) for c in color]
pen = QtGui.QPen(QtGui.QColor(*rgb), 1, line_type)
self.painter.setPen(pen)
self.painter.drawLine(qp1, qpm)
rgb2 = [int(c * 255) for c in color2]
pen.setColor(QtGui.QColor(*rgb2))
self.painter.setPen(pen)
self.painter.drawLine(qpm, qp2)
else:
rgb = [int(c * 255) for c in color]
pen = QtGui.QPen(QtGui.QColor(*rgb), 1, line_type)
self.painter.setPen(pen)
self.painter.drawLine(qp1, qp2)
def addCanvasText(self, text, pos, font, color=(0, 0, 0), **kwargs):
orientation = kwargs.get('orientation', 'E')
qfont = QtGui.QFont("Helvetica", font.size * 1.5)
qtext = QtGui.QTextDocument()
qtext.setDefaultFont(qfont)
colored = [int(c * 255) for c in color]
colored.append(text)
html_format = "<span style='color:rgb({},{},{})'>{}</span>"
formatted = html_format.format(*colored)
qtext.setHtml(formatted)
if orientation == 'N':
qpos = QtCore.QPointF(pos[0] - qtext.idealWidth() / 2, pos[1] - font.size)
elif orientation == 'W':
qpos = QtCore.QPointF(pos[0] - qtext.idealWidth() + font.size, pos[1] - font.size)
else:
qpos = QtCore.QPointF(pos[0] - font.size, pos[1] - font.size)
self.painter.save()
self.painter.translate(qpos)
qtext.drawContents(self.painter)
self.painter.restore()
return font.size * 1.8, font.size * 1.8, 0
def addCanvasPolygon(self, ps, color=(0, 0, 0), fill=True, stroke=False, **kwargs):
polygon = QtGui.QPolygonF()
for ver in ps:
polygon.append(QtCore.QPointF(*ver))
    rgb = [int(c * 255) for c in color]  # scale 0-1 floats to 0-255, as the other addCanvas* methods do
    pen = QtGui.QPen(QtGui.QColor(*rgb), 1, QtCore.Qt.SolidLine)
self.painter.setPen(pen)
self.painter.setBrush(QtGui.QColor(0, 0, 0))
self.painter.drawPolygon(polygon)
def addCanvasDashedWedge(self, p1, p2, p3, dash=(2, 2), color=(0, 0, 0), color2=None, **kwargs):
rgb = [int(c * 255) for c in color]
pen = QtGui.QPen(QtGui.QColor(*rgb), 1, QtCore.Qt.SolidLine)
self.painter.setPen(pen)
dash = (4, 4)
pts1 = self._getLinePoints(p1, p2, dash)
pts2 = self._getLinePoints(p1, p3, dash)
if len(pts2) < len(pts1):
pts2, pts1 = pts1, pts2
for i in range(len(pts1)):
qp1 = QtCore.QPointF(pts1[i][0], pts1[i][1])
qp2 = QtCore.QPointF(pts2[i][0], pts2[i][1])
self.painter.drawLine(qp1, qp2)
def flush(self):
self.painter.end()
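  # Usage sketch (QPixmap.save() is standard Qt; the file name is illustrative):
  #   canvas = Canvas((300, 300))
  #   ... RDKit drawing code paints via the addCanvas* methods ...
  #   canvas.flush()
  #   canvas.pixmap.save('mol.png')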
|
bsd-3-clause
| 2,263,329,650,974,698,000 | 35.59375 | 98 | 0.63877 | false |
SKA-ScienceDataProcessor/integration-prototype
|
sip/execution_control/docker_api/sip_docker_swarm/docker_swarm_client.py
|
1
|
21591
|
# -*- coding: utf-8 -*-
"""Docker Swarm Client API."""
from typing import List
import re
import os
import logging
import copy
import docker
import yaml
LOG = logging.getLogger('sip.ec.docker_swarm_client')
class DockerSwarmClient:
"""Docker Swarm Client Interface."""
def __init__(self):
"""Initialise of the class."""
# Create a docker client
self._client = docker.from_env()
# Store a flag to show whether we are on a manager node or a worker.
self._manager = self._client.info()['Swarm']['ControlAvailable']
# Docker low-level API
self._api_client = docker.APIClient()
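    # Usage sketch (assumes the local docker daemon is a swarm manager; names
    # are illustrative):
    #   client = DockerSwarmClient()
    #   ids = client.create_services(compose_yaml_str)
    #   print(client.get_service_state(ids[0]))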
###########################################################################
# Properties / attributes
###########################################################################
@property
def services(self) -> List[str]:
"""Get list of docker services.
Returns:
list, list of service ids
"""
return self.get_service_list()
@property
    def containers(self) -> List[str]:
"""Get list of docker containers.
Returns:
list, list of container ids
"""
return self.get_container_list()
@property
    def volumes(self) -> List[str]:
"""Get list of docker volumes.
Returns:
list, list of volume names
"""
return self.get_volume_list()
@property
def nodes(self)-> List[str]:
"""Get list of docker nodes.
Returns:
list, list of node ids
"""
return self.get_node_list()
@property
def delete_services(self):
"""Delete all services."""
self.delete_all_services()
@property
def delete_volumes(self):
"""Delete all volumes."""
self.delete_all_volumes()
###########################################################################
# Create functions
###########################################################################
def create_services(self, compose_str: str) -> list:
"""Create new docker services.
Args:
compose_str (string): Docker compose 'file' string
Return:
service_names, list
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Services can only be run on '
'swarm manager nodes')
# Initialise empty list
services_ids = []
try:
            service_config = yaml.safe_load(compose_str)  # safe_load avoids arbitrary object construction
# Deepcopy the service config
service_list = copy.deepcopy(service_config)
# Removing version and service from the dict
service_config.pop('version')
service_config.pop('services')
for service_name in service_list['services']:
service_exist = self._client.services.list(
filters={'name': service_name})
if not service_exist:
service_config['name'] = service_name
service_spec = self._parse_services(
service_config, service_name, service_list)
created_service = self._client.services.create(
**service_spec)
service_id = created_service.short_id
LOG.debug('Service created: %s', service_id)
services_ids.append(service_id)
else:
LOG.debug('Services already exists')
        except yaml.YAMLError as exc:
            LOG.error('Failed to parse compose string: %s', exc)
# Returning list of services created
return services_ids
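    # A minimal compose string this parser accepts (an assumption based on the
    # keys handled in _parse_services below):
    #   version: '3'
    #   services:
    #     web:
    #       image: nginx:alpine
    #       ports: ['80:80']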
def create_volume(self, volume_name: str, driver_spec: str = None):
"""Create new docker volumes.
Only the manager nodes can create a volume
Args:
volume_name (string): Name for the new docker volume
driver_spec (string): Driver for the docker volume
"""
# Default values
if driver_spec:
driver = driver_spec
else:
driver = 'local'
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Services can only be deleted '
'on swarm manager nodes')
self._client.volumes.create(name=volume_name, driver=driver)
###########################################################################
# Delete functions
###########################################################################
def delete_service(self, service: str):
"""Removes/stops a docker service.
Only the manager nodes can delete a service
Args:
service (string): Service name or ID
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Services can only be deleted '
'on swarm manager nodes')
# Remove service
self._api_client.remove_service(service)
def delete_all_services(self):
"""Removes/stops a service.
Only the manager nodes can delete a service
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Services can only be deleted '
'on swarm manager nodes')
service_list = self.get_service_list()
for services in service_list:
# Remove all the services
self._api_client.remove_service(services)
def delete_volume(self, volume_name: str):
"""Removes/stops a docker volume.
Only the manager nodes can delete a volume
Args:
volume_name (string): Name of the volume
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Volumes can only be deleted '
'on swarm manager nodes')
# Remove volume
self._api_client.remove_volume(volume_name)
def delete_all_volumes(self):
"""Remove all the volumes.
Only the manager nodes can delete a volume
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Volumes can only be deleted '
'on swarm manager nodes')
volume_list = self.get_volume_list()
for volumes in volume_list:
# Remove all the services
self._api_client.remove_volume(volumes, force=True)
###########################################################################
# Get functions
###########################################################################
def get_service_list(self) -> list:
"""Get a list of docker services.
Only the manager nodes can retrieve all the services
Returns:
list, all the ids of the services in swarm
"""
# Initialising empty list
services = []
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can retrieve'
' all the services.')
service_list = self._client.services.list()
for s_list in service_list:
services.append(s_list.short_id)
return services
def get_service_name(self, service_id: str) -> str:
"""Get the name of the docker service.
Only the manager nodes can retrieve service name
Args:
            service_id (string): Service ID
Returns:
string, name of the docker service
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can retrieve all'
' the services details.')
service = self._client.services.get(service_id)
return service.name
def get_service_details(self, service_id: str) -> dict:
"""Get details of a service.
Only the manager nodes can retrieve service details
Args:
            service_id (string): Service ID
Returns:
dict, details of the service
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can retrieve all'
' the services details.')
service = self._client.services.get(service_id)
return service.attrs
def get_service_state(self, service_id: str) -> str:
"""Get the state of the service.
Only the manager nodes can retrieve service state
Args:
service_id (str): Service id
Returns:
str, state of the service
"""
        # Get service
        service = self._client.services.get(service_id)
        # Get the state of the service (last task wins; None if there are no tasks yet)
        service_state = None
        for service_task in service.tasks():
            service_state = service_task['DesiredState']
        return service_state
def get_node_list(self) -> list:
"""Get a list of nodes.
Only the manager nodes can retrieve all the nodes
Returns:
list, all the ids of the nodes in swarm
"""
# Initialising empty list
nodes = []
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node '
'can retrieve all the nodes.')
node_list = self._client.nodes.list()
for n_list in node_list:
nodes.append(n_list.id)
return nodes
    def get_node_details(self, node_id: str) -> dict:
"""Get details of a node.
Only the manager nodes can retrieve details of a node
Args:
            node_id (str): Node ID
Returns:
dict, details of the node
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can '
'retrieve node details.')
node = self._client.nodes.get(node_id)
return node.attrs
def get_container_list(self) -> list:
"""Get list of containers.
Returns:
list, all the ids of containers
"""
# Initialising empty list
containers = []
containers_list = self._client.containers.list()
for c_list in containers_list:
containers.append(c_list.short_id)
return containers
def get_container_details(self, container_id_or_name: str) -> dict:
"""Get details of a container.
Args:
container_id_or_name (string): docker container id or name
Returns:
dict, details of the container
"""
container = self._client.containers.get(container_id_or_name)
return container.attrs
def get_volume_list(self) -> list:
"""Get a list of docker volumes.
Only the manager nodes can retrieve all the volumes
Returns:
list, all the names of the volumes in swarm
"""
# Initialising empty list
volumes = []
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can retrieve'
' all the services.')
volume_list = self._client.volumes.list()
for v_list in volume_list:
volumes.append(v_list.name)
return volumes
def get_volume_details(self, volume_name: str) -> dict:
"""Get details of the volume.
Args:
volume_name (str): Name of the volume
Returns:
dict, details of the volume
"""
if volume_name not in self.volumes:
            raise RuntimeError('No such volume found: %s' % volume_name)
volume = self._client.volumes.get(volume_name)
return volume.attrs
    def get_actual_replica(self, service_id: str) -> int:
"""Get the actual replica level of a service.
Args:
service_id (str): docker swarm service id
Returns:
            int, configured replica count of the service
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can retrieve '
'replication level of the service')
service_details = self.get_service_details(service_id)
actual_replica = service_details["Spec"]["Mode"][
"Replicated"]["Replicas"]
return actual_replica
    def get_replicas(self, service_id: str) -> int:
"""Get the replication level of a service.
Args:
service_id (str): docker swarm service id
Returns:
            int, number of running replicas of the service
"""
# Initialising empty list
replicas = []
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can retrieve '
'replication level of the service')
service_tasks = self._client.services.get(service_id).tasks()
for task in service_tasks:
if task['Status']['State'] == "running":
replicas.append(task)
return len(replicas)
###########################################################################
# Update functions
###########################################################################
def update_labels(self, node_name: str, labels: dict):
"""Update label of a node.
Args:
node_name (string): Name of the node.
labels (dict): Label to add to the node
"""
# Raise an exception if we are not a manager
if not self._manager:
raise RuntimeError('Only the Swarm manager node can update '
'node details.')
# Node specification
node_spec = {'Availability': 'active',
'Name': node_name,
'Role': 'manager',
'Labels': labels}
node = self._client.nodes.get(node_name)
node.update(node_spec)
###########################################################################
# Parsing functions
###########################################################################
def _parse_services(self, service_config: dict, service_name: str,
service_list: dict) -> dict:
"""Parse the docker compose file.
Args:
service_config (dict): Service configurations from the compose file
service_name (string): Name of the services
service_list (dict): Service configuration list
Returns:
dict, service specifications extracted from the compose file
"""
for key, value in service_list['services'][service_name].items():
service_config[key] = value
if 'command' in key:
key = "args"
service_config['args'] = value
service_config.pop('command')
if 'ports' in key:
endpoint_spec = self._parse_ports(value)
service_config['endpoint_spec'] = endpoint_spec
service_config.pop('ports')
if 'volumes' in key:
volume_spec = self._parse_volumes(value)
service_config['mounts'] = volume_spec
service_config.pop('volumes')
if 'deploy' in key:
self._parse_deploy(value, service_config)
service_config.pop('deploy')
if 'networks' in key:
network_spec = self._parse_networks(service_list)
service_config['networks'] = network_spec
if 'logging' in key:
self._parse_logging(value, service_config)
service_config.pop('logging')
if 'environment' in key:
service_config['env'] = value
service_config.pop('environment')
# LOG.info('Service Config: %s', service_config)
return service_config
def _parse_deploy(self, deploy_values: dict, service_config: dict):
"""Parse deploy key.
Args:
deploy_values (dict): deploy configuration values
service_config (dict): Service configuration
"""
# Initialising empty dictionary
mode = {}
for d_value in deploy_values:
if 'restart_policy' in d_value:
restart_spec = docker.types.RestartPolicy(
**deploy_values[d_value])
service_config['restart_policy'] = restart_spec
if 'placement' in d_value:
for constraints_key, constraints_value in \
deploy_values[d_value].items():
service_config[constraints_key] = constraints_value
if 'mode' in d_value:
mode[d_value] = deploy_values[d_value]
if 'replicas' in d_value:
mode[d_value] = deploy_values[d_value]
if 'resources' in d_value:
resource_spec = self._parse_resources(
deploy_values, d_value)
service_config['resources'] = resource_spec
# Setting the types
mode_spec = docker.types.ServiceMode(**mode)
service_config['mode'] = mode_spec
###########################################################################
# Static methods
###########################################################################
@staticmethod
def _parse_ports(port_values: dict) -> dict:
"""Parse ports key.
Args:
port_values (dict): ports configuration values
Returns:
dict, Ports specification which contains exposed ports
"""
# Initialising empty dictionary
endpoints = {}
for port_element in port_values:
target_port = port_element.split(':')
for port in target_port:
endpoints[int(port)] = int(port)
# Setting the types
endpoint_spec = docker.types.EndpointSpec(ports=endpoints)
return endpoint_spec
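    # Note: each listed port is mapped to itself (published == target), e.g.
    # '8080:8080' yields {8080: 8080} inside the EndpointSpec.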
@staticmethod
def _parse_volumes(volume_values: dict) -> str:
"""Parse volumes key.
Args:
volume_values (dict): volume configuration values
Returns:
string, volume specification with mount source and container path
"""
for v_values in volume_values:
for v_key, v_value in v_values.items():
if v_key == 'source':
if v_value == '.':
source = os.path.dirname(
os.path.abspath(__file__))
else:
source = v_value
if v_key == 'target':
target = v_value
volume_spec = [source + ':' + target]
return volume_spec
@staticmethod
def _parse_resources(resource_values: dict, resource_name: str) -> dict:
"""Parse resources key.
Args:
resource_values (dict): resource configurations values
resource_name (string): Resource name
Returns:
dict, resources specification
"""
# Initialising empty dictionary
resources = {}
for r_values in resource_values[resource_name]:
if 'limits' in r_values:
for r_key, r_value in \
resource_values[resource_name][r_values].items():
if 'cpu' in r_key:
cpu_value = float(r_value) * 10 ** 9
cpu_key = r_key[:3] + '_limit'
resources[cpu_key] = int(cpu_value)
if 'mem' in r_key:
mem_value = re.sub('M', '', r_value)
mem_key = r_key[:3] + '_limit'
resources[mem_key] = int(mem_value) * 1048576
resources_spec = docker.types.Resources(**resources)
return resources_spec
@staticmethod
def _parse_networks(service_list: dict) -> list:
"""Parse network key.
Args:
service_list (dict): Service configurations
Returns:
list, List of networks
"""
# Initialising empty list
networks = []
for n_values in service_list['networks'].values():
for n_key, n_value in n_values.items():
if 'name' in n_key:
networks.append(n_value)
return networks
@staticmethod
def _parse_logging(log_values: dict, service_config: dict):
"""Parse log key.
Args:
log_values (dict): logging configuration values
service_config (dict): Service specification
"""
for log_key, log_value in log_values.items():
if 'driver' in log_key:
service_config['log_driver'] = log_value
if 'options' in log_key:
service_config['log_driver_options'] = log_value
|
bsd-3-clause
| 469,997,558,959,020,900 | 30.798233 | 79 | 0.519105 | false |
AlexPereverzyev/spidy
|
spidy/document/web_client.py
|
1
|
3066
|
''' High-level HTTP communication interface. '''
import re
import codecs
import httplib
from spidy.common.errors import WebException
WEB_URL_PATTERN = re.compile('''([a-z]{3,9}:\/\/|[a-z]{3,9}:\/\/www\.|www\.)([\w.-]+)((?:\/[\w\/:@\-_~.%!$&'()*+=,;]*)?(?:\?[\w\-\+=&;%@._]*)?(?:#[\w\/:?@\-_~.%!$&'()*+=,;]*)?)''')
WEB_GET = 'GET'
WEB_POST = 'POST'
WEB_HTTPS = 'https'
WEB_SUCCESS = [200, 201, 202, 203, 204]
WEB_REDIRECT = [300, 301, 302, 303]
WEB_REDIRECT_MAX = 5
WEB_HEAD_LOCATION = 'location'
WEB_HTML_HEADERS = {'Accept':'text/html,application/xhtml+xml,application/xml',
'Accept-Language': 'en-US,en;q=0.5',
'Connection':'keep-alive',
'Cache-Control':'max-age=0'}
class WebClient(object):
''' Basic HTTP client for GET and POST requests. '''
def get(self, url_string, headers):
''' Sends GET request, handles redirects automatically. '''
doc = None
location = url_string
# merge headers with default ones
rq_headers = {}
for hk in WEB_HTML_HEADERS.keys():
rq_headers[hk] = WEB_HTML_HEADERS[hk]
if headers != None:
for hk in headers.keys():
rq_headers[hk] = headers[hk]
        redirects = 0
        conn = None  # so the close() guard below is safe on the first pass
        while True:
            # decompose URL
            m = WEB_URL_PATTERN.match(location)
            if m == None:
                if conn != None: conn.close()
                raise WebException('WebClient: invalid document URL')
schema = m.group(1)
domain = m.group(2)
path = m.group(3)
# get the document
try:
conn = None
if WEB_HTTPS in schema.lower():
conn = httplib.HTTPSConnection(domain)
else:
conn = httplib.HTTPConnection(domain)
conn.request(WEB_GET, path, headers = rq_headers)
resp = conn.getresponse()
except Exception as e:
conn.close()
raise e
# process status
            if resp.status in WEB_REDIRECT:
                redirects += 1  # was never incremented, allowing endless redirect loops
                if redirects > WEB_REDIRECT_MAX:
                    conn.close()
                    raise WebException('WebClient: exceeded max number of HTTP redirects')
                location = resp.getheader(WEB_HEAD_LOCATION)
                conn.close()  # release this connection before following the redirect
elif resp.status in WEB_SUCCESS:
doc = unicode(resp.read(), 'UTF8', 'ignore')
conn.close()
break
else:
conn.close()
raise WebException('WebClient: GET request failed')
return doc
def post(self, url, headers, body):
''' Sends POST request, handles redirects automatically. Not implemented yet. '''
pass
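    # Usage sketch (the URL is illustrative):
    #   client = WebClient()
    #   html = client.get('http://example.com/index.html', headers=None)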
|
bsd-3-clause
| -5,077,729,482,255,502,000 | 34.094118 | 184 | 0.468037 | false |
jwmatthews/r3
|
cds/cds_api/test/unit/managers/test_cds.py
|
1
|
2640
|
import logging
import os
import sys
from mongoengine.errors import NotUniqueError
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)) + "/../")
import base
from pulp_cds.api.managers.cds import CDSManager
log = logging.getLogger(__name__)
class TestCDSManager(base.BaseTestCase):
def setUp(self):
super(TestCDSManager, self).setUp()
self.cds_manager = CDSManager()
def tearDown(self):
super(TestCDSManager, self).tearDown()
def test_simple_get_all(self):
found = self.cds_manager.get_all()
self.assertEquals(len(found), 0)
hostnames = ["cds1.example.com", "cds2.example.com"]
cluster_id = "unit_test_cluster"
for h in hostnames:
self.cds_manager.create(hostname=h, cluster_id=cluster_id)
found = self.cds_manager.get_all()
self.assertEquals(len(found), 2)
for f in found:
self.assertIn(f.hostname, hostnames)
def test_simple_create(self):
hostname = "cds1.example.com"
cluster_id = "unit_test_cluster"
c = self.cds_manager.create(hostname=hostname, cluster_id=cluster_id)
self.assertIsNotNone(c)
from pulp_cds.api.models.cds import CDS
found = CDS.objects(hostname=hostname)
self.assertEquals(found[0], c)
def test_create_cds_already_exists(self):
hostname = "cds1.example.com"
cluster_id = "unit_test_cluster"
c = self.cds_manager.create(hostname=hostname, cluster_id=cluster_id)
self.assertIsNotNone(c)
self.assertRaises(NotUniqueError,
lambda: self.cds_manager.create(hostname=hostname, cluster_id=cluster_id))
def test_delete(self):
hostname = "cds1.example.com"
cluster_id = "unit_test_cluster"
c = self.cds_manager.create(hostname=hostname, cluster_id=cluster_id)
from pulp_cds.api.models.cds import CDS
found = CDS.objects(hostname=hostname)
self.assertEquals(found[0], c)
self.cds_manager.delete(hostname)
found = CDS.objects(hostname=hostname)
self.assertEquals(len(found), 0)
def test_get(self):
hostname = "cds1.example.com"
cluster_id = "unit_test_cluster"
found = self.cds_manager.get(hostname=hostname)
self.assertIsNone(found)
created = self.cds_manager.create(hostname=hostname, cluster_id=cluster_id)
found = self.cds_manager.get(hostname=hostname)
self.assertEquals(created, found)
def test_update(self):
pass
def test_sync_history(self):
pass
def test_sync(self):
pass
|
gpl-2.0
| 2,412,351,400,109,083,000 | 28.662921 | 90 | 0.642045 | false |
Belgingur/WrfUtils
|
PointForecast/bilinear_interpolation.py
|
1
|
5510
|
#!/usr/bin/env python
"""
Functions to perform bilinear interpolation based only on the station lon/lat and wrfout file.
"""
import math
import logging
import netCDF4
import numpy as np
LOG = logging.getLogger('belgingur.bilinear')
EARTH_RADIUS_M = 6378168
class TargetOutsideGridError(ValueError):
""" Raised when the target point is outside the borders of the forecast grid. """
def distance_to_side(point_1, point_2, station):
""" Analytical geometric distance between a point and a line crossing two other points. """
s_x, s_y = station['lon'], station['lat']
p1_x, p1_y = point_1['lon'], point_1['lat']
p2_x, p2_y = point_2['lon'], point_2['lat']
    if p2_x == p1_x:  # because if the line is vertical, the line equation would need to divide by zero
p1_x, p1_y = p1_y, p1_x
p2_x, p2_y = p2_y, p2_x
s_x, s_y = s_y, s_x
top = ((p2_y - p1_y) / (p2_x - p1_x)) * s_x - s_y + ((p2_x * p1_y - p1_x * p2_y) / (p2_x - p1_x))
bottom = (1 + ((p2_y - p1_y)/(p2_x - p1_x)) ** 2) ** 0.5
dist = abs(top/bottom)
return dist
def generate_weights_bilinear(station, corners):
""" Calculate weights for bilinear interpolation based on distances from each 'wall' of a grid cell. """
distances = {
"_0": distance_to_side(corners[(0, 0)], corners[(1, 0)], station),
"_1": distance_to_side(corners[(0, 1)], corners[(1, 1)], station),
"0_": distance_to_side(corners[(0, 0)], corners[(0, 1)], station),
"1_": distance_to_side(corners[(1, 0)], corners[(1, 1)], station)
}
denominator = (distances['_0'] + distances['_1']) * (distances['0_'] + distances['1_'])
weights = [ # [point_id, weight]
[corners[(0, 0)]['point_id'], distances["1_"] * distances["_1"] / denominator],
[corners[(0, 1)]['point_id'], distances["1_"] * distances["_0"] / denominator],
[corners[(1, 0)]['point_id'], distances["0_"] * distances["_1"] / denominator],
[corners[(1, 1)]['point_id'], distances["0_"] * distances["_0"] / denominator]
]
return weights
def globe_distance_deg(lat1, lon1, lat2, lon2):
""" Distance between two points [deg lat/lon]. The distance has the same units as the radius, m by default. """
lat1, lon1, lat2, lon2 = list(map(math.radians, [lat1, lon1, lat2, lon2]))
d_lat = (lat2 - lat1) / 2
d_lon = (lon2 - lon1) / 2
a = math.sin(d_lat) * math.sin(d_lat) + math.cos(lat1) * math.cos(lat2) * math.sin(d_lon) * math.sin(d_lon)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
d = EARTH_RADIUS_M * c
return d
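# globe_distance_deg() above is the haversine formula:
#   a = sin^2(dlat/2) + cos(lat1)*cos(lat2)*sin^2(dlon/2)
#   d = 2 * R * atan2(sqrt(a), sqrt(1 - a))
# with R = EARTH_RADIUS_M, so distances come back in metres.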
def closest_point(s_lon, s_lat, longs, lats):
"""
First find nearest neighbour candidate by approximating the cell size
and collecting all points distant in lon/lat less than cell size in lon and lat, respectively.
"""
long_span = (longs.max() - longs.min()) / longs.shape[1]
lat_span = (lats.max() - lats.min()) / lats.shape[0]
candidates = []
for (j, i) in np.ndindex(longs.shape):
if abs(longs[j, i] - s_lon) < long_span and abs(lats[j, i] - s_lat) < lat_span:
candidates.append((i, j))
if not candidates:
# print('No estimated candidates, indexing the whole grid')
for (j, i) in np.ndindex(longs.shape):
candidates.append((i, j))
cand_dict = {(i, j): globe_distance_deg(s_lat, s_lon, lats[j, i], longs[j, i]) for (i, j) in candidates}
(i, j) = min(cand_dict, key=cand_dict.get)
return {'point_id': (i, j), 'lon': longs[j, i], 'lat': lats[j, i], 'i': i, 'j': j}
def point_info(i, j, lons, lats):
if i < 0 or j < 0:
raise IndexError('Negative value in point indexes')
return {'point_id': (i, j), 'lon': lons[j, i], 'lat': lats[j, i]}
def extract_coordinates(wrfout, margin):
with netCDF4.Dataset(wrfout) as dataset:
if margin:
y, x = dataset.variables['XLAT'][0].shape
if y - 2 * margin <= 0 or x - 2 * margin <= 0:
raise ValueError('Requested margin is larger than the domain dimensions')
lats = dataset.variables['XLAT'][0, margin:-margin, margin:-margin]
lons = dataset.variables['XLONG'][0, margin:-margin, margin:-margin]
else:
lats, lons = dataset.variables['XLAT'][0], dataset.variables['XLONG'][0]
return lats, lons
def do_weights(station, wrfout, margin=0, nearest_neighbour=False):
"""
Given a station and wrfout pair, seeks the 'corner grid points' for the station location
and calculates bilinear interpolation weights from the distances to each corner.
"""
lats, lons = extract_coordinates(wrfout, margin)
s_lon = station['lon']
s_lat = station['lat']
nearest = closest_point(s_lon, s_lat, lons, lats)
x2_off = 1 if s_lon > nearest['lon'] else -1
y2_off = 1 if s_lat > nearest['lat'] else -1
try:
corners = {
(0, 0): nearest,
(0, 1): point_info(nearest['i'], nearest['j'] + y2_off, lons, lats),
(1, 0): point_info(nearest['i'] + x2_off, nearest['j'], lons, lats),
(1, 1): point_info(nearest['i'] + x2_off, nearest['j'] + y2_off, lons, lats)
}
except IndexError:
raise TargetOutsideGridError('The selected point is outside the borders of the grid.')
if nearest_neighbour:
return {nearest['point_id']: 1}
weights = generate_weights_bilinear(station, corners)
weights = {k: v for [k, v] in weights}
return weights
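# Usage sketch (station coordinates and file name are illustrative):
#   station = {'lon': -21.9, 'lat': 64.1}
#   weights = do_weights(station, 'wrfout_d02.nc')   # {(i, j): weight, ...}
# The four bilinear weights sum to 1 by construction.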
|
lgpl-3.0
| 4,345,679,591,206,021,600 | 33.654088 | 115 | 0.592015 | false |
weibohit/tools
|
unittest_serial.py
|
1
|
1764
|
from driver.serial_impl import SerialImpl
from utility.log import InitLogging
from utility.log import VLOG
import optparse
import time
import sys
def Init():
ser = SerialImpl()
ser.LoopBackTest()
ser.Close()
def ContinueSend3(ser, seconds):
timeout = time.time() + seconds
while time.time() < timeout:
time.sleep(0.04)
ser.Write('3')
def Start(ser):
ser.Write("{CUR20;MCS16;SPD5000;ENA;};")
def Before(ser):
ser.Write("{CUR20;MCS16;SPD5000;STP-5000;ENA;};")
def Step(ser):
ser.Write("STP1")
def Abort(ser):
ser.Write("OFF;")
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option('--version', action='store_false', dest='version', \
help='info current version')
parser.add_option('--debug', action='store', dest='opt_debug', \
help='enable debug mode of application')
parser.add_option('--port', action='store', dest='opt_port', type='int', \
help='enable debug mode of application')
parser.add_option('--log-path', action='store', dest="opt_log_path", \
help='write server log to file instead of stderr, \
increase log level to INFO')
parser.add_option('--verbose', action='store_false', dest="verbose", \
help='log verbosely')
parser.add_option('--silent', action='store_false', dest="silent", \
help='log nothing')
parser.add_option('--unittest', action='store_false', dest="silent", \
help='run unit test cases during launching')
(opts, _) = parser.parse_args()
# log system
InitLogging(opts)
# init serial
ser = SerialImpl()
# ContinueSend3(ser, 10)
st = time.time()
ast = time.asctime()
for i in range(180):
Before(ser)
time.sleep(1)
Abort(ser)
# Start(ser)
# time.sleep(5)
# Abort(ser)
ser.Close()
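# Invocation sketch (options as declared above; values are illustrative):
#   python unittest_serial.py --port 1 --log-path serial.log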
|
mit
| -8,875,937,484,170,843,000 | 25.727273 | 76 | 0.651361 | false |
opencord/voltha
|
voltha/extensions/omci/omci_messages.py
|
1
|
18479
|
#
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import structlog
from scapy.fields import ByteField, ThreeBytesField, StrFixedLenField, ConditionalField, IntField, Field
from scapy.fields import ShortField, BitField
from scapy.packet import Packet
from voltha.extensions.omci.omci_defs import AttributeAccess, OmciSectionDataSize
from voltha.extensions.omci.omci_fields import OmciTableField, OmciVariableLenZeroPadField
import voltha.extensions.omci.omci_entities as omci_entities
log = structlog.get_logger()
class OmciData(Field):
__slots__ = Field.__slots__ + ['_entity_class']
def __init__(self, name, entity_class="entity_class"):
Field.__init__(self, name=name, default=None, fmt='s')
self._entity_class = entity_class
def addfield(self, pkt, s, val):
class_id = getattr(pkt, self._entity_class)
entity_class = omci_entities.entity_id_to_class_map.get(class_id)
for attribute in entity_class.attributes:
if AttributeAccess.SetByCreate not in attribute.access:
continue
if attribute.field.name == 'managed_entity_id':
continue
fld = attribute.field
s = fld.addfield(pkt, s, val.get(fld.name, fld.default))
return s
def getfield(self, pkt, s):
"""Extract an internal value from a string"""
class_id = getattr(pkt, self._entity_class)
entity_class = omci_entities.entity_id_to_class_map.get(class_id)
data = {}
for attribute in entity_class.attributes:
if AttributeAccess.SetByCreate not in attribute.access:
continue
if attribute.field.name == 'managed_entity_id':
continue
fld = attribute.field
s, value = fld.getfield(pkt, s)
data[fld.name] = value
return s, data
class OmciMaskedData(Field):
__slots__ = Field.__slots__ + ['_entity_class', '_attributes_mask']
def __init__(self, name, entity_class="entity_class",
attributes_mask="attributes_mask"):
Field.__init__(self, name=name, default=None, fmt='s')
self._entity_class = entity_class
self._attributes_mask = attributes_mask
def addfield(self, pkt, s, val):
class_id = getattr(pkt, self._entity_class)
attribute_mask = getattr(pkt, self._attributes_mask)
entity_class = omci_entities.entity_id_to_class_map.get(class_id)
indices = entity_class.attribute_indices_from_mask(attribute_mask)
for index in indices:
fld = entity_class.attributes[index].field
s = fld.addfield(pkt, s, val[fld.name])
return s
def getfield(self, pkt, s):
"""Extract an internal value from a string"""
class_id = getattr(pkt, self._entity_class)
attribute_mask = getattr(pkt, self._attributes_mask)
entity_class = omci_entities.entity_id_to_class_map[class_id]
indices = entity_class.attribute_indices_from_mask(attribute_mask)
data = {}
table_attribute_mask = 0
for index in indices:
try:
fld = entity_class.attributes[index].field
except IndexError, e:
log.error("attribute-decode-failure", attribute_index=index,
entity_class=entity_class, e=e)
continue
try:
s, value = fld.getfield(pkt, s)
except Exception, _e:
raise
if isinstance(pkt, OmciGetResponse) and isinstance(fld, OmciTableField):
data[fld.name + '_size'] = value
table_attribute_mask = table_attribute_mask | (1 << (16 - index))
else:
data[fld.name] = value
if table_attribute_mask:
data['table_attribute_mask'] = table_attribute_mask
return s, data
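# Note: in OmciGetResponse decoding above, a table attribute contributes a
# '<name>_size' entry and a bit in 'table_attribute_mask' instead of its rows;
# per ITU-T G.988, the rows themselves are then retrieved with get-next
# requests (see OmciGetNext below).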
class OmciMessage(Packet):
name = "OmciMessage"
message_id = None # OMCI message_type value, filled by derived classes
fields_desc = []
class OmciCreate(OmciMessage):
name = "OmciCreate"
message_id = 0x44
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
OmciData("data")
]
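# Construction sketch (scapy keyword style; the ids and attribute name are
# illustrative only):
#   frame = OmciCreate(entity_class=0x110, entity_id=0x8001,
#                      data={'some_set_by_create_attr': value})
# 'data' is keyed by attribute field name, as consumed by OmciData above.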
class OmciCreateResponse(OmciMessage):
name = "OmciCreateResponse"
message_id = 0x24
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", None),
ByteField("success_code", 0),
ShortField("parameter_error_attributes_mask", None),
]
class OmciDelete(OmciMessage):
name = "OmciDelete"
message_id = 0x46
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", None),
]
class OmciDeleteResponse(OmciMessage):
name = "OmciDeleteResponse"
message_id = 0x26
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", None),
ByteField("success_code", 0),
]
class OmciSet(OmciMessage):
name = "OmciSet"
message_id = 0x48
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ShortField("attributes_mask", None),
OmciMaskedData("data")
]
class OmciSetResponse(OmciMessage):
name = "OmciSetResponse"
message_id = 0x28
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", None),
ByteField("success_code", 0),
ShortField("unsupported_attributes_mask", None),
ShortField("failed_attributes_mask", None),
]
class OmciGet(OmciMessage):
name = "OmciGet"
message_id = 0x49
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ShortField("attributes_mask", None)
]
class OmciGetResponse(OmciMessage):
name = "OmciGetResponse"
message_id = 0x29
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ByteField("success_code", 0),
ShortField("attributes_mask", None),
ConditionalField(OmciMaskedData("data"),
lambda pkt: pkt.success_code in (0, 9)),
ConditionalField(OmciVariableLenZeroPadField("zero_padding", 36),
lambda pkt: pkt.success_code == 9),
# These fields are only valid if attribute error (status == 9)
ConditionalField(ShortField("unsupported_attributes_mask", 0),
lambda pkt: pkt.success_code == 9),
ConditionalField(ShortField("failed_attributes_mask", 0),
lambda pkt: pkt.success_code == 9)
]
class OmciGetAllAlarms(OmciMessage):
name = "OmciGetAllAlarms"
message_id = 0x4b
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0), # Always 0 (ONT instance)
ByteField("alarm_retrieval_mode", 0) # 0 or 1
]
class OmciGetAllAlarmsResponse(OmciMessage):
name = "OmciGetAllAlarmsResponse"
message_id = 0x2b
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0),
ShortField("number_of_commands", None)
]
class OmciGetAllAlarmsNext(OmciMessage):
name = "OmciGetAllAlarmsNext"
message_id = 0x4c
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0),
ShortField("command_sequence_number", None)
]
class OmciGetAllAlarmsNextResponse(OmciMessage):
name = "OmciGetAllAlarmsNextResponse"
message_id = 0x2c
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0),
ShortField("alarmed_entity_class", None),
ShortField("alarmed_entity_id", 0),
BitField("alarm_bit_map", None, 224)
]
class OmciMibUpload(OmciMessage):
name = "OmciMibUpload"
message_id = 0x4d
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0),
]
class OmciMibUploadResponse(OmciMessage):
name = "OmciMibUploadResponse"
message_id = 0x2d
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0),
ShortField("number_of_commands", None)
]
class OmciMibUploadNext(OmciMessage):
name = "OmciMibUploadNext"
message_id = 0x4e
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0),
ShortField("command_sequence_number", None)
]
class OmciMibUploadNextResponse(OmciMessage):
name = "OmciMibUploadNextResponse"
message_id = 0x2e
fields_desc = [
ShortField("entity_class", 2), # Always 2 (ONT data)
ShortField("entity_id", 0),
ShortField("object_entity_class", None),
ShortField("object_entity_id", 0),
ShortField("object_attributes_mask", None),
OmciMaskedData("object_data", entity_class='object_entity_class',
attributes_mask='object_attributes_mask')
]
class OmciMibReset(OmciMessage):
name = "OmciMibReset"
message_id = 0x4f
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0)
]
class OmciMibResetResponse(OmciMessage):
name = "OmciMibResetResponse"
message_id = 0x2f
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ByteField("success_code", 0)
]
class OmciAlarmNotification(OmciMessage):
name = "AlarmNotification"
message_id = 0x10
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
BitField("alarm_bit_map", 0, 224),
ThreeBytesField("zero_padding", 0),
ByteField("alarm_sequence_number", None)
]
class OmciAttributeValueChange(OmciMessage):
name = "AttributeValueChange"
message_id = 0x11
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ShortField("attributes_mask", None),
OmciMaskedData("data")
]
class OmciReboot(OmciMessage):
name = "OmciOnuReboot"
message_id = 0x59
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ByteField("reboot_code", 0)
]
class OmciRebootResponse(OmciMessage):
name = "OmciOnuRebootResponse"
message_id = 0x39
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ByteField("success_code", 0)
]
class OmciGetNext(OmciMessage):
name = "OmciGetNext"
message_id = 0x5A
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ShortField("attributes_mask", None),
ShortField("command_sequence_number", None)
]
class OmciGetNextResponse(OmciMessage):
name = "OmciGetNextResponse"
message_id = 0x3A
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ByteField("success_code", 0),
ShortField("attributes_mask", None),
ConditionalField(OmciMaskedData("data"),
lambda pkt: pkt.success_code == 0)
]
class OmciSynchronizeTime(OmciMessage):
name = "OmciSynchronizeTime"
message_id = 0x58
fields_desc = [
ShortField("entity_class", 256), # OntG
ShortField("entity_id", 0),
ShortField("year", 0), # eg) 2018
ByteField("month", 0), # 1..12
ByteField("day", 0), # 1..31
ByteField("hour", 0), # 0..23
ByteField("minute", 0), # 0..59
ByteField("second", 0) # 0..59
]
class OmciSynchronizeTimeResponse(OmciMessage):
name = "OmciSynchronizeTimeResponse"
message_id = 0x38
fields_desc = [
ShortField("entity_class", 256), # OntG
ShortField("entity_id", 0),
ByteField("success_code", 0),
ConditionalField(ShortField("success_info", None),
lambda pkt: pkt.success_code == 0)
]
class OmciGetCurrentData(OmciMessage):
name = "OmciGetCurrentData"
message_id = 0x5C
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ShortField("attributes_mask", None),
]
class OmciGetCurrentDataResponse(OmciMessage):
name = "OmciGetCurrentDataResponse"
message_id = 0x3C
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ByteField("success_code", 0),
ShortField("attributes_mask", None),
ShortField("unsupported_attributes_mask", None),
ShortField("failed_attributes_mask", None),
ConditionalField(
OmciMaskedData("data"), lambda pkt: pkt.success_code == 0)
]
class OmciStartSoftwareDownload(OmciMessage):
name = "OmciStartSoftwareDownload"
message_id = 0x53
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("window_size", 0),
IntField("image_size", 0),
ByteField("image_number", 1), # Always only 1 in parallel
ShortField("instance_id", None) # should be same as "entity_id"
]
class OmciStartSoftwareDownloadResponse(OmciMessage):
name = "OmciStartSoftwareDownloadResponse"
message_id = 0x33
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("result", 0),
ByteField("window_size", 0),
ByteField("image_number", 1), # Always only 1 in parallel
ShortField("instance_id", None) # should be same as "entity_id"
]
class OmciEndSoftwareDownload(OmciMessage):
name = "OmciEndSoftwareDownload"
message_id = 0x55
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
IntField("crc32", 0),
IntField("image_size", 0),
ByteField("image_number", 1), # Always only 1 in parallel
ShortField("instance_id", None),# should be same as "entity_id"
]
class OmciEndSoftwareDownloadResponse(OmciMessage):
name = "OmciEndSoftwareDownload"
message_id = 0x35
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("result", 0),
ByteField("image_number", 1), # Always only 1 in parallel
ShortField("instance_id", None),# should be same as "entity_id"
ByteField("result0", 0) # same as result
]
class OmciDownloadSection(OmciMessage):
name = "OmciDownloadSection"
message_id = 0x14
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("section_number", 0), # Always only 1 in parallel
StrFixedLenField("data", 0, length=OmciSectionDataSize) # section data
]
class OmciDownloadSectionLast(OmciMessage):
name = "OmciDownloadSection"
message_id = 0x54
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("section_number", 0), # Always only 1 in parallel
StrFixedLenField("data", 0, length=OmciSectionDataSize) # section data
]
class OmciDownloadSectionResponse(OmciMessage):
name = "OmciDownloadSectionResponse"
message_id = 0x34
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("result", 0),
ByteField("section_number", 0), # Always only 1 in parallel
]
class OmciActivateImage(OmciMessage):
name = "OmciActivateImage"
message_id = 0x56
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("activate_flag", 0) # Activate image unconditionally
]
class OmciActivateImageResponse(OmciMessage):
name = "OmciActivateImageResponse"
message_id = 0x36
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("result", 0) # Activate image unconditionally
]
class OmciCommitImage(OmciMessage):
name = "OmciCommitImage"
message_id = 0x57
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
]
class OmciCommitImageResponse(OmciMessage):
name = "OmciCommitImageResponse"
message_id = 0x37
fields_desc = [
ShortField("entity_class", 7), # Always 7 (Software image)
ShortField("entity_id", None),
ByteField("result", 0) # Activate image unconditionally
]
class OmciTest(OmciMessage):
name = "OmciTest"
message_id = 0x52
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ShortField('self_test', 0x07)
]
class OmciTestResponse(OmciMessage):
name = "OmciTesResponse"
message_id = 0x32
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ByteField("success_code", None)
]
class OmciTestResult(OmciMessage):
name = "TestResult"
message_id = 0x1B
fields_desc = [
ShortField("entity_class", None),
ShortField("entity_id", 0),
ShortField("power_feed_voltage", 1),
ShortField('received_optical_power', 3),
ShortField('mean_optical_launch_power', 5),
ShortField('laser_bias_current', 9),
ShortField('temperature', 12)
]
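# Illustrative only -- a sketch of building one of these messages with scapy;
# the constructor/str() behaviour is standard scapy, but the field values
# below are hypothetical:
#   pkt = OmciStartSoftwareDownload(entity_id=1, window_size=31,
#                                   image_size=2048, instance_id=1)
#   raw_bytes = str(pkt)  # serialize to the on-wire byte string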
|
apache-2.0
| 2,495,795,283,369,438,700 | 30.320339 | 104 | 0.616159 | false |
venthur/mushu
|
libmushu/driver/gtec.py
|
1
|
10764
|
#!/usr/bin/env python
# gtec.py
# Copyright (C) 2013 Bastian Venthur
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# TODO: update to new version of pyusb
import struct
import time
from exceptions import Exception
import logging
import usb
from scipy.signal import iirfilter
import numpy as np
from libmushu.amplifier import Amplifier
logger = logging.getLogger(__name__)
logger.info('Logger started')
ID_VENDOR_GTEC = 0x153c
# I saw an amp with this vendor id too
ID_VENDOR_GTEC2 = 0x15c3
ID_PRODUCT_GUSB_AMP = 0x0001
CX_OUT = usb.TYPE_VENDOR | usb.ENDPOINT_OUT
class GUSBamp(Amplifier):
def __init__(self):
logger.info('Initializing GUSBamp instance')
# list of available amps
self.amps = []
for bus in usb.busses():
for device in bus.devices:
if (device.idVendor in [ID_VENDOR_GTEC, ID_VENDOR_GTEC2] and
device.idProduct == ID_PRODUCT_GUSB_AMP):
self.amps.append(device)
self.devh = None
self.mode = None
# Initialize the amplifier and make it ready.
device = self.amps[0]
self.devh = device.open()
        # detach kernel driver if necessary
config = device.configurations[0]
self.devh.setConfiguration(config)
assert(len(config.interfaces) > 0)
# sometimes it is the other one
first_interface = config.interfaces[0][0]
if first_interface is None:
first_interface = config.interfaces[0][1]
first_setting = first_interface.alternateSetting
self.devh.claimInterface(first_interface)
self.devh.setAltInterface(first_interface)
# initialization straight from the usb-dump
self.set_mode('data')
self.devh.controlMsg(CX_OUT, 0xb6, value=0x80, buffer=0)
self.devh.controlMsg(CX_OUT, 0xb5, value=0x80, buffer=0)
self.devh.controlMsg(CX_OUT, 0xb9, value=0x00, buffer="\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10")
self.set_slave_mode(False)
self.devh.controlMsg(CX_OUT, 0xd3, value=0x01, buffer=0)
self.devh.controlMsg(CX_OUT, 0xca, value=0x01, buffer=0)
self.devh.controlMsg(CX_OUT, 0xc8, value=0x01, buffer="\x00"*16)
self.set_common_reference()
self.set_common_ground()
self.set_calibration_mode('sine')
        self.set_sampling_frequency(128, [False for i in range(16)], None, None)
def start(self):
self.devh.controlMsg(CX_OUT, 0xb5, value=0x08, buffer=0)
self.devh.controlMsg(CX_OUT, 0xf7, value=0x00, buffer=0)
def stop(self):
self.devh.controlMsg(CX_OUT, 0xb8, [])
def get_data(self):
"""Get data."""
# TODO: should we use numpy arrays right here?
# TODO: what is the in-endpoint
# 0x2 or 0x86
endpoint = 0x86
# TODO what is the optimal number here
size = 2028 #512
try:
# TODO what is the optimal timeout here?
data = self.devh.bulkRead(endpoint, size, 100)
except usb.USBError:
data = []
data = ''.join(map(chr, data))
data = np.fromstring(data, np.float32, len(data)/4)
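        # 17 columns: presumably the 16 amplifier channels plus one
        # counter/trigger channel (matches get_channels below)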
try:
data = data.reshape(-1, 17)
        except ValueError:
logger.error("Got incomplete packet from the amp, discarding it!")
data = np.array([]).reshape(-1, 17)
if self.mode == 'impedance':
data = self.calculate_impedance(data)
elif self.mode == 'data':
# get data in mV
data /= 8.15
return data, []
def get_channels(self):
return [str(i) for i in range(17)]
@staticmethod
def is_available():
for bus in usb.busses():
for device in bus.devices:
if (device.idVendor in [ID_VENDOR_GTEC, ID_VENDOR_GTEC2] and
device.idProduct == ID_PRODUCT_GUSB_AMP):
return True
return False
###########################################################################
# Low level amplifier methods
###########################################################################
def set_mode(self, mode):
"""Set mode, 'impedance', 'data'."""
if mode == 'impedance':
self.devh.controlMsg(CX_OUT, 0xc9, value=0x00, buffer=0)
self.devh.controlMsg(CX_OUT, 0xc2, value=0x03, buffer=0)
self.mode = 'impedance'
elif mode == 'calibrate':
self.devh.controlMsg(CX_OUT, 0xc1, value=0x00, buffer=0)
self.devh.controlMsg(CX_OUT, 0xc2, value=0x02, buffer=0)
self.mode = 'calibration'
elif mode == 'data':
self.devh.controlMsg(CX_OUT, 0xc0, value=0x00, buffer=0)
self.devh.controlMsg(CX_OUT, 0xc2, value=0x01, buffer=0)
self.mode = 'data'
else:
raise AmpError('Unknown mode: %s' % mode)
    def set_sampling_frequency(self, fs, channels, bpfilter, notchfilter):
""" Set the sampling frequency and filters for individual channels.
Parameters:
fs -- sampling frequency
channels -- list of booleans: channels[0] == True: enable filter for channel 0
bpfilter -- tuple: parameters for the band pass filter (hp, lp, fs, order) or None
notchfilter -- tuple: parameters for the band stop filter (hp, lp, fs, order) or None
"""
# we have: hp, lp, fs, order, typ
# signal.iirfilter(order/2, [hp/(fs/2), lp/(fs/2)], ftype='butter', btype='band')
# we get 18 coeffs and put them in as '<d' in the buffer
# struct.pack('<'+'d'*18, *coeffs)
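        # e.g. (hypothetical values) an order-8 band pass of 0.1-40 Hz at
        # fs=128 would be built like this:
        #   b, a = iirfilter(4, [0.1 / 64.0, 40.0 / 64.0],
        #                    ftype='butter', btype='band')
        #   buf = struct.pack('<' + 'd' * 18, *(list(b) + list(a)))
        # which yields the 9 + 9 = 18 coefficients mentioned above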
# special filter: means no filter
null_filter = "\x00\x00\x00\x00\x00\x00\xf0\x3f"+"\x00\x00\x00\x00\x00\x00\x00\x00"*17
if bpfilter:
bp_hp, bp_lp, bp_fs, bp_order = bpfilter
bp_b, bp_a = iirfilter(bp_order/2, [bp_hp/(bp_fs/2), bp_lp/(bp_fs/2)], ftype='butter', btype='band')
bp_filter = list(bp_b)
bp_filter.extend(list(bp_a))
bp_filter = struct.pack("<"+"d"*18, *bp_filter)
else:
bp_filter = null_filter
if notchfilter:
bs_hp, bs_lp, bs_fs, bs_order = notchfilter
bs_b, bs_a = iirfilter(bs_order/2, [bs_hp/(bs_fs/2), bs_lp/(bs_fs/2)], ftype='butter', btype='bandstop')
bs_filter = list(bs_b)
# the notch filter has (always?) an order of 4 so fill the gaps with
# zeros
if len(bs_filter) < 9:
diff = 9 - len(bs_filter)
bs_filter.extend([0.0 for i in range(diff)])
bs_filter.extend(list(bs_a))
if len(bs_filter) < 18:
diff = 18 - len(bs_filter)
bs_filter.extend([0.0 for i in range(diff)])
bs_filter = struct.pack("<"+"d"*18, *bs_filter)
else:
bs_filter = null_filter
# set the filters for all channels
if bpfilter == notchfilter == None:
self.devh.controlMsg(CX_OUT, 0xc6, value=0x01, buffer=bp_filter)
self.devh.controlMsg(CX_OUT, 0xc7, value=0x01, buffer=bs_filter)
else:
idx = 1
for i in channels:
if i:
self.devh.controlMsg(CX_OUT, 0xc6, value=idx, buffer=bp_filter)
self.devh.controlMsg(CX_OUT, 0xc7, value=idx, buffer=bs_filter)
idx += 1
# set the sampling frequency
self.devh.controlMsg(CX_OUT, 0xb6, value=fs, buffer=0)
def set_calibration_mode(self, mode):
# buffer: [0x03, 0xd0, 0x07, 0x02, 0x00, 0xff, 0x07]
        #          ====  ==========
        #          (1)   (2)
        # (1) mode
        # (2) amplitude: little endian (0x07d0 = 2000)
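        # e.g. struct.pack('<H', 2000) == '\xd0\x07' -- the two amplitude
        # bytes in the buffers below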
if mode == 'sine':
self.devh.controlMsg(CX_OUT, 0xcb, value=0x00, buffer="\x03\xd0\x07\x02\x00\xff\x07")
elif mode == 'sawtooth':
self.devh.controlMsg(CX_OUT, 0xcb, value=0x00, buffer="\x02\xd0\x07\x02\x00\xff\x07")
elif mode == 'whitenoise':
self.devh.controlMsg(CX_OUT, 0xcb, value=0x00, buffer="\x05\xd0\x07\x02\x00\xff\x07")
elif mode == 'square':
self.devh.controlMsg(CX_OUT, 0xcb, value=0x00, buffer="\x01\xd0\x07\x02\x00\xff\x07")
else:
raise AmpError('Unknown mode: %s' % mode)
def calculate_impedance(self, u_measured, u_applied=1e4):
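        # e.g. a (hypothetical) reading of u_measured=100 gives
        # 1e8 / 9900 - 1e4 ~= 101, i.e. roughly 101 Ohm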
return (u_measured * 1e6) / (u_applied - u_measured) - 1e4
def set_common_ground(self, a=False, b=False, c=False, d=False):
"""Set common ground for the electrodes.
Parameters:
a, b, c, d -- correspond to the groups on the amp, either of them
can be true or false
"""
v = (d << 3) + (c << 2) + (b << 1) + a
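        # e.g. a=True, c=True packs to v == 0b0101 == 5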
self.devh.controlMsg(CX_OUT, 0xbe, value=v, buffer=0)
def set_common_reference(self, a=False, b=False, c=False, d=False):
"""Set common reference for the electrodes.
Parameters:
a, b, c, d -- correspond to the groups on the amp, either of them
can be true or false
"""
v = (d << 3) + (c << 2) + (b << 1) + a
self.devh.controlMsg(CX_OUT, 0xbf, value=v, buffer=0)
def set_slave_mode(self, slave):
"""Set amp into slave or master mode.
Parameters:
slave -- if true, set into slave mode, set to master otherwise
"""
v = 1 if slave else 0
self.devh.controlMsg(CX_OUT, 0xcd, value=v, buffer=0)
class AmpError(Exception):
pass
def main():
amp = GUSBamp()
amp.start()
try:
while True:
t = time.time()
            data, _ = amp.get_data()
dt = time.time() - t
if len(data) > 0:
print "%.5f seconds (%.5f ps), length: %d" % (dt, (len(data) / 16.) * 1/dt, len(data))
finally:
amp.stop()
if __name__ == '__main__':
import sys
import cProfile
if len(sys.argv) > 1 and sys.argv[1].startswith('prof'):
cProfile.run('main()', 'prof')
else:
main()
|
gpl-2.0
| -3,668,518,099,025,746,000 | 35.364865 | 129 | 0.571906 | false |
raymontag/keepassc
|
setup.py
|
1
|
1140
|
from os import mkdir, stat
from stat import ST_MODE
from distutils.core import setup
setup(name = "keepassc",
version = "1.8.2",
author = "Karsten-Kai König, Scott Hansen",
author_email = "grayfox@outerhaven.de",
url = "http://raymontag.github.com/keepassc",
download_url = "https://github.com/raymontag/keepassc/tarball/master",
description = "A password manager that is fully compatible to KeePass v.1.x and KeePassX",
packages = ['keepassc'],
scripts = ['bin/keepassc', 'bin/keepassc-server', 'bin/keepassc-agent'],
install_requires = ['kppy', 'pycryptodomex'],
classifiers = [
'Programming Language :: Python :: 3.3',
'Operating System :: POSIX',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Development Status :: 5 - Production/Stable',
'Environment :: Console :: Curses'],
license = "ISC, MIT",
data_files = [('share/man/man1', ['keepassc.1', 'keepassc-server.1', 'keepassc-agent.1']),
('share/doc/keepassc', ['README.md', 'LICENSE.md', 'CHANGELOG'])],
)
|
isc
| 1,510,634,092,117,000,700 | 44.56 | 96 | 0.615452 | false |
askbow/cloaked-octo-ironman
|
google-python-exercises/hello.py
|
1
|
1089
|
#!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
"""A tiny Python program to check that Python is working.
Try running this program from the command line like this:
python hello.py
python hello.py Alice
That should print:
Hello World -or- Hello Alice
Try changing the 'Hello' to 'Howdy' and run again.
Once you have that working, you're ready for class -- you can edit
and run Python code; now you just need to learn Python!
"""
import sys
# Define a main() function that prints a little greeting.
def main():
# Get the name from the command line, using 'World' as a fallback.
if len(sys.argv) >= 2:
name = sys.argv[1]
else:
name = 'and prosper!' #student's note: no point in following instructions concerning the contents of strings: better get creative
print 'Live long', name
# This is the standard boilerplate that calls the main() function.
if __name__ == '__main__':
main()
|
bsd-2-clause
| -7,344,168,670,453,875,000 | 32 | 133 | 0.719927 | false |
ericholscher/fabric
|
setup.py
|
1
|
2369
|
#!/usr/bin/env python
import sys
from setuptools import setup, find_packages
from fabric.version import get_version
readme = open('README').read()
long_description = """
To find out what's new in this version of Fabric, please see `the changelog
<http://docs.fabfile.org/changes/%s.html>`_.
You can also install the `in-development version <https://github.com/bitprophet/fabric/tarball/master#egg=fabric-dev>`_ using pip, with `pip install fabric==dev`.
----
%s
----
For more information, please see the Fabric website or execute ``fab --help``.
""" % (get_version('short'), readme)
# PyCrypto>2.0 + Python 2.5 + pip == bad times.
# We can't easily detect pip usage at this point, but we can at least limit our
# "downgrade" of the PyCrypto requirement to 2.5-only.
PYCRYPTO = "<2.1" if (sys.version_info[:2] == (2, 5)) else ">=1.9"
setup(
name='Fabric',
version=get_version('short'),
description='Fabric is a simple, Pythonic tool for remote execution and deployment.',
long_description=long_description,
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
url='http://fabfile.org',
packages=find_packages(),
test_suite='nose.collector',
tests_require=['nose', 'fudge'],
install_requires=['pycrypto %s' % PYCRYPTO, 'paramiko >=1.7.6'],
entry_points={
'console_scripts': [
'fab = fabric.main:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Clustering',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
bsd-2-clause
| 1,664,655,176,561,810,400 | 33.333333 | 198 | 0.62558 | false |
p0cisk/Bookshelf
|
routings.py
|
1
|
4987
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import Blueprint, jsonify, render_template, redirect, request
from models import Books, Authors, Stories, AuthorsStories
from peewee import fn
from decorators import count
from playhouse.shortcuts import model_to_dict
from collections import defaultdict
mod_routings = Blueprint('routings', __name__)
#PAGES
@mod_routings.route('/')
def index():
return redirect('books')
@mod_routings.route('/books')
def books():
return render_template('books.html')
@mod_routings.route('/authors')
def authors():
return render_template('authors.html')
@mod_routings.route('/authors/<int:aid>')
def authors_id(aid):
return render_template('authors_id.html', aid=aid)
# API
@mod_routings.route('/api/books')
@count
def api_books():
#TODO: Fix when needed
result = []
#B = Books.select()
#S = Stories.select()
rs = Stories.select(Stories, Authors).join(AuthorsStories).join(Authors).aggregate_rows()
for row in rs:
print(row)
print(dir(row))
"""
rs = (Books.select(Books, Stories, Authors)
.join(Stories)
.join(AuthorsStories)
.join(Authors)
.aggregate_rows()
)
print( rs)
for row in rs:
#print (row)
#print (row.story_books)
#print (dir(row.story_books[0]))
#print (model_to_dict(row))
#book = {'title':row.title}
book = model_to_dict(row)#{'title':row.title}
authors = {}
#for story in row.story_books:
# print (story)
# print (list(story.authorsstories_set))
'''
authors = {}
for story in row.story_books:
print (story)
print (story.authorsstories_set)
print (dir(story.authorsstories_set))
author = story.author
authors[author.id] = '{}, {}'.format(author.second_name, author.first_name)
book_authors = []
for aid, author in authors.items():
book_authors.append({'id':aid, 'name':author})
book['authors'] = book_authors'''
result.append( book )
'''
book = {'title':row.title}
authors = {}
for story in row.story_books:
print (story)
print (story.authorsstories_set)
print (dir(story.authorsstories_set))
author = story.author
authors[author.id] = '{}, {}'.format(author.second_name, author.first_name)
book_authors = []
for aid, author in authors.items():
book_authors.append({'id':aid, 'name':author})
book['authors'] = book_authors
result.append( book )
'''
"""
return jsonify({'result':result})
@mod_routings.route('/api/authors', methods=['GET', 'POST'])
def api_authors():
if request.method=='GET':
rs = Authors.select().order_by(Authors.second_name, Authors.first_name).dicts()
return jsonify({'result':list(rs)})
else:
rs = Authors.create(**request.get_json(force=True))
return jsonify(model_to_dict(rs))
@mod_routings.route('/api/authors/<int:aid>', methods=['GET', 'PUT'])
def api_author(aid):
if request.method=='GET':
rs = Authors.select().where(Authors.id==aid).dicts().get()
return jsonify(rs)
else:
data = request.get_json(force=True)
rs = Authors.update(**data).where(Authors.id==aid).execute()
return jsonify(data)
@mod_routings.route('/api/authors_books/<int:aid>')
def api_author_books(aid):
books_id = set(Stories.select(fn.Distinct(Stories.book)).where(Stories.author==aid).tuples())
rs = Books.select().where(Books.id<<books_id).dicts()
return jsonify({'result':list(rs)})
@mod_routings.route('/api/authors_stories/<int:aid>')
def api_author_stories(aid):
rs = Stories.select().join(AuthorsStories).where(AuthorsStories.author==aid).dicts()
return jsonify({'result':list(rs)})
@mod_routings.route('/api/stories')
@count
def api_stories():
result = []
rs = (AuthorsStories
.select(AuthorsStories, Stories, Authors)
.join(Stories)
.switch(AuthorsStories)
.join(Authors)
)
stories = defaultdict(lambda:defaultdict(dict))
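    # intermediate shape, before flattening the author dicts into lists:
    #   {story_id: {'id': ..., 'title': ..., 'authors': {author_id: {...}}}}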
for row in rs:
stories[row.story.id]['authors'][row.author.id] = model_to_dict(row.author)
stories[row.story.id]['title'] = row.story.title
stories[row.story.id]['id'] = row.story.id
for story in stories.values():
story['authors'] = list(story['authors'].values())
result.append(story)
return jsonify({'result':result})
@mod_routings.route('/api/stories/<int:aid>')
@count
def api_stories_by_id(aid):
story = Stories.select().where(Stories.id==aid).dicts().get()
story['authors'] = list(Authors
.select()
.join(AuthorsStories)
.join(Stories)
.where(Stories.id==aid)
.dicts()
)
return jsonify({'result':story})
|
mit
| 1,332,563,994,120,331,300 | 29.975155 | 97 | 0.601765 | false |
kawamuray/ganeti
|
lib/tools/burnin.py
|
1
|
42637
|
#!/usr/bin/python
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Burnin program
"""
import sys
import optparse
import time
import socket
import urllib
from itertools import izip, islice, cycle
from cStringIO import StringIO
from ganeti import opcodes
from ganeti import constants
from ganeti import cli
from ganeti import errors
from ganeti import utils
from ganeti import hypervisor
from ganeti import compat
from ganeti import pathutils
from ganeti.confd import client as confd_client
from ganeti.runtime import (GetClient)
USAGE = ("\tburnin -o OS_NAME [options...] instance_name ...")
MAX_RETRIES = 3
LOG_HEADERS = {
0: "- ",
1: "* ",
2: "",
}
#: Disk templates supporting a single node
_SINGLE_NODE_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_PLAIN,
constants.DT_FILE,
constants.DT_SHARED_FILE,
constants.DT_EXT,
constants.DT_RBD,
constants.DT_GLUSTER
])
_SUPPORTED_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_DRBD8,
constants.DT_EXT,
constants.DT_FILE,
constants.DT_PLAIN,
constants.DT_RBD,
constants.DT_SHARED_FILE,
constants.DT_GLUSTER
])
#: Disk templates for which import/export is tested
_IMPEXP_DISK_TEMPLATES = (_SUPPORTED_DISK_TEMPLATES - frozenset([
constants.DT_DISKLESS,
constants.DT_FILE,
constants.DT_SHARED_FILE,
constants.DT_GLUSTER
]))
class InstanceDown(Exception):
"""The checked instance was not up"""
class BurninFailure(Exception):
"""Failure detected during burning"""
def Usage():
"""Shows program usage information and exits the program."""
print >> sys.stderr, "Usage:"
print >> sys.stderr, USAGE
sys.exit(2)
def Log(msg, *args, **kwargs):
"""Simple function that prints out its argument.
"""
if args:
msg = msg % args
indent = kwargs.get("indent", 0)
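  # e.g. Log("instance %s", "foo", indent=1) prints "  * instance foo"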
sys.stdout.write("%*s%s%s\n" % (2 * indent, "",
LOG_HEADERS.get(indent, " "), msg))
sys.stdout.flush()
def Err(msg, exit_code=1):
"""Simple error logging that prints to stderr.
"""
sys.stderr.write(msg + "\n")
sys.stderr.flush()
sys.exit(exit_code)
class SimpleOpener(urllib.FancyURLopener):
"""A simple url opener"""
# pylint: disable=W0221
def prompt_user_passwd(self, host, realm, clear_cache=0):
"""No-interaction version of prompt_user_passwd."""
# we follow parent class' API
# pylint: disable=W0613
return None, None
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Custom error handling"""
# make sure sockets are not left in CLOSE_WAIT, this is similar
# but with a different exception to the BasicURLOpener class
_ = fp.read() # throw away data
fp.close()
raise InstanceDown("HTTP error returned: code %s, msg %s" %
(errcode, errmsg))
OPTIONS = [
cli.cli_option("-o", "--os", dest="os", default=None,
help="OS to use during burnin",
metavar="<OS>",
completion_suggest=cli.OPT_COMPL_ONE_OS),
cli.HYPERVISOR_OPT,
cli.OSPARAMS_OPT,
cli.cli_option("--disk-size", dest="disk_size",
help="Disk size (determines disk count)",
default="128m", type="string", metavar="<size,size,...>",
completion_suggest=("128M 512M 1G 4G 1G,256M"
" 4G,1G,1G 10G").split()),
cli.cli_option("--disk-growth", dest="disk_growth", help="Disk growth",
default="128m", type="string", metavar="<size,size,...>"),
cli.cli_option("--mem-size", dest="mem_size", help="Memory size",
default=None, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--maxmem-size", dest="maxmem_size", help="Max Memory size",
default=256, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--minmem-size", dest="minmem_size", help="Min Memory size",
default=128, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--vcpu-count", dest="vcpu_count", help="VCPU count",
default=3, type="unit", metavar="<count>",
completion_suggest=("1 2 3 4").split()),
cli.DEBUG_OPT,
cli.VERBOSE_OPT,
cli.NOIPCHECK_OPT,
cli.NONAMECHECK_OPT,
cli.EARLY_RELEASE_OPT,
cli.cli_option("--no-replace1", dest="do_replace1",
help="Skip disk replacement with the same secondary",
action="store_false", default=True),
cli.cli_option("--no-replace2", dest="do_replace2",
help="Skip disk replacement with a different secondary",
action="store_false", default=True),
cli.cli_option("--no-failover", dest="do_failover",
help="Skip instance failovers", action="store_false",
default=True),
cli.cli_option("--no-migrate", dest="do_migrate",
help="Skip instance live migration",
action="store_false", default=True),
cli.cli_option("--no-move", dest="do_move",
help="Skip instance moves", action="store_false",
default=True),
cli.cli_option("--no-importexport", dest="do_importexport",
help="Skip instance export/import", action="store_false",
default=True),
cli.cli_option("--no-startstop", dest="do_startstop",
help="Skip instance stop/start", action="store_false",
default=True),
cli.cli_option("--no-reinstall", dest="do_reinstall",
help="Skip instance reinstall", action="store_false",
default=True),
cli.cli_option("--no-reboot", dest="do_reboot",
help="Skip instance reboot", action="store_false",
default=True),
cli.cli_option("--no-renamesame", dest="do_renamesame",
help="Skip instance rename to same name", action="store_false",
default=True),
cli.cli_option("--reboot-types", dest="reboot_types",
help="Specify the reboot types", default=None),
cli.cli_option("--no-activate-disks", dest="do_activate_disks",
help="Skip disk activation/deactivation",
action="store_false", default=True),
cli.cli_option("--no-add-disks", dest="do_addremove_disks",
help="Skip disk addition/removal",
action="store_false", default=True),
cli.cli_option("--no-add-nics", dest="do_addremove_nics",
help="Skip NIC addition/removal",
action="store_false", default=True),
cli.cli_option("--no-nics", dest="nics",
help="No network interfaces", action="store_const",
const=[], default=[{}]),
cli.cli_option("--no-confd", dest="do_confd_tests",
help="Skip confd queries",
action="store_false", default=constants.ENABLE_CONFD),
cli.cli_option("--rename", dest="rename", default=None,
help=("Give one unused instance name which is taken"
" to start the renaming sequence"),
metavar="<instance_name>"),
cli.cli_option("-t", "--disk-template", dest="disk_template",
choices=list(_SUPPORTED_DISK_TEMPLATES),
default=constants.DT_DRBD8,
help=("Disk template (default %s, otherwise one of %s)" %
(constants.DT_DRBD8,
utils.CommaJoin(_SUPPORTED_DISK_TEMPLATES)))),
cli.cli_option("-n", "--nodes", dest="nodes", default="",
help=("Comma separated list of nodes to perform"
" the burnin on (defaults to all nodes)"),
completion_suggest=cli.OPT_COMPL_MANY_NODES),
cli.cli_option("-I", "--iallocator", dest="iallocator",
default=None, type="string",
help=("Perform the allocation using an iallocator"
" instead of fixed node spread (node restrictions no"
" longer apply, therefore -n/--nodes must not be"
" used"),
completion_suggest=cli.OPT_COMPL_ONE_IALLOCATOR),
cli.cli_option("-p", "--parallel", default=False, action="store_true",
dest="parallel",
help=("Enable parallelization of some operations in"
" order to speed burnin or to test granular locking")),
cli.cli_option("--net-timeout", default=15, type="int",
dest="net_timeout",
help=("The instance check network timeout in seconds"
" (defaults to 15 seconds)"),
completion_suggest="15 60 300 900".split()),
cli.cli_option("-C", "--http-check", default=False, action="store_true",
dest="http_check",
help=("Enable checking of instance status via http,"
" looking for /hostname.txt that should contain the"
" name of the instance")),
cli.cli_option("-K", "--keep-instances", default=False,
action="store_true",
dest="keep_instances",
help=("Leave instances on the cluster after burnin,"
" for investigation in case of errors or simply"
" to use them")),
cli.REASON_OPT,
]
# Mainly used for bash completion
ARGUMENTS = [cli.ArgInstance(min=1)]
def _DoCheckInstances(fn):
"""Decorator for checking instances.
"""
def wrapper(self, *args, **kwargs):
val = fn(self, *args, **kwargs)
for instance in self.instances:
self._CheckInstanceAlive(instance) # pylint: disable=W0212
return val
return wrapper
def _DoBatch(retry):
"""Decorator for possible batch operations.
Must come after the _DoCheckInstances decorator (if any).
@param retry: whether this is a retryable batch, will be
passed to StartBatch
"""
def wrap(fn):
def batched(self, *args, **kwargs):
self.StartBatch(retry)
val = fn(self, *args, **kwargs)
self.CommitQueue()
return val
return batched
return wrap
class Burner(object):
"""Burner class."""
def __init__(self):
"""Constructor."""
self.url_opener = SimpleOpener()
self._feed_buf = StringIO()
self.nodes = []
self.instances = []
self.to_rem = []
self.queued_ops = []
self.opts = None
self.queue_retry = False
self.disk_count = self.disk_growth = self.disk_size = None
self.hvp = self.bep = None
self.ParseOptions()
self.cl = cli.GetClient()
self.GetState()
def ClearFeedbackBuf(self):
"""Clear the feedback buffer."""
self._feed_buf.truncate(0)
def GetFeedbackBuf(self):
"""Return the contents of the buffer."""
return self._feed_buf.getvalue()
def Feedback(self, msg):
"""Acumulate feedback in our buffer."""
formatted_msg = "%s %s" % (time.ctime(utils.MergeTime(msg[0])), msg[2])
self._feed_buf.write(formatted_msg + "\n")
if self.opts.verbose:
Log(formatted_msg, indent=3)
def MaybeRetry(self, retry_count, msg, fn, *args):
"""Possibly retry a given function execution.
@type retry_count: int
@param retry_count: retry counter:
- 0: non-retryable action
- 1: last retry for a retryable action
- MAX_RETRIES: original try for a retryable action
@type msg: str
@param msg: the kind of the operation
@type fn: callable
@param fn: the function to be called
"""
try:
val = fn(*args)
if retry_count > 0 and retry_count < MAX_RETRIES:
Log("Idempotent %s succeeded after %d retries",
msg, MAX_RETRIES - retry_count)
return val
except Exception, err: # pylint: disable=W0703
if retry_count == 0:
Log("Non-idempotent %s failed, aborting", msg)
raise
elif retry_count == 1:
Log("Idempotent %s repeated failure, aborting", msg)
raise
else:
Log("Idempotent %s failed, retry #%d/%d: %s",
msg, MAX_RETRIES - retry_count + 1, MAX_RETRIES, err)
self.MaybeRetry(retry_count - 1, msg, fn, *args)
def _ExecOp(self, *ops):
"""Execute one or more opcodes and manage the exec buffer.
@return: if only opcode has been passed, we return its result;
otherwise we return the list of results
"""
job_id = cli.SendJob(ops, cl=self.cl)
results = cli.PollJob(job_id, cl=self.cl, feedback_fn=self.Feedback)
if len(ops) == 1:
return results[0]
else:
return results
def ExecOp(self, retry, *ops):
"""Execute one or more opcodes and manage the exec buffer.
@return: if only opcode has been passed, we return its result;
otherwise we return the list of results
"""
if retry:
rval = MAX_RETRIES
else:
rval = 0
cli.SetGenericOpcodeOpts(ops, self.opts)
return self.MaybeRetry(rval, "opcode", self._ExecOp, *ops)
def ExecOrQueue(self, name, ops, post_process=None):
"""Execute an opcode and manage the exec buffer."""
if self.opts.parallel:
cli.SetGenericOpcodeOpts(ops, self.opts)
self.queued_ops.append((ops, name, post_process))
else:
val = self.ExecOp(self.queue_retry, *ops) # pylint: disable=W0142
if post_process is not None:
post_process()
return val
def StartBatch(self, retry):
"""Start a new batch of jobs.
@param retry: whether this is a retryable batch
"""
self.queued_ops = []
self.queue_retry = retry
def CommitQueue(self):
"""Execute all submitted opcodes in case of parallel burnin"""
if not self.opts.parallel or not self.queued_ops:
return
if self.queue_retry:
rval = MAX_RETRIES
else:
rval = 0
try:
results = self.MaybeRetry(rval, "jobset", self.ExecJobSet,
self.queued_ops)
finally:
self.queued_ops = []
return results
def ExecJobSet(self, jobs):
"""Execute a set of jobs and return once all are done.
The method will return the list of results, if all jobs are
successful. Otherwise, OpExecError will be raised from within
cli.py.
"""
self.ClearFeedbackBuf()
jex = cli.JobExecutor(cl=self.cl, feedback_fn=self.Feedback)
for ops, name, _ in jobs:
jex.QueueJob(name, *ops) # pylint: disable=W0142
try:
results = jex.GetResults()
except Exception, err: # pylint: disable=W0703
Log("Jobs failed: %s", err)
raise BurninFailure()
fail = False
val = []
for (_, name, post_process), (success, result) in zip(jobs, results):
if success:
if post_process:
try:
post_process()
except Exception, err: # pylint: disable=W0703
Log("Post process call for job %s failed: %s", name, err)
fail = True
val.append(result)
else:
fail = True
if fail:
raise BurninFailure()
return val
def ParseOptions(self):
"""Parses the command line options.
In case of command line errors, it will show the usage and exit the
program.
"""
parser = optparse.OptionParser(usage="\n%s" % USAGE,
version=("%%prog (ganeti) %s" %
constants.RELEASE_VERSION),
option_list=OPTIONS)
options, args = parser.parse_args()
if len(args) < 1 or options.os is None:
Usage()
if options.mem_size:
options.maxmem_size = options.mem_size
options.minmem_size = options.mem_size
elif options.minmem_size > options.maxmem_size:
Err("Maximum memory lower than minimum memory")
if options.disk_template not in _SUPPORTED_DISK_TEMPLATES:
Err("Unknown or unsupported disk template '%s'" % options.disk_template)
if options.disk_template == constants.DT_DISKLESS:
disk_size = disk_growth = []
options.do_addremove_disks = False
else:
disk_size = [utils.ParseUnit(v) for v in options.disk_size.split(",")]
disk_growth = [utils.ParseUnit(v)
for v in options.disk_growth.split(",")]
if len(disk_growth) != len(disk_size):
Err("Wrong disk sizes/growth combination")
if ((disk_size and options.disk_template == constants.DT_DISKLESS) or
(not disk_size and options.disk_template != constants.DT_DISKLESS)):
Err("Wrong disk count/disk template combination")
self.disk_size = disk_size
self.disk_growth = disk_growth
self.disk_count = len(disk_size)
if options.nodes and options.iallocator:
Err("Give either the nodes option or the iallocator option, not both")
if options.http_check and not options.name_check:
Err("Can't enable HTTP checks without name checks")
self.opts = options
self.instances = args
self.bep = {
constants.BE_MINMEM: options.minmem_size,
constants.BE_MAXMEM: options.maxmem_size,
constants.BE_VCPUS: options.vcpu_count,
}
self.hypervisor = None
self.hvp = {}
if options.hypervisor:
self.hypervisor, self.hvp = options.hypervisor
if options.reboot_types is None:
options.reboot_types = constants.REBOOT_TYPES
else:
options.reboot_types = options.reboot_types.split(",")
rt_diff = set(options.reboot_types).difference(constants.REBOOT_TYPES)
if rt_diff:
Err("Invalid reboot types specified: %s" % utils.CommaJoin(rt_diff))
socket.setdefaulttimeout(options.net_timeout)
def GetState(self):
"""Read the cluster state from the master daemon."""
if self.opts.nodes:
names = self.opts.nodes.split(",")
else:
names = []
try:
qcl = GetClient()
result = qcl.QueryNodes(names, ["name", "offline", "drained"], False)
except errors.GenericError, err:
err_code, msg = cli.FormatError(err)
Err(msg, exit_code=err_code)
finally:
qcl.Close()
self.nodes = [data[0] for data in result if not (data[1] or data[2])]
op_diagnose = opcodes.OpOsDiagnose(output_fields=["name",
"variants",
"hidden"],
names=[])
result = self.ExecOp(True, op_diagnose)
if not result:
Err("Can't get the OS list")
found = False
for (name, variants, _) in result:
if self.opts.os in cli.CalculateOSNames(name, variants):
found = True
break
if not found:
Err("OS '%s' not found" % self.opts.os)
cluster_info = self.cl.QueryClusterInfo()
self.cluster_info = cluster_info
if not self.cluster_info:
Err("Can't get cluster info")
default_nic_params = self.cluster_info["nicparams"][constants.PP_DEFAULT]
self.cluster_default_nicparams = default_nic_params
if self.hypervisor is None:
self.hypervisor = self.cluster_info["default_hypervisor"]
self.hv_can_migrate = \
hypervisor.GetHypervisorClass(self.hypervisor).CAN_MIGRATE
@_DoCheckInstances
@_DoBatch(False)
def BurnCreateInstances(self):
"""Create the given instances.
"""
self.to_rem = []
mytor = izip(cycle(self.nodes),
islice(cycle(self.nodes), 1, None),
self.instances)
Log("Creating instances")
for pnode, snode, instance in mytor:
Log("instance %s", instance, indent=1)
if self.opts.iallocator:
pnode = snode = None
msg = "with iallocator %s" % self.opts.iallocator
elif self.opts.disk_template not in constants.DTS_INT_MIRROR:
snode = None
msg = "on %s" % pnode
else:
msg = "on %s, %s" % (pnode, snode)
Log(msg, indent=2)
op = opcodes.OpInstanceCreate(instance_name=instance,
disks=[{"size": size}
for size in self.disk_size],
disk_template=self.opts.disk_template,
nics=self.opts.nics,
mode=constants.INSTANCE_CREATE,
os_type=self.opts.os,
pnode=pnode,
snode=snode,
start=True,
ip_check=self.opts.ip_check,
name_check=self.opts.name_check,
wait_for_sync=True,
file_driver="loop",
file_storage_dir=None,
iallocator=self.opts.iallocator,
beparams=self.bep,
hvparams=self.hvp,
hypervisor=self.hypervisor,
osparams=self.opts.osparams,
)
remove_instance = lambda name: lambda: self.to_rem.append(name)
self.ExecOrQueue(instance, [op], post_process=remove_instance(instance))
@_DoBatch(False)
def BurnModifyRuntimeMemory(self):
"""Alter the runtime memory."""
Log("Setting instance runtime memory")
for instance in self.instances:
Log("instance %s", instance, indent=1)
tgt_mem = self.bep[constants.BE_MINMEM]
op = opcodes.OpInstanceSetParams(instance_name=instance,
runtime_mem=tgt_mem)
Log("Set memory to %s MB", tgt_mem, indent=2)
self.ExecOrQueue(instance, [op])
@_DoBatch(False)
def BurnGrowDisks(self):
"""Grow both the os and the swap disks by the requested amount, if any."""
Log("Growing disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
for idx, growth in enumerate(self.disk_growth):
if growth > 0:
op = opcodes.OpInstanceGrowDisk(instance_name=instance, disk=idx,
amount=growth, wait_for_sync=True)
Log("increase disk/%s by %s MB", idx, growth, indent=2)
self.ExecOrQueue(instance, [op])
@_DoBatch(True)
def BurnReplaceDisks1D8(self):
"""Replace disks on primary and secondary for drbd8."""
Log("Replacing disks on the same nodes")
early_release = self.opts.early_release
for instance in self.instances:
Log("instance %s", instance, indent=1)
ops = []
for mode in constants.REPLACE_DISK_SEC, constants.REPLACE_DISK_PRI:
op = opcodes.OpInstanceReplaceDisks(instance_name=instance,
mode=mode,
disks=list(range(self.disk_count)),
early_release=early_release)
Log("run %s", mode, indent=2)
ops.append(op)
self.ExecOrQueue(instance, ops)
@_DoBatch(True)
def BurnReplaceDisks2(self):
"""Replace secondary node."""
Log("Changing the secondary node")
mode = constants.REPLACE_DISK_CHG
mytor = izip(islice(cycle(self.nodes), 2, None),
self.instances)
for tnode, instance in mytor:
Log("instance %s", instance, indent=1)
if self.opts.iallocator:
tnode = None
msg = "with iallocator %s" % self.opts.iallocator
else:
msg = tnode
op = opcodes.OpInstanceReplaceDisks(instance_name=instance,
mode=mode,
remote_node=tnode,
iallocator=self.opts.iallocator,
disks=[],
early_release=self.opts.early_release)
Log("run %s %s", mode, msg, indent=2)
self.ExecOrQueue(instance, [op])
@_DoCheckInstances
@_DoBatch(False)
def BurnFailover(self):
"""Failover the instances."""
Log("Failing over instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceFailover(instance_name=instance,
ignore_consistency=False)
self.ExecOrQueue(instance, [op])
@_DoCheckInstances
@_DoBatch(False)
def BurnMove(self):
"""Move the instances."""
Log("Moving instances")
mytor = izip(islice(cycle(self.nodes), 1, None),
self.instances)
for tnode, instance in mytor:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceMove(instance_name=instance,
target_node=tnode)
self.ExecOrQueue(instance, [op])
@_DoBatch(False)
def BurnMigrate(self):
"""Migrate the instances."""
Log("Migrating instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = opcodes.OpInstanceMigrate(instance_name=instance, mode=None,
cleanup=False)
op2 = opcodes.OpInstanceMigrate(instance_name=instance, mode=None,
cleanup=True)
Log("migration and migration cleanup", indent=2)
self.ExecOrQueue(instance, [op1, op2])
@_DoCheckInstances
@_DoBatch(False)
def BurnImportExport(self):
"""Export the instance, delete it, and import it back.
"""
Log("Exporting and re-importing instances")
mytor = izip(cycle(self.nodes),
islice(cycle(self.nodes), 1, None),
islice(cycle(self.nodes), 2, None),
self.instances)
qcl = GetClient()
for pnode, snode, enode, instance in mytor:
Log("instance %s", instance, indent=1)
# read the full name of the instance
((full_name, ), ) = qcl.QueryInstances([instance], ["name"], False)
if self.opts.iallocator:
pnode = snode = None
import_log_msg = ("import from %s"
" with iallocator %s" %
(enode, self.opts.iallocator))
elif self.opts.disk_template not in constants.DTS_INT_MIRROR:
snode = None
import_log_msg = ("import from %s to %s" %
(enode, pnode))
else:
import_log_msg = ("import from %s to %s, %s" %
(enode, pnode, snode))
exp_op = opcodes.OpBackupExport(instance_name=instance,
target_node=enode,
mode=constants.EXPORT_MODE_LOCAL,
shutdown=True)
rem_op = opcodes.OpInstanceRemove(instance_name=instance,
ignore_failures=True)
imp_dir = utils.PathJoin(pathutils.EXPORT_DIR, full_name)
imp_op = opcodes.OpInstanceCreate(instance_name=instance,
disks=[{"size": size}
for size in self.disk_size],
disk_template=self.opts.disk_template,
nics=self.opts.nics,
mode=constants.INSTANCE_IMPORT,
src_node=enode,
src_path=imp_dir,
pnode=pnode,
snode=snode,
start=True,
ip_check=self.opts.ip_check,
name_check=self.opts.name_check,
wait_for_sync=True,
file_storage_dir=None,
file_driver="loop",
iallocator=self.opts.iallocator,
beparams=self.bep,
hvparams=self.hvp,
osparams=self.opts.osparams,
)
erem_op = opcodes.OpBackupRemove(instance_name=instance)
Log("export to node %s", enode, indent=2)
Log("remove instance", indent=2)
Log(import_log_msg, indent=2)
Log("remove export", indent=2)
self.ExecOrQueue(instance, [exp_op, rem_op, imp_op, erem_op])
qcl.Close()
@staticmethod
def StopInstanceOp(instance):
"""Stop given instance."""
return opcodes.OpInstanceShutdown(instance_name=instance)
@staticmethod
def StartInstanceOp(instance):
"""Start given instance."""
return opcodes.OpInstanceStartup(instance_name=instance, force=False)
@staticmethod
def RenameInstanceOp(instance, instance_new):
"""Rename instance."""
return opcodes.OpInstanceRename(instance_name=instance,
new_name=instance_new)
@_DoCheckInstances
@_DoBatch(True)
def BurnStopStart(self):
"""Stop/start the instances."""
Log("Stopping and starting instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2])
@_DoBatch(False)
def BurnRemove(self):
"""Remove the instances."""
Log("Removing instances")
for instance in self.to_rem:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceRemove(instance_name=instance,
ignore_failures=True)
self.ExecOrQueue(instance, [op])
def BurnRename(self):
"""Rename the instances.
Note that this function will not execute in parallel, since we
only have one target for rename.
"""
Log("Renaming instances")
rename = self.opts.rename
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_stop1 = self.StopInstanceOp(instance)
op_stop2 = self.StopInstanceOp(rename)
op_rename1 = self.RenameInstanceOp(instance, rename)
op_rename2 = self.RenameInstanceOp(rename, instance)
op_start1 = self.StartInstanceOp(rename)
op_start2 = self.StartInstanceOp(instance)
self.ExecOp(False, op_stop1, op_rename1, op_start1)
self._CheckInstanceAlive(rename)
self.ExecOp(False, op_stop2, op_rename2, op_start2)
self._CheckInstanceAlive(instance)
@_DoCheckInstances
@_DoBatch(True)
def BurnReinstall(self):
"""Reinstall the instances."""
Log("Reinstalling instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = opcodes.OpInstanceReinstall(instance_name=instance)
Log("reinstall without passing the OS", indent=2)
op3 = opcodes.OpInstanceReinstall(instance_name=instance,
os_type=self.opts.os)
Log("reinstall specifying the OS", indent=2)
op4 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2, op3, op4])
@_DoCheckInstances
@_DoBatch(True)
def BurnReboot(self):
"""Reboot the instances."""
Log("Rebooting instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
ops = []
for reboot_type in self.opts.reboot_types:
op = opcodes.OpInstanceReboot(instance_name=instance,
reboot_type=reboot_type,
ignore_secondaries=False)
Log("reboot with type '%s'", reboot_type, indent=2)
ops.append(op)
self.ExecOrQueue(instance, ops)
@_DoCheckInstances
@_DoBatch(True)
def BurnRenameSame(self):
"""Rename the instances to their own name."""
Log("Renaming the instances to their own name")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = self.RenameInstanceOp(instance, instance)
Log("rename to the same name", indent=2)
op4 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2, op4])
@_DoCheckInstances
@_DoBatch(True)
def BurnActivateDisks(self):
"""Activate and deactivate disks of the instances."""
Log("Activating/deactivating disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_start = self.StartInstanceOp(instance)
op_act = opcodes.OpInstanceActivateDisks(instance_name=instance)
op_deact = opcodes.OpInstanceDeactivateDisks(instance_name=instance)
op_stop = self.StopInstanceOp(instance)
Log("activate disks when online", indent=2)
Log("activate disks when offline", indent=2)
Log("deactivate disks (when offline)", indent=2)
self.ExecOrQueue(instance, [op_act, op_stop, op_act, op_deact, op_start])
@_DoCheckInstances
@_DoBatch(False)
def BurnAddRemoveDisks(self):
"""Add and remove an extra disk for the instances."""
Log("Adding and removing disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_add = opcodes.OpInstanceSetParams(
instance_name=instance,
disks=[(constants.DDM_ADD, {"size": self.disk_size[0]})])
op_rem = opcodes.OpInstanceSetParams(
instance_name=instance, disks=[(constants.DDM_REMOVE, {})])
op_stop = self.StopInstanceOp(instance)
op_start = self.StartInstanceOp(instance)
Log("adding a disk", indent=2)
Log("removing last disk", indent=2)
self.ExecOrQueue(instance, [op_add, op_stop, op_rem, op_start])
@_DoBatch(False)
def BurnAddRemoveNICs(self):
"""Add, change and remove an extra NIC for the instances."""
Log("Adding and removing NICs")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_add = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_ADD, {})])
op_chg = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_MODIFY,
-1, {"mac": constants.VALUE_GENERATE})])
op_rem = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_REMOVE, {})])
Log("adding a NIC", indent=2)
Log("changing a NIC", indent=2)
Log("removing last NIC", indent=2)
self.ExecOrQueue(instance, [op_add, op_chg, op_rem])
def ConfdCallback(self, reply):
"""Callback for confd queries"""
if reply.type == confd_client.UPCALL_REPLY:
if reply.server_reply.status != constants.CONFD_REPL_STATUS_OK:
Err("Query %s gave non-ok status %s: %s" % (reply.orig_request,
reply.server_reply.status,
reply.server_reply))
if reply.orig_request.type == constants.CONFD_REQ_PING:
Log("Ping: OK", indent=1)
elif reply.orig_request.type == constants.CONFD_REQ_CLUSTER_MASTER:
if reply.server_reply.answer == self.cluster_info["master"]:
Log("Master: OK", indent=1)
else:
Err("Master: wrong: %s" % reply.server_reply.answer)
elif reply.orig_request.type == constants.CONFD_REQ_NODE_ROLE_BYNAME:
if reply.server_reply.answer == constants.CONFD_NODE_ROLE_MASTER:
Log("Node role for master: OK", indent=1)
else:
Err("Node role for master: wrong: %s" % reply.server_reply.answer)
def DoConfdRequestReply(self, req):
self.confd_counting_callback.RegisterQuery(req.rsalt)
self.confd_client.SendRequest(req, async=False)
while not self.confd_counting_callback.AllAnswered():
if not self.confd_client.ReceiveReply():
Err("Did not receive all expected confd replies")
break
def BurnConfd(self):
"""Run confd queries for our instances.
The following confd queries are tested:
- CONFD_REQ_PING: simple ping
- CONFD_REQ_CLUSTER_MASTER: cluster master
- CONFD_REQ_NODE_ROLE_BYNAME: node role, for the master
"""
Log("Checking confd results")
filter_callback = confd_client.ConfdFilterCallback(self.ConfdCallback)
counting_callback = confd_client.ConfdCountingCallback(filter_callback)
self.confd_counting_callback = counting_callback
self.confd_client = confd_client.GetConfdClient(counting_callback)
req = confd_client.ConfdClientRequest(type=constants.CONFD_REQ_PING)
self.DoConfdRequestReply(req)
req = confd_client.ConfdClientRequest(
type=constants.CONFD_REQ_CLUSTER_MASTER)
self.DoConfdRequestReply(req)
req = confd_client.ConfdClientRequest(
type=constants.CONFD_REQ_NODE_ROLE_BYNAME,
query=self.cluster_info["master"])
self.DoConfdRequestReply(req)
def _CheckInstanceAlive(self, instance):
"""Check if an instance is alive by doing http checks.
This will try to retrieve the url on the instance /hostname.txt
and check that it contains the hostname of the instance. In case
we get ECONNREFUSED, we retry up to the net timeout seconds, for
any other error we abort.
"""
if not self.opts.http_check:
return
end_time = time.time() + self.opts.net_timeout
url = None
while time.time() < end_time and url is None:
try:
url = self.url_opener.open("http://%s/hostname.txt" % instance)
except IOError:
# here we can have connection refused, no route to host, etc.
time.sleep(1)
if url is None:
raise InstanceDown(instance, "Cannot contact instance")
hostname = url.read().strip()
url.close()
if hostname != instance:
raise InstanceDown(instance, ("Hostname mismatch, expected %s, got %s" %
(instance, hostname)))
def BurninCluster(self):
"""Test a cluster intensively.
This will create instances and then start/stop/failover them.
It is safe for existing instances but could impact performance.
"""
Log("Testing global parameters")
if (len(self.nodes) == 1 and
self.opts.disk_template not in _SINGLE_NODE_DISK_TEMPLATES):
Err("When one node is available/selected the disk template must"
" be one of %s" % utils.CommaJoin(_SINGLE_NODE_DISK_TEMPLATES))
if self.opts.do_confd_tests and not constants.ENABLE_CONFD:
Err("You selected confd tests but confd was disabled at configure time")
has_err = True
try:
self.BurnCreateInstances()
if self.bep[constants.BE_MINMEM] < self.bep[constants.BE_MAXMEM]:
self.BurnModifyRuntimeMemory()
if self.opts.do_replace1 and \
self.opts.disk_template in constants.DTS_INT_MIRROR:
self.BurnReplaceDisks1D8()
if (self.opts.do_replace2 and len(self.nodes) > 2 and
self.opts.disk_template in constants.DTS_INT_MIRROR):
self.BurnReplaceDisks2()
if (self.opts.disk_template in constants.DTS_GROWABLE and
compat.any(n > 0 for n in self.disk_growth)):
self.BurnGrowDisks()
if self.opts.do_failover and \
self.opts.disk_template in constants.DTS_MIRRORED:
self.BurnFailover()
if self.opts.do_migrate:
if self.opts.disk_template not in constants.DTS_MIRRORED:
Log("Skipping migration (disk template %s does not support it)",
self.opts.disk_template)
elif not self.hv_can_migrate:
Log("Skipping migration (hypervisor %s does not support it)",
self.hypervisor)
else:
self.BurnMigrate()
if (self.opts.do_move and len(self.nodes) > 1 and
self.opts.disk_template in [constants.DT_PLAIN, constants.DT_FILE]):
self.BurnMove()
if (self.opts.do_importexport and
self.opts.disk_template in _IMPEXP_DISK_TEMPLATES):
self.BurnImportExport()
if self.opts.do_reinstall:
self.BurnReinstall()
if self.opts.do_reboot:
self.BurnReboot()
if self.opts.do_renamesame:
self.BurnRenameSame()
if self.opts.do_addremove_disks:
self.BurnAddRemoveDisks()
default_nic_mode = self.cluster_default_nicparams[constants.NIC_MODE]
# Don't add/remove nics in routed mode, as we would need an ip to add
# them with
if self.opts.do_addremove_nics:
if default_nic_mode == constants.NIC_MODE_BRIDGED:
self.BurnAddRemoveNICs()
else:
Log("Skipping nic add/remove as the cluster is not in bridged mode")
if self.opts.do_activate_disks:
self.BurnActivateDisks()
if self.opts.rename:
self.BurnRename()
if self.opts.do_confd_tests:
self.BurnConfd()
if self.opts.do_startstop:
self.BurnStopStart()
has_err = False
finally:
if has_err:
Log("Error detected: opcode buffer follows:\n\n")
Log(self.GetFeedbackBuf())
Log("\n\n")
if not self.opts.keep_instances:
try:
self.BurnRemove()
except Exception, err: # pylint: disable=W0703
if has_err: # already detected errors, so errors in removal
# are quite expected
Log("Note: error detected during instance remove: %s", err)
else: # non-expected error
raise
return constants.EXIT_SUCCESS
def Main():
"""Main function.
"""
utils.SetupLogging(pathutils.LOG_BURNIN, sys.argv[0],
debug=False, stderr_logging=True)
return Burner().BurninCluster()
|
gpl-2.0
| -6,059,015,601,982,323,000 | 35.44188 | 80 | 0.594038 | false |
aaaler/k9
|
k9d/mpu6050/examples/6axis_dmp.py
|
1
|
2719
|
# coding=utf-8
from __future__ import print_function
import math
import smbus
import mpu6050
from time import time
# Sensor initialization
mpu = mpu6050.MPU6050(
address=mpu6050.MPU6050.MPU6050_DEFAULT_ADDRESS,
bus=smbus.SMBus(1))
mpu.dmpInitialize()
mpu.setDMPEnabled(True)
# get expected DMP packet size for later comparison
packetSize = mpu.dmpGetFIFOPacketSize()
calibrating = True
t0 = time()
yaw0 = 0
pitch0 = 0
roll0 = 0
ax0 = 0
ay0 = 0
az0 = 0
precision = 100
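# readings are compared after scaling by `precision`, i.e. calibration
# requires two consecutive samples to agree to 1/100 resolution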
def ftoip(v):
return int(precision * v)
def equal(l1, l2):
for k, v1 in enumerate(l1):
v2 = l2[k]
if ftoip(v1) != ftoip(v2):
return False
return True
print "Calibrating..."
while True:
# Get INT_STATUS byte
mpuIntStatus = mpu.getIntStatus()
if mpuIntStatus >= 2: # check for DMP data ready interrupt
# get current FIFO count
fifoCount = mpu.getFIFOCount()
# check for overflow
if fifoCount == 1024:
# reset so we can continue cleanly
mpu.resetFIFO()
print('FIFO overflow!')
# wait for correct available data length, should be a VERY short wait
fifoCount = mpu.getFIFOCount()
while fifoCount < packetSize:
fifoCount = mpu.getFIFOCount()
result = mpu.getFIFOBytes(packetSize)
q = mpu.dmpGetQuaternion(result)
g = mpu.dmpGetGravity(q)
ypr = mpu.dmpGetYawPitchRoll(q, g)
a = mpu.dmpGetAccel(result)
la = mpu.dmpGetLinearAccel(a, g)
laiw = mpu.dmpGetLinearAccelInWorld(a, q)
yaw = ypr['yaw'] * 180 / math.pi # radians to degrees
pitch = ypr['pitch'] * 180 / math.pi
roll = ypr['roll'] * 180 / math.pi
ax = laiw['x'] * 9.8
ay = laiw['y'] * 9.8
az = laiw['z'] * 9.8
# Update timedelta
dt = time() - t0
if calibrating:
if equal(
[yaw, pitch, roll, ax, ay, az, ],
[yaw0, pitch0, roll0, ax0, ay0, az0, ]
):
calibrating = False
print("Calibration done in ", dt, "seconds")
else:
yaw0 = yaw
pitch0 = pitch
roll0 = roll
ax0 = ax
ay0 = ay
az0 = az
print(
"Calibrating:", int(dt), ftoip(yaw), ftoip(ax), ftoip(ay)
)
else:
# Update time only when not calibrating!
t0 = time()
print(t0, dt, yaw, ax, ay)
# track FIFO count here in case there is > 1 packet available
# (this lets us immediately read more without waiting for an
# interrupt)
fifoCount -= packetSize
|
lgpl-3.0
| 2,685,161,083,789,600,000 | 25.656863 | 77 | 0.545421 | false |
vim-scripts/TeX-9
|
ftplugin/tex_nine/evince_dbus.py
|
1
|
5753
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of the Gedit Synctex plugin.
#
# Copyright (C) 2010 Jose Aliste <jose.aliste@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public Licence as published by the Free Software
# Foundation; either version 2 of the Licence, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public Licence for more
# details.
#
# You should have received a copy of the GNU General Public Licence along with
# this program; if not, write to the Free Software Foundation, Inc., 51 Franklin
# Street, Fifth Floor, Boston, MA 02110-1301, USA
############################
#
# Modified for TeX 9
# Elias Toivanen, 2012-04-19
#
############################
import dbus
RUNNING, CLOSED = range(2)
EV_DAEMON_PATH = "/org/gnome/evince/Daemon"
EV_DAEMON_NAME = "org.gnome.evince.Daemon"
EV_DAEMON_IFACE = "org.gnome.evince.Daemon"
EVINCE_PATH = "/org/gnome/evince/Evince"
EVINCE_IFACE = "org.gnome.evince.Application"
EV_WINDOW_IFACE = "org.gnome.evince.Window"
class EvinceWindowProxy:
"""A DBUS proxy for an Evince Window."""
daemon = None
bus = None
def __init__(self, uri, spawn = False, logger = None):
self._log = logger
self.uri = uri
self.spawn = spawn
self.status = CLOSED
self.source_handler = None
self.dbus_name = ''
self._handler = None
try:
if EvinceWindowProxy.bus is None:
EvinceWindowProxy.bus = dbus.SessionBus()
if EvinceWindowProxy.daemon is None:
EvinceWindowProxy.daemon = EvinceWindowProxy.bus.get_object(EV_DAEMON_NAME,
EV_DAEMON_PATH,
follow_name_owner_changes=True)
EvinceWindowProxy.bus.add_signal_receiver(self._on_doc_loaded, signal_name="DocumentLoaded",
dbus_interface = EV_WINDOW_IFACE,
sender_keyword='sender')
self._get_dbus_name(False)
except dbus.DBusException:
if self._log:
self._log.debug("Could not connect to the Evince Daemon")
def _on_doc_loaded(self, uri, **keyargs):
if uri == self.uri and self._handler is None:
self.handle_find_document_reply(keyargs['sender'])
def _get_dbus_name(self, spawn):
EvinceWindowProxy.daemon.FindDocument(self.uri,spawn,
reply_handler=self.handle_find_document_reply,
error_handler=self.handle_find_document_error,
dbus_interface = EV_DAEMON_IFACE)
def handle_find_document_error(self, error):
if self._log:
self._log.debug("FindDocument DBus call has failed")
def handle_find_document_reply(self, evince_name):
if self._handler is not None:
handler = self._handler
else:
handler = self.handle_get_window_list_reply
if evince_name != '':
self.dbus_name = evince_name
self.status = RUNNING
self.evince = EvinceWindowProxy.bus.get_object(self.dbus_name, EVINCE_PATH)
self.evince.GetWindowList(dbus_interface = EVINCE_IFACE,
reply_handler = handler,
error_handler = self.handle_get_window_list_error)
def handle_get_window_list_error (self, e):
if self._log:
self._log.debug("GetWindowList DBus call has failed")
def handle_get_window_list_reply (self, window_list):
if len(window_list) > 0:
window_obj = EvinceWindowProxy.bus.get_object(self.dbus_name, window_list[0])
self.window = dbus.Interface(window_obj,EV_WINDOW_IFACE)
self.window.connect_to_signal("Closed", self.on_window_close)
self.window.connect_to_signal("SyncSource", self.on_sync_source)
else:
            # That should never happen.
if self._log:
self._log.debug("GetWindowList returned empty list")
def set_source_handler (self, source_handler):
self.source_handler = source_handler
def on_window_close(self):
self.window = None
self.status = CLOSED
def on_sync_source(self, input_file, source_link, timestamp):
if self.source_handler is not None:
self.source_handler(input_file, source_link, timestamp)
def SyncView(self, input_file, data, time):
if self.status == CLOSED:
if self.spawn:
                self._tmp_syncview = [input_file, data, time]
self._handler = self._syncview_handler
self._get_dbus_name(True)
else:
self.window.SyncView(input_file, data, time, dbus_interface = "org.gnome.evince.Window")
def _syncview_handler(self, window_list):
self.handle_get_window_list_reply(window_list)
if self.status == CLOSED:
return False
try:
self.window.SyncView(self._tmp_syncview[0],
self._tmp_syncview[1],
self._tmp_syncview[2],
dbus_interface="org.gnome.evince.Window")
del self._tmp_syncview
self._handler = None
return True
except AttributeError:
# When restarting Vim, _tmp_syncview is forgotten
return False
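# Hedged usage sketch (not part of the original plugin): drive one forward
# search from a GLib main loop. The PDF/TeX paths and the (line, column)
# pair are illustrative placeholders, not values taken from TeX 9.
if __name__ == '__main__':
    from dbus.mainloop.glib import DBusGMainLoop
    import gobject

    DBusGMainLoop(set_as_default=True)  # must be set before SessionBus is created
    proxy = EvinceWindowProxy('file:///tmp/paper.pdf', spawn=True)
    # Forward search: ask Evince to highlight the output of paper.tex line 42
    proxy.SyncView('/tmp/paper.tex', (42, 1), 0)
    gobject.MainLoop().run()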
|
gpl-3.0
| 6,099,572,603,743,671,000 | 36.357143 | 105 | 0.591344 | false |
ocordes/arctic
|
doc/source/conf.py
|
1
|
9412
|
# -*- coding: utf-8 -*-
#
# acs-cte documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 20 10:46:26 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Note: 'sphinx.ext.pngmath' and 'sphinx.ext.mathjax' are mutually exclusive
# math renderers, so only one of them may be enabled at a time.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'acs-cte'
copyright = u'2013, Oliver Cordes & Ole Marggraf'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.9.11'
# The full version, including alpha/beta/rc tags.
release = '0.9.11'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'acs-ctedoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'acs-cte.tex', u'acs-cte Documentation',
u'Oliver Cordes \\& Ole Marggraf', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'acs-cte', u'acs-cte Documentation',
[u'Oliver Cordes & Ole Marggraf'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'acs-cte', u'acs-cte Documentation',
u'Oliver Cordes & Ole Marggraf', 'acs-cte', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'acs-cte'
epub_author = u'Oliver Cordes & Ole Marggraf'
epub_publisher = u'Oliver Cordes & Ole Marggraf'
epub_copyright = u'2013, Oliver Cordes & Ole Marggraf'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
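# Hedged example (not part of the original configuration): with the mapping
# above, cross-references such as :py:func:`time.sleep` resolve against the
# linked Python standard library documentation.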
|
lgpl-3.0
| 6,446,165,862,263,057,000 | 31.567474 | 215 | 0.703676 | false |
don-systems/management-system
|
mgmtsystem_nonconformity/__manifest__.py
|
1
|
2251
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Management System - Nonconformity",
"version": "9.0.1.0.0",
"author": "Savoir-faire Linux,Odoo Community Association (OCA)",
"website": "http://www.savoirfairelinux.com",
"license": "AGPL-3",
"category": "Management System",
"depends": [
'mgmtsystem_action',
'document_page_procedure',
],
"data": [
'security/ir.model.access.csv',
'security/mgmtsystem_nonconformity_security.xml',
'views/mgmtsystem_nonconformity.xml',
'views/mgmtsystem_nonconformity_stage.xml',
'views/mgmtsystem_origin.xml',
'views/mgmtsystem_cause.xml',
'views/mgmtsystem_severity.xml',
'views/mgmtsystem_action.xml',
'reports/mgmtsystem_nonconformity_report.xml',
'data/sequence.xml',
'data/mgmtsystem_nonconformity_severity.xml',
'data/mgmtsystem_nonconformity_origin.xml',
'data/mgmtsystem_nonconformity_cause.xml',
'data/mgmtsystem_nonconformity_stage.xml',
'data/mail_message_subtype.xml',
],
"demo": [
"demo/mgmtsystem_nonconformity_origin.xml",
"demo/mgmtsystem_nonconformity_cause.xml",
"demo/mgmtsystem_nonconformity.xml",
],
'installable': False,
}
|
agpl-3.0
| -5,128,717,417,023,612,000 | 39.927273 | 79 | 0.617503 | false |
treycucco/pxp
|
pxp/stdlib/math.py
|
1
|
4465
|
import math
from decimal import Decimal
from pxp.exception import FunctionError
from pxp.function import FunctionArg, FunctionList, InjectedFunction
from pxp.stdlib.types import number_t, boolean_t
def math_abs(resolver, value):
"""Returns the absolute value of value."""
val = resolver.resolve(value)
return val if val >= 0 else -val
def math_ceil(resolver, value):
"""Returns value if value is a whole number, otherwise the next largest whole number."""
val = resolver.resolve(value)
return Decimal(math.ceil(val))
def math_cos(resolver, value):
"""Returns the cosine of value. Value must be in radians."""
val = resolver.resolve(value)
return Decimal(math.cos(val))
def math_degrees(resolver, value):
"""Converts a radians value to degrees."""
val = resolver.resolve(value)
return Decimal(math.degrees(val))
def math_floor(resolver, value):
"""Returns value if value is a whole number, otherwise the next smallest whole number."""
val = resolver.resolve(value)
return Decimal(math.floor(val))
def math_log(resolver, value, base):
  """Returns the log of value. If base is not specified, it defaults to e (natural log)."""
  bval = resolver.resolve(base)
  if bval <= Decimal(0) or bval == Decimal(1):
    raise FunctionError("Invalid log base")
  val = resolver.resolve(value)
  if val <= Decimal(0):
    raise FunctionError("Invalid log value")
  return Decimal(math.log(val, bval))
def math_log10(resolver, value):
"""Returns the log base 10 of value."""
return math_log(resolver, value, Decimal(10))
def math_log2(resolver, value):
"""Returns the log base 2 of value."""
return math_log(resolver, value, Decimal(2))
def math_pow(resolver, value, exp):
"""Returns value raised to exp."""
val = resolver.resolve(value)
xval = resolver.resolve(exp)
return Decimal(math.pow(val, xval))
def math_radians(resolver, value):
"""Converts a degrees value to radians."""
val = resolver.resolve(value)
return Decimal(math.radians(val))
def math_root(resolver, value, root):
"""Returns the nth root of value."""
val = resolver.resolve(value)
rval = resolver.resolve(root)
return Decimal(math.pow(val, Decimal(1) / rval))
def math_round(resolver, value, ndigits):
"""Rounds value to the nearest nth digit.
If ndigits is not specified then value is rounded to the nearest whole number.
"""
val = resolver.resolve(value)
dval = resolver.resolve(ndigits)
return Decimal(round(val, int(dval)))
def math_sin(resolver, value):
"""Returns the sine of value. Value must be in radians."""
val = resolver.resolve(value)
return Decimal(math.sin(val))
def math_sqrt(resolver, value):
"""Returns the square root of value."""
return math_root(resolver, value, Decimal(2))
def math_tan(resolver, value):
"""Returns the tanget of value. Value must be in radians."""
val = resolver.resolve(value)
return Decimal(math.tan(val))
math_functions = FunctionList((
InjectedFunction("math.abs", (FunctionArg(number_t, "value"), ), number_t, math_abs),
InjectedFunction("math.ceil", (FunctionArg(number_t, "value"), ), number_t, math_ceil),
InjectedFunction("math.cos", (FunctionArg(number_t, "value"), ), number_t, math_cos),
InjectedFunction("math.degrees", (FunctionArg(number_t, "value"), ), number_t, math_degrees),
InjectedFunction("math.floor", (FunctionArg(number_t, "value"), ), number_t, math_floor),
InjectedFunction("math.log", (FunctionArg(number_t, "value"), FunctionArg(number_t, "base", Decimal(math.e))), number_t, math_log),
InjectedFunction("math.log10", (FunctionArg(number_t, "value"), ), number_t, math_log10),
InjectedFunction("math.log2", (FunctionArg(number_t, "value"), ), number_t, math_log2),
InjectedFunction("math.pow", (FunctionArg(number_t, "value"), FunctionArg(number_t, "exp")), number_t, math_pow),
InjectedFunction("math.radians", (FunctionArg(number_t, "value"), ), number_t, math_radians),
InjectedFunction("math.root", (FunctionArg(number_t, "value"), FunctionArg(number_t, "root")), number_t, math_root),
InjectedFunction("math.round", (FunctionArg(number_t, "value"), FunctionArg(number_t, "ndigits", Decimal(0))), number_t, math_round),
InjectedFunction("math.sin", (FunctionArg(number_t, "value"), ), number_t, math_sin),
InjectedFunction("math.sqrt", (FunctionArg(number_t, "value"), ), number_t, math_sqrt),
InjectedFunction("math.tan", (FunctionArg(number_t, "value"), ), number_t, math_tan)
))
math_constants = {"math.pi": Decimal(math.pi),
"math.e": Decimal(math.e)}
|
bsd-3-clause
| -4,806,091,900,588,177,000 | 33.882813 | 135 | 0.701904 | false |
kpreid/shinysdr
|
shinysdr/i/network/test_audio_http.py
|
1
|
6035
|
# -*- coding: utf-8 -*-
# Copyright 2018 Kevin Reid and the ShinySDR contributors
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function, unicode_literals
import struct
from twisted.internet import defer
from twisted.internet import reactor as the_reactor
from twisted.internet.protocol import Protocol
from twisted.trial import unittest
from twisted.web.resource import Resource
from twisted.web import client
from shinysdr.i.network.base import SiteWithDefaultHeaders
from shinysdr.i.network.audio_http import AudioStreamResource
from shinysdr.i.pycompat import bytes_or_ascii
from shinysdr.testutil import assert_http_resource_properties, http_head
class TestAudioStreamResource(unittest.TestCase):
# TODO: Have less boilerplate "set up a local web server".
def setUp(self):
tree = Resource()
tree.putChild('mono', AudioStreamResource(_FakeSession(1)))
tree.putChild('stereo', AudioStreamResource(_FakeSession(2)))
self.port = the_reactor.listenTCP(0, SiteWithDefaultHeaders(tree), interface="127.0.0.1") # pylint: disable=no-member
def tearDown(self):
return self.port.stopListening()
def __url(self, path):
return 'http://127.0.0.1:%i%s' % (self.port.getHost().port, path)
def test_common(self):
return assert_http_resource_properties(self, self.__url('/mono?rate=1'), dont_read_entire_body=True)
@defer.inlineCallbacks
def test_head(self):
response = yield http_head(the_reactor, self.__url('/mono?rate=1'))
self.assertEqual(response.headers.getRawHeaders('Content-Type'), ['audio/wav'])
self.assertEqual(response.headers.getRawHeaders('Cache-Control'), ['no-cache, no-store, must-revalidate'])
@defer.inlineCallbacks
def test_get_http_headers(self):
response, prefix_reader = yield get_stream_head(self, self.__url('/stereo?rate=1'))
self.assertEqual(response.headers.getRawHeaders('Content-Type'), ['audio/wav'])
self.assertEqual(response.headers.getRawHeaders('Cache-Control'), ['no-cache, no-store, must-revalidate'])
yield prefix_reader.done
@defer.inlineCallbacks
def test_wav_header_mono_2(self):
_response, prefix_reader = yield get_stream_head(self, self.__url('/mono?rate=22050'))
yield prefix_reader.done
self.assertEqual(prefix_reader.data, _generate_wav_header(sample_rate=22050, channels=1))
@defer.inlineCallbacks
def test_wav_header_stereo_2(self):
_response, prefix_reader = yield get_stream_head(self, self.__url('/stereo?rate=22050'))
yield prefix_reader.done
self.assertEqual(prefix_reader.data, _generate_wav_header(sample_rate=22050, channels=2))
@defer.inlineCallbacks
def test_wav_header_stereo_4(self):
_response, prefix_reader = yield get_stream_head(self, self.__url('/stereo?rate=44100'))
yield prefix_reader.done
self.assertEqual(prefix_reader.data, _generate_wav_header(sample_rate=44100, channels=2))
@defer.inlineCallbacks
def test_bad_options(self):
response = yield http_head(the_reactor, self.__url('/mono?rate=asdf'))
self.assertEqual(response.code, 400)
class _FakeSession(object):
def __init__(self, channels):
self.__channels = channels
def add_audio_callback(self, callback, sample_rate):
pass
def remove_audio_callback(self, callback):
pass
def get_audio_callback_channels(self):
return self.__channels
# TODO: Add this functionality to shinysdr.testutil.http_get
@defer.inlineCallbacks
def get_stream_head(test_case, url):
agent = client.Agent(the_reactor)
response = yield agent.request(
method=b'GET',
uri=bytes_or_ascii(url))
prefix_reader = _PrefixReaderProtocol()
response.deliverBody(prefix_reader)
defer.returnValue((response, prefix_reader))
class _PrefixReaderProtocol(Protocol):
def __init__(self):
self.data = b''
self.done = defer.Deferred()
def dataReceived(self, data):
self.data += data
self.transport.loseConnection()
def connectionLost(self, reason=None):
self.done.callback(None)
def _generate_wav_header(sample_rate, channels):
    # This was originally a copy of the code under test. The point of it being
    # a copy is that as the test and the tested code evolve they may eventually
    # become different due to their differing usage patterns, and if so that
    # makes a better test than reusing the same generator in both places. Or at
    # least, that's what I'm telling myself right now.
fake_max_size = 2 ** 32 - 1
number_size = 4 # 32-bit float
riff_header_chunk = struct.pack('<4sI4s',
b'RIFF',
fake_max_size,
b'WAVE')
audio_format_chunk = struct.pack('<4sIHHIIHH',
b'fmt ',
16, # this chunk size
3, # float format
channels, # number of channels interleaved in a block
sample_rate, # sample rate per channel / block rate
channels * sample_rate * number_size, # byte rate
channels * number_size, # bytes per block
number_size * 8) # bits per sample
incomplete_data_chunk = struct.pack('<4sI', b'data', fake_max_size)
return riff_header_chunk + audio_format_chunk + incomplete_data_chunk
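# Hedged aside (not part of the test module): the three chunks above are
# 12 + 24 + 8 = 44 bytes, the canonical size of a PCM/float WAV header.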
|
gpl-3.0
| 1,066,103,399,301,635,100 | 38.966887 | 346 | 0.688815 | false |
Mzero2010/MaxZone
|
plugin.video.Mzero/channels/tengourl.py
|
1
|
1956
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# Mzero - XBMC Plugin
# Channel to watch a video from its URL
# http://blog.tvalacarta.info/plugin-xbmc/Mzero/
#------------------------------------------------------------
from core import config
from core import logger
from core import scrapertools
from core import servertools
from core.item import Item
DEBUG = config.get_setting("debug")
def mainlist(item):
logger.info("[tengourl.py] mainlist")
itemlist = []
itemlist.append( Item(channel=item.channel, action="search", title="Entra aquí y teclea la URL [Enlace a servidor online/descarga]"))
itemlist.append( Item(channel=item.channel, action="search", title="Entra aquí y teclea la URL [Enlace directo a un vídeo]"))
itemlist.append( Item(channel=item.channel, action="search", title="Entra aquí y teclea la URL [Búsqueda de enlaces en una url]"))
return itemlist
# Because the function is named "search", the launcher prompts for a text to search and passes it in as a parameter
def search(item,texto):
logger.info("[tengourl.py] search texto="+texto)
if not texto.startswith("http://"):
texto = "http://"+texto
itemlist = []
if "servidor" in item.title:
itemlist = servertools.find_video_items(data=texto)
for item in itemlist:
item.channel="tengourl"
item.action="play"
elif "directo" in item.title:
itemlist.append( Item(channel=item.channel, action="play", url=texto, server="directo", title="Ver enlace directo"))
else:
data = scrapertools.downloadpage(texto)
itemlist = servertools.find_video_items(data=data)
for item in itemlist:
item.channel="tengourl"
item.action="play"
if len(itemlist)==0:
itemlist.append( Item(channel=item.channel, action="search", title="No hay ningún vídeo compatible en esa URL"))
return itemlist
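# Hedged sketch (not part of the channel): how the launcher might drive this
# channel's search for the "Direct link" entry; the call is illustrative only.
#   item = Item(channel="tengourl", action="search", title="... [Direct link to a video]")
#   itemlist = search(item, "example.com/video.mp4")  # "http://" gets prepended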
|
gpl-3.0
| -6,197,162,499,388,738,000 | 35.018519 | 137 | 0.63599 | false |
noironetworks/horizon
|
openstack_dashboard/dashboards/project/volume_groups/forms.py
|
1
|
7723
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard.api import cinder
class UpdateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length=255, label=_("Name"))
description = forms.CharField(max_length=255,
widget=forms.Textarea(attrs={'rows': 4}),
label=_("Description"),
required=False)
    def clean(self):
        cleaned_data = super(UpdateForm, self).clean()
        new_desc = cleaned_data.get('description')
        old_desc = self.initial['description']
        if old_desc and not new_desc:
            error_msg = _("Description is required.")
            self._errors['description'] = self.error_class([error_msg])
        return cleaned_data
def handle(self, request, data):
group_id = self.initial['group_id']
try:
cinder.group_update(request, group_id,
data['name'],
data['description'])
message = _('Updating volume group "%s"') % data['name']
messages.info(request, message)
return True
except Exception:
redirect = reverse("horizon:project:volume_groups:index")
exceptions.handle(request,
_('Unable to update volume group.'),
redirect=redirect)
class RemoveVolsForm(forms.SelfHandlingForm):
def handle(self, request, data):
group_id = self.initial['group_id']
name = self.initial['name']
search_opts = {'group_id': group_id}
try:
# get list of assigned volumes
assigned_vols = []
volumes = cinder.volume_list(request,
search_opts=search_opts)
for volume in volumes:
assigned_vols.append(volume.id)
cinder.group_update(request, group_id,
remove_volumes=assigned_vols)
message = _('Removing volumes from volume group "%s"') % name
messages.info(request, message)
return True
except Exception:
redirect = reverse("horizon:project:volume_groups:index")
exceptions.handle(request,
_('Errors occurred in removing volumes '
'from group.'),
redirect=redirect)
class DeleteForm(forms.SelfHandlingForm):
delete_volumes = forms.BooleanField(label=_("Delete Volumes"),
required=False)
def handle(self, request, data):
group_id = self.initial['group_id']
name = self.initial['name']
delete_volumes = data['delete_volumes']
try:
cinder.group_delete(request, group_id,
delete_volumes=delete_volumes)
message = _('Deleting volume group "%s"') % name
messages.success(request, message)
return True
except Exception:
redirect = reverse("horizon:project:volume_groups:index")
exceptions.handle(request, _('Errors occurred in deleting group.'),
redirect=redirect)
class CreateSnapshotForm(forms.SelfHandlingForm):
name = forms.CharField(max_length=255, label=_("Snapshot Name"))
description = forms.CharField(max_length=255,
widget=forms.Textarea(attrs={'rows': 4}),
label=_("Description"),
required=False)
def handle(self, request, data):
group_id = self.initial['group_id']
try:
message = _('Creating group snapshot "%s".') \
% data['name']
snapshot = cinder.group_snapshot_create(request,
group_id,
data['name'],
data['description'])
messages.info(request, message)
return snapshot
except Exception as e:
redirect = reverse("horizon:project:volume_groups:index")
msg = _('Unable to create group snapshot.')
            if getattr(e, 'code', None) == 413:
msg = _('Requested snapshot would exceed the allowed quota.')
else:
search_opts = {'group_id': group_id}
volumes = cinder.volume_list(request,
search_opts=search_opts)
if len(volumes) == 0:
msg = _('Unable to create snapshot. '
'group must contain volumes.')
exceptions.handle(request,
msg,
redirect=redirect)
class CloneGroupForm(forms.SelfHandlingForm):
name = forms.CharField(max_length=255, label=_("Group Name"))
description = forms.CharField(max_length=255,
widget=forms.Textarea(attrs={'rows': 4}),
label=_("Description"),
required=False)
group_source = forms.ChoiceField(
label=_("Use a group as source"),
widget=forms.ThemableSelectWidget(
attrs={'class': 'image-selector'},
            data_attrs=('name',),
transform=lambda x: "%s" % (x.name)),
required=False)
def prepare_group_source_field(self, request):
try:
group_id = self.initial['group_id']
group = cinder.group_get(request, group_id)
self.fields['group_source'].choices = ((group_id, group),)
except Exception:
exceptions.handle(request,
_('Unable to load the specified group.'))
def __init__(self, request, *args, **kwargs):
super(CloneGroupForm, self).__init__(request, *args, **kwargs)
self.prepare_group_source_field(request)
def handle(self, request, data):
group_id = self.initial['group_id']
try:
            message = _('Creating volume group "%s".') % data['name']
group = cinder.group_create_from_source(
request,
data['name'],
source_group_id=group_id,
description=data['description'])
messages.info(request, message)
return group
except Exception:
redirect = reverse("horizon:project:volume_groups:index")
msg = _('Unable to clone group.')
search_opts = {'group_id': group_id}
volumes = cinder.volume_list(request, search_opts=search_opts)
if len(volumes) == 0:
msg = _('Unable to clone empty group.')
exceptions.handle(request,
msg,
redirect=redirect)
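# Hedged usage sketch (not part of the module): a view handing initial data to
# UpdateForm; the view class and its get_object() helper are illustrative only.
#
#     class UpdateView(forms.ModalFormView):
#         form_class = UpdateForm
#
#         def get_initial(self):
#             group = self.get_object()
#             return {'group_id': group.id,
#                     'name': group.name,
#                     'description': group.description}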
|
apache-2.0
| 5,956,005,950,523,606,000 | 38.005051 | 79 | 0.533083 | false |
fifengine/fifengine-demos
|
shooter/scripts/ships/enemies.py
|
1
|
6693
|
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2013 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from fife import fife
from scripts.ships.shipbase import *
from scripts.common.baseobject import *
from fife.fife import FloatRect as Rect
from scripts.weapons import *
from fife.extensions import fife_timer
class EnemyActionListener(ShipActionListener):
def __init__(self, ship):
super(EnemyActionListener, self).__init__(ship)
def onInstanceActionFinished(self, instance, action):
super(EnemyActionListener, self).onInstanceActionFinished(instance, action)
def onInstanceActionCancelled(self, instance, action):
pass
class BossActionListener(ShipActionListener):
def __init__(self, ship):
super(BossActionListener, self).__init__(ship)
def onInstanceActionFinished(self, instance, action):
super(BossActionListener, self).onInstanceActionFinished(instance, action)
if action.getId() == 'explode':
			self.delayed = fife_timer.delayCall(5000, self._ship.endLevel)  # pass the callable; calling it here would end the level immediately
def onInstanceActionCancelled(self, instance, action):
pass
class Saucer1(Ship):
def __init__(self, scene, name, instance, findInstance=True):
super(Saucer1, self).__init__(scene, name, findInstance)
self.instance = instance
self._type = SHTR_ENEMYSHIP
self._dir = 0
self._time = 500
self.width = 0.2
self.height = 0.075
self.velocity.x = -0.5
self.weapon = Cannon(self._scene, self, 1000)
self.weapon.projectilevelocity = 0.4
self._actionlistener = EnemyActionListener(self)
self.hitpoints = 1
self.scorevalue = 50
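	# Zig-zag: update() below flips the vertical thrust every 1000 ms while the
	# ship drifts left, giving the saucer its sawtooth flight path.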
def update(self):
if self._dir == 1:
self.applyThrust(fife.DoublePoint(0,-0.5))
elif self._dir == 0:
self.applyThrust(fife.DoublePoint(0,0.5))
if self._time >= 1000:
if self._dir == 1:
self._dir = 0
elif self._dir == 0:
self._dir = 1
self._time = 0
self._time += self._scene.timedelta
super(Saucer1, self).update()
self.fire(fife.DoublePoint(-1,0))
class Saucer2(Ship):
def __init__(self, scene, name, instance, findInstance=True):
super(Saucer2, self).__init__(scene, name, findInstance)
self.instance = instance
self._type = SHTR_ENEMYSHIP
self._dir = 0
self._time = 1000
self.width = 0.2
self.height = 0.2
self.velocity.x = -0.1
self.weapon = Cannon(self._scene, self, 2000)
self.weapon.projectilevelocity = 0.4
self._actionlistener = EnemyActionListener(self)
self.hitpoints = 2
self.scorevalue = 100
def applyHit(self, hp):
self.flash(1)
super(Saucer2, self).applyHit(hp)
def update(self):
if self._dir == 1:
self.applyThrust(fife.DoublePoint(0,-0.25))
elif self._dir == 0:
self.applyThrust(fife.DoublePoint(0,0.25))
if self._time >= 2000:
if self._dir == 1:
self._dir = 0
elif self._dir == 0:
self._dir = 1
self._time = 0
self._time += self._scene.timedelta
super(Saucer2, self).update()
self.fire(fife.DoublePoint(-1,0))
class DiagSaucer(Ship):
def __init__(self, scene, name, direction, instance, findInstance=True):
super(DiagSaucer, self).__init__(scene, name, findInstance)
self.instance = instance
self._type = SHTR_ENEMYSHIP
self.width = 0.2
self.height = 0.075
if direction == 0:
self._ythrust = 0.25
else:
self._ythrust = -0.25
self.weapon = Cannon(self._scene, self, 2000)
self.weapon.projectilevelocity = 0.4
self._actionlistener = EnemyActionListener(self)
self.hitpoints = 1
self.scorevalue = 50
def update(self):
self.applyThrust(fife.DoublePoint(-0.25,self._ythrust))
super(DiagSaucer, self).update()
self.fire(fife.DoublePoint(-1,0))
class Streaker(Ship):
def __init__(self, scene, name, instance, findInstance=True):
super(Streaker, self).__init__(scene, name, findInstance)
self.instance = instance
self._type = SHTR_ENEMYSHIP
self.width = 0.2
self.height = 0.2
self._maxvelocity = 2.0
self.weapon = FireBall(self._scene, self, 2000)
self.weapon.projectilevelocity = 0.25
self._actionlistener = EnemyActionListener(self)
self.hitpoints = 2
self.scorevalue = 150
def applyHit(self, hp):
self.flash(1)
super(Streaker, self).applyHit(hp)
def update(self):
self.applyThrust(fife.DoublePoint(-0.40,0))
super(Streaker, self).update()
playerloc = self._scene.player.location.getExactLayerCoordinates()
enemyloc = self.location.getExactLayerCoordinates()
playerloc.x -= enemyloc.x
playerloc.y -= enemyloc.y
self.fire(fife.DoublePoint(playerloc.x,playerloc.y))
class Boss(Ship):
def __init__(self, scene, name, instance, findInstance=True):
super(Boss, self).__init__(scene, name, findInstance)
self.instance = instance
self._type = SHTR_LASTBOSS
self.width = 0.85
self.height = 0.25
self._maxvelocity = 2.0
self.weapon = FireBall(self._scene, self, 1000)
self.weapon.projectilevelocity = 0.5
self._actionlistener = BossActionListener(self)
self.hitpoints = 30
self.scorevalue = 1000
self._explodclip = self._scene.soundmanager.createEmitter("sounds/bossexplode.ogg")
def endLevel(self):
self._scene.endLevel()
def update(self):
super(Boss, self).update()
playerloc = self._scene.player.location.getExactLayerCoordinates()
bossloc = self.location.getExactLayerCoordinates()
playerloc.x -= bossloc.x
playerloc.y -= bossloc.y
self.fire(fife.DoublePoint(playerloc.x,playerloc.y))
def applyHit(self, hp):
self.flash(2)
super(Boss, self).applyHit(hp)
if self.hitpoints == 20:
self.weapon = FireBallBurst(self._scene, self, 2000, 100, 10)
self.weapon.lastfired = self._scene.time
elif self.hitpoints == 10:
self.weapon = FireBallSpread(self._scene, self, 2000)
self.weapon.lastfired = self._scene.time
|
lgpl-2.1
| 8,287,204,922,065,141,000 | 26.097166 | 85 | 0.676827 | false |
NitinBhaskar/mbed
|
workspace_tools/tests.py
|
1
|
39585
|
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from workspace_tools.paths import *
from workspace_tools.data.support import *
TEST_CMSIS_LIB = join(TEST_DIR, "cmsis", "lib")
TEST_MBED_LIB = join(TEST_DIR, "mbed", "env")
PERIPHERALS = join(TEST_DIR, "peripherals")
BENCHMARKS_DIR = join(TEST_DIR, "benchmarks")
SD = join(TEST_DIR, "sd")
TMP102 = join(PERIPHERALS, 'TMP102')
"""
Wiring:
* Ground:
* LPC1*: p1
* KL25Z: GND
* Vout
* LPC1*: p40
* KL25Z: P3V3
* TMP102 (I2C):
* LPC1*: (SDA=p28 , SCL=p27)
* KL25Z: (SDA=PTC9, SCL=PTC8)
* MAXWSNENV: (SDA=TP6, SCL=TP5)
* digital_loop (Digital(In|Out|InOut), InterruptIn):
* Arduino headers: (D0 <-> D7)
* LPC1549: (D2 <-> D7)
* LPC1*: (p5 <-> p25 )
* KL25Z: (PTA5<-> PTC6)
* NUCLEO_F103RB: (PC_6 <-> PB_8)
* MAXWSNENV: (TP3 <-> TP4)
* port_loop (Port(In|Out|InOut)):
* Arduino headers: (D0 <-> D7), (D1 <-> D6)
* LPC1*: (p5 <-> p25), (p6 <-> p26)
* KL25Z: (PTA5 <-> PTC6), (PTA4 <-> PTC5)
* NUCLEO_F103RB: (PC_6 <-> PB_8), (PC_5 <-> PB_9)
* MAXWSNENV: (TP1 <-> TP3), (TP2 <-> TP4)
* analog_loop (AnalogIn, AnalogOut):
* Arduino headers: (A0 <-> A5)
* LPC1549: (A0 <-> D12)
* LPC1*: (p17 <-> p18 )
* KL25Z: (PTE30 <-> PTC2)
* analog_pot (AnalogIn):
* Arduino headers: (A0, A1)
* SD (SPI):
* LPC1*: (mosi=p11 , miso=p12 , sclk=p13 , cs=p14 )
* KL25Z: (mosi=PTD2, miso=PTD3, sclk=PTD1, cs=PTD0)
* MMA7660 (I2C):
* LPC1*: (SDA=p28 , SCL=p27)
* i2c_loop:
* LPC1768: (p28 <-> p9), (p27 <-> p10)
* i2c_eeprom:
* LPC1*: (SDA=p28 , SCL=p27)
* KL25Z: (SDA=PTE0, SCL=PTE1)
"""
TESTS = [
# Automated MBED tests
{
"id": "MBED_A1", "description": "Basic",
"source_dir": join(TEST_DIR, "mbed", "basic"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
},
{
"id": "MBED_A2", "description": "Semihost file system",
"source_dir": join(TEST_DIR, "mbed", "file"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC2368", "LPC11U24"]
},
{
"id": "MBED_A3", "description": "C++ STL",
"source_dir": join(TEST_DIR, "mbed", "stl"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": False,
},
{
"id": "MBED_A4", "description": "I2C TMP102",
"source_dir": join(TEST_DIR, "mbed", "i2c_TMP102"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, TMP102],
"automated": True,
"peripherals": ["TMP102"]
},
{
"id": "MBED_A5", "description": "DigitalIn DigitalOut",
"source_dir": join(TEST_DIR, "mbed", "digitalin_digitalout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"peripherals": ["digital_loop"]
},
{
"id": "MBED_A6", "description": "DigitalInOut",
"source_dir": join(TEST_DIR, "mbed", "digitalinout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"peripherals": ["digital_loop"]
},
{
"id": "MBED_A7", "description": "InterruptIn",
"source_dir": join(TEST_DIR, "mbed", "interruptin"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
"peripherals": ["digital_loop"]
},
{
"id": "MBED_A8", "description": "Analog",
"source_dir": join(TEST_DIR, "mbed", "analog"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"peripherals": ["analog_loop"],
"mcu": ["LPC1768", "LPC2368", "KL25Z", "K64F", "K22F", "LPC4088", "LPC1549",
"NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_F302R8", "NUCLEO_F303RE",
"NUCLEO_F334R8", "NUCLEO_L053R8", "NUCLEO_L073RZ", "NUCLEO_L152RE",
"NUCLEO_F411RE","DISCO_F407VG","ARCH_MAX"]
},
{
"id": "MBED_A9", "description": "Serial Echo at 115200",
"source_dir": join(TEST_DIR, "mbed", "echo"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test": "echo"
},
{
"id": "MBED_A10", "description": "PortOut PortIn",
"source_dir": join(TEST_DIR, "mbed", "portout_portin"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["port_loop"],
"supported": DEFAULT_SUPPORT,
"automated": True,
},
{
"id": "MBED_A11", "description": "PortInOut",
"source_dir": join(TEST_DIR, "mbed", "portinout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["port_loop"],
"supported": DEFAULT_SUPPORT,
"automated": True,
},
{
"id": "MBED_A12", "description": "SD File System",
"source_dir": join(TEST_DIR, "mbed", "sd"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
{
"id": "MBED_A13", "description": "I2C MMA7660 accelerometer",
"source_dir": join(TEST_DIR, "mbed", "i2c_MMA7660"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA7660')],
"automated": True,
"peripherals": ["MMA7660"]
},
{
"id": "MBED_A14", "description": "I2C Master",
"source_dir": join(TEST_DIR, "mbed", "i2c_master"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A15", "description": "I2C Slave",
"source_dir": join(TEST_DIR, "mbed", "i2c_slave"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A16", "description": "SPI Master",
"source_dir": join(TEST_DIR, "mbed", "spi_master"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A17", "description": "SPI Slave",
"source_dir": join(TEST_DIR, "mbed", "spi_slave"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A18", "description": "Interrupt vector relocation",
"source_dir": join(TEST_DIR, "mbed", "vtor_reloc"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
"mcu": ["LPC1768"],
"automated": True,
},
{
"id": "MBED_A19", "description": "I2C EEPROM read/write test",
"source_dir": join(TEST_DIR, "mbed", "i2c_eeprom"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["24LC256"],
"automated": True,
"duration": 15,
},
{
"id": "MBED_A20", "description": "I2C master/slave test",
"source_dir": join(TEST_DIR, "mbed", "i2c_master_slave"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
"mcu": ["LPC1768", "RZ_A1H"],
"peripherals": ["i2c_loop"]
},
{
"id": "MBED_A21", "description": "Call function before main (mbed_main)",
"source_dir": join(TEST_DIR, "mbed", "call_before_main"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
},
{
"id": "MBED_A22", "description": "SPIFI for LPC4088 (test 1)",
"source_dir": join(TEST_DIR, "mbed", "spifi1"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"duration": 30,
"mcu": ["LPC4088","LPC4088_DM"]
},
{
"id": "MBED_A23", "description": "SPIFI for LPC4088 (test 2)",
"source_dir": join(TEST_DIR, "mbed", "spifi2"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"duration": 30,
"mcu": ["LPC4088","LPC4088_DM"]
},
{
"id": "MBED_A24", "description": "Serial echo with RTS/CTS flow control",
"source_dir": join(TEST_DIR, "mbed", "echo_flow_control"),
"dependencies": [MBED_LIBRARIES],
"automated": "True",
"host_test": "echo_flow_control",
"mcu": ["LPC1768"],
"peripherals": ["extra_serial"]
},
{
"id": "MBED_A25", "description": "I2C EEPROM line read/write test",
"source_dir": join(TEST_DIR, "mbed", "i2c_eeprom_line"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["24LC256"],
"automated": True,
"duration": 10,
},
{
"id": "MBED_A26", "description": "AnalogIn potentiometer test",
"source_dir": join(TEST_DIR, "mbed", "analog_pot"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["analog_pot"],
"automated": True,
"duration": 10,
},
{
"id": "MBED_BLINKY", "description": "Blinky",
"source_dir": join(TEST_DIR, "mbed", "blinky"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": False,
},
{
"id": "MBED_BUS", "description": "Blinky BUS",
"source_dir": join(TEST_DIR, "mbed", "bus"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": False,
"duration": 15,
},
{
"id": "MBED_BUSOUT", "description": "BusOut",
"source_dir": join(TEST_DIR, "mbed", "bus_out"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"duration": 15,
},
# Size benchmarks
{
"id": "BENCHMARK_1", "description": "Size (c environment)",
"source_dir": join(BENCHMARKS_DIR, "cenv"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_2", "description": "Size (float math)",
"source_dir": join(BENCHMARKS_DIR, "float_math"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_3", "description": "Size (printf)",
"source_dir": join(BENCHMARKS_DIR, "printf"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_4", "description": "Size (mbed libs)",
"source_dir": join(BENCHMARKS_DIR, "mbed"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_5", "description": "Size (all)",
"source_dir": join(BENCHMARKS_DIR, "all"),
"dependencies": [MBED_LIBRARIES]
},
# performance related tests
{
"id": "PERF_1", "description": "SD Stdio R/W Speed",
"source_dir": join(TEST_DIR, "mbed", "sd_perf_stdio"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
{
"id": "PERF_2", "description": "SD FileHandle R/W Speed",
"source_dir": join(TEST_DIR, "mbed", "sd_perf_fhandle"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
{
"id": "PERF_3", "description": "SD FatFS R/W Speed",
"source_dir": join(TEST_DIR, "mbed", "sd_perf_fatfs"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
# Not automated MBED tests
{
"id": "MBED_1", "description": "I2C SRF08",
"source_dir": join(TEST_DIR, "mbed", "i2c_SRF08"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'SRF08')],
"peripherals": ["SRF08"]
},
{
"id": "MBED_2", "description": "stdio",
"source_dir": join(TEST_DIR, "mbed", "stdio"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 20,
"automated": True,
#"host_test": "stdio_auto"
},
{
"id": "MBED_3", "description": "PortOut",
"source_dir": join(TEST_DIR, "mbed", "portout"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_4", "description": "Sleep",
"source_dir": join(TEST_DIR, "mbed", "sleep"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 30,
"mcu": ["LPC1768", "LPC11U24", "LPC4088","LPC4088_DM","NRF51822", "LPC11U68"]
},
{
"id": "MBED_5", "description": "PWM",
"source_dir": join(TEST_DIR, "mbed", "pwm"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
},
{
"id": "MBED_6", "description": "SW Reset",
"source_dir": join(TEST_DIR, "mbed", "reset"),
"dependencies": [MBED_LIBRARIES],
"duration": 15
},
{
"id": "MBED_7", "description": "stdio benchmark",
"source_dir": join(TEST_DIR, "mbed", "stdio_benchmark"),
"dependencies": [MBED_LIBRARIES],
"duration": 40
},
{
"id": "MBED_8", "description": "SPI",
"source_dir": join(TEST_DIR, "mbed", "spi"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_9", "description": "Sleep Timeout",
"source_dir": join(TEST_DIR, "mbed", "sleep_timeout"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_10", "description": "Hello World",
"source_dir": join(TEST_DIR, "mbed", "hello"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test": "hello_auto",
},
{
"id": "MBED_11", "description": "Ticker Int",
"source_dir": join(TEST_DIR, "mbed", "ticker"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test": "wait_us_auto",
"duration": 20,
},
{
"id": "MBED_12", "description": "C++",
"source_dir": join(TEST_DIR, "mbed", "cpp"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True
},
{
"id": "MBED_13", "description": "Heap & Stack",
"source_dir": join(TEST_DIR, "mbed", "heap_and_stack"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
},
{
"id": "MBED_14", "description": "Serial Interrupt",
"source_dir": join(TEST_DIR, "mbed", "serial_interrupt"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
},
{
"id": "MBED_15", "description": "RPC",
"source_dir": join(TEST_DIR, "mbed", "rpc"),
"dependencies": [MBED_LIBRARIES, join(LIB_DIR, "rpc"), TEST_MBED_LIB],
"automated": False,
"mcu": ["LPC1768"]
},
{
"id": "MBED_16", "description": "RTC",
"source_dir": join(TEST_DIR, "mbed", "rtc"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test": "rtc_auto",
"duration": 15
},
{
"id": "MBED_17", "description": "Serial Interrupt 2",
"source_dir": join(TEST_DIR, "mbed", "serial_interrupt_2"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_18", "description": "Local FS Directory",
"source_dir": join(TEST_DIR, "mbed", "dir"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_19", "description": "SD FS Directory",
"source_dir": join(TEST_DIR, "mbed", "dir_sd"),
"dependencies": [MBED_LIBRARIES, FS_LIBRARY],
"peripherals": ["SD"]
},
{
"id": "MBED_20", "description": "InterruptIn 2",
"source_dir": join(TEST_DIR, "mbed", "interruptin_2"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_21", "description": "freopen Stream",
"source_dir": join(TEST_DIR, "mbed", "freopen"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_22", "description": "Semihost",
"source_dir": join(TEST_DIR, "mbed", "semihost"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC2368", "LPC11U24"]
},
{
"id": "MBED_23", "description": "Ticker Int us",
"source_dir": join(TEST_DIR, "mbed", "ticker_2"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
{
"id": "MBED_24", "description": "Timeout Int us",
"source_dir": join(TEST_DIR, "mbed", "timeout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
{
"id": "MBED_25", "description": "Time us",
"source_dir": join(TEST_DIR, "mbed", "time_us"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
{
"id": "MBED_26", "description": "Integer constant division",
"source_dir": join(TEST_DIR, "mbed", "div"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
},
{
"id": "MBED_27", "description": "SPI ADXL345",
"source_dir": join(TEST_DIR, "mbed", "spi_ADXL345"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'ADXL345')],
"peripherals": ["ADXL345"]
},
{
"id": "MBED_28", "description": "Interrupt chaining (InterruptManager)",
"source_dir": join(TEST_DIR, "mbed", "interrupt_chaining"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
},
{
"id": "MBED_29", "description": "CAN network test",
"source_dir": join(TEST_DIR, "mbed", "can"),
"dependencies": [MBED_LIBRARIES],
"mcu": ["LPC1768", "LPC4088", "LPC1549"]
},
{
"id": "MBED_30", "description": "CAN network test using interrupts",
"source_dir": join(TEST_DIR, "mbed", "can_interrupt"),
"dependencies": [MBED_LIBRARIES],
"mcu": ["LPC1768", "LPC4088", "LPC1549"]
},
{
"id": "MBED_31", "description": "PWM LED test",
"source_dir": join(TEST_DIR, "mbed", "pwm_led"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_32", "description": "Pin toggling",
"source_dir": join(TEST_DIR, "mbed", "pin_toggling"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_33", "description": "C string operations",
"source_dir": join(TEST_DIR, "mbed", "cstring"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 10,
"automated": False,
},
{
"id": "MBED_34", "description": "Ticker Two callbacks",
"source_dir": join(TEST_DIR, "mbed", "ticker_3"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
# CMSIS RTOS tests
{
"id": "CMSIS_RTOS_1", "description": "Basic",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "basic"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
{
"id": "CMSIS_RTOS_2", "description": "Mutex",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "mutex"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_3", "description": "Semaphore",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "semaphore"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_4", "description": "Signals",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "signals"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
{
"id": "CMSIS_RTOS_5", "description": "Queue",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "queue"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_6", "description": "Mail",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "mail"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_7", "description": "Timer",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "timer"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
{
"id": "CMSIS_RTOS_8", "description": "ISR",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "isr"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
# mbed RTOS tests
{
"id": "RTOS_1", "description": "Basic thread",
"source_dir": join(TEST_DIR, "rtos", "mbed", "basic"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto",
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_2", "description": "Mutex resource lock",
"source_dir": join(TEST_DIR, "rtos", "mbed", "mutex"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 20,
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_3", "description": "Semaphore resource lock",
"source_dir": join(TEST_DIR, "rtos", "mbed", "semaphore"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 20,
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_4", "description": "Signals messaging",
"source_dir": join(TEST_DIR, "rtos", "mbed", "signals"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_5", "description": "Queue messaging",
"source_dir": join(TEST_DIR, "rtos", "mbed", "queue"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_6", "description": "Mail messaging",
"source_dir": join(TEST_DIR, "rtos", "mbed", "mail"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_7", "description": "Timer",
"source_dir": join(TEST_DIR, "rtos", "mbed", "timer"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto",
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_8", "description": "ISR (Queue)",
"source_dir": join(TEST_DIR, "rtos", "mbed", "isr"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8",
"NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC",
"DISCO_F401VC", "NUCLEO_F303RE", "MAXWSNENV"],
},
{
"id": "RTOS_9", "description": "SD File write-read",
"source_dir": join(TEST_DIR, "rtos", "mbed", "file"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"peripherals": ["SD"],
"mcu": ["LPC1768", "LPC11U24", "LPC812", "KL25Z",
"KL05Z", "K64F", "KL46Z", "RZ_A1H",
"DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "NUCLEO_F401RE"],
},
# Networking Tests
{
"id": "NET_1", "description": "TCP client hello world",
"source_dir": join(TEST_DIR, "net", "helloworld", "tcpclient"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"duration": 15,
"automated": True,
"peripherals": ["ethernet"],
},
{
"id": "NET_2", "description": "NIST Internet Time Service",
"source_dir": join(TEST_DIR, "net", "helloworld", "udpclient"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"duration": 15,
"automated": True,
"peripherals": ["ethernet"],
},
{
"id": "NET_3", "description": "TCP echo server",
"source_dir": join(TEST_DIR, "net", "echo", "tcp_server"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test" : "tcpecho_server_auto",
"peripherals": ["ethernet"],
},
{
"id": "NET_4", "description": "TCP echo client",
"source_dir": join(TEST_DIR, "net", "echo", "tcp_client"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test": "tcpecho_client_auto",
"peripherals": ["ethernet"]
},
{
"id": "NET_5", "description": "UDP echo server",
"source_dir": join(TEST_DIR, "net", "echo", "udp_server"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test" : "udpecho_server_auto",
"peripherals": ["ethernet"]
},
{
"id": "NET_6", "description": "UDP echo client",
"source_dir": join(TEST_DIR, "net", "echo", "udp_client"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test" : "udpecho_client_auto",
"peripherals": ["ethernet"],
},
{
"id": "NET_7", "description": "HTTP client hello world",
"source_dir": join(TEST_DIR, "net", "protocols", "HTTPClient_HelloWorld"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
"duration": 15,
"peripherals": ["ethernet"],
},
{
"id": "NET_8", "description": "NTP client",
"source_dir": join(TEST_DIR, "net", "protocols", "NTPClient_HelloWorld"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
"peripherals": ["ethernet"],
},
{
"id": "NET_9", "description": "Multicast Send",
"source_dir": join(TEST_DIR, "net", "helloworld", "multicast_send"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_10", "description": "Multicast Receive",
"source_dir": join(TEST_DIR, "net", "helloworld", "multicast_receive"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_11", "description": "Broadcast Send",
"source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_send"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_12", "description": "Broadcast Receive",
"source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_receive"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_13", "description": "TCP client echo loop",
"source_dir": join(TEST_DIR, "net", "echo", "tcp_client_loop"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
"duration": 15,
#"host_test": "tcpecho_client_auto",
"peripherals": ["ethernet"],
},
{
"id": "NET_14", "description": "UDP PHY/Data link layer",
"source_dir": join(TEST_DIR, "net", "echo", "udp_link_layer"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"automated": False,
"duration": 20,
"host_test": "udp_link_layer_auto",
"peripherals": ["ethernet"],
},
# u-blox tests
{
"id": "UB_1", "description": "u-blox USB modem: HTTP client",
"source_dir": [join(TEST_DIR, "net", "cellular", "http", "ubloxusb"), join(TEST_DIR, "net", "cellular", "http", "common")],
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
"supported": CORTEX_ARM_SUPPORT,
},
{
"id": "UB_2", "description": "u-blox USB modem: SMS test",
"source_dir": [join(TEST_DIR, "net", "cellular", "sms", "ubloxusb"), join(TEST_DIR, "net", "cellular", "sms", "common")],
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
"supported": CORTEX_ARM_SUPPORT,
},
# USB Tests
{
"id": "USB_1", "description": "Mouse",
"source_dir": join(TEST_DIR, "usb", "device", "basic"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_2", "description": "Keyboard",
"source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_3", "description": "Mouse_Keyboard",
"source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_4", "description": "Serial Port",
"source_dir": join(TEST_DIR, "usb", "device", "serial"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
"supported": CORTEX_ARM_SUPPORT,
},
{
"id": "USB_5", "description": "Generic HID",
"source_dir": join(TEST_DIR, "usb", "device", "raw_hid"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_6", "description": "MIDI",
"source_dir": join(TEST_DIR, "usb", "device", "midi"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_7", "description": "AUDIO",
"source_dir": join(TEST_DIR, "usb", "device", "audio"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
# CMSIS DSP
{
"id": "CMSIS_DSP_1", "description": "FIR",
"source_dir": join(TEST_DIR, "dsp", "cmsis", "fir_f32"),
"dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
},
# mbed DSP
{
"id": "DSP_1", "description": "FIR",
"source_dir": join(TEST_DIR, "dsp", "mbed", "fir_f32"),
"dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
},
# KL25Z
{
"id": "KL25Z_1", "description": "LPTMR",
"source_dir": join(TEST_DIR, "KL25Z", "lptmr"),
"dependencies": [MBED_LIBRARIES],
"supported": CORTEX_ARM_SUPPORT,
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_2", "description": "PIT",
"source_dir": join(TEST_DIR, "KL25Z", "pit"),
"dependencies": [MBED_LIBRARIES],
"supported": CORTEX_ARM_SUPPORT,
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_3", "description": "TSI Touch Sensor",
"source_dir": join(TEST_DIR, "mbed", "tsi"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'TSI')],
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_4", "description": "RTC",
"source_dir": join(TEST_DIR, "KL25Z", "rtc"),
"dependencies": [MBED_LIBRARIES],
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_5", "description": "MMA8451Q accelerometer",
"source_dir": join(TEST_DIR, "mbed", "i2c_MMA8451Q"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA8451Q')],
"mcu": ["KL25Z", "KL05Z", "KL46Z", "K20D50M"],
"automated": True,
"duration": 15,
},
# Examples
{
"id": "EXAMPLE_1", "description": "/dev/null",
"source_dir": join(TEST_DIR, "mbed", "dev_null"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test" : "dev_null_auto",
},
{
"id": "EXAMPLE_2", "description": "FS + RTOS",
"source_dir": join(TEST_DIR, "mbed", "fs"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
},
# CPPUTEST Library provides Unit testing Framework
#
# To write TESTs and TEST_GROUPs, please add CPPUTEST_LIBRARY to 'dependencies'
#
# This will also include:
# 1. test runner - main function with call to CommandLineTestRunner::RunAllTests(ac, av)
# 2. Serial console object to print test results on the serial port console
#
# Unit testing with cpputest library
{
"id": "UT_1", "description": "Basic",
"source_dir": join(TEST_DIR, "utest", "basic"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_2", "description": "Semihost file system",
"source_dir": join(TEST_DIR, "utest", "semihost_fs"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
"mcu": ["LPC1768", "LPC2368", "LPC11U24"]
},
{
"id": "UT_3", "description": "General tests",
"source_dir": join(TEST_DIR, "utest", "general"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_BUSIO", "description": "BusIn BusOut",
"source_dir": join(TEST_DIR, "utest", "bus"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
# Tests used for target information purposes
{
"id": "DTCT_1", "description": "Simple detect test",
"source_dir": join(TEST_DIR, "mbed", "detect"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test" : "detect_auto",
},
]
# Group tests with the same goals into categories
GROUPS = {
"core": ["MBED_A1", "MBED_A2", "MBED_A3", "MBED_A18"],
"digital_io": ["MBED_A5", "MBED_A6", "MBED_A7", "MBED_A10", "MBED_A11"],
"analog_io": ["MBED_A8"],
"i2c": ["MBED_A19", "MBED_A20"],
"spi": ["MBED_A12"],
}
GROUPS["rtos"] = [test["id"] for test in TESTS if test["id"].startswith("RTOS_")]
GROUPS["net"] = [test["id"] for test in TESTS if test["id"].startswith("NET_")]
GROUPS["automated"] = [test["id"] for test in TESTS if test.get("automated", False)]
# Look for 'TEST_GROUPS' in private_settings.py and update the GROUPS dictionary
# with the information in test_groups if found
try:
from workspace_tools.private_settings import TEST_GROUPS
except:
TEST_GROUPS = {}
GROUPS.update(TEST_GROUPS)
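# A minimal sketch of what private_settings.py could define (the group name
# below is hypothetical; the test ids are taken from the TESTS list above):
#
#     TEST_GROUPS = {
#         "nightly": ["MBED_A1", "RTOS_1", "NET_1"],
#     }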
class Test:
DEFAULTS = {
#'mcu': None,
'description': None,
'dependencies': None,
'duration': 10,
'host_test': 'host_test',
'automated': False,
'peripherals': None,
#'supported': None,
'source_dir': None,
'extra_files': None
}
def __init__(self, n):
self.n = n
self.__dict__.update(Test.DEFAULTS)
self.__dict__.update(TESTS[n])
def is_supported(self, target, toolchain):
if hasattr(self, 'mcu') and not target in self.mcu:
return False
if not hasattr(self, 'supported'):
return True
return (target in self.supported) and (toolchain in self.supported[target])
def get_description(self):
if self.description:
return self.description
else:
return self.id
def __cmp__(self, other):
return cmp(self.n, other.n)
def __str__(self):
return "[%3d] %s: %s" % (self.n, self.id, self.get_description())
def __getitem__(self, key):
if key == "id": return self.id
elif key == "mcu": return self.mcu
elif key == "dependencies": return self.dependencies
elif key == "description": return self.description
elif key == "duration": return self.duration
elif key == "host_test": return self.host_test
elif key == "automated": return self.automated
elif key == "peripherals": return self.peripherals
elif key == "supported": return self.supported
elif key == "source_dir": return self.source_dir
elif key == "extra_files": return self.extra_files
else:
return None
TEST_MAP = dict([(test['id'], Test(i)) for i, test in enumerate(TESTS)])
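# Minimal usage sketch of the Test wrapper (the target and toolchain strings
# are illustrative assumptions; any id from the TESTS list works as the key):
#
#     t = TEST_MAP['RTOS_1']
#     if t.is_supported('LPC1768', 'ARM'):
#         print(t)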
|
apache-2.0
| -1,732,097,689,878,460,400 | 36.34434 | 131 | 0.538285 | false |
semiautomaticgit/SemiAutomaticClassificationPlugin
|
semiautomaticclassificationplugin.py
|
1
|
86616
|
# -*- coding: utf-8 -*-
'''
/**************************************************************************************************************************
SemiAutomaticClassificationPlugin
The Semi-Automatic Classification Plugin for QGIS allows for the supervised classification of remote sensing images,
providing tools for the download, preprocessing and postprocessing of images.
-------------------
begin : 2012-12-29
copyright : (C) 2012-2021 by Luca Congedo
email : ing.congedoluca@gmail.com
**************************************************************************************************************************/
/**************************************************************************************************************************
*
* This file is part of Semi-Automatic Classification Plugin
*
* Semi-Automatic Classification Plugin is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software Foundation,
* version 3 of the License.
*
* Semi-Automatic Classification Plugin is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Semi-Automatic Classification Plugin. If not, see <http://www.gnu.org/licenses/>.
*
**************************************************************************************************************************/
'''
global PluginCheck
PluginCheck = 'Yes'
import os
import sys
try:
from .core import config as cfg
except:
PluginCheck = 'No'
# try importing different path
from PyQt5.QtCore import QSettings
rK = QSettings()
mPythonSettings = rK.value(cfg.regPythonModulesPathSettings, str(cfg.PythonModulesPathSettings))
if len(mPythonSettings) > 0:
for ppS in mPythonSettings.split(';'):
if len(ppS) > 0:
sys.path.insert(1, ppS)
import platform
import inspect
import shutil
import time
import datetime
import subprocess
import numpy as np
import urllib
import requests
import ssl
import smtplib
import gc
from http.cookiejar import CookieJar
import itertools
import zipfile
import tarfile
import base64
import random
import re
import xml.etree.cElementTree as ET
from xml.dom import minidom
import json
import hashlib
import ctypes
import shlex
from collections import Counter
import multiprocessing as mp
try:
mp.set_start_method('spawn')
except:
pass
from multiprocessing import Pool, Manager
# Import the PyQt libraries
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import Qt, QObject, QFileInfo, QSettings, QDir, QDate, QVariant, pyqtSignal
from PyQt5.QtWidgets import QApplication, QTreeWidgetItem
from PyQt5.QtNetwork import QNetworkRequest
# Import the QGIS libraries
import qgis.core as qgisCore
import qgis.gui as qgisGui
import qgis.utils as qgisUtils
from osgeo import gdal
from osgeo import ogr
from osgeo import osr
# Initialize Qt ui
from .ui.resources_rc import *
from .ui.ui_semiautomaticclassificationplugin import Ui_SemiAutomaticClassificationPlugin
from .ui.ui_semiautomaticclassificationplugin_welcome import Ui_SCP_Welcome
from .ui.semiautomaticclassificationplugindialog import SemiAutomaticClassificationPluginDialog
from .ui.semiautomaticclassificationplugindialog import SpectralSignatureDialog
from .ui.semiautomaticclassificationplugindialog import WelcomeDialog
from .ui.semiautomaticclassificationplugindialog import ScatterPlotDialog
from .ui.semiautomaticclassificationplugindialog import DockClassDialog
# Import plugin version
from .__init__ import version as semiautomaticclassVersion
# required by other modules
cfg.QObjectSCP = QObject
cfg.pyqtSignalSCP = pyqtSignal
if PluginCheck == 'Yes':
try:
from .core.messages import Messages as msgs
from .core.utils import Utils
from .core.signature_importer import Signature_Importer
from .maininterface.downloadproductpointer import DownloadProductPointer
from .maininterface.downloadproducts import DownloadProducts
from .spectralsignature.spectralsignatureplot import SpectralSignaturePlot
from .spectralsignature.scatter_plot import Scatter_Plot
from .dock.manualroi import ManualROI
from .dock.regionroi import RegionROI
from .dock.scpdock import SCPDock
from .dock.classificationpreview import ClassificationPreview
from .maininterface.multipleroiTab import MultipleROITab
from .spectralsignature.usgs_spectral_lib import USGS_Spectral_Lib
from .maininterface.landsatTab import LandsatTab
from .maininterface.asterTab import ASTERTab
from .maininterface.modisTab import MODISTab
from .maininterface.sentinel1Tab import Sentinel1Tab
from .maininterface.sentinel2Tab import Sentinel2Tab
from .maininterface.sentinel3Tab import Sentinel3Tab
from .maininterface.GOESTab import GOESTab
from .maininterface.accuracy import Accuracy
from .maininterface.crossclassificationTab import CrossClassification
from .maininterface.bandcombination import BandCombination
from .maininterface.splitTab import SplitTab
from .maininterface.reprojectrasterbands import ReprojectRasterBands
from .maininterface.pcaTab import PcaTab
from .maininterface.clusteringTab import ClusteringTab
from .maininterface.classSignatureTab import ClassSignatureTab
from .maininterface.zonalStatRasterTab import ZonalStatRasterTab
from .maininterface.vectortorasterTab import VectorToRasterTab
from .maininterface.bandsetTab import BandsetTab
from .maininterface.algorithmWeightTab import AlgWeightTab
from .maininterface.signatureThresholdTab import SigThresholdTab
from .maininterface.LCSignatureThresholdTab import LCSigThresholdTab
from .maininterface.rgblistTab import RGBListTab
from .maininterface.bandsetlistTab import BandSetListTab
from .maininterface.LCSignaturePixel import LCSigPixel
from .maininterface.LCSignaturePixel2 import LCSigPixel2
from .maininterface.bandcalcTab import BandCalcTab
from .maininterface.batchTab import BatchTab
from .maininterface.clipmultiplerasters import ClipMultipleRasters
from .maininterface.stackrasterbands import StackRasterBands
from .maininterface.mosaicbandsets import MosaicBandSets
from .maininterface.cloudmasking import CloudMasking
from .maininterface.spectraldistancebandsets import SpectralDistanceBandsets
from .maininterface.randomForestTab import ClassRandomForestTab
from .maininterface.editraster import EditRaster
from .maininterface.sieveTab import SieveRaster
from .maininterface.erosionTab import ErosionRaster
from .maininterface.dilationTab import DilationRaster
from .maininterface.neighborpixelsTab import NeighborPixels
from .maininterface.clipmultiplerasterspointer import ClipMultiplerastersPointer
from .maininterface.landcoverchange import LandCoverChange
from .maininterface.classreportTab import ClassReportTab
from .maininterface.classificationTab import ClassificationTab
from .maininterface.classtovectorTab import ClassToVectorTab
from .maininterface.reclassificationTab import ReclassificationTab
from .maininterface.settings import Settings
from .core.input import Input
from .ui.ui_utils import Ui_Utils
except:
PluginCheck = 'No'
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Please restart QGIS to run the Semi-Automatic Classification Plugin'), level=qgisCore.Qgis.Info)
try:
import scipy.stats.distributions as statdistr
from scipy.spatial.distance import cdist
from scipy import signal
from scipy.ndimage import label
from scipy.cluster.vq import vq, kmeans, whiten
cfg.scipyCheck = 'Yes'
except:
cfg.scipyCheck = 'No'
try:
from matplotlib.ticker import MaxNLocator
import matplotlib.pyplot as mplplt
import matplotlib.colors as mplcolors
cfg.matplotlibCheck = 'Yes'
except Exception as err:
cfg.testMatplotlibV = err
cfg.matplotlibCheck = 'No'
class SemiAutomaticClassificationPlugin:
def __init__(self, iface):
try:
cfg.osSCP = os
cfg.sysSCP = sys
cfg.platformSCP = platform
cfg.shutilSCP = shutil
cfg.inspectSCP = inspect
cfg.timeSCP = time
cfg.datetimeSCP = datetime
cfg.subprocessSCP = subprocess
cfg.urllibSCP = urllib
cfg.requestsSCP = requests
cfg.itertoolsSCP = itertools
cfg.zipfileSCP = zipfile
cfg.tarfileSCP = tarfile
cfg.base64SCP = base64
cfg.randomSCP = random
cfg.QtCoreSCP = QtCore
cfg.QtGuiSCP = QtGui
cfg.QtWidgetsSCP = QtWidgets
cfg.QTreeWidgetItemSCP = QTreeWidgetItem
cfg.QNetworkRequestSCP = QNetworkRequest
cfg.QtSCP = Qt
cfg.QVariantSCP = QVariant
cfg.QFileInfoSCP = QFileInfo
cfg.QSettingsSCP = QSettings
cfg.QDirSCP = QDir
cfg.QDateSCP = QDate
cfg.qgisCoreSCP = qgisCore
cfg.qgisGuiSCP = qgisGui
cfg.gdalSCP = gdal
cfg.ogrSCP = ogr
cfg.osrSCP = osr
cfg.sslSCP = ssl
cfg.smtplibSCP = smtplib
cfg.CookieJarSCP = CookieJar
cfg.gcSCP = gc
cfg.reSCP = re
cfg.ETSCP = ET
cfg.minidomSCP = minidom
cfg.jsonSCP = json
cfg.hashlibSCP = hashlib
cfg.ctypesSCP = ctypes
cfg.shlexSCP = shlex
cfg.counterSCP = Counter
cfg.multiPSCP = mp
cfg.poolSCP = Pool
cfg.MultiManagerSCP = Manager
except:
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Please restart QGIS to run the Semi-Automatic Classification Plugin'), level=qgisCore.Qgis.Info)
return
try:
cfg.np = np
except:
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Error. Check Python Numpy installation for the Semi-Automatic Classification Plugin'), level=qgisCore.Qgis.Critical)
try:
if cfg.scipyCheck == 'Yes':
cfg.statdistrSCP = statdistr
cfg.cdistSCP = cdist
cfg.signalSCP = signal
cfg.labelSCP = label
cfg.vqSCP = vq
cfg.kmeansSCP = kmeans
cfg.whitenSCP = whiten
if cfg.matplotlibCheck == 'Yes':
cfg.MaxNLocatorSCP = MaxNLocator
cfg.mplpltSCP = mplplt
cfg.mplcolorsSCP = mplcolors
except:
pass
if cfg.scipyCheck == 'No':
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Error. Check Python Scipy installation for the Semi-Automatic Classification Plugin'), level=qgisCore.Qgis.Critical)
if cfg.matplotlibCheck == 'No':
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Error. Check Python Matplotlib installation for the Semi-Automatic Classification Plugin'), level=qgisCore.Qgis.Critical)
if PluginCheck == 'Yes':
# reference to QGIS interface
cfg.iface = iface
# reference to map canvas
cfg.cnvs = iface.mapCanvas()
# create the dialog
cfg.dlg = SemiAutomaticClassificationPluginDialog()
# reference to ui
cfg.ui = cfg.dlg.ui
# class dock dialog
cfg.dockclassdlg = DockClassDialog(cfg.iface.mainWindow(), cfg.iface)
# reference dock class ui
cfg.uidc = cfg.dockclassdlg.ui
# welcome dialog
cfg.welcomedlg = WelcomeDialog()
# spectral signature plot dialog
cfg.spectralplotdlg = SpectralSignatureDialog()
cfg.uisp = cfg.spectralplotdlg.ui
# scatter plot dialog
cfg.scatterplotdlg = ScatterPlotDialog()
cfg.uiscp = cfg.scatterplotdlg.ui
cfg.mx = msgs(cfg.iface)
cfg.utls = Utils()
cfg.SCPD = SCPDock()
cfg.classPrev = ClassificationPreview(cfg.cnvs)
cfg.spSigPlot = SpectralSignaturePlot()
cfg.scaPlT = Scatter_Plot()
cfg.multiROI = MultipleROITab()
cfg.usgsLib = USGS_Spectral_Lib()
cfg.acc = Accuracy()
cfg.crossC = CrossClassification()
cfg.bsComb = BandCombination()
cfg.splitT = SplitTab()
cfg.rprjRstBndsT = ReprojectRasterBands()
cfg.pcaT = PcaTab()
cfg.clusteringT = ClusteringTab()
cfg.classSigT = ClassSignatureTab()
cfg.znlSttRstT = ZonalStatRasterTab()
cfg.vctRstrT = VectorToRasterTab()
cfg.bst = BandsetTab()
cfg.algWT = AlgWeightTab()
cfg.signT = SigThresholdTab()
cfg.LCSignT = LCSigThresholdTab()
cfg.RGBLT = RGBListTab()
cfg.bstLT = BandSetListTab()
cfg.bCalc = BandCalcTab()
cfg.batchT = BatchTab()
cfg.clipMulti = ClipMultipleRasters()
cfg.stackRstr = StackRasterBands()
cfg.mosaicBS = MosaicBandSets()
cfg.cloudMsk = CloudMasking()
cfg.spclDstBS = SpectralDistanceBandsets()
cfg.rndmFrst = ClassRandomForestTab()
cfg.editRstr = EditRaster()
cfg.sieveRstr = SieveRaster()
cfg.ersnRstr = ErosionRaster()
cfg.dltnRstr = DilationRaster()
cfg.clssNghbr = NeighborPixels()
cfg.downProd = DownloadProducts()
cfg.landsatT = LandsatTab()
cfg.ASTERT = ASTERTab()
cfg.MODIST = MODISTab()
cfg.sentinel1T = Sentinel1Tab()
cfg.sentinel2T = Sentinel2Tab()
cfg.sentinel3T = Sentinel3Tab()
cfg.goesT = GOESTab()
cfg.landCC = LandCoverChange()
cfg.classRep = ClassReportTab()
cfg.classTab = ClassificationTab()
cfg.classVect = ClassToVectorTab()
cfg.reclassification = ReclassificationTab()
cfg.sigImport = Signature_Importer()
cfg.mnlROI = ManualROI(cfg.cnvs)
cfg.regionROI = RegionROI(cfg.cnvs)
cfg.dwnlPrdPnt = DownloadProductPointer(cfg.cnvs)
cfg.clipMultiP = ClipMultiplerastersPointer(cfg.cnvs)
cfg.LCSPixel = LCSigPixel(cfg.cnvs)
cfg.LCSPixel2 = LCSigPixel2(cfg.cnvs)
cfg.sets = Settings()
cfg.uiUtls = Ui_Utils()
cfg.ipt = Input()
# connect when map is clicked
cfg.mnlROI.rightClicked.connect(cfg.SCPD.clckR)
cfg.mnlROI.leftClicked.connect(cfg.SCPD.clckL)
cfg.mnlROI.moved.connect(cfg.SCPD.movedPointer)
cfg.regionROI.ROIleftClicked.connect(cfg.SCPD.pointerClickROI)
cfg.regionROI.ROIrightClicked.connect(cfg.SCPD.pointerRightClickROI)
cfg.regionROI.moved.connect(cfg.SCPD.movedPointer)
cfg.clipMultiP.leftClicked.connect(cfg.clipMulti.pointerLeftClick)
cfg.clipMultiP.rightClicked.connect(cfg.clipMulti.pointerRightClick)
cfg.dwnlPrdPnt.leftClicked.connect(cfg.downProd.pointerLeftClick)
cfg.dwnlPrdPnt.rightClicked.connect(cfg.downProd.pointerRightClick)
cfg.classPrev.leftClicked.connect(cfg.SCPD.pointerClickPreview)
cfg.classPrev.rightClicked.connect(cfg.SCPD.pointerRightClickPreview)
cfg.LCSPixel.MaprightClicked.connect(cfg.LCSignT.pointerLeftClick)
cfg.LCSPixel.MapleftClicked.connect(cfg.LCSignT.pointerLeftClick)
cfg.LCSPixel2.MaprightClicked.connect(cfg.spSigPlot.pointerLeftClick)
cfg.LCSPixel2.MapleftClicked.connect(cfg.spSigPlot.pointerLeftClick)
# system variables
cfg.utls.findSystemSpecs()
cfg.utls.readVariables()
# set font
try:
f, s, i = cfg.utls.readQGISVariableFont()
font = cfg.QtGuiSCP.QFont()
font.setFamily(f)
font.setPointSize(int(s))
cfg.dlg.setFont(font)
cfg.ui.menu_treeWidget.setFont(font)
except:
pass
# initialize plugin directory
cfg.plgnDir = cfg.QFileInfoSCP(cfg.qgisCoreSCP.QgsApplication.qgisUserDatabaseFilePath()).path() + '/python/plugins/' + str(__name__).split('.')[0]
# locale name
lclNm = cfg.QSettingsSCP().value('locale/userLocale')[0:2]
self.registryKeys()
if len(cfg.PythonPathSettings) > 0:
mp.set_executable(cfg.PythonPathSettings)
# temporary directory
tmpDir = cfg.utls.getTempDirectory()
cfg.ui.temp_directory_label.setText(tmpDir)
# log file path
cfg.logFile = cfg.tmpDir.replace('//', '/') + '/__0semiautomaticclass.log'
# locale
lclPth = ''
if cfg.QFileInfoSCP(cfg.plgnDir).exists():
lclPth = cfg.plgnDir + '/i18n/semiautomaticclassificationplugin_' + lclNm + '.qm'
if cfg.QFileInfoSCP(lclPth).exists():
trnsltr = cfg.QtCoreSCP.QTranslator()
trnsltr.load(lclPth)
if cfg.QtCoreSCP.qVersion() > '4.3.3':
cfg.QtCoreSCP.QCoreApplication.installTranslator(trnsltr)
# info
cfg.sysSCPInfo = str(' SemiAutomaticClass ' + semiautomaticclassVersion() + ' - QGIS v. ' + str(cfg.QGISVer) + ' L:' + lclNm + ' - OS ' + str(cfg.sysSCPNm) + ' - 64bit =' + cfg.sysSCP64bit)
# multiprocess Windows
if cfg.sysSCPNm == 'Windows':
mp.set_executable(os.path.join(sys.exec_prefix, 'pythonw.exe'))
# Mac OS
elif cfg.sysSCPNm == 'Darwin':
dPref = os.environ['PATH'].split(':')
for flPref in dPref:
flPrefPy = os.path.join(flPref, 'python3')
# first pass: use any python3 executable found on the PATH
if os.path.isfile(flPrefPy):
mp.set_executable(flPrefPy)
cfg.sysSCPInfo = cfg.sysSCPInfo + ' - python path =' + flPrefPy
# second test
if 'library' in flPref.lower():
if os.path.isfile(flPrefPy):
mp.set_executable(flPrefPy)
cfg.sysSCPInfo = cfg.sysSCPInfo + ' - python path =' + flPrefPy
break
# GDAL config
try:
cfg.gdalSCP.SetConfigOption('GDAL_NUM_THREADS', str(cfg.threads))
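# cap the GDAL block cache at roughly 30% of the configured RAM value
# (assuming cfg.RAMValue is expressed in megabytes)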
cfg.gdalSCP.SetCacheMax(int(cfg.RAMValue * 0.3 * 1000000))
cfg.gdalSCP.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'TRUE')
cfg.gdalSCP.SetConfigOption('GDAL_CACHEMAX', '4')
cfg.gdalSCP.SetConfigOption('VSI_CACHE', 'FALSE')
except:
pass
# read registry keys
def registryKeys(self):
''' registry keys '''
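# each readRegistryKeys call returns the value stored for the given key,
# falling back to the second argument when the key is unset (inferred from usage)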
cfg.firstInstallVal = cfg.utls.readRegistryKeys(cfg.regFirstInstall, cfg.firstInstallVal)
cfg.logSetVal = cfg.utls.readRegistryKeys(cfg.regLogKey, cfg.logSetVal)
cfg.downNewsVal = cfg.utls.readRegistryKeys(cfg.downNewsKey, cfg.downNewsVal)
cfg.vrtRstProjVal = cfg.utls.readRegistryKeys(cfg.vrtRstProjKey, cfg.vrtRstProjVal)
cfg.ROIClrVal = cfg.utls.readRegistryKeys(cfg.regROIClr, cfg.ROIClrVal)
cfg.ROITrnspVal = int(cfg.utls.readRegistryKeys(cfg.regROITransp, cfg.ROITrnspVal))
cfg.outTempRastFormat = cfg.utls.readRegistryKeys(cfg.regTempRasterFormat, str(cfg.outTempRastFormat))
cfg.rasterCompression = cfg.utls.readRegistryKeys(cfg.regRasterCompression, str(cfg.rasterCompression))
cfg.parallelWritingCheck = cfg.utls.readRegistryKeys(cfg.regparallelWritingCheck, str(cfg.parallelWritingCheck))
cfg.RAMValue = int(cfg.utls.readRegistryKeys(cfg.regRAMValue, str(cfg.RAMValue)))
cfg.threads = int(cfg.utls.readRegistryKeys(cfg.regThreadsValue, str(cfg.threads)))
cfg.gdalPath = cfg.utls.readRegistryKeys(cfg.regGDALPathSettings, str(cfg.gdalPath))
cfg.PythonPathSettings = cfg.utls.readRegistryKeys(cfg.regPythonPathSettings, str(cfg.PythonPathSettings))
cfg.PythonModulesPathSettings = cfg.utls.readRegistryKeys(cfg.regPythonModulesPathSettings, str(cfg.PythonModulesPathSettings))
cfg.tmpDir = cfg.utls.readRegistryKeys(cfg.regTmpDir, cfg.tmpDir)
cfg.fldID_class = cfg.utls.readRegistryKeys(cfg.regIDFieldName, cfg.fldID_class)
cfg.fldMacroID_class = cfg.utls.readRegistryKeys(cfg.regMacroIDFieldName, cfg.fldMacroID_class)
cfg.macroclassCheck = cfg.utls.readRegistryKeys(cfg.regConsiderMacroclass, cfg.macroclassCheck)
cfg.sentinelAlternativeSearch = cfg.utls.readRegistryKeys(cfg.regSentinelAlternativeSearch, cfg.sentinelAlternativeSearch)
cfg.LCsignatureCheckBox = cfg.utls.readRegistryKeys(cfg.regLCSignature, cfg.LCsignatureCheckBox)
cfg.fldROI_info = cfg.utls.readRegistryKeys(cfg.regInfoFieldName, cfg.fldROI_info)
cfg.fldROIMC_info = cfg.utls.readRegistryKeys(cfg.regMCInfoFieldName, cfg.fldROIMC_info)
cfg.variableName = cfg.utls.readRegistryKeys(cfg.regVariableName, cfg.variableName)
cfg.vectorVariableName = cfg.utls.readRegistryKeys(cfg.regVectorVariableName, cfg.vectorVariableName)
cfg.SMTPCheck = cfg.utls.readRegistryKeys(cfg.regSMTPCheck, cfg.SMTPCheck)
cfg.SMTPServer = cfg.utls.readRegistryKeys(cfg.regSMTPServer, cfg.SMTPServer)
cfg.SMTPtoEmails = cfg.utls.readRegistryKeys(cfg.regSMTPtoEmails, cfg.SMTPtoEmails)
cfg.SMTPUser = cfg.utls.readRegistryKeys(cfg.regSMTPUser, cfg.SMTPUser)
cfg.SMTPPassword = cfg.utls.readRegistryKeys(cfg.regSMTPPassword, cfg.SMTPPassword)
cfg.USGSUser = cfg.utls.readRegistryKeys(cfg.regUSGSUser, cfg.USGSUser)
cfg.USGSPass = cfg.utls.readRegistryKeys(cfg.regUSGSPass, cfg.USGSPass)
cfg.USGSUserASTER = cfg.utls.readRegistryKeys(cfg.regUSGSUserASTER, cfg.USGSUserASTER)
cfg.USGSPassASTER = cfg.utls.readRegistryKeys(cfg.regUSGSPassASTER, cfg.USGSPassASTER)
cfg.SciHubUser = cfg.utls.readRegistryKeys(cfg.regSciHubUser, cfg.SciHubUser)
cfg.SciHubService = cfg.utls.readRegistryKeys(cfg.regSciHubService, cfg.SciHubService)
cfg.SciHubPass = cfg.utls.readRegistryKeys(cfg.regSciHubPass, cfg.SciHubPass)
cfg.sigPLRoundCharList = cfg.roundCharList
cfg.scatPlRoundCharList = cfg.roundCharList
cfg.grpNm = cfg.utls.readRegistryKeys(cfg.regGroupName, cfg.grpNm)
cfg.rasterDataType = cfg.utls.readRegistryKeys(cfg.regRasterDataType, cfg.rasterDataType)
cfg.expressionListBC = cfg.utls.readRegistryKeys(cfg.regExpressionListBC, cfg.expressionListBC)
cfg.soundVal = cfg.utls.readRegistryKeys(cfg.regSound, cfg.soundVal)
cfg.windowSizeW = cfg.utls.readRegistryKeys(cfg.regWindowSizeW, cfg.windowSizeW)
cfg.windowSizeH = cfg.utls.readRegistryKeys(cfg.regWindowSizeH, cfg.windowSizeH)
cfg.splitterSizeS = cfg.utls.readRegistryKeys(cfg.regSplitterSizeS, cfg.splitterSizeS)
def initGui(self):
if PluginCheck == 'Yes':
try:
cfg.iface.addDockWidget(cfg.QtSCP.LeftDockWidgetArea, cfg.dockclassdlg)
except:
msg = ''
try:
import scipy.stats.distributions as statdistr
except:
msg = 'SciPy'
try:
from matplotlib.ticker import MaxNLocator
except:
msg = 'Matplotlib'
try:
import numpy as np
except:
msg = 'NumPy'
try:
from osgeo import gdal
except:
msg = 'Gdal'
if len(msg) > 0:
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Semi-Automatic Classification Plugin possible missing dependencies: ' + msg), level=qgisCore.Qgis.Info)
else:
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Please restart QGIS to finish installing the Semi-Automatic Classification Plugin'), level=qgisCore.Qgis.Info)
return
from .modules.modules import Modules
cfg.SCPModules = Modules()
cfg.SCPModules.loading()
cfg.ipt.loadInputToolbar()
cfg.algName = cfg.algMinDist
cfg.ui.algorithm_combo.setCurrentIndex(0)
# vector to raster type of conversion
cfg.ui.conversion_type_combo.addItem(cfg.convCenterPixels)
cfg.ui.conversion_type_combo.addItem(cfg.convAllPixelsTouch)
cfg.centerOfPixels = cfg.ui.conversion_type_combo.itemText(0)
''' menu '''
cfg.ipt.loadMenu()
# set plugin version
cfg.ui.plugin_version_label.setText(semiautomaticclassVersion())
cfg.uidc.plugin_version_label2.setText('SCP ' + semiautomaticclassVersion())
# row height
cfg.ui.download_images_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.ui.tableWidget_band_calc.verticalHeader().setDefaultSectionSize(24)
cfg.ui.landsat_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.ui.sentinel_2_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.utls.setColumnWidthList(cfg.ui.sentinel_2_tableWidget, [[0, 400], [1, 200], [2, 60]])
cfg.ui.ASTER_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.utls.setColumnWidthList(cfg.ui.ASTER_tableWidget, [[0, 400], [1, 200], [2, 60]])
cfg.ui.MODIS_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.utls.setColumnWidthList(cfg.ui.MODIS_tableWidget, [[0, 400], [1, 200], [2, 60]])
cfg.ui.LCS_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.ui.signature_threshold_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.ui.point_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.ui.log_tableWidget.verticalHeader().setDefaultSectionSize(24)
cfg.utls.setColumnWidthList(cfg.ui.log_tableWidget, [[0, 100], [1, 200], [2, 800]])
# spectral signature plot list
cfg.utls.insertTableColumn(cfg.uisp.signature_list_plot_tableWidget, 6, cfg.tableColString, None, 'Yes')
cfg.utls.sortTableColumn(cfg.uisp.signature_list_plot_tableWidget, 3)
cfg.utls.setColumnWidthList(cfg.uisp.signature_list_plot_tableWidget, [[0, 30], [1, 40], [2, 100], [3, 40], [4, 100], [5, 30]])
try:
cfg.uisp.signature_list_plot_tableWidget.horizontalHeader().setSectionResizeMode(2, cfg.QtWidgetsSCP.QHeaderView.Stretch)
cfg.uisp.signature_list_plot_tableWidget.horizontalHeader().setSectionResizeMode(4, cfg.QtWidgetsSCP.QHeaderView.Stretch)
except:
pass
cfg.SCPD.clearTree()
# passwords
cfg.ui.smtp_password_lineEdit.setEchoMode(cfg.QtWidgetsSCP.QLineEdit.Password)
cfg.ui.password_usgs_lineEdit.setEchoMode(cfg.QtWidgetsSCP.QLineEdit.Password)
cfg.ui.password_usgs_lineEdit_2.setEchoMode(cfg.QtWidgetsSCP.QLineEdit.Password)
cfg.ui.password_scihub_lineEdit.setEchoMode(cfg.QtWidgetsSCP.QLineEdit.Password)
# scatter plot list
cfg.utls.insertTableColumn(cfg.uiscp.scatter_list_plot_tableWidget, 6, cfg.tableColString, None, 'Yes')
cfg.utls.sortTableColumn(cfg.uiscp.scatter_list_plot_tableWidget, 3)
cfg.utls.setColumnWidthList(cfg.uiscp.scatter_list_plot_tableWidget, [[0, 30], [1, 40], [2, 100], [3, 40], [4, 100], [5, 30]])
try:
cfg.uiscp.scatter_list_plot_tableWidget.horizontalHeader().setSectionResizeMode(2, cfg.QtWidgetsSCP.QHeaderView.Stretch)
cfg.uiscp.scatter_list_plot_tableWidget.horizontalHeader().setSectionResizeMode(4, cfg.QtWidgetsSCP.QHeaderView.Stretch)
except:
pass
# signature threshold
cfg.utls.insertTableColumn(cfg.ui.signature_threshold_tableWidget, 7, cfg.tableColString, None, 'Yes')
cfg.utls.setColumnWidthList(cfg.ui.signature_threshold_tableWidget, [[4, 100], [5, 100], [6, 100]])
try:
cfg.ui.signature_threshold_tableWidget.horizontalHeader().setSectionResizeMode(1, cfg.QtWidgetsSCP.QHeaderView.Stretch)
cfg.ui.signature_threshold_tableWidget.horizontalHeader().setSectionResizeMode(3, cfg.QtWidgetsSCP.QHeaderView.Stretch)
except:
pass
# product download tab
cfg.utls.setColumnWidthList(cfg.ui.download_images_tableWidget, [[0, 100], [1, 400]])
# USGS spectral library
cfg.usgsLib.addSpectralLibraryToCombo(cfg.usgs_lib_list)
cfg.usgs_C1p = cfg.plgnDir + '/' + cfg.usgs_C1p
cfg.usgs_C2p = cfg.plgnDir + '/' + cfg.usgs_C2p
cfg.usgs_C3p = cfg.plgnDir + '/' + cfg.usgs_C3p
cfg.usgs_C4p = cfg.plgnDir + '/' + cfg.usgs_C4p
cfg.usgs_C5p = cfg.plgnDir + '/' + cfg.usgs_C5p
cfg.usgs_C6p = cfg.plgnDir + '/' + cfg.usgs_C6p
cfg.usgs_C7p = cfg.plgnDir + '/' + cfg.usgs_C7p
# band calc expression
cfg.bCalc.createExpressionList(cfg.expressionListBC)
cfg.batchT.addFunctionsToTable(cfg.functionNames)
cfg.bst.addSatelliteToCombo(cfg.satWlList)
cfg.downProd.addSatelliteToCombo(cfg.downProductList)
cfg.scaPlT.addColormapToCombo(cfg.scatterColorMap)
cfg.bst.addUnitToCombo(cfg.unitList)
cfg.SCPD.previewSize()
# set log state
if cfg.logSetVal == 'Yes':
cfg.ui.log_checkBox.setCheckState(2)
cfg.mx.msg19()
elif cfg.logSetVal == 'No':
cfg.ui.log_checkBox.setCheckState(0)
# set download news state
cfg.ui.download_news_checkBox.setCheckState(int(cfg.downNewsVal))
# set virtual raster load state
cfg.ui.virtual_raster_load_checkBox.setCheckState(int(cfg.vrtRstProjVal))
# set raster format
if cfg.outTempRastFormat == 'VRT':
cfg.ui.virtual_raster_checkBox.setCheckState(2)
elif cfg.outTempRastFormat == 'GTiff':
cfg.ui.virtual_raster_checkBox.setCheckState(0)
# set raster compression
if cfg.rasterCompression == 'Yes':
cfg.ui.raster_compression_checkBox.setCheckState(2)
elif cfg.rasterCompression == 'No':
cfg.ui.raster_compression_checkBox.setCheckState(0)
# set parallel writing state
if cfg.parallelWritingCheck == 'Yes':
cfg.ui.parallel_writing_checkBox.setCheckState(2)
elif cfg.parallelWritingCheck == 'No':
cfg.ui.parallel_writing_checkBox.setCheckState(0)
# set SMTP checkbox state
cfg.ui.smtp_checkBox.setCheckState(int(cfg.SMTPCheck))
# set sound state
cfg.ui.sound_checkBox.setCheckState(int(cfg.soundVal))
# connect to project loaded
cfg.qgisCoreSCP.QgsProject.instance().readProject.connect(self.projectLoaded)
cfg.qgisCoreSCP.QgsProject.instance().projectSaved.connect(self.projectSaved)
cfg.iface.newProjectCreated.connect(self.newProjectLoaded)
#cfg.qgisCoreSCP.QgsProject.instance().readMapLayer.connect(self.test)
#cfg.qgisCoreSCP.QgsProject.instance().layerLoaded.connect(self.test)
''' Help tab '''
cfg.utls.makeDirectory(cfg.tmpDir + '/_images/')
cfg.ui.help_textBrowser.setSearchPaths([cfg.tmpDir])
''' Docks '''
# set ROI color
cfg.ui.change_color_Button.setStyleSheet('background-color :' + cfg.ROIClrVal)
# set ROI transparency
cfg.ui.transparency_Slider.setValue(cfg.ROITrnspVal)
# set RAM value
cfg.ui.RAM_spinBox.setValue(cfg.RAMValue)
# set CPU value
cfg.ui.CPU_spinBox.setValue(cfg.threads)
# macroclass checkbox
if cfg.macroclassCheck == 'No':
cfg.ui.macroclass_checkBox.setCheckState(0)
cfg.ui.class_checkBox.blockSignals(True)
cfg.ui.class_checkBox.setCheckState(2)
cfg.ui.class_checkBox.blockSignals(False)
elif cfg.macroclassCheck == 'Yes':
cfg.ui.macroclass_checkBox.setCheckState(2)
cfg.ui.class_checkBox.blockSignals(True)
cfg.ui.class_checkBox.setCheckState(0)
cfg.ui.class_checkBox.blockSignals(False)
# macroclass checkbox (random forest)
if cfg.macroclassCheckRF == 'No':
cfg.ui.macroclass_checkBox_rf.setCheckState(0)
cfg.ui.class_checkBox_rf.blockSignals(True)
cfg.ui.class_checkBox_rf.setCheckState(2)
cfg.ui.class_checkBox_rf.blockSignals(False)
elif cfg.macroclassCheckRF == 'Yes':
cfg.ui.macroclass_checkBox_rf.setCheckState(2)
cfg.ui.class_checkBox_rf.blockSignals(True)
cfg.ui.class_checkBox_rf.setCheckState(0)
cfg.ui.class_checkBox_rf.blockSignals(False)
# LC signature checkbox
if cfg.LCsignatureCheckBox == 'No':
cfg.ui.LC_signature_checkBox.setCheckState(0)
elif cfg.LCsignatureCheckBox == 'Yes':
cfg.ui.LC_signature_checkBox.setCheckState(2)
try:
# set SMTP server
cfg.ui.smtp_server_lineEdit.setText(cfg.SMTPServer)
# set SMTP to emails
cfg.ui.to_email_lineEdit.setText(cfg.SMTPtoEmails)
# set SMTP user and password
cfg.ui.smtp_user_lineEdit.setText(cfg.SMTPUser)
if cfg.SMTPPassword is not None:
SMTPPsw = cfg.utls.decryptPassword(cfg.SMTPPassword[2:-1])
cfg.ui.smtp_password_lineEdit.setText(str(SMTPPsw)[2:-1])
cfg.SMTPPassword = str(SMTPPsw)[2:-1]
# set USGS user and password
cfg.ui.user_usgs_lineEdit.setText(cfg.USGSUser)
if cfg.USGSPass is not None:
USGSPsw = cfg.utls.decryptPassword(cfg.USGSPass[2:-1])
cfg.ui.password_usgs_lineEdit.setText(str(USGSPsw)[2:-1])
cfg.ui.user_usgs_lineEdit_2.setText(cfg.USGSUserASTER)
if cfg.USGSPassASTER is not None:
USGSPsw2 = cfg.utls.decryptPassword(cfg.USGSPassASTER[2:-1])
cfg.ui.password_usgs_lineEdit_2.setText(str(USGSPsw2)[2:-1])
# set SciHub user and password
cfg.ui.sentinel_service_lineEdit.setText(cfg.SciHubService)
cfg.ui.user_scihub_lineEdit.setText(cfg.SciHubUser)
if cfg.SciHubPass is not None:
sciHubPsw = cfg.utls.decryptPassword(cfg.SciHubPass[2:-1])
cfg.ui.password_scihub_lineEdit.setText(str(sciHubPsw)[2:-1])
except Exception as err:
# logger
cfg.utls.logCondition(str(__name__) + '-' + (cfg.inspectSCP.stack()[0][3])+ ' ' + cfg.utls.lineOfCode(), ' ERROR exception: ' + str(err))
cfg.ui.sentinel2_alternative_search_checkBox.blockSignals(True)
cfg.ui.sentinel2_alternative_search_checkBox.setCheckState(int(cfg.sentinelAlternativeSearch))
cfg.ui.sentinel2_alternative_search_checkBox.blockSignals(False)
''' SCP tab '''
cfg.ui.SCP_tabs.currentChanged.connect(cfg.ipt.SCPTabChanged)
cfg.ui.main_tabWidget.currentChanged.connect(cfg.ipt.mainTabChanged)
# hide tabs
cfg.ui.SCP_tabs.setStyleSheet('QTabBar::tab {padding: 0px; max-height: 0px;}')
# set window size
cfg.dlg.resize(int(cfg.windowSizeW), int(cfg.windowSizeH))
cfg.ui.widget.setMinimumSize(cfg.QtCoreSCP.QSize(50, 0))
cfg.ui.widget.setMaximumSize(cfg.QtCoreSCP.QSize(400, 16777215))
cfg.ui.splitter.setSizes(eval(cfg.splitterSizeS))
cfg.ui.splitter.splitterMoved.connect(cfg.ipt.movedSplitter)
cfg.ui.menu_treeWidget.itemSelectionChanged.connect(cfg.ipt.menuIndex)
cfg.ui.f_filter_lineEdit.textChanged.connect(cfg.ipt.filterTree)
''' Multiple ROI tab '''
# connect to add point
cfg.ui.add_point_pushButton.clicked.connect(cfg.multiROI.addPointToTable)
# connect to create random points
cfg.ui.add_random_point_pushButton.clicked.connect(cfg.multiROI.createRandomPoint)
# connect to remove point
cfg.ui.remove_point_pushButton.clicked.connect(cfg.multiROI.removePointFromTable)
# connect to save point ROIs
cfg.ui.save_point_rois_pushButton.clicked.connect(cfg.multiROI.createROIfromPoint)
# connect to import points
cfg.ui.import_point_list_pushButton.clicked.connect(cfg.multiROI.importPoints)
# connect to export point list
cfg.ui.export_point_list_pushButton.clicked.connect(cfg.multiROI.exportPointList)
# connect the signature calculation checkBox 2
cfg.ui.signature_checkBox2.stateChanged.connect(cfg.multiROI.signatureCheckbox2)
# connect to text changed
cfg.ui.stratified_lineEdit.textChanged.connect(cfg.multiROI.textChanged)
''' Import spectral signature tab '''
# connect the import library
cfg.ui.open_library_pushButton.clicked.connect(cfg.SCPD.openLibraryFile)
# connect the open shapefile
cfg.ui.open_shapefile_pushButton.clicked.connect(cfg.sigImport.openShapefileI)
# connect the import shapefile
cfg.ui.import_shapefile_pushButton.clicked.connect(cfg.utls.importShapefile)
# connect the chapter changed
cfg.ui.usgs_chapter_comboBox.currentIndexChanged.connect(cfg.usgsLib.chapterChanged)
# connect the library changed
cfg.ui.usgs_library_comboBox.currentIndexChanged.connect(cfg.usgsLib.libraryChanged)
# connect the add USGS library signature button
cfg.ui.add_usgs_library_pushButton.clicked.connect(cfg.usgsLib.addSignatureToList)
''' Export spectral signature tab '''
# connect to export signature to SCP file
cfg.ui.export_SCP_pushButton.clicked.connect(cfg.SCPD.exportSignatureFile)
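# connect to export signature to shapefile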
cfg.ui.export_SHP_pushButton.clicked.connect(cfg.SCPD.exportSignatureShapefile)
# connect to export signature to CSV
cfg.ui.export_CSV_library_toolButton.clicked.connect(cfg.SCPD.exportToCSVLibrary)
''' Algorithm weight tab '''
cfg.ui.reset_weights_pushButton.clicked.connect(cfg.algWT.resetWeights)
cfg.ui.set_weight_value_pushButton.clicked.connect(cfg.algWT.setWeights)
''' Signature threshold tab '''
# edited cell
cfg.ui.signature_threshold_tableWidget.cellChanged.connect(cfg.signT.editedThresholdTable)
cfg.ui.reset_threshold_pushButton.clicked.connect(cfg.signT.resetThresholds)
cfg.ui.automatic_threshold_pushButton.clicked.connect(cfg.signT.setAllWeightsVariance)
cfg.ui.set_threshold_value_pushButton.clicked.connect(cfg.signT.setThresholds)
cfg.ui.signature_threshold_tableWidget.horizontalHeader().sectionClicked.connect(cfg.signT.orderedTable)
''' LC Signature threshold tab '''
cfg.ui.LCS_tableWidget.cellChanged.connect(cfg.LCSignT.editedThresholdTable)
cfg.ui.LCS_tableWidget.horizontalHeader().sectionClicked.connect(cfg.LCSignT.orderedTable)
cfg.ui.automatic_threshold_pushButton_2.clicked.connect(cfg.LCSignT.setAllWeightsVariance)
# connect to activate pointer
cfg.ui.LCS_pointerButton.clicked.connect(cfg.LCSignT.pointerActive)
cfg.ui.LCS_ROI_button.clicked.connect(cfg.LCSignT.ROIThreshold)
cfg.ui.set_min_max_Button.clicked.connect(cfg.LCSignT.setMinimumMaximum)
# connect the include signature checkBox
cfg.ui.LCS_include_checkBox.stateChanged.connect(cfg.LCSignT.includeCheckbox)
cfg.ui.LCS_cut_checkBox.stateChanged.connect(cfg.LCSignT.cutCheckbox)
# add to spectral signature plot
cfg.ui.signature_spectral_plot_toolButton_2.clicked.connect(cfg.LCSignT.addSignatureToSpectralPlot)
''' RGB List tab '''
cfg.ui.RGB_tableWidget.cellChanged.connect(cfg.RGBLT.editedTable)
cfg.ui.add_RGB_pushButton.clicked.connect(cfg.RGBLT.addRGBToTable)
cfg.ui.remove_RGB_toolButton.clicked.connect(cfg.RGBLT.removeRGBFromTable)
cfg.ui.sort_by_name_toolButton_2.clicked.connect(cfg.RGBLT.sortRGBName)
cfg.ui.clear_RGB_list_toolButton.clicked.connect(cfg.RGBLT.clearTableAction)
cfg.ui.move_up_toolButton_3.clicked.connect(cfg.RGBLT.moveUpRGB)
cfg.ui.move_down_toolButton_3.clicked.connect(cfg.RGBLT.moveDownRGB)
cfg.ui.all_RGB_list_toolButton.clicked.connect(cfg.RGBLT.allRGBListAction)
cfg.ui.export_RGB_List_toolButton.clicked.connect(cfg.RGBLT.exportRGBList)
cfg.ui.import_RGB_List_toolButton.clicked.connect(cfg.RGBLT.importRGB)
''' Band set List tab '''
cfg.ui.add_bandset_pushButton.clicked.connect(cfg.bstLT.addBandSetToTable)
cfg.ui.rgb_toolButton.clicked.connect(cfg.bstLT.displayRGB)
cfg.ui.remove_bandset_toolButton.clicked.connect(cfg.bstLT.removeBandSetFromTable)
cfg.ui.move_up_toolButton_4.clicked.connect(cfg.bstLT.moveUpBandset)
cfg.ui.move_down_toolButton_4.clicked.connect(cfg.bstLT.moveDownBandset)
# connect to double click
cfg.ui.band_set_list_tableWidget.doubleClicked.connect(cfg.bstLT.doubleClick)
cfg.ui.export_bandset_List_toolButton.clicked.connect(cfg.bstLT.exportList)
cfg.ui.import_bandset_List_toolButton.clicked.connect(cfg.bstLT.importList)
# connect to filter
cfg.ui.band_set_filter_lineEdit.textChanged.connect(cfg.bstLT.filterTable)
''' Download product tab '''
# connect to find images button
cfg.ui.find_images_toolButton.clicked.connect(cfg.downProd.findImages)
cfg.ui.selectUL_toolButton_3.clicked.connect(cfg.downProd.pointerActive)
# connect to display button
cfg.ui.toolButton_display.clicked.connect(cfg.downProd.displayImages)
cfg.ui.toolButton_OSM.clicked.connect(cfg.downProd.displayOSM)
cfg.ui.remove_image_toolButton.clicked.connect(cfg.downProd.removeImageFromTable)
cfg.ui.clear_table_toolButton.clicked.connect(cfg.downProd.clearTable)
cfg.ui.download_images_Button.clicked.connect(cfg.downProd.downloadImages)
cfg.ui.export_links_Button.clicked.connect(cfg.downProd.exportLinks)
cfg.ui.import_table_pushButton.clicked.connect(cfg.downProd.importTableText)
cfg.ui.export_table_pushButton.clicked.connect(cfg.downProd.exportTableText)
cfg.ui.check_toolButton.clicked.connect(cfg.downProd.checkAllBands)
cfg.ui.show_area_radioButton_2.clicked.connect(cfg.downProd.showHideArea)
cfg.ui.remember_user_checkBox_2.stateChanged.connect(cfg.downProd.rememberUserCheckbox)
cfg.ui.user_usgs_lineEdit.editingFinished.connect(cfg.downProd.rememberUser)
cfg.ui.password_usgs_lineEdit.editingFinished.connect(cfg.downProd.rememberUser)
cfg.ui.reset_sentinel_service_toolButton.clicked.connect(cfg.downProd.resetService)
cfg.ui.remember_user_checkBox.stateChanged.connect(cfg.downProd.rememberUserCheckboxSentinel2)
cfg.ui.sentinel2_alternative_search_checkBox.stateChanged.connect(cfg.downProd.alternativeCheckboxSentinel2)
cfg.ui.user_scihub_lineEdit.editingFinished.connect(cfg.downProd.rememberUserSentinel2)
cfg.ui.password_scihub_lineEdit.editingFinished.connect(cfg.downProd.rememberUserSentinel2)
cfg.ui.sentinel_service_lineEdit.editingFinished.connect(cfg.downProd.rememberService)
cfg.ui.check_toolButton_2.clicked.connect(cfg.downProd.checkAllBandsSentinel2)
cfg.ui.check_toolButton_3.clicked.connect(cfg.downProd.checkAllBandsSentinel3)
cfg.ui.check_toolButton_4.clicked.connect(cfg.downProd.checkAllBandsGOES)
cfg.ui.remember_user_checkBox_3.stateChanged.connect(cfg.downProd.rememberUserCheckboxEarthdata)
cfg.ui.user_usgs_lineEdit_2.editingFinished.connect(cfg.downProd.rememberUserEarthdata)
cfg.ui.password_usgs_lineEdit_2.editingFinished.connect(cfg.downProd.rememberUserEarthdata)
cfg.ui.download_images_tableWidget.itemSelectionChanged.connect(cfg.downProd.tableClick)
# connect to filter
cfg.ui.products_filter_lineEdit.textChanged.connect(cfg.downProd.filterTable)
''' Classification dock '''
# button band set
cfg.uidc.bandset_toolButton.clicked.connect(cfg.utls.bandSetTab)
cfg.uidc.band_processing_toolButton.clicked.connect(cfg.utls.bandProcessingTab)
cfg.uidc.preprocessing_toolButton_2.clicked.connect(cfg.utls.preProcessingTab)
cfg.uidc.postprocessing_toolButton_2.clicked.connect(cfg.utls.postProcessingTab)
cfg.uidc.bandcalc_toolButton_2.clicked.connect(cfg.utls.bandCalcTab)
cfg.uidc.download_images_toolButton_2.clicked.connect(cfg.utls.selectTabDownloadImages)
cfg.uidc.basic_tools_toolButton.clicked.connect(cfg.utls.basicToolsTab)
cfg.uidc.batch_toolButton.clicked.connect(cfg.utls.batchTab)
cfg.uidc.userguide_toolButton_2.clicked.connect(cfg.ipt.quickGuide)
cfg.uidc.help_toolButton_2.clicked.connect(cfg.ipt.askHelp)
cfg.uidc.support_toolButton.clicked.connect(cfg.ipt.supportSCP)
cfg.uidc.tabWidget_dock.currentChanged.connect(cfg.ipt.dockTabChanged)
# button new input
cfg.uidc.button_new_input.clicked.connect(cfg.SCPD.createInput)
# button reset
cfg.uidc.button_reset_input.clicked.connect(cfg.SCPD.resetInput)
# connect to save to shapefile
cfg.uidc.button_Save_ROI.clicked.connect(cfg.SCPD.saveROItoShapefile)
# connect to undo save ROI
cfg.uidc.undo_save_Button.clicked.connect(cfg.SCPD.undoSaveROI)
cfg.uidc.redo_save_Button.clicked.connect(cfg.SCPD.redoSaveROI)
# connect the signature calculation checkBox
cfg.uidc.signature_checkBox.stateChanged.connect(cfg.SCPD.signatureCheckbox)
cfg.uidc.scatterPlot_toolButton.clicked.connect(cfg.SCPD.addROIToScatterPlot)
# connect the save input checkBox
cfg.uidc.save_input_checkBox.stateChanged.connect(cfg.SCPD.saveInputCheckbox)
# connect to open training file
cfg.uidc.trainingFile_toolButton.clicked.connect(cfg.SCPD.openTrainingFile)
# connect to export signature list file
cfg.uidc.export_signature_list_toolButton.clicked.connect(cfg.utls.exportSignaturesTab)
# connect to import library file
cfg.uidc.import_library_toolButton.clicked.connect(cfg.utls.importSignaturesTab)
# add to spectral signature plot
cfg.uidc.signature_spectral_plot_toolButton.clicked.connect(cfg.SCPD.addSignatureToSpectralPlot)
# connect to filter
cfg.uidc.ROI_filter_lineEdit.textChanged.connect(cfg.SCPD.filterTree)
# connect to delete signature
cfg.uidc.delete_Signature_Button.clicked.connect(cfg.SCPD.removeSelectedSignatures)
# connect to merge signatures
cfg.uidc.merge_signature_toolButton.clicked.connect(cfg.SCPD.mergeSelectedSignatures)
cfg.uidc.calculate_signature_toolButton.clicked.connect(cfg.SCPD.calculateSignatures)
# connect the ROI macroclass ID
cfg.uidc.ROI_Macroclass_ID_spin.valueChanged.connect(cfg.SCPD.setROIMacroID)
# connect the ROI Macroclass
cfg.uidc.ROI_Macroclass_line.editingFinished.connect(cfg.SCPD.roiMacroclassInfo)
# custom expression
cfg.uidc.custom_index_lineEdit.editingFinished.connect(cfg.SCPD.customExpressionEdited)
# connect the ROI Class ID
cfg.uidc.ROI_ID_spin.valueChanged.connect(cfg.SCPD.setROIID)
# connect the ROI Class
cfg.uidc.ROI_Class_line.editingFinished.connect(cfg.SCPD.roiClassInfo)
# connect the vegetation index display checkBox
cfg.uidc.display_cursor_checkBox.stateChanged.connect(cfg.SCPD.vegetationIndexCheckbox)
# connect the vegetation index combo
cfg.uidc.vegetation_index_comboBox.currentIndexChanged.connect(cfg.SCPD.vegetationIndexName)
# connect the rapid ROI checkBox
cfg.uidc.rapid_ROI_checkBox.stateChanged.connect(cfg.SCPD.rapidROICheckbox)
# connect the rapid ROI band spinBox
cfg.uidc.rapidROI_band_spinBox.valueChanged.connect(cfg.SCPD.rapidROIband)
''' Classification tab '''
# connect to algorithm weight button
cfg.ui.algorithm_weight_button.clicked.connect(cfg.utls.algorithmBandWeightTab)
# connect to threshold button
cfg.ui.algorithm_threshold_button.clicked.connect(cfg.utls.signatureThresholdTab)
# connect to LCS threshold button
cfg.ui.LC_signature_button.clicked.connect(cfg.utls.LCSThresholdTab)
# connect the algorithm combo
cfg.ui.algorithm_combo.currentIndexChanged.connect(cfg.classTab.algorithmName)
# connect the algorithm threshold
cfg.ui.alg_threshold_SpinBox.valueChanged.connect(cfg.classTab.algorithmThreshold)
# connect to run classification
cfg.ui.button_classification.clicked.connect(cfg.classTab.runClassificationAction)
cfg.ui.classification.clicked.connect(cfg.batchT.setFunctionButton)
# connect the macroclass checkBox
cfg.ui.macroclass_checkBox.stateChanged.connect(cfg.classTab.macroclassCheckbox)
cfg.ui.class_checkBox.stateChanged.connect(cfg.classTab.classCheckbox)
# connect the LC signature checkBox
cfg.ui.LC_signature_checkBox.stateChanged.connect(cfg.classTab.LCSignature_Checkbox)
# connect the mask checkBox
cfg.ui.mask_checkBox.stateChanged.connect(cfg.classTab.maskCheckbox)
# connect to reset qml button
cfg.ui.resetQmlButton.clicked.connect(cfg.classTab.resetQmlStyle)
# connect to reset mask button
cfg.ui.resetMaskButton.clicked.connect(cfg.classTab.resetMask)
# connect to qml button
cfg.ui.qml_Button.clicked.connect(cfg.classTab.selectQmlStyle)
''' Spectral signature plot '''
# connect the sigma checkBox
cfg.uisp.sigma_checkBox.stateChanged.connect(cfg.spSigPlot.sigmaCheckbox)
cfg.uisp.band_lines_checkBox.stateChanged.connect(cfg.spSigPlot.refreshPlot)
cfg.uisp.grid_checkBox.stateChanged.connect(cfg.spSigPlot.refreshPlot)
# connect to remove signature button
cfg.uisp.remove_Signature_Button.clicked.connect(cfg.spSigPlot.removeSignature)
# connect to calculate spectral distances button
cfg.uisp.calculate_spectral_distance_Button.clicked.connect(cfg.spSigPlot.calculateSpectralDistances)
# connect to fit to axes
cfg.uisp.fitToAxes_pushButton.clicked.connect(cfg.spSigPlot.fitPlotToAxes)
# connect to plot spinbox
cfg.uisp.plot_text_spinBox.valueChanged.connect(cfg.spSigPlot.setPlotLegendLenght)
# connect to value range
cfg.uisp.value_range_pushButton.clicked.connect(cfg.spSigPlot.editValueRange)
cfg.uisp.set_min_max_Button.clicked.connect(cfg.spSigPlot.setMinimumMaximum)
cfg.uisp.automatic_threshold_pushButton_2.clicked.connect(cfg.spSigPlot.setAllWeightsVariance)
# connect to activate pointer
cfg.uisp.LCS_pointerButton_2.clicked.connect(cfg.spSigPlot.pointerActive)
cfg.uisp.LCS_ROI_button_2.clicked.connect(cfg.spSigPlot.ROIThreshold)
# undo threshold
cfg.uisp.undo_threshold_Button.clicked.connect(cfg.spSigPlot.undoThreshold)
# connect the include signature checkBox
cfg.uisp.LCS_include_checkBox_2.stateChanged.connect(cfg.spSigPlot.includeCheckbox)
cfg.uisp.LCS_cut_checkBox_2.stateChanged.connect(cfg.spSigPlot.cutCheckbox)
# connect to add to signature list
cfg.uisp.add_signature_list_pushButton.clicked.connect(cfg.spSigPlot.addToSignatureList)
# connect to save plot
cfg.uisp.save_plot_pushButton.clicked.connect(cfg.spSigPlot.savePlot)
# connect to edited cell
cfg.uisp.signature_list_plot_tableWidget.cellChanged.connect(cfg.spSigPlot.editedCell)
cfg.uisp.signature_list_plot_tableWidget.horizontalHeader().sectionClicked.connect(cfg.spSigPlot.orderedTable)
# connect to signature plot list double click
cfg.uisp.signature_list_plot_tableWidget.doubleClicked.connect(cfg.spSigPlot.signatureListDoubleClick)
''' Scatter plot tab '''
# connect to scatter plot button
cfg.uiscp.scatter_ROI_Button.clicked.connect(cfg.scaPlT.scatterPlotCalc)
# connect to Band X spinbox
cfg.uiscp.bandX_spinBox.valueChanged.connect(cfg.scaPlT.bandXPlot)
# connect to Band Y spinbox
cfg.uiscp.bandY_spinBox.valueChanged.connect(cfg.scaPlT.bandYPlot)
# connect double click ROI list to zoom
cfg.uiscp.scatter_list_plot_tableWidget.doubleClicked.connect(cfg.scaPlT.scatterPlotDoubleClick)
# connect to edited cell
cfg.uiscp.scatter_list_plot_tableWidget.cellChanged.connect(cfg.scaPlT.editedCell)
# connect to remove signature button
cfg.uiscp.remove_Signature_Button.clicked.connect(cfg.scaPlT.removeScatter)
# connect to save plot
cfg.uiscp.save_plot_pushButton_2.clicked.connect(cfg.scaPlT.savePlot)
# connect to fit to axes
cfg.uiscp.fitToAxes_pushButton_2.clicked.connect(cfg.scaPlT.fitPlotToAxes)
cfg.uiscp.plot_temp_ROI_pushButton.clicked.connect(cfg.scaPlT.addTempROIToScatterPlot)
cfg.uiscp.plot_display_pushButton.clicked.connect(cfg.scaPlT.addDisplayToScatterPlot)
cfg.uiscp.plot_image_pushButton.clicked.connect(cfg.scaPlT.addImageToScatterPlot)
# connect to change color button
cfg.uiscp.polygon_color_Button.clicked.connect(cfg.scaPlT.changePolygonColor)
cfg.uiscp.plot_color_ROI_pushButton.clicked.connect(cfg.scaPlT.colorPlot)
# connect to select value range
cfg.uiscp.draw_polygons_pushButton.clicked.connect(cfg.scaPlT.selectRange)
cfg.uiscp.remove_polygons_pushButton.clicked.connect(cfg.scaPlT.removePolygons)
cfg.uiscp.show_polygon_area_pushButton.clicked.connect(cfg.scaPlT.showScatterPolygonArea)
cfg.uiscp.add_signature_list_pushButton.clicked.connect(cfg.scaPlT.addToSignatureList)
''' Band set tab '''
# connect to refresh button
cfg.ui.toolButton_reload_3.clicked.connect(cfg.bst.rasterBandName)
# button reload
cfg.ui.toolButton_reload.clicked.connect(cfg.ipt.checkRefreshRasterLayer)
# connect to add file button
cfg.ui.toolButton_input_raster.clicked.connect(cfg.bst.addFileToBandSetAction)
# connect to add raster band button
cfg.ui.add_raster_bands_Button.clicked.connect(cfg.bst.addBandToSet)
# connect to select all bands button
cfg.ui.select_all_bands_Button.clicked.connect(cfg.bst.selectAllBands)
# connect to clear band set button
cfg.ui.clear_bandset_toolButton.clicked.connect(cfg.bst.clearBandSetAction)
# connect to move up band button
cfg.ui.move_up_toolButton.clicked.connect(cfg.bst.moveUpBand)
# connect to move down band button
cfg.ui.move_down_toolButton.clicked.connect(cfg.bst.moveDownBand)
# connect to sort by name button
cfg.ui.sort_by_name_toolButton.clicked.connect(cfg.bst.sortBandName)
# connect to remove band button
cfg.ui.remove_toolButton.clicked.connect(cfg.bst.removeBand)
# connect add band set
cfg.ui.add_band_set_toolButton.clicked.connect(cfg.bst.addBandSetTabAction)
# connect to changed tab
cfg.ui.Band_set_tabWidget.currentChanged.connect(cfg.bst.tabBandSetChanged)
# connect close tab
cfg.ui.Band_set_tabWidget.tabCloseRequested.connect(cfg.bst.closeBandSetTab)
# combo layer
cfg.ui.image_raster_name_combo.currentIndexChanged.connect(cfg.bst.rasterLayerName)
# connect to import band set button
cfg.ui.import_bandset_toolButton.clicked.connect(cfg.bst.importBandSet)
# connect to export band set button
cfg.ui.export_bandset_toolButton.clicked.connect(cfg.bst.exportBandSet)
# connect to satellite wavelength combo
cfg.ui.wavelength_sat_combo.currentIndexChanged.connect(cfg.bst.satelliteWavelength)
# connect to unit combo
cfg.ui.unit_combo.currentIndexChanged.connect(cfg.bst.setBandUnit)
# connect to date edit
cfg.ui.bandset_dateEdit.dateChanged.connect(cfg.bst.setBandsetDate)
# connect to band set process button
cfg.ui.band_set_process_toolButton.clicked.connect(cfg.bst.performBandSetTools)
# connect to filter
cfg.ui.bands_filter_lineEdit.textChanged.connect(cfg.bst.filterTable)
''' Pre processing tab '''
''' Clip multiple rasters '''
# connect to clip button
cfg.ui.clip_Button.clicked.connect(cfg.clipMulti.clipRastersAction)
cfg.ui.clip_multiple_rasters.clicked.connect(cfg.batchT.setFunctionButton)
# connect to activate UL pointer
cfg.ui.selectUL_toolButton.clicked.connect(cfg.clipMulti.pointerActive)
# connect to refresh shape button
cfg.ui.toolButton_reload_8.clicked.connect(cfg.clipMulti.refreshShapeClip)
cfg.ui.show_area_radioButton_3.clicked.connect(cfg.clipMulti.showHideArea)
cfg.ui.shapefile_checkBox.stateChanged.connect(cfg.clipMulti.checkboxShapeChanged)
cfg.ui.temporary_ROI_checkBox.stateChanged.connect(cfg.clipMulti.checkboxTempROIChanged)
# connect the shapefile combo
cfg.ui.shapefile_comboBox.currentIndexChanged.connect(cfg.clipMulti.referenceLayerName)
''' Stack raster bands '''
# connect to stack button
cfg.ui.stack_Button.clicked.connect(cfg.stackRstr.stackAction)
cfg.ui.stack_raster_bands.clicked.connect(cfg.batchT.setFunctionButton)
''' Spectral change band sets '''
# connect to calculate button
cfg.ui.spectral_distance_bandsets_toolButton.clicked.connect(cfg.spclDstBS.calculateDistanceAction)
cfg.ui.spectral_distance.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.min_distance_radioButton_2.clicked.connect(cfg.spclDstBS.radioMinDistChanged)
cfg.ui.spectral_angle_map_radioButton_2.clicked.connect(cfg.spclDstBS.radioSAMChanged)
''' Mosaic band sets '''
# connect to mosaic button
cfg.ui.mosaic_bandsets_toolButton.clicked.connect(cfg.mosaicBS.mosaicAction)
cfg.ui.mosaic_bandsets.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.mosaic_band_sets_lineEdit.textChanged.connect(cfg.mosaicBS.textChanged)
''' Cloud masking '''
# connect to mask button
cfg.ui.cloud_mask_toolButton.clicked.connect(cfg.cloudMsk.maskAction)
cfg.ui.cloud_masking.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.cloud_mask_classes_lineEdit.textChanged.connect(cfg.cloudMsk.textChanged)
# connect to refresh button
cfg.ui.toolButton_reload_23.clicked.connect(cfg.utls.refreshClassificationLayer)
''' ASTER tab '''
# connect to input button
cfg.ui.toolButton_directoryInput_ASTER.clicked.connect(cfg.ASTERT.inputASTER)
cfg.ui.ASTER_tableWidget.cellChanged.connect(cfg.ASTERT.editedCell)
cfg.ui.earth_sun_dist_lineEdit_2.textChanged.connect(cfg.ASTERT.editedEarthSunDist)
cfg.ui.sun_elev_lineEdit_2.textChanged.connect(cfg.ASTERT.editedSunElevation)
cfg.ui.date_lineEdit_2.textChanged.connect(cfg.ASTERT.editedDate)
cfg.ui.pushButton_Conversion_3.clicked.connect(cfg.ASTERT.performASTERCorrection)
cfg.ui.aster_conversion.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.pushButton_remove_band_2.clicked.connect(cfg.ASTERT.removeHighlightedBand)
''' MODIS tab '''
# connect to input button
cfg.ui.toolButton_directoryInput_MODIS.clicked.connect(cfg.MODIST.inputMODIS)
cfg.ui.MODIS_tableWidget.cellChanged.connect(cfg.MODIST.editedCell)
cfg.ui.pushButton_Conversion_4.clicked.connect(cfg.MODIST.performMODISConversion)
cfg.ui.modis_conversion.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.pushButton_remove_band_3.clicked.connect(cfg.MODIST.removeHighlightedBand)
''' Landsat tab '''
# connect to input button
cfg.ui.toolButton_directoryInput.clicked.connect(cfg.landsatT.inputLandsat)
cfg.ui.toolButton_directoryInput_MTL.clicked.connect(cfg.landsatT.inputMTL)
cfg.ui.pushButton_Conversion.clicked.connect(cfg.landsatT.performLandsatCorrection)
cfg.ui.landsat_conversion.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.pushButton_remove_band.clicked.connect(cfg.landsatT.removeHighlightedBand)
cfg.ui.landsat_tableWidget.cellChanged.connect(cfg.landsatT.editedCell)
cfg.ui.earth_sun_dist_lineEdit.textChanged.connect(cfg.landsatT.editedEarthSunDist)
cfg.ui.sun_elev_lineEdit.textChanged.connect(cfg.landsatT.editedSunElevation)
cfg.ui.date_lineEdit.textChanged.connect(cfg.landsatT.editedDate)
cfg.ui.satellite_lineEdit.textChanged.connect(cfg.landsatT.editedSatellite)
''' Sentinel-1 tab '''
# connect to input button
cfg.ui.S1_toolButton_fileInput.clicked.connect(cfg.sentinel1T.inputSentinel)
cfg.ui.S1_toolButton_directoryInput_xml.clicked.connect(cfg.sentinel1T.inputXML)
cfg.ui.pushButton_Conversion_6.clicked.connect(cfg.sentinel1T.performSentinelConversion)
cfg.ui.sentinel1_conversion.clicked.connect(cfg.batchT.setFunctionButton)
''' Sentinel-2 tab '''
# connect to input button
cfg.ui.S2_toolButton_directoryInput.clicked.connect(cfg.sentinel2T.inputSentinel)
cfg.ui.pushButton_Conversion_2.clicked.connect(cfg.sentinel2T.performSentinelConversion)
cfg.ui.sentinel2_conversion.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.S2_satellite_lineEdit.textChanged.connect(cfg.sentinel2T.editedSatellite)
cfg.ui.S2_pushButton_remove_band.clicked.connect(cfg.sentinel2T.removeHighlightedBand)
cfg.ui.sentinel_2_tableWidget.cellChanged.connect(cfg.sentinel2T.editedCell)
cfg.ui.S2_toolButton_directoryInput_xml2.clicked.connect(cfg.sentinel2T.inputXML2)
''' Sentinel-3 tab '''
# connect to input button
cfg.ui.S3_toolButton_directoryInput.clicked.connect(cfg.sentinel3T.inputSentinel)
cfg.ui.pushButton_Conversion_5.clicked.connect(cfg.sentinel3T.performSentinelConversion)
cfg.ui.sentinel3_conversion.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.S3_pushButton_remove_band.clicked.connect(cfg.sentinel3T.removeHighlightedBand)
''' GOES tab '''
# connect to input button
cfg.ui.GOES_toolButton_directoryInput.clicked.connect(cfg.goesT.inputGOES)
cfg.ui.pushButton_Conversion_8.clicked.connect(cfg.goesT.performGOESConversion)
cfg.ui.goes_conversion.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.GOES_pushButton_remove_band.clicked.connect(cfg.goesT.removeHighlightedBand)
''' Classification neighbor tab'''
cfg.ui.class_neighbor_toolButton.clicked.connect(cfg.clssNghbr.classNeighborAction)
cfg.ui.neighbor_pixels.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.toolButton_input_matrix.clicked.connect(cfg.clssNghbr.inputMatrixFile)
''' Reproject raster bands tab '''
# connect to refresh button
cfg.ui.toolButton_reload_25.clicked.connect(cfg.rprjRstBndsT.refreshClassificationLayer)
cfg.ui.use_align_raster_checkBox.stateChanged.connect(cfg.rprjRstBndsT.checkboxAlignChanged)
cfg.ui.use_epsg_checkBox.stateChanged.connect(cfg.rprjRstBndsT.checkboxEPSGChanged)
# connect to reproject raster button
cfg.ui.reproject_Button.clicked.connect(cfg.rprjRstBndsT.reprojectRasterBands)
cfg.ui.reproject_raster_bands.clicked.connect(cfg.batchT.setFunctionButton)
''' Split tab '''
# connect the classification combo
cfg.ui.raster_name_combo.currentIndexChanged.connect(cfg.splitT.rasterLayerName)
# connect to refresh button
cfg.ui.toolButton_reload_9.clicked.connect(cfg.splitT.refreshClassificationLayer)
# connect to split raster button
cfg.ui.split_Button.clicked.connect(cfg.splitT.splitRaster)
cfg.ui.split_raster_bands.clicked.connect(cfg.batchT.setFunctionButton)
''' PCA tab '''
# connect to PCA button
cfg.ui.pca_Button.clicked.connect(cfg.pcaT.calculatePCAAction)
cfg.ui.pca.clicked.connect(cfg.batchT.setFunctionButton)
''' K-means tab '''
# connect to kmeans button
cfg.ui.kmeans_Button.clicked.connect(cfg.clusteringT.calculateClusteringAction)
cfg.ui.clustering.clicked.connect(cfg.batchT.setFunctionButton)
# connect the algorithm combo
cfg.ui.kmean_minmax_radioButton.clicked.connect(cfg.clusteringT.radiokmean_minmaxChanged)
cfg.ui.kmean_siglist_radioButton.clicked.connect(cfg.clusteringT.radiokmean_siglistChanged)
cfg.ui.kmean_randomsiglist_radioButton.clicked.connect(cfg.clusteringT.radiokmean_randomsiglistChanged)
cfg.ui.kmeans_radioButton.clicked.connect(cfg.clusteringT.radioKmeansChanged)
cfg.ui.isodata_radioButton.clicked.connect(cfg.clusteringT.radioIsodataChanged)
cfg.ui.min_distance_radioButton.clicked.connect(cfg.clusteringT.radioMinDistChanged)
cfg.ui.spectral_angle_map_radioButton.clicked.connect(cfg.clusteringT.radioSAMChanged)
''' Random forest tab '''
# connect to calculate button
cfg.ui.button_random_forest.clicked.connect(cfg.rndmFrst.performRandomForest)
cfg.ui.random_forest.clicked.connect(cfg.batchT.setFunctionButton)
# connect the macroclass checkBox
cfg.ui.macroclass_checkBox_rf.stateChanged.connect(cfg.rndmFrst.macroclassCheckbox)
cfg.ui.class_checkBox_rf.stateChanged.connect(cfg.rndmFrst.classCheckbox)
cfg.ui.classifier_Button.clicked.connect(cfg.rndmFrst.selectRFClassifier)
# connect to reset classifier
cfg.ui.resetClassifierButton.clicked.connect(cfg.rndmFrst.resetRFClassifier)
''' Vector to Raster tab '''
cfg.ui.toolButton_reload_16.clicked.connect(cfg.vctRstrT.reloadVectorList)
cfg.ui.toolButton_reload_17.clicked.connect(cfg.utls.refreshClassificationLayer)
cfg.ui.convert_vector_toolButton.clicked.connect(cfg.vctRstrT.convertToRasterAction)
cfg.ui.vector_to_raster.clicked.connect(cfg.batchT.setFunctionButton)
cfg.ui.vector_name_combo.currentIndexChanged.connect(cfg.utls.refreshVectorFields)
cfg.ui.field_checkBox.stateChanged.connect(cfg.vctRstrT.checkboxFieldChanged)
cfg.ui.constant_value_checkBox.stateChanged.connect(cfg.vctRstrT.checkboxConstantValueChanged)
''' Post processing tab '''
''' accuracy tab '''
# connect the classification combo
cfg.ui.classification_name_combo.currentIndexChanged.connect(cfg.acc.classificationLayerName)
# connect to refresh button
cfg.ui.toolButton_reload_4.clicked.connect(cfg.utls.refreshClassificationLayer)
# connect the reference combo
cfg.ui.reference_name_combo.currentIndexChanged.connect(cfg.acc.referenceLayerName)
# connect to refresh button
cfg.ui.buttonReload_shape_4.clicked.connect(cfg.acc.refreshReferenceLayer)
# connect to calculate error matrix button
cfg.ui.calculateMatrix_toolButton.clicked.connect(cfg.acc.calculateErrorMatrix)
cfg.ui.accuracy.clicked.connect(cfg.batchT.setFunctionButton)
''' Land cover change '''
# connect to refresh button reference classification
cfg.ui.toolButton_reload_5.clicked.connect(cfg.landCC.refreshClassificationReferenceLayer)
# connect to refresh button new classification
cfg.ui.toolButton_reload_6.clicked.connect(cfg.landCC.refreshNewClassificationLayer)
# connect the classification reference combo
cfg.ui.classification_reference_name_combo.currentIndexChanged.connect(cfg.landCC.classificationReferenceLayerName)
# connect the new classification combo
cfg.ui.new_classification_name_combo.currentIndexChanged.connect(cfg.landCC.newClassificationLayerName)
# connect the mask unchanged checkBox
cfg.ui.mask_unchanged_checkBox.stateChanged.connect(cfg.landCC.maskUnchangedCheckbox)
# connect to calculate land cover change button
cfg.ui.calculateLandCoverChange_toolButton.clicked.connect(cfg.landCC.landCoverChangeAction)
cfg.ui.land_cover_change.clicked.connect(cfg.batchT.setFunctionButton)
''' Classification report '''
# connect to refresh button
cfg.ui.toolButton_reload_10.clicked.connect(cfg.utls.refreshClassificationLayer)
# connect to calculate button
cfg.ui.calculateReport_toolButton.clicked.connect(cfg.classRep.calculateClassReport)
cfg.ui.classification_report.clicked.connect(cfg.batchT.setFunctionButton)
''' Band set combination tab '''
# connect to calculate button
cfg.ui.calculateBandSetComb_toolButton.clicked.connect(cfg.bsComb.calculateBandSetCombination)
cfg.ui.band_combination.clicked.connect(cfg.batchT.setFunctionButton)
''' Cross classification tab '''
# connect the classification combo
cfg.ui.classification_name_combo_2.currentIndexChanged.connect(cfg.crossC.classificationLayerName)
# connect to refresh button
cfg.ui.toolButton_reload_21.clicked.connect(cfg.utls.refreshClassificationLayer)
# connect the reference combo
cfg.ui.reference_name_combo_2.currentIndexChanged.connect(cfg.crossC.referenceLayerName)
# connect to refresh button
cfg.ui.buttonReload_shape_5.clicked.connect(cfg.crossC.refreshReferenceLayer)
# connect to calculate error matrix button
cfg.ui.calculatecrossClass_toolButton.clicked.connect(cfg.crossC.calculateCrossClassification)
cfg.ui.cross_classification.clicked.connect(cfg.batchT.setFunctionButton)
''' Class signature '''
# connect to calculate signature
cfg.ui.class_signature_Button.clicked.connect(cfg.classSigT.calculateClassSignatureAction)
cfg.ui.class_signature.clicked.connect(cfg.batchT.setFunctionButton)
# connect to refresh button
cfg.ui.toolButton_reload_22.clicked.connect(cfg.utls.refreshClassificationLayer)
''' Classification to vector '''
# connect to refresh button
cfg.ui.toolButton_reload_12.clicked.connect(cfg.utls.refreshClassificationLayer)
# connect to convert button
cfg.ui.convert_toolButton.clicked.connect(cfg.classVect.convertClassificationToVectorAction)
cfg.ui.classification_to_vector.clicked.connect(cfg.batchT.setFunctionButton)
''' Reclassification '''
# connect to refresh button
cfg.ui.toolButton_reload_11.clicked.connect(cfg.utls.refreshClassificationLayer)
# connect to reclassify button
cfg.ui.reclassify_toolButton.clicked.connect(cfg.reclassification.reclassifyAction)
cfg.ui.reclassification.clicked.connect(cfg.batchT.setFunctionButton)
# connect to calculate unique values button
cfg.ui.calculate_unique_values_toolButton.clicked.connect(cfg.reclassification.calculateUniqueValues)
# connect to incremental new values button
cfg.ui.incremental_new_values_toolButton.clicked.connect(cfg.reclassification.incrementalNewValues)
# connect to add value button
cfg.ui.add_value_pushButton.clicked.connect(cfg.reclassification.addRowToTable)
# connect to remove point
cfg.ui.remove_row_pushButton.clicked.connect(cfg.reclassification.removePointFromTable)
# connect to import reclassification button
cfg.ui.import_reclass_toolButton.clicked.connect(cfg.reclassification.importReclass)
# connect to export reclassification button
cfg.ui.export_reclass_toolButton.clicked.connect(cfg.reclassification.exportReclass)
# connect to edited cell
cfg.ui.reclass_values_tableWidget.cellChanged.connect(cfg.reclassification.editedCell)
''' Edit Raster tab'''
# connect to set value
cfg.ui.raster_set_value_toolButton.clicked.connect(cfg.editRstr.setRasterValueAction)
cfg.ui.edit_raster_using_vector.clicked.connect(cfg.batchT.setFunctionButton)
# connect to refresh rasters button
cfg.ui.toolButton_reload_14.clicked.connect(cfg.utls.refreshClassificationLayer)
cfg.ui.undo_edit_Button.clicked.connect(cfg.editRstr.undoEdit)
# connect the expression text
cfg.ui.expression_lineEdit.textChanged.connect(cfg.editRstr.textChanged)
cfg.ui.use_constant_val_checkBox.stateChanged.connect(cfg.editRstr.checkboxConstantValChanged)
cfg.ui.use_field_vector_checkBox.stateChanged.connect(cfg.editRstr.checkboxVectorFieldChanged)
cfg.ui.use_expression_checkBox.stateChanged.connect(cfg.editRstr.checkboxUseExpressionChanged)
cfg.ui.edit_val_use_ROI_radioButton.clicked.connect(cfg.editRstr.radioUseROIPolygonChanged)
cfg.ui.edit_val_use_vector_radioButton.clicked.connect(cfg.editRstr.radioUseVectorChanged)
cfg.ui.toolButton_reload_20.clicked.connect(cfg.editRstr.reloadVectorList)
cfg.ui.vector_name_combo_2.currentIndexChanged.connect(cfg.utls.refreshVectorFields2)
''' Classification sieve tab'''
# connect to refresh rasters button
cfg.ui.toolButton_reload_15.clicked.connect(cfg.utls.refreshClassificationLayer)
cfg.ui.sieve_toolButton.clicked.connect(cfg.sieveRstr.sieveClassificationAction)
cfg.ui.classification_sieve.clicked.connect(cfg.batchT.setFunctionButton)
''' Classification erosion tab'''
# connect to refresh rasters button
cfg.ui.toolButton_reload_18.clicked.connect(cfg.utls.refreshClassificationLayer)
cfg.ui.class_erosion_toolButton.clicked.connect(cfg.ersnRstr.erosionClassificationAction)
cfg.ui.classification_erosion.clicked.connect(cfg.batchT.setFunctionButton)
# connect the value text
cfg.ui.erosion_classes_lineEdit.textChanged.connect(cfg.ersnRstr.textChanged)
''' Classification dilation tab'''
# connect to refresh rasters button
cfg.ui.toolButton_reload_19.clicked.connect(cfg.utls.refreshClassificationLayer)
cfg.ui.class_dilation_toolButton.clicked.connect(cfg.dltnRstr.dilationClassificationAction)
cfg.ui.classification_dilation.clicked.connect(cfg.batchT.setFunctionButton)
# connect the value text
cfg.ui.dilation_classes_lineEdit.textChanged.connect(cfg.dltnRstr.textChanged)
''' Classification zonal stat tab'''
# connect to refresh rasters button
cfg.ui.toolButton_reload_24.clicked.connect(cfg.utls.refreshClassificationLayer)
cfg.ui.buttonReload_shape_6.clicked.connect(cfg.znlSttRstT.refreshReferenceLayer)
cfg.ui.zonal_stat_raster_toolButton.clicked.connect(cfg.znlSttRstT.zonalStatRasterAction)
cfg.ui.zonal_stat_raster.clicked.connect(cfg.batchT.setFunctionButton)
# connect the classification combo
cfg.ui.classification_name_combo_5.currentIndexChanged.connect(cfg.znlSttRstT.classificationLayerName)
# connect the reference combo
cfg.ui.reference_name_combo_3.currentIndexChanged.connect(cfg.znlSttRstT.referenceLayerName)
''' Band Calc tab '''
# connect to refresh button
cfg.ui.toolButton_reload_13.clicked.connect(cfg.bCalc.rasterBandName)
# connect to calc button
cfg.ui.toolButton_calculate.clicked.connect(cfg.bCalc.calculateButton)
cfg.ui.band_calc.clicked.connect(cfg.batchT.setFunctionButton)
# connect to import expression button
cfg.ui.toolButton_import_expression.clicked.connect(cfg.bCalc.importExpressionList)
# connect the expression text
cfg.ui.plainTextEdit_calc.textChanged.connect(cfg.bCalc.textChanged)
# connect double click table
cfg.ui.tableWidget_band_calc.doubleClicked.connect(cfg.bCalc.doubleClick)
# connect the intersection checkBox
cfg.ui.intersection_checkBox.stateChanged.connect(cfg.bCalc.intersectionCheckbox)
# connect the extent checkBox
cfg.ui.extent_checkBox.stateChanged.connect(cfg.bCalc.extentCheckbox)
# connect to raster type combo
cfg.ui.raster_type_combo.currentIndexChanged.connect(cfg.bCalc.setRasterType)
# connect to expression buttons
cfg.ui.toolButton_plus.clicked.connect(cfg.bCalc.buttonPlus)
cfg.ui.toolButton_minus.clicked.connect(cfg.bCalc.buttonMinus)
cfg.ui.toolButton_product.clicked.connect(cfg.bCalc.buttonProduct)
cfg.ui.toolButton_ratio.clicked.connect(cfg.bCalc.buttonRatio)
cfg.ui.toolButton_power.clicked.connect(cfg.bCalc.buttonPower)
cfg.ui.toolButton_sqrt.clicked.connect(cfg.bCalc.buttonSQRT)
cfg.ui.toolButton_lbracket.clicked.connect(cfg.bCalc.buttonLbracket)
cfg.ui.toolButton_rbracket.clicked.connect(cfg.bCalc.buttonRbracket)
cfg.ui.toolButton_greater.clicked.connect(cfg.bCalc.buttonGreater)
cfg.ui.toolButton_less.clicked.connect(cfg.bCalc.buttonLower)
cfg.ui.toolButton_equal.clicked.connect(cfg.bCalc.buttonEqual)
cfg.ui.toolButton_unequal.clicked.connect(cfg.bCalc.buttonUnequal)
cfg.ui.band_calc_function_tableWidget.doubleClicked.connect(cfg.bCalc.setFunction)
# decision rules
cfg.ui.decision_rules_tableWidget.cellChanged.connect(cfg.bCalc.editedDecisionRulesTable)
cfg.ui.band_calc_tabWidget.currentChanged.connect(cfg.bCalc.tabChanged)
# connect to add rule
cfg.ui.add_rule_toolButton.clicked.connect(cfg.bCalc.addRowToTable)
cfg.ui.remove_rule_toolButton.clicked.connect(cfg.bCalc.removeHighlightedRule)
# connect to clear button
cfg.ui.clear_rules_toolButton.clicked.connect(cfg.bCalc.clearRulesAction)
cfg.ui.export_rules_toolButton.clicked.connect(cfg.bCalc.exportRules)
cfg.ui.import_rules_toolButton.clicked.connect(cfg.bCalc.importRules)
cfg.ui.move_up_toolButton_2.clicked.connect(cfg.bCalc.moveUpRule)
cfg.ui.move_down_toolButton_2.clicked.connect(cfg.bCalc.moveDownRule)
# connect to filter
cfg.ui.bandcalc_filter_lineEdit.textChanged.connect(cfg.bCalc.filterTable)
''' Batch tab '''
# connect the batch text
#cfg.ui.plainTextEdit_batch.textChanged.connect(cfg.batchT.textChanged)
# connect to calc button
cfg.ui.toolButton_run_batch.clicked.connect(cfg.batchT.runButton)
cfg.ui.check_batch.clicked.connect(cfg.batchT.textChanged)
cfg.ui.clear_batch_toolButton.clicked.connect(cfg.batchT.clearBatch)
cfg.ui.export_batch_toolButton.clicked.connect(cfg.batchT.exportBatch)
cfg.ui.import_batch_toolButton.clicked.connect(cfg.batchT.importBatch)
# connect to table double click
cfg.ui.batch_tableWidget.doubleClicked.connect(cfg.batchT.setFunction)
''' Settings tab '''
# connect the ID field name line
cfg.ui.ID_field_name_lineEdit.textChanged.connect(cfg.sets.IDFieldNameChange)
# connect the macroclass ID field name line
cfg.ui.MID_field_name_lineEdit.textChanged.connect(cfg.sets.MacroIDFieldNameChange)
# connect the macroclass Info field name line
cfg.ui.MCInfo_field_name_lineEdit.textChanged.connect(cfg.sets.MacroInfoFieldNameChange)
# connect the Info field name line
cfg.ui.Info_field_name_lineEdit.textChanged.connect(cfg.sets.InfoFieldNameChange)
# connect the variable name line
cfg.ui.variable_name_lineEdit.textChanged.connect(cfg.sets.VariableNameChange)
# connect the group name line
cfg.ui.group_name_lineEdit.textChanged.connect(cfg.sets.GroupNameChange)
# connect the SMTP line
cfg.ui.smtp_server_lineEdit.textChanged.connect(cfg.sets.SMTPServerChange)
# connect the SMTP to emails line
cfg.ui.to_email_lineEdit.textChanged.connect(cfg.sets.SMTPtoEmailsChange)
# connect the SMTP user
cfg.ui.smtp_user_lineEdit.editingFinished.connect(cfg.sets.rememberUser)
# connect the SMTP password
cfg.ui.smtp_password_lineEdit.editingFinished.connect(cfg.sets.rememberUser)
# connect the remember user settings checkBox
cfg.ui.remeber_settings_checkBox.stateChanged.connect(cfg.sets.rememberUserCheckbox)
# connect the SMTP checkBox
cfg.ui.smtp_checkBox.stateChanged.connect(cfg.sets.SMTPCheckbox)
# connect to reset field names button
cfg.ui.reset_field_names_Button.clicked.connect(cfg.sets.resetFieldNames)
# connect to reset variable name button
cfg.ui.reset_variable_name_Button.clicked.connect(cfg.sets.resetVariableName)
# connect to reset group name button
cfg.ui.reset_group_name_Button.clicked.connect(cfg.sets.resetGroupName)
# connect the log file checkBox
cfg.ui.log_checkBox.stateChanged.connect(cfg.sets.logCheckbox)
# connect the download news checkBox
cfg.ui.download_news_checkBox.stateChanged.connect(cfg.sets.downloadNewsCheckbox)
# connect the virtual raster checkBox
cfg.ui.virtual_raster_load_checkBox.stateChanged.connect(cfg.sets.virtualRasterCheckbox)
# connect the sound checkBox
cfg.ui.sound_checkBox.stateChanged.connect(cfg.sets.soundCheckbox)
# connect the virtual raster format checkBox
cfg.ui.virtual_raster_checkBox.stateChanged.connect(cfg.sets.virtualRasterFormatCheckbox)
# connect the raster compression checkBox
cfg.ui.raster_compression_checkBox.stateChanged.connect(cfg.sets.rasterCompressionCheckbox)
# connect the parallel writing checkBox
cfg.ui.parallel_writing_checkBox.stateChanged.connect(cfg.sets.parallelWritingCheckbox)
# connect to change temporary directory button
cfg.ui.temp_directory_Button.clicked.connect(cfg.sets.changeTempDir)
# connect to reset temporary directory button
cfg.ui.reset_temp_directory_Button.clicked.connect(cfg.sets.resetTempDir)
# connect to clear log button
cfg.ui.clearLog_Button.clicked.connect(cfg.utls.clearLogFile)
# connect to export log button
cfg.ui.exportLog_Button.clicked.connect(cfg.sets.copyLogFile)
# connect to test dependencies button
cfg.ui.test_dependencies_Button.clicked.connect(cfg.sets.testDependencies)
# connect to RAM spinbox
cfg.ui.RAM_spinBox.valueChanged.connect(cfg.sets.RAMSettingChange)
# connect to thread spinbox
cfg.ui.CPU_spinBox.valueChanged.connect(cfg.sets.threadSettingChange)
# connect the Python path line
cfg.ui.python_path_lineEdit.textChanged.connect(cfg.sets.PythonPathSettingChange)
# connect the Python modules path line
cfg.ui.python_path_lineEdit_2.textChanged.connect(cfg.sets.PythonModulePathSettingChange)
# connect the GDAL path line
cfg.ui.gdal_path_lineEdit.textChanged.connect(cfg.sets.GDALPathSettingChange)
# connect to change color button
cfg.ui.change_color_Button.clicked.connect(cfg.sets.changeROIColor)
# connect to reset color button
cfg.ui.reset_color_Button.clicked.connect(cfg.sets.resetROIStyle)
# connect to transparency slider
cfg.ui.transparency_Slider.valueChanged.connect(cfg.sets.changeROITransparency)
# first install
if cfg.firstInstallVal == 'Yes':
cfg.utls.welcomeTab()
cfg.utls.setQGISRegSetting(cfg.regFirstInstall, 'No')
cfg.utls.findAvailableRAM()
cfg.utls.findAvailableProcessors()
# welcome message
lWelcome = cfg.plgnDir + '/ui/welcome.html'
htmlTextF = open(lWelcome, 'r')
htmlText = htmlTextF.read()
cfg.uidc.main_textBrowser.clear()
cfg.uidc.main_textBrowser.setHtml(htmlText)
htmlTextF.close()
if cfg.osSCP.path.isfile(cfg.plgnDir + '/firstrun'):
cfg.ipt.welcomeText('https://semiautomaticgit.github.io/SemiAutomaticClassificationPluginWelcome/changelog.html')
cfg.osSCP.remove(cfg.plgnDir + '/firstrun')
else:
dateV = cfg.datetimeSCP.datetime.now()
dStr = dateV.strftime('%Y_%m_%d')
cfg.ipt.welcomeText('https://semiautomaticgit.github.io/SemiAutomaticClassificationPluginWelcome/welcome' + '_' + dStr + '.html', 'https://semiautomaticgit.github.io/SemiAutomaticClassificationPluginWelcome/welcome.html')
cfg.utls.cleanOldTempDirectory()
cfg.skipRegistry = False
else:
dockclassdlg = DockClassDialog(qgisUtils.iface.mainWindow(), qgisUtils.iface)
qgisUtils.iface.removeDockWidget(dockclassdlg)
# save signature list when saving project
def projectSaved(self):
if cfg.skipProjectSaved == 'No':
if len(cfg.signIDs) > 0:
cfg.SCPD.saveSignatureListToFile()
if cfg.scpFlPath is not None:
cfg.SCPD.saveMemToSHP(cfg.shpLay)
cfg.utls.zipDirectoryInFile(cfg.scpFlPath, cfg.inptDir)
cfg.downProd.saveDownloadTable()
try:
scpPath = cfg.utls.readProjectVariable('trainingLayer', '')
name = cfg.utls.fileNameNoExt(scpPath)
duplicateID = cfg.utls.layerID(name, cfg.shpLay.id())
cfg.qgisCoreSCP.QgsProject.instance().removeMapLayer(duplicateID)
except:
pass
# reset all variables and interface
def resetSCP(self):
# logger
cfg.utls.logToFile(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3])+ ' ' + cfg.utls.lineOfCode(), 'LOG ACTIVE' + cfg.sysSCPInfo)
cfg.scpFlPath = None
cfg.ui.image_raster_name_combo.blockSignals(True)
cfg.ui.Band_set_tabWidget.blockSignals(True)
cfg.rasterComboEdited = 'No'
cfg.projPath = cfg.qgisCoreSCP.QgsProject.instance().fileName()
cfg.lastSaveDir = cfg.osSCP.path.dirname(cfg.projPath)
cfg.signList = {}
cfg.signIDs = {}
cfg.spectrPlotList = {}
cfg.signPlotIDs = {}
cfg.scatterPlotIDs = {}
cfg.scatterPlotList = {}
cfg.undoIDList = {}
cfg.undoSpectrPlotList = {}
cfg.lstROI = None
cfg.lstROI2 = None
cfg.rpdROICheck = '2'
cfg.vegIndexCheck = 2
cfg.sigClcCheck = 2
cfg.utls.clearTable(cfg.uisp.signature_list_plot_tableWidget)
cfg.utls.clearTable(cfg.uiscp.scatter_list_plot_tableWidget)
cfg.utls.clearTable(cfg.ui.signature_threshold_tableWidget)
cfg.utls.clearTable(cfg.ui.download_images_tableWidget)
cfg.utls.clearTable(cfg.ui.LCS_tableWidget)
cfg.treeDockItm = {}
cfg.treeDockMCItm = {}
cfg.SCPD.clearTree()
cfg.scaPlT.scatterPlotListTable(cfg.uiscp.scatter_list_plot_tableWidget)
cfg.spSigPlot.refreshPlot()
cfg.LCSignT.LCSignatureThresholdListTable()
# reload layers in combos
cfg.ipt.refreshRasterLayer()
cfg.utls.refreshVectorLayer()
cfg.utls.refreshClassificationLayer()
cfg.utls.refreshRasterExtent()
cfg.acc.refreshReferenceLayer()
cfg.crossC.refreshReferenceLayer()
cfg.znlSttRstT.refreshReferenceLayer()
cfg.znlSttRstT.loadStatisticCombo()
cfg.clssNghbr.loadStatisticCombo()
cfg.landCC.refreshClassificationReferenceLayer()
cfg.landCC.refreshNewClassificationLayer()
# read variables
cfg.utls.readVariables()
# set ROI color
cfg.ui.change_color_Button.setStyleSheet('background-color :' + cfg.ROIClrVal)
# set ROI transparency
cfg.ui.transparency_Slider.setValue(cfg.ROITrnspVal)
# set RAM value
cfg.ui.RAM_spinBox.setValue(cfg.RAMValue)
# set CPU value
cfg.ui.CPU_spinBox.setValue(cfg.threads)
# rapid ROI band
cfg.uidc.rapidROI_band_spinBox.setValue(int(cfg.ROIband))
# min ROI size
cfg.Min_region_size_spin.setValue(int(cfg.minROISz))
# max ROI width
cfg.Max_ROI_width_spin.setValue(int(cfg.maxROIWdth))
# range radius
cfg.Range_radius_spin.setValue(float(cfg.rngRad))
# ROI ID field
cfg.uidc.ROI_ID_spin.setValue(int(cfg.ROIID))
# ROI macro ID field
cfg.uidc.ROI_Macroclass_ID_spin.setValue(int(cfg.ROIMacroID))
# preview size
cfg.preview_size_spinBox.setValue(float(cfg.prvwSz))
# set ID field name line
cfg.ui.ID_field_name_lineEdit.setText(cfg.fldID_class)
cfg.ui.MID_field_name_lineEdit.setText(cfg.fldMacroID_class)
# set Info field name line
cfg.ui.Info_field_name_lineEdit.setText(cfg.fldROI_info)
cfg.ui.MCInfo_field_name_lineEdit.setText(cfg.fldROIMC_info)
cfg.ui.variable_name_lineEdit.setText(cfg.variableName)
cfg.ui.group_name_lineEdit.setText(cfg.grpNm)
# gdal path
cfg.ui.gdal_path_lineEdit.setText(cfg.gdalPath)
cfg.ui.python_path_lineEdit.setText(cfg.PythonPathSettings)
cfg.ui.python_path_lineEdit_2.setText(cfg.PythonModulesPathSettings)
# set rapid ROI checkbox state
try:
cfg.uidc.rapid_ROI_checkBox.setCheckState(int(cfg.rpdROICheck))
except:
pass
# set vegetation index calculation checkbox state
try:
cfg.uidc.display_cursor_checkBox.setCheckState(int(cfg.vegIndexCheck))
except:
pass
# set signature calculation checkbox state
try:
cfg.uidc.signature_checkBox.setCheckState(int(cfg.sigClcCheck))
cfg.ui.signature_checkBox2.setCheckState(int(cfg.sigClcCheck))
except:
pass
# set save input checkbox state
try:
cfg.uidc.save_input_checkBox.setCheckState(int(cfg.saveInputCheck))
except:
pass
# load classification algorithm
idAlg = cfg.ui.algorithm_combo.findText(cfg.algName)
if idAlg >= 0:
cfg.ui.algorithm_combo.setCurrentIndex(idAlg)
else:
cfg.ui.algorithm_combo.setCurrentIndex(0)
cfg.algName = cfg.algMinDist
# ROI info
cfg.uidc.ROI_Class_line.setText(cfg.ROIInfo)
cfg.uidc.ROI_Macroclass_line.setText(cfg.ROIMacroClassInfo)
cfg.uidc.custom_index_lineEdit.setText(cfg.customExpression)
# RGB list
cfg.RGBLT.RGBListTable(cfg.RGBList)
# reload raster bands in checklist
cfg.bst.rasterBandName()
cfg.rasterComboEdited = 'Yes'
cfg.ui.image_raster_name_combo.blockSignals(False)
cfg.ui.Band_set_tabWidget.blockSignals(False)
# new project
def newProjectLoaded(self):
# clear band set
t = cfg.ui.Band_set_tabWidget.count()
for index in reversed(list(range(0, t))):
cfg.bst.deleteBandSetTab(index)
self.resetSCP()
cfg.bCalc.rasterBandName()
cfg.SCPD.openInput()
cfg.bstLT.BandSetListTable()
# read project variables
def projectLoaded(self):
self.resetSCP()
# load product download table
cfg.downProd.openDownloadTable()
cfg.bCalc.rasterBandName()
cfg.SCPD.openInput()
cfg.bstLT.BandSetListTable()
# run
def run(self):
# show the dialog
cfg.dlg.show()
# Run the dialog event loop
pointer_result = cfg.dlg.exec_()
# remove plugin menu and icon
def unload(self):
cfg.utls.createBackupFile(cfg.scpFlPath)
# save window size
try:
cfg.utls.setQGISRegSetting(cfg.regWindowSizeW, cfg.dlg.size().width())
cfg.utls.setQGISRegSetting(cfg.regWindowSizeH, cfg.dlg.size().height())
except:
pass
try:
qgisUtils.iface.removeDockWidget(cfg.dockclassdlg)
del cfg.toolBar2
del cfg.toolBar3
cfg.menu.deleteLater()
# remove temp files
if cfg.tmpDir is not None and cfg.QDirSCP(cfg.tmpDir).exists():
cfg.shutilSCP.rmtree(cfg.tmpDir, True)
oDir = cfg.utls.makeDirectory(str(cfg.QDirSCP.tempPath() + '/' + cfg.tempDirName))
except:
if PluginCheck == 'Yes':
qgisUtils.iface.messageBar().pushMessage('Semi-Automatic Classification Plugin', QApplication.translate('semiautomaticclassificationplugin', 'Please, restart QGIS for executing the Semi-Automatic Classification Plugin'), level=qgisCore.Qgis.Info)
|
gpl-3.0
| 1,716,961,368,861,842,000 | 50.911423 | 266 | 0.759837 | false |
aurule/npc
|
npc/gui/uis/main_window.py
|
1
|
9243
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'npc/gui/uis/main_window.ui'
#
# Created by: PyQt5 UI code generator 5.7.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(417, 392)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(417, 392))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName("verticalLayout")
self.characterSearch = QtWidgets.QLineEdit(self.centralwidget)
self.characterSearch.setClearButtonEnabled(True)
self.characterSearch.setObjectName("characterSearch")
self.verticalLayout.addWidget(self.characterSearch)
self.characterTableView = QtWidgets.QTableView(self.centralwidget)
self.characterTableView.setStyleSheet("QTableView::item {\n"
" padding: 0 6px;\n"
"}")
self.characterTableView.setAlternatingRowColors(True)
self.characterTableView.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.characterTableView.setShowGrid(False)
self.characterTableView.setObjectName("characterTableView")
self.characterTableView.verticalHeader().setVisible(False)
self.verticalLayout.addWidget(self.characterTableView)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 417, 30))
self.menubar.setObjectName("menubar")
self.menuFile = QtWidgets.QMenu(self.menubar)
self.menuFile.setLayoutDirection(QtCore.Qt.LeftToRight)
self.menuFile.setObjectName("menuFile")
self.menuOpen_Recent_Campaign = QtWidgets.QMenu(self.menuFile)
icon = QtGui.QIcon.fromTheme("document-open-recent")
self.menuOpen_Recent_Campaign.setIcon(icon)
self.menuOpen_Recent_Campaign.setObjectName("menuOpen_Recent_Campaign")
self.menuHelp = QtWidgets.QMenu(self.menubar)
self.menuHelp.setObjectName("menuHelp")
self.menuCampaign = QtWidgets.QMenu(self.menubar)
self.menuCampaign.setObjectName("menuCampaign")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionAbout = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("help-about")
self.actionAbout.setIcon(icon)
self.actionAbout.setObjectName("actionAbout")
self.actionQuit = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("application-exit")
self.actionQuit.setIcon(icon)
self.actionQuit.setObjectName("actionQuit")
self.actionUserSettings = QtWidgets.QAction(MainWindow)
self.actionUserSettings.setObjectName("actionUserSettings")
self.actionOpenCampaign = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("folder-open")
self.actionOpenCampaign.setIcon(icon)
self.actionOpenCampaign.setObjectName("actionOpenCampaign")
self.actionCampaignSettings = QtWidgets.QAction(MainWindow)
self.actionCampaignSettings.setObjectName("actionCampaignSettings")
self.actionReloadSettings = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("view-refresh")
self.actionReloadSettings.setIcon(icon)
self.actionReloadSettings.setObjectName("actionReloadSettings")
self.actionInit = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("journal-new")
self.actionInit.setIcon(icon)
self.actionInit.setObjectName("actionInit")
self.actionNew_Character = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("list-add-user")
self.actionNew_Character.setIcon(icon)
self.actionNew_Character.setObjectName("actionNew_Character")
self.actionNew_Session = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("document-new")
self.actionNew_Session.setIcon(icon)
self.actionNew_Session.setObjectName("actionNew_Session")
self.actionClear_Recent_Campaigns = QtWidgets.QAction(MainWindow)
icon = QtGui.QIcon.fromTheme("edit-clear")
self.actionClear_Recent_Campaigns.setIcon(icon)
self.actionClear_Recent_Campaigns.setObjectName("actionClear_Recent_Campaigns")
self.actionLatest_Plot = QtWidgets.QAction(MainWindow)
self.actionLatest_Plot.setObjectName("actionLatest_Plot")
self.actionLatest_Session = QtWidgets.QAction(MainWindow)
self.actionLatest_Session.setObjectName("actionLatest_Session")
self.menuFile.addAction(self.actionNew_Character)
self.menuFile.addAction(self.actionNew_Session)
self.menuFile.addAction(self.actionOpenCampaign)
self.menuFile.addAction(self.menuOpen_Recent_Campaign.menuAction())
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionUserSettings)
self.menuFile.addAction(self.actionReloadSettings)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionQuit)
self.menuHelp.addAction(self.actionAbout)
self.menuCampaign.addAction(self.actionLatest_Plot)
self.menuCampaign.addAction(self.actionLatest_Session)
self.menuCampaign.addSeparator()
self.menuCampaign.addAction(self.actionCampaignSettings)
self.menuCampaign.addAction(self.actionInit)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuCampaign.menuAction())
self.menubar.addAction(self.menuHelp.menuAction())
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "NPC"))
self.characterSearch.setToolTip(_translate("MainWindow", "Type to search instantly"))
self.characterSearch.setPlaceholderText(_translate("MainWindow", "Search for characters"))
self.menuFile.setTitle(_translate("MainWindow", "Fi&le"))
self.menuOpen_Recent_Campaign.setTitle(_translate("MainWindow", "&Recent Campaigns"))
self.menuHelp.setTitle(_translate("MainWindow", "&Help"))
self.menuCampaign.setTitle(_translate("MainWindow", "&Campaign"))
self.actionAbout.setText(_translate("MainWindow", "&About NPC"))
self.actionQuit.setText(_translate("MainWindow", "&Quit"))
self.actionQuit.setShortcut(_translate("MainWindow", "Ctrl+Q"))
self.actionUserSettings.setText(_translate("MainWindow", "&User Settings"))
self.actionUserSettings.setToolTip(_translate("MainWindow", "Open user settings"))
self.actionOpenCampaign.setText(_translate("MainWindow", "&Open Campaign..."))
self.actionOpenCampaign.setShortcut(_translate("MainWindow", "Ctrl+O"))
self.actionCampaignSettings.setText(_translate("MainWindow", "&Campaign Settings"))
self.actionCampaignSettings.setToolTip(_translate("MainWindow", "Open campaign settings"))
self.actionReloadSettings.setText(_translate("MainWindow", "R&eload Settings"))
self.actionReloadSettings.setToolTip(_translate("MainWindow", "Reload settings"))
self.actionInit.setText(_translate("MainWindow", "Set &Up..."))
self.actionInit.setToolTip(_translate("MainWindow", "Set up required folders in this campaign"))
self.actionNew_Character.setText(_translate("MainWindow", "&New Character..."))
self.actionNew_Character.setToolTip(_translate("MainWindow", "Create a new character"))
self.actionNew_Character.setShortcut(_translate("MainWindow", "Ctrl+N"))
self.actionNew_Session.setText(_translate("MainWindow", "New &Session"))
self.actionNew_Session.setToolTip(_translate("MainWindow", "Create files for a new game session"))
self.actionNew_Session.setShortcut(_translate("MainWindow", "Ctrl+Shift+N"))
self.actionClear_Recent_Campaigns.setText(_translate("MainWindow", "Clear Items"))
self.actionClear_Recent_Campaigns.setToolTip(_translate("MainWindow", "Clear recent campaigns"))
self.actionLatest_Plot.setText(_translate("MainWindow", "Latest &Plot"))
self.actionLatest_Plot.setToolTip(_translate("MainWindow", "Open the latest plot file"))
self.actionLatest_Session.setText(_translate("MainWindow", "Latest &Session"))
self.actionLatest_Session.setToolTip(_translate("MainWindow", "Open the latest session file"))
from . import images_rc
|
mit
| -1,819,100,779,175,044,000 | 58.632258 | 106 | 0.724332 | false |
NicolasLM/spinach
|
spinach/engine.py
|
1
|
10923
|
from datetime import datetime, timezone
from logging import getLogger
import threading
from .task import Tasks, Batch, Schedulable
from .utils import run_forever, handle_sigterm
from .job import Job, JobStatus, advance_job_status
from .brokers.base import Broker
from .const import DEFAULT_QUEUE, DEFAULT_NAMESPACE, DEFAULT_WORKER_NUMBER
from .worker import Workers
from . import exc
logger = getLogger(__name__)
class Engine:
"""Spinach Engine coordinating a broker with workers.
This class orchestrates all components; it is the one that starts
and terminates the whole machinery.
The Engine can be run in two modes:
- client: synchronously submits jobs.
- worker: asynchronously executes jobs.
Submitting jobs is quite easy, so running the Engine in client mode doesn't
require spawning any thread.
Executing jobs however is a bit more involved, so running the Engine in
worker mode ends up spawning a few threads:
- a few worker threads: they are only responsible for executing the task
function and advancing the job status once it is finished.
- a result notifier thread: sends back the result of job executions to the
Broker backend, acts basically as a client.
- an arbiter thread: fetches jobs from the Broker and gives them to the
workers as well as doing some periodic bookkeeping.
- a Broker subscriber thread: receives notifications from the backend when
something happens, typically a job is enqueued.
- the process main thread: starts all the above threads, then does nothing
waiting for the signal to terminate the threads it started.
This means that a Spinach worker process has at least 5 threads.
:arg broker: instance of a :class:`Broker`
:arg namespace: name of the namespace used by the Engine. When different
Engines use the same Redis server, they must use different namespaces to
isolate themselves.
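A minimal sketch of both modes (``MemoryBroker`` and the decorator
usage below are assumed from a typical spinach setup; adapt to yours):
>>> from spinach import Engine, MemoryBroker
>>> spin = Engine(MemoryBroker(), namespace='demo')
>>> @spin.task(name='add')
... def add(a, b):
...     print(a + b)
>>> spin.schedule(add, 1, 2)  # client mode: just enqueue
>>> spin.start_workers(stop_when_queue_empty=True)  # worker mode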
"""
def __init__(self, broker: Broker, namespace: str=DEFAULT_NAMESPACE):
self._broker = broker
self._broker.namespace = namespace
self._namespace = namespace
self._tasks = Tasks()
self.task = self._tasks.task
self._reset()
def _reset(self):
"""Initialization that must happen before the arbiter is (re)started"""
self._arbiter = None
self._workers = None
self._working_queue = None
self._must_stop = threading.Event()
@property
def namespace(self) -> str:
"""Namespace the Engine uses."""
return self._namespace
def attach_tasks(self, tasks: Tasks):
"""Attach a set of tasks.
A task cannot be scheduled or executed before it is attached to an
Engine.
>>> tasks = Tasks()
>>> spin.attach_tasks(tasks)
"""
if tasks._spin is not None and tasks._spin is not self:
logger.warning('Tasks already attached to a different Engine')
self._tasks.update(tasks)
tasks._spin = self
def execute(self, task: Schedulable, *args, **kwargs):
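"""Execute a task function synchronously, bypassing the broker."""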
return self._tasks.get(task).func(*args, **kwargs)
def schedule(self, task: Schedulable, *args, **kwargs):
"""Schedule a job to be executed as soon as possible.
:arg task: the task or its name to execute in the background
:arg args: args to be passed to the task function
:arg kwargs: kwargs to be passed to the task function
"""
at = datetime.now(timezone.utc)
return self.schedule_at(task, at, *args, **kwargs)
def schedule_at(self, task: Schedulable, at: datetime, *args, **kwargs):
"""Schedule a job to be executed in the future.
:arg task: the task or its name to execute in the background
:arg at: date at which the job should start. It is advised to pass a
timezone aware datetime to lift any ambiguity. However if a
timezone naive datetime is given, it will be assumed to
contain UTC time.
:arg args: args to be passed to the task function
:arg kwargs: kwargs to be passed to the task function
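For example (``send_report`` stands in for any task attached to this
Engine; it is not defined in this module):
>>> from datetime import datetime, timedelta, timezone
>>> at = datetime.now(timezone.utc) + timedelta(hours=2)
>>> spin.schedule_at(send_report, at, user_id=42)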
"""
task = self._tasks.get(task)
job = Job(task.name, task.queue, at, task.max_retries, task_args=args,
task_kwargs=kwargs)
job.task_func = task.func
job.check_signature()
return self._broker.enqueue_jobs([job])
def schedule_batch(self, batch: Batch):
"""Schedule many jobs at once.
Scheduling jobs in batches allows them to be enqueued quickly by
avoiding extra round-trips to the broker.
:arg batch: :class:`Batch` instance containing jobs to schedule
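For example (``my_task`` stands in for any attached task):
>>> from spinach import Batch
>>> batch = Batch()
>>> for i in range(100):
...     batch.schedule(my_task, i)
>>> spin.schedule_batch(batch)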
"""
jobs = list()
for task, at, args, kwargs in batch.jobs_to_create:
task = self._tasks.get(task)
job = Job(
task.name, task.queue, at, task.max_retries,
task_args=args, task_kwargs=kwargs
)
job.task_func = task.func
job.check_signature()
jobs.append(job)
return self._broker.enqueue_jobs(jobs)
def _arbiter_func(self, stop_when_queue_empty=False):
logger.debug('Arbiter started')
self._register_periodic_tasks()
self._broker.set_concurrency_keys(
[task for task in self._tasks.tasks.values()]
)
while not self._must_stop.is_set():
self._broker.move_future_jobs()
received_jobs = 0
available_slots = self._workers.available_slots
logger.debug("Available slots: %s", available_slots)
if available_slots > 0:
logger.debug("Getting jobs from queue %s", self._working_queue)
jobs = self._broker.get_jobs_from_queue(
self._working_queue, available_slots
)
for job in jobs:
logger.debug("Received job: %s", job)
received_jobs += 1
try:
job.task_func = self._tasks.get(job.task_name).func
except exc.UnknownTask as err:
# This is slightly cheating: when a task is unknown
# it doesn't go to workers but is still sent to the
# workers out_queue so that it is processed by the
# notifier.
advance_job_status(self.namespace, job, 0.0, err)
self._workers.out_queue.put(job)
else:
self._workers.submit_job(job)
if (stop_when_queue_empty and available_slots > 0
and received_jobs == 0
and self._broker.is_queue_empty(self._working_queue)):
logger.info("Stopping workers because queue '%s' is empty",
self._working_queue)
self.stop_workers(_join_arbiter=False)
logger.debug('Arbiter terminated')
return
logger.debug('Received %s jobs, now waiting for events',
received_jobs)
self._broker.wait_for_event()
logger.debug('Arbiter terminated')
def start_workers(self, number: int=DEFAULT_WORKER_NUMBER,
queue=DEFAULT_QUEUE, block=True,
stop_when_queue_empty=False):
"""Start the worker threads.
:arg number: number of worker threads to launch
:arg queue: name of the queue to consume, see :doc:`queues`
:arg block: whether to block the calling thread until a signal arrives
and workers get terminated
:arg stop_when_queue_empty: automatically stop the workers when the
queue is empty. Useful mostly for one-off scripts and testing.
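For instance, draining the default queue in a test (a sketch, assuming
jobs were already scheduled):
>>> spin.start_workers(number=2, stop_when_queue_empty=True)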
"""
if self._arbiter or self._workers:
raise RuntimeError('Workers are already running')
self._working_queue = queue
tasks_names = '\n'.join(
[' - ' + task.name for task in self._tasks.tasks.values()
if task.queue == self._working_queue]
)
logger.info('Starting %d workers on queue "%s" with tasks:\n%s',
number, self._working_queue, tasks_names)
# Start the broker
self._broker.start()
# Start workers
self._workers = Workers(
num_workers=number,
namespace=self.namespace,
)
# Start the result notifier
self._result_notifier = threading.Thread(
target=run_forever,
args=(self._result_notifier_func, self._must_stop, logger),
name='{}-result-notifier'.format(self.namespace)
)
self._result_notifier.start()
# Start the arbiter
self._arbiter = threading.Thread(
target=run_forever,
args=(self._arbiter_func, self._must_stop, logger,
stop_when_queue_empty),
name='{}-arbiter'.format(self.namespace)
)
self._arbiter.start()
if block:
with handle_sigterm():
try:
self._arbiter.join()
except KeyboardInterrupt:
self.stop_workers()
except AttributeError:
# Arbiter thread starts and stops immediately when run with
# `stop_when_queue_empty` and queue is already empty.
pass
def stop_workers(self, _join_arbiter=True):
"""Stop the workers and wait for them to terminate."""
# _join_arbiter is used internally when the arbiter is shutting down
# the full engine itself. This is because the arbiter thread cannot
# join itself.
self._must_stop.set()
self._workers.stop()
self._result_notifier.join()
self._broker.stop()
if _join_arbiter:
self._arbiter.join()
self._reset()
def _result_notifier_func(self):
logger.debug('Result notifier started')
while True:
job = self._workers.out_queue.get()
if job is self._workers.poison_pill:
break
if job.status in (JobStatus.SUCCEEDED, JobStatus.FAILED):
self._broker.remove_job_from_running(job)
elif job.status is JobStatus.NOT_SET:
self._broker.enqueue_jobs([job], from_failure=True)
else:
raise RuntimeError('Received job with an incorrect status')
logger.debug('Result notifier terminated')
def _register_periodic_tasks(self):
periodic_tasks = [task for task in self._tasks.tasks.values()
if task.periodicity]
self._broker.register_periodic_tasks(periodic_tasks)
|
bsd-2-clause
| 3,673,688,660,669,416,400 | 37.734043 | 79 | 0.593244 | false |
vhaasteren/PTMCMCSampler
|
PTMCMCSampler/PTMCMCSampler.py
|
1
|
35363
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
import scipy.stats as ss
import os
import sys
import time
from nutsjump import NUTSJump, HMCJump, MALAJump
try:
from mpi4py import MPI
except ImportError:
print('Do not have mpi4py package.')
import nompi4py as MPI
try:
import acor
except ImportError:
print('Do not have acor package')
pass
class PTSampler(object):
"""
Parallel Tempering Markov Chain Monte-Carlo (PTMCMC) sampler.
By default, this implementation uses an adaptive jump proposal scheme
combining standard and single-component Adaptive Metropolis (AM) jumps
with Differential Evolution (DE) jumps.
This implementation also makes use of MPI (mpi4py) to run
the parallel chains.
Along with the AM and DE jumps, the user can add custom
jump proposals with the ``addProposalToCycle`` function.
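A custom proposal is a callable taking the current parameter vector,
the iteration number and the inverse temperature, and returning the
proposed point plus the log proposal density correction (signature
assumed from typical use of this class; 0.0 for a symmetric draw):
>>> def draw_uniform(x, iter, beta):
...     q = x.copy()
...     ii = np.random.randint(len(x))
...     q[ii] = np.random.uniform(-10, 10)
...     return q, 0.0
>>> sampler.addProposalToCycle(draw_uniform, 10)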
@param ndim: number of dimensions in problem
@param logl: log-likelihood function
@param logp: log prior function (must be normalized for evidence evaluation)
@param cov: Initial covariance matrix of model parameters for jump proposals
@param covinds: Indices of parameters for which to perform adaptive jumps
@param loglargs: any additional arguments (apart from the parameter vector) for
log likelihood
@param loglkwargs: any additional keyword arguments (apart from the parameter vector)
for log likelihood
@param logpargs: any additional arguments (apart from the parameter vector) for
log prior
@param logl_grad: log-likelihood function, including gradients
@param logp_grad: prior function, including gradients
@param logpkwargs: any additional keyword arguments (apart from the parameter vector)
for log prior
@param outDir: Full path to output directory for chain files (default = ./chains)
@param verbose: Update current run-status to the screen (default=True)
@param resume: Resume from a previous chain (still in testing so beware) (default=False)
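A minimal usage sketch (the Gaussian likelihood and flat prior are
illustrative placeholders; ``sample`` is assumed to be the usual entry
point of this class, defined outside this excerpt):
>>> ndim = 2
>>> def logl(x):
...     return -0.5 * np.sum(x**2)
>>> def logp(x):
...     return 0.0 if np.all(np.abs(x) < 10) else -np.inf
>>> cov = np.eye(ndim) * 0.1
>>> sampler = PTSampler(ndim, logl, logp, cov, outDir='./chains')
>>> sampler.sample(np.random.randn(ndim), Niter=10000)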
"""
def __init__(self, ndim, logl, logp, cov, groups=None, loglargs=[], loglkwargs={},
logpargs=[], logpkwargs={}, logl_grad=None, logp_grad=None,
comm=MPI.COMM_WORLD, outDir='./chains', verbose=True, resume=False):
# MPI initialization
self.comm = comm
self.MPIrank = self.comm.Get_rank()
self.nchain = self.comm.Get_size()
self.ndim = ndim
self.logl = _function_wrapper(logl, loglargs, loglkwargs)
self.logp = _function_wrapper(logp, logpargs, logpkwargs)
if logl_grad is not None and logp_grad is not None:
self.logl_grad = _function_wrapper(logl_grad, loglargs, loglkwargs)
self.logp_grad = _function_wrapper(logp_grad, logpargs, logpkwargs)
else:
self.logl_grad = None
self.logp_grad = None
self.outDir = outDir
self.verbose = verbose
self.resume = resume
# setup output file
if not os.path.exists(self.outDir):
try:
os.makedirs(self.outDir)
except OSError:
pass
# find indices for which to perform adaptive jumps
self.groups = groups
if groups is None:
self.groups = [np.arange(0, self.ndim)]
# set up covariance matrix
self.cov = cov
self.U = [[]] * len(self.groups)
self.S = [[]] * len(self.groups)
# do svd on parameter groups
for ct, group in enumerate(self.groups):
covgroup = np.zeros((len(group), len(group)))
for ii in range(len(group)):
for jj in range(len(group)):
covgroup[ii, jj] = self.cov[group[ii], group[jj]]
self.U[ct], self.S[ct], v = np.linalg.svd(covgroup)
self.M2 = np.zeros((ndim, ndim))
self.mu = np.zeros(ndim)
# initialize proposal cycle
self.propCycle = []
self.jumpDict = {}
        # indicator for auxiliary jumps
self.aux = []
def initialize(self, Niter, ladder=None, Tmin=1, Tmax=None, Tskip=100,
isave=1000, covUpdate=1000, SCAMweight=30,
AMweight=20, DEweight=50,
NUTSweight=20, HMCweight=20, MALAweight=0,
burn=10000, HMCstepsize=0.1, HMCsteps=300,
maxIter=None, thin=10, i0=0, neff=100000,
writeHotChains=False, hotChain=False):
"""
Initialize MCMC quantities
@param maxIter: maximum number of iterations
        @param Tmin: minimum temperature to use in the temperature ladder
"""
        # get maximum number of iterations
if maxIter is None and self.MPIrank > 0:
maxIter = 2 * Niter
elif maxIter is None and self.MPIrank == 0:
maxIter = Niter
self.ladder = ladder
self.covUpdate = covUpdate
self.SCAMweight = SCAMweight
self.AMweight = AMweight
self.DEweight = DEweight
self.burn = burn
self.Tskip = Tskip
self.thin = thin
self.isave = isave
self.Niter = Niter
self.neff = neff
self.tstart = 0
N = int(maxIter / thin)
self._lnprob = np.zeros(N)
self._lnlike = np.zeros(N)
self._chain = np.zeros((N, self.ndim))
self.naccepted = 0
self.swapProposed = 0
self.nswap_accepted = 0
# set up covariance matrix and DE buffers
# TODO: better way of allocating this to save memory
if self.MPIrank == 0:
self._AMbuffer = np.zeros((self.Niter, self.ndim))
self._DEbuffer = np.zeros((self.burn, self.ndim))
# ##### setup default jump proposal distributions ##### #
# Gradient-based jumps
if self.logl_grad is not None and self.logp_grad is not None:
            # Does MALA do anything with the burn-in? (Adaptation is not enabled yet.)
malajump = MALAJump(self.logl_grad, self.logp_grad, self.cov,
self.burn)
self.addProposalToCycle(malajump, MALAweight)
if MALAweight > 0:
print("WARNING: MALA jumps are not working properly yet")
# Perhaps have an option to adaptively tune the mass matrix?
            # Now that is done by default
hmcjump = HMCJump(self.logl_grad, self.logp_grad, self.cov,
self.burn, stepsize=HMCstepsize, nminsteps=2,
nmaxsteps=HMCsteps)
self.addProposalToCycle(hmcjump, HMCweight)
            # A target acceptance rate (delta) of 0.6 is taken as optimal here
nutsjump = NUTSJump(self.logl_grad, self.logp_grad, self.cov,
self.burn, trajectoryDir=None, write_burnin=False,
force_trajlen=None, force_epsilon=None, delta=0.6)
self.addProposalToCycle(nutsjump, NUTSweight)
# add SCAM
self.addProposalToCycle(self.covarianceJumpProposalSCAM,
self.SCAMweight)
# add AM
self.addProposalToCycle(self.covarianceJumpProposalAM, self.AMweight)
# check length of jump cycle
if len(self.propCycle) == 0:
raise ValueError('No jump proposals specified!')
# randomize cycle
self.randomizeProposalCycle()
# setup default temperature ladder
if self.ladder is None:
self.ladder = self.temperatureLadder(Tmin, Tmax=Tmax)
# temperature for current chain
self.temp = self.ladder[self.MPIrank]
# hot chain sampling from prior
if hotChain and self.MPIrank == self.nchain-1:
self.temp = 1e80
self.fname = self.outDir + '/chain_hot.txt'
else:
self.fname = self.outDir + '/chain_{0}.txt'.format(self.temp)
# write hot chains
self.writeHotChains = writeHotChains
self.resumeLength = 0
if self.resume and os.path.isfile(self.fname):
if self.verbose:
                print('Resuming run from chain file {0}'.format(self.fname))
try:
self.resumechain = np.loadtxt(self.fname)
self.resumeLength = self.resumechain.shape[0]
except ValueError:
                print("WARNING: Can't read in file. Removing last line.")
os.system('sed -ie \'$d\' {0}'.format(self.fname))
self.resumechain = np.loadtxt(self.fname)
self.resumeLength = self.resumechain.shape[0]
self._chainfile = open(self.fname, 'a')
else:
self._chainfile = open(self.fname, 'w')
self._chainfile.close()
def updateChains(self, p0, lnlike0, lnprob0, iter):
"""
Update chains after jump proposals
"""
# update buffer
if self.MPIrank == 0:
self._AMbuffer[iter, :] = p0
# put results into arrays
if iter % self.thin == 0:
ind = int(iter / self.thin)
self._chain[ind, :] = p0
self._lnlike[ind] = lnlike0
self._lnprob[ind] = lnprob0
# write to file
if iter % self.isave == 0 and iter > 1 and iter > self.resumeLength:
if self.writeHotChains or self.MPIrank == 0:
self._writeToFile(iter)
# write output covariance matrix
np.save(self.outDir + '/cov.npy', self.cov)
if self.MPIrank == 0 and self.verbose and iter > 1:
sys.stdout.write('\r')
sys.stdout.write('Finished %2.2f percent in %f s Acceptance rate = %g'
% (iter / self.Niter * 100, time.time() - self.tstart,
self.naccepted / iter))
sys.stdout.flush()
def sample(self, p0, Niter, ladder=None, Tmin=1, Tmax=None, Tskip=100,
isave=1000, covUpdate=1000, SCAMweight=20,
AMweight=20, DEweight=20, NUTSweight=20, MALAweight=20,
HMCweight=20, burn=10000, HMCstepsize=0.1, HMCsteps=300,
maxIter=None, thin=10, i0=0, neff=100000,
writeHotChains=False, hotChain=False):
"""
Function to carry out PTMCMC sampling.
@param p0: Initial parameter vector
        @param Niter: Number of iterations to use for T = 1 chain
@param ladder: User defined temperature ladder
@param Tmin: Minimum temperature in ladder (default=1)
@param Tmax: Maximum temperature in ladder (default=None)
@param Tskip: Number of steps between proposed temperature swaps (default=100)
@param isave: Number of iterations before writing to file (default=1000)
@param covUpdate: Number of iterations between AM covariance updates (default=1000)
@param SCAMweight: Weight of SCAM jumps in overall jump cycle (default=20)
@param AMweight: Weight of AM jumps in overall jump cycle (default=20)
@param DEweight: Weight of DE jumps in overall jump cycle (default=20)
@param NUTSweight: Weight of the NUTS jumps in jump cycle (default=20)
@param MALAweight: Weight of the MALA jumps in jump cycle (default=20)
@param HMCweight: Weight of the HMC jumps in jump cycle (default=20)
@param HMCstepsize: Step-size of the HMC jumps (default=0.1)
@param HMCsteps: Maximum number of steps in an HMC trajectory (default=300)
@param burn: Burn in time (DE jumps added after this iteration) (default=10000)
@param maxIter: Maximum number of iterations for high temperature chains
(default=2*self.Niter)
        @param thin: Save every thin-th MCMC sample
@param i0: Iteration to start MCMC (if i0 !=0, do not re-initialize)
@param neff: Number of effective samples to collect before terminating
"""
        # get maximum number of iterations
if maxIter is None and self.MPIrank > 0:
maxIter = 2 * Niter
elif maxIter is None and self.MPIrank == 0:
maxIter = Niter
        # arrays to store lnprob, lnlike and chain are allocated in initialize()
# if picking up from previous run, don't re-initialize
if i0 == 0:
self.initialize(Niter, ladder=ladder, Tmin=Tmin, Tmax=Tmax,
Tskip=Tskip, isave=isave, covUpdate=covUpdate,
SCAMweight=SCAMweight,
AMweight=AMweight, DEweight=DEweight,
NUTSweight=NUTSweight, MALAweight=MALAweight,
HMCweight=HMCweight, burn=burn,
HMCstepsize=HMCstepsize, HMCsteps=HMCsteps,
maxIter=maxIter, thin=thin, i0=i0,
neff=neff, writeHotChains=writeHotChains,
hotChain=hotChain)
### compute lnprob for initial point in chain ###
# if resuming, just start with first point in chain
if self.resume and self.resumeLength > 0:
p0, lnlike0, lnprob0 = self.resumechain[0, :-4], \
self.resumechain[0, -3], self.resumechain[0, -4]
else:
# compute prior
lp = self.logp(p0)
if lp == float(-np.inf):
lnprob0 = -np.inf
lnlike0 = -np.inf
else:
lnlike0 = self.logl(p0)
lnprob0 = 1 / self.temp * lnlike0 + lp
# record first values
self.updateChains(p0, lnlike0, lnprob0, i0)
self.comm.barrier()
# start iterations
iter = i0
self.tstart = time.time()
runComplete = False
Neff = 0
while runComplete is False:
iter += 1
accepted = 0
# call PTMCMCOneStep
p0, lnlike0, lnprob0 = self.PTMCMCOneStep(
p0, lnlike0, lnprob0, iter)
# compute effective number of samples
if iter % 1000 == 0 and iter > 2 * self.burn and self.MPIrank == 0:
try:
Neff = iter / \
max(1, np.nanmax([acor.acor(self._AMbuffer[self.burn:(iter - 1), ii])[0]
for ii in range(self.ndim)]))
# print '\n {0} effective samples'.format(Neff)
except NameError:
Neff = 0
pass
# stop if reached maximum number of iterations
if self.MPIrank == 0 and iter >= self.Niter - 1:
if self.verbose:
                    print('\nRun Complete')
runComplete = True
# stop if reached effective number of samples
if self.MPIrank == 0 and int(Neff) > self.neff:
if self.verbose:
                    print('\nRun Complete with {0} effective samples'.format(int(Neff)))
runComplete = True
if self.MPIrank == 0 and runComplete:
for jj in range(1, self.nchain):
self.comm.send(runComplete, dest=jj, tag=55)
# check for other chains
if self.MPIrank > 0:
runComplete = self.comm.Iprobe(source=0, tag=55)
                time.sleep(0.000001)  # trick to avoid pegging the CPU while waiting
def PTMCMCOneStep(self, p0, lnlike0, lnprob0, iter):
"""
Function to carry out PTMCMC sampling.
@param p0: Initial parameter vector
@param lnlike0: Initial log-likelihood value
@param lnprob0: Initial log probability value
@param iter: iteration number
@return p0: next value of parameter vector after one MCMC step
@return lnlike0: next value of likelihood after one MCMC step
@return lnprob0: next value of posterior after one MCMC step
"""
# update covariance matrix
        if ((iter - 1) % self.covUpdate == 0 and (iter - 1) != 0
                and self.MPIrank == 0):
self._updateRecursive(iter - 1, self.covUpdate)
# broadcast to other chains
[self.comm.send(self.cov, dest=rank + 1, tag=111) for rank
in range(self.nchain - 1)]
# check for sent covariance matrix from T = 0 chain
getCovariance = self.comm.Iprobe(source=0, tag=111)
time.sleep(0.000001)
if getCovariance and self.MPIrank > 0:
self.cov[:,:] = self.comm.recv(source=0, tag=111)
for ct, group in enumerate(self.groups):
covgroup = np.zeros((len(group), len(group)))
for ii in range(len(group)):
for jj in range(len(group)):
covgroup[ii, jj] = self.cov[group[ii], group[jj]]
self.U[ct], self.S[ct], v = np.linalg.svd(covgroup)
getCovariance = 0
# update DE buffer
        if ((iter - 1) % self.burn == 0 and (iter - 1) != 0
                and self.MPIrank == 0):
self._updateDEbuffer(iter - 1, self.burn)
# broadcast to other chains
[self.comm.send(self._DEbuffer, dest=rank + 1, tag=222) for rank
in range(self.nchain - 1)]
# check for sent DE buffer from T = 0 chain
getDEbuf = self.comm.Iprobe(source=0, tag=222)
time.sleep(0.000001)
if getDEbuf and self.MPIrank > 0:
self._DEbuffer = self.comm.recv(source=0, tag=222)
# randomize cycle
if self.DEJump not in self.propCycle:
self.addProposalToCycle(self.DEJump, self.DEweight)
self.randomizeProposalCycle()
# reset
getDEbuf = 0
# after burn in, add DE jumps
if (iter - 1) == self.burn and self.MPIrank == 0:
if self.verbose:
                print('Adding DE jump with weight {0}'.format(self.DEweight))
self.addProposalToCycle(self.DEJump, self.DEweight)
# randomize cycle
self.randomizeProposalCycle()
### jump proposal ###
# if resuming, just use previous chain points
if self.resume and self.resumeLength > 0 and iter < self.resumeLength:
p0, lnlike0, lnprob0 = self.resumechain[iter, :-4], \
self.resumechain[iter, -3], self.resumechain[iter, -4]
# update acceptance counter
self.naccepted = iter * self.resumechain[iter, -2]
accepted = 1
else:
y, qxy, jump_name = self._jump(p0, iter)
self.jumpDict[jump_name][0] += 1
# compute prior and likelihood
lp = self.logp(y)
if lp == -np.inf:
newlnprob = -np.inf
else:
newlnlike = self.logl(y)
newlnprob = 1 / self.temp * newlnlike + lp
# hastings step
diff = newlnprob - lnprob0 + qxy
if diff > np.log(np.random.rand()):
# accept jump
p0, lnlike0, lnprob0 = y, newlnlike, newlnprob
# update acceptance counter
self.naccepted += 1
accepted = 1
self.jumpDict[jump_name][1] += 1
# temperature swap
swapReturn, p0, lnlike0, lnprob0 = self.PTswap(
p0, lnlike0, lnprob0, iter)
# check return value
if swapReturn != 0:
self.swapProposed += 1
if swapReturn == 2:
self.nswap_accepted += 1
self.updateChains(p0, lnlike0, lnprob0, iter)
return p0, lnlike0, lnprob0
def PTswap(self, p0, lnlike0, lnprob0, iter):
"""
Do parallel tempering swap.
@param p0: current parameter vector
@param lnlike0: current log-likelihood
@param lnprob0: current log posterior value
@param iter: current iteration number
@return swapReturn: 0 = no swap proposed,
1 = swap proposed and rejected,
2 = swap proposed and accepted
@return p0: new parameter vector
@return lnlike0: new log-likelihood
@return lnprob0: new log posterior value
"""
# initialize variables
readyToSwap = 0
swapAccepted = 0
swapProposed = 0
# if Tskip is reached, block until next chain in ladder is ready for
# swap proposal
if iter % self.Tskip == 0 and self.MPIrank < self.nchain - 1:
swapProposed = 1
# send current likelihood for swap proposal
self.comm.send(lnlike0, dest=self.MPIrank + 1, tag=18)
# determine if swap was accepted
swapAccepted = self.comm.recv(source=self.MPIrank + 1, tag=888)
# perform swap
if swapAccepted:
# exchange likelihood
lnlike0 = self.comm.recv(source=self.MPIrank + 1, tag=18)
# exchange parameters
pnew = np.empty(self.ndim)
self.comm.Sendrecv(p0, dest=self.MPIrank+1, sendtag=19,
recvbuf=pnew, source=self.MPIrank+1,
recvtag=19)
p0 = pnew
# calculate new posterior values
lnprob0 = 1 / self.temp * lnlike0 + self.logp(p0)
# check if next lowest temperature is ready to swap
elif self.MPIrank > 0:
readyToSwap = self.comm.Iprobe(source=self.MPIrank - 1, tag=18)
# trick to get around processor using 100% cpu while waiting
time.sleep(0.000001)
# hotter chain decides acceptance
if readyToSwap:
newlnlike = self.comm.recv(source=self.MPIrank - 1, tag=18)
# determine if swap is accepted and tell other chain
logChainSwap = (1 / self.ladder[self.MPIrank - 1] -
1 / self.ladder[self.MPIrank]) \
* (lnlike0 - newlnlike)
if logChainSwap > np.log(np.random.rand()):
swapAccepted = 1
else:
swapAccepted = 0
# send out result
self.comm.send(swapAccepted, dest=self.MPIrank - 1, tag=888)
# perform swap
if swapAccepted:
# exchange likelihood
self.comm.send(lnlike0, dest=self.MPIrank - 1, tag=18)
lnlike0 = newlnlike
# exchange parameters
pnew = np.empty(self.ndim)
self.comm.Sendrecv(p0, dest=self.MPIrank-1, sendtag=19,
recvbuf=pnew, source=self.MPIrank-1,
recvtag=19)
p0 = pnew
# calculate new posterior values
lnprob0 = 1 / self.temp * lnlike0 + self.logp(p0)
# Return values for colder chain: 0=nothing happened; 1=swap proposed,
# not accepted; 2=swap proposed & accepted
if swapProposed:
if swapAccepted:
swapReturn = 2
else:
swapReturn = 1
else:
swapReturn = 0
return swapReturn, p0, lnlike0, lnprob0
def temperatureLadder(self, Tmin, Tmax=None, tstep=None):
"""
Method to compute temperature ladder. At the moment this uses
a geometrically spaced temperature ladder with a temperature
spacing designed to give 25 % temperature swap acceptance rate.
"""
# TODO: make options to do other temperature ladders
if self.nchain > 1:
if tstep is None and Tmax is None:
tstep = 1 + np.sqrt(2 / self.ndim)
elif tstep is None and Tmax is not None:
tstep = np.exp(np.log(Tmax / Tmin) / (self.nchain - 1))
ladder = np.zeros(self.nchain)
for ii in range(self.nchain):
ladder[ii] = Tmin * tstep ** ii
else:
ladder = np.array([1])
return ladder
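    # Worked example (illustrative): with nchain=4, ndim=10, Tmin=1 and no
    # Tmax, tstep = 1 + sqrt(2/10) ~= 1.447, so the geometric ladder is
    # roughly [1.00, 1.45, 2.09, 3.03].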
def _writeToFile(self, iter):
"""
        Function to write chain file. The file has ndim+4 columns: the
        parameter values, followed by log-posterior (unweighted),
        log-likelihood, acceptance rate, and PT swap acceptance rate.
@param iter: Iteration of sampler
"""
self._chainfile = open(self.fname, 'a+')
for jj in range((iter - self.isave), iter, self.thin):
ind = int(jj / self.thin)
pt_acc = 1
if self.MPIrank < self.nchain - 1 and self.swapProposed != 0:
pt_acc = self.nswap_accepted / self.swapProposed
self._chainfile.write('\t'.join(['%22.22f' % (self._chain[ind, kk])
for kk in range(self.ndim)]))
self._chainfile.write('\t%f\t %f\t %f\t %f\t' % (self._lnprob[ind],
self._lnlike[ind],
self.naccepted /
iter, pt_acc))
self._chainfile.write('\n')
self._chainfile.close()
#### write jump statistics files ####
# only for T=1 chain
if self.MPIrank == 0:
            # first write a file containing jump names and jump rates
fout = open(self.outDir + '/jumps.txt', 'w')
njumps = len(self.propCycle)
ujumps = np.unique(self.propCycle)
for jump in ujumps:
fout.write('%s %4.2g\n' % (
jump.__name__,
np.sum(np.array(self.propCycle)==jump)/njumps))
fout.close()
# now write jump statistics for each jump proposal
for jump in self.jumpDict:
fout = open(self.outDir + '/' + jump + '_jump.txt', 'a+')
fout.write('%g\n'%(self.jumpDict[jump][1]/max(1,
self.jumpDict[jump][0])))
fout.close()
# function to update covariance matrix for jump proposals
def _updateRecursive(self, iter, mem):
"""
Function to recursively update sample covariance matrix.
@param iter: Iteration of sampler
@param mem: Number of steps between updates
"""
it = iter - mem
ndim = self.ndim
if it == 0:
self.M2 = np.zeros((ndim, ndim))
self.mu = np.zeros(ndim)
for ii in range(mem):
diff = np.zeros(ndim)
it += 1
for jj in range(ndim):
diff[jj] = self._AMbuffer[iter - mem + ii, jj] - self.mu[jj]
self.mu[jj] += diff[jj] / it
self.M2 += np.outer(diff,
(self._AMbuffer[iter - mem + ii, :] - self.mu))
self.cov[:,:] = self.M2 / (it - 1)
# do svd on parameter groups
for ct, group in enumerate(self.groups):
covgroup = np.zeros((len(group), len(group)))
for ii in range(len(group)):
for jj in range(len(group)):
covgroup[ii, jj] = self.cov[group[ii], group[jj]]
self.U[ct], self.S[ct], v = np.linalg.svd(covgroup)
# update DE buffer samples
def _updateDEbuffer(self, iter, burn):
"""
Update Differential Evolution with last burn
values in the total chain
@param iter: Iteration of sampler
@param burn: Total number of samples in DE buffer
"""
self._DEbuffer = self._AMbuffer[iter - burn:iter]
# SCAM jump
def covarianceJumpProposalSCAM(self, x, iter, beta):
"""
        Single Component Adaptive Metropolis (SCAM) jump proposal. Jumps
        along one eigenvector of the proposal covariance at a time, and
        occasionally uses different jump sizes to ensure proper mixing.
@param x: Parameter vector at current position
@param iter: Iteration of sampler
@param beta: Inverse temperature of chain
@return: q: New position in parameter space
@return: qxy: Forward-Backward jump probability
"""
q = x.copy()
qxy = 0
# choose group
jumpind = np.random.randint(0, len(self.groups))
ndim = len(self.groups[jumpind])
# adjust step size
prob = np.random.rand()
        # large jump
        if prob > 0.97:
            scale = 10
        # small jump
        elif prob > 0.9:
            scale = 0.2
        # small-medium jump
        # elif prob > 0.6:
        #     scale = 0.5
        # standard medium jump
        else:
            scale = 1.0
#scale = np.random.uniform(0.5, 10)
# adjust scale based on temperature
if self.temp <= 100:
scale *= np.sqrt(self.temp)
        # get parameters in the new diagonalized basis
#y = np.dot(self.U.T, x[self.covinds])
# make correlated componentwise adaptive jump
ind = np.unique(np.random.randint(0, ndim, 1))
neff = len(ind)
cd = 2.4 / np.sqrt(2 * neff) * scale
#y[ind] = y[ind] + np.random.randn(neff) * cd * np.sqrt(self.S[ind])
#q[self.covinds] = np.dot(self.U, y)
q[self.groups[jumpind]] += np.random.randn() * cd * np.sqrt(self.S[jumpind][ind]) * \
self.U[jumpind][:, ind].flatten()
return q, qxy
# AM jump
def covarianceJumpProposalAM(self, x, iter, beta):
"""
Adaptive Jump Proposal. This function will occasionally
use different jump sizes to ensure proper mixing.
@param x: Parameter vector at current position
@param iter: Iteration of sampler
@param beta: Inverse temperature of chain
@return: q: New position in parameter space
@return: qxy: Forward-Backward jump probability
"""
q = x.copy()
qxy = 0
# choose group
jumpind = np.random.randint(0, len(self.groups))
ndim = len(self.groups[jumpind])
# adjust step size
prob = np.random.rand()
        # large jump
        if prob > 0.97:
            scale = 10
        # small jump
        elif prob > 0.9:
            scale = 0.2
        # small-medium jump
        # elif prob > 0.6:
        #     scale = 0.5
        # standard medium jump
        else:
            scale = 1.0
# adjust scale based on temperature
if self.temp <= 100:
scale *= np.sqrt(self.temp)
        # get parameters in the new diagonalized basis
y = np.dot(self.U[jumpind].T, x[self.groups[jumpind]])
# make correlated componentwise adaptive jump
ind = np.arange(len(self.groups[jumpind]))
neff = len(ind)
cd = 2.4 / np.sqrt(2 * neff) * scale
y[ind] = y[ind] + \
np.random.randn(neff) * cd * np.sqrt(self.S[jumpind][ind])
q[self.groups[jumpind]] = np.dot(self.U[jumpind], y)
return q, qxy
# Differential evolution jump
def DEJump(self, x, iter, beta):
"""
Differential Evolution Jump. This function will occasionally
use different jump sizes to ensure proper mixing.
@param x: Parameter vector at current position
@param iter: Iteration of sampler
@param beta: Inverse temperature of chain
@return: q: New position in parameter space
@return: qxy: Forward-Backward jump probability
"""
# get old parameters
q = x.copy()
qxy = 0
# choose group
jumpind = np.random.randint(0, len(self.groups))
ndim = len(self.groups[jumpind])
        bufsize = len(self._DEbuffer)
# draw a random integer from 0 - iter
mm = np.random.randint(0, bufsize)
nn = np.random.randint(0, bufsize)
# make sure mm and nn are not the same iteration
while mm == nn:
nn = np.random.randint(0, bufsize)
# get jump scale size
prob = np.random.rand()
# mode jump
if prob > 0.5:
scale = 1.0
else:
scale = np.random.rand() * 2.4 / np.sqrt(2 * ndim) * \
np.sqrt(1 / beta)
for ii in range(ndim):
# jump size
sigma = self._DEbuffer[mm, self.groups[jumpind][ii]] - \
self._DEbuffer[nn, self.groups[jumpind][ii]]
# jump
q[self.groups[jumpind][ii]] += scale * sigma
return q, qxy
# add jump proposal distribution functions
def addProposalToCycle(self, func, weight):
"""
Add jump proposal distributions to cycle with a given weight.
@param func: jump proposal function
@param weight: jump proposal function weight in cycle
"""
# get length of cycle so far
length = len(self.propCycle)
# check for 0 weight
if weight == 0:
            # print('ERROR: Can not have 0 weight in proposal cycle!')
            # sys.exit()
return
# add proposal to cycle
for ii in range(length, length + weight):
self.propCycle.append(func)
# add to jump dictionary and initialize file
if func.__name__ not in self.jumpDict:
self.jumpDict[func.__name__] = [0, 0]
fout = open(self.outDir + '/' + func.__name__ + '_jump.txt', 'w')
fout.close()
    # add auxiliary jump proposal distribution functions
    def addAuxilaryJump(self, func):
        """
        Add an auxiliary jump proposal distribution. This will be called after
        every standard jump proposal. Examples include cyclic boundary
        conditions and pulsar phase fixes.
        @param func: jump proposal function
        """
        # set auxiliary jump
self.aux.append(func)
# randomized proposal cycle
def randomizeProposalCycle(self):
"""
Randomize proposal cycle that has already been filled
"""
# get length of full cycle
length = len(self.propCycle)
        # build a shuffled index over the proposal cycle
index = np.arange(length)
np.random.shuffle(index)
# randomize proposal cycle
self.randomizedPropCycle = [self.propCycle[ind] for ind in index]
# call proposal functions from cycle
def _jump(self, x, iter):
"""
Call Jump proposals
"""
# get length of cycle
length = len(self.propCycle)
# call function
ind = np.random.randint(0, length)
q, qxy = self.propCycle[ind](x, iter, 1/self.temp)
        # auxiliary jumps
if len(self.aux) > 0:
for aux in self.aux:
q, qxy_aux = aux(x, q, iter, 1 / self.temp)
qxy += qxy_aux
return q, qxy, self.propCycle[ind].__name__
# TODO: jump statistics
class _function_wrapper(object):
"""
This is a hack to make the likelihood function pickleable when ``args``
or ``kwargs`` are also included.
"""
def __init__(self, f, args, kwargs):
self.f = f
self.args = args
self.kwargs = kwargs
def __call__(self, x):
return self.f(x, *self.args, **self.kwargs)
|
mit
| 9,100,587,971,638,490,000 | 33.433301 | 100 | 0.55428 | false |
davidzyx/PythonNotes
|
Part I/ch04_notes.py
|
1
|
2075
|
# ch04_notes.py
# Chapter 4 notes taken from Automate the Boring Stuff with Python (2015).pdf
# Created by Davidzz on 7/20/2016
# List data type
primes = [2, 3, 5, 7, 11, 13, 17]
print(primes[0]) # 2
# For negative index number, start counting from the right
print(primes[-1]) # 17
print(primes[0:5]) # [2, 3, 5, 7, 11]
print(primes[3:5]) # [7, 11]
print(primes[4:-1]) # [11, 13]
# length of a list
print('No. of index in primes is: ')
print(len(primes))
# list.Methods
# index() returns the position of the first matching value
print(primes.index(3)) # 1
# value assignment
primes[-1] = primes[0]
primes[-1] = 17
print(primes)
# concatenation
primes = primes + [19, 23]
# remove value from index
del primes[3]
# append and insert
primes.append(29)
primes.insert(3, 7)
# remove the first occurrence of a matching value
primes.remove(29)
# using lists as counters in for loops
for pm in primes:
print(pm)
# including index:
for i in range(len(primes)):
print(str(i) + ': ' + str(primes[i]))
# in & not in operators
print(3 in primes) # True
print(5 not in primes) # False
# multiple assignment
prime1, prime2, prime3 = primes[0:3]
# sorting (numerical)
primes.sort(reverse=True)
print(primes)
primes.sort()
# also works for String sorting
# does not work with strings and numbers mixed in a list
strs = ['a', 'z', 'A', 'Z']
strs.sort()
print(strs)
strs.sort(key=str.lower)
print(strs)
# Methods from Strings and Lists are alike
string = 'String'
print(string[0])
print(string[1:4])
print('s' in string)
print('S' in string)
# In Python 3 (or with `from __future__ import print_function`), this works:
# for k in string:
#     print(k, end=' ')  # prints the characters separated by spaces
# Tuple Data Type
eggs = ('hello', 42, 0.5)
# CANNOT modify values
type(('hello',)) # type == tuple
# converting from and to list
list(eggs)
tuple(eggs)
# List References
spam = [0, 1, 2, 3, 4, 5]
cheese = spam
cheese[1] = 'Hello!'
print(spam)
print(cheese)
# they appear to be the same
# However:
import copy
spam = ['A', 'B', 'C', 'D']
cheese = copy.copy(spam)
cheese.append(42)
print(spam)
print(cheese)
# when copying a list that contains lists,
# use copy.deepcopy()
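# A short illustrative sketch of the difference (added example, not from
# the book):
nested = [[1, 2], [3, 4]]
shallow = copy.copy(nested)    # outer list copied, inner lists still shared
deep = copy.deepcopy(nested)   # inner lists copied as well
nested[0][0] = 99
print(shallow[0][0])  # 99 - the shallow copy sees the change
print(deep[0][0])     # 1 - the deep copy does not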
|
gpl-3.0
| -4,305,339,258,504,948,000 | 21.074468 | 77 | 0.675181 | false |
thachhoang/boxmover
|
main.py
|
1
|
2715
|
import json
import logging
import logging.config
import sys
import os
import pytumblr
from boxmover import delete_queue, reblog_everything, confirm, like_everything, new_oauth, unlike_everything, \
get_follow_list
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {'simple': {'format': '%(asctime)s %(levelname)s: %(message)s'}},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'stream': sys.stdout,
'formatter': 'simple'
},
'status_file': {
'class': 'logging.FileHandler',
'filename': 'status.log',
'formatter': 'simple'
}
},
'loggers': {'root': {'handlers': ['console', 'status_file'], 'level': 'DEBUG'}}
}
logging.config.dictConfig(LOGGING)
logger = logging.getLogger('root')
def move_posts():
if confirm('About to empty your queue on {}, are you sure?'.format(new_blog), default='n'):
delete_queue(client, new_blog)
reblog_everything(client, old_blog, new_blog, limit=None, offset=None, interactive=False, dry_run=True)
def move_likes():
like_everything(client, old_blog, dry_run=True)
def remove_likes():
unlike_everything(client, dry_run=True)
def save_follow_list(save_path):
with open(save_path, 'w+') as ff:
json.dump(get_follow_list(client), ff, indent=4)
def load_follow_list(save_path, interactive=False):
with open(save_path, 'r') as fl:
follow_list = json.load(fl)
i = 0
total = len(follow_list)
for follow in follow_list:
i += 1
logger.info('======= %s/%s: %s (%s)', i, total, follow['name'], follow['url'])
if interactive and not confirm('Follow?', default='y'):
continue
logger.info('Followed %s.', follow['url'])
# client.unfollow(follow['url'])
client.follow(follow['url'])
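# Expected settings.json shape, inferred from the keys read in __main__
# below (the blog identifiers here are illustrative):
#   {"old_blog": "old-name.tumblr.com", "new_blog": "new-name.tumblr.com"}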
if __name__ == '__main__':
# Load app credentials
json_path = 'secrets.json'
tokens = {}
if not os.path.exists(json_path):
tokens = new_oauth()
with open(json_path, 'w+') as f:
json.dump(tokens, f, indent=4)
else:
with open(json_path, 'r') as f:
tokens = json.load(f)
client = pytumblr.TumblrRestClient(
tokens['consumer_key'],
tokens['consumer_secret'],
tokens['oauth_token'],
tokens['oauth_token_secret']
)
# Script settings
with open('settings.json') as f:
settings = json.load(f)
old_blog = settings['old_blog']
new_blog = settings['new_blog']
# save_follow_list('follows.json')
load_follow_list('follows.json', interactive=True)
|
mit
| 2,587,896,771,294,788,600 | 27.578947 | 111 | 0.583425 | false |
kgblll/libresoft-gymkhana
|
apps/explohyperfiction/rest/petitions.py
|
1
|
4387
|
# -*- coding: UTF-8 -*-
from django.http import HttpResponse, HttpResponseRedirect
from django.template.loader import get_template
from django.template import Context, RequestContext
from django.shortcuts import render_to_response
from datetime import datetime
from django.contrib.auth import authenticate, login, logout
from social.core import api as api_lgs
from social.rest.forms import LoginForm
from social.core.models import *
from apps.explohyperfiction.core import utils
from apps.explohyperfiction.models import *
from apps.explohyperfiction.core import api
def view_petitions(request):
if not request.user.is_authenticated():
data={"message": "You are not autheticated in LGS"}
return render_to_response("explohyperfiction_error.html", data)
    if request.method != "GET":
data={"message": "Forbidden"}
return render_to_response("explohyperfiction_error.html", data)
person=Person.objects.get(id=request.session["_auth_user_id"])
if not Player.objects.filter(person=person):
data={"message": "You are not member of ExploHyperfiction"}
return render_to_response("explohyperfiction_error.html", data)
player=Player.objects.get(person=person)
if not player.is_superuser:
data={"message": "Only a superuser can see the resource"}
return render_to_response("explohyperfiction_error.html", data)
data={"login":True, "player":player, "number_of_petitions":len(Petition.objects.all()), "petitions":Petition.objects.all().order_by('-date')}
template=get_template("explohyperfiction_petitions.html")
return HttpResponse(template.render(RequestContext(request,data)))
def accept(request, id_petition):
if not request.user.is_authenticated():
data={"message": "You are not autheticated in LGS"}
return render_to_response("explohyperfiction_error.html", data)
    if request.method != "GET":
data={"message": "Forbidden"}
return render_to_response("explohyperfiction_error.html", data)
person=Person.objects.get(id=request.session["_auth_user_id"])
if not Player.objects.filter(person=person):
data={"message": "You are not member of ExploHyperfiction"}
return render_to_response("explohyperfiction_error.html", data)
player=Player.objects.get(person=person)
if not player.is_superuser:
data={"message": "Only a superuser can see the resource"}
return render_to_response("explohyperfiction_error.html", data)
if not Petition.objects.filter(id=int(id_petition)).exists():
data={"message": "The petition doesn't exists"}
return render_to_response("explohyperfiction_error.html", data)
petition=Petition.objects.get(id=int(id_petition))
if petition.for_manager:
petition.player.is_manager=True
group=Group.objects.get(name="Free Group")
group.manager.add(petition.player)
group.save()
if petition.for_super:
petition.player.is_superuser=True
api.system_message.create_from_petition(petition, True)
petition.player.save()
petition.delete()
return HttpResponseRedirect("/explohyperfiction/petitions/")
def reject(request, id_petition):
if not request.user.is_authenticated():
data={"message": "You are not autheticated in LGS"}
return render_to_response("explohyperfiction_error.html", data)
    if request.method != "GET":
data={"message": "Forbidden"}
return render_to_response("explohyperfiction_error.html", data)
person=Person.objects.get(id=request.session["_auth_user_id"])
if not Player.objects.filter(person=person):
data={"message": "You are not member of ExploHyperfiction"}
return render_to_response("explohyperfiction_error.html", data)
player=Player.objects.get(person=person)
if not player.is_superuser:
data={"message": "Only a superuser can see the resource"}
return render_to_response("explohyperfiction_error.html", data)
if not Petition.objects.filter(id=int(id_petition)).exists():
data={"message": "The petition doesn't exists"}
return render_to_response("explohyperfiction_error.html", data)
petition=Petition.objects.get(id=int(id_petition))
api.system_message.create_from_petition(petition, False)
petition.delete()
return HttpResponseRedirect("/explohyperfiction/petitions/")
|
gpl-2.0
| -5,561,933,755,589,448,000 | 47.744444 | 145 | 0.713928 | false |
ecederstrand/exchangelib
|
exchangelib/recurrence.py
|
1
|
13352
|
import logging
from .fields import IntegerField, EnumField, EnumListField, DateOrDateTimeField, DateTimeField, EWSElementField, \
IdElementField, MONTHS, WEEK_NUMBERS, WEEKDAYS
from .properties import EWSElement, IdChangeKeyMixIn, ItemId, EWSMeta
log = logging.getLogger(__name__)
def _month_to_str(month):
return MONTHS[month-1] if isinstance(month, int) else month
def _weekday_to_str(weekday):
return WEEKDAYS[weekday - 1] if isinstance(weekday, int) else weekday
def _week_number_to_str(week_number):
return WEEK_NUMBERS[week_number - 1] if isinstance(week_number, int) else week_number
class Pattern(EWSElement, metaclass=EWSMeta):
"""Base class for all classes implementing recurring pattern elements."""
class Regeneration(Pattern, metaclass=EWSMeta):
"""Base class for all classes implementing recurring regeneration elements."""
class AbsoluteYearlyPattern(Pattern):
"""MSDN:
https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/absoluteyearlyrecurrence
"""
ELEMENT_NAME = 'AbsoluteYearlyRecurrence'
    # The day of month of an occurrence, in range 1 -> 31. If a particular month has fewer days than the day_of_month
# value, the last day in the month is assumed
day_of_month = IntegerField(field_uri='DayOfMonth', min=1, max=31, is_required=True)
# The month of the year, from 1 - 12
month = EnumField(field_uri='Month', enum=MONTHS, is_required=True)
def __str__(self):
return 'Occurs on day %s of %s' % (self.day_of_month, _month_to_str(self.month))
class RelativeYearlyPattern(Pattern):
"""MSDN:
https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/relativeyearlyrecurrence
"""
ELEMENT_NAME = 'RelativeYearlyRecurrence'
# The weekday of the occurrence, as a valid ISO 8601 weekday number in range 1 -> 7 (1 being Monday).
# Alternatively, the weekday can be one of the DAY (or 8), WEEK_DAY (or 9) or WEEKEND_DAY (or 10) consts which
# is interpreted as the first day, weekday, or weekend day in the month, respectively.
weekday = EnumField(field_uri='DaysOfWeek', enum=WEEKDAYS, is_required=True)
# Week number of the month, in range 1 -> 5. If 5 is specified, this assumes the last week of the month for
# months that have only 4 weeks
week_number = EnumField(field_uri='DayOfWeekIndex', enum=WEEK_NUMBERS, is_required=True)
# The month of the year, from 1 - 12
month = EnumField(field_uri='Month', enum=MONTHS, is_required=True)
def __str__(self):
return 'Occurs on weekday %s in the %s week of %s' % (
_weekday_to_str(self.weekday),
_week_number_to_str(self.week_number),
_month_to_str(self.month)
)
class AbsoluteMonthlyPattern(Pattern):
"""MSDN:
https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/absolutemonthlyrecurrence
"""
ELEMENT_NAME = 'AbsoluteMonthlyRecurrence'
# Interval, in months, in range 1 -> 99
interval = IntegerField(field_uri='Interval', min=1, max=99, is_required=True)
    # The day of month of an occurrence, in range 1 -> 31. If a particular month has fewer days than the day_of_month
# value, the last day in the month is assumed
day_of_month = IntegerField(field_uri='DayOfMonth', min=1, max=31, is_required=True)
def __str__(self):
return 'Occurs on day %s of every %s month(s)' % (self.day_of_month, self.interval)
class RelativeMonthlyPattern(Pattern):
"""MSDN:
https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/relativemonthlyrecurrence
"""
ELEMENT_NAME = 'RelativeMonthlyRecurrence'
# Interval, in months, in range 1 -> 99
interval = IntegerField(field_uri='Interval', min=1, max=99, is_required=True)
# The weekday of the occurrence, as a valid ISO 8601 weekday number in range 1 -> 7 (1 being Monday).
# Alternatively, the weekday can be one of the DAY (or 8), WEEK_DAY (or 9) or WEEKEND_DAY (or 10) consts which
# is interpreted as the first day, weekday, or weekend day in the month, respectively.
weekday = EnumField(field_uri='DaysOfWeek', enum=WEEKDAYS, is_required=True)
# Week number of the month, in range 1 -> 5. If 5 is specified, this assumes the last week of the month for
# months that have only 4 weeks.
week_number = EnumField(field_uri='DayOfWeekIndex', enum=WEEK_NUMBERS, is_required=True)
def __str__(self):
return 'Occurs on weekday %s in the %s week of every %s month(s)' % (
_weekday_to_str(self.weekday),
_week_number_to_str(self.week_number),
self.interval
)
class WeeklyPattern(Pattern):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/weeklyrecurrence"""
ELEMENT_NAME = 'WeeklyRecurrence'
# Interval, in weeks, in range 1 -> 99
interval = IntegerField(field_uri='Interval', min=1, max=99, is_required=True)
# List of valid ISO 8601 weekdays, as list of numbers in range 1 -> 7 (1 being Monday)
weekdays = EnumListField(field_uri='DaysOfWeek', enum=WEEKDAYS, is_required=True)
# The first day of the week. Defaults to Monday
first_day_of_week = EnumField(field_uri='FirstDayOfWeek', enum=WEEKDAYS, default=1, is_required=True)
def __str__(self):
if isinstance(self.weekdays, str):
weekdays = [self.weekdays]
elif isinstance(self.weekdays, int):
weekdays = [_weekday_to_str(self.weekdays)]
else:
weekdays = [_weekday_to_str(i) for i in self.weekdays]
return 'Occurs on weekdays %s of every %s week(s) where the first day of the week is %s' % (
', '.join(weekdays), self.interval, _weekday_to_str(self.first_day_of_week)
)
class DailyPattern(Pattern):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/dailyrecurrence"""
ELEMENT_NAME = 'DailyRecurrence'
# Interval, in days, in range 1 -> 999
interval = IntegerField(field_uri='Interval', min=1, max=999, is_required=True)
def __str__(self):
return 'Occurs every %s day(s)' % self.interval
class YearlyRegeneration(Regeneration):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/yearlyregeneration"""
ELEMENT_NAME = 'YearlyRegeneration'
# Interval, in years
interval = IntegerField(field_uri='Interval', min=1, is_required=True)
def __str__(self):
return 'Regenerates every %s year(s)' % self.interval
class MonthlyRegeneration(Regeneration):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/monthlyregeneration"""
ELEMENT_NAME = 'MonthlyRegeneration'
# Interval, in months
interval = IntegerField(field_uri='Interval', min=1, is_required=True)
def __str__(self):
return 'Regenerates every %s month(s)' % self.interval
class WeeklyRegeneration(Regeneration):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/weeklyregeneration"""
ELEMENT_NAME = 'WeeklyRegeneration'
# Interval, in weeks
interval = IntegerField(field_uri='Interval', min=1, is_required=True)
def __str__(self):
return 'Regenerates every %s week(s)' % self.interval
class DailyRegeneration(Regeneration):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/dailyregeneration"""
ELEMENT_NAME = 'DailyRegeneration'
# Interval, in days
interval = IntegerField(field_uri='Interval', min=1, is_required=True)
def __str__(self):
return 'Regenerates every %s day(s)' % self.interval
class Boundary(EWSElement, metaclass=EWSMeta):
"""Base class for all classes implementing recurring boundary elements."""
class NoEndPattern(Boundary):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/noendrecurrence"""
ELEMENT_NAME = 'NoEndRecurrence'
# Start date, as EWSDate or EWSDateTime
start = DateOrDateTimeField(field_uri='StartDate', is_required=True)
def __str__(self):
return 'Starts on %s' % self.start
class EndDatePattern(Boundary):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/enddaterecurrence"""
ELEMENT_NAME = 'EndDateRecurrence'
# Start date, as EWSDate or EWSDateTime
start = DateOrDateTimeField(field_uri='StartDate', is_required=True)
# End date, as EWSDate
end = DateOrDateTimeField(field_uri='EndDate', is_required=True)
def __str__(self):
return 'Starts on %s, ends on %s' % (self.start, self.end)
class NumberedPattern(Boundary):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/numberedrecurrence"""
ELEMENT_NAME = 'NumberedRecurrence'
# Start date, as EWSDate or EWSDateTime
start = DateOrDateTimeField(field_uri='StartDate', is_required=True)
# The number of occurrences in this pattern, in range 1 -> 999
number = IntegerField(field_uri='NumberOfOccurrences', min=1, max=999, is_required=True)
def __str__(self):
return 'Starts on %s and occurs %s times' % (self.start, self.number)
class Occurrence(IdChangeKeyMixIn):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/occurrence"""
ELEMENT_NAME = 'Occurrence'
ID_ELEMENT_CLS = ItemId
_id = IdElementField(field_uri='ItemId', value_cls=ID_ELEMENT_CLS)
# The modified start time of the item, as EWSDateTime
start = DateTimeField(field_uri='Start')
# The modified end time of the item, as EWSDateTime
end = DateTimeField(field_uri='End')
# The original start time of the item, as EWSDateTime
original_start = DateTimeField(field_uri='OriginalStart')
# Container elements:
# 'ModifiedOccurrences'
# 'DeletedOccurrences'
class FirstOccurrence(Occurrence):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/firstoccurrence"""
ELEMENT_NAME = 'FirstOccurrence'
class LastOccurrence(Occurrence):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/lastoccurrence"""
ELEMENT_NAME = 'LastOccurrence'
class DeletedOccurrence(EWSElement):
"""MSDN: https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/deletedoccurrence"""
ELEMENT_NAME = 'DeletedOccurrence'
# The modified start time of the item, as EWSDateTime
start = DateTimeField(field_uri='Start')
PATTERN_CLASSES = AbsoluteYearlyPattern, RelativeYearlyPattern, AbsoluteMonthlyPattern, RelativeMonthlyPattern, \
WeeklyPattern, DailyPattern
REGENERATION_CLASSES = YearlyRegeneration, MonthlyRegeneration, WeeklyRegeneration, DailyRegeneration
BOUNDARY_CLASSES = NoEndPattern, EndDatePattern, NumberedPattern
class Recurrence(EWSElement):
"""MSDN:
https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/recurrence-recurrencetype
"""
ELEMENT_NAME = 'Recurrence'
PATTERN_CLASSES = PATTERN_CLASSES
pattern = EWSElementField(value_cls=Pattern)
boundary = EWSElementField(value_cls=Boundary)
def __init__(self, **kwargs):
# Allow specifying a start, end and/or number as a shortcut to creating a boundary
start = kwargs.pop('start', None)
end = kwargs.pop('end', None)
number = kwargs.pop('number', None)
if any([start, end, number]):
if 'boundary' in kwargs:
raise ValueError("'boundary' is not allowed in combination with 'start', 'end' or 'number'")
if start and not end and not number:
kwargs['boundary'] = NoEndPattern(start=start)
elif start and end and not number:
kwargs['boundary'] = EndDatePattern(start=start, end=end)
elif start and number and not end:
kwargs['boundary'] = NumberedPattern(start=start, number=number)
else:
raise ValueError("Unsupported 'start', 'end', 'number' combination")
super().__init__(**kwargs)
@classmethod
def from_xml(cls, elem, account):
for pattern_cls in cls.PATTERN_CLASSES:
pattern_elem = elem.find(pattern_cls.response_tag())
if pattern_elem is None:
continue
pattern = pattern_cls.from_xml(elem=pattern_elem, account=account)
break
else:
pattern = None
for boundary_cls in BOUNDARY_CLASSES:
boundary_elem = elem.find(boundary_cls.response_tag())
if boundary_elem is None:
continue
boundary = boundary_cls.from_xml(elem=boundary_elem, account=account)
break
else:
boundary = None
return cls(pattern=pattern, boundary=boundary)
def __str__(self):
return 'Pattern: %s, Boundary: %s' % (self.pattern, self.boundary)
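# A minimal usage sketch (illustrative; EWSDate is assumed importable from
# the exchangelib package as elsewhere in the library). The start/end/number
# shortcut kwargs map onto the boundary classes above:
#
#   from exchangelib import EWSDate
#   rec = Recurrence(
#       pattern=WeeklyPattern(interval=1, weekdays=[1, 3]),  # Mon and Wed
#       start=EWSDate(2021, 1, 4),
#       number=10,  # becomes a NumberedPattern boundary
#   )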
class TaskRecurrence(Recurrence):
"""MSDN:
https://docs.microsoft.com/en-us/exchange/client-developer/web-service-reference/recurrence-taskrecurrencetype
"""
PATTERN_CLASSES = PATTERN_CLASSES + REGENERATION_CLASSES
|
bsd-2-clause
| -5,107,342,709,076,370,000 | 37.478386 | 116 | 0.687088 | false |
ErinMorelli/em-media-handler
|
mediahandler/types/__init__.py
|
1
|
7777
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is a part of EM Media Handler
# Copyright (c) 2014-2021 Erin Morelli
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""
Module: mediahandler.types
Module contains:
- |MHMediaType|
Parent class for all media type submodules. Includes the
logic for the video media types (TV & movies).
Media Type Submodules:
- |mediahandler.types.audiobooks|
- |mediahandler.types.movies|
- |mediahandler.types.music|
- |mediahandler.types.tv|
"""
import os
import logging
from subprocess import Popen, PIPE
from re import findall, search, sub, IGNORECASE
import mediahandler as mh
class MHMediaType(mh.MHObject):
"""Parent class for the media type submodule classes.
Required arguments:
- settings
Dict or MHSettings object.
- push
MHPush object.
Public method:
- |add()|
Main wrapper function for adding media files. Processes
calls to Beets and Filebot.
"""
def __init__(self, settings, push):
"""Initialize the MHMediaType class.
Required arguments:
- settings
Dict or MHSettings object.
- push
MHPush object.
"""
super(MHMediaType, self).__init__(settings, push)
# Set up class members
self.push = push
self.dst_path = ''
self.type = sub(r'^mh', '', type(self).__name__.lower())
# If the subclass didn't define a ptype, set default
if not hasattr(self, 'ptype'):
self.ptype = 'Media Type'
# Type specific
if self.ptype is not None:
# Set destination path
self.dst_path = os.path.join(
os.path.expanduser("~"), 'Media', self.ptype)
# Check for custom path in settings
if hasattr(self, 'folder'):
if self.folder is not None:
self.dst_path = self.folder
logging.debug("Using custom path: %s", self.dst_path)
# Check destination exists
if not os.path.exists(self.dst_path):
self.push.failure("Folder for {0} not found: {1}".format(
self.ptype, self.dst_path))
def _video_settings(self):
"""Set MHMediaType object methods for video types.
Sets up Filebot query values and post-query regex processing values.
"""
# Check for filebot
if not self.filebot:
self.push.failure(
"Filebot required to process {0} files".format(self.ptype))
# Filebot
cmd_info = self.MHSettings({
'action': 'copy',
'db': '',
'format': os.path.join(self.dst_path, self.format),
'flags': ['-r', '-non-strict']
})
self.__dict__.update({'cmd': cmd_info})
# Object defaults
query = self.MHSettings({
'file_types': r'(mkv|avi|m4v|mp4)',
'added_i': 2,
'skip_i': 1,
'reason': '{0} already exists in {1}'.format(
self.type, self.dst_path)
})
query.skip = r'({0}) \[(.*)\] because \[(.*)\] ({1})?already exists'.format(
'Skipped|Failed to process', 'is an exact copy and ')
query.added = r'\[{0}\] ({1}) \[(.*)\] to \[(.*)\.{2}\]'.format(
self.cmd.action.upper(), 'From|Rename', query.file_types)
self.__dict__.update({'query': query})
def add(self, file_path):
"""Wrapper for Filebot requests.
Sets up Filebot CLI query using object member values.
"""
logging.info("Starting %s handler", self.type)
# Set up query
m_cmd = [self.filebot,
'-rename', file_path,
'--db', self.cmd.db,
'--format', self.cmd.format,
'--action', self.cmd.action]
m_cmd.extend(self.cmd.flags)
# Check for logfile
if self.log_file is not None:
loginfo = [
'--log', 'all',
'--log-file', self.log_file]
m_cmd.extend(loginfo)
# If ignoring subtitles, remove all non-video files
if self.ignore_subs:
logging.debug("Ignoring subtitle files")
self._remove_bad_files(file_path)
return self._media_info(m_cmd, file_path)
def _media_info(self, cmd, file_path):
"""Makes request to Beets and Filebot.
Sends results to _process_output().
"""
logging.debug("Query: %s", cmd)
# Process query
query = Popen(cmd, stdout=PIPE, stderr=PIPE)
# Get output
(output, err) = query.communicate()
logging.debug("Query output: %s", output)
logging.debug("Query return errors: %s", err)
return self._process_output(output + err, file_path)
def _process_output(self, output, file_path):
"""Parses response from _media_info() query.
Returns good results and any skipped files.
"""
logging.info("Processing query output")
# Convert output to str, if needed
if not isinstance(output, str):
output = output.decode('utf-8')
# Look for content
added_data = findall(self.query.added, output, IGNORECASE)
skip_data = findall(self.query.skip, output, IGNORECASE)
# Check return
results = []
if added_data:
for added_item in added_data:
results.append(added_item[self.query.added_i])
# Get skipped results
skipped = []
if skip_data:
for skip_item in skip_data:
skip_item_name = os.path.basename(skip_item[self.query.skip_i])
skipped.append(skip_item_name)
logging.warning("File was skipped: %s (%s)",
skip_item_name,
self.query.reason)
# Return error if nothing found
if not skipped and not results:
return self._match_error(file_path)
return results, skipped
def _remove_bad_files(self, file_path):
"""Removes non-video files from media folder.
Only used when 'ignore_subs' setting is True.
"""
logging.info("Removing bad files")
# Skip if this is not a folder
if os.path.isfile(file_path):
return
# Look for bad files and remove them
regex = r'\.{0}$'.format(self.query.file_types)
for item in os.listdir(file_path):
item_path = os.path.join(file_path, item)
# If it's a folder, iterate again
if os.path.isdir(item_path):
self._remove_bad_files(item_path)
# Otherwise check for non-video files
elif not search(regex, item):
os.unlink(item_path)
def _match_error(self, name):
"""Returns a match error via the MHPush object.
"""
return self.push.failure(
"Unable to match {0} files: {1}".format(self.type, name))
def __repr__(self):
return '<MHMediaType {0}>'.format(self.__dict__)
|
mit
| 7,511,138,175,308,434,000 | 29.61811 | 84 | 0.561656 | false |
bowen0701/algorithms_data_structures
|
lc0605_can_place_flowers.py
|
1
|
2449
|
"""Leetcode 605. Can Place Flowers
Easy
URL: https://leetcode.com/problems/can-place-flowers/
Suppose you have a long flowerbed in which some of the plots are planted and some are not.
However, flowers cannot be planted in adjacent plots -
they would compete for water and both would die.
Given a flowerbed (represented as an array containing 0 and 1,
where 0 means empty and 1 means not empty), and a number n,
return if n new flowers can be planted in it without violating the no-adjacent-flowers rule.
Example 1:
Input: flowerbed = [1,0,0,0,1], n = 1
Output: True
Example 2:
Input: flowerbed = [1,0,0,0,1], n = 2
Output: False
Note:
- The input array won't violate no-adjacent-flowers rule.
- The input array size is in the range of [1, 20000].
- n is a non-negative integer which won't exceed the input array size.
"""
class SolutionIter(object):
def canPlaceFlowers(self, flowerbed, n):
"""
:type flowerbed: List[int]
:type n: int
:rtype: bool
Time complexity: O(n).
Space complexity: O(1).
"""
if not n:
return True
# Edge case: flowerbed = [0].
if flowerbed == [0]:
if n == 1:
return True
else:
return False
# Iterate to plant n flowers starting from position i = 0.
i = 0
while n > 0 and i < len(flowerbed):
if not flowerbed[i]:
if i == 0:
if not flowerbed[i + 1]:
flowerbed[i] = 1
n -= 1
elif i == len(flowerbed) - 1:
if not flowerbed[i - 1]:
flowerbed[i] = 1
n -= 1
else:
if not flowerbed[i - 1] and not flowerbed[i + 1]:
flowerbed[i] = 1
n -= 1
i += 1
# Check if there remain flowers to plant.
if n > 0:
return False
else:
return True
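class SolutionPadded(object):
    """Alternative sketch (added for illustration, not part of the original
    file): the same O(n) greedy, simplified by padding both ends with a
    virtual empty plot so the boundary checks disappear.
    """
    def canPlaceFlowers(self, flowerbed, n):
        bed = [0] + flowerbed + [0]
        for i in range(1, len(bed) - 1):
            if n <= 0:
                break
            # Plant wherever a plot and both of its neighbors are empty.
            if bed[i - 1] == bed[i] == bed[i + 1] == 0:
                bed[i] = 1
                n -= 1
        return n <= 0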
def main():
# Output: True
flowerbed = [1,0,0,0,1]
n = 1
    print(SolutionIter().canPlaceFlowers(flowerbed, n))
# Output: False
flowerbed = [1,0,0,0,1]
n = 2
    print(SolutionIter().canPlaceFlowers(flowerbed, n))
# Output: True
flowerbed = [0,0,1]
n = 1
    print(SolutionIter().canPlaceFlowers(flowerbed, n))
if __name__ == '__main__':
main()
|
bsd-2-clause
| -3,217,385,267,934,301,700 | 25.619565 | 92 | 0.537362 | false |
feureau/Small-Scripts
|
Blender/Blender config/2.91/scripts/addons/bricker_v2-2-1/functions/brick/mesh_generators/standard_brick.py
|
1
|
5467
|
# Copyright (C) 2020 Christopher Gearhart
# chris@bblanimation.com
# http://bblanimation.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# System imports
import bmesh
import math
# Blender imports
from mathutils import Vector, Matrix
from bpy.types import Object
# Module imports
from .generator_utils import *
def make_standard_brick(dimensions:dict, brick_size:list, type:str, brick_type:str, circle_verts:int=16, detail:str="LOW", logo:Object=None, stud:bool=True, bme:bmesh=None):
"""
create brick with bmesh
Keyword Arguments:
dimensions -- dictionary containing brick dimensions
brick_size -- size of brick (e.g. standard 2x4 -> [2, 4, 3])
type -- type of brick (e.g. BRICK, PLATE, CUSTOM)
brick_type -- cm.brick_type
circle_verts -- number of vertices per circle of cylinders
detail -- level of brick detail (options: ["FLAT", "LOW", "HIGH"])
logo -- logo object to create on top of studs
stud -- create stud on top of brick
bme -- bmesh object in which to create verts
"""
assert detail in ("FLAT", "LOW", "HIGH")
# create new bmesh object
bme = bmesh.new() if not bme else bme
b_and_p_brick = flat_brick_type(brick_type) and brick_size[2] == 3
height = dimensions["height"] * (3 if b_and_p_brick else 1)
# get half scale
d = Vector((dimensions["half_width"], dimensions["half_width"], dimensions["half_height"]))
d.z = d.z * (brick_size[2] if flat_brick_type(brick_type) else 1)
# get scalar for d in positive xyz directions
scalar = Vector((
brick_size[0] * 2 - 1,
brick_size[1] * 2 - 1,
1,
))
# get thickness of brick from inside to outside
thick_xy = dimensions["thickness"] - (dimensions["tick_depth"] if "High" in detail and min(brick_size) != 1 else 0)
thick = Vector((thick_xy, thick_xy, dimensions["thickness"]))
# create cube
coord1 = -d
coord2 = vec_mult(d, scalar)
v1, v2, v3, v4, v5, v6, v7, v8 = make_cube(coord1, coord2, [0 if stud else 1, 1 if detail == "FLAT" else 0, 1, 1, 1, 1], seams=True, bme=bme)[1]
# add studs
if stud: add_studs(dimensions, height, brick_size, brick_type, circle_verts, bme, edge_xp=[v7, v6], edge_xn=[v8, v5], edge_yp=[v7, v8], edge_yn=[v6, v5], hollow=brick_size[2] > 3 or "HOLES" in type)
# add details
if detail != "FLAT":
draw_tick_marks = detail == "HIGH" and ((brick_size[0] == 2 and brick_size[1] > 1) or (brick_size[1] == 2 and brick_size[0] > 1)) and brick_size[2] != 1
# making verts for hollow portion
coord1 = -d + Vector((thick.x, thick.y, 0))
coord2 = vec_mult(d, scalar) - thick
sides = [1 if detail == "LOW" else 0, 0] + ([0 if draw_tick_marks else 1] * 4)
v9, v10, v11, v12, v13, v14, v15, v16 = make_cube(coord1, coord2, sides, flip_normals=True, seams=True, bme=bme)[1]
# make tick marks inside 2 by x bricks
if draw_tick_marks:
bottom_verts = add_tick_marks(dimensions, brick_size, circle_verts, detail, d, thick, bme, nno=v1, npo=v2, ppo=v3, pno=v4, nni=v9, npi=v10, ppi=v11, pni=v12, nnt=v13, npt=v16, ppt=v15, pnt=v14)
else:
# make faces on bottom edges of brick
bme.faces.new((v1, v9, v12, v4))
bme.faces.new((v1, v2, v10, v9))
bme.faces.new((v11, v3, v4, v12))
bme.faces.new((v11, v10, v2, v3))
# get upper edge verts for connecting to supports/cylinders
edge_xp = [v15] + (bottom_verts["X+"][::-1] if draw_tick_marks else []) + [v14]
edge_xn = [v16] + (bottom_verts["X-"][::-1] if draw_tick_marks else []) + [v13]
edge_yp = [v15] + (bottom_verts["Y+"][::-1] if draw_tick_marks else []) + [v16]
edge_yn = [v14] + (bottom_verts["Y-"][::-1] if draw_tick_marks else []) + [v13]
# add supports
if max(brick_size[:2]) > 1:
add_supports(dimensions, height, brick_size, brick_type, circle_verts, type, detail, d, scalar, thick, bme, add_beams=detail == "HIGH")
# add stud cutouts
if detail == "HIGH":
add_stud_cutouts(dimensions, brick_size, circle_verts, d, edge_xp, edge_xn, edge_yp, edge_yn, bme)
# transform final mesh
gap = Vector([dimensions["gap"]] * 2)
numer = vec_mult(d.xy * 2 + gap, brick_size[:2]) - gap
denom = vec_mult(d.xy * 2, brick_size[:2])
if brick_size[0] != 1 or brick_size[1] != 1:
bmesh.ops.scale(bme, verts=bme.verts, vec=(numer.x / denom.x, numer.y / denom.y, 1.0))
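    # recenter so the brick gap is split evenly on both sides (assumed intent of the shifts below)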
if brick_size[0] > 1:
for v in bme.verts:
v.co.x -= (gap.x * brick_size[0] - gap.x) / 2
if brick_size[1] > 1:
for v in bme.verts:
v.co.y -= (gap.y * brick_size[1] - gap.y) / 2
# return bmesh
return bme
|
gpl-3.0
| -5,588,545,381,719,541,000 | 46.12931 | 205 | 0.60728 | false |
pdamodaran/yellowbrick
|
tests/rand.py
|
1
|
3176
|
# tests.rand
# A visualizer that draws a random scatter plot for testing.
#
# Author: Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created: Wed Mar 21 17:51:15 2018 -0400
#
# ID: rand.py [] benjamin@bengfort.com $
"""
A visualizer that draws a random scatter plot for testing.
"""
##########################################################################
## Imports
##########################################################################
import numpy as np
from yellowbrick.base import Visualizer
from yellowbrick.style import resolve_colors
from sklearn.datasets import make_blobs
##########################################################################
## Random Visualizer
##########################################################################
class RandomVisualizer(Visualizer):
"""
Creates random scatter plots as a testing utility.
Data generation uses scikit-learn make_blobs to create scatter plots that
have reasonable visual features and multiple colors.
Parameters
----------
ax : matplotlib Axes, default: None
The axis to plot the figure on. If None is passed in the current axes
will be used (or generated if required).
n_samples : int, default: 100
The number of points to generate for the scatter plot
n_blobs : int or array of shape [n_centers, 2]
Define the number of blobs to create or specify their centers.
random_state : int, RandomState or None:
Used to specify the seed of the random state to ensure tests work.
"""
def __init__(self, ax=None, n_samples=100, n_blobs=3,
random_state=None, **kwargs):
super(RandomVisualizer, self).__init__(ax=ax, **kwargs)
if isinstance(random_state, (int, float)) or random_state is None:
random_state = np.random.RandomState(random_state)
self.set_params(
n_samples=n_samples, n_blobs=n_blobs, random_state=random_state,
)
def generate(self):
"""
Returns random data according to the visualizer specification.
Returns
-------
X : array of shape [n_samples, 2]
2 dimensional array of points to plot
y : vector with length n_samples
Center/blob each point belongs to (used for color)
"""
return make_blobs(
self.n_samples, 2, self.n_blobs, random_state=self.random_state
)
def fit(self, *args, **kwargs):
X, c = self.generate()
x = X[:,0]
y = X[:,1]
self.draw(x, y, c)
return self
def draw(self, x, y, c):
colors = resolve_colors(self.n_blobs)
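        # resolve_colors is assumed to return one color per blob from the active Yellowbrick palette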
for i in np.arange(self.n_blobs):
mask = c==i
label = "c{}".format(i)
self.ax.scatter(x[mask], y[mask], label=label, c=colors[i])
return self.ax
def finalize(self):
self.ax.legend(frameon=True)
self.ax.set_ylabel("$y$")
self.ax.set_xlabel("$x$")
self.ax.set_title("Random Scatter Plot")
return self.ax
if __name__ == '__main__':
r = RandomVisualizer()
r.fit()
r.poof(outpath='test.png')
|
apache-2.0
| 5,707,857,525,669,161,000 | 28.137615 | 77 | 0.55573 | false |
MiczFlor/RPi-Jukebox-RFID
|
scripts/python-phoniebox/PhonieboxDaemon.py
|
1
|
6961
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# import threading
import sys
import os.path
import signal
from Phoniebox import Phoniebox
from time import sleep, time
# get absolute path of this script
dir_path = os.path.dirname(os.path.realpath(__file__))
defaultconfigFilePath = os.path.join(dir_path, 'phoniebox.conf')
# watchdog blocks the script, so it cannot be used in the same file as the PhonieboxDaemon
# from watchdog.observers import Observer
# from watchdog.events import FileSystemEventHandler
# from os.path import dirname
# class FileModifiedHandler(FileSystemEventHandler):
# """ watch the given file for changes and execute callback function on modification """
# def __init__(self, file_path, callback):
# self.file_path = file_path
# self.callback = callback
# # set observer to watch for changes in the directory
# self.observer = Observer()
# self.observer.schedule(self, dirname(file_path), recursive=False)
# self.observer.start()
# try:
# while True:
# sleep(1)
# except KeyboardInterrupt:
# self.observer.stop()
# self.observer.join()
#
# def on_modified(self, event):
# # only act on the change that we're looking for
# if not event.is_directory and event.src_path.endswith(self.file_path):
# daemon.log("cardAssignmentsFile modified!",3)
# self.callback() # call callback
class PhonieboxDaemon(Phoniebox):
""" This subclass of Phoniebox is to be called directly, running as RFID reader daemon """
def __init__(self, configFilePath=defaultconfigFilePath):
Phoniebox.__init__(self, configFilePath)
self.lastplayedID = 0
def run(self):
# do things if killed
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
# establish mpd connection
self.mpd_init_connection()
self.mpd_init_settings()
state = self.client.status()["state"]
        self.play_alsa(self.get_setting("phoniebox", 'startup_sound'))
if state == "play":
self.client.play()
# launch watcher for config files, blocks the script
# TODO: it would be better to watch the changes with a second process that
# tells the PhonieboxDaemon to reload the config whenever needed.
# card_assignments_file = daemon.get_setting("phoniebox","card_assignments_file")
# cardAssignmentsWatchdog = FileModifiedHandler(card_assignments_file, self.update_cardAssignments)
# ConfigWatchdog = FileModifiedHandler(configFilePath, self.read_config)
        # start_reader runs an endless loop, nothing will be executed afterwards
        self.start_reader()
def start_reader(self):
from Reader import Reader
reader = Reader()
card_detection_sound = self.get_setting("phoniebox", "card_detection_sound")
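        # Phoniebox.get_setting is assumed to return -1 when a key is missing, hence the -1 fallbacks below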
debounce_time = self.get_setting("phoniebox", "debounce_time")
if debounce_time == -1:
debounce_time = 0.5
second_swipe_delay = self.get_setting("phoniebox", "second_swipe_delay")
if second_swipe_delay == -1:
second_swipe_delay = 0
store_card_assignments = self.get_setting("phoniebox", "store_card_assignments")
if store_card_assignments == -1:
store_card_assignments = 30
last_swipe = 0
last_write_card_assignments = 0
while True:
# reading the card id
cardid = reader.reader.readCard()
# cardid = None
# sleep(debounce_time)
try:
# start the player script and pass on the cardid
if cardid is not None:
print("Card ID: {}".format(int(cardid)))
filename = self.get_setting("phoniebox", "Latest_RFID_file")
if filename != -1:
self.print_to_file(filename, "\'{}\' was used at {}".format(cardid, time()))
if card_detection_sound != -1:
self.play_alsa(card_detection_sound)
if cardid in self.cardAssignments.sections():
# second swipe detection
if int(cardid) == int(self.lastplayedID) and time()-last_swipe > second_swipe_delay:
self.log("Second swipe for {}".format(cardid), 3)
self.do_second_swipe()
# if first swipe, just play
else:
last_swipe = time()
self.do_start_playlist(cardid)
# do not react for debounce_time
sleep(debounce_time)
else:
self.log("Card with ID {} not mapped yet.".format(cardid), 1)
except OSError as e:
print("Execution failed:", e)
# check if it is time for the next update of the cardAssignments and do it
            # Note: this is purely time-based and not clever at all.
            # TODO: find a better way to check for changes in the files on disk to trigger the update
if time()-last_write_card_assignments > store_card_assignments and store_card_assignments != False:
# store card assignments
if self.get_setting("phoniebox", "translate_legacy_cardassignments", "bool") == True:
legacy_cardAssignments = self.translate_legacy_cardAssignments(last_write_card_assignments)
self.update_cardAssignments(legacy_cardAssignments)
else:
                    self.update_cardAssignments(self.read_cardAssignments())  # read_cardAssignments is assumed to be a method returning the assignments read from disk
self.write_new_cardAssignments()
last_write_card_assignments = time()
def signal_handler(self, signal, frame):
""" catches signal and triggers the graceful exit """
print("Caught signal {}, exiting...".format(signal))
self.exit_gracefully()
def exit_gracefully(self):
""" stop mpd and write cardAssignments to disk if daemon is stopped """
self.mpd_connect_timeout()
self.client.stop()
self.client.disconnect()
# write config to update playstate
self.write_new_cardAssignments()
# exit script
sys.exit(0)
if __name__ == "__main__":
# if called directly, launch Phoniebox.py as rfid-reader daemon
# treat the first argument as defaultconfigFilePath if given
if len(sys.argv) <= 1:
configFilePath = defaultconfigFilePath
else:
configFilePath = sys.argv[1]
daemon = PhonieboxDaemon(configFilePath)
# setup the signal listeners
signal.signal(signal.SIGINT, daemon.exit_gracefully)
signal.signal(signal.SIGTERM, daemon.exit_gracefully)
# start the daemon (blocking)
daemon.run()
|
mit
| 4,467,813,023,690,706,000 | 39.707602 | 111 | 0.60724 | false |
ivan7farre/PyFreeFOAM
|
Foam/finiteVolume/cfdTools/compressible/compressibleCourantNo_impl.py
|
1
|
2812
|
## pythonFlu - Python wrapping for OpenFOAM C++ API
## Copyright (C) 2010- Alexey Petrov
## Copyright (C) 2009-2010 Pebble Bed Modular Reactor (Pty) Limited (PBMR)
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
## See http://sourceforge.net/projects/pythonflu
##
## Author : Alexey PETROV
##
#---------------------------------------------------------------------------
def compressibleCourantNo_010401_dev( mesh, phi, rho, runTime ):
from Foam.OpenFOAM import Time
from Foam.finiteVolume import fvMesh
from Foam.finiteVolume import surfaceScalarField
CoNum = 0.0;
meanCoNum = 0.0;
if mesh.nInternalFaces() :
from Foam import fvc
SfUfbyDelta = mesh.deltaCoeffs() * phi.mag() / fvc.interpolate( rho )
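        # face Courant number: Co = max(deltaCoeffs * |phi| / (rho_f * |Sf|)) * dt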
CoNum = ( SfUfbyDelta / mesh.magSf() ).ext_max().value() * runTime.deltaT().value()
meanCoNum = ( SfUfbyDelta.sum() / mesh.magSf().sum() ).value() * runTime.deltaT().value();
pass
from Foam.OpenFOAM import ext_Info, nl
ext_Info() << "Courant Number mean: " << meanCoNum << " max: " << CoNum << nl
return CoNum, meanCoNum
#---------------------------------------------------------------------------
def compressibleCourantNo_010600_dev( mesh, phi, rho, runTime ):
from Foam.OpenFOAM import Time
from Foam.finiteVolume import fvMesh
from Foam.finiteVolume import surfaceScalarField
CoNum = 0.0
meanCoNum = 0.0
velMag = 0.0
if mesh.nInternalFaces() :
from Foam import fvc
phiOverRho = phi.mag() / fvc.interpolate( rho )
SfUfbyDelta = mesh.deltaCoeffs() * phiOverRho
CoNum = ( SfUfbyDelta / mesh.magSf() ).ext_max().value() * runTime.deltaT().value()
meanCoNum = ( SfUfbyDelta.sum() / mesh.magSf().sum() ).value() * runTime.deltaT().value();
velMag = ( phiOverRho / mesh.magSf() ).ext_max().value()
pass
from Foam.OpenFOAM import ext_Info, nl
ext_Info() << "Courant Number mean: " << meanCoNum << " max: " << CoNum << " velocity magnitude: " << velMag << nl
return CoNum, meanCoNum, velMag
#---------------------------------------------------------------------------
|
gpl-3.0
| 3,301,216,150,076,530,700 | 34.15 | 120 | 0.599573 | false |
gamarino/numa_shared_taxes
|
invoices.py
|
1
|
9193
|
#-*- coding: utf-8 -*-
##############################################################################
#
# NUMA Extreme Systems (www.numaes.com)
# Copyright (C) 2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.osv.osv import Model, TransientModel, except_osv
from openerp.tools.translate import _
import openerp.exceptions
import datetime
class account_invoice_tax (Model):
_inherit = "account.invoice.tax"
def compute(self, cr, uid, invoice_id, context=None):
tax_obj = self.pool.get ('account.tax')
cur_obj = self.pool.get('res.currency')
invoice = self.pool.get('account.invoice').browse (cr, uid, invoice_id, context=context)
company = invoice.journal_id.company_id
total_wo_taxes = invoice.amount_untaxed
total_w_taxes = invoice.amount_tax
partner = invoice.partner_id
company_currency = company.currency_id.id
today = datetime.date.today().strftime('%Y-%m-%d')
tax_context = {
'pool': self.pool,
'uid': uid,
'invoice': invoice,
'date': invoice.date_invoice,
'company': company,
'datetime': datetime,
}
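        # tax_context is passed to compute_all so python-coded tax formulas can reference the invoice (OpenERP convention; assumed)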
# Invoice line computation
tax_grouped = {}
for line in invoice.invoice_line:
for tax in tax_obj.compute_all(cr, uid,
line.invoice_line_tax_id,
(line.price_unit* (1-(line.discount or 0.0)/100.0)),
line.quantity,
line.product_id,
invoice.partner_id,
context=tax_context)['taxes']:
val={}
val['invoice_id'] = invoice.id
val['name'] = tax['name']
val['amount'] = tax['amount']
val['manual'] = False
val['sequence'] = tax['sequence']
val['base'] = cur_obj.round(cr, uid, invoice.currency_id, tax['price_unit'] * line['quantity'])
if invoice.type in ('out_invoice','in_invoice'):
val['base_code_id'] = tax['base_code_id']
val['tax_code_id'] = tax['tax_code_id']
val['base_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['base'] * tax['base_sign'], context={'date': invoice.date_invoice or today}, round=False)
val['tax_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['amount'] * tax['tax_sign'], context={'date': invoice.date_invoice or today}, round=False)
val['account_id'] = tax['account_collected_id'] or line.account_id.id
val['account_analytic_id'] = tax['account_analytic_collected_id']
else:
val['base_code_id'] = tax['ref_base_code_id']
val['tax_code_id'] = tax['ref_tax_code_id']
val['base_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['base'] * tax['ref_base_sign'], context={'date': invoice.date_invoice or today}, round=False)
val['tax_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['amount'] * tax['ref_tax_sign'], context={'date': invoice.date_invoice or today}, round=False)
val['account_id'] = tax['account_paid_id'] or line.account_id.id
val['account_analytic_id'] = tax['account_analytic_paid_id']
key = (val['tax_code_id'], val['base_code_id'], val['account_id'], val['account_analytic_id'])
if not key in tax_grouped:
tax_grouped[key] = val
else:
tax_grouped[key]['amount'] += val['amount']
tax_grouped[key]['base'] += val['base']
tax_grouped[key]['base_amount'] += val['base_amount']
tax_grouped[key]['tax_amount'] += val['tax_amount']
for t in tax_grouped.values():
t['base'] = cur_obj.round(cr, uid, company.currency_id, t['base'])
t['amount'] = cur_obj.round(cr, uid, company.currency_id, t['amount'])
t['base_amount'] = cur_obj.round(cr, uid, company.currency_id, t['base_amount'])
t['tax_amount'] = cur_obj.round(cr, uid, company.currency_id, t['tax_amount'])
if invoice.type in ['out_invoice', 'in_invoice']:
tax_list = company.sales_applicable_taxes
if company.fiscal_country_state:
tax_list += company.fiscal_country_state.sales_applicable_taxes
if company.fiscal_country:
tax_list += company.fiscal_country.sales_applicable_taxes
else:
return tax_grouped
# Invoice total taxes
for tax in tax_list:
if invoice.type in ('out_invoice','in_invoice'):
if not tax.account_collected_id:
raise openerp.exceptions.Warning(_('Tax %s is not properly configured. No invoice account configured! Please check it') % tax.name)
else:
if not tax.account_paid_id:
raise openerp.exceptions.Warning(_('Tax %s is not properly configured. No payment account configured! Please check it') % tax.name)
for tax in tax_obj.compute_all(cr, uid, [tax],
total_wo_taxes,
1.00,
None,
partner,
context = tax_context)['taxes']:
val={}
val['invoice_id'] = invoice.id
val['name'] = tax['name']
val['amount'] = tax['amount']
val['manual'] = False
val['sequence'] = tax['sequence']
val['base'] = total_wo_taxes
if invoice.type in ('out_invoice','in_invoice'):
val['base_code_id'] = tax['base_code_id']
val['tax_code_id'] = tax['tax_code_id']
val['base_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['base'] * tax['base_sign'], context={'date': invoice.date_invoice or today}, round=False)
val['tax_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['amount'] * tax['tax_sign'], context={'date': invoice.date_invoice or today}, round=False)
val['account_id'] = tax['account_collected_id']
val['account_analytic_id'] = tax['account_analytic_collected_id']
                else:
                    val['base_code_id'] = tax['ref_base_code_id']
                    val['tax_code_id'] = tax['ref_tax_code_id']
                    val['base_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['base'] * tax['ref_base_sign'], context={'date': invoice.date_invoice or today}, round=False)
                    val['tax_amount'] = cur_obj.compute(cr, uid, invoice.currency_id.id, company_currency, val['amount'] * tax['ref_tax_sign'], context={'date': invoice.date_invoice or today}, round=False)
                    val['account_id'] = tax['account_paid_id']
                    val['account_analytic_id'] = tax['account_analytic_paid_id']
key = (val['tax_code_id'], val['base_code_id'], val['account_id'], val['account_analytic_id'])
if not key in tax_grouped:
tax_grouped[key] = val
else:
tax_grouped[key]['amount'] += val['amount']
tax_grouped[key]['base'] += val['base']
tax_grouped[key]['base_amount'] += val['base_amount']
tax_grouped[key]['tax_amount'] += val['tax_amount']
for t in tax_grouped.values():
t['base'] = cur_obj.round(cr, uid, company.currency_id, t['base'])
t['amount'] = cur_obj.round(cr, uid, company.currency_id, t['amount'])
t['base_amount'] = cur_obj.round(cr, uid, company.currency_id, t['base_amount'])
t['tax_amount'] = cur_obj.round(cr, uid, company.currency_id, t['tax_amount'])
return tax_grouped
|
agpl-3.0
| -4,234,420,390,260,595,700 | 54.047904 | 205 | 0.526923 | false |
ronie/script.playalbum
|
default.py
|
1
|
1774
|
import sys
import xbmc, xbmcaddon
import json
ADDON = xbmcaddon.Addon()
ADDONID = ADDON.getAddonInfo('id')
ADDONVERSION = ADDON.getAddonInfo('version')
def log(txt):
if isinstance (txt,str):
txt = txt.decode("utf-8")
message = u'%s: %s' % (ADDONID, txt)
xbmc.log(msg=message.encode("utf-8"), level=xbmc.LOGDEBUG)
class Main:
def __init__(self):
self._parse_argv()
if self.songid:
self._get_albumid()
self._play_album()
def _parse_argv(self):
try:
params = dict(arg.split('=') for arg in sys.argv[1].split('&'))
except:
params = {}
self.songid = int(params.get('songid', False))
self.albumid = int(params.get('albumid', False))
self.tracknr = int(params.get('tracknr', False))
def _get_albumid(self):
json_query = xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"AudioLibrary.GetSongDetails", "params":{"properties":["albumid"], "songid":%s}, "id":1}' % self.songid)
json_query = unicode(json_query, 'utf-8', errors='ignore')
json_response = json.loads(json_query)
if json_response and json_response['result'] and json_response['result'].get('songdetails', None):
self.albumid = json_response['result']['songdetails']['albumid']
def _play_album(self):
xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Player.Open", "params":{"item":{"albumid":%d}}, "id":1}' % self.albumid)
if self.tracknr and self.tracknr > 0:
xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Player.GoTo", "params":{"playerid":0, "to":%d}, "id":1}' % (self.tracknr - 1))
if (__name__ == "__main__"):
log('script version %s started' % ADDONVERSION)
Main()
log('finished')
|
gpl-2.0
| -1,628,069,564,425,146,400 | 37.565217 | 173 | 0.590192 | false |
coarse/NGSE
|
test.py
|
1
|
3875
|
from webtest import TestApp
import unittest
from ngse import main
# from pyramid import testing
# from cornice import Service
import sys
login_testcases=[
['ngse@coe.upd.edu.ph', 'ngse', True],
['mfmayol@up.edu.ph', 'kz2xjiAm', True],
['bmicons360@gmail.com', 'QFmy5jWg', True],
['bmicons360@gmail.com', 'iaX6oHGy', True],
['bmicons360@gmail.com', 'Yz8pcQPb', True],
['bmicons360@gmail.com', 'sm6GlLLq', True],
['sdnkskksdls', 'emknlnsklnkls', False],
['msdksmkl', 'ngse@123', False],
['ngse@coe.upd.edu.ph', 'msdkls', False],
]
view_user_testcases=[
['2', True], #applicant
['1', True], #admin
['1000', False], #non existent user
['', False] #null input
]
# update_application_status_testcases=[
# ['2', 'notNull', True],
# # ['2', '', False],
# ['1', 'notNull', False], #ADMIN
# ['1', '', False],
# ['3', 'notNull', False], #nnonexistent id
# ['3', '', False],
# # ['', 'notNull', False],
# # ['', '', False]
# ]
view_answers_testcases=[
['2', '5', True], #applicant
['2', '100', False], #applicant invalid category
['1000', '5', False], #non existent user
['', '', False] #null input
]
# create_user_testcases=[
# ['ngse@coe.upd.edu.ph', 'name', False],
# # ['valid@email.com', 'name', True],
# # ['', '', False],
# # ['', 'name', False]
# ]
show_form_testcases=[
['1', True],
['5', False]
]
class TestEndpoints(unittest.TestCase):
app = TestApp(main({}))
tokens = []
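    # tokens collected in test_login are verified in test_tokens; this relies on unittest's alphabetical test ordering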
def test_get_users(self):
resp = self.app.get('/v1/users')
print resp.json
assert resp.status == '200 OK'
def test_login(self):
print '\n\n'
for item in login_testcases:
e = item[0]
p = item[1]
o = item[2]
request = self.app.post('/v1/users/login', dict(email=e, password=p))
if o:
self.tokens.append(request.json['token'])
print '{}, {}'.format(request.json['success'], o)
self.assertEqual(request.json['success'], o)
def test_tokens(self):
print '\n\n'
for token in self.tokens:
request = self.app.post('/v1/users/verify', dict(token=token))
print '{}'.format(request.json['message'])
self.assertEqual(request.json['success'], True)
def test_view_user(self):
print '\n\n'
for item in view_user_testcases:
id = item[0]
o = item[1]
resp = self.app.get('/v1/users/show', dict(user_id=id))
if not o:
print resp.json
self.assertEqual(resp.json['success'], o)
else:
assert resp.status == '200 OK'
# def test_update_status(self):
# print '\n\n'
# for item in update_status_testcases:
# id = item[0]
# stat = item[1]
# o = item[2]
# # print id, stat, o
# request = app.get('/v1/users/update_status', dict(user_id=id, user_status = stat))
# self.assertEqual(request.json['success'], o)
def test_view_answers(self):
print '\n\n'
for item in view_answers_testcases:
id = item[0]
cat_id = item[1]
o = item[2]
resp = self.app.get('/v1/users/answers/show', dict(user_id=id, category_id=cat_id))
if o:
assert resp.status == '200 OK'
else:
self.assertEqual(resp.json['success'], o)
# def test_create_user(self):
# print '\n\n'
# for item in create_user_testcases:
# e, n, o = item[0], item[1], item[2]
# request = app.post('/v1/users/create', dict(email=e, name=n))
# self.assertEqual(request.json['success'], o)
def test_get_forms(self):
print '\n\n'
request = self.app.get('/v1/forms')
self.assertEqual(request.status, '200 OK')
def test_list_form_types(self):
print '\n\n'
request = self.app.get('/v1/forms/types')
self.assertEqual(request.status, '200 OK')
def test_show_form(self):
print '\n\n'
for item in show_form_testcases:
id = item[0]
o = item[1]
resp = self.app.get('/v1/forms/show', dict(form_id=id))
print resp.json
if o:
assert resp.status == '200 OK'
else:
self.assertEqual(resp.json['success'], o)
if __name__ == '__main__':
unittest.main()
|
gpl-3.0
| 8,050,052,237,690,515,000 | 24 | 87 | 0.610581 | false |
IKARUSSoftwareSecurity/FakeTokenConfigDecrypt
|
decrypt_bankersecure.py
|
1
|
2652
|
__author__ = 'bachmann.s & elias.t'
import base64
from Crypto.Cipher import Blowfish
import zipfile
import sys
from os.path import isdir
from os.path import exists
from os.path import join
from os.path import split
from os import listdir
from argparse import ArgumentParser
blfs = 'res/raw/blfs.key'
config = 'res/raw/config.cfg'
iv = "12345678"
def decrypt_config(file_path):
'''
This is an APK reader that reads out config and blfs.key.
Prints the APK name along with the decrypted config data.
:param file_path: APK file to read and decrypt its config.cfg
:return: nothing
'''
try:
arch = zipfile.ZipFile(file_path, 'r')
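        # the Blowfish key is derived from blfs.key: each character's ordinal, hex-encoded without the '0x' prefix, concatenated and truncated to 50 chars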
key = "".join(list(map(lambda x: x[2:], map(hex, map(ord, (arch.open(blfs,'r').read()).decode("utf-8"))))))[:50]
ciphertext = base64.b64decode(arch.read('res/raw/config.cfg'))
cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv)
print("\n" + ''.join(split(file_path)[-1:]) + ":")
print(cipher.decrypt(ciphertext).decode("UTF-8"))
except zipfile.BadZipfile:
print(file_path + r" is not a ZIP file!")
return
except KeyError:
print(file_path + r" is either not the Trojan-Banker APK/n"
r"or the filename(s) are not the same anymore.")
print("Unexpected error: " + sys.exc_info())
raise
def process_folder(folder_path):
'''
Runs decrypt_config() on all APKs
in the directory where folder_path
points to
:param folder_path: folder to analyze
:return: nothing
'''
folder_entries = listdir(folder_path)
# check if the folder is empty
if len(folder_entries) != 0:
for entry in folder_entries:
absolute_path = join(folder_path, entry)
if isdir(absolute_path):
process_folder(absolute_path)
            elif exists(absolute_path):
decrypt_config(absolute_path)
if __name__ == "__main__":
'''
Tested on Windows 8.1 and Ubuntu 14.04
'''
parser = ArgumentParser(description="Decrypts the config.cfg file of Trojan-Banker.")
parser.add_argument('-i', '--input', nargs='+',
help='Input APK file for analysis.', required=True)
# if the path is a folder or a file
args = parser.parse_args()
if args.input:
print("Analyzing APK(s)...")
for path in args.input:
# if it is a folder then process all files that are APK files
if isdir(path):
process_folder(path)
# otherwise process the single file
elif exists(path):
decrypt_config(path)
pass
|
bsd-2-clause
| -7,360,807,983,732,962,000 | 31.753086 | 120 | 0.609729 | false |
sarielsaz/sarielsaz
|
test/functional/importprunedfunds.py
|
1
|
4602
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Sarielsaz Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importprunedfunds and removeprunedfunds RPCs."""
from test_framework.test_framework import SarielsazTestFramework
from test_framework.util import *
class ImportPrunedFundsTest(SarielsazTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(101)
self.sync_all()
# address
address1 = self.nodes[0].getnewaddress()
# pubkey
address2 = self.nodes[0].getnewaddress()
# privkey
address3 = self.nodes[0].getnewaddress()
address3_privkey = self.nodes[0].dumpprivkey(address3) # Using privkey
#Check only one address
address_info = self.nodes[0].validateaddress(address1)
assert_equal(address_info['ismine'], True)
self.sync_all()
#Node 1 sync test
assert_equal(self.nodes[1].getblockcount(),101)
#Address Test - before import
address_info = self.nodes[1].validateaddress(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address2)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
#Send funds to self
txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
self.nodes[0].generate(1)
rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
proof1 = self.nodes[0].gettxoutproof([txnid1])
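        # gettxoutproof returns a merkle proof that the tx is in a block; importprunedfunds needs it since pruned nodes cannot rescan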
txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
self.nodes[0].generate(1)
rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
proof2 = self.nodes[0].gettxoutproof([txnid2])
txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
self.nodes[0].generate(1)
rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
proof3 = self.nodes[0].gettxoutproof([txnid3])
self.sync_all()
#Import with no affiliated address
assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)
balance1 = self.nodes[1].getbalance("", 0, True)
assert_equal(balance1, Decimal(0))
#Import with affiliated address with no rescan
self.nodes[1].importaddress(address2, "add2", False)
self.nodes[1].importprunedfunds(rawtxn2, proof2)
balance2 = self.nodes[1].getbalance("add2", 0, True)
assert_equal(balance2, Decimal('0.05'))
#Import with private key with no rescan
self.nodes[1].importprivkey(privkey=address3_privkey, label="add3", rescan=False)
self.nodes[1].importprunedfunds(rawtxn3, proof3)
balance3 = self.nodes[1].getbalance("add3", 0, False)
assert_equal(balance3, Decimal('0.025'))
balance3 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance3, Decimal('0.075'))
#Addresses Test - after import
address_info = self.nodes[1].validateaddress(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address2)
assert_equal(address_info['iswatchonly'], True)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], True)
#Remove transactions
assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", self.nodes[1].removeprunedfunds, txnid1)
balance1 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance1, Decimal('0.075'))
self.nodes[1].removeprunedfunds(txnid2)
balance2 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance2, Decimal('0.025'))
self.nodes[1].removeprunedfunds(txnid3)
balance3 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance3, Decimal('0.0'))
if __name__ == '__main__':
ImportPrunedFundsTest().main()
|
mit
| -4,054,450,471,639,731,700 | 39.368421 | 117 | 0.644937 | false |
superdachs/servercontrol
|
servercontrol/network/migrations/0001_initial.py
|
1
|
1149
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-19 14:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Interface',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('interface_name', models.CharField(max_length=255)),
('interface_description', models.TextField()),
('dhcp', models.BooleanField()),
('mac_address', models.CharField(blank=True, max_length=255, null=True)),
('ipv4_address', models.GenericIPAddressField(blank=True, null=True, protocol='IPv4')),
('netmask', models.GenericIPAddressField(blank=True, null=True, protocol='IPv4')),
('gateway_address', models.GenericIPAddressField(blank=True, null=True, protocol='IPv4')),
('dns_servers', models.TextField(blank=True, null=True)),
],
),
]
|
gpl-3.0
| 6,986,743,445,912,005,000 | 37.3 | 114 | 0.592689 | false |
centrofermi/e3pipe
|
db/__select2__.py
|
1
|
3094
|
#!/usr/bin/env python
# *********************************************************************
# * Copyright (C) 2014 Luca Baldini (luca.baldini@pi.infn.it) *
# * *
# * For the license terms see the file LICENSE, distributed *
# * along with this software. *
# *********************************************************************
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import datetime
from e3pipe.db.E3RunDbInterface import E3RunDbInterface
from e3pipe.tasks.__exitcodes__ import E3PIPE_EXIT_CODE_SUCCESS
def _select(startDate, endDate, selection = None, db = None):
""" Base function.
"""
_closeOnExit = False
if db is None:
db = E3RunDbInterface()
_closeOnExit = True
query = 'SELECT station_name, run_date, run_id from runs2 WHERE '
query += 'run_date BETWEEN "%s" AND "%s"' % (startDate, endDate)
if selection:
query += ' AND %s' % selection
query += ';'
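    # note: values are interpolated directly into the SQL string; inputs are assumed to be trusted (internal tooling)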
db.execute(query, commit = False)
runList = [item for item in db.fetchall()]
if _closeOnExit:
db.close()
return runList
def selectRunsToBeProcessed(startDate, endDate, minSize = 0, overwrite = False,
db = None):
""" Select the runs to be processed.
"""
selection = 'bin_file_size > %d' % minSize
if not overwrite:
selection += ' AND processing_status_code IS NULL'
return _select(startDate, endDate, selection, db)
def selectProcessedRuns(startDate, endDate, selection = None, db = None):
""" Select the runs that have been correctly processed, subjected to some
condition.
"""
_scsel = 'processing_status_code = %d' % E3PIPE_EXIT_CODE_SUCCESS
if selection is None:
selection = _scsel
else:
selection += ' AND %s' % _scsel
return _select(startDate, endDate, selection, db)
def test():
""" Test program.
"""
endDate = datetime.date.today()
startDate = endDate - datetime.timedelta(1)
for run in selectRunsToBeProcessed(startDate, endDate):
print run
print
for run in selectProcessedRuns(startDate, endDate):
print run
print
for run in selectProcessedRuns(startDate, endDate,
'station_name = "BOLO-03"'):
print run
if __name__ == '__main__':
test()
|
gpl-3.0
| -7,725,358,799,463,030,000 | 33 | 79 | 0.601487 | false |
Charlotte-Morgan/inasafe
|
safe/gis/vector/clean_geometry.py
|
1
|
2725
|
# coding=utf-8
"""Try to make a layer valid."""
from qgis.core import QgsFeatureRequest
from safe.common.custom_logging import LOGGER
from safe.definitions.processing_steps import clean_geometry_steps
from safe.gis.sanity_check import check_layer
from safe.utilities.profiling import profile
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
@profile
def clean_layer(layer):
"""Clean a vector layer.
:param layer: The vector layer.
:type layer: QgsVectorLayer
:return: The buffered vector layer.
:rtype: QgsVectorLayer
"""
output_layer_name = clean_geometry_steps['output_layer_name']
output_layer_name = output_layer_name % layer.keywords['layer_purpose']
# start editing
layer.startEditing()
count = 0
# iterate through all features
request = QgsFeatureRequest().setSubsetOfAttributes([])
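    # fetch geometries only (no attributes) to keep the feature iteration cheap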
for feature in layer.getFeatures(request):
geom = feature.geometry()
was_valid, geometry_cleaned = geometry_checker(geom)
if was_valid:
# Do nothing if it was valid
pass
elif not was_valid and geometry_cleaned:
# Update the geometry if it was not valid, and clean now
layer.changeGeometry(feature.id(), geometry_cleaned, True)
else:
# Delete if it was not valid and not able to be cleaned
count += 1
layer.deleteFeature(feature.id())
if count:
LOGGER.critical(
'%s features have been removed from %s because of invalid '
'geometries.' % (count, layer.name()))
else:
LOGGER.info(
'No feature has been removed from the layer: %s' % layer.name())
# save changes
layer.commitChanges()
layer.keywords['title'] = output_layer_name
check_layer(layer)
return layer
def geometry_checker(geometry):
"""Perform a cleaning if the geometry is not valid.
:param geometry: The geometry to check and clean.
:type geometry: QgsGeometry
:return: Tuple of bool and cleaned geometry. True if the geometry is
already valid, False if the geometry was not valid.
A cleaned geometry, or None if the geometry could not be repaired
:rtype: (bool, QgsGeometry)
"""
if geometry is None:
# The geometry can be None.
return False, None
if geometry.isGeosValid():
return True, geometry
else:
new_geom = geometry.makeValid()
if new_geom.isGeosValid():
return False, new_geom
else:
# Make valid was not enough, the feature will be deleted.
return False, None
|
gpl-3.0
| 244,502,894,381,175,800 | 29.277778 | 76 | 0.645138 | false |
start-jsk/jsk_apc
|
jsk_apc2016_common/python/jsk_apc2016_common/rbo_segmentation/evaluate.py
|
1
|
7455
|
from apc_data import APCDataSet, APCSample
from probabilistic_segmentation import ProbabilisticSegmentationRF, ProbabilisticSegmentationBP
import pickle
import os
import matplotlib.pyplot as plt
import numpy as np
import copy
import rospkg
def _fast_hist(a, b, n):
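    # builds an n x n confusion matrix via bincount: rows are true labels, columns are predictions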
k = (a >= 0) & (a < n)
hist = np.bincount(n * a[k].astype(int) +
b[k], minlength=n**2).reshape(n, n)
return hist
def label_accuracy_score(label_true, label_pred, n_class):
"""Returns accuracy score evaluation result.
- overall accuracy
- mean accuracy
- mean IU
- fwavacc
"""
hist = _fast_hist(label_true.flatten(), label_pred.flatten(), n_class)
acc = np.diag(hist).sum() / hist.sum().astype(np.float64)
acc_cls = np.diag(hist) / hist.sum(axis=1).astype(np.float64)
acc_cls = np.nanmean(acc_cls)
iu = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist)).astype(np.float64)
mean_iu = np.nanmean(iu)
freq = hist.sum(axis=1) / hist.sum().astype(np.float64)
fwavacc = (freq[freq > 0] * iu[freq > 0]).sum()
return acc, acc_cls, mean_iu, fwavacc
# previously declared in main.py
def combine_datasets(datasets):
samples = []
for d in datasets:
samples += d.samples
return APCDataSet(samples=samples)
def load_datasets(dataset_names, data_path, cache_path):
datasets = dict()
for dataset_name in dataset_names:
dataset_path = os.path.join(
data_path, 'rbo_apc/{}'.format(dataset_name))
datasets[dataset_name] = APCDataSet(
name=dataset_name, dataset_path=dataset_path,
cache_path=cache_path, load_from_cache=True)
return datasets
def evaluate(bp, test_data):
acc_list = []
acc_cls_list = []
mean_iu_list = []
fwavacc_list = []
for sample in test_data.samples:
if len(sample.object_masks) == 0:
continue
pred_target = sample.object_masks.keys()[0]
if pred_target == 'shelf':
if len(sample.object_masks.keys()) == 1:
continue
pred_target = sample.object_masks.keys()[1]
bp.predict(sample, pred_target)
print 'done'
images = []
images.append(bp.posterior_images_smooth['shelf'])
objects = []
objects.append('shelf')
for _object in bp.posterior_images_smooth.keys():
if _object != 'shelf':
images.append(bp.posterior_images_smooth[_object])
objects.append(_object)
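        # per-pixel argmax over the stacked posteriors gives the predicted label map (index 0 is the shelf)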
pred = np.argmax(np.array(images), axis=0)
# remove dataset that does not have complete set
objects_copy = copy.copy(objects)
object_masks_keys = sample.object_masks.keys()
        if 'shelf' in objects_copy:
            objects_copy.remove('shelf')
        if 'shelf' in object_masks_keys:
            object_masks_keys.remove('shelf')
if set(objects_copy) != set(object_masks_keys):
#print 'skip posterior_image keys ', objects_copy
#print 'skip object_mask keys ', object_masks_keys
continue
true = np.zeros_like(pred)
for i, _object in enumerate(objects):
if _object != 'shelf':
true[sample.object_masks[_object]] = i
masked_pred = pred[sample.bin_mask]
masked_true = true[sample.bin_mask]
acc, acc_cls, mean_iu, fwavacc = label_accuracy_score(masked_true, masked_pred, len(objects))
acc_list.append(acc)
acc_cls_list.append(acc_cls)
mean_iu_list.append(mean_iu)
fwavacc_list.append(fwavacc)
"""
label_pred = np.zeros(pred.shape[1:]).astype(np.int64)
label_true = np.zeros(pred.shape[1:]).astype(np.int64)
for i in range(pred.shape[0]):
label_pred[pred[i]] = i
label_true[true[i]] = i
label_pred_masked = label_pred[sample.bin_mask]
label_true_masked = label_true[sample.bin_mask]
"""
return acc_list, acc_cls_list, mean_iu_list, fwavacc_list
def create_dataset(dataset_path):
# initialize empty dataset
dataset = APCDataSet(from_pkl=False)
data_file_prefixes = []
key = '.jpg'
for dir_name, sub_dirs, files in os.walk(dataset_path):
for f in files:
if key == f[-len(key):]:
data_file_prefixes.append(
os.path.join(dir_name, f[:-len(key)]))
print data_file_prefixes
for file_prefix in data_file_prefixes:
dataset.samples.append(
APCSample(data_2016_prefix=file_prefix,
labeled=True, is_2016=True, infer_shelf_mask=True))
return dataset
###############################################################################
# prepare dataset #
###############################################################################
#data_path = '/home/leus/ros/indigo/src/start-jsk/jsk_apc/jsk_apc2016_common/data'
#cache_path = os.path.join(data_path, 'cache')
#dataset_path = os.path.join(data_path, 'rbo_apc')
rospack = rospkg.RosPack()
common_path = rospack.get_path('jsk_apc2016_common')
data_path = common_path + '/data/'
dataset_name = 'tokyo_run/single_item_labeled'
dataset_path = os.path.join(data_path, dataset_name)
data = create_dataset(dataset_path)
###############################################################################
# dataset #
###############################################################################
train_data, test_data = data.split_simple(portion_training=0.7)
###############################################################################
# all features #
###############################################################################
all_features = ['color', 'height3D', 'dist2shelf']
params = {
'use_features': all_features,
'segmentation_method': "max_smooth", 'selection_method': "max_smooth",
'make_convex': True, 'do_shrinking_resegmentation': True,
'do_greedy_resegmentation': True}
bp = ProbabilisticSegmentationBP(**params)
bp.fit(train_data)
acc_list, acc_cls_list, mean_iu_list, fwavacc_list = evaluate(bp, test_data)
print 'all features acc ', np.mean(acc_list)
print 'all features acc_cls ', np.mean(acc_cls_list)
print 'all features mean_iu ', np.mean(mean_iu_list)
print 'all features fwavcc ', np.mean(fwavacc_list)
###############################################################################
# # Color only #
###############################################################################
params = {
'use_features': ['color'],
'segmentation_method': "max_smooth", 'selection_method': "max_smooth",
'make_convex': True, 'do_shrinking_resegmentation': True,
'do_greedy_resegmentation': True}
bp = ProbabilisticSegmentationBP(**params)
bp.fit(train_data)
acc_list, acc_cls_list, mean_iu_list, fwavacc_list = evaluate(bp, test_data)
print 'trained only by color features acc ', np.mean(acc_list)
print 'trained only by color features acc_cls ', np.mean(acc_cls_list)
print 'trained only by color features mean_iu ', np.mean(mean_iu_list)
print 'trained only by color features fwavcc ', np.mean(fwavacc_list)
|
bsd-3-clause
| -4,862,534,498,546,936,000 | 34.331754 | 101 | 0.548625 | false |
ElvishArtisan/GlassCoder
|
plugins/pypad_glasscoder.py
|
1
|
2223
|
#!%PYTHON_BANGPATH%
# pypad_glasscoder.py
#
# Send articulated PAD updates to an instance of glasscoder(1).
#
# (C) Copyright 2019 Fred Gleason <fredg@paravelsystems.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import sys
import syslog
import configparser
import pypad
import json
import requests
def eprint(*args,**kwargs):
print(*args,file=sys.stderr,**kwargs)
def ProcessPad(update):
if update.config().has_section('Glasscoder'):
update_url=update.config().get('Glasscoder','UpdateUrl')
n=1
lines=[]
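        # collect key/value pairs from consecutive [LineN] config sections until the first missing one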
section='Line'+str(n)
while(update.config().has_section(section)):
lines.append('"%s": "%s"' % (update.config().get(section,'Key'), update.resolvePadFields(update.config().get(section,'Value'),pypad.ESCAPE_JSON)))
n=n+1
section='Line'+str(n)
if update.shouldBeProcessed('Glasscoder'):
req_data='{ "Metadata": { %s } }' % ", ".join(lines)
req_url = update_url+'/json_pad'
try:
r = requests.post(req_url, json=json.loads(req_data))
update.syslog(syslog.LOG_INFO,'[PyPAD][Glasscoder] Update exit code: ' + str(r.status_code))
except requests.exceptions.RequestException as e:
update.syslog(syslog.LOG_WARNING,'[PyPAD][Glasscoder] Update failed: ' + str(e))
#
# 'Main' function
#
rcvr=pypad.Receiver()
try:
rcvr.setConfigFile(sys.argv[3])
except IndexError:
eprint('pypad_glasscoder.py: USAGE: cmd <hostname> <port> <config>')
sys.exit(1)
rcvr.setPadCallback(ProcessPad)
rcvr.start(sys.argv[1],int(sys.argv[2]))
|
gpl-2.0
| -3,623,632,450,328,001,000 | 34.285714 | 158 | 0.665767 | false |
johnnoone/json-spec
|
tests/test_operations.py
|
1
|
4710
|
"""
tests.test_operations
~~~~~~~~~~~~~~~~~~~~~~
"""
import pytest
try:
    from collections.abc import Mapping, Sequence  # Python 3.3+
except ImportError:  # fallback for Python 2
    from collections import Mapping, Sequence
from . import TestMappingType, TestSequenceType
from jsonspec.operations import check, remove, add, replace, copy, move
from jsonspec.operations import Error, NonexistentTarget
def test_types():
assert isinstance(TestMappingType(), Mapping)
assert isinstance(TestSequenceType(), Sequence)
def test_check():
assert check({'foo': 'bar'}, '/foo', 'bar')
assert not check({'foo': 'bar'}, '/foo', 'baz')
assert not check({'foo': 'bar'}, '/bar/baz', 'quux')
with pytest.raises(Error):
check({'foo': 'bar'}, '/bar/baz', 'quux', raise_onerror=True)
def test_remove():
obj = {'foo': 'bar'}
response = remove(obj, '/foo')
assert response == {}
assert response != obj
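    # operations return a new document; the original mapping is left untouched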
with pytest.raises(Error):
assert remove({'foo': 'bar'}, '/bar')
def test_add():
obj = {'foo': 'bar'}
response = add(obj, '/baz', 'quux')
assert response == {'foo': 'bar', 'baz': 'quux'}
assert response != obj
obj = {'foo': {'bar': 'baz'}}
response = add(obj, '/baz', 'quux')
assert response == {'foo': {'bar': 'baz'}, 'baz': 'quux'}
response = add(obj, '/foo/quux', 42)
assert response == {'foo': {'bar': 'baz', 'quux': 42}}
def test_replace():
obj = {'foo': 'bar'}
response = replace(obj, '/foo', 'quux')
assert response == {'foo': 'quux'}
assert response != obj
with pytest.raises(Error):
replace(obj, '/baz', 'quux')
def test_copy():
obj = {'foo': 42, 'bar': {}}
response = copy(obj, '/bar/baz', '/foo')
assert response == {'foo': 42, 'bar': {'baz': 42}}
assert response != obj
obj = {'foo': {'baz': 42}}
response = copy(obj, '/bar', '/foo')
assert response == {'foo': {'baz': 42}, 'bar': {'baz': 42}}
def test_move():
obj = {'foo': 42}
response = move(obj, '/bar', '/foo')
assert response == {'bar': 42}
assert response != obj
obj = {'foo': {'bar': 'baz'}}
response = move(obj, '/bar', '/foo/bar')
assert response == {'bar': 'baz', 'foo': {}}
def test_add_object_member():
obj = {'foo': 'bar'}
assert add(obj, '/baz', 'qux') == {
'baz': 'qux',
'foo': 'bar'
}
def test_add_array_element():
obj = {'foo': ['bar', 'baz']}
assert add(obj, '/foo/1', 'qux') == {
'foo': ['bar', 'qux', 'baz']
}
def test_remove_object_member():
obj = {
'baz': 'qux',
'foo': 'bar'
}
assert remove(obj, '/baz') == {'foo': 'bar'}
def test_remove_array_element():
obj = {
'foo': ['bar', 'qux', 'baz']
}
assert remove(obj, '/foo/1') == {
'foo': ['bar', 'baz']
}
def test_replace_value():
obj = {
'baz': 'qux',
'foo': 'bar'
}
assert replace(obj, '/baz', 'boo') == {
'baz': 'boo',
'foo': 'bar'
}
def test_move_value():
obj = {
'foo': {
'bar': 'baz',
'waldo': 'fred'
},
'qux': {
'corge': 'grault'
}
}
assert move(obj, '/qux/thud', '/foo/waldo') == {
'foo': {
'bar': 'baz'
},
'qux': {
'corge': 'grault',
'thud': 'fred'
}
}
def test_move_array_element():
obj = {
'foo': ['all', 'grass', 'cows', 'eat']
}
assert move(obj, '/foo/3', '/foo/1') == {
'foo': ['all', 'cows', 'eat', 'grass']
}
def test_testing_value_success():
obj = {
'baz': 'qux',
'foo': ['a', 2, 'c']
}
assert check(obj, '/baz', 'qux')
assert check(obj, '/foo/1', 2)
def test_testing_value_error():
obj = {'baz': 'qux'}
assert not check(obj, '/baz', 'bar')
def test_adding_nested_member_object():
obj = {'foo': 'bar'}
assert add(obj, '/child', {'grandchild': {}}) == {
'foo': 'bar',
'child': {
'grandchild': {}
}
}
def test_adding_to_nonexistent_target():
obj = {'foo': 'bar'}
with pytest.raises(NonexistentTarget):
assert add(obj, '/baz/bat', 'qux')
def test_escape_ordering():
obj = {
'/': 9,
'~1': 10
}
assert check(obj, '/~01', 10)
def test_comparing_strings_and_numbers():
obj = {
'/': 9,
'~1': 10
}
assert not check(obj, '/~01', '10')
def test_adding_array_value():
obj = {'foo': ['bar']}
assert add(obj, '/foo/-', ['abc', 'def']) == {
'foo': ['bar', ['abc', 'def']]
}
def test_adding_mapping_type_value():
obj = TestMappingType({'foo': ['bar']})
assert add(obj, '/foo/-', ['abc', 'def']) == TestMappingType({
'foo': ['bar', ['abc', 'def']]
})
|
bsd-3-clause
| -2,329,346,672,964,189,700 | 23.409326 | 71 | 0.486412 | false |
proversity-org/edx-platform
|
openedx/features/course_experience/tests/views/test_course_home.py
|
1
|
25042
|
# coding=utf-8
"""
Tests for the course home page.
"""
from datetime import datetime, timedelta
import ddt
import mock
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.utils.http import urlquote_plus
from django.utils.timezone import now
from pytz import UTC
from waffle.models import Flag
from waffle.testutils import override_flag
from course_modes.models import CourseMode
from courseware.tests.factories import StaffFactory
from lms.djangoapps.commerce.models import CommerceConfiguration
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.course_goals.api import add_course_goal, remove_course_goal
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES, override_waffle_flag
from openedx.features.course_experience import (
SHOW_REVIEWS_TOOL_FLAG,
SHOW_UPGRADE_MSG_ON_COURSE_HOME,
UNIFIED_COURSE_TAB_FLAG
)
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from util.date_utils import strftime_localized
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import CourseUserType, ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
from ... import COURSE_PRE_START_ACCESS_FLAG, ENABLE_COURSE_GOALS
from .helpers import add_course_mode
from .test_course_updates import create_course_update, remove_course_updates
TEST_PASSWORD = 'test'
TEST_CHAPTER_NAME = 'Test Chapter'
TEST_WELCOME_MESSAGE = '<h2>Welcome!</h2>'
TEST_UPDATE_MESSAGE = '<h2>Test Update!</h2>'
TEST_COURSE_UPDATES_TOOL = '/course/updates">'
TEST_COURSE_HOME_MESSAGE = 'course-message'
TEST_COURSE_HOME_MESSAGE_ANONYMOUS = '/login'
TEST_COURSE_HOME_MESSAGE_UNENROLLED = 'Enroll now'
TEST_COURSE_HOME_MESSAGE_PRE_START = 'Course starts in'
TEST_COURSE_GOAL_OPTIONS = 'goal-options-container'
TEST_COURSE_GOAL_UPDATE_FIELD = 'section-goals'
TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN = 'section-goals hidden'
COURSE_GOAL_DISMISS_OPTION = 'unsure'
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES
def course_home_url(course):
"""
Returns the URL for the course's home page.
Arguments:
course (CourseDescriptor): The course being tested.
"""
return course_home_url_from_string(unicode(course.id))
def course_home_url_from_string(course_key_string):
"""
Returns the URL for the course's home page.
Arguments:
course_key_string (String): The course key as string.
"""
return reverse(
'openedx.course_experience.course_home',
kwargs={
'course_id': course_key_string,
}
)
class CourseHomePageTestCase(SharedModuleStoreTestCase):
"""
Base class for testing the course home page.
"""
@classmethod
def setUpClass(cls):
"""
Set up a course to be used for testing.
"""
# setUpClassAndTestData() already calls setUpClass on SharedModuleStoreTestCase
# pylint: disable=super-method-not-called
with super(CourseHomePageTestCase, cls).setUpClassAndTestData():
with cls.store.default_store(ModuleStoreEnum.Type.split):
cls.course = CourseFactory.create(
org='edX',
number='test',
display_name='Test Course',
start=now() - timedelta(days=30),
)
with cls.store.bulk_operations(cls.course.id):
chapter = ItemFactory.create(
category='chapter',
parent_location=cls.course.location,
display_name=TEST_CHAPTER_NAME,
)
section = ItemFactory.create(category='sequential', parent_location=chapter.location)
section2 = ItemFactory.create(category='sequential', parent_location=chapter.location)
ItemFactory.create(category='vertical', parent_location=section.location)
ItemFactory.create(category='vertical', parent_location=section2.location)
@classmethod
def setUpTestData(cls):
"""Set up and enroll our fake user in the course."""
cls.staff_user = StaffFactory(course_key=cls.course.id, password=TEST_PASSWORD)
cls.user = UserFactory(password=TEST_PASSWORD)
CourseEnrollment.enroll(cls.user, cls.course.id)
def create_future_course(self, specific_date=None):
"""
Creates and returns a course in the future.
"""
return CourseFactory.create(
display_name='Test Future Course',
start=specific_date if specific_date else now() + timedelta(days=30),
)
class TestCourseHomePage(CourseHomePageTestCase):
def setUp(self):
super(TestCourseHomePage, self).setUp()
self.client.login(username=self.user.username, password=TEST_PASSWORD)
def tearDown(self):
remove_course_updates(self.user, self.course)
super(TestCourseHomePage, self).tearDown()
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
def test_welcome_message_when_unified(self):
# Create a welcome message
create_course_update(self.course, self.user, TEST_WELCOME_MESSAGE)
url = course_home_url(self.course)
response = self.client.get(url)
self.assertContains(response, TEST_WELCOME_MESSAGE, status_code=200)
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=False)
def test_welcome_message_when_not_unified(self):
# Create a welcome message
create_course_update(self.course, self.user, TEST_WELCOME_MESSAGE)
url = course_home_url(self.course)
response = self.client.get(url)
self.assertNotContains(response, TEST_WELCOME_MESSAGE, status_code=200)
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
def test_updates_tool_visibility(self):
"""
Verify that the updates course tool is visible only when the course
has one or more updates.
"""
url = course_home_url(self.course)
response = self.client.get(url)
self.assertNotContains(response, TEST_COURSE_UPDATES_TOOL, status_code=200)
create_course_update(self.course, self.user, TEST_UPDATE_MESSAGE)
url = course_home_url(self.course)
response = self.client.get(url)
self.assertContains(response, TEST_COURSE_UPDATES_TOOL, status_code=200)
def test_queries(self):
"""
Verify that the view's query count doesn't regress.
"""
        # Pre-fetch the view to populate any caches
        self.client.get(course_home_url(self.course))
# Fetch the view and verify the query counts
with self.assertNumQueries(50, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(4):
url = course_home_url(self.course)
self.client.get(url)
@mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_start_date_handling(self):
"""
Verify that the course home page handles start dates correctly.
"""
# The course home page should 404 for a course starting in the future
future_course = self.create_future_course(datetime(2030, 1, 1, tzinfo=UTC))
url = course_home_url(future_course)
response = self.client.get(url)
self.assertRedirects(response, '/dashboard?notlive=Jan+01%2C+2030')
# With the Waffle flag enabled, the course should be visible
with override_flag(COURSE_PRE_START_ACCESS_FLAG.namespaced_flag_name, True):
url = course_home_url(future_course)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
@ddt.ddt
class TestCourseHomePageAccess(CourseHomePageTestCase):
"""
Test access to the course home page.
"""
def setUp(self):
super(TestCourseHomePageAccess, self).setUp()
# Make this a verified course so that an upgrade message might be shown
add_course_mode(self.course, upgrade_deadline_expired=False)
# Add a welcome message
create_course_update(self.course, self.staff_user, TEST_WELCOME_MESSAGE)
def tearDown(self):
remove_course_updates(self.staff_user, self.course)
super(TestCourseHomePageAccess, self).tearDown()
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
@override_waffle_flag(SHOW_REVIEWS_TOOL_FLAG, active=True)
@ddt.data(
[CourseUserType.ANONYMOUS, 'To see course content'],
[CourseUserType.ENROLLED, None],
[CourseUserType.UNENROLLED, 'You must be enrolled in the course to see course content.'],
[CourseUserType.UNENROLLED_STAFF, 'You must be enrolled in the course to see course content.'],
)
@ddt.unpack
def test_home_page(self, user_type, expected_message):
self.create_user_for_course(self.course, user_type)
# Render the course home page
url = course_home_url(self.course)
response = self.client.get(url)
# Verify that the course tools and dates are always shown
self.assertContains(response, 'Course Tools')
self.assertContains(response, 'Today is')
        # Verify that the outline, start button, course sock, and welcome message
        # are shown only to enrolled users (unenrolled staff see all of these
        # except the course sock).
is_enrolled = user_type is CourseUserType.ENROLLED
is_unenrolled_staff = user_type is CourseUserType.UNENROLLED_STAFF
expected_count = 1 if (is_enrolled or is_unenrolled_staff) else 0
self.assertContains(response, TEST_CHAPTER_NAME, count=expected_count)
self.assertContains(response, 'Start Course', count=expected_count)
self.assertContains(response, 'Learn About Verified Certificate', count=(1 if is_enrolled else 0))
self.assertContains(response, TEST_WELCOME_MESSAGE, count=expected_count)
# Verify that the expected message is shown to the user
self.assertContains(response, '<div class="user-messages">', count=1 if expected_message else 0)
if expected_message:
self.assertContains(response, expected_message)
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=False)
@override_waffle_flag(SHOW_REVIEWS_TOOL_FLAG, active=True)
@ddt.data(
[CourseUserType.ANONYMOUS, 'To see course content'],
[CourseUserType.ENROLLED, None],
[CourseUserType.UNENROLLED, 'You must be enrolled in the course to see course content.'],
[CourseUserType.UNENROLLED_STAFF, 'You must be enrolled in the course to see course content.'],
)
@ddt.unpack
def test_home_page_not_unified(self, user_type, expected_message):
"""
Verifies the course home tab when not unified.
"""
self.create_user_for_course(self.course, user_type)
# Render the course home page
url = course_home_url(self.course)
response = self.client.get(url)
# Verify that the course tools and dates are always shown
self.assertContains(response, 'Course Tools')
self.assertContains(response, 'Today is')
# Verify that welcome messages are never shown
self.assertNotContains(response, TEST_WELCOME_MESSAGE)
        # Verify that the outline, start button, and course sock are shown
        # only to enrolled users (unenrolled staff see the outline and start
        # button but not the course sock).
is_enrolled = user_type is CourseUserType.ENROLLED
is_unenrolled_staff = user_type is CourseUserType.UNENROLLED_STAFF
expected_count = 1 if (is_enrolled or is_unenrolled_staff) else 0
self.assertContains(response, TEST_CHAPTER_NAME, count=expected_count)
self.assertContains(response, 'Start Course', count=expected_count)
self.assertContains(response, 'Learn About Verified Certificate', count=(1 if is_enrolled else 0))
# Verify that the expected message is shown to the user
self.assertContains(response, '<div class="user-messages">', count=1 if expected_message else 0)
if expected_message:
self.assertContains(response, expected_message)
def test_sign_in_button(self):
"""
Verify that the sign in button will return to this page.
"""
url = course_home_url(self.course)
response = self.client.get(url)
self.assertContains(response, '/login?next={url}'.format(url=urlquote_plus(url)))
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
def test_non_live_course(self):
"""
Ensure that a user accessing a non-live course sees a redirect to
the student dashboard, not a 404.
"""
future_course = self.create_future_course()
self.create_user_for_course(future_course, CourseUserType.ENROLLED)
url = course_home_url(future_course)
response = self.client.get(url)
start_date = strftime_localized(future_course.start, 'SHORT_DATE')
expected_params = QueryDict(mutable=True)
expected_params['notlive'] = start_date
expected_url = '{url}?{params}'.format(
url=reverse('dashboard'),
params=expected_params.urlencode()
)
self.assertRedirects(response, expected_url)
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
@mock.patch("util.date_utils.strftime_localized")
def test_non_live_course_other_language(self, mock_strftime_localized):
"""
Ensure that a user accessing a non-live course sees a redirect to
        the student dashboard, not a 404, even if the localized date is unicode.
"""
future_course = self.create_future_course()
self.create_user_for_course(future_course, CourseUserType.ENROLLED)
fake_unicode_start_time = u"üñîçø∂é_ßtå®t_tîµé"
mock_strftime_localized.return_value = fake_unicode_start_time
url = course_home_url(future_course)
response = self.client.get(url)
expected_params = QueryDict(mutable=True)
expected_params['notlive'] = fake_unicode_start_time
expected_url = u'{url}?{params}'.format(
url=reverse('dashboard'),
params=expected_params.urlencode()
)
self.assertRedirects(response, expected_url)
def test_nonexistent_course(self):
"""
Ensure a non-existent course results in a 404.
"""
self.create_user_for_course(self.course, CourseUserType.ANONYMOUS)
url = course_home_url_from_string('not/a/course')
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
@override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
def test_course_messaging(self):
"""
Ensure that the following four use cases work as expected
1) Anonymous users are shown a course message linking them to the login page
2) Unenrolled users are shown a course message allowing them to enroll
3) Enrolled users who show up on the course page after the course has begun
are not shown a course message.
4) Enrolled users who show up on the course page before the course begins
are shown a message explaining when the course starts as well as a call to
action button that allows them to add a calendar event.
"""
# Verify that anonymous users are shown a login link in the course message
url = course_home_url(self.course)
response = self.client.get(url)
self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
self.assertContains(response, TEST_COURSE_HOME_MESSAGE_ANONYMOUS)
# Verify that unenrolled users are shown an enroll call to action message
user = self.create_user_for_course(self.course, CourseUserType.UNENROLLED)
url = course_home_url(self.course)
response = self.client.get(url)
self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
self.assertContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
        # Verify that enrolled users are not shown any state warning message once the course has begun.
CourseEnrollment.enroll(user, self.course.id)
url = course_home_url(self.course)
response = self.client.get(url)
self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_ANONYMOUS)
self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_PRE_START)
# Verify that enrolled users are shown 'days until start' message before start date
future_course = self.create_future_course()
CourseEnrollment.enroll(user, future_course.id)
url = course_home_url(future_course)
response = self.client.get(url)
self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
self.assertContains(response, TEST_COURSE_HOME_MESSAGE_PRE_START)
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
@override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
@override_waffle_flag(ENABLE_COURSE_GOALS, active=True)
def test_course_goals(self):
"""
Ensure that the following five use cases work as expected.
1) Unenrolled users are not shown the set course goal message.
2) Enrolled users are shown the set course goal message if they have not yet set a course goal.
3) Enrolled users are not shown the set course goal message if they have set a course goal.
4) Enrolled and verified users are not shown the set course goal message.
5) Enrolled users are not shown the set course goal message in a course that cannot be verified.
"""
# Create a course with a verified track.
verifiable_course = CourseFactory.create()
add_course_mode(verifiable_course, upgrade_deadline_expired=False)
# Verify that unenrolled users are not shown the set course goal message.
user = self.create_user_for_course(verifiable_course, CourseUserType.UNENROLLED)
response = self.client.get(course_home_url(verifiable_course))
self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
# Verify that enrolled users are shown the set course goal message in a verified course.
CourseEnrollment.enroll(user, verifiable_course.id)
response = self.client.get(course_home_url(verifiable_course))
self.assertContains(response, TEST_COURSE_GOAL_OPTIONS)
# Verify that enrolled users that have set a course goal are not shown the set course goal message.
add_course_goal(user, verifiable_course.id, COURSE_GOAL_DISMISS_OPTION)
response = self.client.get(course_home_url(verifiable_course))
self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
# Verify that enrolled and verified users are not shown the set course goal message.
remove_course_goal(user, str(verifiable_course.id))
CourseEnrollment.enroll(user, verifiable_course.id, CourseMode.VERIFIED)
response = self.client.get(course_home_url(verifiable_course))
self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
# Verify that enrolled users are not shown the set course goal message in an audit only course.
audit_only_course = CourseFactory.create()
CourseEnrollment.enroll(user, audit_only_course.id)
response = self.client.get(course_home_url(audit_only_course))
self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=True)
@override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
@override_waffle_flag(ENABLE_COURSE_GOALS, active=True)
def test_course_goal_updates(self):
"""
        Ensure that the following four use cases work as expected.
1) Unenrolled users are not shown the update goal selection field.
2) Enrolled users are not shown the update goal selection field if they have not yet set a course goal.
3) Enrolled users are shown the update goal selection field if they have set a course goal.
4) Enrolled users in the verified track are shown the update goal selection field.
"""
# Create a course with a verified track.
verifiable_course = CourseFactory.create()
add_course_mode(verifiable_course, upgrade_deadline_expired=False)
# Verify that unenrolled users are not shown the update goal selection field.
user = self.create_user_for_course(verifiable_course, CourseUserType.UNENROLLED)
response = self.client.get(course_home_url(verifiable_course))
self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
# Verify that enrolled users that have not set a course goal are shown a hidden update goal selection field.
enrollment = CourseEnrollment.enroll(user, verifiable_course.id)
response = self.client.get(course_home_url(verifiable_course))
self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
# Verify that enrolled users that have set a course goal are shown a visible update goal selection field.
add_course_goal(user, verifiable_course.id, COURSE_GOAL_DISMISS_OPTION)
response = self.client.get(course_home_url(verifiable_course))
self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
# Verify that enrolled and verified users are shown the update goal selection
CourseEnrollment.update_enrollment(enrollment, is_active=True, mode=CourseMode.VERIFIED)
response = self.client.get(course_home_url(verifiable_course))
self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
class CourseHomeFragmentViewTests(ModuleStoreTestCase):
CREATE_USER = False
def setUp(self):
super(CourseHomeFragmentViewTests, self).setUp()
CommerceConfiguration.objects.create(checkout_on_ecommerce_service=True)
end = now() + timedelta(days=30)
self.course = CourseFactory(
start=now() - timedelta(days=30),
end=end,
)
self.url = course_home_url(self.course)
CourseMode.objects.create(course_id=self.course.id, mode_slug=CourseMode.AUDIT)
self.verified_mode = CourseMode.objects.create(
course_id=self.course.id,
mode_slug=CourseMode.VERIFIED,
min_price=100,
expiration_datetime=end,
sku='test'
)
self.user = UserFactory()
self.client.login(username=self.user.username, password=TEST_PASSWORD)
name = SHOW_UPGRADE_MSG_ON_COURSE_HOME.waffle_namespace._namespaced_name(
SHOW_UPGRADE_MSG_ON_COURSE_HOME.flag_name)
self.flag, __ = Flag.objects.update_or_create(name=name, defaults={'everyone': True})
def assert_upgrade_message_not_displayed(self):
response = self.client.get(self.url)
self.assertNotIn('section-upgrade', response.content)
def assert_upgrade_message_displayed(self):
response = self.client.get(self.url)
self.assertIn('section-upgrade', response.content)
url = EcommerceService().get_checkout_page_url(self.verified_mode.sku)
self.assertIn('<a class="btn-brand btn-upgrade"', response.content)
self.assertIn(url, response.content)
self.assertIn('Upgrade (${price})'.format(price=self.verified_mode.min_price), response.content)
def test_no_upgrade_message_if_logged_out(self):
self.client.logout()
self.assert_upgrade_message_not_displayed()
def test_no_upgrade_message_if_not_enrolled(self):
self.assertEqual(len(CourseEnrollment.enrollments_for_user(self.user)), 0)
self.assert_upgrade_message_not_displayed()
def test_no_upgrade_message_if_verified_track(self):
CourseEnrollment.enroll(self.user, self.course.id, CourseMode.VERIFIED)
self.assert_upgrade_message_not_displayed()
def test_no_upgrade_message_if_upgrade_deadline_passed(self):
self.verified_mode.expiration_datetime = now() - timedelta(days=20)
self.verified_mode.save()
self.assert_upgrade_message_not_displayed()
def test_no_upgrade_message_if_flag_disabled(self):
self.flag.everyone = False
self.flag.save()
CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)
self.assert_upgrade_message_not_displayed()
def test_display_upgrade_message_if_audit_and_deadline_not_passed(self):
CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)
self.assert_upgrade_message_displayed()
|
agpl-3.0
| -1,781,964,234,215,230,000 | 44.422868 | 116 | 0.68723 | false |
grahamhayes/designate
|
designate/tests/test_api/test_v2/test_zones.py
|
1
|
27400
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from mock import patch
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_log import log as logging
from designate import exceptions
from designate import objects
from designate.central import service as central_service
from designate.mdns import rpcapi as mdns_api
from designate.tests.test_api.test_v2 import ApiV2TestCase
LOG = logging.getLogger(__name__)
class ApiV2ZonesTest(ApiV2TestCase):
def setUp(self):
super(ApiV2ZonesTest, self).setUp()
# Create the default TLDs
self.create_default_tlds()
def test_create_zone(self):
# Create a zone
fixture = self.get_zone_fixture(fixture=0)
response = self.client.post_json('/zones/', fixture)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertEqual('PRIMARY', response.json['type'])
self.assertEqual([], response.json['masters'])
self.assertIsNone(response.json['updated_at'])
for k in fixture:
self.assertEqual(fixture[k], response.json[k])
def test_create_zone_no_type(self):
# Create a zone
fixture = self.get_zone_fixture(fixture=0)
del fixture['type']
response = self.client.post_json('/zones/', fixture)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertEqual('PRIMARY', response.json['type'])
self.assertEqual([], response.json['masters'])
self.assertIsNone(response.json['updated_at'])
for k in fixture:
self.assertEqual(fixture[k], response.json[k])
def test_create_zone_validation(self):
        # NOTE: The schemas should be tested separately from the API. So we
# don't need to test every variation via the API itself.
# Fetch a fixture
fixture = self.get_zone_fixture(fixture=0)
# Add a junk field to the body
fixture['junk'] = 'Junk Field'
# Ensure it fails with a 400
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_email_too_long(self):
fixture = self.get_zone_fixture(fixture=0)
fixture.update({'email': 'a' * 255 + '@abc.com'})
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_invalid_email(self):
invalid_emails = [
'org',
'example.org',
'bla.example.org',
'org.',
'example.org.',
'bla.example.org.',
]
fixture = self.get_zone_fixture(fixture=0)
for email in invalid_emails:
fixture.update({'email': email})
body = fixture
self._assert_exception('invalid_object', 400,
self.client.post_json,
'/zones', body)
def test_create_zone_email_missing(self):
fixture = self.get_zone_fixture(fixture=0)
del fixture['email']
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_less_than_zero(self):
fixture = self.get_zone_fixture(fixture=0)
fixture['ttl'] = -1
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_zero(self):
fixture = self.get_zone_fixture(fixture=0)
fixture['ttl'] = 0
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_greater_than_max(self):
fixture = self.get_zone_fixture(fixture=0)
fixture['ttl'] = 2174483648
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_invalid(self):
fixture = self.get_zone_fixture(fixture=0)
fixture['ttl'] = "!@?>"
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_ttl_is_not_required_field(self):
fixture = self.get_zone_fixture(fixture=0)
body = fixture
response = self.client.post_json('/zones', body)
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
def test_create_zone_description_too_long(self):
fixture = self.get_zone_fixture(fixture=0)
fixture['description'] = "a" * 161
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_name_is_missing(self):
fixture = self.get_zone_fixture(fixture=0)
del fixture['name']
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_name_too_long(self):
fixture = self.get_zone_fixture(fixture=0)
fixture['name'] = 'x' * 255 + ".com"
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_body_validation(self):
fixture = self.get_zone_fixture(fixture=0)
# Add id to the body
fixture['id'] = '2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 400
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
fixture = self.get_zone_fixture(fixture=0)
# Add created_at to the body
fixture['created_at'] = '2014-03-12T19:07:53.000000'
# Ensure it fails with a 400
body = fixture
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', body)
def test_create_zone_invalid_name(self):
# Try to create a zone with an invalid name
fixture = self.get_zone_fixture(fixture=-1)
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones', fixture)
@patch.object(central_service.Service, 'create_zone',
side_effect=messaging.MessagingTimeout())
def test_create_zone_timeout(self, _):
fixture = self.get_zone_fixture(fixture=0)
body = fixture
self._assert_exception('timeout', 504, self.client.post_json,
'/zones/', body)
@patch.object(central_service.Service, 'create_zone',
side_effect=exceptions.DuplicateZone())
def test_create_zone_duplicate(self, _):
fixture = self.get_zone_fixture(fixture=0)
body = fixture
self._assert_exception('duplicate_zone', 409, self.client.post_json,
'/zones/', body)
def test_create_zone_missing_content_type(self):
self._assert_exception('unsupported_content_type', 415,
self.client.post, '/zones')
def test_create_zone_bad_content_type(self):
self._assert_exception(
'unsupported_content_type', 415, self.client.post, '/zones',
headers={'Content-type': 'test/goat'})
def test_zone_invalid_url(self):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980/invalid'
self._assert_exception('not_found', 404, self.client.get, url,
headers={'Accept': 'application/json'})
self._assert_exception('not_found', 404, self.client.patch_json, url)
self._assert_exception('not_found', 404, self.client.delete, url)
        # Pecan returns a 405 for POST
response = self.client.post(url, status=405)
self.assertEqual(405, response.status_int)
def test_get_zones(self):
response = self.client.get('/zones/')
# Check the headers are what we expect
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('zones', response.json)
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# We should start with 0 zones
self.assertEqual(0, len(response.json['zones']))
data = [self.create_zone(name='x-%s.com.' % i)
for i in 'abcdefghij']
self._assert_paging(data, '/zones', key='zones')
self._assert_invalid_paging(data, '/zones', key='zones')
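        # The paging helpers above walk the collection using the API's
        # standard paging parameters; an assumed example request shape is
        # GET /zones?limit=2&marker=<id-of-last-zone-on-previous-page>.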
@patch.object(central_service.Service, 'find_zones',
side_effect=messaging.MessagingTimeout())
def test_get_zones_timeout(self, _):
self._assert_exception('timeout', 504, self.client.get, '/zones/')
def test_get_zone(self):
# Create a zone
zone = self.create_zone()
response = self.client.get('/zones/%s' % zone['id'],
headers=[('Accept', 'application/json')])
# Check the headers are what we expect
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertIsNone(response.json['updated_at'])
self.assertEqual(zone['name'], response.json['name'])
self.assertEqual(zone['email'], response.json['email'])
def test_get_zone_invalid_id(self):
self._assert_invalid_uuid(self.client.get, '/zones/%s')
@patch.object(central_service.Service, 'get_zone',
side_effect=messaging.MessagingTimeout())
def test_get_zone_timeout(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('timeout', 504, self.client.get, url,
headers={'Accept': 'application/json'})
@patch.object(central_service.Service, 'get_zone',
side_effect=exceptions.ZoneNotFound())
def test_get_zone_missing(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('zone_not_found', 404, self.client.get, url,
headers={'Accept': 'application/json'})
def test_get_zone_bad_accept(self):
url = '/zones/6e2146f3-87bc-4f47-adc5-4df0a5c78218'
self.client.get(url, headers={'Accept': 'test/goat'}, status=406)
def test_update_zone(self):
# Create a zone
zone = self.create_zone()
# Prepare an update body
body = {'email': 'prefix-%s' % zone['email']}
response = self.client.patch_json('/zones/%s' % zone['id'], body,
status=202)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
self.assertIn('status', response.json)
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIsNotNone(response.json['updated_at'])
self.assertEqual('prefix-%s' % zone['email'],
response.json['email'])
def test_update_zone_invalid_id(self):
self._assert_invalid_uuid(self.client.patch_json, '/zones/%s')
def test_update_zone_validation(self):
        # NOTE: The schemas should be tested separately from the API. So we
# don't need to test every variation via the API itself.
# Create a zone
zone = self.create_zone()
# Prepare an update body with junk in the body
body = {'email': 'prefix-%s' % zone['email'],
'junk': 'Junk Field'}
url = '/zones/%s' % zone['id']
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.patch_json,
url, body)
# Prepare an update body with negative ttl in the body
body = {'email': 'prefix-%s' % zone['email'],
'ttl': -20}
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.patch_json,
url, body)
# Prepare an update body with ttl > maximum (2147483647) in the body
body = {'email': 'prefix-%s' % zone['email'],
'ttl': 2147483648}
# Ensure it fails with a 400
self._assert_exception('invalid_object', 400, self.client.patch_json,
url, body)
@patch.object(central_service.Service, 'get_zone',
side_effect=exceptions.DuplicateZone())
def test_update_zone_duplicate(self, _):
# Prepare an update body
body = {'email': 'example@example.org'}
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 409
self._assert_exception('duplicate_zone', 409, self.client.patch_json,
url, body)
@patch.object(central_service.Service, 'get_zone',
side_effect=messaging.MessagingTimeout())
def test_update_zone_timeout(self, _):
# Prepare an update body
body = {'email': 'example@example.org'}
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 504
self._assert_exception('timeout', 504, self.client.patch_json,
url, body)
@patch.object(central_service.Service, 'get_zone',
side_effect=exceptions.ZoneNotFound())
def test_update_zone_missing(self, _):
# Prepare an update body
body = {'email': 'example@example.org'}
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
# Ensure it fails with a 404
self._assert_exception('zone_not_found', 404, self.client.patch_json,
url, body)
def test_delete_zone(self):
zone = self.create_zone()
response = self.client.delete('/zones/%s' % zone['id'], status=202)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual('DELETE', response.json['action'])
self.assertEqual('PENDING', response.json['status'])
# The deleted zone should still be listed
zones = self.client.get('/zones/')
self.assertEqual(1, len(zones.json['zones']))
def test_delete_zone_invalid_id(self):
self._assert_invalid_uuid(self.client.delete, '/zones/%s')
@patch.object(central_service.Service, 'delete_zone',
side_effect=messaging.MessagingTimeout())
def test_delete_zone_timeout(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('timeout', 504, self.client.delete, url)
@patch.object(central_service.Service, 'delete_zone',
side_effect=exceptions.ZoneNotFound())
def test_delete_zone_missing(self, _):
url = '/zones/2fdadfb1-cf96-4259-ac6b-bb7b6d2ff980'
self._assert_exception('zone_not_found', 404, self.client.delete,
url)
def test_post_abandon_zone(self):
zone = self.create_zone()
url = '/zones/%s/tasks/abandon' % zone.id
# Ensure that we get permission denied
self._assert_exception('forbidden', 403, self.client.post_json, url)
# Ensure that abandon zone succeeds with the right policy
self.policy({'abandon_zone': '@'})
response = self.client.post_json(url)
self.assertEqual(204, response.status_int)
def test_get_abandon_zone(self):
zone = self.create_zone()
url = '/zones/%s/tasks/abandon' % zone.id
self._assert_exception('method_not_allowed', 405, self.client.get, url)
def test_get_invalid_abandon(self):
# This is an invalid endpoint - should return 404
url = '/zones/tasks/abandon'
self._assert_exception('not_found', 404, self.client.get, url)
def test_get_zone_tasks(self):
# This is an invalid endpoint - should return 404
zone = self.create_zone()
url = '/zones/%s/tasks' % zone.id
self._assert_exception('not_found', 404, self.client.get, url)
def test_create_secondary(self):
# Create a zone
fixture = self.get_zone_fixture('SECONDARY', 0)
fixture['masters'] = ["10.0.0.1"]
response = self.client.post_json('/zones/', fixture)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIn('created_at', response.json)
self.assertEqual('PENDING', response.json['status'])
self.assertEqual(cfg.CONF['service:central'].managed_resource_email,
response.json['email'])
self.assertIsNone(response.json['updated_at'])
# Zone is not transferred yet
self.assertIsNone(response.json['transferred_at'])
# Serial defaults to 1
        self.assertEqual(1, response.json['serial'])
for k in fixture:
self.assertEqual(fixture[k], response.json[k])
def test_create_secondary_no_masters(self):
# Create a zone
fixture = self.get_zone_fixture('SECONDARY', 0)
self._assert_exception('invalid_object', 400, self.client.post_json,
'/zones/', fixture)
def test_update_secondary(self):
# Create a zone
fixture = self.get_zone_fixture('SECONDARY', 0)
zone = objects.Zone(**fixture)
zone.email = cfg.CONF['service:central'].managed_resource_email
# Create a zone
zone = self.central_service.create_zone(self.admin_context, zone)
masters = ['10.0.0.1', '10.0.0.2']
# Prepare an update body
body = {'masters': masters}
response = self.client.patch_json('/zones/%s' % zone['id'], body,
status=202)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check the body structure is what we expect
self.assertIn('links', response.json)
self.assertIn('self', response.json['links'])
self.assertIn('status', response.json)
# Check the values returned are what we expect
self.assertIn('id', response.json)
self.assertIsNotNone(response.json['updated_at'])
self.assertEqual(masters, response.json['masters'])
self.assertEqual(1, response.json['serial'])
def test_xfr_request(self):
# Create a zone
fixture = self.get_zone_fixture('SECONDARY', 0)
fixture['email'] = cfg.CONF['service:central'].managed_resource_email
fixture['attributes'] = [{"key": "master", "value": "10.0.0.10"}]
# Create a zone
zone = self.create_zone(**fixture)
mdns = mock.Mock()
with mock.patch.object(mdns_api.MdnsAPI, 'get_instance') as get_mdns:
get_mdns.return_value = mdns
mdns.get_serial_number.return_value = ('SUCCESS', 10, 1, )
response = self.client.post_json(
'/zones/%s/tasks/xfr' % zone['id'],
None, status=202)
self.assertTrue(mdns.perform_zone_xfr.called)
# Check the headers are what we expect
self.assertEqual(202, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertEqual('""', response.body)
def test_invalid_xfr_request(self):
# Create a zone
zone = self.create_zone()
response = self.client.post_json(
'/zones/%s/tasks/xfr' % zone['id'],
None, status=400)
# Check the headers are what we expect
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
def test_update_secondary_email_invalid_object(self):
# Create a zone
fixture = self.get_zone_fixture('SECONDARY', 0)
fixture['email'] = cfg.CONF['service:central'].managed_resource_email
# Create a zone
zone = self.create_zone(**fixture)
body = {'email': 'foo@bar.io'}
self._assert_exception('invalid_object', 400, self.client.patch_json,
'/zones/%s' % zone['id'], body)
# Metadata tests
def test_metadata_exists(self):
response = self.client.get('/zones/')
# Make sure the fields exist
self.assertIn('metadata', response.json)
self.assertIn('total_count', response.json['metadata'])
def test_total_count(self):
response = self.client.get('/zones/')
# There are no zones by default
self.assertEqual(0, response.json['metadata']['total_count'])
# Create a zone
fixture = self.get_zone_fixture(fixture=0)
response = self.client.post_json('/zones/', fixture)
response = self.client.get('/zones/')
# Make sure total_count picked it up
self.assertEqual(1, response.json['metadata']['total_count'])
def test_total_count_pagination(self):
# Create two zones
fixture = self.get_zone_fixture(fixture=0)
response = self.client.post_json('/zones/', fixture)
fixture = self.get_zone_fixture(fixture=1)
response = self.client.post_json('/zones/', fixture)
# Paginate so that there is only one zone returned
response = self.client.get('/zones?limit=1')
self.assertEqual(1, len(response.json['zones']))
# The total_count should know there are two
self.assertEqual(2, response.json['metadata']['total_count'])
def test_no_update_deleting(self):
# Create a zone
zone = self.create_zone()
# Prepare an update body
body = {'zone': {'email': 'prefix-%s' % zone['email']}}
self.client.delete('/zones/%s' % zone['id'], status=202)
self._assert_exception('bad_request', 400, self.client.patch_json,
'/zones/%s' % zone['id'], body)
def test_get_nameservers(self):
# Create a zone
zone = self.create_zone()
# Prepare an update body
response = self.client.get('/zones/%s/nameservers' % zone['id'],
headers=[('Accept', 'application/json')])
self.assertIn('nameservers', response.json)
self.assertEqual(1, len(response.json['nameservers']))
self.assertIn('hostname', response.json['nameservers'][0])
self.assertIn('priority', response.json['nameservers'][0])
def test_get_zones_filter(self):
# Add zones for testing
fixtures = [
self.get_zone_fixture(
'PRIMARY', fixture=0, values={
'ttl': 3600,
'description': 'test1'
}
),
self.get_zone_fixture(
'PRIMARY', fixture=1, values={
'ttl': 4000,
'description': 'test2'
}
)
]
for fixture in fixtures:
response = self.client.post_json('/zones/', fixture)
get_urls = [
# Filter by Type
'/zones?type=%s' % fixtures[0]['type'],
# Filter by Name
'/zones?name=%s' % fixtures[0]['name'],
# Filter by Email
'/zones?email=example*',
'/zones?email=%s' % fixtures[1]['email'],
# Filter by TTL
'/zones?ttl=3600',
# Filter by Description
'/zones?description=test1',
'/zones?description=test*'
]
correct_results = [2, 1, 2, 1, 1, 1, 2]
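        # Expected match counts for the URLs above, in order: the shared
        # type matches both zones; the exact name matches one; the email
        # wildcard matches both while the exact email matches one; the ttl
        # and the exact description each match one; the description
        # wildcard matches both.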
for get_url, correct_result in zip(get_urls, correct_results):
response = self.client.get(get_url)
# Check the headers are what we expect
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
# Check that the correct number of zones match
self.assertEqual(correct_result, len(response.json['zones']))
def test_invalid_zones_filter(self):
invalid_url = '/zones?id=155477ef-e6c5-4b94-984d-8fc68c0c1a14'
self._assert_exception(
'bad_request', 400, self.client.get, invalid_url)
|
apache-2.0
| 7,254,965,936,942,582,000 | 36.741047 | 79 | 0.59292 | false |
mrkiwi-nz/django-helpdesk
|
helpdesk/migrations/0003_initial_data_import.py
|
4
|
1141
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from sys import path
from django.db import models, migrations
from django.core import serializers
fixture_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../fixtures'))
fixture_filename = 'emailtemplate.json'
def deserialize_fixture():
fixture_file = os.path.join(fixture_dir, fixture_filename)
with open(fixture_file, 'rb') as fixture:
return list(serializers.deserialize('json', fixture, ignorenonexistent=True))
def load_fixture(apps, schema_editor):
objects = deserialize_fixture()
for obj in objects:
obj.save()
def unload_fixture(apps, schema_editor):
"""Delete all EmailTemplate objects"""
objects = deserialize_fixture()
EmailTemplate = apps.get_model("helpdesk", "emailtemplate")
    EmailTemplate.objects.filter(pk__in=[obj.object.pk for obj in objects]).delete()
class Migration(migrations.Migration):
dependencies = [
('helpdesk', '0002_populate_usersettings'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
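# Illustrative usage (added commentary, not part of the original migration):
# the fixture is loaded when migrating forward and deleted again on rollback,
# e.g.
#
#   python manage.py migrate helpdesk 0003_initial_data_import
#   python manage.py migrate helpdesk 0002_populate_usersettings  # revert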
|
bsd-3-clause
| -8,847,635,837,566,119,000 | 24.931818 | 86 | 0.695881 | false |
thesgc/shergar
|
shergar/calc/migrations/0002_auto__del_field_workflowstep_depth__del_field_workflowstep_path__del_f.py
|
1
|
10972
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'WorkFlowStep.depth'
db.delete_column(u'calc_workflowstep', 'depth')
# Deleting field 'WorkFlowStep.path'
db.delete_column(u'calc_workflowstep', 'path')
# Deleting field 'WorkFlowStep.numchild'
db.delete_column(u'calc_workflowstep', 'numchild')
# Adding field 'WorkFlowStep.title'
db.add_column(u'calc_workflowstep', 'title',
self.gf('django.db.models.fields.CharField')(default=1, max_length=255),
keep_default=False)
def backwards(self, orm):
# Adding field 'WorkFlowStep.depth'
db.add_column(u'calc_workflowstep', 'depth',
self.gf('django.db.models.fields.PositiveIntegerField')(default=1),
keep_default=False)
# Adding field 'WorkFlowStep.path'
db.add_column(u'calc_workflowstep', 'path',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, unique=True),
keep_default=False)
# Adding field 'WorkFlowStep.numchild'
db.add_column(u'calc_workflowstep', 'numchild',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
# Deleting field 'WorkFlowStep.title'
db.delete_column(u'calc_workflowstep', 'title')
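    # Illustrative usage (assumed, not part of the generated migration):
    # South applies forwards() via `./manage.py migrate calc 0002` and
    # runs backwards() when migrating back to `calc 0001`.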
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'calc.experimentindexpage': {
'Meta': {'object_name': 'ExperimentIndexPage', '_ormbases': [u'wagtailcore.Page']},
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'calc.experimentpage': {
'Meta': {'object_name': 'ExperimentPage', '_ormbases': [u'wagtailcore.Page']},
'body': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'calc.experimentrelatedlink': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'ExperimentRelatedLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_document': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'link_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtailcore.Page']"}),
'page': ('modelcluster.fields.ParentalKey', [], {'related_name': "'related_links'", 'to': u"orm['calc.ExperimentPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'calc.workflowindexpage': {
'Meta': {'object_name': 'WorkflowIndexPage', '_ormbases': [u'wagtailcore.Page']},
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'calc.workflowpage': {
'Meta': {'object_name': 'WorkflowPage', '_ormbases': [u'wagtailcore.Page']},
'example_file': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['wagtailcore.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'calc.workflowstep': {
'Meta': {'ordering': "['sort_order']", 'object_name': 'WorkFlowStep'},
'example_input': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['wagtaildocs.Document']"}),
'fields_list': ('dbarray.fields.CharArrayField', [], {'max_length': '10', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro': ('wagtail.wagtailcore.fields.RichTextField', [], {'blank': 'True'}),
'page': ('modelcluster.fields.ParentalKey', [], {'blank': 'True', 'related_name': "'workflow_steps'", 'null': 'True', 'to': u"orm['calc.WorkflowPage']"}),
'sort_order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'wagtailcore.page': {
'Meta': {'object_name': 'Page'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pages'", 'to': u"orm['contenttypes.ContentType']"}),
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'expire_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'go_live_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'has_unpublished_changes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'live': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_pages'", 'null': 'True', 'to': u"orm['auth.User']"}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'search_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'seo_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'show_in_menus': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
u'wagtaildocs.document': {
'Meta': {'object_name': 'Document'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uploaded_by_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['calc']
|
mit
| 4,968,346,890,640,987,000 | 70.253247 | 195 | 0.560882 | false |
futurecolors/django-future-url
|
setup.py
|
1
|
1341
|
# -*- coding: utf-8 -*-
import os
import codecs
from setuptools import setup, find_packages
import django_future_url
read = lambda filepath: codecs.open(filepath, 'r', 'utf-8').read()
setup(
name="django-future-url",
version=django_future_url.__version__,
author='Vitaly Olevinsky',
author_email='olevinsky.v.s@gmail.com',
packages=find_packages(),
url='https://github.com/futurecolors/django-future-url/',
description="Migration tool for django 1.4, fixes url template tag deprecation warnings.",
long_description=read(os.path.join(os.path.dirname(__file__), 'README.rst')),
license='MIT',
install_requires=['docopt'],
    entry_points={
'console_scripts': [
'future_url = django_future_url.main:future_url',
],
},
tests_require=['cram==0.5'],
test_suite='django_future_url.test',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
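# Illustrative usage (assumed, not part of the original setup script): once
# installed, setuptools generates a `future_url` console script from the
# entry point above, so the tool can be invoked as, e.g.:
#
#   $ pip install django-future-url
#   $ future_url --help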
|
mit
| 1,000,879,088,271,199,900 | 32.525 | 94 | 0.619687 | false |
kgsn1763/deep-learning-from-scratch
|
common/functions.py
|
1
|
1171
|
#!/usr/bin/env python
# coding: utf-8
import numpy as np
def identity_function(x):
return x
def step_function(x):
return np.array(x > 0, dtype=np.int)
def sigmoid(x):
return 1 / (1 + np.exp(-x))
def sigmoid_grad(x):
return (1.0 - sigmoid(x)) * sigmoid(x)
def relu(x):
return np.maximum(0, x)
def relu_grad(x):
    grad = np.zeros_like(x)  # np.zeros(x) raises for an ndarray argument
grad[x >= 0] = 1
return grad
def softmax(x):
if x.ndim == 2:
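        # Batch input: transpose so that the stabilising max and the
        # normalising sum reduce over each sample's class scores.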
x = x.T
x = x - np.max(x, axis=0)
y = np.exp(x) / np.sum(np.exp(x), axis=0)
return y.T
    x = x - np.max(x)  # guard against overflow
return np.exp(x) / np.sum(np.exp(x))
def mean_squared_error(y, t):
return 0.5 * np.sum((y-t)**2)
def cross_entropy_error(y, t):
if y.ndim == 1:
t = t.reshape(1, t.size)
y = y.reshape(1, y.size)
    # if the training labels are one-hot vectors, convert them to the
    # indices of the correct classes
if t.size == y.size:
t = t.argmax(axis=1)
batch_size = y.shape[0]
return -np.sum(np.log(y[np.arange(batch_size), t])) / batch_size
def softmax_loss(X, t):
y = softmax(X)
return cross_entropy_error(y, t)
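# Minimal sketch (added for illustration, not part of the original module)
# showing how the pieces above combine; the values are arbitrary.
if __name__ == '__main__':
    scores = np.array([[0.3, 2.9, 4.0],
                       [1.2, 0.1, 0.7]])
    labels = np.array([2, 0])  # index of the correct class per sample
    probs = softmax(scores)
    print(probs.sum(axis=1))                   # each row sums to 1
    print(cross_entropy_error(probs, labels))  # loss from probabilities
    print(softmax_loss(scores, labels))        # same loss from raw scores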
|
mit
| 8,491,684,971,797,022,000 | 16.507937 | 68 | 0.55485 | false |
licco/zipline
|
zipline/history/history_container.py
|
1
|
18509
|
#
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import groupby
import numpy as np
import pandas as pd
from six import itervalues, iteritems, iterkeys
from . history import (
index_at_dt,
)
from zipline.utils.data import RollingPanel
# The closing price is referred to by multiple names,
# allow both for price rollover logic etc.
CLOSING_PRICE_FIELDS = frozenset({'price', 'close_price'})
def ffill_buffer_from_prior_values(field,
buffer_frame,
digest_frame,
pre_digest_values):
"""
Forward-fill a buffer frame, falling back to the end-of-period values of a
digest frame if the buffer frame has leading NaNs.
"""
# Get values which are NaN at the beginning of the period.
first_bar = buffer_frame.iloc[0]
def iter_nan_sids():
"""
Helper for iterating over the remaining nan sids in first_bar.
"""
return (sid for sid in first_bar[first_bar.isnull()].index)
# Try to fill with the last entry from the digest frame.
if digest_frame is not None:
# We don't store a digest frame for frequencies that only have a bar
# count of 1.
for sid in iter_nan_sids():
buffer_frame[sid][0] = digest_frame.ix[-1, sid]
# If we still have nan sids, try to fill with pre_digest_values.
for sid in iter_nan_sids():
prior_sid_value = pre_digest_values[field].get(sid)
if prior_sid_value:
            # Fill only if the prior value's dt is newer than the
            # timestamp of our first bar.
if prior_sid_value.get('dt', first_bar.name) > first_bar.name:
buffer_frame[sid][0] = prior_sid_value.get('value', np.nan)
return buffer_frame.ffill()
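# Illustrative sketch (assumed shapes, not part of the original module): if
# the buffer's first minute is NaN for a sid, the digest frame's final row
# seeds the fill before the ffill pass, e.g.
#
#   buffer_frame:  minutes x sids, first row NaN for sid 24
#   digest_frame:  prior closed bars, last row holds 101.5 for sid 24
#   ffill_buffer_from_prior_values('price', buffer_frame, digest_frame,
#                                  prior_values)[24][0]  # -> 101.5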
def ffill_digest_frame_from_prior_values(field, digest_frame, prior_values):
"""
    Forward-fill a digest frame, falling back to the last known prior values if
necessary.
"""
if digest_frame is not None:
# Digest frame is None in the case that we only have length 1 history
# specs for a given frequency.
# It's possible that the first bar in our digest frame is storing NaN
# values. If so, check if we've tracked an older value and use that as
# an ffill value for the first bar.
first_bar = digest_frame.ix[0]
nan_sids = first_bar[first_bar.isnull()].index
for sid in nan_sids:
try:
# Only use prior value if it is before the index,
# so that a backfill does not accidentally occur.
if prior_values[field][sid]['dt'] <= digest_frame.index[0]:
digest_frame[sid][0] = prior_values[field][sid]['value']
except KeyError:
# Allow case where there is no previous value.
# e.g. with leading nans.
pass
digest_frame = digest_frame.ffill()
return digest_frame
def freq_str_and_bar_count(history_spec):
"""
Helper for getting the frequency string and bar count from a history spec.
"""
return (history_spec.frequency.freq_str, history_spec.bar_count)
def group_by_frequency(history_specs):
"""
Takes an iterable of history specs and returns a dictionary mapping unique
frequencies to a list of specs with that frequency.
Within each list, the HistorySpecs are sorted by ascending bar count.
Example:
[HistorySpec(3, '1d', 'price', True),
HistorySpec(2, '2d', 'open', True),
HistorySpec(2, '1d', 'open', False),
HistorySpec(5, '1m', 'open', True)]
yields
    {Frequency('1d') : [HistorySpec(2, '1d', 'open', False),
                        HistorySpec(3, '1d', 'price', True)],
Frequency('2d') : [HistorySpec(2, '2d', 'open', True)],
Frequency('1m') : [HistorySpec(5, '1m', 'open', True)]}
"""
return {key: list(group)
for key, group in groupby(
sorted(history_specs, key=freq_str_and_bar_count),
key=lambda spec: spec.frequency)}
class HistoryContainer(object):
"""
Container for all history panels and frames used by an algoscript.
To be used internally by TradingAlgorithm, but *not* passed directly to the
algorithm.
Entry point for the algoscript is the result of `get_history`.
"""
def __init__(self, history_specs, initial_sids, initial_dt):
# History specs to be served by this container.
self.history_specs = history_specs
self.frequency_groups = \
group_by_frequency(itervalues(self.history_specs))
# The set of fields specified by all history specs
self.fields = set(spec.field for spec in itervalues(history_specs))
# This panel contains raw minutes for periods that haven't been fully
# completed. When a frequency period rolls over, these minutes are
# digested using some sort of aggregation call on the panel (e.g. `sum`
# for volume, `max` for high, `min` for low, etc.).
self.buffer_panel = self.create_buffer_panel(
initial_sids,
initial_dt,
)
# Dictionaries with Frequency objects as keys.
self.digest_panels, self.cur_window_starts, self.cur_window_closes = \
self.create_digest_panels(initial_sids, initial_dt)
# Populating initial frames here, so that the cost of creating the
# initial frames does not show up when profiling. These frames are
# cached since mid-stream creation of containing data frames on every
# bar is expensive.
self.create_return_frames(initial_dt)
# Helps prop up the prior day panel against having a nan, when the data
# has been seen.
self.last_known_prior_values = {field: {} for field in self.fields}
@property
def unique_frequencies(self):
"""
Return an iterator over all the unique frequencies serviced by this
container.
"""
return iterkeys(self.frequency_groups)
def create_digest_panels(self, initial_sids, initial_dt):
"""
Initialize a RollingPanel for each unique panel frequency being stored
by this container. Each RollingPanel pre-allocates enough storage
space to service the highest bar-count of any history call that it
serves.
Relies on the fact that group_by_frequency sorts the value lists by
ascending bar count.
"""
# Map from frequency -> first/last minute of the next digest to be
# rolled for that frequency.
first_window_starts = {}
first_window_closes = {}
# Map from frequency -> digest_panels.
panels = {}
for freq, specs in iteritems(self.frequency_groups):
# Relying on the sorting of group_by_frequency to get the spec
# requiring the largest number of bars.
largest_spec = specs[-1]
if largest_spec.bar_count == 1:
# No need to allocate a digest panel; this frequency will only
# ever use data drawn from self.buffer_panel.
first_window_starts[freq] = freq.window_open(initial_dt)
first_window_closes[freq] = freq.window_close(
first_window_starts[freq]
)
continue
initial_dates = index_at_dt(largest_spec, initial_dt)
# Set up dates for our first digest roll, which is keyed to the
# close of the first entry in our initial index.
first_window_closes[freq] = initial_dates[0]
first_window_starts[freq] = freq.window_open(initial_dates[0])
rp = RollingPanel(len(initial_dates) - 1,
self.fields,
initial_sids)
panels[freq] = rp
return panels, first_window_starts, first_window_closes
def create_buffer_panel(self, initial_sids, initial_dt):
"""
Initialize a RollingPanel containing enough minutes to service all our
frequencies.
"""
max_bars_needed = max(freq.max_minutes
for freq in self.unique_frequencies)
rp = RollingPanel(
max_bars_needed,
self.fields,
initial_sids,
# Restrict the initial data down to just the fields being used in
# this container.
)
return rp
def convert_columns(self, values):
"""
If columns have a specific type you want to enforce, overwrite this
method and return the transformed values.
"""
return values
def create_return_frames(self, algo_dt):
"""
Populates the return frame cache.
Called during init and at universe rollovers.
"""
self.return_frames = {}
for spec_key, history_spec in iteritems(self.history_specs):
index = pd.to_datetime(index_at_dt(history_spec, algo_dt))
frame = pd.DataFrame(
index=index,
columns=self.convert_columns(
self.buffer_panel.minor_axis.values),
dtype=np.float64)
self.return_frames[spec_key] = frame
def buffer_panel_minutes(self,
buffer_panel=None,
earliest_minute=None,
latest_minute=None):
"""
Get the minutes in @buffer_panel between @earliest_minute and
@last_minute, inclusive.
@buffer_panel can be a RollingPanel or a plain Panel. If a
RollingPanel is supplied, we call `get_current` to extract a Panel
object. If no panel is supplied, we use self.buffer_panel.
If no value is specified for @earliest_minute, use all the minutes we
        have up until @latest_minute.
If no value for @latest_minute is specified, use all values up until
the latest minute.
"""
buffer_panel = buffer_panel or self.buffer_panel
if isinstance(buffer_panel, RollingPanel):
buffer_panel = buffer_panel.get_current()
return buffer_panel.ix[:, earliest_minute:latest_minute, :]
def update(self, data, algo_dt):
"""
Takes the bar at @algo_dt's @data, checks to see if we need to roll any
new digests, then adds new data to the buffer panel.
"""
self.update_digest_panels(algo_dt, self.buffer_panel)
fields = self.fields
frame = pd.DataFrame(
{sid: {field: bar[field] for field in fields}
for sid, bar in data.iteritems()
if (bar
and
bar['dt'] == algo_dt
and
# Only use data which is keyed in the data panel.
# Prevents crashes due to custom data.
sid in self.buffer_panel.minor_axis)})
self.buffer_panel.add_frame(algo_dt, frame)
def update_digest_panels(self, algo_dt, buffer_panel, freq_filter=None):
"""
Check whether @algo_dt is greater than cur_window_close for any of our
frequencies. If so, roll a digest for that frequency using data drawn
from @buffer panel and insert it into the appropriate digest panels.
If @freq_filter is specified, only use the given data to update
frequencies on which the filter returns True.
"""
for frequency in self.unique_frequencies:
if freq_filter is not None and not freq_filter(frequency):
continue
# We don't keep a digest panel if we only have a length-1 history
# spec for a given frequency
digest_panel = self.digest_panels.get(frequency, None)
while algo_dt > self.cur_window_closes[frequency]:
earliest_minute = self.cur_window_starts[frequency]
latest_minute = self.cur_window_closes[frequency]
minutes_to_process = self.buffer_panel_minutes(
buffer_panel,
earliest_minute=earliest_minute,
latest_minute=latest_minute,
)
# Create a digest from minutes_to_process and add it to
# digest_panel.
self.roll(frequency,
digest_panel,
minutes_to_process,
latest_minute)
# Update panel start/close for this frequency.
self.cur_window_starts[frequency] = \
frequency.next_window_start(latest_minute)
self.cur_window_closes[frequency] = \
frequency.window_close(self.cur_window_starts[frequency])
def roll(self, frequency, digest_panel, buffer_minutes, digest_dt):
"""
        Package up the minutes in @buffer_minutes into a single digest bar
        and insert that bar into @digest_panel at index @digest_dt.
"""
if digest_panel is None:
# This happens if the only spec we have at this frequency has a bar
# count of 1.
return
rolled = pd.DataFrame(
index=self.fields,
columns=buffer_minutes.minor_axis)
for field in self.fields:
if field in CLOSING_PRICE_FIELDS:
# Use the last close, or NaN if we have no minutes.
try:
prices = buffer_minutes.loc[field].ffill().iloc[-1]
except IndexError:
# Scalar assignment sets the value for all entries.
prices = np.nan
rolled.ix[field] = prices
elif field == 'open_price':
# Use the first open, or NaN if we have no minutes.
try:
opens = buffer_minutes.loc[field].bfill().iloc[0]
except IndexError:
# Scalar assignment sets the value for all entries.
opens = np.nan
rolled.ix['open_price'] = opens
elif field == 'volume':
# Volume is the sum of the volumes during the
# course of the period.
volumes = buffer_minutes.ix['volume'].sum().fillna(0)
rolled.ix['volume'] = volumes
elif field == 'high':
# Use the highest high.
highs = buffer_minutes.ix['high'].max()
rolled.ix['high'] = highs
elif field == 'low':
# Use the lowest low.
lows = buffer_minutes.ix['low'].min()
rolled.ix['low'] = lows
for sid, value in rolled.ix[field].iterkv():
if not np.isnan(value):
try:
prior_values = \
self.last_known_prior_values[field][sid]
except KeyError:
prior_values = {}
self.last_known_prior_values[field][sid] = \
prior_values
prior_values['dt'] = digest_dt
prior_values['value'] = value
digest_panel.add_frame(digest_dt, rolled)
def get_history(self, history_spec, algo_dt):
"""
Main API used by the algoscript is mapped to this function.
Selects from the overarching history panel the values for the
@history_spec at the given @algo_dt.
"""
field = history_spec.field
bar_count = history_spec.bar_count
do_ffill = history_spec.ffill
index = pd.to_datetime(index_at_dt(history_spec, algo_dt))
return_frame = self.return_frames[history_spec.key_str]
# Overwrite the index.
# Not worrying about values here since the values are overwritten
# in the next step.
return_frame.index = index
if bar_count > 1:
# Get the last bar_count - 1 frames from our stored historical
# frames.
digest_panel = self.digest_panels[history_spec.frequency]\
.get_current()
digest_frame = digest_panel[field].copy().ix[1 - bar_count:]
else:
digest_frame = None
# Get minutes from our buffer panel to build the last row.
buffer_frame = self.buffer_panel_minutes(
earliest_minute=self.cur_window_starts[history_spec.frequency],
)[field]
if do_ffill:
digest_frame = ffill_digest_frame_from_prior_values(
field,
digest_frame,
self.last_known_prior_values,
)
buffer_frame = ffill_buffer_from_prior_values(
field,
buffer_frame,
digest_frame,
self.last_known_prior_values,
)
if digest_frame is not None:
return_frame.ix[:-1] = digest_frame.ix[:]
if field == 'volume':
return_frame.ix[algo_dt] = buffer_frame.fillna(0).sum()
elif field == 'high':
return_frame.ix[algo_dt] = buffer_frame.max()
elif field == 'low':
return_frame.ix[algo_dt] = buffer_frame.min()
elif field == 'open_price':
return_frame.ix[algo_dt] = buffer_frame.iloc[0]
else:
return_frame.ix[algo_dt] = buffer_frame.loc[algo_dt]
# Returning a copy of the DataFrame so that we don't crash if the user
# adds columns to the frame. Ideally we would just drop any added
# columns, but pandas 0.12.0 doesn't support in-place dropping of
# columns. We should re-evaluate this implementation once we're on a
# more up-to-date pandas.
return return_frame.copy()
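    # Hedged usage sketch -- the HistorySpec signature follows the
    # group_by_frequency docstring above; the sid and dt values are
    # illustrative:
    #
    #   spec = HistorySpec(3, '1d', 'price', True)
    #   container = HistoryContainer({spec.key_str: spec}, [24], algo_dt)
    #   container.update(data, algo_dt)               # once per bar
    #   frame = container.get_history(spec, algo_dt)
    #
    # The returned DataFrame is indexed by bar close times, with one
    # column per sid.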
|
apache-2.0
| 1,417,596,418,311,687,000 | 36.928279 | 79 | 0.580853 | false |
gwillem/magento-malware-scanner
|
mwscan/ruleset.py
|
1
|
7113
|
import os
import re
import json
import logging
import hashlib
import requests
import yara
import time
from requests.exceptions import RequestException
from mwscan import settings
# For very old installs, eg CentOS:
# https://github.com/magesec/magesec/issues/60
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
# Ubuntu 12.04
pass
def strip_last_url_path(url):
parent, _, _ = url.rpartition('/')
return parent
def last_url_path(url):
return url.rpartition('/')[2]
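# Doctest-style sketch (illustrative URL only):
#   strip_last_url_path('https://example.com/rules/php.yar')
#       -> 'https://example.com/rules'
#   last_url_path('https://example.com/rules/php.yar') -> 'php.yar'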
class RulesProvider:
rules_url = None
whitelist_url = None
def __init__(self, **kwargs):
logging.info("Using {0} rules.".format(self.__class__.__name__))
self._args = kwargs.get('args')
def find_whitelist_in_rawrules(self, rawrules):
# Find whitelist hashes from comments, because yara whitelist
# hashing is too slow. See
# https://github.com/VirusTotal/yara/issues/592
m = re.search(
'/\*[^*]*WHITELIST = (\{.*?\})\s*\*/', rawrules, flags=re.DOTALL)
return set(json.loads(m.group(1)) if m else [])
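    # Illustrative sketch of the comment block the regex above matches
    # (hypothetical hash value):
    #
    #   /* known-good file hashes, skipped while scanning
    #   WHITELIST = {"da39a3ee5e6b4b0d3255bfef95601890afd80709": 1}
    #   */
    #
    # For that input the method returns the set of the JSON object's
    # keys: {'da39a3ee5e6b4b0d3255bfef95601890afd80709'}.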
def get_rules(self):
rawrules = self._recursive_fetch(self.rules_url)
try:
if type(rawrules) is unicode:
return rawrules.encode('ascii', 'ignore')
except NameError:
pass # py3
return rawrules
def get_whitelist(self):
if not self.whitelist_url:
return set()
data = self._httpget(self.whitelist_url)
hashes = re.findall('[a-f0-9]{40}', data) # assume sha1 hex hash
return set(hashes)
def transform_rules(self, rawrules):
"""For specific rules providers, to mangle into mwscan compatible form"""
whitelist = set()
return rawrules, whitelist
def _get_cache_filename(self, url):
hash = hashlib.sha1(url.encode()).hexdigest()
cachefile = self.__class__.__name__.lower() + '.cache_' + hash
cachefile = os.path.join(settings.CACHEDIR, cachefile)
return cachefile
def _get_cache_timestamp_content(self, cachefile):
cachedcontent = None
mtime = None
if os.path.exists(cachefile):
mtime = os.path.getmtime(cachefile)
mtime = time.gmtime(mtime)
mtime = time.strftime('%a, %d %b %Y %H:%M:%S GMT', mtime)
with open(cachefile, 'rb') as fh:
cachedcontent = fh.read()
return mtime, cachedcontent
def _httpget(self, url):
""" Fetch URL and use if-modified-since header, store in cache,
fail if upstream fails """
filename = last_url_path(url)
cachefile = self._get_cache_filename(url)
mtime, cachedcontent = self._get_cache_timestamp_content(cachefile)
headers = dict()
# requests 0.8.2 doesn't like None header values
if mtime:
headers['if-modified-since'] = mtime
logging.debug("Fetching {0}".format(filename))
try:
resp = requests.get(url, headers=headers)
except RequestException as e:
if cachedcontent is not None:
return cachedcontent
raise RuntimeError(
"No cache and invalid response for {0}: {1}".format(url, e))
if resp.status_code == 200:
with open(cachefile, 'wb') as fh:
# py3 vs py2
if type(resp.content) is bytes:
ruleset = resp.content.decode('utf-8', 'ignore')
else:
ruleset = resp.content
fh.write(ruleset.encode('utf-8', 'ignore'))
return ruleset
if resp.status_code == 304:
logging.debug(
'Upstream {0} is the same as our cache (HTTP 304)'.format(url))
# Upstream hasn't changed (304) or has err'd
if cachedcontent is not None:
return cachedcontent.decode('utf-8', 'ignore')
raise RuntimeError("No cache @ {0} and invalid response for {1}: {2}".format(
cachefile, url, resp.status_code))
def get(self):
"""Returns rules, whitelist"""
rawrules = self.get_rules()
# provider specific transformation, if necessary
rawrules, whitelist = self.transform_rules(rawrules)
# if alternative whitelist method is required
whitelist.update(self.get_whitelist())
whitelist.update(self.find_whitelist_in_rawrules(rawrules))
rules = yara.compile(source=rawrules)
return rules, whitelist
def _recursive_fetch(self, url):
def include(match):
relpath = match.group(1)
# return match.group(1)
newurl = strip_last_url_path(url) + '/' + relpath
return "/* included from {0} */\n".format(newurl) + self._recursive_fetch(newurl)
data = self._httpget(url)
data = re.sub(r'include "([^"]+?)"\s+', include, data)
# data = re.sub('import "hash"\s*', '', data)
return data
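    # Illustrative sketch (hypothetical URL and contents): fetching
    #   https://example.com/rules/all.yar
    # whose body contains the line
    #   include "php.yar"
    # replaces that line with
    #   /* included from https://example.com/rules/php.yar */
    # followed by the body of php.yar, with nested includes expanded the
    # same way.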
class Files(RulesProvider):
# initialize with Files(args)
def get_rules(self):
path = self._args.rules
logging.info("Loading {0}".format(self._args.rules))
with open(path, 'rb') as fh:
return fh.read().decode('utf-8', 'ignore')
class NBS(RulesProvider):
rules_url = 'https://raw.githubusercontent.com/nbs-system/php-malware-finder/master/php-malware-finder/php.yar'
def transform_rules(self, rawrules):
whitelist = set()
rules = list()
tokens = re.findall(
'\n(?:global )?(?:private )?rule .+?\n\{\n.+?\n\}', rawrules, flags=re.DOTALL)
for token in tokens:
hashes = re.findall('== "([a-f0-9]{40})"', token)
if 'rule IsWhitelisted' in token:
continue
if hashes or 'hash.sha1' in token:
whitelist.update(hashes)
else:
token = token.strip()
token = re.sub(' and not IsWhitelisted', '', token)
rules.append(token.strip())
return '\n'.join(rules), whitelist
class Magemojo(RulesProvider):
rules_url = 'https://raw.githubusercontent.com/magesec/magesecurityscanner/master/yararules.yar'
whitelist_url = 'https://raw.githubusercontent.com/magesec/magesecurityscanner/master/magesecurityscan/sha1whitelist.json'
class Magesec(RulesProvider):
rules_url = 'https://magesec.org/download/yara-standard.yar'
whitelist_url = 'https://magesec.org/download/whitelist.json'
class Mwscan(RulesProvider):
rules_url = 'https://mwscan.s3.amazonaws.com/mwscan.yar'
class MageHost(RulesProvider):
rules_url = 'https://raw.githubusercontent.com/magehost/magento-malware-scanner/master/rules/magehost.yar'
whitelist_url = 'https://raw.githubusercontent.com/magehost/magento-malware-scanner/master/rules/magehost_whitelist.json'
providers = {
'nbs': NBS,
'byte': Mwscan, # backwards compatible
'mwscan': Mwscan,
'magehost': MageHost,
'magemojo': Magemojo,
'magesec': Magesec,
'file': Files,
}
|
gpl-3.0
| 1,615,909,693,083,476,500 | 29.397436 | 126 | 0.60059 | false |
lex86/nnet
|
python/nnet.py
|
1
|
5793
|
import sys
from ctypes import *
import numpy as np
nnetlib = np.ctypeslib.load_library('libnnet', '../lib/')
class NNet:
def __init__(self, cfg_path):
        init = nnetlib.NNet_init
        # Without an explicit restype the returned handle would be
        # truncated to a 32-bit int on 64-bit platforms.
        init.restype = c_void_p
        self.nnet = init(cfg_path)
        if not self.nnet:
            sys.exit(-1)
c_size = c_int()
size = nnetlib.NNet_size
size.argtypes = (c_void_p, POINTER(c_int))
if size(self.nnet,
byref(c_size)) < 0:
sys.exit(-1)
self.size = c_size.value
c_num_iters = c_int()
num_iters = nnetlib.NNet_num_iters
num_iters.argtypes = (c_void_p, POINTER(c_int))
if num_iters(self.nnet,
byref(c_num_iters)) < 0:
sys.exit(-1)
self.num_iters = c_num_iters.value
self.dims = np.zeros(self.size+1, dtype=np.int32)
get_dims = nnetlib.NNet_get_dims
get_dims.argtypes = (c_void_p,
c_int,
np.ctypeslib.ndpointer(
dtype = np.int32,
ndim = 1,
flags = 'C')
)
if get_dims(self.nnet,
np.shape(self.dims)[0], self.dims) < 0:
sys.exit(-1)
def __del__(self):
        destroy = nnetlib.NNet_destroy
        destroy.argtypes = (c_void_p,)
        destroy(self.nnet)
def forward(self, data):
forward = nnetlib.NNet_forward
forward.argtypes = (c_void_p,
c_int,
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 2,
flags = 'C')
)
if forward(self.nnet,
np.shape(data)[0], np.shape(data)[1], data) < 0:
sys.exit(-1)
def backward(self, labels):
backward = nnetlib.NNet_backward
backward.argtypes = (c_void_p,
c_int,
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 2,
flags = 'C')
)
if backward(self.nnet,
np.shape(labels)[0],
np.shape(labels)[1],
labels) < 0:
sys.exit(-1)
def get_params(self):
get_layer_params = nnetlib.NNet_get_layer_params
get_layer_params.argtypes = (c_void_p,
c_int,
c_int,
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 2,
flags = 'C'),
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 1,
flags = 'C'))
Ws = []
bs = []
for i in range(self.size):
mat = np.zeros((self.dims[i], self.dims[i+1]), dtype=np.float64)
vec = np.zeros(self.dims[i+1], dtype=np.float64)
if get_layer_params(self.nnet,
i,
self.dims[i], self.dims[i+1], mat,
self.dims[i+1], vec) < 0:
sys.exit(-1)
Ws.append(mat)
bs.append(vec)
return Ws, bs
def get_gradients(self):
get_layer_gradients = nnetlib.NNet_get_layer_gradients
get_layer_gradients.argtypes = (c_void_p,
c_int,
c_int,
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 2,
flags = 'C'),
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 1,
flags = 'C'))
grad_Ws = []
grad_bs = []
for i in range(self.size):
mat = np.zeros((self.dims[i], self.dims[i+1]), dtype=np.float64)
vec = np.zeros(self.dims[i+1], dtype=np.float64)
if get_layer_gradients(self.nnet,
i,
self.dims[i], self.dims[i+1], mat,
self.dims[i+1], vec) < 0:
sys.exit(-1)
grad_Ws.append(mat)
grad_bs.append(vec)
return grad_Ws, grad_bs
def set_params(self, Ws, bs):
set_layer_params = nnetlib.NNet_set_layer_params
set_layer_params.argtypes = (c_void_p,
c_int,
c_int,
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 2,
flags = 'C'),
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 1,
flags = 'C'))
for i in range(self.size):
if set_layer_params(self.nnet,
i,
Ws[i].shape[0], Ws[i].shape[1], Ws[i],
bs[i].shape[0], bs[i]) < 0:
sys.exit(-1)
def update_params(self, grad_Ws, grad_bs):
update_layer_params = nnetlib.NNet_update_layer_params
update_layer_params.argtypes = (c_void_p,
c_int,
c_int,
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 2,
flags = 'C'),
c_int,
np.ctypeslib.ndpointer(
dtype = np.float64,
ndim = 1,
flags = 'C'))
for i in range(self.size):
if update_layer_params(self.nnet,
i,
grad_Ws[i].shape[0], grad_Ws[i].shape[1], grad_Ws[i],
grad_bs[i].shape[0], grad_bs[i]) < 0:
sys.exit(-1)
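# Minimal usage sketch; 'nnet.cfg' and the toy batch of four examples
# below are illustrative assumptions, not part of the library.
if __name__ == '__main__':
    net = NNet('nnet.cfg')
    data = np.random.rand(4, net.dims[0])      # float64, C-contiguous
    labels = np.zeros((4, net.dims[-1]))       # one-hot targets
    labels[:, 0] = 1.0
    for _ in range(net.num_iters):
        net.forward(data)
        net.backward(labels)
        grad_Ws, grad_bs = net.get_gradients()
        net.update_params(grad_Ws, grad_bs)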
|
gpl-2.0
| 6,646,499,532,115,967,000 | 32.680233 | 76 | 0.416882 | false |
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/galaxy/web/framework/webapp.py
|
1
|
39566
|
"""
"""
import inspect
import os
import hashlib
import random
import socket
import string
import time
from Cookie import CookieError
from galaxy import eggs
eggs.require( "Cheetah" )
from Cheetah.Template import Template
eggs.require( "Mako" )
import mako.runtime
import mako.lookup
# pytz is used by Babel.
eggs.require( "pytz" )
eggs.require( "Babel" )
from babel.support import Translations
from babel import Locale
eggs.require( "SQLAlchemy >= 0.4" )
from sqlalchemy import and_
from sqlalchemy.orm.exc import NoResultFound
from galaxy.exceptions import MessageException
from galaxy import util
from galaxy.util import asbool
from galaxy.util import safe_str_cmp
from galaxy.util.backports.importlib import import_module
from galaxy.util.sanitize_html import sanitize_html
from galaxy.managers import context
from galaxy.web.framework import url_for
from galaxy.web.framework import base
from galaxy.web.framework import helpers
from galaxy.web.framework import formbuilder
import logging
log = logging.getLogger( __name__ )
UCSC_SERVERS = (
'hgw1.cse.ucsc.edu',
'hgw2.cse.ucsc.edu',
'hgw3.cse.ucsc.edu',
'hgw4.cse.ucsc.edu',
'hgw5.cse.ucsc.edu',
'hgw6.cse.ucsc.edu',
'hgw7.cse.ucsc.edu',
'hgw8.cse.ucsc.edu',
)
class WebApplication( base.WebApplication ):
"""
Base WSGI application instantiated for all Galaxy webapps.
A web application that:
* adds API and UI controllers by scanning given directories and
importing all modules found there.
* has a security object.
* builds mako template lookups.
* generates GalaxyWebTransactions.
"""
def __init__( self, galaxy_app, session_cookie='galaxysession', name=None ):
self.name = name
base.WebApplication.__init__( self )
self.set_transaction_factory( lambda e: self.transaction_chooser( e, galaxy_app, session_cookie ) )
# Mako support
self.mako_template_lookup = self.create_mako_template_lookup( galaxy_app, name )
# Security helper
self.security = galaxy_app.security
def create_mako_template_lookup( self, galaxy_app, name ):
paths = []
# First look in webapp specific directory
if name is not None:
paths.append( os.path.join( galaxy_app.config.template_path, 'webapps', name ) )
# Then look in root directory
paths.append( galaxy_app.config.template_path )
# Create TemplateLookup with a small cache
return mako.lookup.TemplateLookup(directories=paths,
module_directory=galaxy_app.config.template_cache,
collection_size=500,
output_encoding='utf-8' )
def handle_controller_exception( self, e, trans, **kwargs ):
if isinstance( e, MessageException ):
# In the case of a controller exception, sanitize to make sure
# unsafe html input isn't reflected back to the user
return trans.show_message( sanitize_html(e.err_msg), e.type )
def make_body_iterable( self, trans, body ):
if isinstance( body, formbuilder.FormBuilder ):
body = trans.show_form( body )
return base.WebApplication.make_body_iterable( self, trans, body )
def transaction_chooser( self, environ, galaxy_app, session_cookie ):
return GalaxyWebTransaction( environ, galaxy_app, self, session_cookie )
def add_ui_controllers( self, package_name, app ):
"""
Search for UI controllers in `package_name` and add
them to the webapp.
"""
from galaxy.web.base.controller import BaseUIController
from galaxy.web.base.controller import ControllerUnavailable
package = import_module( package_name )
controller_dir = package.__path__[0]
for fname in os.listdir( controller_dir ):
if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
name = fname[:-3]
module_name = package_name + "." + name
try:
module = import_module( module_name )
except ControllerUnavailable, exc:
log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
continue
# Look for a controller inside the modules
for key in dir( module ):
T = getattr( module, key )
if inspect.isclass( T ) and T is not BaseUIController and issubclass( T, BaseUIController ):
controller = self._instantiate_controller( T, app )
self.add_ui_controller( name, controller )
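    # Illustrative sketch (hypothetical module layout): a package
    #
    #   mywebapp/controllers/
    #       __init__.py
    #       dataset.py    # defines DatasetController(BaseUIController)
    #
    # passed as add_ui_controllers('mywebapp.controllers', app) registers
    # the controller under the name 'dataset'.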
def add_api_controllers( self, package_name, app ):
"""
        Search for API controllers in `package_name` and add
them to the webapp.
"""
from galaxy.web.base.controller import BaseAPIController
from galaxy.web.base.controller import ControllerUnavailable
package = import_module( package_name )
controller_dir = package.__path__[0]
for fname in os.listdir( controller_dir ):
if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
name = fname[:-3]
module_name = package_name + "." + name
try:
module = import_module( module_name )
except ControllerUnavailable, exc:
log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
continue
for key in dir( module ):
T = getattr( module, key )
# Exclude classes such as BaseAPIController and BaseTagItemsController
if inspect.isclass( T ) and not key.startswith("Base") and issubclass( T, BaseAPIController ):
# By default use module_name, but allow controller to override name
controller_name = getattr( T, "controller_name", name )
controller = self._instantiate_controller( T, app )
self.add_api_controller( controller_name, controller )
def _instantiate_controller( self, T, app ):
""" Extension point, allow apps to contstruct controllers differently,
really just used to stub out actual controllers for routes testing.
"""
return T( app )
class GalaxyWebTransaction( base.DefaultWebTransaction,
context.ProvidesAppContext, context.ProvidesUserContext, context.ProvidesHistoryContext ):
"""
Encapsulates web transaction specific state for the Galaxy application
(specifically the user's "cookie" session and history)
"""
def __init__( self, environ, app, webapp, session_cookie=None):
self.app = app
self.webapp = webapp
self.security = webapp.security
base.DefaultWebTransaction.__init__( self, environ )
self.setup_i18n()
self.expunge_all()
self.debug = asbool( self.app.config.get( 'debug', False ) )
# Flag indicating whether we are in workflow building mode (means
# that the current history should not be used for parameter values
# and such).
self.workflow_building_mode = False
# Flag indicating whether this is an API call and the API key user is an administrator
self.api_inherit_admin = False
self.__user = None
self.galaxy_session = None
self.error_message = None
if self.environ.get('is_api_request', False):
# With API requests, if there's a key, use it and associate the
# user with the transaction.
# If not, check for an active session but do not create one.
# If an error message is set here, it's sent back using
# trans.show_error in the response -- in expose_api.
self.error_message = self._authenticate_api( session_cookie )
elif self.app.name == "reports":
self.galaxy_session = None
else:
# This is a web request, get or create session.
self._ensure_valid_session( session_cookie )
if self.galaxy_session:
# When we've authenticated by session, we have to check the
# following.
# Prevent deleted users from accessing Galaxy
if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
if self.app.config.require_login:
self._ensure_logged_in_user( environ, session_cookie )
def setup_i18n( self ):
locales = []
if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
# locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
client_locales = self.environ['HTTP_ACCEPT_LANGUAGE'].split( ',' )
for locale in client_locales:
try:
locales.append( Locale.parse( locale.split( ';' )[0].strip(), sep='-' ).language )
except Exception, e:
log.debug( "Error parsing locale '%s'. %s: %s", locale, type( e ), e )
if not locales:
# Default to English
locales = 'en'
t = Translations.load( dirname='locale', locales=locales, domain='ginga' )
self.template_context.update( dict( _=t.ugettext, n_=t.ugettext, N_=t.ungettext ) )
def get_user( self ):
"""Return the current user if logged in or None."""
if self.galaxy_session:
return self.galaxy_session.user
else:
return self.__user
def set_user( self, user ):
"""Set the current user."""
if self.galaxy_session:
self.galaxy_session.user = user
self.sa_session.add( self.galaxy_session )
self.sa_session.flush()
self.__user = user
user = property( get_user, set_user )
def get_cookie( self, name='galaxysession' ):
"""Convenience method for getting a session cookie"""
try:
# If we've changed the cookie during the request return the new value
if name in self.response.cookies:
return self.response.cookies[name].value
else:
return self.request.cookies[name].value
except:
return None
def set_cookie( self, value, name='galaxysession', path='/', age=90, version='1' ):
"""Convenience method for setting a session cookie"""
# The galaxysession cookie value must be a high entropy 128 bit random number encrypted
# using a server secret key. Any other value is invalid and could pose security issues.
self.response.cookies[name] = value
self.response.cookies[name]['path'] = path
self.response.cookies[name]['max-age'] = 3600 * 24 * age # 90 days
tstamp = time.localtime( time.time() + 3600 * 24 * age )
self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
self.response.cookies[name]['version'] = version
try:
self.response.cookies[name]['httponly'] = True
except CookieError, e:
log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
def _authenticate_api( self, session_cookie ):
"""
Authenticate for the API via key or session (if available).
"""
api_key = self.request.params.get('key', None)
secure_id = self.get_cookie( name=session_cookie )
api_key_supplied = self.environ.get('is_api_request', False) and api_key
if api_key_supplied and self._check_master_api_key( api_key ):
self.api_inherit_admin = True
log.info( "Session authenticated using Galaxy master api key" )
self.user = None
self.galaxy_session = None
elif api_key_supplied:
# Sessionless API transaction, we just need to associate a user.
try:
provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
except NoResultFound:
return 'Provided API key is not valid.'
if provided_key.user.deleted:
return 'User account is deactivated, please contact an administrator.'
newest_key = provided_key.user.api_keys[0]
if newest_key.key != provided_key.key:
return 'Provided API key has expired.'
self.set_user( provided_key.user )
elif secure_id:
# API authentication via active session
# Associate user using existing session
self._ensure_valid_session( session_cookie )
else:
# Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
self.user = None
self.galaxy_session = None
def _check_master_api_key( self, api_key ):
master_api_key = getattr( self.app.config, 'master_api_key', None )
if not master_api_key:
return False
# Hash keys to make them the same size, so we can do safe comparison.
master_hash = hashlib.sha256( master_api_key ).hexdigest()
provided_hash = hashlib.sha256( api_key ).hexdigest()
return safe_str_cmp( master_hash, provided_hash )
def _ensure_valid_session( self, session_cookie, create=True):
"""
Ensure that a valid Galaxy session exists and is available as
trans.session (part of initialization)
Support for universe_session and universe_user cookies has been
removed as of 31 Oct 2008.
"""
# Try to load an existing session
secure_id = self.get_cookie( name=session_cookie )
galaxy_session = None
prev_galaxy_session = None
user_for_new_session = None
invalidate_existing_session = False
# Track whether the session has changed so we can avoid calling flush
# in the most common case (session exists and is valid).
galaxy_session_requires_flush = False
if secure_id:
# Decode the cookie value to get the session_key
session_key = self.security.decode_guid( secure_id )
try:
# Make sure we have a valid UTF-8 string
session_key = session_key.encode( 'utf8' )
except UnicodeDecodeError:
# We'll end up creating a new galaxy_session
session_key = None
if session_key:
# Retrieve the galaxy_session id via the unique session_key
galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
.filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key, #noqa
self.app.model.GalaxySession.table.c.is_valid==True ) ).first() #noqa
# If remote user is in use it can invalidate the session and in some
# cases won't have a cookie set above, so we need to to check some
# things now.
if self.app.config.use_remote_user:
# If this is an api request, and they've passed a key, we let this go.
assert self.app.config.remote_user_header in self.environ, \
"use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
remote_user_email = self.environ[ self.app.config.remote_user_header ]
if getattr( self.app.config, "normalize_remote_user_email", False ):
remote_user_email = remote_user_email.lower()
if galaxy_session:
# An existing session, make sure correct association exists
if galaxy_session.user is None:
# No user, associate
galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
galaxy_session_requires_flush = True
elif ((galaxy_session.user.email != remote_user_email) and
((not self.app.config.allow_user_impersonation) or
(remote_user_email not in self.app.config.admin_users_list))):
# Session exists but is not associated with the correct
# remote user, and the currently set remote_user is not a
# potentially impersonating admin.
invalidate_existing_session = True
user_for_new_session = self.get_or_create_remote_user( remote_user_email )
log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
remote_user_email, galaxy_session.user.email )
else:
# No session exists, get/create user for new session
user_for_new_session = self.get_or_create_remote_user( remote_user_email )
else:
if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
# Remote user support is not enabled, but there is an existing
# session with an external user, invalidate
invalidate_existing_session = True
log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
galaxy_session.user.email )
elif galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
invalidate_existing_session = True
log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
# Do we need to invalidate the session for some reason?
if invalidate_existing_session:
prev_galaxy_session = galaxy_session
prev_galaxy_session.is_valid = False
galaxy_session = None
# No relevant cookies, or couldn't find, or invalid, so create a new session
if galaxy_session is None:
galaxy_session = self.__create_new_session( prev_galaxy_session, user_for_new_session )
galaxy_session_requires_flush = True
self.galaxy_session = galaxy_session
self.__update_session_cookie( name=session_cookie )
else:
self.galaxy_session = galaxy_session
# Do we need to flush the session?
if galaxy_session_requires_flush:
self.sa_session.add( galaxy_session )
# FIXME: If prev_session is a proper relation this would not
# be needed.
if prev_galaxy_session:
self.sa_session.add( prev_galaxy_session )
self.sa_session.flush()
# If the old session was invalid, get a new history with our new session
if invalidate_existing_session:
self.new_history()
def _ensure_logged_in_user( self, environ, session_cookie ):
# The value of session_cookie can be one of
# 'galaxysession' or 'galaxycommunitysession'
# Currently this method does nothing unless session_cookie is 'galaxysession'
if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
# TODO: re-engineer to eliminate the use of allowed_paths
# as maintenance overhead is far too high.
allowed_paths = (
url_for( controller='root', action='index' ),
url_for( controller='root', action='tool_menu' ),
url_for( controller='root', action='masthead' ),
url_for( controller='root', action='history' ),
url_for( controller='user', action='api_keys' ),
url_for( controller='user', action='create' ),
url_for( controller='user', action='index' ),
url_for( controller='user', action='login' ),
url_for( controller='user', action='logout' ),
url_for( controller='user', action='manage_user_info' ),
url_for( controller='user', action='set_default_permissions' ),
url_for( controller='user', action='reset_password' ),
url_for( controller='user', action='openid_auth' ),
url_for( controller='user', action='openid_process' ),
url_for( controller='user', action='openid_associate' ),
url_for( controller='library', action='browse' ),
url_for( controller='history', action='list' ),
url_for( controller='dataset', action='list' )
)
display_as = url_for( controller='root', action='display_as' )
if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
try:
host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
host = None
if host in UCSC_SERVERS:
return
external_display_path = url_for( controller='', action='display_application' )
if self.request.path.startswith( external_display_path ):
request_path_split = self.request.path.split( '/' )
try:
if (self.app.datatypes_registry.display_applications.get( request_path_split[-5] )
and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links
and request_path_split[-3] != 'None'):
return
except IndexError:
pass
if self.request.path not in allowed_paths:
self.response.send_redirect( url_for( controller='root', action='index' ) )
def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
"""
Create a new GalaxySession for this request, possibly with a connection
to a previous session (in `prev_galaxy_session`) and an existing user
(in `user_for_new_session`).
Caller is responsible for flushing the returned session.
"""
session_key = self.security.get_new_guid()
galaxy_session = self.app.model.GalaxySession(
session_key=session_key,
is_valid=True,
remote_host=self.request.remote_host,
remote_addr=self.request.remote_addr,
referer=self.request.headers.get( 'Referer', None ) )
if prev_galaxy_session:
# Invalidated an existing session for some reason, keep track
galaxy_session.prev_session_id = prev_galaxy_session.id
if user_for_new_session:
# The new session should be associated with the user
galaxy_session.user = user_for_new_session
return galaxy_session
def get_or_create_remote_user( self, remote_user_email ):
"""
Create a remote user with the email remote_user_email and return it
"""
if not self.app.config.use_remote_user:
return None
if getattr( self.app.config, "normalize_remote_user_email", False ):
remote_user_email = remote_user_email.lower()
user = self.sa_session.query( self.app.model.User
).filter( self.app.model.User.table.c.email==remote_user_email ).first() #noqa
if user:
# GVK: June 29, 2009 - This is to correct the behavior of a previous bug where a private
# role and default user / history permissions were not set for remote users. When a
# remote user authenticates, we'll look for this information, and if missing, create it.
if not self.app.security_agent.get_private_user_role( user ):
self.app.security_agent.create_private_user_role( user )
if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
if not user.default_permissions:
self.app.security_agent.user_set_default_permissions( user )
self.app.security_agent.user_set_default_permissions( user, history=True, dataset=True )
elif user is None:
username = remote_user_email.split( '@', 1 )[0].lower()
random.seed()
user = self.app.model.User( email=remote_user_email )
user.set_password_cleartext( ''.join( random.sample( string.letters + string.digits, 12 ) ) )
user.external = True
# Replace invalid characters in the username
for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
username = username.replace( char, '-' )
# Find a unique username - user can change it later
if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
i = 1
while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
i += 1
username += '-' + str(i)
user.username = username
self.sa_session.add( user )
self.sa_session.flush()
self.app.security_agent.create_private_user_role( user )
# We set default user permissions, before we log in and set the default history permissions
if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
self.app.security_agent.user_set_default_permissions( user )
# self.log_event( "Automatically created account '%s'", user.email )
return user
def __update_session_cookie( self, name='galaxysession' ):
"""
Update the session cookie to match the current session.
"""
self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
name=name, path=self.app.config.cookie_path )
def handle_user_login( self, user ):
"""
Login a new user (possibly newly created)
- create a new session
- associate new session with user
- if old session had a history and it was not associated with a user, associate it with the new session,
otherwise associate the current session's history with the user
- add the disk usage of the current session to the user's total disk usage
"""
# Set the previous session
prev_galaxy_session = self.galaxy_session
prev_galaxy_session.is_valid = False
# Define a new current_session
self.galaxy_session = self.__create_new_session( prev_galaxy_session, user )
if self.webapp.name == 'galaxy':
cookie_name = 'galaxysession'
# Associated the current user's last accessed history (if exists) with their new session
history = None
try:
users_last_session = user.galaxy_sessions[0]
last_accessed = True
except:
users_last_session = None
last_accessed = False
if (prev_galaxy_session.current_history and not
prev_galaxy_session.current_history.deleted and
prev_galaxy_session.current_history.datasets):
if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
# If the previous galaxy session had a history, associate it with the new
# session, but only if it didn't belong to a different user.
history = prev_galaxy_session.current_history
if prev_galaxy_session.user is None:
# Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
for hda in history.datasets:
user.total_disk_usage += hda.quota_amount( user )
elif self.galaxy_session.current_history:
history = self.galaxy_session.current_history
if (not history and users_last_session and
users_last_session.current_history and not
users_last_session.current_history.deleted):
history = users_last_session.current_history
elif not history:
history = self.get_history( create=True )
if history not in self.galaxy_session.histories:
self.galaxy_session.add_history( history )
if history.user is None:
history.user = user
self.galaxy_session.current_history = history
if not last_accessed:
# Only set default history permissions if current history is not from a previous session
self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
else:
cookie_name = 'galaxycommunitysession'
self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name=cookie_name )
def handle_user_logout( self, logout_all=False ):
"""
Logout the current user:
- invalidate the current session
- create a new session with no user associated
"""
prev_galaxy_session = self.galaxy_session
prev_galaxy_session.is_valid = False
self.galaxy_session = self.__create_new_session( prev_galaxy_session )
self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
galaxy_user_id = prev_galaxy_session.user_id
if logout_all and galaxy_user_id is not None:
for other_galaxy_session in self.sa_session.query( self.app.model.GalaxySession
).filter( and_( self.app.model.GalaxySession.table.c.user_id==galaxy_user_id, #noqa
self.app.model.GalaxySession.table.c.is_valid==True, #noqa
self.app.model.GalaxySession.table.c.id!=prev_galaxy_session.id ) ): #noqa
other_galaxy_session.is_valid = False
self.sa_session.add( other_galaxy_session )
self.sa_session.flush()
if self.webapp.name == 'galaxy':
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name='galaxysession' )
elif self.webapp.name == 'tool_shed':
self.__update_session_cookie( name='galaxycommunitysession' )
def get_galaxy_session( self ):
"""
Return the current galaxy session
"""
return self.galaxy_session
def get_history( self, create=False ):
"""
        Load the current history, creating a new one only if there is no
current history and we're told to create.
Transactions will not always have an active history (API requests), so
None is a valid response.
"""
history = None
if self.galaxy_session:
history = self.galaxy_session.current_history
if not history and util.string_as_bool( create ):
history = self.new_history()
return history
def set_history( self, history ):
if history and not history.deleted:
self.galaxy_session.current_history = history
self.sa_session.add( self.galaxy_session )
self.sa_session.flush()
history = property( get_history, set_history )
def get_or_create_default_history( self ):
"""
Gets or creates a default history and associates it with the current
session.
"""
# There must be a user to fetch a default history.
if not self.galaxy_session.user:
return self.new_history()
# Look for default history that (a) has default name + is not deleted and
# (b) has no datasets. If suitable history found, use it; otherwise, create
# new history.
unnamed_histories = self.sa_session.query( self.app.model.History ).filter_by(
user=self.galaxy_session.user,
name=self.app.model.History.default_name,
deleted=False )
default_history = None
for history in unnamed_histories:
if len( history.datasets ) == 0:
# Found suitable default history.
default_history = history
break
        # Set or create history.
if default_history:
history = default_history
self.set_history( history )
else:
history = self.new_history()
return history
def new_history( self, name=None ):
"""
Create a new history and associate it with the current session and
its associated user (if set).
"""
# Create new history
history = self.app.model.History()
if name:
history.name = name
# Associate with session
history.add_galaxy_session( self.galaxy_session )
# Make it the session's current history
self.galaxy_session.current_history = history
# Associate with user
if self.galaxy_session.user:
history.user = self.galaxy_session.user
# Track genome_build with history
history.genome_build = self.app.genome_builds.default_value
# Set the user's default history permissions
self.app.security_agent.history_set_default_permissions( history )
# Save
self.sa_session.add_all( ( self.galaxy_session, history ) )
self.sa_session.flush()
return history
@base.lazy_property
def template_context( self ):
return dict()
def make_form_data( self, name, **kwargs ):
rval = self.template_context[name] = FormData()
rval.values.update( kwargs )
return rval
def set_message( self, message, type=None ):
"""
Convenience method for setting the 'message' and 'message_type'
element of the template context.
"""
self.template_context['message'] = message
if type:
self.template_context['status'] = type
def get_message( self ):
"""
Convenience method for getting the 'message' element of the template
context.
"""
return self.template_context['message']
def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
"""
Convenience method for displaying a simple page with a single message.
`type`: one of "error", "warning", "info", or "done"; determines the
type of dialog box and icon displayed with the message
`refresh_frames`: names of frames in the interface that should be
refreshed when the message is displayed
"""
return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an error message. See `show_message`.
"""
return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an ok message. See `show_message`.
"""
return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
        Convenience method for displaying a warning message. See `show_message`.
"""
return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
"""
Convenience method for displaying a simple page with a single HTML
form.
"""
return self.fill_template( template, form=form, header=header,
use_panels=( form.use_panels or use_panels ),
active_view=active_view )
def fill_template(self, filename, **kwargs):
"""
Fill in a template, putting any keyword arguments on the context.
"""
# call get_user so we can invalidate sessions from external users,
# if external auth has been disabled.
self.get_user()
if filename.endswith( ".mako" ):
return self.fill_template_mako( filename, **kwargs )
else:
template = Template( file=os.path.join(self.app.config.template_path, filename),
searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
return str( template )
def fill_template_mako( self, filename, template_lookup=None, **kwargs ):
template_lookup = template_lookup or self.webapp.mako_template_lookup
template = template_lookup.get_template( filename )
template.output_encoding = 'utf-8'
data = dict( caller=self, t=self, trans=self, h=helpers, util=util,
request=self.request, response=self.response, app=self.app )
data.update( self.template_context )
data.update( kwargs )
return template.render( **data )
def stream_template_mako( self, filename, **kwargs ):
template = self.webapp.mako_template_lookup.get_template( filename )
template.output_encoding = 'utf-8'
data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
data.update( self.template_context )
data.update( kwargs )
def render( environ, start_response ):
response_write = start_response( self.response.wsgi_status(), self.response.wsgi_headeritems() )
class StreamBuffer( object ):
def write( self, d ):
response_write( d.encode( 'utf-8' ) )
buffer = StreamBuffer()
context = mako.runtime.Context( buffer, **data )
template.render_context( context )
return []
return render
def fill_template_string(self, template_string, context=None, **kwargs):
"""
Fill in a template, putting any keyword arguments on the context.
"""
template = Template( source=template_string,
searchList=[context or kwargs, dict(caller=self)] )
return str(template)
|
gpl-3.0
| 6,105,172,054,496,387,000 | 47.666667 | 186 | 0.601274 | false |
GoogleCloudPlatform/datacatalog-connectors-rdbms
|
google-datacatalog-oracle-connector/system_tests/cleanup_results_test.py
|
1
|
1324
|
#!/usr/bin/python
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from google.cloud import datacatalog
datacatalog_client = datacatalog.DataCatalogClient()
class CleanupResultsTest(unittest.TestCase):
def test_entries_should_not_exist_after_cleanup(self):
query = 'system=oracle'
scope = datacatalog.SearchCatalogRequest.Scope()
scope.include_project_ids.append(
os.environ['ORACLE2DC_DATACATALOG_PROJECT_ID'])
request = datacatalog.SearchCatalogRequest()
request.scope = scope
request.query = query
request.page_size = 1000
search_results = [
result for result in datacatalog_client.search_catalog(request)
]
self.assertEqual(len(search_results), 0)
|
apache-2.0
| 2,843,499,763,594,922,000 | 29.790698 | 75 | 0.714502 | false |
hfutsuchao/Python2.6
|
fromNet/pinyinDomainScan.py
|
1
|
1428
|
#coding:utf-8
from yuming import ifRegged
import sys
import time,random
dicType = []
dic = {}
dtFile = open('domainType','r').readlines()
for t in dtFile[1:-1]:
dicType.append(t[:-1])
#print dicType
times = [1,2,3,4]
for t in times:
dic[t] = []
totalDic = []
pinyinFile = open('pinyin','r').readlines()
for py in pinyinFile[1:-1]:
py = py[:-2]
dic[1].append(py)
totalDic.append(py)
'''for py2 in pinyinFile:
py2 = py2[:-2]
dic[2].append(py+py2)
totalDic.append(py+py2)'''
''' for py3 in pinyinFile:
py3 = py3[:-1]
dic[3].append(py+py2+py3)
for py4 in pinyinFile:
py4 = py4[:-1]
dic[4].append(py+py2+py3+py4)
'''
result = open('unRegged','a')
'''
print dicType[:10]
sys.exit()
'''
timer=0
for dm in totalDic:
for dtype in dicType[:1]:
domainName = dm + dtype
try:
print domainName
regResult = ifRegged(domainName)
except Exception,e:
print domainName,e
continue
if regResult==1:
print domainName + ' unRegged!'
result.write(domainName + '\t' + 'unRegged!' + '\n')
time.sleep(random.random()*1)
timer = timer + 1
if timer == 20:
result.close()
time.sleep(2)
result = open('unRegged','a')
timer = 0
|
gpl-2.0
| -3,893,511,307,367,741,400 | 21.666667 | 65 | 0.516106 | false |
20c/django-ixpmgr
|
src/django_ixpmgr/util.py
|
1
|
1735
|
import configparser
from collections import namedtuple
import os
import re
from ixpmgr import settings
from ixpmgr.models import Customer
from ixpmgr.models import PhysicalInterface
from ixpmgr.models import VirtualInterface
from ixpmgr.models import VlanInterface
def load_config():
"""
load the IXP-Manager ini config into a dict and return it
"""
home = settings.HOME
cfg_file = os.path.join(home, "application", "configs", "application.ini")
if not os.path.exists(cfg_file):
raise ValueError(f"Config file {cfg_file} not found")
parser = configparser.ConfigParser()
parser.read(cfg_file)
rv = {}
for section in parser.sections():
if section not in rv:
rv[section] = {}
for k, v in parser.items(section):
rv[section][k] = v
return rv
Interface = namedtuple("Interface", ["phy", "vir"])
def get_interface(vir_intf_id):
vir_intf = VirtualInterface.objects.get(pk=vir_intf_id)
phy_intf = PhysicalInterface.objects.get(pk=each.id)
def parse_macaddr(addr):
addr = re.sub(r"[\.\s:-]+", "", addr)
return "{:012x}".format(int(addr, 16))
def get_macaddr(virt_intf):
qs = MacAddress.objects.filter(virtual_interface__id=virt_intf.id)
cnt = len(qs)
if cnt == 1:
return qs[0]
elif not cnt:
raise ValueError("no mac addresses defined for interface")
else:
raise ValueError("multiple mac addresses already defined for interface")
def format_macaddr(addr):
return ":".join(map("".join, list(zip(*[iter(addr)] * 2))))
def dns_intf_name(intf):
regex = settings.RDNS_INTF_REGEX
for (pattern, repl) in regex:
intf = re.sub(pattern, repl, intf)
return intf
|
apache-2.0
| 7,544,470,746,037,920,000 | 24.514706 | 80 | 0.65879 | false |
twdb/sonde
|
sonde/formats/generic.py
|
1
|
6474
|
"""
sonde.formats.generic
~~~~~~~~~~~~~~~~~
This module implements a generic format.
The files are in .csv format and must conform to the
following guidelines
comments and metadata at top of file in the format:
# name: value
a timezone field: (UTC-?, the data must all be in one UTC offset)
# timezone: UTC-6
    a fill_value field:
    # fill_value: -999.99
    the last two comment/header lines should be the following
    parameter header prepended by single #:
    # datetime, air_pressure, water_specific_conductance, etc
    (datetime must be first field and in format yyyy/mm/dd HH:MM:SS)
    (parameter names must be from master_param_list)
    unit header prepended by single #:
    # yyyy/mm/dd HH:MM:SS, Pa, mS/cm, PSU, degC, mH2O, n/a, n/a, n/a
    (units must be from supported_units_list)
comma seperated data
special columns or header items:
original_data_file_name, instrument_manufacturer,
instrument_serial_number
if these exist they will overide self.manufacturer,
self.data_file and self.serial_number
"""
from __future__ import absolute_import
import csv
import datetime
import pkg_resources
import re
from StringIO import StringIO
import warnings
import xlrd
import numpy as np
import quantities as pq
from .. import sonde
from .. import quantities as sq
from ..timezones import UTCStaticOffset
class GenericDataset(sonde.BaseSondeDataset):
"""
Dataset object that represents the data contained in a generic csv
file.
"""
def __init__(self, data_file):
self.manufacturer = 'generic'
self.file_format = 'generic'
self.data_file = data_file
super(GenericDataset, self).__init__(data_file)
def _read_data(self):
"""
Read the generic data file
"""
unit_map = {'degc': pq.degC,
'degf': pq.degC,
'm': pq.m,
'mh2o': sq.mH2O,
'ft': sq.ftH2O,
'fth2o': sq.ftH2O,
'ms/cm': sq.mScm,
'psu': sq.psu,
'psi': pq.psi,
'pa': pq.Pa,
'v': pq.volt,
'mg/l': sq.mgl,
'%': pq.percent,
'nd': pq.dimensionless,
'ntu': sq.ntu,
}
generic_data = GenericReader(self.data_file)
self.parameters = dict()
self.data = dict()
metadata = dict()
for parameter in generic_data.parameters:
if parameter.unit != 'n/a':
if parameter.name.lower() in sonde.master_parameter_list:
pcode = parameter.name.lower()
                else:
                    warnings.warn('Un-mapped Parameter: %s' %
                                  parameter.name.lower(),
                                  Warning)
                    # Skip unmapped parameters; pcode below would
                    # otherwise be stale or undefined.
                    continue
try:
punit = unit_map[(parameter.unit.lower()).strip()]
if not np.all(np.isnan(parameter.data)):
self.parameters[pcode] = sonde.master_parameter_list[pcode]
self.data[pcode] = parameter.data * punit
except KeyError:
warnings.warn('Un-mapped Unit Type\n'
'Unit Name: %s' % parameter.unit,
Warning)
else:
metadata[parameter.name.lower()] = parameter.data
self.format_parameters = generic_data.format_parameters
        #override default metadata if present in file
names = ['manufacturer', 'data_file', 'serial_number']
kwds = ['instrument_manufacturer', 'original_data_file',
'instrument_serial_number']
for name, kwd in zip(names, kwds):
#check format_parameters
            idx = [i for i
                   in self.format_parameters.keys() if i.lower() == kwd]
            if idx != []:
                setattr(self, name, self.format_parameters[idx[0]])
            idx = [i for i in metadata.keys() if i.lower() == kwd]
            if idx != []:
                setattr(self, name, metadata[idx[0]])
self.dates = generic_data.dates
class GenericReader:
"""
    A reader object that opens and reads a generic csv data file.
`data_file` should be either a file path string or a file-like
    object. The timezone of the timestamps is read from the file's
    ``timezone`` header field.
"""
def __init__(self, data_file):
self.num_params = 0
self.parameters = []
self.format_parameters = {}
self.read_generic(data_file)
self.dates = [i.replace(tzinfo=self.default_tzinfo)
for i in self.dates]
def read_generic(self, data_file):
"""
        Open and read a generic csv data file.
"""
if type(data_file) == str:
fid = open(data_file, 'r')
else:
fid = data_file
buf = fid.readline().strip('# ')
while buf:
if buf[0:8].lower() == 'datetime':
params = buf.split(',')
units = fid.readline().strip('# ').split(',')
break
key, val = buf.split(':')
self.format_parameters[key.strip()] = val.strip()
buf = fid.readline().strip('# ')
utc_offset = int(
self.format_parameters['timezone'].lower().strip('utc'))
self.default_tzinfo = UTCStaticOffset(utc_offset)
data = np.genfromtxt(fid, dtype=None, names=params, delimiter=',')
self.dates = np.array(
[datetime.datetime.strptime(dt, '%Y/%m/%d %H:%M:%S')
for dt in data['datetime']]
)
#assign param & unit names
for param, unit in zip(params[1:], units[1:]):
self.num_params += 1
self.parameters.append(Parameter(param.strip(), unit.strip()))
for ii in range(self.num_params):
param = self.parameters[ii].name
self.parameters[ii].data = data[param]
class Parameter:
"""
    Class that implements a simple structure holding a parameter's
    name, unit and data
"""
def __init__(self, param_name, param_unit):
self.name = param_name
self.unit = param_unit
self.data = []
|
bsd-3-clause
| -5,914,747,278,133,182,000 | 32.71875 | 83 | 0.538153 | false |
wilkeraziz/grasp
|
grasp/mt/pipeline.py
|
1
|
27278
|
"""
:Authors: - Wilker Aziz
"""
import random
import os
import itertools
import numpy as np
import grasp.ptypes as ptypes
import grasp.semiring as semiring
from grasp.loss.fast_bleu import DecodingBLEU
from grasp.mt.segment import SegmentMetaData
import grasp.mt.cdec_format as cdeclib
from grasp.mt.input import make_pass_grammar
from grasp.mt.util import GoalRuleMaker
from grasp.mt.util import make_dead_oview
import grasp.formal.scfgop as scfgop
from grasp.formal.fsa import make_dfa
from grasp.formal.fsa import make_dfa_set
from grasp.formal.topsort import AcyclicTopSortTable
from grasp.formal.wfunc import TableLookupFunction
from grasp.formal.traversal import bracketed_string
from grasp.formal.traversal import yield_string
from grasp.recipes import dummyfunc
from grasp.recipes import traceit
from grasp.recipes import smart_ropen
from grasp.recipes import smart_wopen
from grasp.recipes import pickle_it
from grasp.recipes import unpickle_it
from grasp.scoring.lookup import RuleTable
from grasp.scoring.stateless import WordPenalty
from grasp.scoring.stateless import ArityPenalty
from grasp.scoring.lm import StatelessLM
from grasp.scoring.lm import KenLM
from grasp.scoring.scorer import TableLookupScorer
from grasp.scoring.scorer import StatelessScorer
from grasp.scoring.scorer import StatefulScorer
from grasp.scoring.model import DummyModel
from grasp.scoring.util import make_weight_map
from grasp.scoring.util import InitialWeightFunction
from grasp.scoring.util import construct_extractors
from grasp.scoring.util import read_weights
from grasp.scoring.util import make_models
from grasp.cfg.model import DummyConstant
from grasp.cfg.symbol import Nonterminal
from grasp.alg.deduction import NederhofParser
from grasp.alg.deduction import EarleyParser
from grasp.alg.deduction import EarleyRescorer
from grasp.alg.rescoring import SlicedRescoring
from grasp.alg.chain import apply_filters
from grasp.alg.chain import group_by_identity
from grasp.alg.chain import group_by_projection
from grasp.alg.value import acyclic_value_recursion
from grasp.alg.constraint import Constraint as DummyConstraint
from grasp.alg.constraint import GlueConstraint
from grasp.alg.constraint import HieroConstraints
def is_step_complete(step, saving, redo):
return step in saving and os.path.exists(saving[step]) and not redo
def read_segments_from_stream(istream, grammar_dir=None, shuffle=False) -> 'tuple':
"""
    Read cdec-formatted input segments (possibly along with their reference translations) from an input stream.
:param istream: input stream
    :param grammar_dir: if given, overrides each segment's grammar directory
:param shuffle: shuffle segments inplace
:return: tuple of SegmentMetaData objects
"""
if shuffle:
segments = [SegmentMetaData.parse(input_str, grammar_dir=grammar_dir)
for input_str in istream]
random.shuffle(segments)
return tuple(segments)
else:
return tuple(SegmentMetaData.parse(input_str, grammar_dir=grammar_dir)
for input_str in istream)
def read_segments_from_file(path, grammar_dir=None, shuffle=False) -> 'tuple':
"""
    Read cdec-formatted input segments (possibly along with their reference translations) from a file.
:param path: path to file (possibly gzipped)
    :param grammar_dir: if given, overrides each segment's grammar directory
:param shuffle: shuffle segments inplace
:return: tuple of SegmentMetaData objects
"""
return read_segments_from_stream(smart_ropen(path), grammar_dir=grammar_dir, shuffle=shuffle)
def save_segments(path, segments):
with smart_wopen(path) as fo:
for seg in segments:
print(seg.to_sgm(True), file=fo)
def load_feature_extractors(rt=None, wp=None, ap=None, slm=None, lm=None) -> 'tuple':
"""
Load feature extractors depending on command line options.
For now we have the following extractors:
* RuleTable
* WordPenalty
* ArityPenalty
* StatelessLM
* KenLM
:return: a tuple of Extractor objects
"""
extractors = []
if rt:
extractor = RuleTable(uid=len(extractors),
name='RuleTable')
extractors.append(extractor)
if wp:
extractor = WordPenalty(uid=len(extractors),
name=wp[0],
penalty=float(wp[1]))
extractors.append(extractor)
if ap:
extractor = ArityPenalty(uid=len(extractors),
name=ap[0],
penalty=float(ap[1]))
extractors.append(extractor)
if slm:
extractor = StatelessLM(uid=len(extractors),
name=slm[0],
order=int(slm[1]),
path=slm[2])
extractors.append(extractor)
if lm:
extractor = KenLM(uid=len(extractors),
name=lm[0],
order=int(lm[1]),
path=lm[2])
extractors.append(extractor)
return tuple(extractors)
def load_model(description, weights, init):
"""
:param description: path to Extractor constructors
:param weights: path to weights
:param init: initialisation strategy
:return: ModelContainer
"""
extractors = construct_extractors(description)
if not weights and init is None:
raise ValueError('Either provide a file containing weights or an initialisation strategy')
if weights:
wmap = read_weights(weights)
else:
if init == 'uniform':
wmap = make_weight_map(extractors, InitialWeightFunction.uniform(len(extractors)))
elif init == 'random':
wmap = make_weight_map(extractors, InitialWeightFunction.normal())
else:
wmap = make_weight_map(extractors, InitialWeightFunction.constant(float(init)))
return make_models(wmap, extractors)
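# Illustrative usage (the file names are hypothetical): build a model
# container with uniform initial weights when no weight file is available.
#
#   model = load_model('extractors.cfg', weights=None, init='uniform')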
def make_grammar_hypergraph(seg, extra_grammar_paths=[],
glue_grammar_paths=[],
pass_through=True,
default_symbol='X') -> 'Hypergraph':
"""
Load grammars (i.e. main, extra, glue, passthrough) and prepare input FSA.
:return: Hypergraph grammar
"""
# 1. Load grammars
# 1a. additional grammars
extra_grammars = []
if extra_grammar_paths:
for grammar_path in extra_grammar_paths:
grammar = cdeclib.load_grammar(grammar_path)
extra_grammars.append(grammar)
# 1b. glue grammars
glue_grammars = []
if glue_grammar_paths:
for glue_path in glue_grammar_paths:
glue = cdeclib.load_grammar(glue_path)
glue_grammars.append(glue)
# 1c. main grammar
main_grammar = cdeclib.load_grammar(seg.grammar)
# 2. Make a pass-through grammar for the given segment
# 2a. pass-through grammar
_, pass_grammar = make_pass_grammar(seg,
list(itertools.chain([main_grammar], extra_grammars, glue_grammars)),
semiring.inside,
default_symbol)
# 3a. put all (normal) grammars together
if pass_through:
grammars = list(itertools.chain([main_grammar], extra_grammars, [pass_grammar]))
else:
grammars = list(itertools.chain([main_grammar], extra_grammars))
# and finally create a hypergraph based on the source side of the grammar
# TODO: allow different models (other than DummyConstant)
hg = scfgop.make_hypergraph_from_input_view(grammars,
glue_grammars,
DummyConstant(semiring.inside.one))
return hg
def make_input_dfa(seg) -> 'DFA':
"""
Create a DFA view of the input.
"""
input_dfa = make_dfa(seg.src_tokens())
return input_dfa
def make_reference_dfa(seg) -> 'DFA':
return make_dfa_set([ref.split() for ref in seg.refs], semiring.inside.one)
def parse_dfa(hg, root, dfa, goal_rule, bottomup=True, constraint=DummyConstraint()) -> 'Hypergraph':
"""
Intersect a (possibly cyclic) hypergaph and a DFA.
"""
# 2a. get a parser and intersect the source FSA
if bottomup:
parser = NederhofParser(hg, dfa, semiring.inside, constraint=constraint)
else:
parser = EarleyParser(hg, dfa, semiring.inside, constraint=constraint)
    return parser.do(root, goal_rule)
def make_target_forest(source_forest, rulescorer=TableLookupScorer(DummyModel())) -> 'Hypergraph':
return scfgop.output_projection(source_forest, semiring.inside, rulescorer)
def get_lookup_components(forest, lookup_extractors) -> 'list':
"""
Return the TableLookup representation of each edge in the forest.
"""
return scfgop.lookup_components(forest, lookup_extractors)
def get_stateless_components(forest, stateless_extractors) -> 'list':
"""
Return the Stateless representation of each edge in the forest.
"""
return scfgop.stateless_components(forest, stateless_extractors)
def rescore_forest(forest, root, lookup, stateless, stateful, goal_rule, omega=None, keep_components=True) -> 'tuple':
"""
Return a rescored forest and a list of component vectors.
"""
rescorer = EarleyRescorer(forest,
lookup,
stateless,
stateful,
semiring.inside,
omega=omega,
map_edges=False,
keep_frepr=keep_components)
rescored_forest = rescorer.do(root, goal_rule)
return rescored_forest, rescorer.components()
def pass0(seg, extra_grammar_paths=[], glue_grammar_paths=[], pass_through=True,
default_symbol='X', goal_str='GOAL', start_str='S', max_span=-1, n_goal=0,
saving={}, redo=True, log=dummyfunc) -> 'Hypergraph':
"""
Pass0 consists in parsing with the source side of the grammar.
For now, pass0 does not do any scoring (not even local), but it could (TODO).
Steps
1. Make a hypergraph view of the grammar
2. Make an input DFA
3. Parse the input DFA
:return: source forest
"""
if is_step_complete('forest', saving, redo):
return unpickle_it(saving['forest'])
# here we need to decode for sure
log('[%d] Make hypergraph view of all available grammars', seg.id)
# make a hypergraph view of all available grammars
grammar = make_grammar_hypergraph(seg,
extra_grammar_paths=extra_grammar_paths,
glue_grammar_paths=glue_grammar_paths,
pass_through=pass_through,
default_symbol=default_symbol)
# parse source lattice
log('[%d] Parse source DFA', seg.id)
goal_maker = GoalRuleMaker(goal_str=goal_str, start_str=start_str, n=n_goal)
dfa = make_input_dfa(seg)
forest = parse_dfa(grammar,
grammar.fetch(Nonterminal(start_str)),
dfa,
goal_maker.get_iview(),
bottomup=True,
constraint=HieroConstraints(grammar, dfa, max_span))
if 'forest' in saving:
pickle_it(saving['forest'], forest)
return forest
def pass1(seg, src_forest, model,
saving={}, redo=True,
          log=dummyfunc) -> 'tuple':
"""
Pass1 consists in obtaining a target forest and locally scoring it.
Steps
1. Project target side of the forest
2. Lookup scoring
3. Stateless scoring
    :return: target forest, lookup components, stateless components
"""
if is_step_complete('forest', saving, redo):
tgt_forest = unpickle_it(saving['forest'])
else:
# target projection
log('[%d] Project target rules', seg.id)
tgt_forest = make_target_forest(src_forest)
if 'forest' in saving:
pickle_it(saving['forest'], tgt_forest)
# local scoring
if is_step_complete('lookup', saving, redo):
lookup_comps = unpickle_it(saving['lookup'])
else:
log('[%d] Lookup scoring', seg.id)
lookup_comps = get_lookup_components(tgt_forest, model.lookup.extractors())
if 'lookup' in saving:
pickle_it(saving['lookup'], lookup_comps)
if is_step_complete('stateless', saving, redo):
stateless_comps = unpickle_it(saving['stateless'])
else:
log('[%d] Stateless scoring', seg.id)
stateless_comps = get_stateless_components(tgt_forest, model.stateless.extractors())
if 'stateless' in saving:
pickle_it(saving['stateless'], stateless_comps)
return tgt_forest, lookup_comps, stateless_comps
def pass2(seg, forest,
lookup_scorer, stateless_scorer, stateful_scorer,
goal_rule, omega=None,
saving={}, redo=True, log=dummyfunc) -> 'tuple':
"""
Pass2 consists in exactly rescoring a forest.
:return: rescored forest (a Hypergraph), and components (one FComponents object per edge)
"""
if is_step_complete('forest', saving, redo) and is_step_complete('components', saving, redo) :
rescored_forest = unpickle_it(saving['forest'])
components = unpickle_it(saving['components'])
return rescored_forest, components
log('[%d] Forest rescoring', seg.id)
rescored_forest, components = rescore_forest(forest,
0,
lookup_scorer,
stateless_scorer,
stateful_scorer,
goal_rule=goal_rule,
omega=omega,
keep_components=True)
if 'forest' in saving:
pickle_it(saving['forest'], rescored_forest)
if 'components' in saving:
pickle_it(saving['components'], components)
return rescored_forest, components
def draw_samples(forest,
omega,
tsort,
lookup_scorer,
stateless_scorer,
stateful_scorer,
n_samples, batch_size, within, initial, prior, burn, lag, temperature0,
goal_rule,
dead_rule):
sampler = SlicedRescoring(forest,
omega,
tsort,
lookup_scorer,
stateless_scorer,
stateful_scorer,
semiring.inside,
goal_rule,
dead_rule)
# here samples are represented as sequences of edge ids
d0, markov_chain = sampler.sample(n_samples=n_samples,
batch_size=batch_size,
within=within,
initial=initial,
prior=prior,
burn=burn,
lag=lag,
temperature0=temperature0)
return d0, markov_chain
def consensus(seg, forest, samples, log=dummyfunc):
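    # Minimum-Bayes-risk style decision: rank candidate translations by their
    # expected BLEU loss under the empirical posterior over sampled strings.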
# total number of samples kept
n_samples = len(samples)
projections = group_by_projection(samples, lambda d: yield_string(forest, d.edges))
log('[%d] Consensus decoding', seg.id)
# translation strings
support = [group.key for group in projections]
# empirical distribution
posterior = np.array([float(group.count) / n_samples for group in projections], dtype=ptypes.weight)
# consensus decoding
scorer = DecodingBLEU(support, posterior)
losses = np.array([scorer.loss(y) for y in support], dtype=ptypes.weight)
# order samples by least loss, then by max prob
ranking = sorted(range(len(support)), key=lambda i: (losses[i], -posterior[i]))
return [(losses[i], posterior[i], support[i]) for i in ranking]
def make_slice_sampler(seg, model,
extra_grammar_paths=[], glue_grammar_paths=[], pass_through=True,
default_symbol='X', goal_str='GOAL', start_str='S',
saving={}, redo=True,
                       log=dummyfunc) -> 'tuple':
"""
    Construct the locally scored target forest and a slice sampler over it.
    :return: target forest, local weight function, top-sort table, slice sampler
"""
# check for pass1
if all(is_step_complete(step, saving, redo) for step in ['forest', 'lookup', 'stateless']):
tgt_forest = unpickle_it(saving['forest'])
lookup_comps = unpickle_it(saving['lookup'])
stateless_comps = unpickle_it(saving['stateless'])
else:
src_forest = pass0(seg,
extra_grammar_paths=extra_grammar_paths,
glue_grammar_paths=glue_grammar_paths,
pass_through=pass_through,
default_symbol=default_symbol,
goal_str=goal_str,
start_str=start_str,
n_goal=0,
saving={},
redo=redo,
log=log)
# pass1: local scoring
tgt_forest, lookup_comps, stateless_comps = pass1(seg,
src_forest,
model,
saving=saving,
redo=redo,
log=log)
# l(d)
lfunc = TableLookupFunction(np.array([semiring.inside.times(model.lookup.score(ff1),
model.stateless.score(ff2))
for ff1, ff2 in zip(lookup_comps, stateless_comps)], dtype=ptypes.weight))
# top sort table
tsort = AcyclicTopSortTable(tgt_forest)
goal_maker = GoalRuleMaker(goal_str=goal_str, start_str=start_str, n=1)
# slice sampler
sampler = SlicedRescoring(tgt_forest,
lfunc,
tsort,
TableLookupScorer(model.dummy),
StatelessScorer(model.dummy),
StatefulScorer(model.stateful),
semiring.inside,
goal_rule=goal_maker.get_oview(),
dead_rule=make_dead_oview())
return tgt_forest, lfunc, tsort, sampler
def decode(seg, args, n_samples, model, saving, redo, log=dummyfunc):
# first we check whether the decisions have been completed before
if is_step_complete('decisions', saving, redo):
log('[%d] Reusing decisions', seg.id)
with smart_ropen(saving['decisions']) as fi:
for line in fi.readlines():
if line.startswith('#'):
continue
line = line.strip()
if not line:
continue
fields = line.split(' ||| ') # that should be (loss, posterior, solution)
if len(fields) == 3:
return fields[2] # that's the solution
forest, lfunc, tsort, sampler = make_slice_sampler(seg,
model,
extra_grammar_paths=args.extra_grammar,
glue_grammar_paths=args.glue_grammar,
pass_through=args.pass_through,
default_symbol=args.default_symbol,
goal_str=args.goal,
start_str=args.start,
saving=saving,
redo=args.redo,
log=log)
d0, markov_chain = sampler.sample(n_samples=n_samples,
batch_size=args.batch,
within=args.within,
initial=args.initial,
prior=args.prior,
burn=args.burn,
lag=args.lag,
temperature0=args.temperature0)
# TODO: save stuff
samples = apply_filters(markov_chain,
burn=args.burn,
lag=args.lag)
decisions = consensus(seg, forest, samples)
if 'decisions' in saving:
# write all decisions to file
with smart_wopen(saving['decisions']) as fo:
print('# co-loss ||| posterior ||| solution', file=fo)
for l, p, y in decisions:
print('{0} ||| {1} ||| {2}'.format(l, p, y), file=fo)
return decisions[0][2] # return best translation
@traceit
def training_decode(seg, args, n_samples, staticdir, decisiondir, model, redo, log=dummyfunc):
saving = {
'forest': '{0}/{1}.hyp.forest'.format(staticdir, seg.id),
'lookup': '{0}/{1}.hyp.ffs.rule'.format(staticdir, seg.id),
'stateless': '{0}/{1}.hyp.ffs.stateless'.format(staticdir, seg.id),
'decisions': '{0}/{1}.gz'.format(decisiondir, seg.id)
}
return decode(seg, args, n_samples, model, saving, redo, log)
@traceit
def training_biparse(seg, args, workingdir, model, log=dummyfunc) -> 'bool':
"""
Steps:
I. Pass0 and pass1: parse source, project, local scoring
II. Pass2
- make a reference DFA
- parse the reference DFA
- fully score the reference forest (lookup, stateless, stateful)
- save rescored forest and components
:return: whether or not the input is bi-parsable
"""
pass1_files = ['{0}/{1}.hyp.forest'.format(workingdir, seg.id),
'{0}/{1}.hyp.ffs.rule'.format(workingdir, seg.id),
'{0}/{1}.hyp.ffs.stateless'.format(workingdir, seg.id)]
ref_files = ['{0}/{1}.ref.ffs.all'.format(workingdir, seg.id),
'{0}/{1}.ref.forest'.format(workingdir, seg.id)]
# check for redundant work
if all(os.path.exists(path) for path in pass1_files) and not args.redo:
if all(os.path.exists(path) for path in ref_files):
log('[%d] Reusing forests for segment', seg.id)
return True # parsable
else:
return False # not parsable
# pass0: parsing
src_forest = pass0(seg,
extra_grammar_paths=args.extra_grammar,
glue_grammar_paths=args.glue_grammar,
pass_through=args.pass_through,
default_symbol=args.default_symbol,
goal_str=args.goal,
start_str=args.start,
n_goal=0,
saving={},
redo=args.redo,
log=log)
# pass1: local scoring
saving1 = {
'forest': '{0}/{1}.hyp.forest'.format(workingdir, seg.id),
'lookup': '{0}/{1}.hyp.ffs.rule'.format(workingdir, seg.id),
'stateless': '{0}/{1}.hyp.ffs.stateless'.format(workingdir, seg.id)
}
tgt_forest, lookup_comps, stateless_comps = pass1(seg,
src_forest,
model,
saving=saving1,
redo=args.redo,
log=log)
# parse reference lattice
log('[%d] Parse reference DFA', seg.id)
ref_dfa = make_reference_dfa(seg)
goal_maker = GoalRuleMaker(goal_str=args.goal, start_str=args.start, n=1)
ref_forest = parse_dfa(tgt_forest,
0,
ref_dfa,
goal_maker.get_oview(),
bottomup=False)
if not ref_forest:
return False # not parsable
# pass2: rescore reference forest
saving2 = {
'forest': '{0}/{1}.ref.forest'.format(workingdir, seg.id),
'components': '{0}/{1}.ref.ffs.all'.format(workingdir, seg.id)
}
goal_maker.update()
pass2(seg, ref_forest,
TableLookupScorer(model.lookup),
StatelessScorer(model.stateless),
StatefulScorer(model.stateful),
goal_maker.get_oview(),
saving=saving2, redo=args.redo,
log=log)
return True # parsable
@traceit
def training_parse(seg, args, workingdir, model, log=dummyfunc) -> 'bool':
"""
Steps:
I. Pass0 and pass1: parse source, project, local scoring
II. Pass2
- make a reference DFA
- parse the reference DFA
- fully score the reference forest (lookup, stateless, stateful)
- save rescored forest and components
:return: whether or not the input is bi-parsable
"""
pass1_files = ['{0}/{1}.hyp.forest'.format(workingdir, seg.id),
'{0}/{1}.hyp.ffs.rule'.format(workingdir, seg.id),
'{0}/{1}.hyp.ffs.stateless'.format(workingdir, seg.id)]
# check for redundant work
if all(os.path.exists(path) for path in pass1_files) and not args.redo:
return True
# pass0: parsing
src_forest = pass0(seg,
extra_grammar_paths=args.extra_grammar,
glue_grammar_paths=args.glue_grammar,
pass_through=args.pass_through,
default_symbol=args.default_symbol,
goal_str=args.goal,
start_str=args.start,
n_goal=0,
saving={},
redo=args.redo,
log=log)
if not src_forest:
return False
# pass1: local scoring
saving1 = {
'forest': '{0}/{1}.hyp.forest'.format(workingdir, seg.id),
'lookup': '{0}/{1}.hyp.ffs.rule'.format(workingdir, seg.id),
'stateless': '{0}/{1}.hyp.ffs.stateless'.format(workingdir, seg.id)
}
tgt_forest, lookup_comps, stateless_comps = pass1(seg,
src_forest,
model,
saving=saving1,
redo=args.redo,
log=log)
return True
|
apache-2.0
| -1,688,278,262,305,269,200 | 37.097765 | 118 | 0.5507 | false |
fernandolins/What2watch
|
What2Watch/settings.py
|
1
|
5815
|
# Django settings for What2Watch project.
import os
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
LOGIN_URL = '/'
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'watch', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': 'what',
'PASSWORD': 'UUjTS8HCZcQk8Bn3st3d',
'HOST': 'localhost', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
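# dj_database_url.config() parses the DATABASE_URL environment variable
# (Heroku-style); note that this assignment unconditionally replaces the
# default settings defined above.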
import dj_database_url
DATABASES['default'] = dj_database_url.config()
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Recife'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'pt_BR'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = 'staticfiles'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(ROOT_PATH, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'i4dmpvcym6xbtljt5v@rd^fc4f()f#u1va6s&l!oa2g4z$&)10'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'What2Watch.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'What2Watch.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(ROOT_PATH, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'south',
'core',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
try:
from local_settings import *
except:
pass
|
lgpl-3.0
| -936,482,418,237,707,900 | 31.853107 | 136 | 0.687704 | false |
sintrb/Douban-Group-AD-Killer
|
datamining/classify/bayes.py
|
1
|
2418
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2014-10-15
# @Author : Robin (sintrb@gmail.com)
# @Link : https://github.com/sintrb/Douban-Group-AD-Killer
# @Version : 1.0
from base import BaseClassifier, test_classifer
class BaseProbability():
def __init__(self, cls, prt):
self.cls = cls
self.prt = prt
def save(self):
pass
class BaseAttribute(object):
def __init__(self, atb):
self.atb = atb
self.count = 0
self.map = {}
def all_probability(self):
return self.map.values()
def get_probability(self, cls):
if cls not in self.map:
self.map[cls] = BaseProbability(cls, 0.0)
return self.map[cls]
def save(self):
pass
class BaseDriver(object):
def __init__(self):
self.classes = {}
self.attributes = {}
def has_sample(self, sample):
return sample in self.classes
def add_sample(self, sample, cls):
if cls not in self.classes:
self.classes[cls] = set()
self.classes[cls].add(sample)
def all_class(self):
return self.classes
def all_attribute(self):
return self.attributes.values()
def has_attribute(self, atb):
return atb in self.attributes
def get_attribute(self, atb):
if atb not in self.attributes:
self.attributes[atb] = BaseAttribute(atb)
return self.attributes[atb]
def show_info(self):
for atb in self.all_attribute():
for prt in atb.all_probability():
                print '%s --> %s %s' % (atb.atb, prt.cls, prt.prt)
def save(self):
pass
class NaiveBayesClassifier(BaseClassifier):
"""朴素贝叶斯分类"""
def __init__(self, db):
super(NaiveBayesClassifier, self).__init__()
self.db = db
def training(self, sample, cls, force=False):
if force or not self.db.has_sample(sample):
self.db.add_sample(sample, cls)
for a in sample:
att = self.db.get_attribute(a)
prt = att.get_probability(cls)
prt.prt = prt.prt + 1.0/len(sample)
prt.save()
att.count = att.count + 1
att.save()
self.db.save()
def classify(self, sample):
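        # Score each class by summing, over the sample's attributes, that
        # attribute's class-conditional weight normalised by its total count
        # (an additive naive-Bayes-style approximation).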
clss = {}
for c in self.db.all_class():
clss[c] = 0.0
for a in sample:
if self.db.has_attribute(a):
atb = self.db.get_attribute(a)
for prt in atb.all_probability():
                    if prt.cls not in clss:
clss[prt.cls] = 0
clss[prt.cls] = clss[prt.cls] + (prt.prt / atb.count)
else:
# print 'unknown attribute: %s'%a
pass
return clss
if __name__ == '__main__':
    # Quick self-test
nbc = NaiveBayesClassifier(BaseDriver())
test_classifer(nbc)
|
gpl-2.0
| 1,136,967,823,608,875,100 | 23.742268 | 61 | 0.65125 | false |
vmintam/flasky_older
|
fabfile_youtrack.py
|
1
|
8441
|
__author__ = 'vmintam'
from cuisine import user_ensure, dir_exists, dir_ensure, mode_sudo, dir_remove
from cuisine import user_remove, user_check, file_write, package_ensure_yum
from cuisine import package_clean_yum, package_update_yum, file_append
from fabric.api import env, hide, sudo, run
from fabric.colors import red, green
from fabric.decorators import with_settings
env.hosts = ['192.168.1.81']
SUDO_USER = 'vmintam'
SUDO_PASS = '13119685'
YOUTRACK_USER = 'youtrack'
YOUTRACK_LINK = 'http://download.jetbrains.com/charisma/youtrack-6.0.12223.jar'
YOUTRACK_NAME = "youtrack-6.0.12223.jar"
WORKING_DIR = "/usr/local/youtrack"
def user_setup(user):
""" Creates a test the docs user """
with mode_sudo():
if user_check(user):
user_remove(user, rmhome='/home/%s' % user)
user_ensure(user, home="/home/%s" % user)
print (green("=================================================="))
print(red('created %s user' % user))
print (green("=================================================="))
def working_dir():
"""
    Create the working directory with the correct owner, group and mode.
"""
with mode_sudo():
if dir_exists(WORKING_DIR):
dir_remove(WORKING_DIR)
dir_ensure(WORKING_DIR, mode="755", owner=YOUTRACK_USER,
group=YOUTRACK_USER)
print (green("=================================================="))
print(red('created %s working directory' % WORKING_DIR))
print (green("=================================================="))
#===============================================================================
#install epel repository
def install_epel():
epel_link = 'http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm'
sudo('rpm -Uvh %s' % epel_link)
package_clean_yum()
package_update_yum()
print (green("=================================================="))
print (red("installed epel repository"))
print (green("=================================================="))
#===============================================================================
#install Development tools and nginx
def install_req():
sudo('yum groupinstall -y "Development tools" ; true')
package_ensure_yum('nginx')
#==============================================================================
#install jdk
def install_java():
java_link = 'http://www.reucon.com/cdn/java/jdk-7u51-linux-x64.tar.gz'
sudo('wget -O /tmp/jdk-7u51-linux-x64.tar.gz %s' % java_link)
with mode_sudo():
if dir_exists('/home/youtrack/jdk1.7.0_51'):
dir_remove('/home/youtrack/jdk1.7.0_51')
sudo('tar -xvzf /tmp/jdk-7u51-linux-x64.tar.gz -C /home/youtrack')
def write_daemon():
youtrack_deamon = """
#! /bin/sh
### BEGIN INIT INFO
# Provides: youtrack
# Required-Start: $local_fs $remote_fs
# Required-Stop: $local_fs $remote_fs
# Default-Start: 2 3 4 5
# Default-Stop: S 0 1 6
# Short-Description: initscript for youtrack
# Description: initscript for youtrack
### END INIT INFO
export HOME=/home/youtrack
set -e
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
NAME=youtrack
SCRIPT=/usr/local/$NAME/$NAME
d_start() {
su youtrack -l -c "$SCRIPT start"
}
d_stop() {
su youtrack -l -c "$SCRIPT stop"
}
case "$1" in
start)
echo "Starting $NAME..."
d_start
;;
stop)
echo "Stopping $NAME..."
d_stop
;;
restart|force-reload)
echo "Restarting $NAME..."
d_stop
d_start
;;
*)
echo "Usage: sudo /etc/init.d/youtrack {start|stop|restart}" >&2
exit 1
;;
esac
exit 0
"""
with mode_sudo():
file_write('/etc/init.d/youtrack', content=youtrack_deamon)
sudo("chmod +x /etc/init.d/youtrack")
sudo("chkconfig --level 2345 youtrack on")
def write_command_run():
command_run = """
#! /bin/sh
export HOME=/home/youtrack
export JAVA_HOME=/home/youtrack/jdk1.7.0_51
NAME=youtrack
PORT=8112
USR=/usr/local/$NAME
#JAR=$USR/`ls -Lt $USR/*.jar | grep -o "$NAME-[^/]*.jar" | head -1`
JAR="$USR/youtrack-6.0.12223.jar"
LOG=$USR/$NAME-$PORT.log
PID=$USR/$NAME-$PORT.pid
d_start() {
if [ -f $PID ]; then
PID_VALUE=`cat $PID`
if [ ! -z "$PID_VALUE" ]; then
PID_VALUE=`ps ax | grep $PID_VALUE | grep -v grep | awk '{print $1}'`
if [ ! -z "$PID_VALUE" ]; then
exit 1;
fi
fi
fi
PREV_DIR=`pwd`
cd $USR
exec $JAVA_HOME/bin/java -Xmx1g -XX:MaxPermSize=256m -Djava.awt.headless=true -jar $JAR $PORT >> $LOG 2>&1 &
echo $! > $PID
cd $PREV_DIR
}
d_stop() {
if [ -f $PID ]; then
PID_VALUE=`cat $PID`
if [ ! -z "$PID_VALUE" ]; then
PID_VALUE=`ps ax | grep $PID_VALUE | grep -v grep | awk '{print $1}'`
if [ ! -z "$PID_VALUE" ]; then
kill $PID_VALUE
WAIT_TIME=0
while [ `ps ax | grep $PID_VALUE | grep -v grep | wc -l` -ne 0 -a "$WAIT_TIME" -lt 2 ]
do
sleep 1
WAIT_TIME=$(expr $WAIT_TIME + 1)
done
if [ `ps ax | grep $PID_VALUE | grep -v grep | wc -l` -ne 0 ]; then
WAIT_TIME=0
while [ `ps ax | grep $PID_VALUE | grep -v grep | wc -l` -ne 0 -a "$WAIT_TIME" -lt 15 ]
do
sleep 1
WAIT_TIME=$(expr $WAIT_TIME + 1)
done
echo
fi
if [ `ps ax | grep $PID_VALUE | grep -v grep | wc -l` -ne 0 ]; then
kill -9 $PID_VALUE
fi
fi
fi
rm -f $PID
fi
}
case "$1" in
start)
d_start
;;
stop)
d_stop
;;
*)
echo "Usage: $0 {start|stop|restart}" >&2
exit 1
;;
esac
exit 0
"""
with mode_sudo():
file_write('%s/%s' % (WORKING_DIR, YOUTRACK_USER), content=command_run)
sudo('chown %s.%s %s/%s' % (YOUTRACK_USER, YOUTRACK_USER,
WORKING_DIR, YOUTRACK_USER))
sudo('chmod +x %s/%s' % (WORKING_DIR, YOUTRACK_USER))
def get_youtrack():
sudo('wget -O %s/%s %s' % (WORKING_DIR, YOUTRACK_NAME, YOUTRACK_LINK))
sudo('chown %s.%s %s/%s' % (YOUTRACK_USER, YOUTRACK_USER,
WORKING_DIR, YOUTRACK_NAME))
def nginx_config():
youtrack_site = '/etc/nginx/sites/youtrack.conf'
upstream_content = """
upstream youtrack {
server 127.0.0.1:8112;
}
"""
youtrack_content = """
server {
listen 80;
server_name youtrack.vnpid.com;
access_log /var/log/nginx/youtrack.vnpid.log;
keepalive_timeout 600s;
send_timeout 600s;
location / {
client_max_body_size 100M;
proxy_pass http://youtrack;
proxy_set_header X-Forwarded-Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_connect_timeout 600;
proxy_read_timeout 600;
proxy_send_timeout 600;
}
}
"""
check_yt = run('cat /etc/nginx/conf.d/upstreams.conf | grep youtrack; true')
with mode_sudo():
file_write(youtrack_site, youtrack_content)
if check_yt.find('youtrack') == -1:
file_append('/etc/nginx/conf.d/upstreams.conf', upstream_content)
sudo('/etc/init.d/nginx restart')
def start_youtrack():
sudo('/etc/init.d/youtrack restart')
def iptable_stop():
sudo('/etc/init.d/iptables stop')
def disable_selinux():
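    # Intentionally a stub: disabling SELinux is left as a manual step
    # (see the commented-out check below).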
pass
# command = run('cat /etc/selinux/config | grep "SELINUX=disabled"')
@with_settings(hide('running', 'commands', 'stdout', 'stderr'))
def deploy():
env.user = SUDO_USER
env.password = SUDO_PASS
iptable_stop()
disable_selinux()
user_setup(YOUTRACK_USER)
working_dir()
if sudo('ls -laht /etc/yum.repos.d/ | grep epel ; true').find('epel') != -1:
print (red("epel have already installed"))
else:
install_epel()
install_req()
get_youtrack()
install_java()
write_daemon()
write_command_run()
nginx_config()
start_youtrack()
|
mit
| 8,358,021,976,100,836,000 | 28.010309 | 113 | 0.525767 | false |
peterbrittain/asciimatics
|
samples/pacman.py
|
1
|
18400
|
#!/usr/bin/env python3
from copy import deepcopy
import sys
from asciimatics.exceptions import ResizeScreenError
from asciimatics.paths import Path
from asciimatics.renderers import StaticRenderer, ColourImageFile, FigletText
from asciimatics.scene import Scene
from asciimatics.screen import Screen
from asciimatics.effects import Print, Sprite, BannerText
namco = """
88888888b. 8888888b. 8888888888b. .d88888888 .d888888b.
88 88 88 88 88 88 88 88 88
88 88 .d88888888 88 88 88 88 88 88
88 88 88 88 88 88 88 88 88 88
88 88 `888888888 88 88 88 `888888888 `8888888P'
"""
dot = """${7,2,7}####
${7,2,7}####
"""
pac_man = """
{0}##########
{0}##################
{0}############${{7,2,7}} {0}######
{0}############${{4,2,0}} ${{7,2,7}} {0}######
{0}##########################
{0}##########################
{0}##########################
{0}##########################
{0}##########################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}############${{7,2,7}} {0}######
{0}############${{4,2,0}} ${{7,2,7}} {0}######
{0}##########################
{0}##########################
{0}############
{0}##########################
{0}##########################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}############${{7,2,7}} {0}######
{0}############${{4,2,0}} ${{7,2,7}} {0}######
{0}##########################
{0}####################
{0}############
{0}####################
{0}##########################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}############${{7,2,7}} {0}######
{0}############${{4,2,0}} ${{7,2,7}} {0}######
{0}####################
{0}################
{0}############
{0}################
{0}####################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}############${{7,2,7}} {0}######
{0}##########${{4,2,0}} ${{7,2,7}} {0}######
{0}##################
{0}##############
{0}############
{0}##############
{0}##################
{0}####################
{0}######################
{0}##################
{0}##########
"""
pac_man_right = """
{0}##########
{0}##################
{0}######${{7,2,7}} {0}############
{0}######${{7,2,7}} ${{4,2,0}} {0}############
{0}##########################
{0}##########################
{0}##########################
{0}##########################
{0}##########################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}######${{7,2,7}} {0}############
{0}######${{7,2,7}} ${{4,2,0}} {0}############
{0}##########################
{0}##########################
{0}############
{0}##########################
{0}##########################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}######${{7,2,7}} {0}############
{0}######${{7,2,7}} ${{4,2,0}} {0}############
{0}##########################
{0}####################
{0}############
{0}####################
{0}##########################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}######${{7,2,7}} {0}############
{0}######${{7,2,7}} ${{4,2,0}} {0}############
{0}####################
{0}################
{0}############
{0}################
{0}#####################
{0}######################
{0}######################
{0}##################
{0}##########
""", """
{0}##########
{0}##################
{0}######${{7,2,7}} {0}############
{0}######${{7,2,7}} ${{4,2,0}} {0}##########
{0}##################
{0}##############
{0}############
{0}##############
{0}##################
{0}####################
{0}######################
{0}##################
{0}##########
"""
ghost = """
{0}########
{0}################
{0}####################
{0}##${{7,2,7}}....{0}########${{7,2,7}}....{0}######
${{7,2,7}}........{0}####${{7,2,7}}........{0}####
${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}####
{0}##${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}######
{0}####${{7,2,7}}....{0}########${{7,2,7}}....{0}########
{0}############################
{0}############################
{0}##########################
{0}####${{7,2,0}} {0}########${{7,2,0}} {0}########
{0}##${{7,2,0}} {0}####${{7,2,0}} {0}####
""", """
{0}########
{0}################
{0}####################
{0}##${{7,2,7}}....{0}########${{7,2,7}}....{0}######
${{7,2,7}}........{0}####${{7,2,7}}........{0}####
${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}####
{0}##${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}######
{0}####${{7,2,7}}....{0}########${{7,2,7}}....{0}########
{0}############################
{0}############################
{0}############################
{0}######${{7,2,0}} {0}########${{7,2,0}} {0}########
{0}####${{7,2,0}} {0}####${{7,2,0}} {0}####
""", """
{0}########
{0}################
{0}####################
{0}##${{7,2,7}}....{0}########${{7,2,7}}....{0}######
${{7,2,7}}........{0}####${{7,2,7}}........{0}####
${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}####
{0}##${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}######
{0}####${{7,2,7}}....{0}########${{7,2,7}}....{0}########
{0}############################
{0}############################
{0}############################
{0}########${{7,2,0}} {0}########${{7,2,0}} {0}########
{0}####${{7,2,0}} {0}####${{7,2,0}} {0}####
""", """
{0}########
{0}################
{0}####################
{0}##${{7,2,7}}....{0}########${{7,2,7}}....{0}######
${{7,2,7}}........{0}####${{7,2,7}}........{0}####
${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}####
{0}##${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}######
{0}####${{7,2,7}}....{0}########${{7,2,7}}....{0}########
{0}############################
{0}############################
{0}############################
{0}########${{7,2,0}} {0}########${{7,2,0}} {0}######
{0}####${{7,2,0}} {0}####${{7,2,0}} {0}####
""", """
{0}########
{0}################
{0}####################
{0}##${{7,2,7}}....{0}########${{7,2,7}}....{0}######
${{7,2,7}}........{0}####${{7,2,7}}........{0}####
${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}####
{0}##${{4,2,4}} ${{7,2,7}}....{0}####${{4,2,4}} ${{7,2,7}}....{0}######
{0}####${{7,2,7}}....{0}########${{7,2,7}}....{0}########
{0}############################
{0}############################
{0}############################
{0}##${{7,2,0}} {0}########${{7,2,0}} {0}########${{7,2,0}} {0}####
{0}####${{7,2,0}} {0}####${{7,2,0}} {0}##
"""
scared_ghost = """
${4,2,4}########
${4,2,4}################
${4,2,4}####################
${4,2,4}########################
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}############################
${4,2,4}############################
${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####
${4,2,4}##${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}##
${4,2,4}############################
${4,2,4}####${7,2,0} ${4,2,4}########${7,2,0} ${4,2,4}########${7,2,0} ${4,2,4}##
${4,2,4}##${7,2,0} ${4,2,4}####${7,2,0} ${4,2,4}####
""", """
${4,2,4}########
${4,2,4}################
${4,2,4}####################
${4,2,4}########################
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}############################
${4,2,4}############################
${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####
${4,2,4}##${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}##
${4,2,4}############################
${4,2,4}##${7,2,0} ${4,2,4}########${7,2,0} ${4,2,4}########${7,2,0} ${4,2,4}####
${4,2,4}####${7,2,0} ${4,2,4}####${7,2,0} ${4,2,4}##
""", """
${4,2,4}########
${4,2,4}################
${4,2,4}####################
${4,2,4}########################
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}############################
${4,2,4}############################
${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####
${4,2,4}##${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}##
${4,2,4}############################
${4,2,4}########${7,2,0} ${4,2,4}########${7,2,0} ${4,2,4}######
${4,2,4}####${7,2,0} ${4,2,4}####${7,2,0} ${4,2,4}####
""", """
${4,2,4}########
${4,2,4}################
${4,2,4}####################
${4,2,4}########################
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}############################
${4,2,4}############################
${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####
${4,2,4}##${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}##
${4,2,4}############################
${4,2,4}########${7,2,0} ${4,2,4}########${7,2,0} ${4,2,4}########
${4,2,4}####${7,2,0} ${4,2,4}####${7,2,0} ${4,2,4}####
""", """
${4,2,4}########
${4,2,4}################
${4,2,4}####################
${4,2,4}########################
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}####${7,2,7} ${4,2,4}########${7,2,7} ${4,2,4}####
${4,2,4}############################
${4,2,4}############################
${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####
${4,2,4}##${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}####${7,2,7} ${4,2,4}##
${4,2,4}############################
${4,2,4}######${7,2,0} ${4,2,4}########${7,2,0} ${4,2,4}########
${4,2,4}####${7,2,0} ${4,2,4}####${7,2,0} ${4,2,4}####
"""
eyes = """
${4,2,4}####${4,2,0} ${4,2,4}####
${7,2,7}..${4,2,4}####${7,2,7}..${7,2,0} ${7,2,7}..${4,2,4}####${7,2,7}..
${7,2,7}........${7,2,0} ${7,2,7}........
${7,2,7}........${7,2,0} ${7,2,7}........
${7,2,7}....${7,2,0} ${7,2,7}....
"""
# Globals used for pacman animation
direction = 1
value = 0
def cycle():
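    # Bounce ``value`` between 0 and 4 so PacMan's mouth opens and closes.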
global value, direction
value += direction
if value <= 0 or value >= 4:
direction = -direction
return value
class PacMan(Sprite):
def __init__(self, screen, path, start_frame=0, stop_frame=0):
images = []
images_right = []
colour = Screen.COLOUR_YELLOW if screen.colours <= 16 else 11
for image in pac_man:
images.append(image.format("${%d,2,%d}" % (colour, colour)))
for image in pac_man_right:
images_right.append(image.format("${%d,2,%d}" % (colour, colour)))
super(PacMan, self).__init__(
screen,
renderer_dict={
"default": StaticRenderer(images=images, animation=cycle),
"left": StaticRenderer(images=images, animation=cycle),
"right": StaticRenderer(images=images_right, animation=cycle),
},
path=path,
start_frame=start_frame,
stop_frame=stop_frame)
def _update(self, frame_no):
super(PacMan, self)._update(frame_no)
for effect in self._scene.effects:
if isinstance(effect, ScaredGhost) and self.overlaps(effect):
effect.eaten()
class Ghost(Sprite):
def __init__(self, screen, path, colour=1, start_frame=0, stop_frame=0):
images = []
for image in ghost:
images.append(image.format("${%d,2,%d}" % (colour, colour)))
super(Ghost, self).__init__(
screen,
renderer_dict={
"default": StaticRenderer(images=images),
},
colour=colour,
path=path,
start_frame=start_frame,
stop_frame=stop_frame)
class ScaredGhost(Sprite):
def __init__(self, screen, path, start_frame=0, stop_frame=0):
super(ScaredGhost, self).__init__(
screen,
renderer_dict={
"default": StaticRenderer(images=scared_ghost),
},
colour=Screen.COLOUR_BLUE,
path=path,
start_frame=start_frame,
stop_frame=stop_frame)
self._eaten = False
def eaten(self):
# Already eaten - just ignore
if self._eaten:
return
# Allow one more iteration for this Sprite to clear itself up.
self._eaten = True
self._delete_count = 2
# Spawn the eyes to run away
path = Path()
path.jump_to(self._old_x + 12, self._old_y + 4)
path.move_straight_to(
self._old_x + 12, -8, (self._old_y + 12) // 2)
path.wait(10000)
self._scene.add_effect(Eyes(self._screen, path))
class Eyes(Sprite):
def __init__(self, screen, path, start_frame=0, stop_frame=0):
super(Eyes, self).__init__(
screen,
renderer_dict={
"default": StaticRenderer(images=[eyes]),
},
colour=Screen.COLOUR_BLUE,
path=path,
start_frame=start_frame,
stop_frame=stop_frame)
class EatingScene(Scene):
def __init__(self, screen):
super(EatingScene, self).__init__([], 240 + screen.width)
self._screen = screen
self._reset_count = 0
def reset(self, old_scene=None, screen=None):
super(EatingScene, self).reset(old_scene, screen)
# Recreate all the elements.
centre = (self._screen.width // 2, self._screen.height // 2)
path = Path()
path.jump_to(-16, centre[1])
path.move_straight_to(
self._screen.width + 16, centre[1], (self._screen.width + 16) // 3)
path.wait(100)
path2 = Path()
path2.jump_to(-16, centre[1])
path2.move_straight_to(
self._screen.width + 16, centre[1], self._screen.width + 16)
path2.wait(100)
# Take a copy of the list before using it to remove all effects.
for effect in self.effects[:]:
self.remove_effect(effect)
self.add_effect(
ScaredGhost(self._screen, deepcopy(path2)))
self.add_effect(
ScaredGhost(self._screen, deepcopy(path2), start_frame=60))
self.add_effect(
ScaredGhost(self._screen, deepcopy(path2), start_frame=120))
self.add_effect(
ScaredGhost(self._screen, deepcopy(path2), start_frame=180))
self.add_effect(PacMan(self._screen, path, start_frame=240))
def demo(screen):
scenes = []
centre = (screen.width // 2, screen.height // 2)
# Title
effects = [
BannerText(screen,
ColourImageFile(screen, "pacman.png", 16, 0, True),
(screen.height - 16) // 2,
Screen.COLOUR_WHITE),
Print(screen,
StaticRenderer(images=["A tribute to the classic 80's "
"video game by Namco."]),
screen.height - 1)
]
scenes.append(Scene(effects, 0))
# Scene 1 - run away, eating dots
path = Path()
path.jump_to(screen.width + 16, centre[1])
path.move_straight_to(-16, centre[1], (screen.width + 16) // 3)
path.wait(100)
if screen.colours <= 16:
inky = 6
pinky = 5
blinky = 1
clyde = 2
else:
inky = 14
pinky = 201
blinky = 9
clyde = 208
effects = [
PacMan(screen, path),
Ghost(screen, deepcopy(path), inky, start_frame=40),
Ghost(screen, deepcopy(path), pinky, start_frame=60),
Ghost(screen, deepcopy(path), blinky, start_frame=80),
Ghost(screen, deepcopy(path), clyde, start_frame=100),
]
for x in range(5, screen.width, 16):
effects.insert(0,
Print(screen,
StaticRenderer(images=[dot]),
screen.height // 2,
x=x,
speed=1,
stop_frame=4))
scenes.append(Scene(effects, 100 + screen.width))
# Scene 2 - Chase ghosts after a power pill
scenes.append(EatingScene(screen))
# Scene 3 - Thanks...
effects = [
Print(screen, FigletText("Thank you,"), screen.height // 3 - 3,
colour=Screen.COLOUR_RED),
Print(screen,
StaticRenderer(images=[namco]),
screen.height * 2 // 3 - 2,
colour=Screen.COLOUR_RED),
Print(screen,
StaticRenderer(images=["< Press X to exit. >"]),
screen.height - 1)
]
scenes.append(Scene(effects, 0))
screen.play(scenes, stop_on_resize=True, repeat=False)
if __name__ == "__main__":
while True:
try:
Screen.wrapper(demo)
sys.exit(0)
except ResizeScreenError:
pass
|
apache-2.0
| 8,174,484,442,454,301,000 | 33.200743 | 100 | 0.299946 | false |
oicr-ibc/riser
|
bin/run_analysis.py
|
1
|
3234
|
#!/usr/bin/python
import ConfigParser,os,sys
from subprocess import call
from glob import iglob
from shutil import move
from os.path import join
use_message = '''
Usage:
python run_analysis.py
'''
class Usage(Exception):
def __init__(self, msg):
self.msg = msg
def get_version():
return "1.2.0"
def move_files(src_glob, dst_folder):
for fname in iglob(src_glob):
move(fname, join(dst_folder, os.path.basename(fname)))
def ConfigSectionMap(section):
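    # Standard ConfigParser helper: return one section of the ini file as a
    # plain dict keyed by option name.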
dict1 = {}
options = Config.options(section)
for option in options:
try:
dict1[option] = Config.get(section, option)
if dict1[option] == -1:
                # ``DebugPrint`` was undefined in the original; fall back to print.
                print("skip: %s" % option)
except:
print("exception on %s!" % option)
dict1[option] = None
return dict1
def run_analysis(gbff_names,
aligners,
aligned_files,
project_dir,
cov_fold,
read_length):
path=os.getcwd().rstrip('bin')
for gbff_id,gbff_name in enumerate(gbff_names):
for al_id,al_name in enumerate(aligners):
path_sim_data=project_dir + '/' + gbff_name + '/' + 'simulation/simulated/' + cov_fold + 'x' + 'len' + read_length + '/fastq'
junction_file=project_dir + '/' + gbff_name + '/genome/simulated_junction_info.txt'
exec1 = path + "/scripts/run_analysis.sh"
alignedx=aligned_files[gbff_id] + '/' + al_name
call([exec1,path_sim_data,junction_file,alignedx,al_name,read_length])
if len(sys.argv) > 1:
if sys.argv[1] == "-v" or sys.argv[1] == "--version":
print "RiSER v%s" % (get_version())
exit(0)
Config = ConfigParser.ConfigParser()
#Config.read("../config/config.ini")
path=os.getcwd()
Config.read(path + "/config/config.ini")
if ConfigSectionMap("genbank")['name'].split(",")[0] != 'None' and ConfigSectionMap("custom")['name'].split(",")[0] != 'None':
gbff_names=ConfigSectionMap("genbank")['name'].split(",") + ConfigSectionMap("custom")['name'].split(",")
elif ConfigSectionMap("genbank")['name'].split(",")[0] == 'None':
gbff_names=ConfigSectionMap("custom")['name'].split(",")
elif ConfigSectionMap("custom")['name'].split(",")[0] == 'None':
gbff_names=ConfigSectionMap("genbank")['name'].split(",")
aligners=ConfigSectionMap("aligners")['name'].split(",")
aligned_files=ConfigSectionMap("aligners")['aligned_files'].split(",")
if len(gbff_names) != len(aligned_files):
print 'Error Exiting: the # "names" in [genbank] differs from the # "aligned_files" in [aligners].'
exit(0)
project_dir=ConfigSectionMap("project_dir")['location'] + '/' + ConfigSectionMap("project_dir")['name']
read_length=ConfigSectionMap("simulator")['read_length']
cov_fold=ConfigSectionMap("simulator")['fold_coverage']
print "RiSER v%s" % (get_version())
print "-------------------------------------------------------------------------------------------------------"
print "Analysis run."
print "-------------------------------------------------------------------------------------------------------"
run_analysis(gbff_names,aligners,aligned_files,project_dir,cov_fold,read_length)
|
gpl-3.0
| 6,044,638,009,739,695,000 | 33.404255 | 137 | 0.579159 | false |
ska-sa/montblanc
|
montblanc/tests/meqtrees/turbo-sim.py
|
1
|
8704
|
# -*- coding: utf-8 -*-
#% $Id$
#
#
# Copyright (C) 2002-2007
# The MeqTree Foundation &
# ASTRON (Netherlands Foundation for Research in Astronomy)
# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>,
# or write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# standard preamble
from Timba.TDL import *
from Timba.Meq import meq
import math
import random
import Meow
import Meow.StdTrees
# MS options first
mssel = Meow.Context.mssel = Meow.MSUtils.MSSelector(has_input=False,has_model=False,tile_sizes=[8,16,32],flags=False);
# MS compile-time options
TDLCompileOptions(*mssel.compile_options());
TDLCompileOption("run_purr","Start Purr on this MS",False);
# MS run-time options
TDLRuntimeOptions(*mssel.runtime_options());
## also possible:
# TDLRuntimeMenu("MS selection options",open=True,*mssel.runtime_options());
# UVW
TDLCompileOptions(*Meow.IfrArray.compile_options());
# simulation mode menu
SIM_ONLY = "sim only";
ADD_MS = "add to MS";
SUB_MS = "subtract from MS";
simmode_opt = TDLCompileOption("sim_mode","Simulation mode",[SIM_ONLY,ADD_MS,SUB_MS]);
simmode_opt.when_changed(lambda mode:mssel.enable_input_column(mode!=SIM_ONLY));
model_opt = TDLCompileOption("read_ms_model","Read additional uv-model visibilities from MS",False,doc="""
<P>If enabled, then an extra set of model visibilities will be read from a column
of the MS, and added to whatever is predicted by the sky model <i>in the uv-plane</i> (i.e. subject to uv-Jones but not sky-Jones corruptions).</P>
""");
model_opt.when_changed(mssel.enable_model_column);
# now load optional modules for the ME maker
from Meow import TensorMeqMaker
meqmaker = TensorMeqMaker.TensorMeqMaker();
# specify available sky models
# these will show up in the menu automatically
from Siamese.OMS import gridded_sky
from Siamese.AGW import azel_sky
from Siamese.OMS import transient_sky
from Siamese.OMS import fitsimage_sky
## OMS: time to retire this one
#import Meow.LSM
#lsm = Meow.LSM.MeowLSM(include_options=False);
models = [ gridded_sky,azel_sky,transient_sky,fitsimage_sky]; # ,lsm ];
try:
from Siamese.OMS.tigger_lsm import TiggerSkyModel
models.insert(0,TiggerSkyModel());
except ImportError:
print('Failure to import TiggerSkyModel module')
print('Is the location of Tigger defined in your PYTHONPATH environment variable?')
pass;
meqmaker.add_sky_models(models);
# now add optional Jones terms
# these will show up in the menu automatically
# Ncorr - correct for N
from Siamese.OMS import oms_n_inverse
meqmaker.add_sky_jones('Ncorr','n-term correction',oms_n_inverse);
# Z - ionosphere
from Lions import ZJones
from Siamese.OMS import oms_ionosphere,oms_ionosphere2
meqmaker.add_sky_jones('Z','ionosphere',[oms_ionosphere,oms_ionosphere2,ZJones.ZJones()]);
# P - Parallactic angle or dipole projection
from Siamese.OMS.rotation import Rotation
from Siamese.OMS import oms_dipole_projection
meqmaker.add_sky_jones('L','parallactic angle or dipole rotation',[Rotation('L',feed_angle=False),oms_dipole_projection]);
# E - beam
from Siamese.OMS import analytic_beams
from Siamese.OMS import fits_beams0
from Siamese.OMS import pybeams_fits
from Siamese.OMS.emss_beams import emss_polar_beams
from Siamese.OMS import paf_beams
## OMS: retiring this one: from Siamese.OMS import wsrt_beams
from Siamese.OMS import vla_beams
from Siamese.SBY import lofar_beams
from Siamese.OMS import oms_pointing_errors
meqmaker.add_sky_jones('E','beam',[analytic_beams,pybeams_fits,emss_polar_beams,paf_beams,fits_beams0,vla_beams,lofar_beams],
pointing=oms_pointing_errors);
# P - Parallactic angle
meqmaker.add_uv_jones('P','feed angle',Rotation('P'));
# G - gains
from Siamese.OMS import oms_gain_models
meqmaker.add_uv_jones('G','gains/phases',oms_gain_models);
# very important -- insert meqmaker's options properly
TDLCompileOptions(*meqmaker.compile_options());
# noise option
_noise_option = TDLOption("noise_stddev","Add noise, Jy per visibility",[None,1e-6,1e-3],more=float);
_sefd_options = [
TDLOption("noise_sefd","SEFD, Jy",0,more=float),
TDLOption("noise_sefd_bw_khz","Channel width, kHz",4,more=float),
TDLOption("noise_sefd_integration","Integration, s",60,more=float),
];
_sefd_menu = TDLMenu("Compute from SEFD",toggle="noise_from_sefd",
doc="""To compute per-visibility noise from the system equivalent flux density, enable this option,
and enter correct values for SEFD (per antenna), channel width and integration time in the fields below.
The formula actually used is sigma = SEFD/sqrt(2*bandwidth*integration).
""",
*_sefd_options);
TDLCompileMenu("Add noise",
_noise_option,
_sefd_menu);
def _recompute_noise (dum):
if noise_from_sefd:
    # sigma = SEFD/sqrt(2*bandwidth*integration), matching the menu doc above
    _noise_option.set_value(noise_sefd/math.sqrt(2*noise_sefd_bw_khz*1e+3*noise_sefd_integration));
for opt in _sefd_options + [_sefd_menu]:
opt.when_changed(_recompute_noise);
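# Worked example (made-up numbers): SEFD = 450 Jy, 64 kHz channels and 60 s
# integrations give sigma = 450/sqrt(2*64e3*60) ~= 0.16 Jy per visibility.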
TDLCompileOption("random_seed","Random generator seed",["time",0],more=int,
doc="""<P>To get a reproducible distribution for noise (and other "random" errors), supply a fixed seed value
here. The default setting of "time" uses the current time to seed the generator, so the distribution
is different upon every run.</P>""");
# MPI options
# from Meow import Parallelization
# TDLCompileOptions(*Parallelization.compile_options());
def _define_forest (ns):
random.seed(random_seed if isinstance(random_seed,int) else None);
if not mssel.msname:
raise RuntimeError("MS not set up in compile-time options");
if run_purr:
print((mssel.msname));
import os.path
purrlog = os.path.normpath(mssel.msname)+".purrlog";
Timba.TDL.GUI.purr(purrlog,[mssel.msname,'.']);
# setup contexts properly
array,observation = mssel.setup_observation_context(ns);
# setup imaging options (now that we have an imaging size set up)
imsel = mssel.imaging_selector(npix=512,arcmin=meqmaker.estimate_image_size());
TDLRuntimeMenu("Imaging options",*imsel.option_list());
# reading in model?
if read_ms_model:
model_spigots = array.spigots(column="PREDICT",corr=mssel.get_corr_index());
meqmaker.make_per_ifr_bookmarks(model_spigots,"UV-model visibilities");
else:
model_spigots = None;
# get a predict tree from the MeqMaker
output = meqmaker.make_predict_tree(ns,uvdata=model_spigots);
# throw in a bit of noise
if noise_stddev:
noisedef = Meq.GaussNoise(stddev=noise_stddev,dims=[2,2],complex=True)
for p,q in array.ifrs():
ns.noisy_predict(p,q) << output(p,q) + ( ns.noise(p,q)<<noisedef );
output = ns.noisy_predict;
# in add or subtract sim mode, make some spigots and add/subtract visibilities
if sim_mode == ADD_MS:
spigots = array.spigots(corr=mssel.get_corr_index());
for p,q in array.ifrs():
ns.sum(p,q) << output(p,q) + spigots(p,q);
output = ns.sum;
elif sim_mode == SUB_MS:
spigots = array.spigots(corr=mssel.get_corr_index());
for p,q in array.ifrs():
ns.diff(p,q) << spigots(p,q) - output(p,q);
output = ns.diff;
else:
spigots = False;
meqmaker.make_per_ifr_bookmarks(output,"Output visibilities");
# make sinks and vdm.
# The list of inspectors comes in handy here
Meow.StdTrees.make_sinks(ns,output,spigots=spigots,post=meqmaker.get_inspectors(),corr_index=mssel.get_corr_index());
# very important -- insert meqmaker's options properly
TDLRuntimeOptions(*meqmaker.runtime_options());
TDLRuntimeJob(_tdl_job_1_simulate_MS,"Run simulation",job_id="simulate");
# close the meqmaker. This produces annotations, etc.
meqmaker.close();
def _tdl_job_1_simulate_MS (mqs,parent,wait=False):
mqs.clearcache('VisDataMux');
mqs.execute('VisDataMux',mssel.create_io_request(),wait=wait);
# this is a useful thing to have at the bottom of the script, it allows us to check the tree for consistency
# simply by running 'python script.tdl'
if __name__ == '__main__':
ns = NodeScope();
_define_forest(ns);
# resolves nodes
ns.Resolve();
print((len(ns.AllNodes()),'nodes defined'));
|
gpl-2.0
| 2,129,762,275,439,893,000 | 35.41841 | 149 | 0.729779 | false |
vthorsteinsson/tensor2tensor
|
tensor2tensor/utils/diet.py
|
1
|
12143
|
# coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Diet variables are much more memory-efficient than regular variables.
Using diet variables, we can reduce memory overhead per parameter from
16 bytes to 2 bytes, allowing for up to 4B parameters per GPU.
Functions that build subgraphs with variables can be made to use diet variables
by using the fn_with_diet_vars decorator.
"""
from collections import defaultdict
import copy
import math
from tensor2tensor.layers import common_layers
import tensorflow as tf
def diet_adam_optimizer_params():
"""Default hyperparameters for a DietAdamOptimizer.
Returns:
a hyperparameters object.
"""
return tf.contrib.training.HParams(
quantize=True, # use 16-bit fixed-point
quantization_scale=10.0 / tf.int16.max,
optimizer="DietAdam",
learning_rate=1.0,
learning_rate_warmup_steps=2000,
learning_rate_decay_scheme="noam", # "noam" or "none"
epsilon=1e-10,
beta1=0.0, # we can save memory if beta1=0
beta2=0.98,
factored_second_moment_accumulator=True, # this saves memory
)
def diet_expert(x, hidden_size, params):
"""A two-layer feed-forward network with relu activation on hidden layer.
Uses diet variables.
Recomputes hidden layer on backprop to save activation memory.
Args:
x: a Tensor with shape [batch, io_size]
hidden_size: an integer
params: a diet variable HParams object.
Returns:
a Tensor with shape [batch, io_size]
"""
@fn_with_diet_vars(params)
def diet_expert_internal(x):
dim = x.get_shape().as_list()[-1]
h = tf.layers.dense(x, hidden_size, activation=tf.nn.relu, use_bias=False)
y = tf.layers.dense(h, dim, use_bias=False)
y *= tf.rsqrt(tf.to_float(dim * hidden_size))
return y
return diet_expert_internal(x)
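# Example call (hypothetical shapes): run a [batch, 1024] input through a
# 4096-unit hidden layer using the default diet hyperparameters:
#
#   params = diet_adam_optimizer_params()
#   y = diet_expert(x, 4096, params)  # y: [batch, 1024]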
class DietVariableOptimizer(object):
"""Base class for Diet variable optimizers."""
def __init__(self, params):
self._params = params
self._global_step = tf.train.get_or_create_global_step()
@property
def params(self):
return self._params
@property
def global_step(self):
return self._global_step
def create_slots(self, var):
raise NotImplementedError()
def update_variable(self, var, grad_var):
raise NotImplementedError()
class DietAdamOptimizer(DietVariableOptimizer):
"""A memory efficient optimizer for memory-efficient variables.
We employ the following techniques:
- 16-bit fixed-point quantization
- inline updates during backprop, instead of through the optimizer. This
keeps the gradients from staying around in memory.
- momentum is optional - saves a slot if it is off (beta1=0.0).
- "factored second-moment accumulator"
(keep row-wise and col-wise averages instead of full accumulator)
- tighter control over operation ordering to make sure that only a small
portion of the decompressed variables and of the variable gradients
are resident in memory at any given time.
All together these techniques reduce the memory footprint per parameter to
a little over 2 bytes, allowing for roughly 4B parameters per GPU. This is
roughly an 8x improvement over the naive version.
Usage:
    Diet variables are created through the custom variable getter installed
    by fn_with_diet_vars() (see make_diet_var_getter() below). The resulting
    variables are kept in quantized form, so _dequantize(var, params) must
    be called to recover the value.
    The variables are created with trainable=False, so that they will
    not be optimized by an ordinary optimizer. Instead, the optimizer's
    update_variable(var, grad) is applied inline
    during backprop. The reason for this inline update is to
avoid keeping around the gradients for all variables at once. This
is done with the clever use of defuns and control dependencies. See
diet_expert() for an example of how all of this is done.
To facilitate fixed-point quantization and to make it easier to
choose a learning rate, all variables are initialized with unit
normal initialization. If you want smaller values, downscale on the
outside.
"""
def create_slots(self, var):
"""Create the factorized Adam accumulators for diet variables."""
params = self.params
shape = var.get_shape().as_list()
if not hasattr(params, "slots"):
params.slots = defaultdict(dict)
name = var.op.name
slots = params.slots[name]
if params.factored_second_moment_accumulator and len(shape) == 2:
slots["adam_vr"] = tf.get_variable(
name + "_adam_vr", [shape[0], 1],
trainable=False,
initializer=tf.zeros_initializer())
slots["adam_vc"] = tf.get_variable(
name + "_adam_vc", [1, shape[1]],
trainable=False,
initializer=tf.zeros_initializer())
else:
slots["adam_v"] = tf.get_variable(
name + "_adam_v",
shape,
trainable=False,
initializer=tf.zeros_initializer())
if params.beta1 != 0.0:
slots["adam_m"] = tf.get_variable(
name + "_adam_m",
shape,
trainable=False,
initializer=tf.zeros_initializer())
def update_variable(self, var, grad_var):
"""Update the variable and its slots."""
params = self.params
global_step = tf.to_float(self.global_step) + 1
# compute learning rate
lrate = params.learning_rate
if params.learning_rate_decay_scheme == "noam":
lrate *= tf.minimum(global_step * params.learning_rate_warmup_steps**-1.5,
global_step**-0.5)
else:
assert params.learning_rate_decay_scheme == "none"
lrate *= tf.minimum(global_step / params.learning_rate_warmup_steps, 1.0)
# compute adjustment due to second moment
slots = params.slots[var.op.name]
grad_squared = tf.square(grad_var)
beta2_pow = tf.pow(params.beta2, global_step)
if params.factored_second_moment_accumulator and len(var.shape) == 2:
vr_update = tf.assign(slots["adam_vr"], slots["adam_vr"] * params.beta2 +
tf.reduce_mean(grad_squared, 1, keepdims=True) *
(1.0 - params.beta2))
vc_update = tf.assign(slots["adam_vc"], slots["adam_vc"] * params.beta2 +
tf.reduce_mean(grad_squared, 0, keepdims=True) *
(1.0 - params.beta2))
with tf.control_dependencies([vr_update, vc_update]):
vr = tf.sqrt(slots["adam_vr"] / (1.0 - beta2_pow)) + params.epsilon
vc = tf.sqrt(slots["adam_vc"] / (1.0 - beta2_pow)) + params.epsilon
vc /= tf.reduce_mean(vc)
denom = vr * vc
else:
v_update = tf.assign(slots["adam_v"],
slots["adam_v"] * params.beta2 + grad_squared *
(1.0 - params.beta2))
with tf.control_dependencies([v_update]):
denom = tf.sqrt(slots["adam_v"] / (1.0 - beta2_pow)) + params.epsilon
# compute momentum if applicable
if params.beta1 != 0.0:
m_update = tf.assign(slots["adam_m"],
slots["adam_m"] * params.beta1 + grad_var *
(1.0 - params.beta1))
with tf.control_dependencies([m_update]):
grad_var = slots["adam_m"]
# update var
subtrahend = lrate * grad_var / denom
new_val = _quantize(_dequantize(var, params) - subtrahend, params)
return tf.assign(var, new_val)
def _create_diet_optimizer(params):
if params.optimizer == "DietAdam":
return DietAdamOptimizer(params)
else:
raise ValueError("Unrecognized diet optimizer")
def _quantize(x, params, randomize=True):
"""Quantize x according to params, optionally randomizing the rounding."""
if not params.quantize:
return x
if not randomize:
return tf.bitcast(
tf.cast(x / params.quantization_scale, tf.int16), tf.float16)
abs_x = tf.abs(x)
sign_x = tf.sign(x)
y = abs_x / params.quantization_scale
y = tf.floor(y + tf.random_uniform(common_layers.shape_list(x)))
y = tf.minimum(y, tf.int16.max) * sign_x
q = tf.bitcast(tf.cast(y, tf.int16), tf.float16)
return q
def _dequantize(q, params):
"""Dequantize q according to params."""
if not params.quantize:
return q
return tf.to_float(tf.bitcast(q, tf.int16)) * params.quantization_scale
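# Worked round-trip example with the default quantization_scale of
# 10.0/32767 ~= 3.05e-4: x = 1.0 becomes y = x/scale ~= 3276.7, which the
# stochastic floor rounds to 3276 or 3277; dequantizing gives ~0.99979 or
# ~1.00009, i.e. 2 bytes per parameter at ~3e-4 absolute resolution.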
def make_diet_var_getter(params):
"""Create a custom variable getter for diet variables according to params."""
def diet_var_initializer(shape, dtype, partition_info=None):
"""Initializer for a diet variable."""
del dtype
del partition_info
with common_layers.fn_device_dependency("diet_init") as out_deps:
float_range = math.sqrt(3)
ret = tf.random_uniform(shape, -float_range, float_range)
if params.quantize:
ret = _quantize(ret, params, randomize=False)
out_deps.append(ret)
return ret
def diet_var_getter(getter, **kwargs):
"""Get diet variable and return it dequantized."""
if params.quantize:
kwargs["dtype"] = tf.float16
kwargs["initializer"] = diet_var_initializer
kwargs["trainable"] = False
base_var = getter(**kwargs)
dequantized = _dequantize(base_var, params)
if not hasattr(params, "dequantized"):
params.dequantized = defaultdict(list)
params.dequantized[base_var.name].append(dequantized)
return dequantized
return diet_var_getter
def _fn_with_diet_vars(fn, args, params):
"""Call function with args; use diet variables according to params."""
vs_ctr = []
def grad_fn(inputs, variables, outputs, output_grads):
"""Custom gradient function."""
del outputs # recomputing below
with common_layers.fn_device_dependency("diet_grad",
output_grads[0].device) as out_dep:
with tf.variable_scope(vs_ctr[0], reuse=True):
outputs = fn(*inputs)
variables = [common_layers.underlying_variable_ref(v) for v in variables]
dequantized_variables = [
params.dequantized[v.name][-1] for v in variables
]
grads = tf.gradients(outputs, inputs + dequantized_variables,
output_grads)
grad_inputs = grads[:len(inputs)]
grad_variables = grads[len(inputs):]
opt = _create_diet_optimizer(params)
      # Apply the variable gradients inline via the diet optimizer.
var_updates = []
for v, dv in zip(variables, grad_variables):
with tf.variable_scope(vs_ctr[0].name):
opt.create_slots(v)
update_op = opt.update_variable(v, dv)
var_updates.append(update_op)
with tf.control_dependencies(var_updates):
grad_inputs = [tf.identity(dx) for dx in grad_inputs]
out_dep.append(grad_inputs)
return grad_inputs, [None] * len(variables)
@common_layers.fn_with_custom_grad(grad_fn, use_global_vars=True)
def forward(*inputs):
with tf.variable_scope(
None, default_name="diet",
custom_getter=make_diet_var_getter(params)) as vs:
vs_ctr.append(vs)
outputs = fn(*inputs)
return outputs
with common_layers.fn_device_dependency("diet_forward",
args[0].device) as out_dep:
outputs = forward(*args)
out_dep.append(outputs)
return outputs
def fn_with_diet_vars(params):
"""Decorator for graph-building function to use diet variables."""
params = copy.copy(params)
def dec(fn):
def wrapped(*args):
return _fn_with_diet_vars(fn, args, params)
return wrapped
return dec
|
apache-2.0
| 4,098,083,250,670,264,300 | 32.637119 | 80 | 0.659722 | false |
chromium/chromium
|
third_party/protobuf/python/google/protobuf/internal/json_format_test.py
|
9
|
51094
|
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for google.protobuf.json_format."""
__author__ = 'jieluo@google.com (Jie Luo)'
import json
import math
import struct
import sys
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import any_pb2
from google.protobuf import duration_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import wrappers_pb2
from google.protobuf import any_test_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import test_proto3_optional_pb2
from google.protobuf import descriptor_pool
from google.protobuf import json_format
from google.protobuf.util import json_format_pb2
from google.protobuf.util import json_format_proto3_pb2
class JsonFormatBase(unittest.TestCase):
def FillAllFields(self, message):
message.int32_value = 20
message.int64_value = -20
message.uint32_value = 3120987654
message.uint64_value = 12345678900
message.float_value = float('-inf')
message.double_value = 3.1415
message.bool_value = True
message.string_value = 'foo'
message.bytes_value = b'bar'
message.message_value.value = 10
message.enum_value = json_format_proto3_pb2.BAR
# Repeated
message.repeated_int32_value.append(0x7FFFFFFF)
message.repeated_int32_value.append(-2147483648)
message.repeated_int64_value.append(9007199254740992)
message.repeated_int64_value.append(-9007199254740992)
message.repeated_uint32_value.append(0xFFFFFFF)
message.repeated_uint32_value.append(0x7FFFFFF)
message.repeated_uint64_value.append(9007199254740992)
message.repeated_uint64_value.append(9007199254740991)
message.repeated_float_value.append(0)
message.repeated_double_value.append(1E-15)
message.repeated_double_value.append(float('inf'))
message.repeated_bool_value.append(True)
message.repeated_bool_value.append(False)
message.repeated_string_value.append('Few symbols!#$,;')
message.repeated_string_value.append('bar')
message.repeated_bytes_value.append(b'foo')
message.repeated_bytes_value.append(b'bar')
message.repeated_message_value.add().value = 10
message.repeated_message_value.add().value = 11
message.repeated_enum_value.append(json_format_proto3_pb2.FOO)
message.repeated_enum_value.append(json_format_proto3_pb2.BAR)
self.message = message
def CheckParseBack(self, message, parsed_message):
json_format.Parse(json_format.MessageToJson(message),
parsed_message)
self.assertEqual(message, parsed_message)
def CheckError(self, text, error_message):
message = json_format_proto3_pb2.TestMessage()
self.assertRaisesRegexp(
json_format.ParseError,
error_message,
json_format.Parse, text, message)
class JsonFormatTest(JsonFormatBase):
def testEmptyMessageToJson(self):
message = json_format_proto3_pb2.TestMessage()
self.assertEqual(json_format.MessageToJson(message),
'{}')
parsed_message = json_format_proto3_pb2.TestMessage()
self.CheckParseBack(message, parsed_message)
def testPartialMessageToJson(self):
message = json_format_proto3_pb2.TestMessage(
string_value='test',
repeated_int32_value=[89, 4])
self.assertEqual(json.loads(json_format.MessageToJson(message)),
json.loads('{"stringValue": "test", '
'"repeatedInt32Value": [89, 4]}'))
parsed_message = json_format_proto3_pb2.TestMessage()
self.CheckParseBack(message, parsed_message)
def testAllFieldsToJson(self):
message = json_format_proto3_pb2.TestMessage()
text = ('{"int32Value": 20, '
'"int64Value": "-20", '
'"uint32Value": 3120987654,'
'"uint64Value": "12345678900",'
'"floatValue": "-Infinity",'
'"doubleValue": 3.1415,'
'"boolValue": true,'
'"stringValue": "foo",'
'"bytesValue": "YmFy",'
'"messageValue": {"value": 10},'
'"enumValue": "BAR",'
'"repeatedInt32Value": [2147483647, -2147483648],'
'"repeatedInt64Value": ["9007199254740992", "-9007199254740992"],'
'"repeatedUint32Value": [268435455, 134217727],'
'"repeatedUint64Value": ["9007199254740992", "9007199254740991"],'
'"repeatedFloatValue": [0],'
'"repeatedDoubleValue": [1e-15, "Infinity"],'
'"repeatedBoolValue": [true, false],'
'"repeatedStringValue": ["Few symbols!#$,;", "bar"],'
'"repeatedBytesValue": ["Zm9v", "YmFy"],'
'"repeatedMessageValue": [{"value": 10}, {"value": 11}],'
'"repeatedEnumValue": ["FOO", "BAR"]'
'}')
self.FillAllFields(message)
self.assertEqual(
json.loads(json_format.MessageToJson(message)),
json.loads(text))
parsed_message = json_format_proto3_pb2.TestMessage()
json_format.Parse(text, parsed_message)
self.assertEqual(message, parsed_message)
def testUnknownEnumToJsonAndBack(self):
text = '{\n "enumValue": 999\n}'
message = json_format_proto3_pb2.TestMessage()
message.enum_value = 999
self.assertEqual(json_format.MessageToJson(message),
text)
parsed_message = json_format_proto3_pb2.TestMessage()
json_format.Parse(text, parsed_message)
self.assertEqual(message, parsed_message)
def testExtensionToJsonAndBack(self):
message = unittest_mset_pb2.TestMessageSetContainer()
ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
message.message_set.Extensions[ext1].i = 23
message.message_set.Extensions[ext2].str = 'foo'
message_text = json_format.MessageToJson(
message
)
parsed_message = unittest_mset_pb2.TestMessageSetContainer()
json_format.Parse(message_text, parsed_message)
self.assertEqual(message, parsed_message)
def testExtensionErrors(self):
self.CheckError('{"[extensionField]": {}}',
'Message type proto3.TestMessage does not have extensions')
def testExtensionToDictAndBack(self):
message = unittest_mset_pb2.TestMessageSetContainer()
ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
message.message_set.Extensions[ext1].i = 23
message.message_set.Extensions[ext2].str = 'foo'
message_dict = json_format.MessageToDict(
message
)
parsed_message = unittest_mset_pb2.TestMessageSetContainer()
json_format.ParseDict(message_dict, parsed_message)
self.assertEqual(message, parsed_message)
def testExtensionToDictAndBackWithScalar(self):
message = unittest_pb2.TestAllExtensions()
ext1 = unittest_pb2.TestNestedExtension.test
message.Extensions[ext1] = 'data'
message_dict = json_format.MessageToDict(
message
)
parsed_message = unittest_pb2.TestAllExtensions()
json_format.ParseDict(message_dict, parsed_message)
self.assertEqual(message, parsed_message)
def testJsonParseDictToAnyDoesNotAlterInput(self):
orig_dict = {
'int32Value': 20,
'@type': 'type.googleapis.com/proto3.TestMessage'
}
copied_dict = json.loads(json.dumps(orig_dict))
parsed_message = any_pb2.Any()
json_format.ParseDict(copied_dict, parsed_message)
self.assertEqual(copied_dict, orig_dict)
def testExtensionSerializationDictMatchesProto3Spec(self):
"""See go/proto3-json-spec for spec.
"""
message = unittest_mset_pb2.TestMessageSetContainer()
ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
message.message_set.Extensions[ext1].i = 23
message.message_set.Extensions[ext2].str = 'foo'
message_dict = json_format.MessageToDict(
message
)
golden_dict = {
'messageSet': {
'[protobuf_unittest.'
'TestMessageSetExtension1.message_set_extension]': {
'i': 23,
},
'[protobuf_unittest.'
'TestMessageSetExtension2.message_set_extension]': {
'str': u'foo',
},
},
}
self.assertEqual(golden_dict, message_dict)
parsed_msg = unittest_mset_pb2.TestMessageSetContainer()
json_format.ParseDict(golden_dict, parsed_msg)
self.assertEqual(message, parsed_msg)
def testExtensionSerializationDictMatchesProto3SpecMore(self):
"""See go/proto3-json-spec for spec.
"""
message = json_format_pb2.TestMessageWithExtension()
ext = json_format_pb2.TestExtension.ext
message.Extensions[ext].value = 'stuff'
message_dict = json_format.MessageToDict(
message
)
expected_dict = {
'[protobuf_unittest.TestExtension.ext]': {
'value': u'stuff',
},
}
self.assertEqual(expected_dict, message_dict)
def testExtensionSerializationJsonMatchesProto3Spec(self):
"""See go/proto3-json-spec for spec.
"""
message = unittest_mset_pb2.TestMessageSetContainer()
ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
message.message_set.Extensions[ext1].i = 23
message.message_set.Extensions[ext2].str = 'foo'
message_text = json_format.MessageToJson(
message
)
ext1_text = ('protobuf_unittest.TestMessageSetExtension1.'
'message_set_extension')
ext2_text = ('protobuf_unittest.TestMessageSetExtension2.'
'message_set_extension')
golden_text = ('{"messageSet": {'
' "[%s]": {'
' "i": 23'
' },'
' "[%s]": {'
' "str": "foo"'
' }'
'}}') % (ext1_text, ext2_text)
self.assertEqual(json.loads(golden_text), json.loads(message_text))
def testJsonEscapeString(self):
message = json_format_proto3_pb2.TestMessage()
if sys.version_info[0] < 3:
message.string_value = '&\n<\"\r>\b\t\f\\\001/\xe2\x80\xa8\xe2\x80\xa9'
else:
message.string_value = '&\n<\"\r>\b\t\f\\\001/'
message.string_value += (b'\xe2\x80\xa8\xe2\x80\xa9').decode('utf-8')
self.assertEqual(
json_format.MessageToJson(message),
'{\n "stringValue": '
'"&\\n<\\\"\\r>\\b\\t\\f\\\\\\u0001/\\u2028\\u2029"\n}')
parsed_message = json_format_proto3_pb2.TestMessage()
self.CheckParseBack(message, parsed_message)
text = u'{"int32Value": "\u0031"}'
json_format.Parse(text, message)
self.assertEqual(message.int32_value, 1)
  def testAlwaysSerialize(self):
message = json_format_proto3_pb2.TestMessage(
string_value='foo')
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads('{'
'"repeatedStringValue": [],'
'"stringValue": "foo",'
'"repeatedBoolValue": [],'
'"repeatedUint32Value": [],'
'"repeatedInt32Value": [],'
'"enumValue": "FOO",'
'"int32Value": 0,'
'"floatValue": 0,'
'"int64Value": "0",'
'"uint32Value": 0,'
'"repeatedBytesValue": [],'
'"repeatedUint64Value": [],'
'"repeatedDoubleValue": [],'
'"bytesValue": "",'
'"boolValue": false,'
'"repeatedEnumValue": [],'
'"uint64Value": "0",'
'"doubleValue": 0,'
'"repeatedFloatValue": [],'
'"repeatedInt64Value": [],'
'"repeatedMessageValue": []}'))
parsed_message = json_format_proto3_pb2.TestMessage()
self.CheckParseBack(message, parsed_message)
def testProto3Optional(self):
message = test_proto3_optional_pb2.TestProto3Optional()
self.assertEqual(
json.loads(
json_format.MessageToJson(
message, including_default_value_fields=True)),
json.loads('{}'))
message.optional_int32 = 0
self.assertEqual(
json.loads(
json_format.MessageToJson(
message, including_default_value_fields=True)),
json.loads('{"optionalInt32": 0}'))
def testIntegersRepresentedAsFloat(self):
message = json_format_proto3_pb2.TestMessage()
json_format.Parse('{"int32Value": -2.147483648e9}', message)
self.assertEqual(message.int32_value, -2147483648)
json_format.Parse('{"int32Value": 1e5}', message)
self.assertEqual(message.int32_value, 100000)
json_format.Parse('{"int32Value": 1.0}', message)
self.assertEqual(message.int32_value, 1)
def testMapFields(self):
message = json_format_proto3_pb2.TestNestedMap()
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads('{'
'"boolMap": {},'
'"int32Map": {},'
'"int64Map": {},'
'"uint32Map": {},'
'"uint64Map": {},'
'"stringMap": {},'
'"mapMap": {}'
'}'))
message.bool_map[True] = 1
message.bool_map[False] = 2
message.int32_map[1] = 2
message.int32_map[2] = 3
message.int64_map[1] = 2
message.int64_map[2] = 3
message.uint32_map[1] = 2
message.uint32_map[2] = 3
message.uint64_map[1] = 2
message.uint64_map[2] = 3
message.string_map['1'] = 2
message.string_map['null'] = 3
message.map_map['1'].bool_map[True] = 3
self.assertEqual(
json.loads(json_format.MessageToJson(message, False)),
json.loads('{'
'"boolMap": {"false": 2, "true": 1},'
'"int32Map": {"1": 2, "2": 3},'
'"int64Map": {"1": 2, "2": 3},'
'"uint32Map": {"1": 2, "2": 3},'
'"uint64Map": {"1": 2, "2": 3},'
'"stringMap": {"1": 2, "null": 3},'
'"mapMap": {"1": {"boolMap": {"true": 3}}}'
'}'))
parsed_message = json_format_proto3_pb2.TestNestedMap()
self.CheckParseBack(message, parsed_message)
def testOneofFields(self):
message = json_format_proto3_pb2.TestOneof()
    # The always-print flag (second argument) does not affect oneof fields.
self.assertEqual(
json_format.MessageToJson(message, True),
'{}')
message.oneof_int32_value = 0
self.assertEqual(
json_format.MessageToJson(message, True),
'{\n'
' "oneofInt32Value": 0\n'
'}')
parsed_message = json_format_proto3_pb2.TestOneof()
self.CheckParseBack(message, parsed_message)
def testSurrogates(self):
# Test correct surrogate handling.
message = json_format_proto3_pb2.TestMessage()
json_format.Parse('{"stringValue": "\\uD83D\\uDE01"}', message)
self.assertEqual(message.string_value,
b'\xF0\x9F\x98\x81'.decode('utf-8', 'strict'))
# Error case: unpaired high surrogate.
self.CheckError(
'{"stringValue": "\\uD83D"}',
r'Invalid \\uXXXX escape|Unpaired.*surrogate')
# Unpaired low surrogate.
self.CheckError(
'{"stringValue": "\\uDE01"}',
r'Invalid \\uXXXX escape|Unpaired.*surrogate')
def testTimestampMessage(self):
message = json_format_proto3_pb2.TestTimestamp()
message.value.seconds = 0
message.value.nanos = 0
message.repeated_value.add().seconds = 20
message.repeated_value[0].nanos = 1
message.repeated_value.add().seconds = 0
message.repeated_value[1].nanos = 10000
message.repeated_value.add().seconds = 100000000
message.repeated_value[2].nanos = 0
# Maximum time
message.repeated_value.add().seconds = 253402300799
message.repeated_value[3].nanos = 999999999
# Minimum time
message.repeated_value.add().seconds = -62135596800
message.repeated_value[4].nanos = 0
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads('{'
'"value": "1970-01-01T00:00:00Z",'
'"repeatedValue": ['
' "1970-01-01T00:00:20.000000001Z",'
' "1970-01-01T00:00:00.000010Z",'
' "1973-03-03T09:46:40Z",'
' "9999-12-31T23:59:59.999999999Z",'
' "0001-01-01T00:00:00Z"'
']'
'}'))
parsed_message = json_format_proto3_pb2.TestTimestamp()
self.CheckParseBack(message, parsed_message)
text = (r'{"value": "1970-01-01T00:00:00.01+08:00",'
r'"repeatedValue":['
r' "1970-01-01T00:00:00.01+08:30",'
r' "1970-01-01T00:00:00.01-01:23"]}')
json_format.Parse(text, parsed_message)
self.assertEqual(parsed_message.value.seconds, -8 * 3600)
self.assertEqual(parsed_message.value.nanos, 10000000)
self.assertEqual(parsed_message.repeated_value[0].seconds, -8.5 * 3600)
self.assertEqual(parsed_message.repeated_value[1].seconds, 3600 + 23 * 60)
def testDurationMessage(self):
message = json_format_proto3_pb2.TestDuration()
message.value.seconds = 1
message.repeated_value.add().seconds = 0
message.repeated_value[0].nanos = 10
message.repeated_value.add().seconds = -1
message.repeated_value[1].nanos = -1000
message.repeated_value.add().seconds = 10
message.repeated_value[2].nanos = 11000000
message.repeated_value.add().seconds = -315576000000
message.repeated_value.add().seconds = 315576000000
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads('{'
'"value": "1s",'
'"repeatedValue": ['
' "0.000000010s",'
' "-1.000001s",'
' "10.011s",'
' "-315576000000s",'
' "315576000000s"'
']'
'}'))
parsed_message = json_format_proto3_pb2.TestDuration()
self.CheckParseBack(message, parsed_message)
def testFieldMaskMessage(self):
message = json_format_proto3_pb2.TestFieldMask()
message.value.paths.append('foo.bar')
message.value.paths.append('bar')
self.assertEqual(
json_format.MessageToJson(message, True),
'{\n'
' "value": "foo.bar,bar"\n'
'}')
parsed_message = json_format_proto3_pb2.TestFieldMask()
self.CheckParseBack(message, parsed_message)
message.value.Clear()
self.assertEqual(
json_format.MessageToJson(message, True),
'{\n'
' "value": ""\n'
'}')
self.CheckParseBack(message, parsed_message)
def testWrapperMessage(self):
message = json_format_proto3_pb2.TestWrapper()
message.bool_value.value = False
message.int32_value.value = 0
message.string_value.value = ''
message.bytes_value.value = b''
message.repeated_bool_value.add().value = True
message.repeated_bool_value.add().value = False
message.repeated_int32_value.add()
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads('{\n'
' "int32Value": 0,'
' "boolValue": false,'
' "stringValue": "",'
' "bytesValue": "",'
' "repeatedBoolValue": [true, false],'
' "repeatedInt32Value": [0],'
' "repeatedUint32Value": [],'
' "repeatedFloatValue": [],'
' "repeatedDoubleValue": [],'
' "repeatedBytesValue": [],'
' "repeatedInt64Value": [],'
' "repeatedUint64Value": [],'
' "repeatedStringValue": []'
'}'))
parsed_message = json_format_proto3_pb2.TestWrapper()
self.CheckParseBack(message, parsed_message)
def testStructMessage(self):
message = json_format_proto3_pb2.TestStruct()
message.value['name'] = 'Jim'
message.value['age'] = 10
message.value['attend'] = True
message.value['email'] = None
message.value.get_or_create_struct('address')['city'] = 'SFO'
message.value['address']['house_number'] = 1024
message.value.get_or_create_struct('empty_struct')
message.value.get_or_create_list('empty_list')
struct_list = message.value.get_or_create_list('list')
struct_list.extend([6, 'seven', True, False, None])
struct_list.add_struct()['subkey2'] = 9
message.repeated_value.add()['age'] = 11
message.repeated_value.add()
self.assertEqual(
json.loads(json_format.MessageToJson(message, False)),
json.loads(
'{'
' "value": {'
' "address": {'
' "city": "SFO", '
' "house_number": 1024'
' }, '
' "empty_struct": {}, '
' "empty_list": [], '
' "age": 10, '
' "name": "Jim", '
' "attend": true, '
' "email": null, '
' "list": [6, "seven", true, false, null, {"subkey2": 9}]'
' },'
' "repeatedValue": [{"age": 11}, {}]'
'}'))
parsed_message = json_format_proto3_pb2.TestStruct()
self.CheckParseBack(message, parsed_message)
# check for regression; this used to raise
parsed_message.value['empty_struct']
parsed_message.value['empty_list']
def testValueMessage(self):
message = json_format_proto3_pb2.TestValue()
message.value.string_value = 'hello'
message.repeated_value.add().number_value = 11.1
message.repeated_value.add().bool_value = False
message.repeated_value.add().null_value = 0
self.assertEqual(
json.loads(json_format.MessageToJson(message, False)),
json.loads(
'{'
' "value": "hello",'
' "repeatedValue": [11.1, false, null]'
'}'))
parsed_message = json_format_proto3_pb2.TestValue()
self.CheckParseBack(message, parsed_message)
# Can't parse back if the Value message is not set.
message.repeated_value.add()
self.assertEqual(
json.loads(json_format.MessageToJson(message, False)),
json.loads(
'{'
' "value": "hello",'
' "repeatedValue": [11.1, false, null, null]'
'}'))
message.Clear()
json_format.Parse('{"value": null}', message)
self.assertEqual(message.value.WhichOneof('kind'), 'null_value')
def testListValueMessage(self):
message = json_format_proto3_pb2.TestListValue()
message.value.values.add().number_value = 11.1
message.value.values.add().null_value = 0
message.value.values.add().bool_value = True
message.value.values.add().string_value = 'hello'
message.value.values.add().struct_value['name'] = 'Jim'
message.repeated_value.add().values.add().number_value = 1
message.repeated_value.add()
self.assertEqual(
json.loads(json_format.MessageToJson(message, False)),
json.loads(
'{"value": [11.1, null, true, "hello", {"name": "Jim"}]\n,'
'"repeatedValue": [[1], []]}'))
parsed_message = json_format_proto3_pb2.TestListValue()
self.CheckParseBack(message, parsed_message)
def testNullValue(self):
message = json_format_proto3_pb2.TestOneof()
message.oneof_null_value = 0
self.assertEqual(json_format.MessageToJson(message),
'{\n "oneofNullValue": null\n}')
parsed_message = json_format_proto3_pb2.TestOneof()
self.CheckParseBack(message, parsed_message)
# Check old format is also accepted
new_message = json_format_proto3_pb2.TestOneof()
json_format.Parse('{\n "oneofNullValue": "NULL_VALUE"\n}',
new_message)
self.assertEqual(json_format.MessageToJson(new_message),
'{\n "oneofNullValue": null\n}')
def testAnyMessage(self):
message = json_format_proto3_pb2.TestAny()
value1 = json_format_proto3_pb2.MessageType()
value2 = json_format_proto3_pb2.MessageType()
value1.value = 1234
value2.value = 5678
message.value.Pack(value1)
message.repeated_value.add().Pack(value1)
message.repeated_value.add().Pack(value2)
message.repeated_value.add()
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads(
'{\n'
' "repeatedValue": [ {\n'
' "@type": "type.googleapis.com/proto3.MessageType",\n'
' "value": 1234\n'
' }, {\n'
' "@type": "type.googleapis.com/proto3.MessageType",\n'
' "value": 5678\n'
' },\n'
' {}],\n'
' "value": {\n'
' "@type": "type.googleapis.com/proto3.MessageType",\n'
' "value": 1234\n'
' }\n'
'}\n'))
parsed_message = json_format_proto3_pb2.TestAny()
self.CheckParseBack(message, parsed_message)
# Must print @type first
test_message = json_format_proto3_pb2.TestMessage(
bool_value=True,
int32_value=20,
int64_value=-20,
uint32_value=20,
uint64_value=20,
double_value=3.14,
string_value='foo')
message.Clear()
message.value.Pack(test_message)
self.assertEqual(
json_format.MessageToJson(message, False)[0:68],
'{\n'
' "value": {\n'
' "@type": "type.googleapis.com/proto3.TestMessage"')
def testAnyMessageDescriptorPoolMissingType(self):
packed_message = unittest_pb2.OneString()
packed_message.data = 'string'
message = any_test_pb2.TestAny()
message.any_value.Pack(packed_message)
empty_pool = descriptor_pool.DescriptorPool()
with self.assertRaises(TypeError) as cm:
json_format.MessageToJson(message, True, descriptor_pool=empty_pool)
self.assertEqual(
'Can not find message descriptor by type_url:'
' type.googleapis.com/protobuf_unittest.OneString.',
str(cm.exception))
def testWellKnownInAnyMessage(self):
message = any_pb2.Any()
int32_value = wrappers_pb2.Int32Value()
int32_value.value = 1234
message.Pack(int32_value)
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads(
'{\n'
' "@type": \"type.googleapis.com/google.protobuf.Int32Value\",\n'
' "value": 1234\n'
'}\n'))
parsed_message = any_pb2.Any()
self.CheckParseBack(message, parsed_message)
timestamp = timestamp_pb2.Timestamp()
message.Pack(timestamp)
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads(
'{\n'
' "@type": "type.googleapis.com/google.protobuf.Timestamp",\n'
' "value": "1970-01-01T00:00:00Z"\n'
'}\n'))
self.CheckParseBack(message, parsed_message)
duration = duration_pb2.Duration()
duration.seconds = 1
message.Pack(duration)
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads(
'{\n'
' "@type": "type.googleapis.com/google.protobuf.Duration",\n'
' "value": "1s"\n'
'}\n'))
self.CheckParseBack(message, parsed_message)
field_mask = field_mask_pb2.FieldMask()
field_mask.paths.append('foo.bar')
field_mask.paths.append('bar')
message.Pack(field_mask)
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads(
'{\n'
' "@type": "type.googleapis.com/google.protobuf.FieldMask",\n'
' "value": "foo.bar,bar"\n'
'}\n'))
self.CheckParseBack(message, parsed_message)
struct_message = struct_pb2.Struct()
struct_message['name'] = 'Jim'
message.Pack(struct_message)
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads(
'{\n'
' "@type": "type.googleapis.com/google.protobuf.Struct",\n'
' "value": {"name": "Jim"}\n'
'}\n'))
self.CheckParseBack(message, parsed_message)
nested_any = any_pb2.Any()
int32_value.value = 5678
nested_any.Pack(int32_value)
message.Pack(nested_any)
self.assertEqual(
json.loads(json_format.MessageToJson(message, True)),
json.loads(
'{\n'
' "@type": "type.googleapis.com/google.protobuf.Any",\n'
' "value": {\n'
' "@type": "type.googleapis.com/google.protobuf.Int32Value",\n'
' "value": 5678\n'
' }\n'
'}\n'))
self.CheckParseBack(message, parsed_message)
def testParseNull(self):
message = json_format_proto3_pb2.TestMessage()
parsed_message = json_format_proto3_pb2.TestMessage()
self.FillAllFields(parsed_message)
json_format.Parse('{"int32Value": null, '
'"int64Value": null, '
'"uint32Value": null,'
'"uint64Value": null,'
'"floatValue": null,'
'"doubleValue": null,'
'"boolValue": null,'
'"stringValue": null,'
'"bytesValue": null,'
'"messageValue": null,'
'"enumValue": null,'
'"repeatedInt32Value": null,'
'"repeatedInt64Value": null,'
'"repeatedUint32Value": null,'
'"repeatedUint64Value": null,'
'"repeatedFloatValue": null,'
'"repeatedDoubleValue": null,'
'"repeatedBoolValue": null,'
'"repeatedStringValue": null,'
'"repeatedBytesValue": null,'
'"repeatedMessageValue": null,'
'"repeatedEnumValue": null'
'}',
parsed_message)
self.assertEqual(message, parsed_message)
# Null and {} should have different behavior for sub message.
self.assertFalse(parsed_message.HasField('message_value'))
json_format.Parse('{"messageValue": {}}', parsed_message)
self.assertTrue(parsed_message.HasField('message_value'))
    # Null is not allowed to be used as an element in a repeated field.
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse repeatedInt32Value field: '
'null is not allowed to be used as an element in a repeated field.',
json_format.Parse,
'{"repeatedInt32Value":[1, null]}',
parsed_message)
self.CheckError('{"repeatedMessageValue":[null]}',
'Failed to parse repeatedMessageValue field: null is not'
' allowed to be used as an element in a repeated field.')
def testNanFloat(self):
message = json_format_proto3_pb2.TestMessage()
message.float_value = float('nan')
text = '{\n "floatValue": "NaN"\n}'
self.assertEqual(json_format.MessageToJson(message), text)
parsed_message = json_format_proto3_pb2.TestMessage()
json_format.Parse(text, parsed_message)
self.assertTrue(math.isnan(parsed_message.float_value))
def testParseDoubleToFloat(self):
message = json_format_proto3_pb2.TestMessage()
text = ('{"repeatedDoubleValue": [3.4028235e+39, 1.4028235e-39]\n}')
json_format.Parse(text, message)
self.assertEqual(message.repeated_double_value[0], 3.4028235e+39)
self.assertEqual(message.repeated_double_value[1], 1.4028235e-39)
text = ('{"repeatedFloatValue": [3.4028235e+39, 1.4028235e-39]\n}')
self.CheckError(text,
'Failed to parse repeatedFloatValue field: '
'Float value too large.')
def testFloatPrecision(self):
message = json_format_proto3_pb2.TestMessage()
message.float_value = 1.123456789
    # Set to 8 significant digits.
text = '{\n "floatValue": 1.1234568\n}'
self.assertEqual(
json_format.MessageToJson(message, float_precision=8), text)
    # Set to 7 significant digits.
text = '{\n "floatValue": 1.123457\n}'
self.assertEqual(
json_format.MessageToJson(message, float_precision=7), text)
    # By default (no float_precision), the shortest float representation is
    # printed automatically.
message.float_value = 1.1000000011
text = '{\n "floatValue": 1.1\n}'
self.assertEqual(
json_format.MessageToJson(message), text)
message.float_value = 1.00000075e-36
text = '{\n "floatValue": 1.00000075e-36\n}'
self.assertEqual(
json_format.MessageToJson(message), text)
message.float_value = 12345678912345e+11
text = '{\n "floatValue": 1.234568e+24\n}'
self.assertEqual(
json_format.MessageToJson(message), text)
    # Exhaustively vary two bytes of the float's bit pattern and check that
    # the JSON encode/decode round-trip does not lose precision.
value_list = [0x00, 0xD8, 0x6E, 0x00]
msg2 = json_format_proto3_pb2.TestMessage()
for a in range(0, 256):
value_list[3] = a
for b in range(0, 256):
value_list[0] = b
byte_array = bytearray(value_list)
message.float_value = struct.unpack('<f', byte_array)[0]
self.CheckParseBack(message, msg2)
def testParseEmptyText(self):
self.CheckError('',
r'Failed to load JSON: (Expecting value)|(No JSON).')
def testParseEnumValue(self):
message = json_format_proto3_pb2.TestMessage()
text = '{"enumValue": 0}'
json_format.Parse(text, message)
text = '{"enumValue": 1}'
json_format.Parse(text, message)
self.CheckError(
'{"enumValue": "baz"}',
'Failed to parse enumValue field: Invalid enum value baz '
'for enum type proto3.EnumType.')
# Proto3 accepts numeric unknown enums.
text = '{"enumValue": 12345}'
json_format.Parse(text, message)
# Proto2 does not accept unknown enums.
message = unittest_pb2.TestAllTypes()
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse optionalNestedEnum field: Invalid enum value 12345 '
'for enum type protobuf_unittest.TestAllTypes.NestedEnum.',
json_format.Parse, '{"optionalNestedEnum": 12345}', message)
def testBytes(self):
message = json_format_proto3_pb2.TestMessage()
    # Test the URL-safe base64 alphabet ('-' and '_')
text = '{"bytesValue": "-_"}'
json_format.Parse(text, message)
self.assertEqual(message.bytes_value, b'\xfb')
# Test padding
text = '{"bytesValue": "AQI="}'
json_format.Parse(text, message)
self.assertEqual(message.bytes_value, b'\x01\x02')
text = '{"bytesValue": "AQI"}'
json_format.Parse(text, message)
self.assertEqual(message.bytes_value, b'\x01\x02')
text = '{"bytesValue": "AQI*"}'
json_format.Parse(text, message)
self.assertEqual(message.bytes_value, b'\x01\x02')
  def testParseBadIdentifier(self):
self.CheckError('{int32Value: 1}',
(r'Failed to load JSON: Expecting property name'
r'( enclosed in double quotes)?: line 1'))
self.CheckError('{"unknownName": 1}',
'Message type "proto3.TestMessage" has no field named '
'"unknownName".')
def testIgnoreUnknownField(self):
text = '{"unknownName": 1}'
parsed_message = json_format_proto3_pb2.TestMessage()
json_format.Parse(text, parsed_message, ignore_unknown_fields=True)
text = ('{\n'
' "repeatedValue": [ {\n'
' "@type": "type.googleapis.com/proto3.MessageType",\n'
' "unknownName": 1\n'
' }]\n'
'}\n')
parsed_message = json_format_proto3_pb2.TestAny()
json_format.Parse(text, parsed_message, ignore_unknown_fields=True)
def testDuplicateField(self):
self.CheckError('{"int32Value": 1,\n"int32Value":2}',
'Failed to load JSON: duplicate key int32Value.')
def testInvalidBoolValue(self):
self.CheckError('{"boolValue": 1}',
'Failed to parse boolValue field: '
'Expected true or false without quotes.')
self.CheckError('{"boolValue": "true"}',
'Failed to parse boolValue field: '
'Expected true or false without quotes.')
def testInvalidIntegerValue(self):
message = json_format_proto3_pb2.TestMessage()
text = '{"int32Value": 0x12345}'
self.assertRaises(json_format.ParseError,
json_format.Parse, text, message)
self.CheckError('{"int32Value": 1.5}',
'Failed to parse int32Value field: '
'Couldn\'t parse integer: 1.5.')
self.CheckError('{"int32Value": 012345}',
(r'Failed to load JSON: Expecting \'?,\'? delimiter: '
r'line 1.'))
self.CheckError('{"int32Value": " 1 "}',
'Failed to parse int32Value field: '
'Couldn\'t parse integer: " 1 ".')
self.CheckError('{"int32Value": "1 "}',
'Failed to parse int32Value field: '
'Couldn\'t parse integer: "1 ".')
self.CheckError('{"int32Value": false}',
'Failed to parse int32Value field: Bool value False '
'is not acceptable for integer field.')
self.CheckError('{"int32Value": 12345678901234567890}',
'Failed to parse int32Value field: Value out of range: '
'12345678901234567890.')
self.CheckError('{"uint32Value": -1}',
'Failed to parse uint32Value field: '
'Value out of range: -1.')
def testInvalidFloatValue(self):
self.CheckError('{"floatValue": "nan"}',
'Failed to parse floatValue field: Couldn\'t '
'parse float "nan", use "NaN" instead.')
self.CheckError('{"floatValue": NaN}',
'Failed to parse floatValue field: Couldn\'t '
'parse NaN, use quoted "NaN" instead.')
self.CheckError('{"floatValue": Infinity}',
'Failed to parse floatValue field: Couldn\'t parse Infinity'
' or value too large, use quoted "Infinity" instead.')
self.CheckError('{"floatValue": -Infinity}',
'Failed to parse floatValue field: Couldn\'t parse '
'-Infinity or value too small, '
'use quoted "-Infinity" instead.')
self.CheckError('{"doubleValue": -1.89769e+308}',
'Failed to parse doubleValue field: Couldn\'t parse '
'-Infinity or value too small, '
'use quoted "-Infinity" instead.')
self.CheckError('{"floatValue": 3.4028235e+39}',
'Failed to parse floatValue field: Float value too large.')
self.CheckError('{"floatValue": -3.502823e+38}',
'Failed to parse floatValue field: Float value too small.')
def testInvalidRepeated(self):
self.CheckError('{"repeatedInt32Value": 12345}',
(r'Failed to parse repeatedInt32Value field: repeated field'
r' repeatedInt32Value must be in \[\] which is 12345.'))
def testInvalidMap(self):
message = json_format_proto3_pb2.TestMap()
text = '{"int32Map": {"null": 2, "2": 3}}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse int32Map field: invalid literal',
json_format.Parse, text, message)
text = '{"int32Map": {1: 2, "2": 3}}'
self.assertRaisesRegexp(
json_format.ParseError,
(r'Failed to load JSON: Expecting property name'
r'( enclosed in double quotes)?: line 1'),
json_format.Parse, text, message)
text = '{"boolMap": {"null": 1}}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse boolMap field: Expected "true" or "false", not null.',
json_format.Parse, text, message)
if sys.version_info < (2, 7):
return
text = r'{"stringMap": {"a": 3, "\u0061": 2}}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to load JSON: duplicate key a',
json_format.Parse, text, message)
text = r'{"stringMap": 0}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse stringMap field: Map field string_map must be '
'in a dict which is 0.',
json_format.Parse, text, message)
def testInvalidTimestamp(self):
message = json_format_proto3_pb2.TestTimestamp()
text = '{"value": "10000-01-01T00:00:00.00Z"}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse value field: '
'time data \'10000-01-01T00:00:00\' does not match'
' format \'%Y-%m-%dT%H:%M:%S\'.',
json_format.Parse, text, message)
text = '{"value": "1970-01-01T00:00:00.0123456789012Z"}'
self.assertRaisesRegexp(
json_format.ParseError,
'nanos 0123456789012 more than 9 fractional digits.',
json_format.Parse, text, message)
text = '{"value": "1972-01-01T01:00:00.01+08"}'
self.assertRaisesRegexp(
json_format.ParseError,
(r'Invalid timezone offset value: \+08.'),
json_format.Parse, text, message)
# Time smaller than minimum time.
text = '{"value": "0000-01-01T00:00:00Z"}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse value field: year (0 )?is out of range.',
json_format.Parse, text, message)
# Time bigger than maximum time.
message.value.seconds = 253402300800
self.assertRaisesRegexp(
OverflowError,
'date value out of range',
json_format.MessageToJson, message)
    # A lowercase 't' separator is not accepted.
text = '{"value": "0001-01-01t00:00:00Z"}'
with self.assertRaises(json_format.ParseError) as e:
json_format.Parse(text, message)
self.assertEqual(
'Failed to parse value field: '
'time data \'0001-01-01t00:00:00\' does not match format '
'\'%Y-%m-%dT%H:%M:%S\', lowercase \'t\' is not accepted.',
str(e.exception))
def testInvalidOneof(self):
message = json_format_proto3_pb2.TestOneof()
text = '{"oneofInt32Value": 1, "oneofStringValue": "2"}'
self.assertRaisesRegexp(
json_format.ParseError,
'Message type "proto3.TestOneof"'
' should not have multiple "oneof_value" oneof fields.',
json_format.Parse, text, message)
def testInvalidListValue(self):
message = json_format_proto3_pb2.TestListValue()
text = '{"value": 1234}'
self.assertRaisesRegexp(
json_format.ParseError,
r'Failed to parse value field: ListValue must be in \[\] which is 1234',
json_format.Parse, text, message)
def testInvalidStruct(self):
message = json_format_proto3_pb2.TestStruct()
text = '{"value": 1234}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse value field: Struct must be in a dict which is 1234',
json_format.Parse, text, message)
def testInvalidAny(self):
message = any_pb2.Any()
text = '{"@type": "type.googleapis.com/google.protobuf.Int32Value"}'
self.assertRaisesRegexp(
KeyError,
'value',
json_format.Parse, text, message)
text = '{"value": 1234}'
self.assertRaisesRegexp(
json_format.ParseError,
'@type is missing when parsing any message.',
json_format.Parse, text, message)
text = '{"@type": "type.googleapis.com/MessageNotExist", "value": 1234}'
self.assertRaisesRegexp(
TypeError,
'Can not find message descriptor by type_url: '
'type.googleapis.com/MessageNotExist.',
json_format.Parse, text, message)
    # Only the part of the type URL after the last '/' is used to look up
    # the message type: b/25630112
text = (r'{"@type": "incorrect.googleapis.com/google.protobuf.Int32Value",'
r'"value": 1234}')
json_format.Parse(text, message)
def testPreservingProtoFieldNames(self):
message = json_format_proto3_pb2.TestMessage()
message.int32_value = 12345
self.assertEqual('{\n "int32Value": 12345\n}',
json_format.MessageToJson(message))
self.assertEqual('{\n "int32_value": 12345\n}',
json_format.MessageToJson(message, False, True))
# When including_default_value_fields is True.
message = json_format_proto3_pb2.TestTimestamp()
self.assertEqual('{\n "repeatedValue": []\n}',
json_format.MessageToJson(message, True, False))
self.assertEqual('{\n "repeated_value": []\n}',
json_format.MessageToJson(message, True, True))
# Parsers accept both original proto field names and lowerCamelCase names.
message = json_format_proto3_pb2.TestMessage()
json_format.Parse('{"int32Value": 54321}', message)
self.assertEqual(54321, message.int32_value)
json_format.Parse('{"int32_value": 12345}', message)
self.assertEqual(12345, message.int32_value)
def testIndent(self):
message = json_format_proto3_pb2.TestMessage()
message.int32_value = 12345
self.assertEqual('{\n"int32Value": 12345\n}',
json_format.MessageToJson(message, indent=0))
def testFormatEnumsAsInts(self):
message = json_format_proto3_pb2.TestMessage()
message.enum_value = json_format_proto3_pb2.BAR
message.repeated_enum_value.append(json_format_proto3_pb2.FOO)
message.repeated_enum_value.append(json_format_proto3_pb2.BAR)
self.assertEqual(json.loads('{\n'
' "enumValue": 1,\n'
' "repeatedEnumValue": [0, 1]\n'
'}\n'),
json.loads(json_format.MessageToJson(
message, use_integers_for_enums=True)))
def testParseDict(self):
expected = 12345
js_dict = {'int32Value': expected}
message = json_format_proto3_pb2.TestMessage()
json_format.ParseDict(js_dict, message)
self.assertEqual(expected, message.int32_value)
def testParseDictAnyDescriptorPoolMissingType(self):
# Confirm that ParseDict does not raise ParseError with default pool
js_dict = {
'any_value': {
'@type': 'type.googleapis.com/proto3.MessageType',
'value': 1234
}
}
json_format.ParseDict(js_dict, any_test_pb2.TestAny())
# Check ParseDict raises ParseError with empty pool
js_dict = {
'any_value': {
'@type': 'type.googleapis.com/proto3.MessageType',
'value': 1234
}
}
with self.assertRaises(json_format.ParseError) as cm:
empty_pool = descriptor_pool.DescriptorPool()
json_format.ParseDict(js_dict,
any_test_pb2.TestAny(),
descriptor_pool=empty_pool)
self.assertEqual(
str(cm.exception),
'Failed to parse any_value field: Can not find message descriptor by'
' type_url: type.googleapis.com/proto3.MessageType..')
def testParseDictUnknownValueType(self):
class UnknownClass(object):
def __str__(self):
return 'v'
message = json_format_proto3_pb2.TestValue()
self.assertRaisesRegexp(
json_format.ParseError,
r"Value v has unexpected type <class '.*\.UnknownClass'>.",
json_format.ParseDict,
{'value': UnknownClass()},
message)
def testMessageToDict(self):
message = json_format_proto3_pb2.TestMessage()
message.int32_value = 12345
expected = {'int32Value': 12345}
self.assertEqual(expected,
json_format.MessageToDict(message))
def testJsonName(self):
message = json_format_proto3_pb2.TestCustomJsonName()
message.value = 12345
self.assertEqual('{\n "@value": 12345\n}',
json_format.MessageToJson(message))
parsed_message = json_format_proto3_pb2.TestCustomJsonName()
self.CheckParseBack(message, parsed_message)
def testSortKeys(self):
    # Testing sort_keys is not perfectly reliable: by random luck the output
    # could come out sorted anyway, so we use a selection of distinct names.
message = json_format_proto3_pb2.TestMessage(bool_value=True,
int32_value=1,
int64_value=3,
uint32_value=4,
string_value='bla')
self.assertEqual(
json_format.MessageToJson(message, sort_keys=True),
# We use json.dumps() instead of a hardcoded string due to differences
# between Python 2 and Python 3.
json.dumps({'boolValue': True, 'int32Value': 1, 'int64Value': '3',
'uint32Value': 4, 'stringValue': 'bla'},
indent=2, sort_keys=True))
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
| 7,361,897,291,038,372,000 | 39.454473 | 80 | 0.607606 | false |
philwo/dbbench
|
selector.py
|
1
|
2829
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Performs randomized SELECT queries on a test database in multiple threads.
#
# Copyright 2012 by Philipp Wollermann
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import division
import sys
import threading
import logging
from time import time, sleep
from random import randrange
import settings
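# The local `settings` module supplies the benchmark configuration. A minimal
# illustrative sketch follows (the attribute names are inferred from the calls
# below; the driver and connection details are assumptions, not the real
# project's values):
#
#   # settings.py
#   import MySQLdb
#   DB_TABLE = 'testdata'
#   THREAD_COUNT = 8
#   SELECT_ROW_COUNT = 1000
#   def db_connect():
#       return MySQLdb.connect(host='localhost', user='bench',
#                              passwd='bench', db='dbbench')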
class Worker(threading.Thread):
def __init__(self, id_min, id_max):
super(Worker, self).__init__()
        # Event used to signal this worker to shut down (named so it does
        # not clash with internals of threading.Thread).
        self._stop_event = threading.Event()
self.id_min = id_min
self.id_max = id_max
def stop(self):
logging.debug('Stopping...')
        self._stop_event.set()
def stopped(self):
        return self._stop_event.is_set()
def run(self):
logging.debug('Starting...')
while not self.stopped():
start = time()
conn = settings.db_connect()
cur = conn.cursor()
for i in xrange(settings.SELECT_ROW_COUNT):
cur.execute('SELECT * FROM ' + settings.DB_TABLE + ' WHERE id = %s', (randrange(self.id_min, self.id_max),))
conn.commit()
cur.close()
conn.close()
end = time()
logging.info('Selecting %s rows from indexes between [%s, %s] took %.2f seconds...' % (settings.SELECT_ROW_COUNT, self.id_min, self.id_max, (end - start),))
def main():
# Setup logging
logging.basicConfig(
level=logging.DEBUG,
format='[%(levelname)s] (%(threadName)-10s) %(message)s',
)
# Get the current minimum and maximum value of the auto_increment primary key in our test table.
conn = settings.db_connect()
cur = conn.cursor()
cur.execute('SELECT id FROM ' + settings.DB_TABLE + ' ORDER BY id ASC LIMIT 1')
id_min = cur.fetchone()[0]
cur.execute('SELECT id FROM ' + settings.DB_TABLE + ' ORDER BY id DESC LIMIT 1')
id_max = cur.fetchone()[0]
cur.close()
conn.close()
# Start worker threads
try:
threads = [Worker(id_min, id_max) for i in xrange(settings.THREAD_COUNT)]
for thread in threads:
thread.start()
while True:
sleep(1)
except (KeyboardInterrupt, SystemExit):
for thread in threads:
thread.stop()
if __name__ == "__main__":
sys.exit(main())
|
apache-2.0
| -7,106,444,881,440,432,000 | 30.433333 | 168 | 0.618947 | false |
ccca-dc/ckanext-mdedit
|
ckanext/mdedit/plugin.py
|
1
|
9816
|
import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
import json
import logging
import ckanext.mdedit.logic.action as action
import pylons
from ckanext.mdedit import helpers
from ckanext.mdedit import validators as v
ignore_empty = plugins.toolkit.get_validator('ignore_empty')
from ckanext.mdedit.helpers import (
localize_json_title, get_frequency_name, get_readable_file_size,
parse_json, dump_json, map_to_valid_format
)
class MdeditPlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IConfigurer)
plugins.implements(plugins.IValidators)
plugins.implements(plugins.IPackageController, inherit=True)
plugins.implements(plugins.IActions)
plugins.implements(plugins.ITemplateHelpers)
# IConfigurer
def update_config(self, config_):
toolkit.add_template_directory(config_, 'templates')
toolkit.add_public_directory(config_, 'public')
toolkit.add_resource('fanstatic', 'mdedit')
# IValidators
def get_validators(self):
return {
'mdedit_contains_k': v.mdedit_contains_k,
'multiple_text': v.multiple_text,
'multiple_text_output': v.multiple_text_output,
'list_of_dicts': v.list_of_dicts,
'parse_json': parse_json,
'version_to_name': v.version_to_name,
'readonly_subset_fields': v.readonly_subset_fields,
'readonly_subset_fields_dicts': v.readonly_subset_fields_dicts
}
# IActions
def get_actions(self):
actions = {'package_contact_show': action.package_contact_show}
return actions
# ITemplateHelpers
def get_helpers(self):
return {
'mdedit_get_package_name': helpers.mdedit_get_package_name,
'mdedit_get_name': helpers.mdedit_get_name,
'mdedit_get_mail': helpers.mdedit_get_mail,
'mdedit_get_date': helpers.mdedit_get_date,
'mdedit_parse_date': helpers.mdedit_parse_date,
'mdedit_get_name_citation': helpers.mdedit_get_name_citation,
'mdedit_get_contain_values_k': helpers.mdedit_get_contain_values_k,
'mdedit_get_contain_labels': helpers.mdedit_get_contain_labels,
'mdedit_get_contain_pholders': helpers.mdedit_get_contain_pholders,
'mdedit_get_resource_version': helpers.mdedit_get_resource_version,
'mdedit_get_resource_title': helpers.mdedit_get_resource_title,
'mdedit_get_package_id': helpers.mdedit_get_package_id,
'mdedit_get_contact_choices': helpers.mdedit_get_contact_choices,
'mdedit_get_contact_values': helpers.mdedit_get_contact_values,
'get_readable_file_size': helpers.get_readable_file_size,
'get_frequency_name': helpers.get_frequency_name,
'parse_json': helpers.parse_json,
'dump_json': helpers.dump_json,
'filesizeformat': helpers.filesizeformat
}
class MdeditMasterPlugin(plugins.SingletonPlugin):
"""
Handles dictionaries in data_dict (pkg_dict).
"""
def before_view(self, pkg_dict):
pkg_dict = self._prepare_package_json(pkg_dict)
return pkg_dict
    def _parse_field(self, key):
        # Only these metadata fields are stored as JSON strings.
        return key in ('contact_points', 'specifics', 'variables',
                       'dimensions', 'relations')
def _prepare_package_json(self, pkg_dict):
# parse all json strings in dict
pkg_dict = self._package_parse_json_strings(pkg_dict)
# map ckan fields
pkg_dict = self._package_map_ckan_default_fields(pkg_dict)
#try:
# # Do not change the resulting dict for API requests
# path = pylons.request.path
# if path.startswith('/api'):
# return pkg_dict
#except TypeError:
# # we get here if there is no request (i.e. on the command line)
# return pkg_dict
return pkg_dict
def _package_parse_json_strings(self, pkg_dict):
# try to parse all values as JSON
for key, value in pkg_dict.iteritems():
if self._parse_field(key):
pkg_dict[key] = parse_json(value)
return pkg_dict
def _package_map_ckan_default_fields(self, pkg_dict): # noqa
# Map Maintainer and author from contact_points
if pkg_dict.get('maintainer') is None:
try:
pkg_dict['maintainer'] = pkg_dict['contact_points'][0]['name'] # noqa
except (KeyError, IndexError, TypeError):
pass
if pkg_dict.get('maintainer_email') is None:
try:
pkg_dict['maintainer_email'] = pkg_dict['contact_points'][0]['email'] # noqa
except (KeyError, IndexError, TypeError):
pass
if pkg_dict.get('author') is None:
try:
pkg_dict['author'] = pkg_dict['contact_points'][0]['name'] # noqa
except (KeyError, IndexError, TypeError):
pass
if pkg_dict.get('author_email') is None:
try:
pkg_dict['author_email'] = pkg_dict['contact_points'][0]['email'] # noqa
except (KeyError, IndexError, TypeError):
pass
# Map Temporals for DCAT Export
# TODO Support multiple temporal extents
if pkg_dict.get('temporal_start') is None:
try:
pkg_dict['temporal_start'] = pkg_dict['temporals'][0]['start_date'] # noqa
except (KeyError, IndexError, TypeError):
pass
if pkg_dict.get('temporal_end') is None:
try:
pkg_dict['temporal_end'] = pkg_dict['temporals'][0]['end_date'] # noqa
except (KeyError, IndexError, TypeError):
pass
return pkg_dict
class MdeditResourcePlugin(MdeditMasterPlugin):
plugins.implements(plugins.IResourceController, inherit=True)
# IResourceController
def before_show(self, res_dict):
res_dict = super(MdeditResourcePlugin, self).before_view(res_dict)
# res_dict = self._prepare_resource_format(res_dict)
# if format could not be mapped and media_type exists use this value
# if not res_dict.get('format') and res_dict.get('media_type'):
# res_dict['format'] = res_dict['media_type'].split('/')[-1]
return res_dict
    def _ignore_field(self, key):
        return key in ('tracking_summary', 'name', 'description')
class MdeditPackagePlugin(MdeditMasterPlugin):
plugins.implements(plugins.IPackageController, inherit=True)
def is_supported_package_type(self, pkg_dict):
# only package type 'dataset' is supported (not harvesters!)
try:
return (pkg_dict['type'] == 'dataset')
except KeyError:
return False
# IPackageController
def before_view(self, pkg_dict):
if not self.is_supported_package_type(pkg_dict):
return pkg_dict
return super(MdeditPackagePlugin, self).before_view(pkg_dict)
def after_show(self, context, pkg_dict):
if not self.is_supported_package_type(pkg_dict):
return pkg_dict
pkg_dict = self._package_map_ckan_default_fields(pkg_dict)
return super(MdeditPackagePlugin, self).before_view(pkg_dict)
#return pkg_dict
def before_index(self, search_data):
if not self.is_supported_package_type(search_data):
return search_data
validated_dict = json.loads(search_data['validated_data_dict'])
variables = []
# Add any specified variables to search_data
if u'variables' in validated_dict:
for x in validated_dict[u'variables']:
                if 'description' in x or 'name' in x or 'standard_name' in x:
                    new_item = {
                        u'name': u'Variables',
                        u'value': (x.get('description') or x.get('name') or
                                   x.get('standard_name')),
                    }
                    variables.append(new_item)
        try:
            search_data['extras_variables'] = self._prepare_list_for_index(validated_dict[u'variables'])  # noqa
            search_data['extras_dimensions'] = self._prepare_list_for_index(validated_dict[u'dimensions'])  # noqa
            search_data['extras_relations'] = self._prepare_list_for_index(validated_dict[u'relations'])  # noqa
            search_data['extras_specifics'] = self._prepare_list_for_index(validated_dict[u'specifics'])  # noqa
            search_data['res_hash'] = [d['hash'] for d in validated_dict[u'resources'] if d['hash']]
            # Flatten specifics, i.e. add the new search items extras_specifics_*
            search_data.update(self._flatten_list_for_index(validated_dict[u'specifics'], 'extras_specifics', 'name', 'value'))
            # Add the new search item extras_specifics_Variables
            search_data.update(self._flatten_list_for_index(variables, 'extras_specifics', 'name', 'value'))
        except (KeyError, TypeError) as e:
            logging.warning('before_index: could not prepare search data: %s', e)
return search_data
# generates a set with all dicts from list
def _prepare_list_for_index(self, list_dicts):
dicts = []
for d in list_dicts:
dicts.append(dump_json(d))
return dicts
def _flatten_list_for_index(self, list_dicts, result_key_prefix, filter_key, filter_value):
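        # Illustrative example (hypothetical data): given
        #   list_dicts = [{'name': 'model', 'value': 'EC-EARTH'},
        #                 {'name': 'model', 'value': 'CNRM-CM5'},
        #                 {'name': 'scenario', 'value': 'rcp45'}]
        # with result_key_prefix='extras_specifics', filter_key='name' and
        # filter_value='value', this returns
        #   {'extras_specifics_model': ['EC-EARTH', 'CNRM-CM5'],
        #    'extras_specifics_scenario': ['rcp45']}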
unique_keywords = set([dic.get(filter_key) for dic in list_dicts])
flatten_dict = {}
for keyword in unique_keywords:
flatten_dict.update(
                {'_'.join([result_key_prefix, keyword]): [d.get(filter_value) for d in list_dicts if d.get(filter_key) == keyword]})
return flatten_dict
|
agpl-3.0
| -6,954,540,575,138,646,000 | 37.645669 | 133 | 0.614609 | false |
GoogleCloudPlatform/appengine-python-standard
|
src/google/appengine/ext/ndb/model.py
|
1
|
130428
|
#!/usr/bin/env python
#
# Copyright 2007 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Model and Property classes and associated stuff.
A model class represents the structure of entities stored in the
datastore. Applications define model classes to indicate the
structure of their entities, then instantiate those model classes
to create entities.
All model classes must inherit (directly or indirectly) from Model.
Through the magic of metaclasses, straightforward assignments in the
model class definition can be used to declare the model's structure::
class Person(Model):
name = StringProperty()
age = IntegerProperty()
We can now create a Person entity and write it to Cloud Datastore::
p = Person(name='Arthur Dent', age=42)
k = p.put()
The return value from put() is a Key (see the documentation for
ndb/key.py), which can be used to retrieve the same entity later::
p2 = k.get()
p2 == p # Returns True
To update an entity, simply change its attributes and write it back
(note that this doesn't change the key)::
p2.name = 'Arthur Philip Dent'
p2.put()
We can also delete an entity (by using the key)::
k.delete()
The property definitions in the class body tell the system the names
and the types of the fields to be stored in Cloud Datastore, whether
they must be indexed, their default value, and more.
Many different Property types exist. Most are indexed by default, the
exceptions indicated in the list below:
- StringProperty: a short text string, limited to 500 bytes
- TextProperty: an unlimited text string; unindexed
- BlobProperty: an unlimited byte string; unindexed
- IntegerProperty: a 64-bit signed integer
- FloatProperty: a double precision floating point number
- BooleanProperty: a bool value
- DateTimeProperty: a datetime object. Note: App Engine always uses
UTC as the timezone
- DateProperty: a date object
- TimeProperty: a time object
- GeoPtProperty: a geographical location, i.e. (latitude, longitude)
- KeyProperty: a Cloud Datastore Key value, optionally constrained to
referring to a specific kind
- UserProperty: a User object (for backwards compatibility only)
- StructuredProperty: a field that is itself structured like an
entity; see below for more details
- LocalStructuredProperty: like StructuredProperty but the on-disk
representation is an opaque blob; unindexed
- ComputedProperty: a property whose value is computed from other
properties by a user-defined function. The property value is
written to Cloud Datastore so that it can be used in queries, but the
value from Cloud Datastore is not used when the entity is read back
- GenericProperty: a property whose type is not constrained; mostly
used by the Expando class (see below) but also usable explicitly
- JsonProperty: a property whose value is any object that can be
serialized using JSON; the value written to Cloud Datastore is a JSON
representation of that object
- PickleProperty: a property whose value is any object that can be
serialized using Python's pickle protocol; the value written to the
Cloud Datastore is the pickled representation of that object, using the
highest available pickle protocol
Most Property classes have similar constructor signatures. They
accept several optional keyword arguments:
- name=<string>: the name used to store the property value in the
datastore. Unlike the following options, this may also be given as
a positional argument
- indexed=<bool>: indicates whether the property should be indexed
(allowing queries on this property's value)
- repeated=<bool>: indicates that this property can have multiple
values in the same entity.
  - write_empty_list=<bool>: for repeated value properties, controls
    whether a property with no elements (the empty list) is written to
    Datastore: if true, the empty list is written; if false, nothing is
    written to Datastore.
- required=<bool>: indicates that this property must be given a value
- default=<value>: a default value if no explicit value is given
- choices=<list of values>: a list or tuple of allowable values
- validator=<function>: a general-purpose validation function. It
will be called with two arguments (prop, value) and should either
return the validated value or raise an exception. It is also
allowed for the function to modify the value, but calling it again
on the modified value should not modify the value further. (For
example: a validator that returns value.strip() or value.lower() is
fine, but one that returns value + '$' is not.)
- verbose_name=<value>: A human readable name for this property. This
human readable name can be used for html form labels.
The repeated and required/default options are mutually exclusive: a
repeated property cannot be required nor can it specify a default
value (the default is always an empty list and an empty list is always
an allowed value), but a required property can have a default.
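For example (an illustrative sketch combining several of these options)::
  class Article(Model):
    title = StringProperty(required=True)
    status = StringProperty(default='draft',
                            choices=['draft', 'published'])
    tags = StringProperty(repeated=True)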
Some property types have additional arguments. Some property types
do not support all options.
Repeated properties are always represented as Python lists; if there
is only one value, the list has only one element. When a new list is
assigned to a repeated property, all elements of the list are
validated. Since it is also possible to mutate lists in place,
repeated properties are re-validated before they are written to the
datastore.
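For example (an illustrative sketch; Person here is a hypothetical model
with a repeated StringProperty named hobbies)::
  guido = Person(hobbies=['python'])  # the assigned list is validated now
  guido.hobbies.append(42)            # in-place mutation is not checked...
  guido.put()                         # ...until the entity is written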
No validation happens when an entity is read from Cloud Datastore;
however property values read that have the wrong type (e.g. a string
value for an IntegerProperty) are ignored.
For non-repeated properties, None is always a possible value, and no
validation is called when the value is set to None. However for
required properties, writing the entity to Cloud Datastore requires
the value to be something other than None (and valid).
The StructuredProperty is different from most other properties; it
lets you define a sub-structure for your entities. The substructure
itself is defined using a model class, and the attribute value is an
instance of that model class. However it is not stored in the
datastore as a separate entity; instead, its attribute values are
included in the parent entity using a naming convention (the name of
the structured attribute followed by a dot followed by the name of the
subattribute). For example::
class Address(Model):
street = StringProperty()
city = StringProperty()
class Person(Model):
name = StringProperty()
address = StructuredProperty(Address)
p = Person(name='Harry Potter',
address=Address(street='4 Privet Drive',
city='Little Whinging'))
  k = p.put()
This would write a single 'Person' entity with three attributes (as
you could verify using the Datastore Viewer in the Admin Console)::
name = 'Harry Potter'
address.street = '4 Privet Drive'
address.city = 'Little Whinging'
Structured property types can be nested arbitrarily deep, but in a
hierarchy of nested structured property types, only one level can have
the repeated flag set. It is fine to have multiple structured
properties referencing the same model class.
It is also fine to use the same model class both as a top-level entity
class and as for a structured property; however queries for the model
class will only return the top-level entities.
The LocalStructuredProperty works similar to StructuredProperty on the
Python side. For example::
class Address(Model):
street = StringProperty()
city = StringProperty()
class Person(Model):
name = StringProperty()
address = LocalStructuredProperty(Address)
p = Person(name='Harry Potter',
address=Address(street='4 Privet Drive',
city='Little Whinging'))
  k = p.put()
However the data written to Cloud Datastore is different; it writes a
'Person' entity with a 'name' attribute as before and a single
'address' attribute whose value is a blob which encodes the Address
value (using the standard"protocol buffer" encoding).
Sometimes the set of properties is not known ahead of time. In such
cases you can use the Expando class. This is a Model subclass that
creates properties on the fly, both upon assignment and when loading
an entity from Cloud Datastore. For example::
class SuperPerson(Expando):
name = StringProperty()
superpower = StringProperty()
razorgirl = SuperPerson(name='Molly Millions',
superpower='bionic eyes, razorblade hands',
rasta_name='Steppin\' Razor',
alt_name='Sally Shears')
elastigirl = SuperPerson(name='Helen Parr',
superpower='stretchable body')
elastigirl.max_stretch = 30 # Meters
You can inspect the properties of an expando instance using the
_properties attribute:
>>> print razorgirl._properties.keys()
['rasta_name', 'name', 'superpower', 'alt_name']
>>> print elastigirl._properties
{'max_stretch': GenericProperty('max_stretch'),
'name': StringProperty('name'),
'superpower': StringProperty('superpower')}
Note: this property exists for plain Model instances too; it is just
not as interesting for those.
The Model class offers basic query support. You can create a Query
object by calling the query() class method. Iterating over a Query
object returns the entities matching the query one at a time.
Query objects are fully described in the docstring for query.py, but
there is one handy shortcut that is only available through
Model.query(): positional arguments are interpreted as filter
expressions which are combined through an AND operator. For example::
Person.query(Person.name == 'Harry Potter', Person.age >= 11)
is equivalent to::
Person.query().filter(Person.name == 'Harry Potter', Person.age >= 11)
Keyword arguments passed to .query() are passed along to the Query()
constructor.
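For example (an illustrative sketch; parent_key is a hypothetical
ancestor Key)::
  qry = Person.query(Person.age >= 11, ancestor=parent_key)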
It is possible to query for field values of structured properties. For
example::
qry = Person.query(Person.address.city == 'London')
A number of top-level functions also live in this module:
- transaction() runs a function inside a transaction
- get_multi() reads multiple entities at once
- put_multi() writes multiple entities at once
- delete_multi() deletes multiple entities at once
All these have a corresponding ``*_async()`` variant as well.
The ``*_multi_async()`` functions return a list of Futures.
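For example (an illustrative sketch)::
  keys = [Key('Person', 1), Key('Person', 2)]
  people = get_multi(keys)      # a list of entities (None for misses)
  futs = get_multi_async(keys)  # a list of Futures
  people = [f.get_result() for f in futs]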
And finally these (without async variants):
- in_transaction() tests whether you are currently running in a transaction
- @transactional decorates functions that should be run in a transaction
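For example (an illustrative sketch; acct_key is a hypothetical Key)::
  def deduct():
    acct = acct_key.get()
    acct.balance -= 100
    acct.put()
  transaction(deduct)  # or decorate deduct() with @transactional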
There are many other interesting features. For example, Model
subclasses may define pre-call and post-call hooks for most operations
(get, put, delete, allocate_ids), and Property classes may be
subclassed to suit various needs. Documentation for writing a
Property subclass is in the docstring for the Property class.
"""
import collections
import copy
import datetime
import logging
import os
import zlib
from google.appengine.ext.ndb import key as key_module
from google.appengine.ext.ndb import utils
import six
from six.moves import map
import six.moves.cPickle as pickle
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_query
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import entity_bytes_pb2 as entity_pb2
Key = key_module.Key
__all__ = ['Key', 'BlobKey', 'GeoPt', 'Rollback',
'Index', 'IndexState', 'IndexProperty',
'ModelAdapter', 'ModelAttribute',
'ModelKey', 'MetaModel', 'Model', 'Expando',
'transaction', 'transaction_async', 'in_transaction',
'transactional', 'transactional_async', 'transactional_tasklet',
'non_transactional',
'get_multi', 'get_multi_async',
'put_multi', 'put_multi_async',
'delete_multi', 'delete_multi_async',
'get_indexes', 'get_indexes_async',
'make_connection',
]
BlobKey = datastore_types.BlobKey
GeoPt = datastore_types.GeoPt
Rollback = datastore_errors.Rollback
class KindError(datastore_errors.BadValueError):
"""Raised when an implementation for a kind can't be found.
Also raised when the Kind is not an 8-bit string.
"""
class InvalidPropertyError(datastore_errors.Error):
"""Raised when a property is not applicable to a given use.
For example, a property must exist and be indexed to be used in a query's
projection or group by clause.
"""
BadProjectionError = InvalidPropertyError
class UnprojectedPropertyError(datastore_errors.Error):
"""Raised when getting a property value that's not in the projection."""
class ReadonlyPropertyError(datastore_errors.Error):
"""Raised when attempting to set a property value that is read-only."""
class ComputedPropertyError(ReadonlyPropertyError):
"""Raised when attempting to set a value to or delete a computed property."""
_MAX_LONG = key_module._MAX_LONG
_MAX_STRING_LENGTH = datastore_types._MAX_STRING_LENGTH
_DIR_MAP = {
entity_pb2.Index.Property.ASCENDING: 'asc',
entity_pb2.Index.Property.DESCENDING: 'desc',
}
_STATE_MAP = {
entity_pb2.CompositeIndex.ERROR: 'error',
entity_pb2.CompositeIndex.DELETED: 'deleting',
entity_pb2.CompositeIndex.READ_WRITE: 'serving',
entity_pb2.CompositeIndex.WRITE_ONLY: 'building',
}
class _NotEqualMixin(object):
"""Mix-in class that implements __ne__ in terms of __eq__."""
def __ne__(self, other):
"""Implement self != other as not(self == other)."""
eq = self.__eq__(other)
if eq is NotImplemented:
return NotImplemented
return not eq
class _NestedCounter(object):
""" A recursive counter for StructuredProperty deserialization.
Deserialization has some complicated rules to handle StructuredPropertys
that may or may not be empty. The simplest case is a leaf counter, where
the counter will return the index of the repeated value that last had this
  leaf property written. When a non-leaf counter is requested, this will return
  the max of all its leaf values. This is because the next index that a full
  non-leaf property may be written to comes after all indices that have part
  of that property written (otherwise, a partial entity would be overwritten).
Consider an evaluation of the following structure:
class B(model.Model):
c = model.IntegerProperty()
d = model.IntegerProperty()
class A(model.Model):
b = model.StructuredProperty(B)
class Foo(model.Model):
# top-level model
a = model.StructuredProperty(A, repeated=True)
Foo(a=[A(b=None),
A(b=B(c=1)),
A(b=None),
A(b=B(c=2, d=3))])
This will result in a serialized structure:
1) a.b = None
2) a.b.c = 1
3) a.b.d = None
4) a.b = None
5) a.b.c = 2
6) a.b.d = 3
The counter state should be the following:
a | a.b | a.b.c | a.b.d
0) - - - -
1) @1 1 - -
2) @2 @2 2 -
3) @2 @2 2 2
4) @3 @3 3 3
5) @4 @4 4 3
6) @4 @4 4 4
Here, @ indicates that this counter value is actually a calculated value.
It is equal to the MAX of its sub-counters.
Counter values may get incremented multiple times while deserializing a
property. This will happen if a child counter falls behind,
for example in steps 2 and 3.
During an increment of a parent node, all child nodes values are incremented
to match that of the parent, for example in step 4.
"""
def __init__(self):
self.__counter = 0
self.__sub_counters = collections.defaultdict(_NestedCounter)
def get(self, parts=None):
if parts:
return self.__sub_counters[parts[0]].get(parts[1:])
if self.__is_parent_node():
return max(v.get() for v in six.itervalues(self.__sub_counters))
return self.__counter
def increment(self, parts=None):
if parts:
self.__make_parent_node()
return self.__sub_counters[parts[0]].increment(parts[1:])
if self.__is_parent_node():
value = self.get() + 1
self._set(value)
return value
self.__counter += 1
return self.__counter
def _set(self, value):
"""Updates all descendants to a specified value."""
if self.__is_parent_node():
for child in six.itervalues(self.__sub_counters):
child._set(value)
else:
self.__counter = value
def _absolute_counter(self):
return self.__counter
def __is_parent_node(self):
return self.__counter == -1
def __make_parent_node(self):
self.__counter = -1
class IndexProperty(_NotEqualMixin):
"""Immutable object representing a single property in an index."""
@utils.positional(1)
def __new__(cls, name, direction):
"""Constructor."""
obj = object.__new__(cls)
obj.__name = name
obj.__direction = direction
return obj
@property
def name(self):
"""The property name being indexed, a string."""
return self.__name
@property
def direction(self):
"""The direction in the index for this property, 'asc' or 'desc'."""
return self.__direction
def __repr__(self):
"""Return a string representation."""
return '%s(name=%r, direction=%r)' % (self.__class__.__name__,
self.name,
self.direction)
def __eq__(self, other):
"""Compare two index properties for equality."""
if not isinstance(other, IndexProperty):
return NotImplemented
return self.name == other.name and self.direction == other.direction
def __hash__(self):
return hash((self.name, self.direction))
class Index(_NotEqualMixin):
"""Immutable object representing an index."""
@utils.positional(1)
def __new__(cls, kind, properties, ancestor):
"""Constructor."""
obj = object.__new__(cls)
obj.__kind = kind
obj.__properties = properties
obj.__ancestor = ancestor
return obj
@property
def kind(self):
"""The kind being indexed, a string."""
return self.__kind
@property
def properties(self):
"""A list of PropertyIndex objects giving the properties being indexed."""
return self.__properties
@property
def ancestor(self):
"""Whether this is an ancestor index, a bool."""
return self.__ancestor
def __repr__(self):
"""Return a string representation."""
parts = []
parts.append('kind=%r' % self.kind)
parts.append('properties=%r' % self.properties)
parts.append('ancestor=%s' % self.ancestor)
return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
def __eq__(self, other):
"""Compare two indexes."""
if not isinstance(other, Index):
return NotImplemented
return (self.kind == other.kind and
self.properties == other.properties and
self.ancestor == other.ancestor)
def __hash__(self):
return hash((self.kind, self.properties, self.ancestor))
class IndexState(_NotEqualMixin):
"""Immutable object representing and index and its state."""
@utils.positional(1)
def __new__(cls, definition, state, id):
"""Constructor."""
obj = object.__new__(cls)
obj.__definition = definition
obj.__state = state
obj.__id = id
return obj
@property
def definition(self):
"""An Index object describing the index."""
return self.__definition
@property
def state(self):
"""The index state, a string.
Possible values are 'error', 'deleting', 'serving' or 'building'.
"""
return self.__state
@property
def id(self):
"""The index ID, an integer."""
return self.__id
def __repr__(self):
"""Return a string representation."""
parts = []
parts.append('definition=%r' % self.definition)
parts.append('state=%r' % self.state)
parts.append('id=%d' % self.id)
return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
def __eq__(self, other):
"""Compare two index states."""
if not isinstance(other, IndexState):
return NotImplemented
return (self.definition == other.definition and
self.state == other.state and
self.id == other.id)
def __hash__(self):
return hash((self.definition, self.state, self.id))
class ModelAdapter(datastore_rpc.AbstractAdapter):
"""Conversions between 'our' Key and Model classes and protobufs.
This is needed to construct a Connection object, which in turn is
needed to construct a Context object.
See the base class docstring for more info about the signatures.
"""
def __init__(self, default_model=None, id_resolver=None):
"""Constructor.
Args:
default_model: If an implementation for the kind cannot be found, use
this model class. If none is specified, an exception will be thrown
(default).
id_resolver: A datastore_pbs.IdResolver that can resolve
application ids. This is only necessary when running on the Cloud
Datastore v1 API.
"""
try:
super(ModelAdapter, self).__init__(id_resolver)
except:
pass
self.default_model = default_model
self.want_pbs = 0
def __enter__(self):
self.want_pbs += 1
def __exit__(self, *unused_args):
self.want_pbs -= 1
def pb_to_key(self, pb):
return Key(reference=pb)
def key_to_pb(self, key):
return key.reference()
def pb_to_entity(self, pb):
key = None
kind = None
if len(pb.key.path.element):
key = Key(reference=pb.key)
kind = key.kind()
modelclass = Model._lookup_model(kind, self.default_model)
entity = modelclass._from_pb(pb, key=key, set_key=False)
if self.want_pbs:
entity._orig_pb = pb
return entity
def entity_to_pb(self, ent):
pb = ent._to_pb()
return pb
def pb_to_index(self, pb):
index_def = pb.definition
properties = [
IndexProperty(name=prop.name, direction=_DIR_MAP[prop.direction])
for prop in index_def.property
]
index = Index(
kind=index_def.entity_type,
properties=properties,
ancestor=bool(index_def.ancestor),
)
index_state = IndexState(
definition=index,
state=_STATE_MAP[pb.state],
id=pb.id,
)
return index_state
def make_connection(config=None, default_model=None,
_api_version=datastore_rpc._DATASTORE_V3,
_id_resolver=None):
"""Create a new Connection object with the right adapter.
Optionally you can pass in a datastore_rpc.Configuration object.
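  For example (an illustrative sketch)::
    conn = make_connection(default_model=Expando)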
"""
return datastore_rpc.Connection(
adapter=ModelAdapter(default_model, id_resolver=_id_resolver),
config=config,
_api_version=_api_version)
class ModelAttribute(object):
"""A Base class signifying the presence of a _fix_up() method."""
def _fix_up(self, cls, code_name):
pass
class _BaseValue(_NotEqualMixin):
"""A marker object wrapping a 'base type' value.
This is used to be able to tell whether ent._values[name] is a
user value (i.e. of a type that the Python code understands) or a
  base value (i.e. of a type that serialization understands).
User values are unwrapped; base values are wrapped in a
_BaseValue instance.
"""
__slots__ = ['b_val']
def __init__(self, b_val):
"""Constructor. Argument is the base value to be wrapped."""
assert b_val is not None, "Cannot wrap None"
assert not isinstance(b_val, list), repr(b_val)
self.b_val = b_val
def __repr__(self):
return '_BaseValue(%r)' % (self.b_val,)
def __eq__(self, other):
if not isinstance(other, _BaseValue):
return NotImplemented
return self.b_val == other.b_val
def __hash__(self):
raise TypeError('_BaseValue is not immutable')
class Property(ModelAttribute):
"""A class describing a typed, persisted attribute of a Cloud Datastore entity.
Not to be confused with Python's 'property' built-in.
This is just a base class; there are specific subclasses that
describe Properties of various types (and GenericProperty which
describes a dynamically typed Property).
All special Property attributes, even those considered 'public',
have names starting with an underscore, because StructuredProperty
uses the non-underscore attribute namespace to refer to nested
Property names; this is essential for specifying queries on
subproperties (see the module docstring).
The Property class and its predefined subclasses allow easy
subclassing using composable (or stackable) validation and
conversion APIs. These require some terminology definitions:
- A 'user value' is a value such as would be set and accessed by the
application code using standard attributes on the entity.
- A 'base value' is a value such as would be serialized to
and deserialized from Cloud Datastore.
The values stored in ent._values[name] and accessed by
_store_value() and _retrieve_value() can be either user values or
base values. To retrieve user values, use
_get_user_value(). To retrieve base values, use
_get_base_value(). In particular, _get_value() calls
_get_user_value(), and _serialize() effectively calls
_get_base_value().
To store a user value, just call _store_value(). To store a
base value, wrap the value in a _BaseValue() and then
call _store_value().
A Property subclass that wants to implement a specific
  transformation between user values and serializable values should
implement two methods, _to_base_type() and _from_base_type().
These should *NOT* call their super() method; super calls are taken
care of by _call_to_base_type() and _call_from_base_type().
This is what is meant by composable (or stackable) APIs.
The API supports 'stacking' classes with ever more sophisticated
user<-->base conversions: the user-->base conversion
goes from more sophisticated to less sophisticated, while the
base-->user conversion goes from less sophisticated to more
sophisticated. For example, see the relationship between
BlobProperty, TextProperty and StringProperty.
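  For example (an illustrative sketch of a composable subclass; the class
  name is hypothetical)::
    class LowerStringProperty(StringProperty):
      def _validate(self, value):
        # StringProperty's own _validate() still runs as part of the
        # composed chain; this method only normalizes case (idempotently).
        return value.lower()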
In addition to _to_base_type() and _from_base_type(), the
_validate() method is also a composable API.
The validation API distinguishes between 'lax' and 'strict' user
values. The set of lax values is a superset of the set of strict
values. The _validate() method takes a lax value and if necessary
converts it to a strict value. This means that when setting the
property value, lax values are accepted, while when getting the
property value, only strict values will be returned. If no
conversion is needed, _validate() may return None. If the argument
is outside the set of accepted lax values, _validate() should raise
an exception, preferably TypeError or
datastore_errors.BadValueError.
Example/boilerplate:
def _validate(self, value):
'Lax user value to strict user value.'
if not isinstance(value, <top type>):
raise TypeError(...) # Or datastore_errors.BadValueError(...).
def _to_base_type(self, value):
'(Strict) user value to base value.'
if isinstance(value, <user type>):
return <base type>(value)
def _from_base_type(self, value):
'base value to (strict) user value.'
if not isinstance(value, <base type>):
return <user type>(value)
Things that _validate(), _to_base_type() and _from_base_type()
do *not* need to handle:
- None: They will not be called with None (and if they return None,
this means that the value does not need conversion).
- Repeated values: The infrastructure (_get_user_value() and
_get_base_value()) takes care of calling
_from_base_type() or _to_base_type() for each list item in a
repeated value.
- Wrapping values in _BaseValue(): The wrapping and unwrapping is
taken care of by the infrastructure that calls the composable APIs.
- Comparisons: The comparison operations call _to_base_type() on
their operand.
- Distinguishing between user and base values: the
infrastructure guarantees that _from_base_type() will be called
with an (unwrapped) base value, and that
_to_base_type() will be called with a user value.
- Returning the original value: if any of these return None, the
    original value is kept. (Returning a different value not equal to
None will substitute the different value.)
"""
_code_name = None
_name = None
_indexed = True
_repeated = False
_required = False
_default = None
_choices = None
_validator = None
_verbose_name = None
_write_empty_list = False
__creation_counter_global = 0
_attributes = ['_name', '_indexed', '_repeated', '_required', '_default',
'_choices', '_validator', '_verbose_name',
'_write_empty_list']
_positional = 1
@utils.positional(1 + _positional)
def __init__(self, name=None, indexed=None, repeated=None,
required=None, default=None, choices=None, validator=None,
verbose_name=None, write_empty_list=None):
"""Constructor. For arguments see the module docstring."""
if name is not None:
if isinstance(name, six.text_type):
name = six.ensure_binary(name)
if not isinstance(name, six.binary_type):
raise TypeError('Name %r is not a string' % (name,))
if b'.' in name:
raise ValueError('Name %r cannot contain period characters' % (name,))
self._name = name
if indexed is not None:
self._indexed = indexed
if repeated is not None:
self._repeated = repeated
if required is not None:
self._required = required
if default is not None:
self._default = default
if verbose_name is not None:
self._verbose_name = verbose_name
if write_empty_list is not None:
self._write_empty_list = write_empty_list
if self._repeated and (self._required or self._default is not None):
raise ValueError('repeated is incompatible with required or default')
if choices is not None:
if not isinstance(choices, (list, tuple, set, frozenset)):
raise TypeError('choices must be a list, tuple or set; received %r' %
choices)
self._choices = frozenset(choices)
if validator is not None:
if not hasattr(validator, '__call__'):
raise TypeError('validator must be callable or None; received %r' %
validator)
self._validator = validator
Property.__creation_counter_global += 1
self._creation_counter = Property.__creation_counter_global
def __repr__(self):
"""Return a compact unambiguous string representation of a property."""
args = []
cls = self.__class__
for i, attr in enumerate(self._attributes):
val = getattr(self, attr)
if val is not getattr(cls, attr):
if isinstance(val, type):
s = val.__name__
else:
s = repr(val)
if i >= cls._positional:
if attr.startswith('_'):
attr = attr[1:]
s = '%s=%s' % (attr, s)
args.append(s)
s = '%s(%s)' % (self.__class__.__name__, ', '.join(args))
return s
def _datastore_type(self, value):
"""Internal hook used by property filters.
Sometimes the low-level query interface needs a specific data type
in order for the right filter to be constructed. See _comparison().
"""
return value
def _comparison(self, op, value):
"""Internal helper for comparison operators.
Args:
      op: The operator ('=', '<' etc.).
      value: The value to compare against.
Returns:
A FilterNode instance representing the requested comparison.
"""
if not self._indexed:
raise datastore_errors.BadFilterError(
'Cannot query for unindexed property %s' % self._name)
from google.appengine.ext.ndb.query import FilterNode
if value is not None:
value = self._do_validate(value)
value = self._call_to_base_type(value)
value = self._datastore_type(value)
return FilterNode(self._name, op, value)
def __eq__(self, value):
"""Return a FilterNode instance representing the '=' comparison."""
return self._comparison('=', value)
def __ne__(self, value):
"""Return a FilterNode instance representing the '!=' comparison."""
return self._comparison('!=', value)
def __lt__(self, value):
"""Return a FilterNode instance representing the '<' comparison."""
return self._comparison('<', value)
def __le__(self, value):
"""Return a FilterNode instance representing the '<=' comparison."""
return self._comparison('<=', value)
def __gt__(self, value):
"""Return a FilterNode instance representing the '>' comparison."""
return self._comparison('>', value)
def __ge__(self, value):
"""Return a FilterNode instance representing the '>=' comparison."""
return self._comparison('>=', value)
def _IN(self, value):
"""Comparison operator for the 'in' comparison operator.
The Python 'in' operator cannot be overloaded in the way we want
to, so we define a method. For example::
Employee.query(Employee.rank.IN([4, 5, 6]))
Note that the method is called ._IN() but may normally be invoked
as .IN(); ._IN() is provided for the case you have a
StructuredProperty with a model that has a Property named IN.
"""
if not self._indexed:
raise datastore_errors.BadFilterError(
'Cannot query for unindexed property %s' % self._name)
from google.appengine.ext.ndb.query import FilterNode
if not isinstance(value, (list, tuple, set, frozenset)):
raise datastore_errors.BadArgumentError(
'Expected list, tuple or set, got %r' % (value,))
values = []
for val in value:
if val is not None:
val = self._do_validate(val)
val = self._call_to_base_type(val)
val = self._datastore_type(val)
values.append(val)
return FilterNode(self._name, 'in', values)
IN = _IN
def __neg__(self):
"""Return a descending sort order on this Property.
For example::
Employee.query().order(-Employee.rank)
"""
return datastore_query.PropertyOrder(
self._name, datastore_query.PropertyOrder.DESCENDING)
def __pos__(self):
"""Return an ascending sort order on this Property.
Note that this is redundant but provided for consistency with
__neg__. For example, the following two are equivalent::
Employee.query().order(+Employee.rank)
Employee.query().order(Employee.rank)
"""
return datastore_query.PropertyOrder(self._name)
def _do_validate(self, value):
"""Call all validations on the value.
This calls the most derived _validate() method(s), then the custom
validator function, and then checks the choices. It returns the
value, possibly modified in an idempotent way, or raises an
exception.
Note that this does not call all composable _validate() methods.
It only calls _validate() methods up to but not including the
first _to_base_type() method, when the MRO is traversed looking
    for _validate() and _to_base_type() methods. (In other words, if a class
defines both _validate() and _to_base_type(), its _validate()
is called and then the search is aborted.)
Note that for a repeated Property this function should be called
for each item in the list, not for the list as a whole.
"""
if isinstance(value, _BaseValue):
return value
value = self._call_shallow_validation(value)
if self._validator is not None:
newvalue = self._validator(self, value)
if newvalue is not None:
value = newvalue
if self._choices is not None:
if value not in self._choices:
raise datastore_errors.BadValueError(
'Value %r for property %s is not an allowed choice' %
(value, self._name))
return value
def _fix_up(self, cls, code_name):
"""Internal helper called to tell the property its name.
This is called by _fix_up_properties() which is called by
MetaModel when finishing the construction of a Model subclass.
The name passed in is the name of the class attribute to which the
Property is assigned (a.k.a. the code name). Note that this means
that each Property instance must be assigned to (at most) one
class attribute. E.g. to declare three strings, you must call
    StringProperty() three times; you cannot write
foo = bar = baz = StringProperty()
"""
self._code_name = code_name
if self._name is None:
self._name = six.ensure_binary(code_name)
def _store_value(self, entity, value):
"""Internal helper to store a value in an entity for a Property.
This assumes validation has already taken place. For a repeated
Property the value should be a list.
"""
entity._values[self._name] = value
def _set_value(self, entity, value):
"""Internal helper to set a value in an entity for a Property.
This performs validation first. For a repeated Property the value
should be a list.
"""
if entity._projection:
raise ReadonlyPropertyError(
'You cannot set property values of a projection entity')
if self._repeated:
if not isinstance(value, (list, tuple, set, frozenset)):
raise datastore_errors.BadValueError('Expected list or tuple, got %r' %
(value,))
value = [self._do_validate(v) for v in value]
else:
if value is not None:
value = self._do_validate(value)
self._store_value(entity, value)
def _has_value(self, entity, unused_rest=None):
"""Internal helper to ask if the entity has a value for this Property."""
return self._name in entity._values
def _retrieve_value(self, entity, default=None):
"""Internal helper to retrieve the value for this Property from an entity.
This returns None if no value is set, or the default argument if
given. For a repeated Property this returns a list if a value is
set, otherwise None. No additional transformations are applied.
"""
return entity._values.get(self._name, default)
def _get_user_value(self, entity):
"""Return the user value for this property of the given entity.
This implies removing the _BaseValue() wrapper if present, and
if it is, calling all _from_base_type() methods, in the reverse
method resolution order of the property's class. It also handles
default values and repeated properties.
"""
return self._apply_to_values(entity, self._opt_call_from_base_type)
def _get_base_value(self, entity):
"""Return the base value for this property of the given entity.
This implies calling all _to_base_type() methods, in the method
resolution order of the property's class, and adding a
_BaseValue() wrapper, if one is not already present. (If one
is present, no work is done.) It also handles default values and
repeated properties.
"""
return self._apply_to_values(entity, self._opt_call_to_base_type)
def _get_base_value_unwrapped_as_list(self, entity):
"""Like _get_base_value(), but always returns a list.
Returns:
A new list of unwrapped base values. For an unrepeated
property, if the value is missing or None, returns [None]; for a
repeated property, if the original value is missing or None or
empty, returns [].
"""
wrapped = self._get_base_value(entity)
if self._repeated:
if wrapped is None:
return []
assert isinstance(wrapped, list)
return [w.b_val for w in wrapped]
else:
if wrapped is None:
return [None]
assert isinstance(wrapped, _BaseValue)
return [wrapped.b_val]
def _opt_call_from_base_type(self, value):
"""Call _from_base_type() if necessary.
If the value is a _BaseValue instance, unwrap it and call all
_from_base_type() methods. Otherwise, return the value
unchanged.
"""
if isinstance(value, _BaseValue):
value = self._call_from_base_type(value.b_val)
return value
def _value_to_repr(self, value):
"""Turn a value (base or not) into its repr().
This exists so that property classes can override it separately.
"""
val = self._opt_call_from_base_type(value)
return repr(val)
def _opt_call_to_base_type(self, value):
"""Call _to_base_type() if necessary.
If the value is a _BaseValue instance, return it unchanged.
Otherwise, call all _validate() and _to_base_type() methods and
wrap it in a _BaseValue instance.
"""
if not isinstance(value, _BaseValue):
value = _BaseValue(self._call_to_base_type(value))
return value
def _call_from_base_type(self, value):
"""Call all _from_base_type() methods on the value.
This calls the methods in the reverse method resolution order of
the property's class.
"""
methods = self._find_methods('_from_base_type', reverse=True)
call = self._apply_list(methods)
return call(value)
def _call_to_base_type(self, value):
"""Call all _validate() and _to_base_type() methods on the value.
This calls the methods in the method resolution order of the
property's class.
"""
methods = self._find_methods('_validate', '_to_base_type')
call = self._apply_list(methods)
return call(value)
def _call_shallow_validation(self, value):
"""Call the initial set of _validate() methods.
This is similar to _call_to_base_type() except it only calls
those _validate() methods that can be called without needing to
call _to_base_type().
An example: suppose the class hierarchy is A -> B -> C ->
Property, and suppose A defines _validate() only, but B and C
define _validate() and _to_base_type(). The full list of
methods called by _call_to_base_type() is::
A._validate()
B._validate()
B._to_base_type()
C._validate()
C._to_base_type()
This method will call A._validate() and B._validate() but not the
others.
"""
methods = []
for method in self._find_methods('_validate', '_to_base_type'):
if method.__name__ != '_validate':
break
methods.append(method)
call = self._apply_list(methods)
return call(value)
@classmethod
def _find_methods(cls, *names, **kwds):
"""Compute a list of composable methods.
Because this is a common operation and the class hierarchy is
static, the outcome is cached (assuming that for a particular list
of names the reversed flag is either always on, or always off).
Args:
*names: One or more method names.
reverse: Optional flag, default False; if True, the list is
reversed.
Returns:
A list of callable class method objects.
"""
reverse = kwds.pop('reverse', False)
assert not kwds, repr(kwds)
cache = cls.__dict__.get('_find_methods_cache')
if cache:
hit = cache.get(names)
if hit is not None:
return hit
else:
cls._find_methods_cache = cache = {}
methods = []
for c in cls.__mro__:
for name in names:
method = c.__dict__.get(name)
if method is not None:
methods.append(method)
if reverse:
methods.reverse()
cache[names] = methods
return methods
def _apply_list(self, methods):
"""Return a single callable that applies a list of methods to a value.
If a method returns None, the last value is kept; if it returns
some other value, that replaces the last value. Exceptions are
not caught.
"""
def call(value):
for method in methods:
newvalue = method(self, value)
if newvalue is not None:
value = newvalue
return value
return call
def _apply_to_values(self, entity, function):
"""Apply a function to the property value/values of a given entity.
This retrieves the property value, applies the function, and then
stores the value back. For a repeated property, the function is
applied separately to each of the values in the list. The
resulting value or list of values is both stored back in the
entity and returned from this method.
"""
value = self._retrieve_value(entity, self._default)
if self._repeated:
if value is None:
value = []
self._store_value(entity, value)
else:
value[:] = list(map(function, value))
else:
if value is not None:
newvalue = function(value)
if newvalue is not None and newvalue is not value:
self._store_value(entity, newvalue)
value = newvalue
return value
def _get_value(self, entity):
"""Internal helper to get the value for this Property from an entity.
For a repeated Property this initializes the value to an empty
list if it is not set.
"""
if entity._projection:
if six.ensure_text(self._name) not in entity._projection:
raise UnprojectedPropertyError(
'Property %s is not in the projection' % (self._name,))
return self._get_user_value(entity)
def _delete_value(self, entity):
"""Internal helper to delete the value for this Property from an entity.
Note that if no value exists this is a no-op; deleted values will
not be serialized but requesting their value will return None (or
an empty list in the case of a repeated Property).
"""
if self._name in entity._values:
del entity._values[self._name]
def _is_initialized(self, entity):
"""Internal helper to ask if the entity has a value for this Property.
This returns False if a value is stored but it is None.
"""
return (not self._required or
((self._has_value(entity) or self._default is not None) and
self._get_value(entity) is not None))
def __get__(self, entity, unused_cls=None):
"""Descriptor protocol: get the value from the entity."""
if entity is None:
return self
return self._get_value(entity)
def __set__(self, entity, value):
"""Descriptor protocol: set the value on the entity."""
self._set_value(entity, value)
def __delete__(self, entity):
"""Descriptor protocol: delete the value from the entity."""
self._delete_value(entity)
def _serialize(self, entity, pb, prefix='', parent_repeated=False,
projection=None):
"""Internal helper to serialize this property to a protocol buffer.
Subclasses may override this method.
Args:
entity: The entity, a Model (subclass) instance.
pb: The protocol buffer, an EntityProto instance.
prefix: Optional name prefix used for StructuredProperty
(if present, must end in '.').
parent_repeated: True if the parent (or an earlier ancestor)
is a repeated Property.
projection: A list or tuple of strings representing the projection for
the model instance, or None if the instance is not a projection.
"""
values = self._get_base_value_unwrapped_as_list(entity)
name = six.ensure_text(prefix) + six.ensure_text(self._name)
if projection and name not in projection:
return
if self._indexed:
create_prop = lambda: pb.property.add()
else:
create_prop = lambda: pb.raw_property.add()
if self._repeated and not values and self._write_empty_list:
p = create_prop()
p.name = name
p.multiple = False
p.meaning = entity_pb2.Property.EMPTY_LIST
p.value.SetInParent()
else:
for val in values:
p = create_prop()
p.name = name
p.multiple = self._repeated or parent_repeated
p.value.SetInParent()
v = p.value
if val is not None:
self._db_set_value(v, p, val)
if projection:
new_p = entity_pb2.Property()
new_p.name = p.name
new_p.meaning = entity_pb2.Property.INDEX_VALUE
new_p.multiple = False
new_p.value.CopyFrom(v)
p.CopyFrom(new_p)
def _deserialize(self, entity, p, unused_depth=1):
"""Internal helper to deserialize this property from a protocol buffer.
Subclasses may override this method.
Args:
entity: The entity, a Model (subclass) instance.
p: A Property Message object (a protocol buffer).
depth: Optional nesting depth, default 1 (unused here, but used
by some subclasses that override this method).
"""
if p.meaning == entity_pb2.Property.EMPTY_LIST:
self._store_value(entity, [])
return
val = self._db_get_value(p.value, p)
if val is not None:
val = _BaseValue(val)
if self._repeated:
if self._has_value(entity):
value = self._retrieve_value(entity)
assert isinstance(value, list), repr(value)
value.append(val)
else:
value = [val]
else:
value = val
self._store_value(entity, value)
def _prepare_for_put(self, entity):
pass
def _check_property(self, rest=None, require_indexed=True):
"""Internal helper to check this property for specific requirements.
Called by Model._check_properties().
Args:
rest: Optional subproperty to check, of the form 'name1.name2...nameN'.
Raises:
InvalidPropertyError if this property does not meet the given
requirements or if a subproperty is specified. (StructuredProperty
overrides this method to handle subproperties.)
"""
if require_indexed and not self._indexed:
raise InvalidPropertyError('Property %s is unindexed' % self._name)
if rest:
raise InvalidPropertyError('Referencing subproperty %s.%s '
'but %s is not a structured property' %
(self._name, rest, self._name))
def _get_for_dict(self, entity):
"""Retrieve the value like _get_value(), processed for _to_dict().
Property subclasses can override this if they want the dictionary
returned by entity._to_dict() to contain a different value. The
main use case is StructuredProperty and LocalStructuredProperty.
NOTES:
- If you override _get_for_dict() to return a different type, you
must override _validate() to accept values of that type and
convert them back to the original type.
- If you override _get_for_dict(), you must handle repeated values
and None correctly. (See _StructuredGetForDictMixin for an
example.) However, _validate() does not need to handle these.
"""
return self._get_value(entity)
def _validate_key(value, entity=None):
if not isinstance(value, Key):
raise datastore_errors.BadValueError('Expected Key, got %r' % value)
if entity and entity.__class__ not in (Model, Expando):
value_kind = six.ensure_str(value.kind(), encoding='utf-8')
entity_kind = six.ensure_str(entity._get_kind(), encoding='utf-8')
if value_kind != entity_kind:
raise KindError(
'Expected Key kind to be %s; received %s' % (entity_kind, value_kind))
return value
class ModelKey(Property):
"""Special property to store the Model key."""
def __init__(self):
super(ModelKey, self).__init__()
self._name = '__key__'
def _datastore_type(self, value):
return datastore_types.Key(value.urlsafe())
def _comparison(self, op, value):
if value is not None:
return super(ModelKey, self)._comparison(op, value)
raise datastore_errors.BadValueError(
"__key__ filter query can't be compared to None")
def _validate(self, value):
return _validate_key(value)
def _set_value(self, entity, value):
"""Setter for key attribute."""
if value is not None:
value = _validate_key(value, entity=entity)
value = entity._validate_key(value)
entity._entity_key = value
def _get_value(self, entity):
"""Getter for key attribute."""
return entity._entity_key
def _delete_value(self, entity):
"""Deleter for key attribute."""
entity._entity_key = None
class BooleanProperty(Property):
"""A Property whose value is a Python bool."""
def _validate(self, value):
if not isinstance(value, bool):
raise datastore_errors.BadValueError('Expected bool, got %r' %
(value,))
return value
def _db_set_value(self, v, unused_p, value):
if not isinstance(value, bool):
raise TypeError('BooleanProperty %s can only be set to bool values; '
'received %r' % (self._name, value))
v.booleanValue = value
def _db_get_value(self, v, unused_p):
if not v.HasField('booleanValue'):
return None
return bool(v.booleanValue)
class IntegerProperty(Property):
"""A Property whose value is a Python int or long (or bool)."""
def _validate(self, value):
if not isinstance(value, six.integer_types):
raise datastore_errors.BadValueError('Expected integer, got %r' %
(value,))
return int(value)
def _db_set_value(self, v, unused_p, value):
if not isinstance(value, six.integer_types + (bool,)):
raise TypeError('IntegerProperty %s can only be set to integer values; '
'received %r' % (self._name, value))
v.int64Value = value
def _db_get_value(self, v, unused_p):
if not v.HasField('int64Value'):
return None
return int(v.int64Value)
class FloatProperty(Property):
"""A Property whose value is a Python float.
Note: int, long and bool are also allowed.
"""
def _validate(self, value):
if not isinstance(value, six.integer_types + (float,)):
raise datastore_errors.BadValueError('Expected float, got %r' %
(value,))
return float(value)
def _db_set_value(self, v, unused_p, value):
if not isinstance(value, six.integer_types + (bool, float)):
raise TypeError('FloatProperty %s can only be set to integer or float '
'values; received %r' % (self._name, value))
v.doubleValue = float(value)
def _db_get_value(self, v, unused_p):
if not v.HasField('doubleValue'):
return None
return v.doubleValue
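# Illustrative usage sketch (comment only, not part of the library); the
# model and field names below are hypothetical:
#
#   class Account(Model):
#     active = BooleanProperty(default=True)
#     visits = IntegerProperty()
#     score = FloatProperty()
#
# _validate() runs on assignment, so e.g. Account(visits='3') raises
# BadValueError, while Account(score=3) is accepted and coerced to 3.0.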
_MEANING_URI_COMPRESSED = 'ZLIB'
class _CompressedValue(_NotEqualMixin):
"""A marker object wrapping compressed values."""
__slots__ = ['z_val']
def __init__(self, z_val):
"""Constructor. Argument is a string returned by zlib.compress()."""
assert isinstance(z_val, six.binary_type), repr(z_val)
self.z_val = z_val
def __repr__(self):
return '_CompressedValue(%s)' % repr(self.z_val)
def __eq__(self, other):
if not isinstance(other, _CompressedValue):
return NotImplemented
return self.z_val == other.z_val
def __hash__(self):
raise TypeError('_CompressedValue is not immutable')
class BlobProperty(Property):
"""A Property whose value is a byte string. It may be compressed."""
_indexed = False
_compressed = False
_attributes = Property._attributes + ['_compressed']
@utils.positional(1 + Property._positional)
def __init__(self, name=None, compressed=False, **kwds):
super(BlobProperty, self).__init__(name=name, **kwds)
self._compressed = compressed
if compressed and self._indexed:
raise NotImplementedError('BlobProperty %s cannot be compressed and '
'indexed at the same time.' % self._name)
def _value_to_repr(self, value):
long_repr = super(BlobProperty, self)._value_to_repr(value)
prefix = long_repr[0] if long_repr[0] != "'" else ''
i = 2 if prefix else 1
j = len(long_repr) - 1
content = long_repr[i:j]
if len(content) > _MAX_STRING_LENGTH:
long_repr = "%s'%s...'" % (prefix, content[:_MAX_STRING_LENGTH])
return long_repr
def _validate(self, value):
if not isinstance(value, six.binary_type):
raise datastore_errors.BadValueError(
'Expected %s, got %s' % (six.binary_type, type(value)))
if (self._indexed and
not isinstance(self, TextProperty) and
len(value) > _MAX_STRING_LENGTH):
raise datastore_errors.BadValueError(
'Indexed value %s must be at most %d bytes' %
(self._name, _MAX_STRING_LENGTH))
def _to_base_type(self, value):
if self._compressed:
return _CompressedValue(zlib.compress(value))
def _from_base_type(self, value):
if isinstance(value, _CompressedValue):
return zlib.decompress(value.z_val)
def _datastore_type(self, value):
return datastore_types.ByteString(value)
def _db_set_value(self, v, p, value):
if isinstance(value, _CompressedValue):
self._db_set_compressed_meaning(p)
value = value.z_val
else:
self._db_set_uncompressed_meaning(p)
v.stringValue = value
def _db_set_compressed_meaning(self, p):
p.meaning_uri = _MEANING_URI_COMPRESSED
p.meaning = entity_pb2.Property.BLOB
def _db_set_uncompressed_meaning(self, p):
if self._indexed:
p.meaning = entity_pb2.Property.BYTESTRING
else:
p.meaning = entity_pb2.Property.BLOB
def _db_get_value(self, v, p):
if not v.HasField('stringValue'):
return None
value = v.stringValue
if p.meaning_uri == _MEANING_URI_COMPRESSED:
value = _CompressedValue(value)
return value
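# Sketch of the compressed round trip (editorial comment; model name is made
# up): with BlobProperty(compressed=True), _to_base_type() wraps
# zlib.compress(value) in _CompressedValue and _from_base_type() reverses it:
#
#   class Upload(Model):
#     payload = BlobProperty(compressed=True)
#
#   ent = Upload(payload=b'x' * 10000)   # stored compressed, read back raw
#
# Compression is transparent to callers; only the wire format changes.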
class TextProperty(BlobProperty):
"""An unindexed Property whose value is a text string of unlimited length."""
def _validate(self, value):
if isinstance(value, six.binary_type):
try:
length = len(value)
value = six.ensure_text(value)
except UnicodeError:
raise datastore_errors.BadValueError('Expected valid UTF-8, got %r' %
(value,))
elif isinstance(value, six.text_type):
length = len(value.encode('utf-8'))
else:
raise datastore_errors.BadValueError('Expected string, got %r' %
(value,))
if self._indexed and length > _MAX_STRING_LENGTH:
raise datastore_errors.BadValueError(
'Indexed value %s must be at most %d bytes' %
(self._name, _MAX_STRING_LENGTH))
def _to_base_type(self, value):
if isinstance(value, six.text_type):
return value.encode('utf-8')
def _from_base_type(self, value):
if isinstance(value, six.binary_type):
try:
return six.text_type(value, 'utf-8')
except UnicodeDecodeError:
pass
def _db_set_uncompressed_meaning(self, p):
if not self._indexed:
p.meaning = entity_pb2.Property.TEXT
class StringProperty(TextProperty):
"""An indexed Property whose value is a text string of limited length."""
_indexed = True
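# The practical distinction (illustrative comment): StringProperty is indexed
# and capped at _MAX_STRING_LENGTH bytes of UTF-8; TextProperty is unindexed
# and unbounded. A hypothetical model:
#
#   class Article(Model):
#     title = StringProperty()   # filterable/sortable, length-limited
#     body = TextProperty()      # unlimited, but cannot be queried on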
class GeoPtProperty(Property):
"""A Property whose value is a GeoPt."""
def _validate(self, value):
if not isinstance(value, GeoPt):
raise datastore_errors.BadValueError('Expected GeoPt, got %r' %
(value,))
def _db_set_value(self, v, p, value):
if not isinstance(value, GeoPt):
raise TypeError('GeoPtProperty %s can only be set to GeoPt values; '
'received %r' % (self._name, value))
p.meaning = entity_pb2.Property.GEORSS_POINT
pv = v.pointvalue
pv.x = value.lat
pv.y = value.lon
def _db_get_value(self, v, unused_p):
if not v.HasField('pointvalue'):
return None
pv = v.pointvalue
return GeoPt(pv.x, pv.y)
def _unpack_user(v):
"""Internal helper to unpack a User value from a protocol buffer."""
uv = v.uservalue
email = six.ensure_text(uv.email)
auth_domain = six.ensure_text(uv.auth_domain)
obfuscated_gaiaid = six.ensure_text(uv.obfuscated_gaiaid)
federated_identity = None
if uv.HasField('federated_identity'):
federated_identity = six.text_type(uv.federated_identity.decode('utf-8'))
value = users.User(email=email,
_auth_domain=auth_domain,
_user_id=obfuscated_gaiaid,
federated_identity=federated_identity)
return value
class PickleProperty(BlobProperty):
"""A Property whose value is any picklable Python object."""
def _to_base_type(self, value):
if os.environ.get('NDB_USE_CROSS_COMPATIBLE_PICKLE_PROTOCOL', False):
protocol = 2
else:
protocol = pickle.HIGHEST_PROTOCOL
return pickle.dumps(value, protocol)
def _from_base_type(self, value):
try:
return pickle.loads(value)
except UnicodeDecodeError:
if int(os.environ.get('NDB_PY2_UNPICKLE_COMPAT', '0')):
return pickle.loads(value, encoding='bytes')
raise
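# Hedged note on the protocol selection above: setting the
# NDB_USE_CROSS_COMPATIBLE_PICKLE_PROTOCOL environment variable forces pickle
# protocol 2, which both Python 2 and Python 3 can read. Typical usage
# (hypothetical model):
#
#   class Session(Model):
#     state = PickleProperty()
#
#   Session(state={'cart': [1, 2, 3]}).put()   # any picklable object works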
class JsonProperty(BlobProperty):
"""A property whose value is any Json-encodable Python object."""
_json_type = None
@utils.positional(1 + BlobProperty._positional)
def __init__(self, name=None, compressed=False, json_type=None, **kwds):
super(JsonProperty, self).__init__(name=name, compressed=compressed, **kwds)
self._json_type = json_type
def _validate(self, value):
if self._json_type is not None and not isinstance(value, self._json_type):
raise TypeError('JSON property must be a %s' % self._json_type)
def _to_base_type(self, value):
try:
import json
except ImportError:
import simplejson as json
return six.ensure_binary(json.dumps(value, separators=(',', ':')))
def _from_base_type(self, value):
try:
import json
except ImportError:
import simplejson as json
return json.loads(value)
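# Illustrative JsonProperty usage (comment only; names are made up). The
# optional json_type restricts the Python type accepted by _validate():
#
#   class Config(Model):
#     settings = JsonProperty(json_type=dict)
#
#   Config(settings={'a': 1}).put()    # stored as b'{"a":1}'
#   Config(settings=[1, 2])            # raises TypeError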
class UserProperty(Property):
"""A Property whose value is a User object.
Note: this exists for backwards compatibility with existing
Cloud Datastore schemas only; we do not recommend storing User objects
directly in Cloud Datastore, but instead recommend storing the
user.user_id() value.
"""
_attributes = Property._attributes + ['_auto_current_user',
'_auto_current_user_add']
_auto_current_user = False
_auto_current_user_add = False
@utils.positional(1 + Property._positional)
def __init__(self, name=None, auto_current_user=False,
auto_current_user_add=False, **kwds):
super(UserProperty, self).__init__(name=name, **kwds)
if self._repeated:
if auto_current_user:
raise ValueError('UserProperty could use auto_current_user and be '
'repeated, but there would be no point.')
elif auto_current_user_add:
raise ValueError('UserProperty could use auto_current_user_add and be '
'repeated, but there would be no point.')
self._auto_current_user = auto_current_user
self._auto_current_user_add = auto_current_user_add
def _validate(self, value):
if not isinstance(value, users.User):
raise datastore_errors.BadValueError('Expected User, got %r' %
(value,))
def _prepare_for_put(self, entity):
if (self._auto_current_user or
(self._auto_current_user_add and not self._has_value(entity))):
value = users.get_current_user()
if value is not None:
self._store_value(entity, value)
def _db_set_value(self, v, p, value):
datastore_types.PackUser(p.name, value, v)
def _db_get_value(self, v, unused_p):
if not v.HasField('uservalue'):
return None
return _unpack_user(v)
class KeyProperty(Property):
"""A Property whose value is a Key object.
Optional keyword argument: kind=<kind>, to require that keys
assigned to this property always have the indicated kind. May be a
string or a Model subclass.
"""
_attributes = Property._attributes + ['_kind']
_kind = None
@utils.positional(2 + Property._positional)
def __init__(self, *args, **kwds):
name = kind = None
for arg in args:
if isinstance(arg, six.string_types):
if name is not None:
raise TypeError('You can only specify one name')
name = arg
elif isinstance(arg, type) and issubclass(arg, Model):
if kind is not None:
raise TypeError('You can only specify one kind')
kind = arg
elif arg is not None:
raise TypeError('Unexpected positional argument: %r' % (arg,))
if name is None:
name = kwds.pop('name', None)
elif 'name' in kwds:
raise TypeError('You can only specify name once')
if kind is None:
kind = kwds.pop('kind', None)
elif 'kind' in kwds:
raise TypeError('You can only specify kind once')
if kind is not None:
if isinstance(kind, type) and issubclass(kind, Model):
kind = kind._get_kind()
if isinstance(kind, (six.text_type, bytes)):
kind = six.ensure_str(kind)
if not isinstance(kind, str):
raise TypeError('kind must be a Model class or a string')
super(KeyProperty, self).__init__(name, **kwds)
self._kind = kind
def _datastore_type(self, value):
return datastore_types.Key(value.urlsafe())
def _validate(self, value):
if not isinstance(value, Key):
raise datastore_errors.BadValueError('Expected Key, got %r' % (value,))
if not value.id():
raise datastore_errors.BadValueError('Expected complete Key, got %r' %
(value,))
if self._kind is not None:
if value.kind() != self._kind:
raise datastore_errors.BadValueError(
'Expected Key with kind=%r, got %r' % (self._kind, value))
def _db_set_value(self, v, unused_p, value):
if not isinstance(value, Key):
raise TypeError('KeyProperty %s can only be set to Key values; '
'received %r' % (self._name, value))
ref = value.reference()
v.referencevalue.SetInParent()
rv = v.referencevalue
rv.app = ref.app
if ref.HasField('name_space'):
rv.name_space = ref.name_space
for elem in ref.path.element:
pe = rv.pathelement.add()
if elem.HasField('type'):
pe.type = elem.type
if elem.HasField('id'):
pe.id = elem.id
elif elem.HasField('name'):
pe.name = elem.name
def _db_get_value(self, v, unused_p):
if not v.HasField('referencevalue'):
return None
ref = entity_pb2.Reference()
rv = v.referencevalue
if rv.HasField('app'):
ref.app = rv.app
if rv.HasField('name_space'):
ref.name_space = rv.name_space
ref.path.SetInParent()
path = ref.path
for elem in rv.pathelement:
e = path.element.add()
if elem.HasField('type'):
e.type = elem.type
if elem.HasField('id'):
e.id = elem.id
elif elem.HasField('name'):
e.name = elem.name
return Key(reference=ref)
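# Illustrative KeyProperty usage (hypothetical models). Passing kind= pins
# the keys this property will accept, given either as a class or a string:
#
#   class Author(Model):
#     pass
#
#   class Book(Model):
#     author = KeyProperty(kind=Author)   # same as KeyProperty(kind='Author')
#
# Assigning a Key of any other kind raises BadValueError in _validate().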
class BlobKeyProperty(Property):
"""A Property whose value is a BlobKey object."""
def _validate(self, value):
if not isinstance(value, datastore_types.BlobKey):
raise datastore_errors.BadValueError('Expected BlobKey, got %r' %
(value,))
def _db_set_value(self, v, p, value):
if not isinstance(value, datastore_types.BlobKey):
raise TypeError('BlobKeyProperty %s can only be set to BlobKey values; '
'received %r' % (self._name, value))
p.meaning = entity_pb2.Property.BLOBKEY
v.stringValue = six.ensure_binary(str(value))
def _db_get_value(self, v, unused_p):
if not v.HasField('stringValue'):
return None
return datastore_types.BlobKey(six.ensure_text(v.stringValue))
_EPOCH = datetime.datetime.utcfromtimestamp(0)
class DateTimeProperty(Property):
"""A Property whose value is a datetime object.
Note: Unlike Django, auto_now_add can be overridden by setting the
value before writing the entity. And unlike classic db, auto_now
does not supply a default value. Also unlike classic db, when the
entity is written, the property values are updated to match what
was written. Finally, beware that this also updates the value in
the in-process cache, *and* that auto_now_add may interact weirdly
with transaction retries (a retry of a property with auto_now_add
set will reuse the value that was set on the first try).
"""
_attributes = Property._attributes + ['_auto_now', '_auto_now_add']
_auto_now = False
_auto_now_add = False
@utils.positional(1 + Property._positional)
def __init__(self, name=None, auto_now=False, auto_now_add=False, **kwds):
super(DateTimeProperty, self).__init__(name=name, **kwds)
if self._repeated:
if auto_now:
raise ValueError('DateTimeProperty %s could use auto_now and be '
'repeated, but there would be no point.' % self._name)
elif auto_now_add:
raise ValueError('DateTimeProperty %s could use auto_now_add and be '
'repeated, but there would be no point.' % self._name)
self._auto_now = auto_now
self._auto_now_add = auto_now_add
def _validate(self, value):
if not isinstance(value, datetime.datetime):
raise datastore_errors.BadValueError('Expected datetime, got %r' %
(value,))
def _now(self):
return datetime.datetime.utcnow()
def _prepare_for_put(self, entity):
if (self._auto_now or
(self._auto_now_add and not self._has_value(entity))):
value = self._now()
self._store_value(entity, value)
def _db_set_value(self, v, p, value):
if not isinstance(value, datetime.datetime):
raise TypeError('DateTimeProperty %s can only be set to datetime values; '
'received %r' % (self._name, value))
if value.tzinfo is not None:
raise NotImplementedError('DateTimeProperty %s can only support UTC. '
'Please derive a new Property to support '
'alternative timezones.' % self._name)
dt = value - _EPOCH
ival = dt.microseconds + 1000000 * (dt.seconds + 24 * 3600 * dt.days)
v.int64Value = ival
p.meaning = entity_pb2.Property.GD_WHEN
def _db_get_value(self, v, unused_p):
if not v.HasField('int64Value'):
return None
ival = v.int64Value
return _EPOCH + datetime.timedelta(microseconds=ival)
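# A sketch of the on-disk encoding used above (editorial comment): datetimes
# are stored as microseconds since the Unix epoch, i.e. for dt = value - _EPOCH
#
#   ival = dt.microseconds + 1000000 * (dt.seconds + 24 * 3600 * dt.days)
#
# Typical auto-timestamp usage (hypothetical model):
#
#   class Post(Model):
#     created = DateTimeProperty(auto_now_add=True)   # set once, on first put
#     updated = DateTimeProperty(auto_now=True)       # refreshed on every put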
def _date_to_datetime(value):
"""Convert a date to a datetime for Cloud Datastore storage.
Args:
value: A datetime.date object.
Returns:
A datetime object with time set to 0:00.
"""
if not isinstance(value, datetime.date):
raise TypeError('Cannot convert to datetime; expected date value, '
'received %s' % value)
return datetime.datetime(value.year, value.month, value.day)
def _time_to_datetime(value):
"""Convert a time to a datetime for Cloud Datastore storage.
Args:
value: A datetime.time object.
Returns:
A datetime object with date set to 1970-01-01.
"""
if not isinstance(value, datetime.time):
raise TypeError('Cannot convert to datetime; expected time value, '
'received %s' % value)
return datetime.datetime(1970, 1, 1,
value.hour, value.minute, value.second,
value.microsecond)
class DateProperty(DateTimeProperty):
"""A Property whose value is a date object."""
def _validate(self, value):
if not isinstance(value, datetime.date):
raise datastore_errors.BadValueError('Expected date, got %r' %
(value,))
def _to_base_type(self, value):
assert isinstance(value, datetime.date), repr(value)
return _date_to_datetime(value)
def _from_base_type(self, value):
assert isinstance(value, datetime.datetime), repr(value)
return value.date()
def _now(self):
return datetime.datetime.utcnow().date()
class TimeProperty(DateTimeProperty):
"""A Property whose value is a time object."""
def _validate(self, value):
if not isinstance(value, datetime.time):
raise datastore_errors.BadValueError('Expected time, got %r' %
(value,))
def _to_base_type(self, value):
assert isinstance(value, datetime.time), repr(value)
return _time_to_datetime(value)
def _from_base_type(self, value):
assert isinstance(value, datetime.datetime), repr(value)
return value.time()
def _now(self):
return datetime.datetime.utcnow().time()
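# DateProperty and TimeProperty reuse the DateTimeProperty wire format
# (editorial comment): dates become datetimes at 00:00 via _date_to_datetime(),
# times become datetimes on 1970-01-01 via _time_to_datetime(), and
# _from_base_type() slices the relevant part back out. Only the base type
# changes; indexing and querying behave as for DateTimeProperty.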
class _StructuredGetForDictMixin(Property):
"""Mixin class so *StructuredProperty can share _get_for_dict().
The behavior here is that sub-entities are converted to dictionaries
by calling to_dict() on them (also doing the right thing for
repeated properties).
NOTE: Even though the _validate() method in StructuredProperty and
LocalStructuredProperty are identical, they cannot be moved into
this shared base class. The reason is subtle: _validate() is not a
regular method, but treated specially by _call_to_base_type() and
_call_shallow_validation(), and the class where it occurs matters
if it also defines _to_base_type().
"""
def _get_for_dict(self, entity):
value = self._get_value(entity)
if self._repeated:
value = [v._to_dict() for v in value]
elif value is not None:
value = value._to_dict()
return value
class StructuredProperty(_StructuredGetForDictMixin):
"""A Property whose value is itself an entity.
The values of the sub-entity are indexed and can be queried.
See the module docstring for details.
"""
_modelclass = None
_attributes = ['_modelclass'] + Property._attributes
_positional = 1 + Property._positional
@utils.positional(1 + _positional)
def __init__(self, modelclass, name=None, **kwds):
super(StructuredProperty, self).__init__(name=name, **kwds)
if self._repeated:
if modelclass._has_repeated:
raise TypeError('This StructuredProperty cannot use repeated=True '
'because its model class (%s) contains repeated '
'properties (directly or indirectly).' %
modelclass.__name__)
self._modelclass = modelclass
def _get_value(self, entity):
"""Override _get_value() to *not* raise UnprojectedPropertyError."""
value = self._get_user_value(entity)
if value is None and entity._projection:
return super(StructuredProperty, self)._get_value(entity)
return value
def __getattr__(self, attrname):
"""Dynamically get a subproperty."""
prop = self._modelclass._properties.get(attrname)
if prop is None or prop._code_name != attrname:
for prop in self._modelclass._properties.values():
if prop._code_name == attrname:
break
else:
prop = None
if prop is None:
raise AttributeError('Model subclass %s has no attribute %s' %
(self._modelclass.__name__, attrname))
prop_copy = copy.copy(prop)
prop_copy._name = self._name + b'.' + prop_copy._name
setattr(self, attrname, prop_copy)
return prop_copy
def _comparison(self, op, value):
if op != '=':
raise datastore_errors.BadFilterError(
'StructuredProperty filter can only use ==')
if not self._indexed:
raise datastore_errors.BadFilterError(
'Cannot query for unindexed StructuredProperty %s' % self._name)
from google.appengine.ext.ndb.query import ConjunctionNode, PostFilterNode
from google.appengine.ext.ndb.query import RepeatedStructuredPropertyPredicate
if value is None:
from google.appengine.ext.ndb.query import FilterNode
return FilterNode(self._name, op, value)
value = self._do_validate(value)
value = self._call_to_base_type(value)
filters = []
match_keys = []
for prop in six.itervalues(self._modelclass._properties):
vals = prop._get_base_value_unwrapped_as_list(value)
if prop._repeated:
if vals:
raise datastore_errors.BadFilterError(
'Cannot query for non-empty repeated property %s' % prop._name)
continue
assert isinstance(vals, list) and len(vals) == 1, repr(vals)
val = vals[0]
if val is not None:
altprop = getattr(self, prop._code_name)
filt = altprop._comparison(op, val)
filters.append(filt)
match_keys.append(altprop._name)
if not filters:
raise datastore_errors.BadFilterError(
'StructuredProperty filter without any values')
if len(filters) == 1:
return filters[0]
if self._repeated:
pb = value._to_pb(allow_partial=True)
pred = RepeatedStructuredPropertyPredicate(
match_keys, pb, self._name + b'.')
filters.append(PostFilterNode(pred))
return ConjunctionNode(*filters)
def _IN(self, value):
if not isinstance(value, (list, tuple, set, frozenset)):
raise datastore_errors.BadArgumentError(
'Expected list, tuple or set, got %r' % (value,))
from google.appengine.ext.ndb.query import DisjunctionNode, FalseNode
filters = [self._comparison('=', val) for val in value]
if not filters:
return FalseNode()
else:
return DisjunctionNode(*filters)
IN = _IN
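# Illustrative filtering on sub-properties (hypothetical models):
#
#   class Address(Model):
#     city = StringProperty()
#
#   class Person(Model):
#     address = StructuredProperty(Address)
#
#   Person.query(Person.address.city == 'Paris')
#
# __getattr__ above manufactures Person.address.city as a renamed copy of
# Address.city, so the filter compiles to a query on 'address.city'.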
def _validate(self, value):
if isinstance(value, dict):
return self._modelclass(**value)
if not isinstance(value, self._modelclass):
raise datastore_errors.BadValueError('Expected %s instance, got %r' %
(self._modelclass.__name__, value))
def _has_value(self, entity, rest=None):
ok = super(StructuredProperty, self)._has_value(entity)
if ok and rest:
lst = self._get_base_value_unwrapped_as_list(entity)
if len(lst) != 1:
raise RuntimeError('Failed to retrieve sub-entity of StructuredProperty'
' %s' % self._name)
subent = lst[0]
if subent is None:
return True
subprop = subent._properties.get(rest[0])
if subprop is None:
ok = False
else:
ok = subprop._has_value(subent, rest[1:])
return ok
def _serialize(self, entity, pb, prefix=b'', parent_repeated=False,
projection=None):
values = self._get_base_value_unwrapped_as_list(entity)
for value in values:
if value is not None:
for unused_name, prop in sorted(six.iteritems(value._properties)):
prop._serialize(
value,
pb,
prefix=prefix + self._name + b'.',
parent_repeated=self._repeated or parent_repeated,
projection=projection)
else:
super(StructuredProperty, self)._serialize(
entity, pb, prefix=prefix, parent_repeated=parent_repeated,
projection=projection)
def _deserialize(self, entity, p, depth=1):
if not self._repeated:
subentity = self._retrieve_value(entity)
if subentity is None:
subentity = self._modelclass()
self._store_value(entity, _BaseValue(subentity))
cls = self._modelclass
if isinstance(subentity, _BaseValue):
subentity = subentity.b_val
if not isinstance(subentity, cls):
raise RuntimeError('Cannot deserialize StructuredProperty %s; value '
'retrieved not a %s instance %r' %
(self._name, cls.__name__, subentity))
indexed = p.meaning_uri != _MEANING_URI_COMPRESSED
prop = subentity._get_property_for(p, depth=depth, indexed=indexed)
if prop is None:
self._store_value(entity, None)
return
prop._deserialize(subentity, p, depth + 1)
return
name = p.name
parts = name.split('.')
if len(parts) <= depth:
raise RuntimeError('StructuredProperty %s expected to find properties '
'separated by periods at a depth of %i; received %r' %
(self._name, depth, parts))
next = parts[depth]
rest = parts[depth + 1:]
prop = self._modelclass._properties.get(next)
prop_is_fake = False
if prop is None:
if rest:
logging.warn('Skipping unknown structured subproperty (%s) '
'in repeated structured property (%s of %s)',
name, self._name, entity.__class__.__name__)
return
compressed = p.meaning_uri == _MEANING_URI_COMPRESSED
prop = GenericProperty(next, compressed=compressed)
prop._code_name = next
prop_is_fake = True
if not hasattr(entity, '_subentity_counter'):
entity._subentity_counter = _NestedCounter()
counter = entity._subentity_counter
counter_path = parts[depth - 1:]
next_index = counter.get(counter_path)
subentity = None
if self._has_value(entity):
while next_index < self._get_value_size(entity):
subentity = self._get_base_value_at_index(entity, next_index)
if not isinstance(subentity, self._modelclass):
raise TypeError('sub-entities must be instances '
'of their Model class.')
if not prop._has_value(subentity, rest):
break
next_index = counter.increment(counter_path)
else:
subentity = None
counter.increment(counter_path)
if not subentity:
subentity = self._modelclass()
values = self._retrieve_value(entity, self._default)
if values is None:
self._store_value(entity, [])
values = self._retrieve_value(entity, self._default)
values.append(_BaseValue(subentity))
if prop_is_fake:
subentity._clone_properties()
subentity._properties[six.ensure_text(prop._name)] = prop
prop._deserialize(subentity, p, depth + 1)
def _prepare_for_put(self, entity):
values = self._get_base_value_unwrapped_as_list(entity)
for value in values:
if value is not None:
value._prepare_for_put()
def _check_property(self, rest=None, require_indexed=True):
"""Override for Property._check_property().
Raises:
InvalidPropertyError if no subproperty is specified or if something
is wrong with the subproperty.
"""
if not rest:
raise InvalidPropertyError(
'Structured property %s requires a subproperty' % self._name)
self._modelclass._check_properties([rest], require_indexed=require_indexed)
def _get_base_value_at_index(self, entity, index):
assert self._repeated
value = self._retrieve_value(entity, self._default)
value[index] = self._opt_call_to_base_type(value[index])
return value[index].b_val
def _get_value_size(self, entity):
values = self._retrieve_value(entity, self._default)
if values is None:
return 0
return len(values)
class LocalStructuredProperty(_StructuredGetForDictMixin, BlobProperty):
"""Substructure that is serialized to an opaque blob.
This looks like StructuredProperty on the Python side, but is
written like a BlobProperty in Cloud Datastore. It is not indexed
and you cannot query for subproperties. On the other hand, the
on-disk representation is more efficient and can be made even more
efficient by passing compressed=True, which compresses the blob
data using zlib.
"""
_indexed = False
_modelclass = None
_keep_keys = False
_attributes = ['_modelclass'] + BlobProperty._attributes + ['_keep_keys']
_positional = 1 + BlobProperty._positional
@utils.positional(1 + _positional)
def __init__(self, modelclass,
name=None, compressed=False, keep_keys=False,
**kwds):
super(LocalStructuredProperty, self).__init__(name=name,
compressed=compressed,
**kwds)
if self._indexed:
raise NotImplementedError('Cannot index LocalStructuredProperty %s.' %
self._name)
self._modelclass = modelclass
self._keep_keys = keep_keys
def _validate(self, value):
if isinstance(value, dict):
return self._modelclass(**value)
if not isinstance(value, self._modelclass):
raise datastore_errors.BadValueError('Expected %s instance, got %r' %
(self._modelclass.__name__, value))
def _to_base_type(self, value):
if isinstance(value, self._modelclass):
pb = value._to_pb(set_key=self._keep_keys)
return pb.SerializePartialToString()
def _from_base_type(self, value):
if not isinstance(value, self._modelclass):
pb = entity_pb2.EntityProto()
pb.MergeFromString(value)
if not self._keep_keys:
pb.ClearField('key')
return self._modelclass._from_pb(pb)
def _prepare_for_put(self, entity):
value = self._get_user_value(entity)
if value is not None:
if self._repeated:
for subent in value:
if subent is not None:
subent._prepare_for_put()
else:
value._prepare_for_put()
def _db_set_uncompressed_meaning(self, p):
p.meaning = entity_pb2.Property.ENTITY_PROTO
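# Rule of thumb (editorial comment): use StructuredProperty when you need to
# query on sub-fields, and LocalStructuredProperty when you only read/write
# the sub-entity whole and want the cheaper opaque-blob storage, e.g.:
#
#   class Person(Model):
#     home = LocalStructuredProperty(Address, compressed=True)
#
# Address here refers to the hypothetical model sketched earlier; sub-fields
# of `home` cannot appear in query filters.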
class GenericProperty(Property):
"""A Property whose value can be (almost) any basic type.
This is mainly used for Expando and for orphans (values present in
Cloud Datastore but not represented in the Model subclass) but can
also be used explicitly for properties with dynamically-typed
values.
This supports compressed=True, which is only effective for str
values (not for unicode), and implies indexed=False.
"""
_compressed = False
_attributes = Property._attributes + ['_compressed']
@utils.positional(1 + Property._positional)
def __init__(self, name=None, compressed=False, **kwds):
if compressed:
kwds.setdefault('indexed', False)
super(GenericProperty, self).__init__(name=name, **kwds)
self._compressed = compressed
if compressed and self._indexed:
raise NotImplementedError('GenericProperty %s cannot be compressed and '
'indexed at the same time.' % self._name)
def _to_base_type(self, value):
if self._compressed and isinstance(value, six.binary_type):
return _CompressedValue(zlib.compress(value))
def _from_base_type(self, value):
if isinstance(value, _CompressedValue):
return zlib.decompress(value.z_val)
def _validate(self, value):
if self._indexed:
if isinstance(value, six.text_type):
value = value.encode('utf-8')
if (isinstance(value, (six.text_type, six.binary_type)) and
len(value) > _MAX_STRING_LENGTH):
raise datastore_errors.BadValueError(
'Indexed value %s must be at most %d bytes' %
(self._name, _MAX_STRING_LENGTH))
def _db_get_value(self, v, p):
if v.HasField('stringValue'):
sval = v.stringValue
meaning = p.meaning
if meaning == entity_pb2.Property.BLOBKEY:
sval = BlobKey(sval)
elif meaning == entity_pb2.Property.BLOB:
if p.meaning_uri == _MEANING_URI_COMPRESSED:
sval = _CompressedValue(sval)
elif meaning == entity_pb2.Property.ENTITY_PROTO:
pb = entity_pb2.EntityProto()
pb.MergeFromString(sval)
modelclass = Expando
if len(pb.key.path.element):
kind = six.ensure_str(pb.key.path.element[-1].type)
modelclass = Model._kind_map.get(kind, modelclass)
sval = modelclass._from_pb(pb)
elif meaning != entity_pb2.Property.BYTESTRING:
try:
sval.decode('ascii')
except UnicodeDecodeError:
try:
sval = six.text_type(sval.decode('utf-8'))
except UnicodeDecodeError:
pass
return sval
elif v.HasField('int64Value'):
ival = v.int64Value
if p.meaning == entity_pb2.Property.GD_WHEN:
return _EPOCH + datetime.timedelta(microseconds=ival)
return ival
elif v.HasField('booleanValue'):
return bool(v.booleanValue)
elif v.HasField('doubleValue'):
return v.doubleValue
elif v.HasField('referencevalue'):
rv = v.referencevalue
app = rv.app
namespace = rv.name_space
pairs = [(elem.type, elem.id or elem.name) for elem in rv.pathelement]
return Key(pairs=pairs, app=app, namespace=namespace)
elif v.HasField('pointvalue'):
pv = v.pointvalue
return GeoPt(pv.x, pv.y)
elif v.HasField('uservalue'):
return _unpack_user(v)
else:
return None
def _db_set_value(self, v, p, value):
if isinstance(value, six.binary_type):
v.stringValue = value
elif isinstance(value, six.text_type):
v.stringValue = six.ensure_binary(value)
if not self._indexed:
p.meaning = entity_pb2.Property.TEXT
elif isinstance(value, bool):
v.booleanValue = value
elif isinstance(value, six.integer_types):
if not (-_MAX_LONG <= value < _MAX_LONG):
raise TypeError('Property %s can only accept 64-bit integers; '
'received %s' % (self._name, value))
v.int64Value = value
elif isinstance(value, float):
v.doubleValue = value
elif isinstance(value, Key):
ref = value.reference()
rv = v.referencevalue
rv.app = ref.app
if ref.HasField('name_space'):
rv.name_space = ref.name_space
for elem in ref.path.element:
pe = rv.pathelement.add()
if elem.HasField('type'):
pe.type = elem.type
if elem.HasField('id'):
pe.id = elem.id
elif elem.HasField('name'):
pe.name = elem.name
elif isinstance(value, datetime.datetime):
if value.tzinfo is not None:
raise NotImplementedError('Property %s can only support UTC. '
'Please derive a new Property to support '
'alternative timezones.' % self._name)
dt = value - _EPOCH
ival = dt.microseconds + 1000000 * (dt.seconds + 24 * 3600 * dt.days)
v.int64Value = ival
p.meaning = entity_pb2.Property.GD_WHEN
elif isinstance(value, GeoPt):
p.meaning = entity_pb2.Property.GEORSS_POINT
pv = v.pointvalue
pv.x = value.lat
pv.y = value.lon
elif isinstance(value, users.User):
datastore_types.PackUser(p.name, value, v)
elif isinstance(value, BlobKey):
v.stringValue = six.ensure_binary(str(value))
p.meaning = entity_pb2.Property.BLOBKEY
elif isinstance(value, Model):
set_key = value._key is not None
pb = value._to_pb(set_key=set_key)
value = pb.SerializePartialToString()
v.stringValue = value
p.meaning = entity_pb2.Property.ENTITY_PROTO
elif isinstance(value, _CompressedValue):
value = value.z_val
v.stringValue = value
p.meaning_uri = _MEANING_URI_COMPRESSED
p.meaning = entity_pb2.Property.BLOB
else:
raise NotImplementedError('Property %s does not support %s types.' %
(self._name, type(value)))
class ComputedProperty(GenericProperty):
"""A Property whose value is determined by a user-supplied function.
Computed properties cannot be set directly, but are instead generated by a
function when required. They are useful to provide fields in Cloud Datastore
that can be used for filtering or sorting without having to manually set the
value in code - for example, sorting on the length of a BlobProperty, or
using an equality filter to check if another field is not empty.
ComputedProperty can be declared as a regular property, passing a function as
the first argument, or it can be used as a decorator for the function that
does the calculation.
Example:
>>> class DatastoreFile(Model):
... name = StringProperty()
... name_lower = ComputedProperty(lambda self: self.name.lower())
...
... data = BlobProperty()
...
... @ComputedProperty
... def size(self):
... return len(self.data)
...
... def _compute_hash(self):
... return hashlib.sha1(self.data).hexdigest()
... hash = ComputedProperty(_compute_hash, name='sha1')
"""
def __init__(self, func, name=None, indexed=None,
repeated=None, verbose_name=None):
"""Constructor.
Args:
func: A function that takes one argument, the model instance, and returns
a calculated value.
"""
super(ComputedProperty, self).__init__(name=name, indexed=indexed,
repeated=repeated,
verbose_name=verbose_name)
self._func = func
def _set_value(self, entity, value):
raise ComputedPropertyError("Cannot assign to a ComputedProperty")
def _delete_value(self, entity):
raise ComputedPropertyError("Cannot delete a ComputedProperty")
def _get_value(self, entity):
if entity._projection and six.ensure_text(self._name) in entity._projection:
return super(ComputedProperty, self)._get_value(entity)
value = self._func(entity)
self._store_value(entity, value)
return value
def _prepare_for_put(self, entity):
self._get_value(entity)
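# Caveat worth spelling out (editorial comment): _prepare_for_put() above
# recomputes the value on every put, so a ComputedProperty reflects the state
# of the entity at write time. Reading it on an in-memory entity also
# recomputes (and caches) it, except under a projection query, where the
# stored value is returned instead.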
class MetaModel(type):
"""Metaclass for Model.
This exists to fix up the properties -- they need to know their name.
This is accomplished by calling the class's _fix_up_properties() method.
"""
def __init__(cls, name, bases, classdict):
super(MetaModel, cls).__init__(name, bases, classdict)
cls._fix_up_properties()
def __repr__(cls):
props = []
for _, prop in sorted(six.iteritems(cls._properties)):
props.append('%s=%r' % (prop._code_name, prop))
return '%s<%s>' % (cls.__name__, ', '.join(props))
class Model(six.with_metaclass(MetaModel, _NotEqualMixin)):
"""A class describing Cloud Datastore entities.
Model instances are usually called entities. All model classes
inheriting from Model automatically have MetaModel as their
metaclass, so that the properties are fixed up properly once the
class is defined.
Because of this, you cannot use the same Property object to describe
multiple properties -- you must create separate Property objects for
each property. E.g. this does not work::
wrong_prop = StringProperty()
class Wrong(Model):
wrong1 = wrong_prop
wrong2 = wrong_prop
The kind is normally equal to the class name (exclusive of the
module name or any other parent scope). To override the kind,
define a class method named _get_kind(), as follows::
class MyModel(Model):
@classmethod
def _get_kind(cls):
return 'AnotherKind'
"""
_properties = None
_has_repeated = False
_kind_map = {}
_entity_key = None
_values = None
_projection = ()
_key = ModelKey()
key = _key
def __init__(*args, **kwds):
"""Creates a new instance of this model (a.k.a. an entity).
The new entity must be written to Cloud Datastore using an explicit
call to .put().
Keyword Args:
key: Key instance for this model. If key is used, id and parent must
be None.
id: Key id for this model. If id is used, key must be None.
parent: Key instance for the parent model or None for a top-level one.
If parent is used, key must be None.
namespace: Optional namespace.
app: Optional app ID.
**kwds: Keyword arguments mapping to properties of this model.
Note: you cannot define a property named key; the .key attribute
always refers to the entity's key. But you can define properties
named id or parent. Values for the latter cannot be passed
through the constructor, but can be assigned to entity attributes
after the entity has been created.
"""
if len(args) > 1:
raise TypeError('Model constructor takes no positional arguments.')
(self,) = args
get_arg = self.__get_arg
key = get_arg(kwds, 'key')
id = get_arg(kwds, 'id')
app = get_arg(kwds, 'app')
namespace = get_arg(kwds, 'namespace')
parent = get_arg(kwds, 'parent')
projection = get_arg(kwds, 'projection')
if key is not None:
if (id is not None or parent is not None or
app is not None or namespace is not None):
raise datastore_errors.BadArgumentError(
'Model constructor given key= does not accept '
'id=, app=, namespace=, or parent=.')
self._key = _validate_key(key, entity=self)
elif (id is not None or parent is not None or
app is not None or namespace is not None):
self._key = Key(self._get_kind(), id,
parent=parent, app=app, namespace=namespace)
self._values = {}
self._set_attributes(kwds)
if projection:
self._set_projection(projection)
@classmethod
def __get_arg(cls, kwds, kwd):
"""Internal helper method to parse keywords that may be property names."""
alt_kwd = '_' + kwd
if alt_kwd in kwds:
return kwds.pop(alt_kwd)
if kwd in kwds:
obj = getattr(cls, kwd, None)
if not isinstance(obj, Property) or isinstance(obj, ModelKey):
return kwds.pop(kwd)
return None
def __getstate__(self):
return self._to_pb().SerializeToString()
def __setstate__(self, serialized_pb):
pb = entity_pb2.EntityProto.FromString(serialized_pb)
self.__init__()
self.__class__._from_pb(pb, set_key=False, ent=self)
def _populate(self, **kwds):
"""Populate an instance from keyword arguments.
Each keyword argument will be used to set a corresponding
property. Keywords must refer to valid property names. This is
similar to passing keyword arguments to the Model constructor,
except that no provisions for key, id or parent are made.
"""
self._set_attributes(kwds)
populate = _populate
def _set_attributes(self, kwds):
"""Internal helper to set attributes from keyword arguments.
Expando overrides this.
"""
cls = self.__class__
for name, value in six.iteritems(kwds):
prop = getattr(cls, name)
if not isinstance(prop, Property):
raise TypeError('Cannot set non-property %s' % name)
prop._set_value(self, value)
def _find_uninitialized(self):
"""Internal helper to find uninitialized properties.
Returns:
A set of property names.
"""
return set(name for name, prop in six.iteritems(self._properties)
if not prop._is_initialized(self))
def _check_initialized(self):
"""Internal helper to check for uninitialized properties.
Raises:
BadValueError if it finds any.
"""
baddies = self._find_uninitialized()
if baddies:
raise datastore_errors.BadValueError(
'Entity has uninitialized properties: %s' % ', '.join(baddies))
def __repr__(self):
"""Return an unambiguous string representation of an entity."""
args = []
for prop in six.itervalues(self._properties):
if prop._has_value(self):
val = prop._retrieve_value(self)
if val is None:
rep = 'None'
elif prop._repeated:
reprs = [prop._value_to_repr(v) for v in val]
if reprs:
reprs[0] = '[' + reprs[0]
reprs[-1] = reprs[-1] + ']'
rep = ', '.join(reprs)
else:
rep = '[]'
else:
rep = prop._value_to_repr(val)
args.append('%s=%s' % (prop._code_name, rep))
args.sort()
if self._key is not None:
args.insert(0, 'key=%r' % self._key)
if self._projection:
args.append('_projection=%r' % (self._projection,))
s = '%s(%s)' % (self.__class__.__name__, ', '.join(args))
return s
@classmethod
def _get_kind(cls):
"""Return the kind name for this class.
This defaults to cls.__name__; users may override this to give a
class a different on-disk name than its class name.
"""
return cls.__name__
@classmethod
def _class_name(cls):
"""A hook for polymodel to override.
For regular models and expandos this is just an alias for
_get_kind(). For PolyModel subclasses, it returns the class name
(as set in the 'class' attribute thereof), whereas _get_kind()
returns the kind (the class name of the root class of a specific
PolyModel hierarchy).
"""
return cls._get_kind()
@classmethod
def _default_filters(cls):
"""Return an iterable of filters that are always to be applied.
This is used by PolyModel to quietly insert a filter for the
current class name.
"""
return ()
@classmethod
def _reset_kind_map(cls):
"""Clear the kind map. Useful for testing."""
keep = {}
for name, value in six.iteritems(cls._kind_map):
if name.startswith('__') and name.endswith('__'):
keep[name] = value
cls._kind_map.clear()
cls._kind_map.update(keep)
@classmethod
def _lookup_model(cls, kind, default_model=None):
"""Get the model class for the kind.
Args:
kind: A string representing the name of the kind to lookup.
default_model: The model class to use if the kind can't be found.
Returns:
The model class for the requested kind.
Raises:
KindError: The kind was not found and no default_model was provided.
"""
modelclass = cls._kind_map.get(kind, default_model)
if modelclass is None:
raise KindError(
"No model class found for kind '%s'. Did you forget to import it?" %
kind)
return modelclass
def _has_complete_key(self):
"""Return whether this entity has a complete key."""
return self._key is not None and self._key.id() is not None
has_complete_key = _has_complete_key
def __hash__(self):
"""Dummy hash function.
Raises:
Always TypeError to emphasize that entities are mutable.
"""
raise TypeError('Model is not immutable')
def __eq__(self, other):
"""Compare two entities of the same class for equality."""
if other.__class__ is not self.__class__:
return NotImplemented
if self._key != other._key:
return False
return self._equivalent(other)
def _equivalent(self, other):
"""Compare two entities of the same class, excluding keys."""
if other.__class__ is not self.__class__:
raise NotImplementedError('Cannot compare different model classes. '
'%s is not %s' % (self.__class__.__name__,
other.__class__.__name__))
if set(self._projection) != set(other._projection):
return False
if len(self._properties) != len(other._properties):
return False
my_prop_names = set(six.iterkeys(self._properties))
their_prop_names = set(six.iterkeys(other._properties))
if my_prop_names != their_prop_names:
return False
if self._projection:
my_prop_names = set(self._projection)
for name in my_prop_names:
name = six.ensure_text(name)
if '.' in name:
name, _ = name.split('.', 1)
my_value = self._properties[name]._get_value(self)
their_value = other._properties[name]._get_value(other)
if my_value != their_value:
return False
return True
def _to_pb(self, pb=None, allow_partial=False, set_key=True):
"""Internal helper to turn an entity into an EntityProto protobuf."""
if not allow_partial:
self._check_initialized()
if pb is None:
pb = entity_pb2.EntityProto()
if set_key:
self._key_to_pb(pb)
for unused_name, prop in sorted(six.iteritems(self._properties)):
prop._serialize(self, pb, projection=self._projection)
return pb
def _key_to_pb(self, pb):
"""Internal helper to copy the key into a protobuf."""
key = self._key
if key is None:
pairs = [(self._get_kind(), None)]
ref = key_module._ReferenceFromPairs(pairs, reference=pb.key)
else:
ref = key.reference()
pb.key.CopyFrom(ref)
pb.entity_group.SetInParent()
group = pb.entity_group
if key is not None and key.id():
elem = ref.path.element[0]
if elem.id or elem.name:
group.element.add().CopyFrom(elem)
@classmethod
def _from_pb(cls, pb, set_key=True, ent=None, key=None):
"""Internal helper to create an entity from an EntityProto protobuf."""
if not isinstance(pb, entity_pb2.EntityProto):
raise TypeError('pb must be a EntityProto; received %r' % pb)
if ent is None:
ent = cls()
if key is None and len(pb.key.path.element):
key = Key(reference=pb.key)
if key is not None and (set_key or key.id() or key.parent()):
ent._key = key
_property_map = {}
projection = []
for indexed, plist in ((True, pb.property), (False, pb.raw_property)):
for p in plist:
prop_name = six.ensure_text(p.name)
if p.meaning == entity_pb2.Property.INDEX_VALUE:
projection.append(prop_name)
property_map_key = (prop_name, indexed)
if property_map_key not in _property_map:
_property_map[property_map_key] = ent._get_property_for(p, indexed)
_property_map[property_map_key]._deserialize(ent, p)
ent._set_projection(projection)
return ent
def _set_projection(self, projection):
by_prefix = {}
for propname in projection:
if '.' in propname:
head, tail = propname.split('.', 1)
if head in by_prefix:
by_prefix[head].append(tail)
else:
by_prefix[head] = [tail]
self._projection = tuple(projection)
for propname, proj in six.iteritems(by_prefix):
prop = self._properties.get(propname)
subval = prop._get_base_value_unwrapped_as_list(self)
for item in subval:
assert item is not None
item._set_projection(proj)
def _get_property_for(self, p, indexed=True, depth=0):
"""Internal helper to get the Property for a protobuf-level property."""
parts = p.name.split('.')
if len(parts) <= depth:
return None
next = parts[depth]
prop = self._properties.get(next)
if prop is None:
prop = self._fake_property(p, next, indexed)
return prop
def _clone_properties(self):
"""Internal helper to clone self._properties if necessary."""
cls = self.__class__
if self._properties is cls._properties:
self._properties = dict(cls._properties)
def _fake_property(self, p, next, indexed=True):
"""Internal helper to create a fake Property."""
self._clone_properties()
if p.name != next and not p.name.endswith('.' + next):
prop = StructuredProperty(Expando, next)
prop._store_value(self, _BaseValue(Expando()))
else:
compressed = p.meaning_uri == _MEANING_URI_COMPRESSED
prop = GenericProperty(
next, repeated=p.multiple, indexed=indexed, compressed=compressed)
prop._code_name = next
self._properties[prop._name.decode('utf-8')] = prop
return prop
@utils.positional(1)
def _to_dict(self, include=None, exclude=None):
"""Return a dict containing the entity's property values.
Args:
include: Optional set of property names to include, default all.
exclude: Optional set of property names to skip, default none.
A name contained in both include and exclude is excluded.
"""
if (include is not None and
not isinstance(include, (list, tuple, set, frozenset))):
raise TypeError('include should be a list, tuple or set')
if (exclude is not None and
not isinstance(exclude, (list, tuple, set, frozenset))):
raise TypeError('exclude should be a list, tuple or set')
values = {}
for prop in six.itervalues(self._properties):
name = prop._code_name
if include is not None and name not in include:
continue
if exclude is not None and name in exclude:
continue
try:
values[name] = prop._get_for_dict(self)
except UnprojectedPropertyError:
pass
return values
to_dict = _to_dict
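# Illustrative _to_dict() usage (hypothetical models from earlier sketches):
#
#   ent = Person(address=Address(city='Paris'))
#   ent.to_dict()                       # {'address': {'city': 'Paris'}}
#   ent.to_dict(include={'address'})    # same, other properties skipped
#
# Structured values are converted via _get_for_dict(), hence the nested dict.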
@classmethod
def _fix_up_properties(cls):
"""Fix up the properties by calling their _fix_up() method.
Note: This is called by MetaModel, but may also be called manually
after dynamically updating a model class.
"""
kind = cls._get_kind()
if not isinstance(kind, (six.text_type, six.binary_type)):
raise KindError('Class %s defines a _get_kind() method that returns '
'a non-string (%r)' % (cls.__name__, kind))
if six.PY2 and not isinstance(kind, six.binary_type):
try:
kind = kind.encode('ascii')
except UnicodeEncodeError:
raise KindError('Class %s defines a _get_kind() method that returns '
'a Unicode string (%r); please encode using utf-8' %
(cls.__name__, kind))
cls._properties = {}
if cls.__module__ == __name__:
return
for name in set(dir(cls)):
attr = getattr(cls, name, None)
if isinstance(attr, ModelAttribute) and not isinstance(attr, ModelKey):
if name.startswith('_'):
raise TypeError('ModelAttribute %s cannot begin with an underscore '
'character. _ prefixed attributes are reserved for '
'temporary Model instance values.' % name)
attr._fix_up(cls, name)
if isinstance(attr, Property):
if (attr._repeated or
(isinstance(attr, StructuredProperty) and
attr._modelclass._has_repeated)):
cls._has_repeated = True
cls._properties[six.ensure_text(attr._name)] = attr
cls._update_kind_map()
@classmethod
def _update_kind_map(cls):
"""Update the kind map to include this class."""
k = cls._get_kind()
cls._kind_map[k] = cls
def _prepare_for_put(self):
if self._properties:
for _, prop in sorted(six.iteritems(self._properties)):
prop._prepare_for_put(self)
@classmethod
def _check_properties(cls, property_names, require_indexed=True):
"""Internal helper to check the given properties exist and meet specified
requirements.
Called from query.py.
Args:
property_names: List or tuple of property names -- each being a string,
possibly containing dots (to address subproperties of structured
properties).
Raises:
InvalidPropertyError if one of the properties is invalid.
AssertionError if the argument is not a list or tuple of strings.
"""
assert isinstance(property_names, (list, tuple)), repr(property_names)
for name in property_names:
assert isinstance(name, (six.text_type, six.binary_type)), repr(name)
name = six.ensure_text(name)
if '.' in name:
name, rest = name.split('.', 1)
else:
rest = None
prop = cls._properties.get(name)
if prop is None:
cls._unknown_property(name)
else:
prop._check_property(rest, require_indexed=require_indexed)
@classmethod
def _unknown_property(cls, name):
"""Internal helper to raise an exception for an unknown property name.
This is called by _check_properties(). It is overridden by
Expando, where this is a no-op.
Raises:
InvalidPropertyError.
"""
raise InvalidPropertyError('Unknown property %s' % name)
def _validate_key(self, key):
"""Validation for _key attribute (designed to be overridden).
Args:
key: Proposed Key to use for entity.
Returns:
A valid key.
"""
return key
@classmethod
def _query(cls, *args, **kwds):
"""Create a Query object for this class.
Args:
distinct: Optional bool, shorthand for group_by = projection.
*args: Used to apply an initial filter.
**kwds: Passed to the Query() constructor.
Returns:
A Query object.
"""
if 'distinct' in kwds:
if 'group_by' in kwds:
raise TypeError(
'cannot use distinct= and group_by= at the same time')
projection = kwds.get('projection')
if not projection:
raise TypeError(
'cannot use distinct= without projection=')
if kwds.pop('distinct'):
kwds['group_by'] = projection
from google.appengine.ext.ndb.query import Query
qry = Query(kind=cls._get_kind(), **kwds)
qry = qry.filter(*cls._default_filters())
qry = qry.filter(*args)
return qry
query = _query
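# Illustrative distinct projection query (comment only):
#
#   Person.query(projection=[Person.address.city], distinct=True)
#
# expands to group_by=[Person.address.city] per the branch above; combining
# distinct= with an explicit group_by= raises TypeError.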
@classmethod
def _gql(cls, query_string, *args, **kwds):
"""Run a GQL query."""
from google.appengine.ext.ndb.query import gql
return gql('SELECT * FROM %s %s' % (cls._class_name(), query_string),
*args, **kwds)
gql = _gql
def _put(self, **ctx_options):
"""Write this entity to Cloud Datastore.
If the operation creates or completes a key, the entity's key
attribute is set to the new, complete key.
Returns:
The key for the entity. This is always a complete key.
"""
return self._put_async(**ctx_options).get_result()
put = _put
def _put_async(self, **ctx_options):
"""Write this entity to Cloud Datastore.
This is the asynchronous version of Model._put().
"""
if self._projection:
raise datastore_errors.BadRequestError('Cannot put a partial entity')
from google.appengine.ext.ndb import tasklets
ctx = tasklets.get_context()
self._prepare_for_put()
if self._key is None:
self._key = Key(self._get_kind(), None)
self._pre_put_hook()
fut = ctx.put(self, **ctx_options)
post_hook = self._post_put_hook
if not self._is_default_hook(Model._default_post_put_hook, post_hook):
fut.add_immediate_callback(post_hook, fut)
return fut
put_async = _put_async
@classmethod
def _get_or_insert(*args, **kwds):
"""Transactionally retrieves an existing entity or creates a new one.
Positional Args:
name: Key name to retrieve or create.
Keyword Args:
namespace: Optional namespace.
app: Optional app ID.
parent: Parent entity key, if any.
context_options: ContextOptions object (not keyword args!) or None.
**kwds: Keyword arguments to pass to the constructor of the model class
if an instance for the specified key name does not already exist. If
an instance with the supplied key_name and parent already exists,
these arguments will be discarded.
Returns:
Existing instance of Model class with the specified key name and parent
or a new one that has just been created.
"""
cls, args = args[0], args[1:]
return cls._get_or_insert_async(*args, **kwds).get_result()
get_or_insert = _get_or_insert
@classmethod
def _get_or_insert_async(*args, **kwds):
"""Transactionally retrieves an existing entity or creates a new one.
This is the asynchronous version of Model._get_or_insert().
"""
from google.appengine.ext.ndb import tasklets
cls, name = args
get_arg = cls.__get_arg
app = get_arg(kwds, 'app')
namespace = get_arg(kwds, 'namespace')
parent = get_arg(kwds, 'parent')
context_options = get_arg(kwds, 'context_options')
if not isinstance(name, six.string_types):
raise TypeError('name must be a string; received %r' % name)
elif not name:
raise ValueError('name cannot be an empty string.')
key = Key(cls, name, app=app, namespace=namespace, parent=parent)
@tasklets.tasklet
def internal_tasklet():
@tasklets.tasklet
def txn():
ent = yield key.get_async(options=context_options)
if ent is None:
ent = cls(**kwds)
ent._key = key
yield ent.put_async(options=context_options)
raise tasklets.Return(ent)
if in_transaction():
ent = yield txn()
else:
ent = yield key.get_async(options=context_options)
if ent is None:
ent = yield transaction_async(txn)
raise tasklets.Return(ent)
return internal_tasklet()
get_or_insert_async = _get_or_insert_async
@classmethod
def _allocate_ids(cls, size=None, max=None, parent=None, **ctx_options):
"""Allocates a range of key IDs for this model class.
Args:
size: Number of IDs to allocate. Either size or max can be specified,
not both.
max: Maximum ID to allocate. Either size or max can be specified,
not both.
parent: Parent key for which the IDs will be allocated.
**ctx_options: Context options.
Returns:
A tuple with (start, end) for the allocated range, inclusive.
"""
return cls._allocate_ids_async(size=size, max=max, parent=parent,
**ctx_options).get_result()
allocate_ids = _allocate_ids
@classmethod
def _allocate_ids_async(cls, size=None, max=None, parent=None,
**ctx_options):
"""Allocates a range of key IDs for this model class.
This is the asynchronous version of Model._allocate_ids().
"""
from google.appengine.ext.ndb import tasklets
ctx = tasklets.get_context()
cls._pre_allocate_ids_hook(size, max, parent)
key = Key(cls._get_kind(), None, parent=parent)
fut = ctx.allocate_ids(key, size=size, max=max, **ctx_options)
post_hook = cls._post_allocate_ids_hook
if not cls._is_default_hook(Model._default_post_allocate_ids_hook,
post_hook):
fut.add_immediate_callback(post_hook, size, max, parent, fut)
return fut
allocate_ids_async = _allocate_ids_async
@classmethod
@utils.positional(3)
def _get_by_id(cls, id, parent=None, **ctx_options):
"""Returns an instance of Model class by ID.
This is really just a shorthand for Key(cls, id, ...).get().
Args:
id: A string or integer key ID.
parent: Optional parent key of the model to get.
namespace: Optional namespace.
app: Optional app ID.
**ctx_options: Context options.
Returns:
A model instance or None if not found.
"""
return cls._get_by_id_async(id, parent=parent, **ctx_options).get_result()
get_by_id = _get_by_id
@classmethod
@utils.positional(3)
def _get_by_id_async(cls, id, parent=None, app=None, namespace=None,
**ctx_options):
"""Returns an instance of Model class by ID (and app, namespace).
This is the asynchronous version of Model._get_by_id().
"""
key = Key(cls._get_kind(), id, parent=parent, app=app, namespace=namespace)
return key.get_async(**ctx_options)
get_by_id_async = _get_by_id_async
@classmethod
def _pre_allocate_ids_hook(cls, size, max, parent):
pass
_default_pre_allocate_ids_hook = _pre_allocate_ids_hook
@classmethod
def _post_allocate_ids_hook(cls, size, max, parent, future):
pass
_default_post_allocate_ids_hook = _post_allocate_ids_hook
@classmethod
def _pre_delete_hook(cls, key):
pass
_default_pre_delete_hook = _pre_delete_hook
@classmethod
def _post_delete_hook(cls, key, future):
pass
_default_post_delete_hook = _post_delete_hook
@classmethod
def _pre_get_hook(cls, key):
pass
_default_pre_get_hook = _pre_get_hook
@classmethod
def _post_get_hook(cls, key, future):
pass
_default_post_get_hook = _post_get_hook
def _pre_put_hook(self):
pass
_default_pre_put_hook = _pre_put_hook
def _post_put_hook(self, future):
pass
_default_post_put_hook = _post_put_hook
@staticmethod
def _is_default_hook(default_hook, hook):
"""Checks whether a specific hook is in its default state.
Args:
default_hook: Callable specified by ndb internally (do not override).
hook: The hook defined by a model class using _post_*_hook.
Raises:
TypeError if either the default hook or the tested hook are not callable.
"""
if not hasattr(default_hook, '__call__'):
raise TypeError('Default hooks for ndb.model.Model must be callable')
if not hasattr(hook, '__call__'):
raise TypeError('Hooks must be callable')
if hasattr(default_hook, '__func__'):
default_hook = default_hook.__func__
if hasattr(hook, '__func__'):
hook = hook.__func__
return default_hook is hook
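def _example_post_put_hook():
  """Editor's sketch (not part of the original module): a Model subclass
  overriding the _post_put_hook defined above. Wrapped in a function so that
  importing this module does not register the example kind; the class and
  attribute names are assumptions for illustration only.
  """
  class AuditedModel(Model):
    written_keys = []  # class-level audit trail, illustration only

    def _post_put_hook(self, future):
      # The future passed to the hook resolves to the written entity's key.
      AuditedModel.written_keys.append(future.get_result())
  return AuditedModel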
class Expando(Model):
"""Model subclass to support dynamic Property names and types.
See the module docstring for details.
"""
_default_indexed = True
_write_empty_list_for_dynamic_properties = None
def _set_attributes(self, kwds):
for name, value in six.iteritems(kwds):
setattr(self, name, value)
@classmethod
def _unknown_property(cls, name):
pass
def __getattr__(self, name):
if name.startswith('_'):
return super(Expando, self).__getattr__(name)
prop = self._properties.get(six.ensure_text(name))
if prop is None:
return super(Expando, self).__getattribute__(name)
return prop._get_value(self)
def __setattr__(self, name, value):
if (name.startswith('_') or
isinstance(getattr(self.__class__, name, None), (Property, property))):
return super(Expando, self).__setattr__(name, value)
self._clone_properties()
if isinstance(value, Model):
prop = StructuredProperty(Model, name)
elif isinstance(value, dict):
prop = StructuredProperty(Expando, name)
else:
prop = GenericProperty(
name, repeated=isinstance(value, list),
indexed=self._default_indexed,
write_empty_list=self._write_empty_list_for_dynamic_properties)
prop._code_name = name
self._properties[six.ensure_text(name)] = prop
prop._set_value(self, value)
def __delattr__(self, name):
if (name.startswith('_') or
isinstance(getattr(self.__class__, name, None), (Property, property))):
return super(Expando, self).__delattr__(name)
prop_name = six.ensure_text(name)
prop = self._properties.get(prop_name)
if not isinstance(prop, Property):
raise TypeError('Model properties must be Property instances; not %r' %
prop)
prop._delete_value(self)
if prop_name in self.__class__._properties:
raise RuntimeError('Property %s still in the list of properties for the '
'base class.' % name)
del self._properties[prop_name]
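def _example_expando_usage():
  """Editor's sketch (not part of the original module): dynamic property
  creation on Expando, per the class docstring above. The kind and the
  attribute names are assumptions for illustration only.
  """
  class Person(Expando):
    pass
  p = Person()
  p.name = 'Sandy'     # creates a GenericProperty on the fly
  p.tags = ['a', 'b']  # a repeated GenericProperty
  del p.tags           # removes the dynamic property again
  return p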
@utils.positional(1)
def transaction(callback, **ctx_options):
"""Run a callback in a transaction.
Args:
callback: A function or tasklet to be called.
**ctx_options: Transaction options.
Useful options include:
retries=N: Retry up to N times (i.e. try up to N+1 times)
propagation=<flag>: Determines how an existing transaction should be
propagated, where <flag> can be one of the following:
TransactionOptions.NESTED: Start a nested transaction (this is the
default; but actual nested transactions are not yet implemented,
so effectively you can only use this outside an existing transaction).
TransactionOptions.MANDATORY: A transaction must already be in progress.
TransactionOptions.ALLOWED: If a transaction is in progress, join it.
TransactionOptions.INDEPENDENT: Always start a new parallel transaction.
xg=True: On the High Replication Datastore, enable cross-group
transactions, i.e. allow writing to up to 5 entity groups.
read_only=True: Indicates a transaction will not do any writes, which
potentially allows for more throughput.
WARNING: Using anything other than NESTED for the propagation flag
can have strange consequences. When using ALLOWED or MANDATORY, if
an exception is raised, the transaction is likely not safe to
commit. When using INDEPENDENT it is not generally safe to return
values read to the caller (as they were not read in the caller's
transaction).
Returns:
Whatever callback() returns.
Raises:
Whatever callback() raises; datastore_errors.TransactionFailedError
if the transaction failed.
Note:
To pass arguments to a callback function, use a lambda, e.g.
def my_callback(key, inc):
...
transaction(lambda: my_callback(Key(...), 1))
"""
fut = transaction_async(callback, **ctx_options)
return fut.get_result()
@utils.positional(1)
def transaction_async(callback, **ctx_options):
"""Run a callback in a transaction.
This is the asynchronous version of transaction().
"""
from google.appengine.ext.ndb import tasklets
return tasklets.get_context().transaction(callback, **ctx_options)
def in_transaction():
"""Return whether a transaction is currently active."""
from google.appengine.ext.ndb import tasklets
return tasklets.get_context().in_transaction()
@utils.decorator
def transactional(func, args, kwds, **options):
"""Decorator to make a function automatically run in a transaction.
Args:
**ctx_options: Transaction options (see transaction(), but propagation
      defaults to TransactionOptions.ALLOWED).
This supports two forms:
(1) Vanilla:
@transactional
def callback(arg):
...
(2) With options:
@transactional(retries=1)
def callback(arg):
...
"""
return transactional_async.wrapped_decorator(
func, args, kwds, **options).get_result()
@utils.decorator
def transactional_async(func, args, kwds, **options):
"""The async version of @ndb.transaction."""
options.setdefault('propagation', datastore_rpc.TransactionOptions.ALLOWED)
if args or kwds:
return transaction_async(lambda: func(*args, **kwds), **options)
return transaction_async(func, **options)
@utils.decorator
def transactional_tasklet(func, args, kwds, **options):
"""The async version of @ndb.transaction.
Will return the result of the wrapped function as a Future.
"""
from google.appengine.ext.ndb import tasklets
func = tasklets.tasklet(func)
return transactional_async.wrapped_decorator(func, args, kwds, **options)
@utils.decorator
def non_transactional(func, args, kwds, allow_existing=True):
"""A decorator that ensures a function is run outside a transaction.
If there is an existing transaction (and allow_existing=True), the
existing transaction is paused while the function is executed.
Args:
allow_existing: If false, throw an exception if called from within
a transaction. If true, temporarily re-establish the
previous non-transactional context. Defaults to True.
This supports two forms, similar to transactional().
Returns:
A wrapper for the decorated function that ensures it runs outside a
transaction.
"""
from google.appengine.ext.ndb import tasklets
ctx = tasklets.get_context()
if not ctx.in_transaction():
return func(*args, **kwds)
if not allow_existing:
raise datastore_errors.BadRequestError(
'%s cannot be called within a transaction.' % func.__name__)
save_ctx = ctx
while ctx.in_transaction():
ctx = ctx._parent_context
if ctx is None:
raise datastore_errors.BadRequestError(
'Context without non-transactional ancestor')
save_ds_conn = datastore._GetConnection()
try:
if hasattr(save_ctx, '_old_ds_conn'):
datastore._SetConnection(save_ctx._old_ds_conn)
tasklets.set_context(ctx)
return func(*args, **kwds)
finally:
tasklets.set_context(save_ctx)
datastore._SetConnection(save_ds_conn)
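def _example_mixed_decorators():
  """Editor's sketch (not part of the original module): pairing
  @transactional with @non_transactional as described above. The entity
  shape (a `count` attribute) is an assumption for illustration only.
  """
  @non_transactional
  def audit(message):
    # Runs with the caller's transaction paused.
    pass

  @transactional
  def update(key):
    ent = key.get()
    ent.count += 1  # assumes an integer property named `count`
    ent.put()
    audit('updated %s' % key)
  return update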
def get_multi_async(keys, **ctx_options):
"""Fetches a sequence of keys.
Args:
keys: A sequence of keys.
**ctx_options: Context options.
Returns:
A list of futures.
"""
return [key.get_async(**ctx_options) for key in keys]
def get_multi(keys, **ctx_options):
"""Fetches a sequence of keys.
Args:
keys: A sequence of keys.
**ctx_options: Context options.
Returns:
A list whose items are either a Model instance or None if the key wasn't
found.
"""
return [future.get_result()
for future in get_multi_async(keys, **ctx_options)]
def put_multi_async(entities, **ctx_options):
"""Stores a sequence of Model instances.
Args:
entities: A sequence of Model instances.
**ctx_options: Context options.
Returns:
A list of futures.
"""
return [entity.put_async(**ctx_options) for entity in entities]
def put_multi(entities, **ctx_options):
"""Stores a sequence of Model instances.
Args:
entities: A sequence of Model instances.
**ctx_options: Context options.
Returns:
A list with the stored keys.
"""
return [future.get_result()
for future in put_multi_async(entities, **ctx_options)]
def delete_multi_async(keys, **ctx_options):
"""Deletes a sequence of keys.
Args:
keys: A sequence of keys.
**ctx_options: Context options.
Returns:
A list of futures.
"""
return [key.delete_async(**ctx_options) for key in keys]
def delete_multi(keys, **ctx_options):
"""Deletes a sequence of keys.
Args:
keys: A sequence of keys.
**ctx_options: Context options.
Returns:
A list whose items are all None, one per deleted key.
"""
return [future.get_result()
for future in delete_multi_async(keys, **ctx_options)]
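def _example_multi_ops(entities):
  """Editor's sketch (not part of the original module): the usual pairing of
  the *_multi helpers above. `entities` is caller-supplied; error handling
  is omitted.
  """
  keys = put_multi(entities)  # blocking batch write, returns the keys
  fetched = get_multi(keys)   # list of entities (None for misses)
  delete_multi(keys)          # batch delete, returns [None, ...]
  return fetched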
def get_indexes_async(**ctx_options):
"""Get a data structure representing the configured indexes.
Args:
**ctx_options: Context options.
Returns:
A future.
"""
from google.appengine.ext.ndb import tasklets
ctx = tasklets.get_context()
return ctx.get_indexes(**ctx_options)
def get_indexes(**ctx_options):
"""Get a data structure representing the configured indexes.
Args:
**ctx_options: Context options.
Returns:
A list of Index objects.
"""
return get_indexes_async(**ctx_options).get_result()
for _name, _object in list(globals().items()):
if ((_name.endswith('Property') and issubclass(_object, Property)) or
(_name.endswith('Error') and issubclass(_object, Exception))):
__all__.append(_name)
|
apache-2.0
| -7,679,272,179,333,130,000 | 30.765222 | 82 | 0.653272 | false |
geertj/draco2
|
draco2/util/test/test_misc.py
|
1
|
2420
|
# vi: ts=8 sts=4 sw=4 et
#
# test_misc: test suite for draco2.util.misc
#
# This file is part of Draco2. Draco2 is free software and is made available
# under the MIT license. Consult the file "LICENSE" that is distributed
# together with this file for the exact licensing terms.
#
# Draco2 is copyright (c) 1999-2007 by the Draco2 authors. See the file
# "AUTHORS" for a complete overview.
#
# $Revision: $
from draco2.util.misc import dedent
class TestDedent(object):
def test_null(self):
s = ''
assert dedent(s) == ''
def test_zero_line(self):
s = 'test line'
assert dedent(s) == 'test line'
s = ' test line'
assert dedent(s) == 'test line'
def test_one_line(self):
s = 'line1\n'
assert dedent(s) == 'line1'
s = ' line1\n'
assert dedent(s) == 'line1'
def test_multi_line_with_first_line(self):
s = 'line1\n line2\n line3\n'
assert dedent(s) == 'line1\nline2\nline3'
def test_multi_line_without_first_line(self):
s = '\n line2\n line3\n'
assert dedent(s) == 'line2\nline3'
def test_multi_line_without_final_newline(self):
s = 'line1\n line2'
assert dedent(s) == 'line1\nline2'
def test_multi_line_with_increasing_indent(self):
s = 'line1\n line2\n line3\n line4\n'
assert dedent(s) == 'line1\nline2\n line3\n line4'
def test_multi_line_with_decreasing_indent(self):
s = 'line1\n line2\n line3\n line4\n'
assert dedent(s) == 'line1\n line2\n line3\nline4'
def test_multi_line_with_trim(self):
s = '\n\n\nline1\nline2\n\n\n'
assert dedent(s) == 'line1\nline2'
def test_multi_line_with_trim_and_indent(self):
s = '\n\n\n line1\n line2\n\n\n'
assert dedent(s) == 'line1\nline2'
def test_multi_line_without_trim(self):
s = '\n\n\nline1\nline2\n\n\n'
assert dedent(s, trim=0) == '\n\n\nline1\nline2\n\n'
def test_empty_line(self):
s = '\n'
assert dedent(s) == ''
def test_empty_lines(self):
s = '\n\n\n\n'
assert dedent(s) == ''
def test_whitespace(self):
s = ' '
assert dedent(s) == ''
def test_whitespace_line(self):
s = ' \n'
assert dedent(s) == ''
def test_whitespace_lines(self):
s = ' \n \n'
assert dedent(s) == ''
|
mit
| -5,645,479,705,840,652,000 | 27.470588 | 76 | 0.570661 | false |
coopernurse/radiator
|
radiator/__init__.py
|
1
|
19563
|
import logging
import uuid
import struct
import os
import time
import re
import types
import base64
import random
import tempfile
logger = logging.getLogger('radiator')
def start_server(reactor,
dir=None,
fsync_millis=0,
rewrite_interval_secs=300):
broker = Broker(dir=dir, fsync_millis=fsync_millis,
rewrite_interval_secs=rewrite_interval_secs)
reactor.start_server(broker, blocking=True)
def now_millis():
return int(time.time() * 1000)
class RadiatorTimeout(Exception):
pass
class Subscription(object):
def __init__(self, dest_name, auto_ack, dest, wildcard_add=False):
self.dest_name = dest_name
self.auto_ack = auto_ack
self.dest = dest
self.wildcard_add = wildcard_add
    def matches(self, dest_name):
        if self.dest_name.endswith(".>"):
            s = self.dest_name[:self.dest_name.rfind(".>")]
            return dest_name.startswith(s)
        return False
class Session(object):
def __init__(self, session_id, send_message_cb):
self.session_id = session_id
self.send_message_cb = send_message_cb
self.subscriptions = { }
self.busy = False
def destroy(self):
for dest_name, sub in self.subscriptions.items():
if sub.dest:
sub.dest.unsubscribe(self)
self.subscriptions.clear()
def subscribe(self, dest_name, auto_ack, dest=None, wildcard_add=False):
sub = Subscription(dest_name, auto_ack, dest, wildcard_add)
self.subscriptions[dest_name] = sub
if dest:
dest.subscribe(self)
self.pull_message(dest)
def unsubscribe(self, dest_name):
if self.subscriptions.has_key(dest_name):
d = self.subscriptions[dest_name]
if d.dest:
d.dest.unsubscribe(self)
del(self.subscriptions[dest_name])
# TODO - remove wildcard matches
def add_all_matching_dests(self, dest_dict):
for dest in dest_dict.values():
self.on_dest_created(dest)
def on_dest_created(self, dest):
if self.subscriptions.has_key(dest.name):
d = self.subscriptions[dest.name]
if d.dest != dest:
d.dest = dest
dest.subscribe(self)
else:
parent_sub = None
for sub in self.subscriptions.values():
if sub.matches(dest.name):
parent_sub = sub
break
if parent_sub:
self.subscribe(dest.name, parent_sub.auto_ack, dest,
wildcard_add=True)
def pull_message(self, dest=None):
msg_sent = False
if not self.busy:
msg = None
auto_ack = True
if dest:
auto_ack = self.subscriptions[dest.name].auto_ack
msg = dest.receive(auto_ack)
else:
my_dests = self.subscriptions.values()
random.shuffle(my_dests)
for d in my_dests:
if d.dest:
                        msg = d.dest.receive(d.auto_ack)
                        if msg:
                            auto_ack = d.auto_ack
                            break
if msg:
self.busy = (not auto_ack)
msg_sent = True
dest_name = msg[0].name
msg_id = msg[1].id.hex+","+msg[0].name
body = msg[1].body
self.send_message_cb(dest_name, msg_id, body)
return msg_sent
def send_message(self, dest_name, msg_id, body):
self.send_message_cb(dest_name, msg_id, body)
class Broker(object):
def __init__(self, dir=None,
fsync_millis=0,
rewrite_interval_secs=300):
self.dir = dir
self.fsync_millis = fsync_millis
self.rewrite_interval_secs = rewrite_interval_secs
#
# key: dest_name
# value: Dest obj (provides send(), receive(), ack())
self.dest_dict = { }
#
# key: session_id
# value: Session obj
self.session_dict = { }
def destroy_session(self, session_id):
if self.session_dict.has_key(session_id):
self.session_dict[session_id].destroy()
del self.session_dict[session_id]
def send(self, dest_name, body):
self._get_or_create_dest(dest_name).send(body)
def subscribe(self, dest_name, auto_ack, session_id, on_message_cb):
session = self._get_or_create_session(session_id, on_message_cb)
dest = None
if self.dest_dict.has_key(dest_name):
dest = self.dest_dict[dest_name]
session.subscribe(dest_name, auto_ack, dest=dest)
session.add_all_matching_dests(self.dest_dict)
def unsubscribe(self, dest_name, session_id):
if self.session_dict.has_key(session_id):
session = self.session_dict[session_id]
session.unsubscribe(dest_name)
def ack(self, session_id, message_id):
(message_id, dest_name) = message_id.split(",")
self._get_or_create_dest(dest_name).ack(uuid.UUID(message_id))
if self.session_dict.has_key(session_id):
session = self.session_dict[session_id]
session.busy = False
session.pull_message()
def _get_or_create_session(self, session_id, on_message_cb):
if not self.session_dict.has_key(session_id):
self.session_dict[session_id] = Session(session_id, on_message_cb)
return self.session_dict[session_id]
def _get_or_create_dest(self, dest_name):
dest = None
dests = self.dest_dict
if dests.has_key(dest_name):
dest = dests[dest_name]
else:
if dest_name.find("/topic/") == 0:
dest = PubSubTopic(dest_name)
else:
rw_secs = self.rewrite_interval_secs
dest = FileQueue(dest_name,
dir=self.dir,
rewrite_interval_secs=rw_secs,
fsync_millis=self.fsync_millis)
dests[dest_name] = dest
for session in self.session_dict.values():
session.on_dest_created(dest)
return dest
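def _example_broker_roundtrip():
    # Editor's sketch (not part of the original module): driving Broker
    # directly, without a reactor. The session id, destination name and
    # callback are assumptions for illustration only.
    received = []
    def on_message(dest_name, msg_id, body):
        received.append((dest_name, msg_id, body))
    broker = Broker(dir=tempfile.mkdtemp())
    broker.subscribe('/queue/demo', False, 'session-1', on_message)
    broker.send('/queue/demo', 'hello')
    # auto_ack is False above, so each delivered message must be acked
    for dest_name, msg_id, body in received:
        broker.ack('session-1', msg_id)
    return received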
class MessageHeader(object):
def __init__(self, pos, create_time, dequeue_time, ack_timeout,
id, header_size, body_size):
self.pos = pos
self.create_time = create_time
self.dequeue_time = dequeue_time
self.ack_timeout = ack_timeout
self.id = id
self.header_size = header_size
self.body_size = body_size
self.total_size = header_size + body_size + 48
def copy(self, from_file, to_file):
self._write_header(to_file)
from_file.seek(self.pos+48)
if (self.header_size > 0):
to_file.write(from_file.read(self.header_size))
if (self.body_size > 0):
to_file.write(from_file.read(self.body_size))
def write(self, to_file, header, body):
self.header_size = len(header)
self.body_size = len(body)
self._write_header(to_file)
to_file.write(header)
to_file.write(body)
def _write_header(self, to_file):
to_file.write(struct.pack("q", self.create_time))
to_file.write(struct.pack("q", self.dequeue_time))
to_file.write(struct.pack("q", self.ack_timeout))
to_file.write(self.id.bytes)
to_file.write(struct.pack("i", self.header_size))
to_file.write(struct.pack("i", self.body_size))
def __str__(self):
#return "MessageHeader pos=%d id=%s create_time=%d dequeue_time=%d " + \
# "ack_timeout=%d header_size=%d body_size=%d" % \
#(self.pos, self.id.hex, self.create_time, self.dequeue_time,
# self.ack_timeout, self.header_size, self.body_size)
return "MessageHeader pos=%s id=%s" % (self.pos, self.id.hex)
class BaseDestination(object):
def __init__(self, name):
#self._validate_name(name)
self.name = name
self.subscribers = { }
def subscribe(self, session):
if not self.subscribers.has_key(session.session_id):
self.subscribers[session.session_id] = session
def unsubscribe(self, session):
if self.subscribers.has_key(session.session_id):
del(self.subscribers[session.session_id])
def send(self, body):
raise NotImplementedError
def receive(self, auto_ack):
raise NotImplementedError
def ack(self, id):
raise NotImplementedError
def close(self):
raise NotImplementedError
def destroy(self):
for s_id, session in self.subscribers.items():
session.unsubscribe(self.name)
self.subscribers.clear()
def _validate_name(self, name):
if not type(name) is types.StringType:
raise ValueError("Queue name must be a string")
name_regex = re.compile("^[a-zA-Z0-9\/\-\.\_]+$")
if not name_regex.match(name):
raise ValueError("Invalid queue name: %s" % name)
class PubSubTopic(BaseDestination):
def __init__(self, name):
BaseDestination.__init__(self, name)
self.messages = [ ]
def send(self, body):
id = uuid.uuid4()
for k, v in self.subscribers.items():
v.send_message(self.name, id.hex, body)
def receive(self, auto_ack):
return None
def ack(self, id):
pass
    def close(self):
        pass
class FileQueue(BaseDestination):
def __init__(self, name, dir=None, ack_timeout=120, fsync_millis=0,
rewrite_interval_secs=300,
compress_files=False):
BaseDestination.__init__(self, name)
self.version = 1
self.compress_files = compress_files
dir = dir or os.getcwd()
self.dir = dir
self.rewrite_interval_secs = rewrite_interval_secs
self.next_rewrite = 0
self.ack_timeout_millis = int(ack_timeout * 1000)
self.pending_prune_threshold = 10 * 1024 * 1024
# dict tracking messages in use
        # key: uuid hex of message
        # value: MessageHeader of the dequeued message
self.msgs_in_use = { }
self.pending_message_count = 0
self.total_messages = 0
#
# pending_file_pos is a byte offset into the
        # pending file. we increment this as we read.
        # if we hit EOF then we have an empty queue and
# can reset the queue file
self.pending_file_pos = 12
#
# each queue has a configurable fsync interval
# fsync_seconds==0 means we'll fsync on all writes
        self.fsync_seconds = fsync_millis / 1000.0
self.last_fsync = 0
#
# one file per queue
basename = base64.urlsafe_b64encode(name)
self.filename = os.path.join(dir, "%s.msg.dat" % basename)
self.f = None
self._load_or_init_state()
def pending_messages(self):
return self.pending_message_count
def in_use_messages(self):
return len(self.msgs_in_use)
def msg_in_use(self, id):
return self.msgs_in_use.has_key(id.hex)
def close(self):
self.f.close()
self.f = None
def destroy(self):
BaseDestination.destroy(self)
self._delete_file(self.filename)
def send(self, body):
self._open()
id = uuid.uuid4()
self.f.seek(0, 2) # go to end of file
msg_header = MessageHeader(self.f.tell(),
now_millis(),
0, 0, id, 0, 0)
msg_header.write(self.f, "", body)
self._fsync()
self.pending_message_count += 1
self.total_messages += 1
self._dump("send %s" % id.hex)
for k,v in self.subscribers.items():
if v.pull_message(self):
break
return id
def receive(self, auto_ack):
self._open()
if self.pending_message_count > 0:
# grab next msg from queue file, w/body
msg = self._read_msg(self.pending_file_pos, True)
# advance file pointer and write it
self.pending_file_pos += msg.total_size
self.f.seek(0)
self.f.write(struct.pack("q", self.pending_file_pos))
# mark message dequeued
now = now_millis()
self.f.seek(msg.pos+8)
self.f.write(struct.pack("q", now)) # dequeue time
self.f.write(struct.pack("q", now+self.ack_timeout_millis))
self._fsync()
self.pending_message_count -= 1
self.msgs_in_use[msg.id.hex] = msg
self._dump("receive %s" % msg.id.hex)
if auto_ack:
self.ack(msg.id)
return (self, msg)
else:
return None
def ack(self, id):
self._open()
if self.msgs_in_use.has_key(id.hex):
msg_header = self.msgs_in_use[id.hex]
del(self.msgs_in_use[id.hex])
# zero out the timeout, marking this message acked
self.f.seek(msg_header.pos + 16)
self.f.write(struct.pack("q", 0))
self._fsync()
active = self.pending_message_count + len(self.msgs_in_use)
self._dump("ack before rewrite %s" % id.hex)
active_pct = active / (self.total_messages * 1.0)
if active_pct < 0.1 and self.next_rewrite < time.time():
self._rewrite_file()
else:
logger.error("ack: %s: no msg in use with id: %s" % \
(self.name, id.hex))
self._dump("ack %s" % id.hex)
##################################################################
def _rewrite_file(self):
start = time.time()
self.next_rewrite = time.time() + self.rewrite_interval_secs
(tmp_fd, tmp_path) = tempfile.mkstemp(dir=self.dir)
tmp_file = os.fdopen(tmp_fd, "w")
tmp_file.write(struct.pack("q", 12))
tmp_file.write(struct.pack("i", self.version))
fsize = os.path.getsize(self.filename)
self.msgs_in_use.clear()
self.pending_message_count = 0
pos = 12
self.pending_file_pos = pos
remove_count = 0
to_requeue = [ ]
now_ms = now_millis()
self.f.seek(pos)
while pos < fsize:
write_msg = False
msg_header = self._read_msg(pos, False)
pos += msg_header.total_size
if msg_header.dequeue_time > 0:
# msg has been dequeued
if msg_header.ack_timeout == 0:
# msg dequeued and acked. we don't need to keep it
remove_count += 1
elif msg_header.ack_timeout < now_ms:
# ack expired. re-queue
to_requeue.append(msg_header)
else:
# ack not expired - but store new file offset
write_msg = True
msg_header.pos = tmp_file.tell()
self.msgs_in_use[msg_header.id.hex] = msg_header
else:
write_msg = True
if self.pending_message_count == 0:
# position of first pending msg in new file
self.pending_file_pos = tmp_file.tell()
self.pending_message_count += 1
if write_msg:
msg_header.copy(self.f, tmp_file)
else:
self.f.seek(msg_header.header_size,1)
self.f.seek(msg_header.body_size,1)
# add ack expired messages to end of queue
for msg_header in to_requeue:
msg_header.dequeue_time = 0
msg_header.ack_timeout = 0
if self.pending_message_count == 0:
# position of first pending msg in new file
self.pending_file_pos = tmp_file.tell()
msg_header.copy(self.f, tmp_file)
self.pending_message_count += 1
self.total_messages = self.pending_message_count+len(self.msgs_in_use)
self.f.close()
tmp_file.seek(0,0)
tmp_file.write(struct.pack("q", self.pending_file_pos))
tmp_file.close()
os.rename(tmp_path, self.filename)
self.f = open(self.filename, "r+")
self.f.write(struct.pack("q", self.pending_file_pos))
self.f.write(struct.pack("i", self.version))
self._fsync(True)
#elapsed = int((time.time() - start) * 1000)
#print "_rewrite_file. elapsed=%d old_size=%d new_size=%d - kept=%d requeued=%d removed=%d" % (elapsed, fsize, os.path.getsize(self.filename), self.total_messages, len(to_requeue), remove_count)
self._dump("_rewrite_file")
def _open(self):
if not self.f:
self._load_or_init_state()
def _load_or_init_state(self):
self.pending_message_count = 0
self.msgs_in_use.clear()
if os.path.exists(self.filename):
self._load_state()
else:
self.pending_file_pos = 12
self.f = open(self.filename, "w")
self.f.write(struct.pack("q", self.pending_file_pos))
self.f.write(struct.pack("i", self.version))
self.f.close()
self.f = open(self.filename, "r+")
self.total_messages = self.pending_message_count+len(self.msgs_in_use)
self._dump("init")
def _load_state(self):
finfo = os.stat(self.filename)
fsize = finfo.st_size
self.f = open(self.filename, "r+")
self.pending_file_pos = struct.unpack("q", self.f.read(8))[0]
self.version = struct.unpack("i", self.f.read(4))[0]
pos = 12
while pos < fsize:
self._dump("_load_state")
msg_header = self._read_msg(pos, False)
if msg_header.dequeue_time > 0:
if msg_header.ack_timeout > 0:
self.msgs_in_use[msg_header.id.hex] = msg_header
else:
self.pending_message_count += 1
pos += msg_header.total_size
def _read_msg(self, pos, read_contents):
self.f.seek(pos, 0)
msg = MessageHeader(pos=self.f.tell(),
create_time =struct.unpack("q", self.f.read(8))[0],
dequeue_time=struct.unpack("q", self.f.read(8))[0],
ack_timeout =struct.unpack("q", self.f.read(8))[0],
id=uuid.UUID(bytes=self.f.read(16)),
header_size=struct.unpack("i", self.f.read(4))[0],
body_size=struct.unpack("i", self.f.read(4))[0])
if read_contents:
if msg.header_size > 0:
msg.header = self.f.read(msg.header_size)
if msg.body_size > 0:
msg.body = self.f.read(msg.body_size)
return msg
def _delete_file(self, filename):
if os.path.exists(filename):
os.remove(filename)
def _fsync(self, force=False):
self.f.flush()
if force or (time.time() > (self.last_fsync + self.fsync_seconds)):
os.fsync(self.f.fileno())
self.last_fsync = time.time()
def _dump(self, msg):
#print "%s - pos=%d pending=%d in_use=%d" % (msg, self.pending_file_pos, self.pending_message_count, len(self.msgs_in_use))
pass
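def _example_filequeue():
    # Editor's sketch (not part of the original module): exercising
    # FileQueue on its own; the queue name and directory are arbitrary.
    q = FileQueue('example-queue', dir=tempfile.mkdtemp())
    q.send('payload-1')
    dest, msg = q.receive(auto_ack=False)  # (queue, MessageHeader) tuple
    assert q.msg_in_use(msg.id)
    q.ack(msg.id)                          # releases the message
    q.close()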
|
mit
| 2,084,008,065,329,050,600 | 34.961397 | 204 | 0.543322 | false |
sawdog/OraclePyDoc
|
oraclepydoc/oracleobjects/oraclecolumn.py
|
1
|
1065
|
class OracleColumn:
"""! \brief Oracle column represents table column object"""
def __init__(self, name, column_id, data_type, nullable, data_default, comments):
self.column_id = column_id
self.name = name
self.data_type = data_type
self.nullable = nullable
self.data_default = data_default
self.comments = comments
def getXML(self, table_name):
"""! \brief get xml representation of column"""
#TODO: and it sucks to pass table_name via getXML, fix it
return '''<column id="column-%s.%s">
<name>%s</name>
<position>%s</position>
<datatype>%s</datatype>
<default_value>%s</default_value>
<nullable>%s</nullable>
<comments><![CDATA[%s]]></comments>
</column>\n''' % (table_name, self.name,
self.name, self.column_id, self.data_type,
self.data_default, self.nullable,
self.comments)
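def _example_column_xml():
    """! \brief editor's sketch (not part of the original module): build a
    column by hand and emit its XML fragment; the sample values are
    arbitrary."""
    col = OracleColumn('USERNAME', 2, 'VARCHAR2(30)', 'N', None, 'login name')
    return col.getXML('APP_USERS')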
|
gpl-2.0
| 8,176,197,300,944,973,000 | 39.961538 | 85 | 0.524883 | false |
appleseedhq/gaffer
|
python/GafferUI/NumericPlugValueWidget.py
|
1
|
7034
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
## Supported metadata :
#
# "numericPlugValueWidget:fixedCharacterWidth"
##
## \todo Maths expressions to modify the existing value
## \todo Enter names of other plugs to create a connection
## \todo Reject drag and drop of anything that's not a number
class NumericPlugValueWidget( GafferUI.PlugValueWidget ) :
def __init__( self, plug, **kw ) :
self.__numericWidget = GafferUI.NumericWidget( 0 )
GafferUI.PlugValueWidget.__init__( self, self.__numericWidget, plug, **kw )
self._addPopupMenu( self.__numericWidget )
# we use these to decide which actions to merge into a single undo
self.__lastChangedReason = None
self.__mergeGroupId = 0
self.__numericWidget.keyPressSignal().connect( Gaffer.WeakMethod( self.__keyPress ), scoped = False )
self.__valueChangedConnection = self.__numericWidget.valueChangedSignal().connect( Gaffer.WeakMethod( self.__valueChanged ), scoped = False )
self._updateFromPlug()
self.__updateWidth()
def setPlug( self, plug ) :
GafferUI.PlugValueWidget.setPlug( self, plug )
self.__updateWidth()
def numericWidget( self ) :
return self.__numericWidget
def setHighlighted( self, highlighted ) :
GafferUI.PlugValueWidget.setHighlighted( self, highlighted )
self.numericWidget().setHighlighted( highlighted )
def getToolTip( self ) :
result = GafferUI.PlugValueWidget.getToolTip( self )
if self.getPlug() is not None :
if result :
result += "\n"
result += "## Actions\n"
result += " - Cursor up/down to increment/decrement\n"
return result
def _updateFromPlug( self ) :
plug = self.getPlug()
if plug is not None :
with self.getContext() :
try :
value = plug.getValue()
except :
value = None
if value is not None :
with Gaffer.BlockedConnection( self.__valueChangedConnection ) :
self.__numericWidget.setValue( value )
self.__numericWidget.setErrored( value is None )
## \todo Perhaps this styling should be provided by the NumericWidget itself?
animated = Gaffer.Animation.isAnimated( plug )
widgetAnimated = GafferUI._Variant.fromVariant( self.__numericWidget._qtWidget().property( "gafferAnimated" ) ) or False
if widgetAnimated != animated :
self.__numericWidget._qtWidget().setProperty( "gafferAnimated", GafferUI._Variant.toVariant( bool( animated ) ) )
self.__numericWidget._repolish()
self.__numericWidget.setEditable( self._editable( canEditAnimation = True ) )
def __keyPress( self, widget, event ) :
assert( widget is self.__numericWidget )
if not self.__numericWidget.getEditable() :
return False
# escape abandons everything
if event.key=="Escape" :
self._updateFromPlug()
return True
return False
def __valueChanged( self, widget, reason ) :
if self._editable( canEditAnimation = True ) :
if not widget.changesShouldBeMerged( self.__lastChangedReason, reason ) :
self.__mergeGroupId += 1
self.__lastChangedReason = reason
self.__setPlugValue( mergeGroup = "NumericPlugValueWidget%d%d" % ( id( self, ), self.__mergeGroupId ) )
return False
def __setPlugValue( self, mergeGroup="" ) :
with Gaffer.UndoScope( self.getPlug().ancestor( Gaffer.ScriptNode ), mergeGroup=mergeGroup ) :
with Gaffer.BlockedConnection( self._plugConnections() ) :
if Gaffer.Animation.isAnimated( self.getPlug() ) :
curve = Gaffer.Animation.acquire( self.getPlug() )
if self.__numericWidget.getText() != self.__numericWidget.valueToString( curve.evaluate( self.getContext().getTime() ) ) :
curve.addKey(
Gaffer.Animation.Key(
self.getContext().getTime(),
self.__numericWidget.getValue(),
Gaffer.Animation.Type.Linear
)
)
else :
try :
self.getPlug().setValue( self.__numericWidget.getValue() )
except :
pass
# now any changes that were made in the numeric widget have been transferred
# into the global undo queue, we remove the text editing changes from the
# widget's private text editing undo queue. it will then ignore undo shortcuts,
# allowing them to fall through to the global undo shortcut.
self.__numericWidget.clearUndo()
# we always need to update the ui from the plug after trying to set it,
# because the plug might clamp the value to something else. furthermore
# it might not even emit plugSetSignal if it happens to clamp to the same
# value as it had before. we block calls to _updateFromPlug() while setting
# the value to avoid having to do the work twice if plugSetSignal is emitted.
self._updateFromPlug()
def __updateWidth( self ) :
charWidth = None
if self.getPlug() is not None :
charWidth = Gaffer.Metadata.value( self.getPlug(), "numericPlugValueWidget:fixedCharacterWidth" )
if charWidth is None and isinstance( self.getPlug(), Gaffer.IntPlug ) and self.getPlug().hasMaxValue() :
charWidth = len( str( self.getPlug().maxValue() ) )
self.__numericWidget.setFixedCharacterWidth( charWidth )
GafferUI.PlugValueWidget.registerType( Gaffer.FloatPlug, NumericPlugValueWidget )
GafferUI.PlugValueWidget.registerType( Gaffer.IntPlug, NumericPlugValueWidget )
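# Editor's sketch (not part of the original module): the fixed character
# width listed under "Supported metadata" above can be assigned per plug,
# e.g. (assuming `node["myIntPlug"]` exists and Gaffer.Metadata's
# registerValue call is available):
#
#   Gaffer.Metadata.registerValue(
#       node["myIntPlug"], "numericPlugValueWidget:fixedCharacterWidth", 6
#   )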
|
bsd-3-clause
| -8,314,075,521,094,012,000 | 35.445596 | 143 | 0.702587 | false |
ArteliaTelemac/PostTelemac
|
PostTelemac/PostTelemac.py
|
1
|
10296
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
PostTelemac
A QGIS plugin
Post-processing for Telemac
-------------------
begin : 2015-07-07
git sha : $Format:%H$
copyright : (C) 2015 by Artelia
email : patrice.Verchere@arteliagroup.com
***************************************************************************/
"""
# import QT
# from PyQt4 import QtCore ,QtGui
from qgis.PyQt import QtCore, QtGui
# import qgis
import qgis
# Other standart libs import
import os.path
import time
import sys
import subprocess
import inspect  # used by the DOPROCESSING block below
# sys.path.append(os.path.join(os.path.dirname(__file__),'libs_telemac'))
# Posttelemac library import
# import .resources_rc
from . import resources_rc
from .meshlayer.post_telemac_pluginlayer import SelafinPluginLayer
from .meshlayer.post_telemac_pluginlayer_type import SelafinPluginLayerType
from .meshlayerdialogs.posttelemac_about import aboutDialog
from .meshlayertools import utils
# Processing
DOPROCESSING = False # set to false to make the plugin reloader work
if DOPROCESSING:
from processing.core.Processing import Processing
from posttelemacprovider.PostTelemacProvider import PostTelemacProvider
cmd_folder = os.path.split(inspect.getfile(inspect.currentframe()))[0]
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
"""
/***************************************************************************
Main Class
***************************************************************************/
"""
class PostTelemac:
"""QGIS Plugin Implementation."""
def __init__(self, iface):
"""Constructor.
:param iface: An interface instance that will be passed to this class
which provides the hook by which you can manipulate the QGIS
application at run time.
:type iface: QgsInterface
"""
# ***********************************************************************
# Save reference to the QGIS interface
self.iface = iface
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QtCore.QSettings().value("locale/userLocale")[0:2]
locale_path = os.path.join(self.plugin_dir, "i18n", "posttelemac_{}.qm".format(locale))
if os.path.exists(locale_path):
# app=QApplication([''])
self.translator = QtCore.QTranslator()
# self.translator = QTranslator(app)
self.translator.load(locale_path)
QtCore.QCoreApplication.installTranslator(self.translator)
"""
if qVersion() > '4.3.3':
print 'ok'
QCoreApplication.installTranslator(self.translator)
#app.installTranslator(self.translator)
"""
# ***********************************************************************
self.pluginLayerType = None
self.addToRegistry()
self.slf = []
# Declare instance attributes
self.actions = []
self.menu = self.tr(u"&PostTelemac")
# TODO: We are going to let the user set this up in a future iteration
# toolbar
try:
from qgis.PyQt.QtGui import QToolBar
except:
from qgis.PyQt.QtWidgets import QToolBar
toolbars = self.iface.mainWindow().findChildren(QToolBar)
test = True
for toolbar1 in toolbars:
if toolbar1.windowTitle() == u"Telemac":
self.toolbar = toolbar1
test = False
break
if test:
self.toolbar = self.iface.addToolBar(u"Telemac")
self.toolbar.setObjectName(u"Telemac")
self.dlg_about = None
# Processing
if DOPROCESSING:
self.provider = PostTelemacProvider()
# noinspection PyMethodMayBeStatic
def tr(self, message):
"""Get the translation for a string using Qt translation API.
We implement this ourselves since we do not inherit QObject.
:param message: String for translation.
:type message: str, QString
:returns: Translated version of message.
:rtype: QString
"""
# noinspection PyTypeChecker,PyArgumentList,PyCallByClass
return QtCore.QCoreApplication.translate("PostTelemac", message)
def add_action(
self,
icon_path,
text,
callback,
enabled_flag=True,
add_to_menu=True,
add_to_toolbar=True,
status_tip=None,
whats_this=None,
parent=None,
):
"""Add a toolbar icon to the toolbar.
:param icon_path: Path to the icon for this action. Can be a resource
path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
:type icon_path: str
:param text: Text that should be shown in menu items for this action.
:type text: str
:param callback: Function to be called when the action is triggered.
:type callback: function
:param enabled_flag: A flag indicating if the action should be enabled
by default. Defaults to True.
:type enabled_flag: bool
:param add_to_menu: Flag indicating whether the action should also
be added to the menu. Defaults to True.
:type add_to_menu: bool
:param add_to_toolbar: Flag indicating whether the action should also
be added to the toolbar. Defaults to True.
:type add_to_toolbar: bool
:param status_tip: Optional text to show in a popup when mouse pointer
hovers over the action.
:type status_tip: str
:param parent: Parent widget for the new action. Defaults None.
:type parent: QWidget
:param whats_this: Optional text to show in the status bar when the
mouse pointer hovers over the action.
:returns: The action that was created. Note that the action is also
added to self.actions list.
:rtype: QAction
"""
icon = QtGui.QIcon(icon_path)
try:
from qgis.PyQt.QtGui import QAction
except:
from qgis.PyQt.QtWidgets import QAction
action = QAction(icon, text, parent)
action.triggered.connect(callback)
action.setEnabled(enabled_flag)
if status_tip is not None:
action.setStatusTip(status_tip)
if whats_this is not None:
action.setWhatsThis(whats_this)
if add_to_toolbar:
self.toolbar.addAction(action)
if add_to_menu:
self.iface.addPluginToMenu(self.menu, action)
self.actions.append(action)
return action
def initGui(self):
"""Create the menu entries and toolbar icons inside the QGIS GUI."""
icon_path = ":/plugins/PostTelemac/icons/posttelemac.png"
self.add_action(icon_path, text=self.tr(u"PostTelemac"), callback=self.run, parent=self.iface.mainWindow())
self.add_action(
icon_path,
text=self.tr(u"PostTelemac Help"),
add_to_toolbar=False,
callback=self.showHelp,
parent=self.iface.mainWindow(),
)
self.add_action(
icon_path,
text=self.tr(u"PostTelemac About"),
add_to_toolbar=False,
callback=self.showAbout,
parent=self.iface.mainWindow(),
)
# Processing thing
if DOPROCESSING:
Processing.addProvider(self.provider)
def unload(self):
"""Removes the plugin menu item and icon from QGIS GUI."""
for action in self.actions:
self.iface.removePluginMenu(self.tr(u"&PostTelemac"), action)
self.toolbar.removeAction(action)
# remove the toolbar
if len(self.toolbar.actions()) == 0:
del self.toolbar
if DOPROCESSING:
Processing.removeProvider(self.provider)
def run(self):
"""Run method that performs all the real work"""
self.slf.append(
SelafinPluginLayer(self.tr("Click properties to load selafin file"))
) # add selafin to list otherwise it can not work with multiple selafin files
self.slf[len(self.slf) - 1].setRealCrs(
self.iface.mapCanvas().mapSettings().destinationCrs()
) # to prevent weird bug with weird crs
qgis.core.QgsProject.instance().addMapLayer(self.slf[len(self.slf) - 1])
# try:
# qgis.core.QgsMapLayerRegistry.instance().addMapLayer(self.slf[len(self.slf)-1])
# except:
# qgis.core.QgsProject.instance().addMapLayer(self.slf[len(self.slf)-1])
self.iface.showLayerProperties(self.slf[len(self.slf) - 1])
def showHelp(self):
if sys.platform == "linux2":
subprocess.call(["xdg-open", "https://github.com/ArteliaTelemac/PostTelemac/wiki"])
else:
os.startfile("https://github.com/ArteliaTelemac/PostTelemac/wiki")
def showAbout(self):
if not self.dlg_about:
self.dlg_about = aboutDialog()
self.dlg_about.setWindowModality(2)
r = self.dlg_about.exec_()
# Specific functions
def addToRegistry(self):
# Add telemac_viewer in QgsPluginLayerRegistry
if utils.getQgisVersion() < 2.20:
reg = qgis.core.QgsPluginLayerRegistry.instance()
else:
reg = qgis.core.QgsApplication.pluginLayerRegistry()
if False:
if u"selafin_viewer" in qgis.core.QgsPluginLayerRegistry.instance().pluginLayerTypes():
qgis.core.QgsPluginLayerRegistry.instance().removePluginLayerType("selafin_viewer")
self.pluginLayerType = SelafinPluginLayerType()
qgis.core.QgsPluginLayerRegistry.instance().addPluginLayerType(self.pluginLayerType)
if True:
if u"selafin_viewer" in reg.pluginLayerTypes():
reg.removePluginLayerType("selafin_viewer")
self.pluginLayerType = SelafinPluginLayerType()
reg.addPluginLayerType(self.pluginLayerType)
|
gpl-3.0
| -2,736,814,625,547,561,000 | 33.901695 | 115 | 0.587801 | false |
davy39/eric
|
Utilities/ClassBrowsers/idlclbr.py
|
1
|
12545
|
# -*- coding: utf-8 -*-
# Copyright (c) 2005 - 2014 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Parse a CORBA IDL file and retrieve modules, interfaces, methods and
attributes.
Parse enough of a CORBA IDL file to recognize module, interface and method
definitions and to find out the superclasses of an interface as well as its
attributes.
It is based on the Python class browser found in this package.
"""
from __future__ import unicode_literals
import re
import Utilities
import Utilities.ClassBrowsers as ClassBrowsers
from . import ClbrBaseClasses
SUPPORTED_TYPES = [ClassBrowsers.IDL_SOURCE]
_getnext = re.compile(
r"""
(?P<String>
" [^"\\\n]* (?: \\. [^"\\\n]*)* "
)
| (?P<Comment>
^ [ \t]* // .*? $
|
^ [ \t]* /\* .*? \*/
)
| (?P<Method>
^
(?P<MethodIndent> [ \t]* )
(?: oneway [ \t]+ )?
(?: [a-zA-Z0-9_:]+ | void ) [ \t]*
(?P<MethodName> [a-zA-Z_] [a-zA-Z0-9_]* )
[ \t]*
\(
(?P<MethodSignature> [^)]*? )
\);
[ \t]*
)
| (?P<Interface>
^
(?P<InterfaceIndent> [ \t]* )
(?: abstract [ \t]+ )?
interface [ \t]+
(?P<InterfaceName> [a-zA-Z_] [a-zA-Z0-9_]* )
[ \t]*
(?P<InterfaceSupers> : [^{]+? )?
[ \t]* {
)
| (?P<Module>
^
(?P<ModuleIndent> [ \t]* )
module [ \t]+
(?P<ModuleName> [a-zA-Z_] [a-zA-Z0-9_]* )
[ \t]* {
)
| (?P<Attribute>
^
(?P<AttributeIndent> [ \t]* )
(?P<AttributeReadonly> readonly [ \t]+ )?
attribute [ \t]+
(?P<AttributeType> (?: [a-zA-Z0-9_:]+ [ \t]+ )+ )
(?P<AttributeNames> [^;]* )
;
)
| (?P<Begin>
[ \t]* {
)
| (?P<End>
[ \t]* } [ \t]* ;
)
""", re.VERBOSE | re.DOTALL | re.MULTILINE).search # __IGNORE_WARNING__
# function to replace comments
_commentsub = re.compile(r"""//[^\n]*\n|//[^\n]*$""").sub
# function to normalize whitespace
_normalize = re.compile(r"""[ \t]{2,}""").sub
_modules = {} # cache of modules we've seen
class VisibilityMixin(ClbrBaseClasses.ClbrVisibilityMixinBase):
"""
Mixin class implementing the notion of visibility.
"""
def __init__(self):
"""
Constructor
"""
self.setPublic()
class Module(ClbrBaseClasses.Module, VisibilityMixin):
"""
Class to represent a CORBA IDL module.
"""
def __init__(self, module, name, file, lineno):
"""
Constructor
@param module name of the module containing this class
@param name name of this class
@param file filename containing this class
@param lineno linenumber of the class definition
"""
ClbrBaseClasses.Module.__init__(self, module, name, file, lineno)
VisibilityMixin.__init__(self)
class Interface(ClbrBaseClasses.Class, VisibilityMixin):
"""
Class to represent a CORBA IDL interface.
"""
def __init__(self, module, name, super, file, lineno):
"""
Constructor
@param module name of the module containing this class
@param name name of this interface
@param super list of interface names this interface is inherited from
@param file filename containing this interface
@param lineno linenumber of the interface definition
"""
ClbrBaseClasses.Class.__init__(self, module, name, super, file, lineno)
VisibilityMixin.__init__(self)
class Function(ClbrBaseClasses.Function, VisibilityMixin):
"""
Class to represent a CORBA IDL function.
"""
def __init__(self, module, name, file, lineno, signature='',
separator=','):
"""
Constructor
@param module name of the module containing this function
@param name name of this function
@param file filename containing this class
@param lineno linenumber of the class definition
@param signature parameterlist of the method
@param separator string separating the parameters
"""
ClbrBaseClasses.Function.__init__(self, module, name, file, lineno,
signature, separator)
VisibilityMixin.__init__(self)
class Attribute(ClbrBaseClasses.Attribute, VisibilityMixin):
"""
Class to represent a CORBA IDL attribute.
"""
def __init__(self, module, name, file, lineno):
"""
Constructor
@param module name of the module containing this class
@param name name of this class
@param file filename containing this attribute
@param lineno linenumber of the class definition
"""
ClbrBaseClasses.Attribute.__init__(self, module, name, file, lineno)
VisibilityMixin.__init__(self)
def readmodule_ex(module, path=[]):
"""
Read a CORBA IDL file and return a dictionary of classes, functions and
modules.
@param module name of the CORBA IDL file (string)
@param path path the file should be searched in (list of strings)
@return the resulting dictionary
"""
global _modules
dict = {}
dict_counts = {}
if module in _modules:
# we've seen this file before...
return _modules[module]
# search the path for the file
f = None
fullpath = list(path)
f, file, (suff, mode, type) = ClassBrowsers.find_module(module, fullpath)
if f:
f.close()
if type not in SUPPORTED_TYPES:
# not CORBA IDL source, can't do anything with this module
_modules[module] = dict
return dict
_modules[module] = dict
classstack = [] # stack of (class, indent) pairs
indent = 0
try:
src = Utilities.readEncodedFile(file)[0]
except (UnicodeError, IOError):
# can't do anything with this module
_modules[module] = dict
return dict
lineno, last_lineno_pos = 1, 0
lastGlobalEntry = None
cur_obj = None
i = 0
while True:
m = _getnext(src, i)
if not m:
break
start, i = m.span()
if m.start("Method") >= 0:
# found a method definition or function
thisindent = indent
meth_name = m.group("MethodName")
meth_sig = m.group("MethodSignature")
meth_sig = meth_sig and meth_sig.replace('\\\n', '') or ''
meth_sig = _commentsub('', meth_sig)
meth_sig = _normalize(' ', meth_sig)
lineno = lineno + src.count('\n', last_lineno_pos, start)
last_lineno_pos = start
# close all interfaces/modules indented at least as much
while classstack and \
classstack[-1][1] >= thisindent:
if classstack[-1][0] is not None:
# record the end line
classstack[-1][0].setEndLine(lineno - 1)
del classstack[-1]
if classstack:
# it's an interface/module method
cur_class = classstack[-1][0]
if isinstance(cur_class, Interface) or \
isinstance(cur_class, Module):
# it's a method
f = Function(None, meth_name,
file, lineno, meth_sig)
cur_class._addmethod(meth_name, f)
# else it's a nested def
else:
f = None
else:
# it's a function
f = Function(module, meth_name,
file, lineno, meth_sig)
if meth_name in dict_counts:
dict_counts[meth_name] += 1
meth_name = "{0}_{1:d}".format(
meth_name, dict_counts[meth_name])
else:
dict_counts[meth_name] = 0
dict[meth_name] = f
if not classstack:
if lastGlobalEntry:
lastGlobalEntry.setEndLine(lineno - 1)
lastGlobalEntry = f
if cur_obj and isinstance(cur_obj, Function):
cur_obj.setEndLine(lineno - 1)
cur_obj = f
classstack.append((f, thisindent)) # Marker for nested fns
elif m.start("String") >= 0:
pass
elif m.start("Comment") >= 0:
pass
elif m.start("Interface") >= 0:
# we found an interface definition
thisindent = indent
indent += 1
# close all interfaces/modules indented at least as much
while classstack and \
classstack[-1][1] >= thisindent:
if classstack[-1][0] is not None:
# record the end line
classstack[-1][0].setEndLine(lineno - 1)
del classstack[-1]
lineno = lineno + src.count('\n', last_lineno_pos, start)
last_lineno_pos = start
class_name = m.group("InterfaceName")
inherit = m.group("InterfaceSupers")
if inherit:
# the interface inherits from other interfaces
inherit = inherit[1:].strip()
inherit = [_commentsub('', inherit)]
# remember this interface
cur_class = Interface(module, class_name, inherit,
file, lineno)
if not classstack:
dict[class_name] = cur_class
else:
cls = classstack[-1][0]
cls._addclass(class_name, cur_class)
if not classstack:
if lastGlobalEntry:
lastGlobalEntry.setEndLine(lineno - 1)
lastGlobalEntry = cur_class
if cur_obj and isinstance(cur_obj, Function):
cur_obj.setEndLine(lineno - 1)
cur_obj = cur_class
classstack.append((cur_class, thisindent))
elif m.start("Module") >= 0:
# we found a module definition
thisindent = indent
indent += 1
# close all interfaces/modules indented at least as much
while classstack and \
classstack[-1][1] >= thisindent:
if classstack[-1][0] is not None:
# record the end line
classstack[-1][0].setEndLine(lineno - 1)
del classstack[-1]
lineno = lineno + src.count('\n', last_lineno_pos, start)
last_lineno_pos = start
module_name = m.group("ModuleName")
# remember this module
cur_class = Module(module, module_name, file, lineno)
if not classstack:
dict[module_name] = cur_class
if lastGlobalEntry:
lastGlobalEntry.setEndLine(lineno - 1)
lastGlobalEntry = cur_class
if cur_obj and isinstance(cur_obj, Function):
cur_obj.setEndLine(lineno - 1)
cur_obj = cur_class
classstack.append((cur_class, thisindent))
elif m.start("Attribute") >= 0:
lineno = lineno + src.count('\n', last_lineno_pos, start)
last_lineno_pos = start
index = -1
while index >= -len(classstack):
if classstack[index][0] is not None and \
not isinstance(classstack[index][0], Function) and \
not classstack[index][1] >= indent:
attributes = m.group("AttributeNames").split(',')
ro = m.group("AttributeReadonly")
for attribute in attributes:
attr = Attribute(module, attribute, file, lineno)
if ro:
attr.setPrivate()
classstack[index][0]._addattribute(attr)
break
else:
index -= 1
if lastGlobalEntry:
lastGlobalEntry.setEndLine(lineno - 1)
lastGlobalEntry = None
elif m.start("Begin") >= 0:
# a begin of a block we are not interested in
indent += 1
elif m.start("End") >= 0:
# an end of a block
indent -= 1
else:
assert 0, "regexp _getnext found something unexpected"
return dict
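def _exampleReadmodule():
    """
    Editor's sketch (not part of the original module): typical use of
    readmodule_ex. The module name and search path are placeholders.
    """
    items = readmodule_ex("interfaces", ["/path/to/idl"])
    return [(name, itm.lineno) for name, itm in items.items()]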
|
gpl-3.0
| -3,784,231,802,065,473,500 | 31.926509 | 79 | 0.520048 | false |
smorand/dtol
|
var/test/essai/models.py
|
1
|
1276
|
from django.db import models, transaction
'''
create table dt_games (id integer primary key auto_increment, name varchar(100), lasteventid integer);
create table dt_spawns (id integer primary key auto_increment, name varchar(100), game_id integer references game(id), lasteventid integer);
insert into dt_games values (1, 'game1', 8);
insert into dt_games values (2, 'game2', 10);
insert into dt_spawns values (1, 'spawn1', 1, 1);
insert into dt_spawns values (2, 'spawn2', 1, 1);
insert into dt_spawns values (3, 'spawn3', 1, 8);
insert into dt_spawns values (4, 'spawn1', 2, 1);
insert into dt_spawns values (8, 'spawn4', 2, 10);
'''
class DtGame(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=100)
lasteventid = models.IntegerField()
def spawns(self, lasteventidchoosen=None):
if lasteventidchoosen is not None:
return DtSpawn.objects.filter(game=self.id, lasteventid__gt=lasteventidchoosen).all()
else:
return DtSpawn.objects.filter(game=self.id).all()
class Meta:
db_table = u'dt_games'
class DtSpawn(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=100)
game = models.ForeignKey(DtGame)
lasteventid = models.IntegerField()
class Meta:
db_table = u'dt_spawns'
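# Hedged usage sketch (not in the original file); the objects referenced below
# are the seed rows from the SQL in the module docstring:
#   game = DtGame.objects.get(name='game1')
#   game.spawns()                        # -> spawn1, spawn2, spawn3
#   game.spawns(lasteventidchoosen=1)    # -> spawn3 only (lasteventid 8 > 1)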
|
gpl-3.0
| 7,205,550,349,692,199,000 | 37.666667 | 140 | 0.732759 | false |
ucloud/uai-sdk
|
examples/tensorflow/train/mnist_summary_1.1/code/mnist_summary.py
|
1
|
7440
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A simple MNIST classifier which displays summaries in TensorBoard.
This is an unimpressive MNIST model, but it is a good example of using
tf.name_scope to make a graph legible in the TensorBoard graph explorer, and of
naming summary tags so that they are grouped meaningfully in TensorBoard.
It demonstrates the functionality of every TensorBoard dashboard.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from uaitrain.arch.tensorflow import uflag
tf.app.flags.DEFINE_float('learning_rate', 0.001, 'Initial learning rate')
tf.app.flags.DEFINE_float('dropout', 0.9, 'Keep probability for training dropout.')
FLAGS = tf.app.flags.FLAGS
def train():
# Import data
mnist = input_data.read_data_sets(FLAGS.data_dir,
one_hot=True)
sess = tf.InteractiveSession()
# Create a multilayer model.
# Input placeholders
with tf.name_scope('input'):
x = tf.placeholder(tf.float32, [None, 784], name='x-input')
y_ = tf.placeholder(tf.float32, [None, 10], name='y-input')
with tf.name_scope('input_reshape'):
image_shaped_input = tf.reshape(x, [-1, 28, 28, 1])
tf.summary.image('input', image_shaped_input, 10)
# We can't initialize these variables to 0 - the network will get stuck.
def weight_variable(shape):
"""Create a weight variable with appropriate initialization."""
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
def bias_variable(shape):
"""Create a bias variable with appropriate initialization."""
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
def variable_summaries(var):
"""Attach a lot of summaries to a Tensor (for TensorBoard visualization)."""
with tf.name_scope('summaries'):
mean = tf.reduce_mean(var)
tf.summary.scalar('mean', mean)
with tf.name_scope('stddev'):
stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
tf.summary.scalar('stddev', stddev)
tf.summary.scalar('max', tf.reduce_max(var))
tf.summary.scalar('min', tf.reduce_min(var))
tf.summary.histogram('histogram', var)
def nn_layer(input_tensor, input_dim, output_dim, layer_name, act=tf.nn.relu):
"""Reusable code for making a simple neural net layer.
It does a matrix multiply, bias add, and then uses relu to nonlinearize.
It also sets up name scoping so that the resultant graph is easy to read,
and adds a number of summary ops.
"""
# Adding a name scope ensures logical grouping of the layers in the graph.
with tf.name_scope(layer_name):
# This Variable will hold the state of the weights for the layer
with tf.name_scope('weights'):
weights = weight_variable([input_dim, output_dim])
variable_summaries(weights)
with tf.name_scope('biases'):
biases = bias_variable([output_dim])
variable_summaries(biases)
with tf.name_scope('Wx_plus_b'):
preactivate = tf.matmul(input_tensor, weights) + biases
tf.summary.histogram('pre_activations', preactivate)
activations = act(preactivate, name='activation')
tf.summary.histogram('activations', activations)
return activations
hidden1 = nn_layer(x, 784, 500, 'layer1')
with tf.name_scope('dropout'):
keep_prob = tf.placeholder(tf.float32)
tf.summary.scalar('dropout_keep_probability', keep_prob)
dropped = tf.nn.dropout(hidden1, keep_prob)
# Do not apply softmax activation yet, see below.
y = nn_layer(dropped, 500, 10, 'layer2', act=tf.identity)
with tf.name_scope('cross_entropy'):
# The raw formulation of cross-entropy,
#
# tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.softmax(y)),
# reduction_indices=[1]))
#
# can be numerically unstable.
#
# So here we use tf.nn.softmax_cross_entropy_with_logits on the
# raw outputs of the nn_layer above, and then average across
# the batch.
diff = tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y)
with tf.name_scope('total'):
cross_entropy = tf.reduce_mean(diff)
tf.summary.scalar('cross_entropy', cross_entropy)
with tf.name_scope('train'):
train_step = tf.train.AdamOptimizer(FLAGS.learning_rate).minimize(
cross_entropy)
with tf.name_scope('accuracy'):
with tf.name_scope('correct_prediction'):
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
with tf.name_scope('accuracy'):
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
tf.summary.scalar('accuracy', accuracy)
# Merge all the summaries and write them out to /tmp/tensorflow/mnist/logs/mnist_with_summaries (by default)
merged = tf.summary.merge_all()
train_writer = tf.summary.FileWriter(FLAGS.log_dir + '/train', sess.graph)
test_writer = tf.summary.FileWriter(FLAGS.log_dir + '/test')
tf.global_variables_initializer().run()
# Train the model, and also write summaries.
# Every 10th step, measure test-set accuracy, and write test summaries
# All other steps, run train_step on training data, & add training summaries
  def feed_dict(train):
    """Make a TensorFlow feed_dict: maps data onto Tensor placeholders."""
    if train:
      xs, ys = mnist.train.next_batch(100)
      k = FLAGS.dropout
    else:
      xs, ys = mnist.test.images, mnist.test.labels
      k = 1.0
    return {x: xs, y_: ys, keep_prob: k}
saver = tf.train.Saver()
for i in range(FLAGS.max_step):
if i % 10 == 0: # Record summaries and test-set accuracy
summary, acc = sess.run([merged, accuracy], feed_dict=feed_dict(False))
test_writer.add_summary(summary, i)
print('Accuracy at step %s: %s' % (i, acc))
else: # Record train set summaries, and train
if i % 100 == 99: # Record execution stats
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
summary, _ = sess.run([merged, train_step],
feed_dict=feed_dict(True),
options=run_options,
run_metadata=run_metadata)
train_writer.add_run_metadata(run_metadata, 'step%03d' % i)
train_writer.add_summary(summary, i)
print('Adding run metadata for', i)
else: # Record a summary
summary, _ = sess.run([merged, train_step], feed_dict=feed_dict(True))
train_writer.add_summary(summary, i)
save_path = saver.save(sess, FLAGS.output_dir + "/model.ckpt")
print("Model saved in file: %s" % save_path)
train_writer.close()
test_writer.close()
def main(_):
train()
if __name__ == '__main__':
tf.app.run()
|
apache-2.0
| 2,417,338,661,895,161,300 | 38.163158 | 110 | 0.667876 | false |
wujuguang/scrapy
|
scrapy/core/downloader/webclient.py
|
1
|
5796
|
from time import time
from six.moves.urllib.parse import urlparse, urlunparse, urldefrag
from twisted.web.client import HTTPClientFactory
from twisted.web.http import HTTPClient
from twisted.internet import defer
from scrapy.http import Headers
from scrapy.utils.httpobj import urlparse_cached
from scrapy.utils.python import to_bytes
from scrapy.responsetypes import responsetypes
def _parsed_url_args(parsed):
# Assume parsed is urlparse-d from Request.url,
# which was passed via safe_url_string and is ascii-only.
b = lambda s: to_bytes(s, encoding='ascii')
path = urlunparse(('', '', parsed.path or '/', parsed.params, parsed.query, ''))
path = b(path)
host = b(parsed.hostname)
port = parsed.port
scheme = b(parsed.scheme)
netloc = b(parsed.netloc)
if port is None:
port = 443 if scheme == b'https' else 80
return scheme, netloc, host, port, path
def _parse(url):
""" Return tuple of (scheme, netloc, host, port, path),
all in bytes except for port which is int.
Assume url is from Request.url, which was passed via safe_url_string
and is ascii-only.
"""
url = url.strip()
parsed = urlparse(url)
return _parsed_url_args(parsed)
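# Hedged illustration (not part of the original module):
#   _parse('http://example.com/some/page?x=1')
#   # -> (b'http', b'example.com', b'example.com', 80, b'/some/page?x=1')
# with the port defaulting to 443 when the scheme is https.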
class ScrapyHTTPPageGetter(HTTPClient):
delimiter = b'\n'
def connectionMade(self):
self.headers = Headers() # bucket for response headers
# Method command
self.sendCommand(self.factory.method, self.factory.path)
# Headers
for key, values in self.factory.headers.items():
for value in values:
self.sendHeader(key, value)
self.endHeaders()
# Body
if self.factory.body is not None:
self.transport.write(self.factory.body)
def lineReceived(self, line):
return HTTPClient.lineReceived(self, line.rstrip())
def handleHeader(self, key, value):
self.headers.appendlist(key, value)
def handleStatus(self, version, status, message):
self.factory.gotStatus(version, status, message)
def handleEndHeaders(self):
self.factory.gotHeaders(self.headers)
def connectionLost(self, reason):
self._connection_lost_reason = reason
HTTPClient.connectionLost(self, reason)
self.factory.noPage(reason)
def handleResponse(self, response):
if self.factory.method.upper() == b'HEAD':
self.factory.page(b'')
elif self.length is not None and self.length > 0:
self.factory.noPage(self._connection_lost_reason)
else:
self.factory.page(response)
self.transport.loseConnection()
def timeout(self):
self.transport.loseConnection()
# transport cleanup needed for HTTPS connections
if self.factory.url.startswith(b'https'):
self.transport.stopProducing()
self.factory.noPage(\
defer.TimeoutError("Getting %s took longer than %s seconds." % \
(self.factory.url, self.factory.timeout)))
class ScrapyHTTPClientFactory(HTTPClientFactory):
"""Scrapy implementation of the HTTPClientFactory overwriting the
setUrl method to make use of our Url object that cache the parse
result.
"""
protocol = ScrapyHTTPPageGetter
waiting = 1
noisy = False
followRedirect = False
afterFoundGet = False
def __init__(self, request, timeout=180):
self._url = urldefrag(request.url)[0]
# converting to bytes to comply to Twisted interface
self.url = to_bytes(self._url, encoding='ascii')
self.method = to_bytes(request.method, encoding='ascii')
self.body = request.body or None
self.headers = Headers(request.headers)
self.response_headers = None
self.timeout = request.meta.get('download_timeout') or timeout
self.start_time = time()
self.deferred = defer.Deferred().addCallback(self._build_response, request)
# Fixes Twisted 11.1.0+ support as HTTPClientFactory is expected
# to have _disconnectedDeferred. See Twisted r32329.
        # As Scrapy implements its own logic to handle redirects, there is
        # no need to add the callback _waitForDisconnect.
# Specifically this avoids the AttributeError exception when
# clientConnectionFailed method is called.
self._disconnectedDeferred = defer.Deferred()
self._set_connection_attributes(request)
# set Host header based on url
self.headers.setdefault('Host', self.netloc)
        # set Content-Length based on the length of the body
if self.body is not None:
self.headers['Content-Length'] = len(self.body)
# just in case a broken http/1.1 decides to keep connection alive
self.headers.setdefault("Connection", "close")
# Content-Length must be specified in POST method even with no body
elif self.method == b'POST':
self.headers['Content-Length'] = 0
def _build_response(self, body, request):
request.meta['download_latency'] = self.headers_time-self.start_time
status = int(self.status)
headers = Headers(self.response_headers)
respcls = responsetypes.from_args(headers=headers, url=self._url)
return respcls(url=self._url, status=status, headers=headers, body=body)
def _set_connection_attributes(self, request):
parsed = urlparse_cached(request)
self.scheme, self.netloc, self.host, self.port, self.path = _parsed_url_args(parsed)
proxy = request.meta.get('proxy')
if proxy:
self.scheme, _, self.host, self.port, _ = _parse(proxy)
self.path = self.url
def gotHeaders(self, headers):
self.headers_time = time()
self.response_headers = headers
|
bsd-3-clause
| 2,003,167,116,600,618,500 | 35.225 | 92 | 0.658213 | false |
gmartinvela/Incubator
|
Incubator/mongo_save.py
|
1
|
2777
|
from pymongo import MongoClient
import urllib2
import time
import datetime
import json
import sqlite3
import pandas.io.sql as psql
from data_utils import retrieve_DBs, extract_data_from_DB
mongo_client = MongoClient()
mongo_db = mongo_client.incubator
measures_collection = mongo_db.measures
local_path_SHT1xdb = "/home/weblord/Desktop/Incubator/Incubator/static/data/SHT1x.db"
SQL_execute_SHT1xdb = "select max(date), humi from READ"
index_SHT1xdb = "date"
SQL_remove_last_SHT1xdb = "select date, humi from READ"
SHT1x = [local_path_SHT1xdb, SQL_execute_SHT1xdb, index_SHT1xdb, SQL_remove_last_SHT1xdb]
local_path_thermodb = "/home/weblord/Desktop/Incubator/Incubator/static/data/thermo.db"
SQL_execute_thermodb = "select max(DATE_LOG), TEMP_LOG from LOG"
index_thermodb = "DATE_LOG"
SQL_remove_last_thermodb = "select DATE_LOG, TEMP_LOG from LOG"
THERMO = [local_path_thermodb, SQL_execute_thermodb, index_thermodb, SQL_remove_last_thermodb]
DBs = [SHT1x, THERMO]
retrieve_DBs()
dataframes_sqlite = []
all_DBs_list = []
now = datetime.datetime.utcnow()
now_without_seconds = now.strftime("%Y-%m-%d %H:%M")
print "NOW:",now_without_seconds
URL = 'http://localhost:8008/measures'
data_lost = []
def retrieve_row_from_DBs(DBs, rows):
for DB in DBs:
with sqlite3.connect(DB[0], detect_types=sqlite3.PARSE_DECLTYPES) as conn:
all_db = psql.frame_query(DB[3], con=conn)
all_db.index = pd.to_datetime(all_db.pop(DB[2]))
# TODO: This is an approximation. We need data every 15 seconds minimum. In these moments SHT1x go 1:13 seconds
all_db = all_db.resample('15S', fill_method='bfill')
all_DBs_list.append(all_db)
    concatenated_db = pd.concat([all_DBs_list[0], all_DBs_list[1]], axis=1)
    concatenated_db_filled = concatenated_db.fillna(method='ffill')
    print "HUMI: %.2f" % concatenated_db_filled.humi.iloc[0]
    print "TEMP: %.2f" % concatenated_db_filled.TEMP_LOG.iloc[0]
# Remove this row
def request_without_proxy(URL):
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
request = urllib2.Request(URL)
request_data = opener.open(request).read()
return request_data
def save_in_mongo():
print "Saving all the data to mongodb"
while(1):
# if data_lost:
# try:
# retrieve_DBs()
#		retrieve_row_from_DBs(DBs, len(data_lost))
# except:
# print "Impossible to retrive DB. Fix the problems in the net!"
# time.sleep(10)
# else:
# time.sleep(15)
time.sleep(15)
try:
data = request_without_proxy(URL)
json_data = json.loads(data)
measure = {
'date': datetime.datetime.utcnow(),
'humi': json_data['HUMI'],
'temp': json_data['TEMP']
}
measure_id = measures_collection.insert(measure)
except:
data_lost.append(datetime.datetime.utcnow())
#print measure_id
|
mit
| 6,239,591,299,131,140,000 | 29.866667 | 115 | 0.707598 | false |
NBajanca/django-non-profit
|
volunteers/tests.py
|
1
|
2614
|
from django.contrib.auth.models import User
from django.test import TestCase
from volunteers.models import Volunteer, Task, Shift, Preference, VolunteerShift, VolunteerUnavailability, \
VolunteerPresence
class VolunteerTestCase(TestCase):
def setUp(self):
self.user = User.objects.create_user(username='refood', email='test@refood-non-profit.org',
password='top_secret')
Volunteer.objects.create(user=self.user, car_availability=True)
self.task = Task.objects.create(name='Distribuição', slug='distribuicao', place='BO', time_beginning='19:30:00',
time_ending='21:30:00', min_volunteers='4', max_volunteers='6')
self.shift = Shift.objects.create(task=self.task, day_of_the_week='1')
self.preference = Preference.objects.create(volunteer=self.user.volunteer, shift=self.shift, priority=1)
self.volunteer_shift = VolunteerShift.objects.create(volunteer=self.user.volunteer, shift=self.shift,
frequency='W')
self.volunteer_unavailability = VolunteerUnavailability.objects.create(volunteer_shift=self.volunteer_shift,
date='2016-12-26')
self.volunteer_presence = VolunteerPresence.objects.create(volunteer_shift=self.volunteer_shift,
date='2016-12-27', presence=True)
def test_Volunteer(self):
self.assertEqual(str(self.user.volunteer), self.user.get_username())
self.user.first_name = 'ReFood'
self.user.last_name = 'Cascais CPR'
self.user.save()
self.assertEqual(str(self.user.volunteer), self.user.get_full_name())
def test_Task(self):
self.assertEqual(str(self.task), 'Distribuição')
def test_Shift(self):
self.assertEqual(str(self.shift), 'Distribuição (Segunda-feira)')
def test_Preference(self):
self.assertEqual(str(self.preference), '[1] refood - Distribuição (Segunda-feira)')
def test_VolunteerShift(self):
self.assertEqual(str(self.volunteer_shift), 'refood - Distribuição (Segunda-feira)')
def test_VolunteerUnavailability(self):
self.assertEqual(str(self.volunteer_unavailability),
'2016-12-26 - refood - Distribuição (Segunda-feira)')
def test_VolunteerPresence(self):
self.assertEqual(str(self.volunteer_presence), '2016-12-27 - refood - Distribuição (Segunda-feira)')
|
mit
| -5,676,116,049,376,112,000 | 49 | 120 | 0.628846 | false |
tddv/readthedocs.org
|
readthedocs/doc_builder/environments.py
|
1
|
24734
|
'''
Documentation Builder Environments
'''
import os
import re
import sys
import logging
import subprocess
import traceback
import socket
from datetime import datetime
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _, ugettext_noop
from docker import Client
from docker.utils import create_host_config
from docker.errors import APIError as DockerAPIError, DockerException
from rest_framework.renderers import JSONRenderer
from slumber.exceptions import HttpClientError
from readthedocs.builds.constants import BUILD_STATE_FINISHED
from readthedocs.builds.models import BuildCommandResultMixin
from readthedocs.projects.constants import LOG_TEMPLATE
from readthedocs.api.client import api as api_v1
from readthedocs.restapi.client import api as api_v2
from readthedocs.restapi.serializers import BuildCommandSerializer
from .exceptions import (BuildEnvironmentException, BuildEnvironmentError,
BuildEnvironmentWarning)
from .constants import (DOCKER_SOCKET, DOCKER_VERSION, DOCKER_IMAGE,
DOCKER_LIMITS, DOCKER_TIMEOUT_EXIT_CODE,
DOCKER_OOM_EXIT_CODE, SPHINX_TEMPLATE_DIR,
MKDOCS_TEMPLATE_DIR, DOCKER_HOSTNAME_MAX_LEN)
log = logging.getLogger(__name__)
class BuildCommand(BuildCommandResultMixin):
    '''Wrap command execution for build environments
This wraps subprocess commands with some logic to handle exceptions,
logging, and setting up the env for the build command.
    This acts as a mapping of sorts to the API representation of the
:py:class:`readthedocs.builds.models.BuildCommandResult` model.
:param command: string or array of command parameters
:param cwd: current working path for the command
:param shell: execute command in shell, default=False
:param environment: environment variables to add to environment
:type environment: dict
:param combine_output: combine stdout/stderr, default=True
:param input_data: data to pass in on stdin
:type input_data: str
:param build_env: build environment to use to execute commands
:param bin_path: binary path to add to PATH resolution
:param description: a more grokable description of the command being run
'''
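    # Hedged usage sketch (not in the original source; the command and path
    # are illustrative):
    #   cmd = BuildCommand(['pip', 'install', 'requests'], cwd='/tmp/checkout')
    #   cmd.run()
    #   if cmd.failed:
    #       log.error(cmd.output)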
def __init__(self, command, cwd=None, shell=False, environment=None,
combine_output=True, input_data=None, build_env=None,
bin_path=None, description=None):
self.command = command
self.shell = shell
if cwd is None:
cwd = os.getcwd()
self.cwd = cwd
self.environment = os.environ.copy()
if environment is not None:
assert 'PATH' not in environment, "PATH can't be set"
self.environment.update(environment)
self.combine_output = combine_output
self.input_data = input_data
self.build_env = build_env
self.output = None
self.error = None
self.start_time = None
self.end_time = None
self.bin_path = bin_path
self.description = ''
if description is not None:
self.description = description
self.exit_code = None
def __str__(self):
# TODO do we want to expose the full command here?
output = u''
if self.output is not None:
output = self.output.encode('utf-8')
return '\n'.join([self.get_command(), output])
def run(self):
'''Set up subprocess and execute command
:param cmd_input: input to pass to command in STDIN
:type cmd_input: str
:param combine_output: combine STDERR into STDOUT
'''
log.info("Running: '%s' [%s]", self.get_command(), self.cwd)
self.start_time = datetime.utcnow()
stdout = subprocess.PIPE
stderr = subprocess.PIPE
stdin = None
if self.input_data is not None:
stdin = subprocess.PIPE
if self.combine_output:
stderr = subprocess.STDOUT
environment = {}
environment.update(self.environment)
environment['READTHEDOCS'] = 'True'
if self.build_env is not None:
environment['READTHEDOCS_VERSION'] = self.build_env.version.slug
environment['READTHEDOCS_PROJECT'] = self.build_env.project.slug
if 'DJANGO_SETTINGS_MODULE' in environment:
del environment['DJANGO_SETTINGS_MODULE']
if 'PYTHONPATH' in environment:
del environment['PYTHONPATH']
if self.bin_path is not None:
env_paths = environment.get('PATH', '').split(':')
env_paths.insert(0, self.bin_path)
environment['PATH'] = ':'.join(env_paths)
try:
proc = subprocess.Popen(
self.command,
shell=self.shell,
cwd=self.cwd,
stdin=stdin,
stdout=stdout,
stderr=stderr,
env=environment,
)
cmd_input = None
if self.input_data is not None:
cmd_input = self.input_data
cmd_output = proc.communicate(input=cmd_input)
(cmd_stdout, cmd_stderr) = cmd_output
try:
self.output = cmd_stdout.decode('utf-8', 'replace')
except (TypeError, AttributeError):
self.output = None
try:
self.error = cmd_stderr.decode('utf-8', 'replace')
except (TypeError, AttributeError):
self.error = None
self.exit_code = proc.returncode
except OSError:
self.error = traceback.format_exc()
self.output = self.error
self.exit_code = -1
finally:
self.end_time = datetime.utcnow()
def get_command(self):
'''Flatten command'''
if hasattr(self.command, '__iter__') and not isinstance(self.command, str):
return ' '.join(self.command)
else:
return self.command
def save(self):
'''Save this command and result via the API'''
data = {
'build': self.build_env.build.get('id'),
'command': self.get_command(),
'description': self.description,
'output': self.output,
'exit_code': self.exit_code,
'start_time': self.start_time,
'end_time': self.end_time,
}
api_v2.command.post(data)
class DockerBuildCommand(BuildCommand):
    '''Build command that runs inside an existing Docker container
    The container itself is created and torn down by DockerEnvironment; this
    class only executes the command in it through the Docker exec API.
'''
def run(self):
'''Execute command in existing Docker container
:param cmd_input: input to pass to command in STDIN
:type cmd_input: str
:param combine_output: combine STDERR into STDOUT
'''
log.info("Running in container %s: '%s' [%s]",
self.build_env.container_id, self.get_command(), self.cwd)
self.start_time = datetime.utcnow()
client = self.build_env.get_client()
try:
exec_cmd = client.exec_create(
container=self.build_env.container_id,
cmd=self.get_wrapped_command(),
stdout=True,
stderr=True
)
output = client.exec_start(exec_id=exec_cmd['Id'], stream=False)
try:
self.output = output.decode('utf-8', 'replace')
except (TypeError, AttributeError):
self.output = ''
cmd_ret = client.exec_inspect(exec_id=exec_cmd['Id'])
self.exit_code = cmd_ret['ExitCode']
# Docker will exit with a special exit code to signify the command
# was killed due to memory usage, make the error code nicer.
if (self.exit_code == DOCKER_OOM_EXIT_CODE and
self.output == 'Killed\n'):
self.output = _('Command killed due to excessive memory '
'consumption\n')
except DockerAPIError:
self.exit_code = -1
if self.output is None or not self.output:
self.output = _('Command exited abnormally')
finally:
self.end_time = datetime.utcnow()
def get_wrapped_command(self):
"""Escape special bash characters in command to wrap in shell
In order to set the current working path inside a docker container, we
need to wrap the command in a shell call manually. Some characters will
be interpreted as shell characters without escaping, such as: ``pip
install requests<0.8``. This escapes a good majority of those
characters.
"""
bash_escape_re = re.compile(r"([\t\ \!\"\#\$\&\'\(\)\*\:\;\<\>\?\@"
r"\[\\\]\^\`\{\|\}\~])")
prefix = ''
if self.bin_path:
prefix += 'PATH={0}:$PATH '.format(self.bin_path)
return ("/bin/sh -c 'cd {cwd} && {prefix}{cmd}'"
.format(
cwd=self.cwd,
prefix=prefix,
cmd=(' '.join([bash_escape_re.sub(r'\\\1', part)
for part in self.command]))))
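    # Hedged illustration (not from the original source): with
    # command=['pip', 'install', 'requests<0.8'] and cwd='/checkout', the
    # wrapped command comes out roughly as
    #   /bin/sh -c 'cd /checkout && pip install requests\<0.8'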
class BuildEnvironment(object):
"""Base build environment
Base class for wrapping command execution for build steps. This provides a
context for command execution and reporting, and eventually performs updates
on the build object itself, reporting success/failure, as well as top-level
failures.
:param project: Project that is being built
:param version: Project version that is being built
:param build: Build instance
:param record: Record status of build object
:param environment: shell environment variables
"""
def __init__(self, project=None, version=None, build=None, record=True,
environment=None):
self.project = project
self.version = version
self.build = build
self.record = record
self.environment = environment or {}
self.commands = []
self.failure = None
self.start_time = datetime.utcnow()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
ret = self.handle_exception(exc_type, exc_value, tb)
self.build['state'] = BUILD_STATE_FINISHED
log.info(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg='Build finished'))
return ret
def handle_exception(self, exc_type, exc_value, _):
"""Exception handling for __enter__ and __exit__
This reports on the exception we're handling and special cases
subclasses of BuildEnvironmentException. For
:py:class:`BuildEnvironmentWarning`, exit this context gracefully, but
don't mark the build as a failure. For all other exception classes,
including :py:class:`BuildEnvironmentError`, the build will be marked as
a failure and the context will be gracefully exited.
"""
if exc_type is not None:
log.error(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg=exc_value),
exc_info=True)
if not issubclass(exc_type, BuildEnvironmentWarning):
self.failure = exc_value
return True
def run(self, *cmd, **kwargs):
'''Shortcut to run command from environment'''
return self.run_command_class(cls=self.command_class, cmd=cmd, **kwargs)
def run_command_class(self, cls, cmd, **kwargs):
'''Run command from this environment
Use ``cls`` to instantiate a command
:param warn_only: Don't raise an exception on command failure
'''
warn_only = kwargs.pop('warn_only', False)
# Remove PATH from env, and set it to bin_path if it isn't passed in
env_path = self.environment.pop('BIN_PATH', None)
if 'bin_path' not in kwargs and env_path:
kwargs['bin_path'] = env_path
assert 'environment' not in kwargs, "environment can't be passed in via commands."
kwargs['environment'] = self.environment
kwargs['build_env'] = self
build_cmd = cls(cmd, **kwargs)
self.commands.append(build_cmd)
build_cmd.run()
# Save to database
if self.record:
build_cmd.save()
if build_cmd.failed:
msg = u'Command {cmd} failed'.format(cmd=build_cmd.get_command())
if build_cmd.output:
msg += u':\n{out}'.format(out=build_cmd.output)
if warn_only:
log.warn(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg=msg))
else:
raise BuildEnvironmentWarning(msg)
return build_cmd
@property
def successful(self):
'''Is build completed, without top level failures or failing commands'''
return (self.done and self.failure is None and
all(cmd.successful for cmd in self.commands))
@property
def failed(self):
'''Is build completed, but has top level failure or failing commands'''
return (self.done and (
self.failure is not None or
any(cmd.failed for cmd in self.commands)
))
@property
def done(self):
'''Is build in finished state'''
return (self.build is not None and
self.build['state'] == BUILD_STATE_FINISHED)
def update_build(self, state=None):
"""Record a build by hitting the API
This step is skipped if we aren't recording the build, or if we don't
want to record successful builds yet (if we are running setup commands
for the build)
"""
if not self.record:
return None
self.build['project'] = self.project.pk
self.build['version'] = self.version.pk
self.build['builder'] = socket.gethostname()
self.build['state'] = state
if self.done:
self.build['success'] = self.successful
# TODO drop exit_code and provide a more meaningful UX for error
# reporting
if self.failure and isinstance(self.failure,
BuildEnvironmentException):
self.build['exit_code'] = self.failure.status_code
elif len(self.commands) > 0:
self.build['exit_code'] = max([cmd.exit_code
for cmd in self.commands])
self.build['setup'] = self.build['setup_error'] = ""
self.build['output'] = self.build['error'] = ""
if self.start_time:
build_length = (datetime.utcnow() - self.start_time)
self.build['length'] = int(build_length.total_seconds())
if self.failure is not None:
# Only surface the error message if it was a
# BuildEnvironmentException or BuildEnvironmentWarning
if isinstance(self.failure,
(BuildEnvironmentException, BuildEnvironmentWarning)):
self.build['error'] = str(self.failure)
else:
self.build['error'] = ugettext_noop(
"An unexpected error occurred")
# Attempt to stop unicode errors on build reporting
for key, val in self.build.items():
if isinstance(val, basestring):
self.build[key] = val.decode('utf-8', 'ignore')
try:
api_v2.build(self.build['id']).put(self.build)
except HttpClientError as e:
log.error("Unable to post a new build: %s" % e.content)
except Exception:
log.error("Unknown build exception", exc_info=True)
class LocalEnvironment(BuildEnvironment):
'''Local execution environment'''
command_class = BuildCommand
class DockerEnvironment(BuildEnvironment):
'''
Docker build environment, uses docker to contain builds
If :py:data:`settings.DOCKER_ENABLE` is true, build documentation inside a
docker container, instead of the host system, using this build environment
class. The build command creates a docker container from a pre-built image,
defined by :py:data:`settings.DOCKER_IMAGE`. This container is started with
a mount to the project's build path under ``user_builds`` on the host
machine, walling off project builds from reading/writing other projects'
data.
:param docker_socket: Override to Docker socket URI
'''
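    # Hedged usage sketch (not part of the original file); the variables and
    # the sphinx command are illustrative:
    #   with DockerEnvironment(project=project, version=version,
    #                          build=build) as build_env:
    #       build_env.run('sphinx-build', '-b', 'html', '.', '_build/html')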
command_class = DockerBuildCommand
container_image = DOCKER_IMAGE
container_mem_limit = DOCKER_LIMITS.get('memory')
container_time_limit = DOCKER_LIMITS.get('time')
def __init__(self, *args, **kwargs):
self.docker_socket = kwargs.pop('docker_socket', DOCKER_SOCKET)
super(DockerEnvironment, self).__init__(*args, **kwargs)
self.client = None
self.container = None
self.container_name = slugify(
'build-{build}-project-{project_id}-{project_name}'.format(
build=self.build.get('id'),
project_id=self.project.pk,
project_name=self.project.slug,
)[:DOCKER_HOSTNAME_MAX_LEN]
)
if self.project.container_mem_limit:
self.container_mem_limit = self.project.container_mem_limit
if self.project.container_time_limit:
self.container_time_limit = self.project.container_time_limit
def __enter__(self):
'''Start of environment context'''
log.info('Creating container')
try:
# Test for existing container. We remove any stale containers that
# are no longer running here if there is a collision. If the
# container is still running, this would be a failure of the version
# locking code, so we throw an exception.
state = self.container_state()
if state is not None:
if state.get('Running') is True:
exc = BuildEnvironmentError(
_('A build environment is currently '
'running for this version'))
self.failure = exc
self.build['state'] = BUILD_STATE_FINISHED
raise exc
else:
log.warn(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg=("Removing stale container {0}"
.format(self.container_id))))
client = self.get_client()
client.remove_container(self.container_id)
except DockerAPIError:
pass
# Create the checkout path if it doesn't exist to avoid Docker creation
if not os.path.exists(self.project.doc_path):
os.makedirs(self.project.doc_path)
try:
self.create_container()
except: # pylint: disable=broad-except
self.__exit__(*sys.exc_info())
raise
return self
def __exit__(self, exc_type, exc_value, tb):
'''End of environment context'''
ret = self.handle_exception(exc_type, exc_value, tb)
# Update buildenv state given any container error states first
self.update_build_from_container_state()
client = self.get_client()
try:
client.kill(self.container_id)
except DockerAPIError:
pass
try:
log.info('Removing container %s', self.container_id)
client.remove_container(self.container_id)
except DockerAPIError:
log.error(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg="Couldn't remove container"),
exc_info=True)
self.container = None
self.build['state'] = BUILD_STATE_FINISHED
log.info(LOG_TEMPLATE
.format(project=self.project.slug,
version=self.version.slug,
msg='Build finished'))
return ret
def get_client(self):
'''Create Docker client connection'''
try:
if self.client is None:
self.client = Client(
base_url=self.docker_socket,
version=DOCKER_VERSION,
timeout=None
)
return self.client
except DockerException as e:
log.error(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg=e),
exc_info=True)
raise BuildEnvironmentError('Problem creating build environment')
@property
def container_id(self):
'''Return id of container if it is valid'''
if self.container_name:
return self.container_name
elif self.container:
return self.container.get('Id')
def container_state(self):
'''Get container state'''
client = self.get_client()
try:
info = client.inspect_container(self.container_id)
return info.get('State', {})
except DockerAPIError:
return None
def update_build_from_container_state(self):
'''Update buildenv state from container state
In the case of the parent command exiting before the exec commands
finish and the container is destroyed, or in the case of OOM on the
container, set a failure state and error message explaining the failure
on the buildenv.
'''
state = self.container_state()
if state is not None and state.get('Running') is False:
if state.get('ExitCode') == DOCKER_TIMEOUT_EXIT_CODE:
self.failure = BuildEnvironmentError(
_('Build exited due to time out'))
elif state.get('OOMKilled', False):
self.failure = BuildEnvironmentError(
_('Build exited due to excessive memory consumption'))
elif state.get('Error'):
self.failure = BuildEnvironmentError(
(_('Build exited due to unknown error: {0}')
.format(state.get('Error'))))
def create_container(self):
'''Create docker container'''
client = self.get_client()
image = self.container_image
if self.project.container_image:
image = self.project.container_image
try:
self.container = client.create_container(
image=image,
command=('/bin/sh -c "sleep {time}; exit {exit}"'
.format(time=self.container_time_limit,
exit=DOCKER_TIMEOUT_EXIT_CODE)),
name=self.container_id,
hostname=self.container_id,
host_config=create_host_config(binds={
SPHINX_TEMPLATE_DIR: {
'bind': SPHINX_TEMPLATE_DIR,
'mode': 'ro'
},
MKDOCS_TEMPLATE_DIR: {
'bind': MKDOCS_TEMPLATE_DIR,
'mode': 'ro'
},
self.project.doc_path: {
'bind': self.project.doc_path,
'mode': 'rw'
},
}),
detach=True,
environment=self.environment,
mem_limit=self.container_mem_limit,
)
client.start(container=self.container_id)
except DockerAPIError as e:
log.error(LOG_TEMPLATE
.format(
project=self.project.slug,
version=self.version.slug,
msg=e.explanation),
exc_info=True)
raise BuildEnvironmentError('Build environment creation failed')
|
mit
| 6,347,718,754,111,018,000 | 37.52648 | 90 | 0.569459 | false |
JoaquimPatriarca/senpy-for-gis
|
tmp/Prog_25.py
|
1
|
7237
|
# Creation of training samples (70%) and validation samples (30%) considered ideal for each of the conditioning factors - "ideal" meaning: for each class of each factor, 70% of the points intersecting it are set aside as training occurrences and 30% as validation occurrences - GRASS GIS
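# Hedged worked example (not in the original file): a class intersected by 10
# occurrence points contributes int((10 * 70) / 100) = 7 points to the training
# sample and the remaining 3 points to the validation sample.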
def cria_objTable(tab, path):
from grass.pygrass.vector.table import Table, get_path
import sqlite3
objTabela = Table(name=tab, connection=sqlite3.connect(get_path(path)))
return objTabela
def id_classes(in_vector, field, grassdata_workspace, location, mapset):
caminho = grassdata_workspace + "\\" + location + "\\" + mapset + "\\sqlite\\sqlite.db"
tabela = cria_objTable(in_vector, caminho)
cls = []
for linha in tabela:
cls.append(int(linha[1]))
import numpy
cls = numpy.unique(cls)
t = []
for i in cls:
if i < 0 or i > 20:
continue
else:
t.append(i)
return t
def conta_numero_pnt(entrada):
from grass.pygrass.vector import VectorTopo
layer = VectorTopo(entrada)
layer.is_open()
layer.exist()
layer.mapset
layer.open(mode='r')
try:
linha = layer.next()
except:
return
conta = 0
try:
while linha:
conta += 1
linha = layer.next()
except:
layer.close()
return conta
def extrai_70_30(vector, setenta):
lst70 = []
lst30 = []
from grass.pygrass.vector import VectorTopo
layer = VectorTopo(vector)
layer.open(mode='r')
linha = layer.next()
logos = 1
try:
while linha:
            ponto = layer.read(logos)  # Point-type object
string = str.split(str(ponto), "(")
outra = str.split(string[1], " ")
x = float(outra[0])
y = float(outra[1][:-1])
temp = [x, y]
if logos <= setenta:
lst70.append(temp)
elif logos > setenta:
lst30.append(temp)
linha = layer.next()
logos += 1
except:
layer.close()
return lst70, lst30
def cria_vector_pnt(lista, saida):
from grass.pygrass.vector import VectorTopo
from grass.pygrass.vector.geometry import Point
new = VectorTopo(saida)
cols = [(u'cat', 'INTEGER PRIMARY KEY')]
new.open('w', tab_name=saida, tab_cols=cols)
for pnt in lista:
point = Point(pnt[0], pnt[1])
new.write(point)
new.table.conn.commit()
new.table.execute().fetchall()
new.close()
def merge_pntLayers(lista, saida):
pontos = []
from grass.pygrass.vector import VectorTopo
for i in lista:
layer = VectorTopo(i)
layer.open(mode='r')
linha = layer.next()
logos = 1
try:
while linha:
pnt = layer.read(logos)
string = str.split(str(pnt), "(")
outra = str.split(string[1], " ")
temp = [float(outra[0]), float(outra[1][:-1])]
pontos.append(temp)
linha = layer.next()
logos += 1
except:
layer.close()
cria_vector_pnt(pontos, saida)
def gera_amostras_main(fenomeno, workspace_factores, saida, grassdata, location, mapset):
import time
inicio = time.localtime()
print inicio
    # List the conditioning factors and add them to GRASS
lst_factores = lista_factores(workspace_factores)
conta = 0
    # Add the data expressing the occurrences of the phenomenon to GRASS
v_in_ogr(fenomeno, "movimentos")
    lst_de_lst = []  # keeps the per-factor lists that enumerate each shapefile class
for i in lst_factores:
        # Convert the factor rasters to vector format
RasterToPolygon(i, "v_factor_" + str(conta))
lst_cls = id_classes("v_factor_" + str(conta), "value", grassdata, location, mapset)
lst_extract = []
        # Extract each class into a separate file
for cls in lst_cls:
extract("v_factor_" + str(conta), "factor_" + str(conta) + "_" + str(cls), cls)
lst_extract.append("factor_" + str(conta) + "_" + str(cls))
lst_de_lst.append(lst_extract)
        # Intersect each extracted class with the landslide occurrences ("movimentos")
lst_intersect = []
for i in range(len(lst_extract)):
intersection("movimentos", lst_extract[i], "intersect_" + str(conta) + "_" + str(i))
lst_intersect.append("intersect_" + str(conta) + "_" + str(i))
        # Count the points of each intersection and split them into two layers holding 70% and 30% of the initial total
c = 0
lst_amostras_classe70 = []
lst_amostras_classe30 = []
for i in lst_intersect:
nr_pontos = conta_numero_pnt(i)
if nr_pontos < 4:
continue
setenta = int((nr_pontos * 70) / 100)
amostras = extrai_70_30(i, setenta)
            # Create the 70% class sample
cria_vector_pnt(amostras[0], "setenta_" + str(conta) + "_" + str(c))
lst_amostras_classe70.append("setenta_" + str(conta) + "_" + str(c))
            # Create the 30% class sample
cria_vector_pnt(amostras[1], "trinta_" + str(conta) + "_" + str(c))
lst_amostras_classe30.append("trinta_" + str(conta) + "_" + str(c))
c += 1
        # Merge the per-class samples into a single 70% layer and a single 30% layer
merge_pntLayers(lst_amostras_classe70, "amostra70_" + str(conta))
merge_pntLayers(lst_amostras_classe30, "amostra30_" + str(conta))
conta += 1
    # Evaluate which sample is best - check how representative each class of each factor is in it
import xlwt
excel = xlwt.Workbook()
for amostra in range(len(lst_factores)):
nr_linha = 0
add_livro = excel.add_sheet("amostra_" + str(amostra))
amostra_70 = "amostra70_" + str(amostra)
amostra_30 = "amostra30_" + str(amostra)
for raster in range(len(lst_factores)):
add_livro.write(nr_linha, 0, lst_factores[raster])
nr_linha += 1
add_livro.write(nr_linha, 1, 'ponto_70')
add_livro.write(nr_linha, 2, 'ponto_30')
conta = 0
nr_linha += 1
lst_shp = lst_de_lst[raster]
for classe in range(len(lst_shp)):
intersection(amostra_70, lst_shp[classe], "intersect70_" + str(amostra) + "_" + str(raster) + "_" + str(conta))
conta_70 = conta_numero_pnt("intersect70_" + str(amostra) + "_" + str(raster) + "_" + str(conta))
intersection(amostra_30, lst_shp[classe], "intersect30_" + str(amostra) + "_" + str(raster) + "_" + str(conta))
conta_30 = conta_numero_pnt("intersect30_" + str(amostra) + "_" + str(raster) + "_" + str(conta))
add_livro.write(nr_linha, 0, classe)
add_livro.write(nr_linha, 1, conta_70)
add_livro.write(nr_linha, 2, conta_30)
nr_linha += 1
conta += 1
excel.save(saida)
fim = time.localtime()
print fim
|
gpl-3.0
| 2,416,969,780,901,124,600 | 40.087209 | 291 | 0.55769 | false |
ctrl-alt-d/fpuf
|
fpuf/settings.py
|
1
|
7622
|
# -*- coding: utf-8 -*-
import os
from django.core.urlresolvers import reverse_lazy
# Django settings for aula project.
PROJECT_DIR = os.path.join( os.path.dirname(__file__), '..')
location = lambda x: os.path.join(PROJECT_DIR, x)
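# For example (illustrative): location(r'dades/db.sqlite') resolves to
# <project root>/dades/db.sqlite, matching the DATABASES default below.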
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
TMPDIR = location( r'tmp')
LOGIN_URL= r"/usuaris/login/"
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': location( r'dades/db.sqlite'),
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
try:
from settings_local_database import DATABASES as D
DATABASES = D
except ImportError:
pass
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/Madrid'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'ca'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = location( r'static')
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
location( 'fpuf/site_css'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = ')iam2bjmt74qpgj_m&^lpmiq&jmtx7o=6(1%hxg2q-eb5!funs'
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
#"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.csrf",
"django.core.context_processors.request",
'fpuf.utils.context_processors.dades_basiques',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'fpuf.utils.middleware.IncludeLoginInErrors',
'fpuf.apps.usuaris.middleware.CustomSocialAuthExceptionMiddleware',
)
ROOT_URLCONF = 'fpuf.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'fpuf.wsgi.application'
TEMPLATE_DIRS = (
location('fpuf/templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'django.contrib.sitemaps',
'django.contrib.humanize',
'fpuf.apps.ufs',
'fpuf.apps.usuaris',
'fpuf.apps.material',
'fpuf.apps.social',
'fpuf.apps.notificacions',
'fpuf.utils',
'social_auth',
#'registration',
'django_tables2',
)
#-----------------------------
AUTHENTICATION_BACKENDS = (
#'fpuf.apps.usuaris.authbackends.GoogleOAuth2Backend',
'social_auth.backends.google.GoogleOAuth2Backend',
'social_auth.backends.google.GoogleBackend',
'django.contrib.auth.backends.ModelBackend',
)
WHITE_LISTED_DOMAINS = [ 'xtec.cat', ]
GOOGLE_WHITE_LISTED_DOMAINS = WHITE_LISTED_DOMAINS
SOCIAL_AUTH_EXTRA_DATA = False
#GOOGLE_OAUTH2_CLIENT_ID = "xxxx"
#GOOGLE_OAUTH2_CLIENT_SECRET = "xxxx"
#LOGIN_ERROR_URL = '/login-error/'
#SOCIAL_AUTH_DEFAULT_USERNAME = 'new_social_auth_user'
SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = False
SOCIAL_AUTH_PROTECTED_USER_FIELDS = ['email',]
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = "lolo@mailinator.com"
EMAIL_HOST_PASSWORD = "supersecret"
#LOGIN_URL = reverse( 'public:public_login_opcions' )
LOGIN_REDIRECT_URL = '/'
LOGIN_ERROR_URL = reverse_lazy( "usuaris_error_login" )
#------------------------------
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
SEND_BROKEN_LINK_EMAILS = False
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
try:
from settings_local import *
except ImportError:
pass
|
gpl-3.0
| -7,926,954,789,157,858,000 | 29.8583 | 127 | 0.691944 | false |