code (string, 2–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (string, 1 class) | license (string, 15 classes) | size (int32, 2–1.05M)
---|---|---|---|---|---
#
# This class is a wrapper around subprocess.Popen
#
# It behaves like the stdout pipe generated by Popen.
# However it makes a serious effort to do the following:
# 1) Forward STDERR of generated process to our STDERR
# 2) Don't get stuck - no matter how much output we get
# on STDOUT or STDERR
# 3) Raise an Exception if the exit code was not 0
# 4) Raise an Exception if there was data on STDERR
#
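#
# A minimal usage sketch (the command and ignore pattern below are hypothetical):
#
#   with CheckedCommandFileHandle(['svnadmin', 'dump', '/path/to/repo'],
#                                 ignore_patterns=[r'^\* Dumped revision']) as fh:
#       for line in fh:
#           handle(line)
#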
from fcntl import fcntl, F_GETFL, F_SETFL
from os import waitpid, O_NONBLOCK
from sys import stderr
from subprocess import Popen, check_call, PIPE, STDOUT
from string import join
from errno import EAGAIN
import re
class CheckedCommandFileHandle(object):
def __init__(self, args, ignore_patterns=[], error_fh=stderr):
self.status_ok = True
self.args = args
self.ignore_patterns = ignore_patterns
self.error_fh = error_fh
self.process = Popen(args, stdout=PIPE, stderr=PIPE)
self._make_stderr_non_blocking()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, trace):
if exc_value:
self._empty_stderr()
else:
self.close()
return False
def __iter__(self):
return self.process.stdout
def _make_stderr_non_blocking(self):
file_no = self.process.stderr.fileno()
bits = O_NONBLOCK | fcntl(file_no, F_GETFL)
fcntl(file_no, F_SETFL, bits)
def next(self):
self._empty_stderr()
return self.process.stdout.next()
def readline(self, size=-1):
self._empty_stderr()
return self.process.stdout.readline(size)
def read(self, size=-1):
self._empty_stderr()
return self.process.stdout.read(size)
def _empty_stderr(self):
try:
error = self.process.stderr.read()
except IOError as (errno, strerror):
if errno == EAGAIN:
return
raise
self._validate_error_output(error)
def _validate_error_output(self, error):
if not error:
return
if error[-1:] == "\n":
error = error[:-1]
error_lines = error.split("\n")
for line in error_lines:
line_ok = False
for pattern in self.ignore_patterns:
if re.search(pattern, line):
line_ok = True
break
if not line_ok:
self.error_fh.write("FAILED LINE: '"+line+"'\n")
cmd = join(self.args, ' ')
self.error_fh.write("Output of command '%s' on STDERR:\n%s\n" % (cmd, error))
raise Exception("Command '%s' wrote to STDERR (see above)!" % cmd)
def close(self):
p = self.process
self._empty_stderr()
p.stdout.close()
self._empty_stderr()
p.stderr.close()
status = waitpid(p.pid, 0)[1]
if status:
raise Exception(
"Command '%s' exited with status %d"
% (join(self.args), status)
)
| TNG/svnfiltereddump | src/svnfiltereddump/CheckedCommandFileHandle.py | Python | gpl-3.0 | 3,044 |
#!/snacks/bin/python
"""
Ebml.py
Use this package to decode and encode EBML data. Multimedia containers WebM and
Matroska are supported. Extend support for EBML-based formats by modifying the
Tags dictionary (see bottom of file).
All native EBML Elements are defined with their own decoder/encoder class in
this package, with the following exceptions:
- String and UTF-8 types are both treated with Python's encoding for str()
- Date is a subclass of SignedInteger and isn't interpreted as datetime
- SignedInteger doesn't have an encoder
Parts of this code (bitswap operators) favour little-endian CPUs (Intel)
TODO: Opensource this package?
"""
from struct import pack, unpack
from math import ceil
import binascii
import io
class Writer:
def __init__(self, doctype, version=2):
self.load("ebml")
self.dtd = self.tag("EBML",
# EBML headers with default values are omitted
self.tag("DocType", String(doctype)) +
self.tag("DocTypeVersion", UnsignedInteger(version)) +
self.tag("DocTypeReadVersion", UnsignedInteger(version)))
self.load(doctype)
def tag(self, tagName, value=-1):
# Lazy shorthand
if type(value) == str:
value = String(value).encode()
# Unknown size
# Some parsers have problems reading a 1-byte unknown size ("\xFF"), so code it with 8 bytes.
elif value == -1:
return self.tags[tagName] + "\x01" + ("\xFF" * 7)
# Empty
elif value == None:
value = ""
else:
value = value.encode()
return self.tags[tagName] + SizeInteger(len(value)).encode() + value
def load(self, doctype):
if doctype not in Tags:
raise Exception("Don't know '%s' doctype" % self.doctype)
self.tags = {}
self.doctype = doctype
for id, (name, t, l) in Tags[doctype].iteritems():
self.tags[name] = UnsignedInteger(id).encode()
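# A minimal Writer sketch (illustrative only): build the EBML header followed
# by an unknown-size Segment element.
#
#   w = Writer("webm")
#   header = w.dtd + w.tag("Segment")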
class Reader:
"""Light-weight, non-strict Ebml parser capable of reading unknown size
master elements.
"""
def __init__(self, input):
self.input = input
self.doctype = None
self.tags = Tags['ebml']
try:
self.input.tell()
self.seekable = True
except (AttributeError, IOError):
self.seekable = False
tagsread = 0
for tag in self:
if tagsread == 0 and tag.name != "EBML":
break
if tag.name == "EBMLReadVersion":
if tag != 1 and ReaderIgnoreEBMLVersion == False:
raise Exception("EBML Reader v%d required" % tag)
if tag.name == "DocType":
self.doctype = str(tag)
break
if tagsread == 8:
break
tagsread += 1
if self.doctype == None:
raise Exception("No DocType header found")
if self.doctype not in Tags:
raise Exception("Unrecognized DocType '%s'" % self.doctype)
self.tags = Tags[self.doctype]
def __del__(self):
self.input.close()
def __iter__(self):
masters = [None] * (ReaderMaxMasterLevel + 1)
while True:
id = self.readElement(UnsignedInteger)
size = self.readElement(SizeInteger)
if id == None:
for master in masters:
if master:
master.closed = True
raise StopIteration()
try:
tagName, tagType, tagLevel = self.tags[id]
if masters[tagLevel]:
for level in range(tagLevel, ReaderMaxMasterLevel + 1):
if masters[level] != None:
masters[level].closed = True
masters[level] = None
if tagType == Master:
tag = Master()
masters[tagLevel] = tag
self.master = tagName
elif size == None:
raise Exception("Tag %s with unknown size is invalid" % tagName)
else:
data = self.input.read(size)
tag = tagType(data)
tag.level = tagLevel
tag.name = tagName
except KeyError: # no result in self.tags
if size != None:
self.seek(size)
if ReaderIgnoreUnknown == True:
continue
tag = Unknown()
tag.id = id
tag.size = size
yield tag
def dump(self):
for tag in self:
if tag == None:
break
print repr(tag)
def seek(self, length):
if self.seekable == False:
return self.input.read(length)
return self.input.seek(length, 1)
"""Reads data coded with length, as specified for 'EBML IDs' and
'Data size' in the Matroska specification.
Call with class constructor, e.g. UnsignedInteger for EBML IDs."""
def readElement(self, classConstructor):
raw = self.input.read(1)
if not raw:
return None
b1 = ord(raw)
for bytes in range(8):
if b1 & 1 << 7 - bytes:
if bytes:
raw += self.input.read(bytes)
return classConstructor(raw)
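# Worked example: for the EBMLVersion ID the first byte read is 0x42
# (0b01000010); its first set bit is bit 6, so one extra byte (0x86) is
# read and the element is decoded from the two bytes 0x4286.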
class Element:
def __repr__(self):
return "%s%s%s" % (" " * self.level, self.name, self.attr())
def attr(self):
return ""
def encode(self):
return self
def tag(self):
data = self.encode()
return self.id.encode() + SizeInteger(len(data)).encode() + data
class Master(Element):
""".closed tells if this Master element have any child nodes left"""
def __init__(self):
self.closed = False
def encode(self):
raise Exception("Master tag can't be encoded")
class String(Element, str):
def attr(self):
return ' "%s"' % self
class Binary(String):
def attr(self):
return " (%d byte)" % len(self)
class Unknown(Binary):
def __init__(self):
self.name = "Unknown"
self.level = 0
def __repr__(self):
return "Unknown id %x (%d byte)" % (self.id, len(self))
class SimpleBlock(Binary):
def __init__(self, raw):
flags = ord(raw[3])
self.track = SignedInteger(raw[0])
self.timecode = UnsignedInteger(raw[1:3])
self.keyframe = bool(flags & 0x80)
self.invisible = bool(flags & 0x10)
self.discardable = bool(flags & 0x02)
def attr(self):
return " track %d, keyframe %s, timecode %d, data=%d" % (
self.track, self.keyframe, self.timecode, len(self))
class UnsignedInteger(Element, long):
def __new__(cls, *args, **kwargs):
raw = args[0]
if raw.__class__ in (int, long):
return super(UnsignedInteger, cls).__new__(cls, raw)
size = len(raw)
if size == 3:
raw = raw.rjust(4, "\x00")
elif size in (5,6,7):
raw = raw.rjust(8, "\x00")
try:
number = unpack(">%s" % "xBHIIQQQQ"[size], raw)[0]
except IndexError:
raise IndexError("Invalid integer of length %d" % size)
return super(UnsignedInteger, cls).__new__(cls, number)
def attr(self):
return " %d" % self
def encode(self):
binlen = len(bin(self)) - 2
size = int(ceil(binlen / 8.0))
data = pack(">%s" % "BBHIIQQQQ"[size], self)
if size in (3, 5, 6, 7):
return data.lstrip("\x00")
else:
return data
class SizeInteger(UnsignedInteger):
def __new__(cls, *args, **kwargs):
raw = args[0]
if raw.__class__ in (int, long):
return super(UnsignedInteger, cls).__new__(cls, raw)
# Strip size/length bit
raw = chr(ord(raw[0]) - (1 << 8 - len(raw))) + raw[1:]
return super(SizeInteger, cls).__new__(cls, raw)
def encode(self):
binlen = len(bin(self)) - 2 # -2 from pythons 0b
size = int(ceil((binlen+1) / 7.0)) # +1 for "1 << size"
num = self | (1 << size*7)
try:
data = pack(">%s" % "BBHIIQQQQ"[size], num)
except IndexError:
raise Exception ("Need %d bytes to encode, limited to 8" % size)
if size in (3, 5, 6, 7):
return data.lstrip("\x00")
else:
return data
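# Worked example: SizeInteger(5).encode() needs ceil((3+1)/7) = 1 byte,
# sets the length-marker bit (5 | 0x80) and packs to "\x85".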
class SignedInteger(UnsignedInteger):
def __new__(cls, *args, **kwargs):
num = super(SignedInteger, cls).__new__(cls, args[0])
num -= (1<<8*len(args[0]))/2
return num
# TODO: SignedInteger.encode()
class DateElm(SignedInteger):
pass
class Float(Element, float):
def __new__(cls, *args, **kwargs):
raw = args[0]
if raw.__class__ == float:
return super(Float, cls).__new__(cls, raw)
if len(raw) == 4: # float
number = unpack('>f', raw)[0]
elif len(raw) == 8: # double float
number = unpack('>d', raw)[0]
return super(Float, cls).__new__(cls, number)
def attr(self):
return " %lf" % self
def encode(self):
return pack('>d', self)
"""
Sources:
http://www.webmproject.org/code/specs/container/
http://matroska.org/technical/specs/index.html
Tags = {"doctype": {EBML ID: (Element name, Element Type (class), Level)}}
"""
Tags = {
"ebml": {
0x1a45dfa3: ('EBML', Master, 0),
0x4286: ('EBMLVersion', UnsignedInteger, 1),
0x42f7: ('EBMLReadVersion', UnsignedInteger, 1),
0x42f2: ('EBMLMaxIDLength', UnsignedInteger, 1),
0x42f3: ('EBMLMaxSizeLength', UnsignedInteger, 1),
0x4282: ('DocType', String, 1),
0x4287: ('DocTypeVersion', UnsignedInteger, 1),
0x4285: ('DocTypeReadVersion', UnsignedInteger, 1),
},
"webm": {
0xec: ('Void', Binary, 0),
# Segment
0x18538067: ('Segment', Master, 0),
# Seek
0x114d9b74: ('SeekHead', Master, 1),
0x4dbb: ('Seek', Master, 2),
0x53ab: ('SeekID', Binary, 3),
0x53ac: ('SeekPosition', UnsignedInteger, 3),
# Info
0x1549a966: ('Info', Master, 1),
0x2ad7B1: ('TimecodeScale', UnsignedInteger, 2),
0x4489: ('Duration', Float, 2),
# 0x4461: ('DateUTC', DateElm, 2),
0x7ba9: ('Title', String, 2), # Actually not WebM, only in Matroska
0x4d80: ('MuxingApp', String, 2),
0x5741: ('WritingApp', String, 2),
# Cluster
0x1f43b675: ('Cluster', Master, 1),
0xe7: ('Timecode', UnsignedInteger, 2),
0xab: ('PrevSize', UnsignedInteger, 2),
0xa3: ('SimpleBlock', SimpleBlock, 2),
0xa0: ('BlockGroup', Master, 2),
0xa1: ('Block', Binary, 3),
0x9b: ('BlockDuration', UnsignedInteger, 3),
0xfb: ('ReferenceBlock', SignedInteger, 3),
0x8e: ('Slices', Master, 3),
0xe8: ('TimeSlice', Master, 4),
0xcc: ('LaceNumber', UnsignedInteger, 5),
# Track
0x1654ae6b: ('Tracks', Master, 1),
0xae: ('TrackEntry', Master, 2),
0xd7: ('TrackNumber', UnsignedInteger, 3),
0x73c5: ('TrackUID', UnsignedInteger, 3),
0x83: ('TrackType', UnsignedInteger, 3),
0xb9: ('FlagEnabled', UnsignedInteger, 3),
0x88: ('FlagDefault', UnsignedInteger, 3),
0x55aa: ('FlagForced', UnsignedInteger, 3),
0x9c: ('FlagLacing', UnsignedInteger, 3),
0x23e383: ('DefaultDuration', UnsignedInteger, 3),
0x536e: ('Name', String, 3),
0x22b59c: ('Language', String, 3),
0x86: ('CodecID', String, 3),
0x63a2: ('CodecPrivate', Binary, 3),
0x258688: ('CodecName', String, 3),
# Video
0xe0: ('Video', Master, 3),
0x9a: ('FlagInterlaced', UnsignedInteger, 4),
0x53b8: ('StereoMode', UnsignedInteger, 4),
0xb0: ('PixelWidth', UnsignedInteger, 4),
0xba: ('PixelHeight', UnsignedInteger, 4),
0x54aa: ('PixelCropBottom', UnsignedInteger, 4),
0x54bb: ('PixelCropTop', UnsignedInteger, 4),
0x54cc: ('PixelCropLeft', UnsignedInteger, 4),
0x54dd: ('PixelCropRight', UnsignedInteger, 4),
0x54B0: ('DisplayWidth', UnsignedInteger, 4),
0x54BA: ('DisplayHeight', UnsignedInteger, 4),
0x54b2: ('DisplayUnit', UnsignedInteger, 4),
0x54b3: ('AspectRatioType', UnsignedInteger, 4),
# Audio
0xe1: ('Audio', Master, 3),
0xb5: ('SamplingFrequency', Float, 4),
0x78b5: ('OutputSamplingFrequency', Float, 4),
0x9F: ('Channels', UnsignedInteger, 4),
0x6264: ('BitDepth', UnsignedInteger, 4),
# Cues
0x1c53bb6b: ('Cues', Master, 1),
0xbb: ('CuePoint', Master, 2),
0xb3: ('CueTime', UnsignedInteger, 3),
0xb7: ('CueTrackPositions', Master, 3),
0xf7: ('CueTrack', UnsignedInteger, 4),
0xf1: ('CueClusterPosition', UnsignedInteger, 4),
0x5378: ('CueBlockNumber', UnsignedInteger, 4),
},
}
# Matroska is partly supported
Tags["matroska"] = Tags['webm']
ReaderIgnoreUnknown = True
ReaderIgnoreEBMLVersion = False
ReaderMaxMasterLevel = 5
if __name__ == '__main__':
from sys import argv, exit
a = Float(11.3)
print a
print type(a.encode())
print binascii.hexlify((a.encode()))
a = io.BytesIO()
a.write("Foo")
a.write("Bar")
print a.getvalue()
print len(a.getvalue())
f='\x1C\x53\xBB\x6B'
print f
print len(f)
print type(f)
print f[3]
print len(bytearray(f))
print binascii.hexlify(f)
bufSz = 2
foo = ""
bar = ""
bar = a.read(bufSz)
print "bar: ", bar
while bar:
foo += bar
bar = a.read(bufSz)
print "bar: ", bar
print "foo:", foo
# if len(argv) == 1:
# print "Syntax: %s <file.mkv|file.webm>" % argv[0]
# exit(1)
#
# Reader(io.open(argv[1], "rb")).dump()
| paoletto/mediastreamer | ebml.py | Python | gpl-3.0 | 14,181 |
# Copyright (C) 2003-2007 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.mxbase
class MX(dns.rdtypes.mxbase.MXBase):
"""MX record"""
pass
| ostinelli/pyopenspime | lib/dns/rdtypes/ANY/MX.py | Python | gpl-3.0 | 869 |
import model
import tensorflow as tf
tf.VERSION
class Config:
def __init__(self):
self.valid_step = 2000
self.valid_num_steps = 765
self.valid_data_list = './dataset/valid.txt'
self.data_dir = './data_preprocessed'
self.batch_size = 2
self.input_height = 512
self.input_width = 512
self.num_classes = 2
self.ignore_label = 0
self.random_scale = False
self.random_mirror = False
self.modeldir = 'model'
self.logfile = 'log.txt'
self.logdir = 'log'
self.encoder_name = 'res101'
sess = tf.Session()
m = model.Model(sess, Config())
m.test_setup()
from tensorflow.python.framework import graph_util
output_graph = "./frozen_model.pb"
# Before exporting our graph, we need to specify our output node(s)
# This is how TF decides what part of the graph it has to keep and what part it can dump
# NOTE: this variable is plural, because you can have multiple output nodes
input_node_names = "image_batch"
output_node_names = "predictions"
# We retrieve the protobuf graph definition
graph = tf.get_default_graph()
input_graph_def = graph.as_graph_def()
m.sess.run(tf.global_variables_initializer())
m.sess.run(tf.local_variables_initializer())
# load checkpoint
checkpointfile = m.conf.modeldir+ '/model.ckpt-' + str(m.conf.valid_step)
m.load(m.loader, checkpointfile)
# We use a built-in TF helper to export variables to constants
output_graph_def = graph_util.convert_variables_to_constants(
sess, # The session is used to retrieve the weights
input_graph_def, # The graph_def is used to retrieve the nodes
output_node_names.split(",") # The output node names are used to select the useful nodes
)
# Finally we serialize and dump the output graph to the filesystem
with tf.gfile.GFile(output_graph, "wb") as f:
f.write(output_graph_def.SerializeToString())
print("%d ops in the final graph." % len(output_graph_def.node))
| mstritt/orbit-image-analysis | src/main/python/deeplearn/export_pb_file.py | Python | gpl-3.0 | 1,973 |
# coding=utf-8
# This file is part of SickRage.
#
# URL: https://sick-rage.github.io
# Git: https://github.com/Sick-Rage/Sick-Rage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from sickbeard.image_cache import ImageCache
from sickrage.media.GenericMedia import GenericMedia
class ShowFanArt(GenericMedia):
"""
Get the fan art of a show
"""
def get_default_media_name(self):
return 'fanart.png'
def get_media_path(self):
if self.get_show():
return ImageCache().fanart_path(self.indexer_id)
return ''
| Arcanemagus/SickRage | sickrage/media/ShowFanArt.py | Python | gpl-3.0 | 1,183 |
import os
import hashlib
FILES = [
'extractor.tmp',
'tagger.tmp',
'alerts.tmp',
]
def clean_files():
for file in FILES:
os.remove(file)
def generateId(*args):
try:
return hashlib.sha1(
u''.join(args).encode('utf-8')
).hexdigest()
except:
return 'ID_ERROR'
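# Usage sketch (arguments are hypothetical): generateId(u'initiative', u'184/000001')
# returns the 40-character hex SHA-1 digest of the concatenated arguments.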
| CIECODE-Madrid/tipi-engine | utils.py | Python | gpl-3.0 | 344 |
"""
"""
__author__ = "Xun Li <xunli@asu.edu> "
__all__ = ['HeatMatrix']
import math
import wx
import numpy as np
from stars.visualization.utils import GradientColor
from stars.visualization.PlotWidget import PlottingCanvas
class HeatMatrix(PlottingCanvas):
def __init__(self,parent, layer, data,**kwargs):
PlottingCanvas.__init__(self,parent,data)
try:
self.layer_name = layer.name
self.title = "Transition probility matrix (%s)" % self.layer_name
self.x_label = "LISA transition states (1=HH,2=LH,3=LL,4=HL)"
self.y_label = "LISA transition states"
self.data = data
n = len(self.data)
self.enable_axis_labels = False
self.enable_axis = True
self.enable_axis_x = False
self.enable_axis_y = False
# a NxN matrix
self.x_min = 1
self.x_max = n+1
self.y_min = 1
self.y_max = n+1
self.extent = (self.x_min, self.y_min, self.x_max,self.y_max)
self.selected_polygon_ids = []
self.status_bar = self.parentFrame.status_bar
self.gradient_color = GradientColor(gradient_type='rdyibu')
self.margin_right = 100
# color schema: from blue to red
self.color_matrix = []
for i in range(n):
color_row = []
for j in range(n):
p = self.data[i][j]
color_row.append( self.gradient_color.get_color_at(p))
self.color_matrix.append(color_row)
except Exception as err:
self.ShowMsgBox('Failed to initialize heat map! ' + str(err.message))
self.isValidPlot = False
self.parentFrame.Close(True)
return None
def OnClose(self,event):
event.Skip()
def plot_data(self,dc):
# draw a NxN matrix
w,h = 1,1
for i,row in enumerate(self.data):
for j,item in enumerate(row):
start_x = j + self.x_min
start_y = self.y_max - i
pixel_x,pixel_y = self.point_to_screen(start_x,start_y)
pixel_w,pixel_h = math.ceil(self.length_to_screen(w)),math.ceil(self.length_to_screen(h,axis=1))
brush = wx.Brush(self.color_matrix[i][j])
dc.SetBrush(brush)
dc.DrawRectangle(pixel_x,pixel_y,pixel_w,pixel_h)
if i==len(self.data)-1:
dc.DrawText(str(j+1), pixel_x + pixel_w/2, pixel_y+pixel_h+5)
if j==0:
dc.DrawText(str(len(self.data)-i), pixel_x - 10, pixel_y + pixel_h/2)
text_pixel_x, text_pixel_y = pixel_x + pixel_w/2.0 - 10, pixel_y + pixel_h / 2.0
dc.SetPen(wx.WHITE_PEN)
dc.SetBrush(wx.WHITE_BRUSH)
dc.DrawText('%.4f'%(self.data[i][j]), text_pixel_x,text_pixel_y)
# draw a legend bar
pixel_x,pixel_y = self.point_to_screen( start_x+w, self.y_max)
pixel_x += 20
pixel_h = self.length_to_screen(self.y_max-self.y_min,axis=1)
pixel_w = 20
gradient_colorbar = self.gradient_color.get_bmp(pixel_w, pixel_h)
dc.DrawBitmap( gradient_colorbar, pixel_x, pixel_y)
pixel_x = pixel_x + pixel_w + 10
dc.SetPen(wx.BLACK_PEN)
dc.DrawText(str('%.2f'% np.max(self.data)), pixel_x,pixel_y)
pixel_y = pixel_y + pixel_h - 12
dc.DrawText(str('%.2f'% np.min(self.data)), pixel_x,pixel_y)
| GeoDaCenter/CAST | stars/visualization/plots/HeatMatrix.py | Python | gpl-3.0 | 3,750 |
"""Base class of calibration.
@author : Liangjun Zhu
@changelog:
- 18-01-22 - lj - design and implement.
- 18-01-25 - lj - redesign the individual class, add 95PPU, etc.
- 18-02-09 - lj - compatible with Python3.
- 20-07-22 - lj - update to use global MongoClient object.
"""
from __future__ import absolute_import, unicode_literals
import time
from collections import OrderedDict
import os
import sys
from copy import deepcopy
if os.path.abspath(os.path.join(sys.path[0], '..')) not in sys.path:
sys.path.insert(0, os.path.abspath(os.path.join(sys.path[0], '..')))
from typing import Optional
from pygeoc.utils import FileClass
from utility import read_data_items_from_txt
import global_mongoclient as MongoDBObj
from preprocess.text import DBTableNames
from run_seims import MainSEIMS
from calibration.config import CaliConfig, get_optimization_config
from calibration.sample_lhs import lhs
class TimeseriesData(object):
"""Time series data, for observation and simulation data."""
def __init__(self):
self.vars = list()
self.data = OrderedDict()
class ObsSimData(object):
"""Paired time series data of observation and simulation, associated with statistics."""
def __init__(self):
self.vars = list()
self.data = OrderedDict()
self.sim_obs_data = OrderedDict()
self.objnames = list()
self.objvalues = list()
self.valid = False
def efficiency_values(self, varname, effnames):
values = list()
tmpvars = list()
for name in effnames:
tmpvar = '%s-%s' % (varname, name)
if tmpvar not in self.objnames:
values.append(-9999.)
else:
if name.upper() == 'PBIAS':
tmpvars.append('%s-abs(PBIAS)' % varname)
else:
tmpvars.append(tmpvar)
values.append(self.objvalues[self.objnames.index(tmpvar)])
return values, tmpvars
def output_header(self, varname, effnames, prefix=''):
concate = ''
for name in effnames:
tmpvar = '%s-%s' % (varname, name)
if tmpvar not in self.objnames:
concate += '\t'
else:
if name.upper() == 'PBIAS':
tmpvar = '%s-abs(PBIAS)' % varname
if prefix != '':
concate += '%s-%s\t' % (prefix, tmpvar)
else:
concate += '%s\t' % tmpvar
return concate
def output_efficiency(self, varname, effnames):
concate = ''
for name in effnames:
tmpvar = '%s-%s' % (varname, name)
if tmpvar not in self.objnames:
concate += '\t'
else:
concate += '%.3f\t' % self.objvalues[self.objnames.index(tmpvar)]
return concate
class Calibration(object):
"""Base class of automatic calibration.
Attributes:
ID(integer): Calibration ID in the current generation, ranging from 0 to N-1 (N = number of individuals).
modelrun(boolean): Has SEIMS model run successfully?
"""
def __init__(self, cali_cfg, id=-1):
# type: (CaliConfig, Optional[int]) -> None
"""Initialize."""
self.cfg = cali_cfg
self.model = cali_cfg.model
self.ID = id
self.param_defs = dict()
# run seims related
self.modelrun = False
self.reset_simulation_timerange()
@property
def ParamDefs(self):
"""Read cali_param_rng.def file
name,lower_bound,upper_bound
e.g.,
Param1,0,1
Param2,0.5,1.2
Param3,-1.0,1.0
Returns:
a dictionary containing:
- names - the names of the parameters
- bounds - a list of lists of lower and upper bounds
- num_vars - a scalar indicating the number of variables
(the length of names)
"""
# read param_defs.json if already existed
if self.param_defs:
return self.param_defs
# read param_range_def file and output to json file
conn = MongoDBObj.client
db = conn[self.cfg.model.db_name]
collection = db['PARAMETERS']
names = list()
bounds = list()
num_vars = 0
if not FileClass.is_file_exists(self.cfg.param_range_def):
raise ValueError('Parameters definition file %s does not'
' exist!' % self.cfg.param_range_def)
items = read_data_items_from_txt(self.cfg.param_range_def)
for item in items:
if len(item) < 3:
continue
# find parameter name, print warning message if not existed
cursor = collection.find({'NAME': item[0]}, no_cursor_timeout=True)
if not cursor.count():
print('WARNING: parameter %s does not exist!' % item[0])
continue
num_vars += 1
names.append(item[0])
bounds.append([float(item[1]), float(item[2])])
self.param_defs = {'names': names, 'bounds': bounds, 'num_vars': num_vars}
return self.param_defs
def reset_simulation_timerange(self):
"""Update simulation time range in MongoDB [FILE_IN]."""
conn = MongoDBObj.client
db = conn[self.cfg.model.db_name]
stime_str = self.cfg.model.simu_stime.strftime('%Y-%m-%d %H:%M:%S')
etime_str = self.cfg.model.simu_etime.strftime('%Y-%m-%d %H:%M:%S')
db[DBTableNames.main_filein].find_one_and_update({'TAG': 'STARTTIME'},
{'$set': {'VALUE': stime_str}})
db[DBTableNames.main_filein].find_one_and_update({'TAG': 'ENDTIME'},
{'$set': {'VALUE': etime_str}})
def initialize(self, n=1):
"""Initialize parameters samples by Latin-Hypercube sampling method.
Returns:
A list of samples; each sample is a list with one value per gene (parameter) location.
"""
param_num = self.ParamDefs['num_vars']
lhs_samples = lhs(param_num, n)
all = list()
for idx in range(n):
gene_values = list()
for i, param_bound in enumerate(self.ParamDefs['bounds']):
gene_values.append(lhs_samples[idx][i] * (param_bound[1] - param_bound[0]) +
param_bound[0])
all.append(gene_values)
return all
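# Illustrative example (hypothetical bounds): with bounds [[0, 1], [0.5, 1.2]]
# each LHS sample in [0, 1) is rescaled into its bound, e.g. a sample of 0.25
# for the second parameter becomes 0.5 + 0.25 * (1.2 - 0.5) = 0.675.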
def initialize_calibrations(cf):
"""Initial individual of population.
"""
cali = Calibration(cf)
return cali.initialize()
def calibration_objectives(cali_obj, ind):
"""Evaluate the objectives of given individual.
"""
cali_obj.ID = ind.id
model_args = cali_obj.model.ConfigDict
model_args.setdefault('calibration_id', -1)
model_args['calibration_id'] = ind.id
model_obj = MainSEIMS(args_dict=model_args)
# Set observation data to model_obj, no need to query database
model_obj.SetOutletObservations(ind.obs.vars, ind.obs.data)
# Execute model
model_obj.SetMongoClient()
model_obj.run()
time.sleep(0.1) # Wait a moment in case of unpredictable file system error
# read simulation data of the entire simulation period (include calibration and validation)
if model_obj.ReadTimeseriesSimulations():
ind.sim.vars = model_obj.sim_vars[:]
ind.sim.data = deepcopy(model_obj.sim_value)
else:
model_obj.clean(calibration_id=ind.id)
model_obj.UnsetMongoClient()
return ind
# Calculate NSE, R2, RMSE, PBIAS, and RSR, etc. of calibration period
ind.cali.vars, ind.cali.data = model_obj.ExtractSimData(cali_obj.cfg.cali_stime,
cali_obj.cfg.cali_etime)
ind.cali.sim_obs_data = model_obj.ExtractSimObsData(cali_obj.cfg.cali_stime,
cali_obj.cfg.cali_etime)
ind.cali.objnames, \
ind.cali.objvalues = model_obj.CalcTimeseriesStatistics(ind.cali.sim_obs_data,
cali_obj.cfg.cali_stime,
cali_obj.cfg.cali_etime)
if ind.cali.objnames and ind.cali.objvalues:
ind.cali.valid = True
# Calculate NSE, R2, RMSE, PBIAS, and RSR, etc. of validation period
if cali_obj.cfg.calc_validation:
ind.vali.vars, ind.vali.data = model_obj.ExtractSimData(cali_obj.cfg.vali_stime,
cali_obj.cfg.vali_etime)
ind.vali.sim_obs_data = model_obj.ExtractSimObsData(cali_obj.cfg.vali_stime,
cali_obj.cfg.vali_etime)
ind.vali.objnames, \
ind.vali.objvalues = model_obj.CalcTimeseriesStatistics(ind.vali.sim_obs_data,
cali_obj.cfg.vali_stime,
cali_obj.cfg.vali_etime)
if ind.vali.objnames and ind.vali.objvalues:
ind.vali.valid = True
# Get timespan
ind.io_time, ind.comp_time, ind.simu_time, ind.runtime = model_obj.GetTimespan()
# delete model output directory for saving storage
model_obj.clean(calibration_id=ind.id)
model_obj.UnsetMongoClient()
return ind
if __name__ == '__main__':
cf, method = get_optimization_config()
cfg = CaliConfig(cf, method=method)
caliobj = Calibration(cfg)
# test the picklable of Scenario class.
import pickle
s = pickle.dumps(caliobj)
# print(s)
new_cali = pickle.loads(s)
print(new_cali.bin_dir)
| lreis2415/SEIMS | seims/calibration/calibrate.py | Python | gpl-3.0 | 9,807 |
from __future__ import print_function
from ctypes import *
from distutils.sysconfig import get_python_lib
from os import path
import six  # six.PY3 is checked below when encoding filenames
try:
    d = path.dirname(__file__)
    lib = cdll.LoadLibrary("%s/libpyZipHMM.so" % (d))
    library_location = "%s/libpyZipHMM.so" % (d)
except OSError:
    python_lib = get_python_lib()
    try:
        lib = cdll.LoadLibrary(python_lib + "/libpyZipHMM.so")
        library_location = python_lib + "/libpyZipHMM.so"
    except OSError as e:
        print("Error: pyZipHMM not found:")
        print("\t libpyZipHMM.so missing")
        print("Looked at:", python_lib, '/libpyZipHMM.so and ./libpyZipHMM.so')
        print("{0}: {1}".format(e.errno, e.strerror))
        exit(-1)
## HMM IO
def readHMMspec(filename):
nStates = c_uint()
nObservables = c_uint()
lib.c_read_HMM_spec(byref(nStates), byref(nObservables), c_char_p(filename.encode('utf-8') if six.PY3 else filename))
return (nStates, nObservables)
def readHMM(filename):
pi = Matrix()
A = Matrix()
B = Matrix()
lib.c_read_HMM(pi.obj, A.obj, B.obj, c_char_p(filename.encode('utf-8') if six.PY3 else filename))
return (pi, A, B)
def writeHMM(pi, A, B, filename):
lib.c_write_HMM(pi.obj, A.obj, B.obj, c_char_p(filename.encode('utf-8') if six.PY3 else filename))
## Forwarder
lib.Forwarder_new.restype = c_void_p
lib.Forwarder_forward.restype = c_double
lib.Forwarder_pthread_forward.restype = c_double
lib.Forwarder_pthread_forward_par_stage1.restype = c_double
lib.Forwarder_get_orig_seq_length.restype = c_uint
lib.Forwarder_get_orig_alphabet_size.restype = c_uint
lib.Forwarder_get_seq_length.restype = c_uint
lib.Forwarder_get_alphabet_size.restype = c_uint
lib.Forwarder_get_pair.restype = py_object
class Forwarder(object):
def __init__(self):
self.obj = c_void_p(lib.Forwarder_new())
@staticmethod
def fromSequence(seqFilename, alphabetSize, nStatesSave = None, minNoEvals = 1):
forwarder = Forwarder()
if nStatesSave != None:
arr = ( c_uint * len(nStatesSave) )()
arr[:] = nStatesSave
lib.Forwarder_read_seq(forwarder.obj, c_char_p(seqFilename.encode('utf-8') if six.PY3 else seqFilename), alphabetSize, arr, len(nStatesSave), minNoEvals)
else:
arr = ( c_uint * 0 )()
lib.Forwarder_read_seq(forwarder.obj, c_char_p(seqFilename.encode('utf-8') if six.PY3 else seqFilename), alphabetSize, arr, 0, minNoEvals)
return forwarder
@staticmethod
def fromDirectory(directory, nStates = None):
forwarder = Forwarder()
if nStates == None:
lib.Forwarder_read_from_directory(forwarder.obj, c_char_p(directory.encode('utf8') if six.PY3 else directory))
else:
lib.Forwarder_read_from_directory(forwarder.obj, c_char_p(directory.encode('utf8') if six.PY3 else directory), nStates)
return forwarder
def __del__(self):
from ctypes import cdll
lib = cdll.LoadLibrary(library_location)
lib.Forwarder_destructor(self.obj)
def forward(self, pi, A, B):
return lib.Forwarder_forward(self.obj, pi.obj, A.obj, B.obj)
def ptforward(self, pi, A, B, device_filename = None):
if device_filename == None:
return lib.Forwarder_pthread_forward(self.obj, pi.obj, A.obj, B.obj, "-")
else :
return lib.Forwarder_pthread_forward(self.obj, pi.obj, A.obj, B.obj, device_filename)
def ptforwardParStage1(self, pi, A, B, device_filename = None):
if device_filename == None:
return lib.Forwarder_pthread_forward_par_stage1(self.obj, pi.obj, A.obj, B.obj, "-")
else :
return lib.Forwarder_pthread_forward_par_stage1(self.obj, pi.obj, A.obj, B.obj, device_filename)
def getOrigSeqLength(self):
return lib.Forwarder_get_orig_seq_length(self.obj)
def getOrigAlphabetSize(self):
return lib.Forwarder_get_orig_alphabet_size(self.obj)
def getSeqLength(self, no_states):
return lib.Forwarder_get_seq_length(self.obj, no_states)
def getAlphabetSize(self, no_states):
return lib.Forwarder_get_alphabet_size(self.obj, no_states)
def getPair(self, symbol):
return lib.Forwarder_get_pair(self.obj, symbol)
def writeToDirectory(self, directory):
lib.Forwarder_write_to_directory(self.obj, c_char_p(directory.encode('utf8') if six.PY3 else directory))
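# A minimal usage sketch (file names are hypothetical):
#
#   pi, A, B = readHMM("example.hmm")
#   f = Forwarder.fromSequence("example.seq", alphabetSize=2)
#   loglik = f.forward(pi, A, B)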
## SimpleForwarder
lib.SimpleForwarder_new.restype = c_void_p
lib.SimpleForwarder_forward.restype = c_double
class SimpleForwarder(object):
def __init__(self, seqFilename):
self.obj = c_void_p(lib.SimpleForwarder_new(seqFilename.encode('utf-8') if six.PY3 else seqFilename))
def forward(self, pi, A, B):
return lib.SimpleForwarder_forward(self.obj, pi.obj, A.obj, B.obj)
## SimpleStopForwarder
lib.SimpleStopForwarder_new.restype = c_void_p
lib.SimpleStopForwarder_forward.restype = c_double
lib.SimpleStopForwarder_pthread_forward.restype = c_double
lib.SimpleStopForwarder_pthread_forward_par_stage1.restype = c_double
lib.SimpleStopForwarder_get_orig_seq_length.restype = c_uint
lib.SimpleStopForwarder_get_orig_alphabet_size.restype = c_uint
lib.SimpleStopForwarder_get_seq_length.restype = c_uint
lib.SimpleStopForwarder_get_alphabet_size.restype = c_uint
lib.SimpleStopForwarder_get_pair.restype = py_object
class SimpleStopForwarder(object):
def __init__(self):
self.obj = c_void_p(lib.SimpleStopForwarder_new())
@staticmethod
def fromSequence(seqFilename, alphabetSize, nStatesSave = None):
forwarder = SimpleStopForwarder()
if nStatesSave != None:
arr = ( c_uint * len(nStatesSave) )()
arr[:] = nStatesSave
lib.SimpleStopForwarder_read_seq(forwarder.obj, c_char_p(seqFilename.encode('utf-8') if six.PY3 else seqFilename), alphabetSize, arr, len(nStatesSave))
else:
arr = ( c_uint * 0 )()
lib.SimpleStopForwarder_read_seq(forwarder.obj, c_char_p(seqFilename.encode('utf-8') if six.PY3 else seqFilename), alphabetSize, arr, 0)
return forwarder
@staticmethod
def fromDirectory(directory, nStates = None):
forwarder = SimpleStopForwarder()
if nStates == None:
lib.SimpleStopForwarder_read_from_directory(forwarder.obj, c_char_p(directory.encode('utf8') if six.PY3 else directory))
else:
lib.SimpleStopForwarder_new_from_directory(forwarder.obj, c_char_p(directory.encode('utf8') if six.PY3 else directory), nStates)
return forwarder
def __del__(self):
from ctypes import cdll
lib = cdll.LoadLibrary(library_location)
lib.SimpleStopForwarder_destructor(self.obj)
def forward(self, pi, A, B):
return lib.SimpleStopForwarder_forward(self.obj, pi.obj, A.obj, B.obj)
def ptforward(self, pi, A, B, device_filename = None):
if device_filename == None:
return lib.SimpleStopForwarder_pthread_forward(self.obj, pi.obj, A.obj, B.obj, "-")
else :
return lib.SimpleStopForwarder_pthread_forward(self.obj, pi.obj, A.obj, B.obj, device_filename)
def ptforwardParStage1(self, pi, A, B, device_filename = None):
if device_filename == None:
return lib.SimpleStopForwarder_pthread_forward_par_stage1(self.obj, pi.obj, A.obj, B.obj, "-")
else :
return lib.SimpleStopForwarder_pthread_forward_par_stage1(self.obj, pi.obj, A.obj, B.obj, device_filename)
def getOrigSeqLength(self):
return lib.SimpleStopForwarder_get_orig_seq_length(self.obj)
def getOrigAlphabetSize(self):
return lib.SimpleStopForwarder_get_orig_alphabet_size(self.obj)
def getSeqLength(self, no_states):
return lib.SimpleStopForwarder_get_seq_length(self.obj, no_states)
def getAlphabetSize(self, no_states):
return lib.SimpleStopForwarder_get_alphabet_size(self.obj, no_states)
def getPair(self, symbol):
return lib.SimpleStopForwarder_get_pair(self.obj, symbol)
def writeToDirectory(self, directory):
lib.SimpleStopForwarder_write_to_directory(self.obj, c_char_p(directory.encode('utf8') if six.PY3 else directory))
## Sequence
lib.Sequence_new.restype = c_void_p
lib.Sequence_destructor.restype = c_int
lib.Sequence_get.restype = c_uint
lib.Sequence_len.restype = c_uint
class Sequence(object):
def __init__(self):
self.obj = c_void_p(lib.Sequence_new())
def __del__(self):
from ctypes import cdll
lib = cdll.LoadLibrary(library_location)
lib.Sequence_destructor(self.obj)
def __len__(self):
return lib.Sequence_len(self.obj)
def __getitem__(self, key):
if isinstance(key, slice) :
return [ self[ii] for ii in range(*key.indices(len(self))) ]
elif isinstance( key, int ) :
if key < 0 :
key += len( self )
if key >= len( self ) :
raise IndexError("The index (%d) is out of range." % key)
return int(lib.Sequence_get(self.obj, key))
else:
raise TypeError("Invalid argument type.")
def __str__(self):
string = ""
for i in range(len(self)):
string = string + ("%d" % self[i]) + " "
return string.strip()
## Matrix
lib.Matrix_new_empty.restype = c_void_p
lib.Matrix_new_height_width.restype = c_void_p
lib.Matrix_get_width.restype = c_uint
lib.Matrix_get_height.restype = c_uint
lib.Matrix_get.restype = c_double
class Matrix(object):
def __init__(self, height = 0, width = 0):
if height == 0 or width == 0:
self.obj = c_void_p(lib.Matrix_new_empty())
else:
self.obj = c_void_p(lib.Matrix_new_height_width(height, width))
def __del__(self):
from ctypes import cdll
lib = cdll.LoadLibrary(library_location)
lib.Matrix_destructor(self.obj)
def getWidth(self):
return lib.Matrix_get_width(self.obj)
def getHeight(self):
return lib.Matrix_get_height(self.obj)
def reset(self, height, width):
lib.Matrix_reset(self.obj, c_uint(height), c_uint(width))
def __setitem__(self, xxx_todo_changeme, value):
(row, column) = xxx_todo_changeme
lib.Matrix_set(self.obj, c_uint(row), c_uint(column), c_double(value))
def __getitem__(self, xxx_todo_changeme1):
(row, column) = xxx_todo_changeme1
return lib.Matrix_get(self.obj, row, column)
@staticmethod
def transpose(f, t):
lib.Matrix_transpose(f.obj, t.obj)
def p(self):
lib.Matrix_print(self.obj)
## posterior decoding
def posteriorDecoding(seqFilename, pi, A, B):
pdTable = Matrix()
pdPath = Sequence()
lib.c_posterior_decoding(pdPath.obj, pdTable.obj, pi.obj, A.obj, B.obj, c_char_p(seqFilename.encode('utf-8') if six.PY3 else seqFilename))
return pdPath, pdTable
## Viterbi
lib.c_viterbi.restype = c_double
def viterbi(seqFilename, pi, A, B):
viterbiPath = Sequence()
viterbi_ll = lib.c_viterbi(viterbiPath.obj, pi.obj, A.obj, B.obj, c_char_p(seqFilename.encode('utf-8') if six.PY3 else seqFilename))
return viterbiPath, viterbi_ll
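# Usage sketch (file names are hypothetical):
#
#   pi, A, B = readHMM("example.hmm")
#   path, loglik = viterbi("example.seq", pi, A, B)
#   print(path[0:10])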
## calibrate
def calibrate(deviceFilename = None):
if deviceFilename == None:
lib.c_calibrate("-")
else:
lib.c_calibrate(deviceFilename.encode('utf-8') if six.PY3 else deviceFilename)
if __name__ == "__main__":
print("Constructing Matrix(3,7)")
m = Matrix(3, 7)
print("Calling getHeight()")
assert m.getHeight() == 3
print("Calling getWidth()")
assert m.getWidth() == 7
print("Calling setitem method")
m[1,2] = 0.5
print("Calling getitem method")
assert m[1, 2] == 0.5
print("Calling reset method")
m.reset(7,3)
assert m.getHeight() == 7
assert m.getWidth() == 3
print("Calling readHMM method")
(pi, A, B) = readHMM("test_data/test1.hmm")
assert pi.getHeight() == 2
assert pi.getWidth() == 1
assert A.getHeight() == 2
assert A.getWidth() == 2
assert B.getHeight() == 2
assert B.getWidth() == 2
print("Creating Forwarder object from files")
f = Forwarder(newSeqFilename = "../new_seq.tmp", dataStructureFilename = "../data_structure.tmp")
assert f.getOrigAlphabetSize() == 2
assert f.getOrigSeqLength() == 18
assert f.getNewAlphabetSize() == 4
print("Calling forward on Forwarder object")
assert abs(f.forward(pi, A, B) - -12.5671022728) < 0.001
print("Calling readHMMspec method")
(nStates, nObservables) = readHMMspec("test_data/test1.hmm")
assert nStates.value == 2
assert nObservables.value == 2
print("Creating Forwarder from sequence and hmm spec")
f = Forwarder(seqFilename = "test_data/test1.seq", nStates = nStates, nObservables = nObservables)
assert f.getOrigAlphabetSize() == 2
assert f.getOrigSeqLength() == 18
assert f.getNewAlphabetSize() == 4
print("Calling forward")
assert abs(f.forward(pi, A, B) - -12.5671022728) < 0.001
| mailund/ziphmm | web/files/OSX/pyZipHMM_1.0.2/pyZipHMM/pyZipHMM.py | Python | gpl-3.0 | 13,080 |
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **Impact function Test Cases.**
Contact : kolesov.dm@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'lucernae'
__date__ = '11/12/2014'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import unittest
from qgis.core import QgsVectorLayer
from safe.impact_functions.impact_function_manager import ImpactFunctionManager
from safe.impact_functions.inundation.flood_polygon_roads\
.impact_function import FloodVectorRoadsExperimentalFunction
from safe.test.utilities import (
get_qgis_app,
test_data_path)
from safe.utilities.qgis_layer_wrapper import QgisWrapper
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app()
class TestFloodVectorPolygonRoadsFunction(unittest.TestCase):
"""Test for Flood Vector Building Impact Function."""
def setUp(self):
registry = ImpactFunctionManager().registry
registry.clear()
registry.register(FloodVectorRoadsExperimentalFunction)
def test_run(self):
function = FloodVectorRoadsExperimentalFunction.instance()
hazard_path = test_data_path('hazard', 'flood_multipart_polygons.shp')
exposure_path = test_data_path('exposure', 'roads.shp')
# noinspection PyCallingNonCallable
hazard_layer = QgsVectorLayer(hazard_path, 'Flood', 'ogr')
# noinspection PyCallingNonCallable
exposure_layer = QgsVectorLayer(exposure_path, 'Roads', 'ogr')
# Let's set the extent to the hazard extent
extent = hazard_layer.extent()
rect_extent = [
extent.xMinimum(), extent.yMaximum(),
extent.xMaximum(), extent.yMinimum()]
function.hazard = QgisWrapper(hazard_layer)
function.exposure = QgisWrapper(exposure_layer)
function.requested_extent = rect_extent
function.parameters['affected_field'] = 'FLOODPRONE'
function.parameters['affected_value'] = 'YES'
function.run()
impact = function.impact
# The count of flooded objects was calculated by hand: count = 69
expected_feature_total = 69
count = sum(impact.get_data(attribute=function.target_field))
message = 'Expecting %s, but it returns %s' % (
expected_feature_total, count)
self.assertEquals(count, expected_feature_total, message)
def test_filter(self):
"""Test filtering IF from layer keywords"""
hazard_keywords = {
'layer_purpose': 'hazard',
'layer_mode': 'classified',
'layer_geometry': 'polygon',
'hazard': 'flood',
'hazard_category': 'single_event',
'vector_hazard_classification': 'flood_vector_hazard_classes'
}
exposure_keywords = {
'layer_purpose': 'exposure',
'layer_mode': 'classified',
'layer_geometry': 'line',
'exposure': 'road'
}
impact_functions = ImpactFunctionManager().filter_by_keywords(
hazard_keywords, exposure_keywords)
message = 'There should be 1 impact function, but there are: %s' % \
len(impact_functions)
self.assertEqual(1, len(impact_functions), message)
retrieved_if = impact_functions[0].metadata().as_dict()['id']
expected = ImpactFunctionManager().get_function_id(
FloodVectorRoadsExperimentalFunction)
message = 'Expecting %s, but getting %s instead' % (
expected, retrieved_if)
self.assertEqual(expected, retrieved_if, message)
| wonder-sk/inasafe | safe/impact_functions/inundation/flood_polygon_roads/test/test_flood_polygon_roads.py | Python | gpl-3.0 | 3,890 |
# -*- coding: utf-8 -*-
## Copyright 2015-2017 Frankfurt Institute for Advanced Studies
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
"""
from __future__ import absolute_import
import numpy as np
import pandas as pd
import os
import glob
import pytz, datetime
from scipy.optimize import leastsq
from six.moves import range, zip
from . import shapes as vshapes, mapping as vmapping, transfer as vtransfer
from .decorators import cachable
from . import make_toDataDir
toDataDir = make_toDataDir(__file__)
@cachable(keepweakref=True)
def timeseries_entsoe(years=list(range(2011, 2015+1)), countries=None, directory=None):
"""
Read consumption data from ENTSO-E country packages
Parameters
----------
years : list of int
Years for which to read consumption data (defaults to
2011-2015)
countries : list or None
Country names in the encoding of ENTSO-E as full names
(refer to the data/entsoe_country_packages directory).
If None, read data for all countries (default).
Returns
-------
load : pd.DataFrame
Load time-series with UTC timestamps x ISO-2 countries
"""
# Only take into account years from 2006 to 2015
years = [y for y in years if y >= 2006 and y <= 2015]
if directory is None:
directory = toDataDir('entsoe_country_packages')
fns = sum((glob.glob(os.path.join(directory, '{}_{}.xls'.format(c, y)))
for y in years
for c in (('*',) if countries is None else countries)), [])
def read_all_excel(fns):
for fn in fns:
try:
yield pd.read_excel(fn, skiprows=6, header=0, sheetname='hourly_load_values', na_values=[u' '])
except StopIteration:
pass
tz = pytz.timezone('Europe/Berlin')
data = pd.concat(read_all_excel(fns))
transdata = data.ix[:,['Country','Date/Time', '3B:00:00']] \
.set_index(['Country', 'Date/Time']).stack().unstack(0)
del data['3B:00:00']
data = data \
.set_index(['Country', 'Date/Time']) \
.stack().unstack(0)
transitions = [t for t in tz._utc_transition_times if t.year in years]
since = datetime.datetime(years[0], 1, 1)
for forward, backward in zip(*[iter(transitions)]*2):
forward_ind = 24*(forward - since).days + 2
backward_ind = 24*(backward - since).days + 2
data.iloc[forward_ind:backward_ind+1] = data.iloc[forward_ind:backward_ind+1].shift(-1)
try:
data.iloc[backward_ind] = transdata.loc[backward.strftime("%Y-%m-%d"), "3B:00:00"]
except KeyError:
data.iloc[backward_ind] = data.iloc[backward_ind - 1]
data = data \
.set_index(pd.date_range('{}-01-01'.format(years[0]),
'{}-01-01'.format(int(years[-1]) + 1),
closed='left', freq='1h', tz=tz)) \
.tz_convert(pytz.utc)
if countries is None or set(('Kosovo', 'Albania')).issubset(countries):
# manual alterations:
# Kosovo gets the same load curve as Serbia
# scaled by energy consumption ratio from IEA 2012
data['KV'] = data['RS'] * (4.8 / 27.)
# Albania gets the same load curve as Macedonia
data['AL'] = data['MK'] * (4.1 / 7.4)
return data
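# Usage sketch (assumes the country packages are present in the data directory
# and that countries are named as in the ENTSO-E packages):
#
#   load = timeseries_entsoe(years=[2013, 2014], countries=['Germany', 'France'])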
def timeseries_opsd(years=slice("2011", "2015"), fn=None):
"""
Read load data from OPSD time-series package.
Parameters
----------
years : None or slice()
Years for which to read load data (defaults to
slice("2011","2015"))
Returns
-------
load : pd.DataFrame
Load time-series with UTC timestamps x ISO-2 countries
"""
if fn is None:
fn = toDataDir('time_series_60min_singleindex_filtered.csv')
load = (pd.read_csv(fn, index_col=0, parse_dates=True)
.loc[:, lambda df: df.columns.to_series().str.endswith('_load_old')]
.rename(columns=lambda s: s[:-len('_load_old')])
.dropna(how="all", axis=0))
if years is not None:
load = load.loc[years]
# manual alterations:
# Kosovo gets the same load curve as Serbia
# scaled by energy consumption ratio from IEA 2012
load['KV'] = load['RS'] * (4.8 / 27.)
# Albania gets the same load curve as Macedonia
load['AL'] = load['MK'] * (4.1 / 7.4)
# To fill the half week gap in Greece from start to stop,
# we copy the week before into it
start = pd.Timestamp('2015-08-11 21:00')
stop = pd.Timestamp('2015-08-15 20:00')
w = pd.Timedelta(weeks=1)
if start in load.index and stop in load.index:
load.loc[start:stop, 'GR'] = load.loc[start-w:stop-w, 'GR'].values
# There are three missing hours in 2014 and four in 2015
# we interpolate linearly (copying from the previous week
# might be better)
load['EE'] = load['EE'].interpolate()
return load
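# Usage sketch: read the filtered OPSD load time series for 2012-2014.
#
#   load = timeseries_opsd(years=slice("2012", "2014"))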
def _upsampling_fitfunc(weights, gdp, pop):
return weights[0] * gdp + weights[1] * pop
def _upsampling_weights(load):
"""
Fit the weights for gdp and pop using leastsq from
some load data for each country in Europe.
Parameters
----------
load : pd.DataFrame (index=times, columns=ISO2 country codes)
Returns
-------
weights : np.array((gdp, pop), dtype=np.float)
"""
load = load.resample('AS').sum()
if (load.iloc[0] < 0.1 * load.iloc[1]).all():
# Year is not complete
load = load.iloc[1:]
def read_eurostat(fn, extradims=[]):
data = pd.read_csv(toDataDir(fn), thousands=' ', na_values=':')
data = data.set_index(['TIME', 'GEO'] + extradims).unstack()['Value']
data = data.unstack(list(range(-len(extradims), 0)))
data.set_index(pd.to_datetime(data.index, format="%Y"), inplace=True)
return data
def reindex_like_load(data, load):
data = data.stack().reindex(load.columns, level=1).unstack()
data = data.reindex(load.index)
data.interpolate('time', inplace=True)
data.bfill(inplace=True)
return data
gdp = reindex_like_load(read_eurostat('nama_10_gdp_1_Data.csv', ['NA_ITEM']), load)
pop = reindex_like_load(read_eurostat('demo_gind_1_Data.csv'), load)
def normed(x):
return x.divide(x.sum(axis=1), axis=0)
data = pd.Panel(dict(gdp=normed(gdp['Gross domestic product at market prices']),
# gdpva=normed(gdp['Value added, gross']),
pop=normed(pop),
load=normed(load)))
data.dropna(axis=2, inplace=True)
gdp_n = np.ravel(data["gdp"])
pop_n = np.ravel(data["pop"])
y = np.ravel(data["load"])
Jerr = - np.hstack((gdp_n[:,np.newaxis],
pop_n[:,np.newaxis]))
def errfunc(weights, gdp, pop):
return y - _upsampling_fitfunc(weights, gdp, pop)
weights, cov_x, infodict, mesg, ier = \
leastsq(errfunc, np.array((0.5, 0.5)), Dfun=lambda x,_,__: Jerr,
args=(gdp_n, pop_n), full_output=True)
return weights / weights.sum()
def gdppop_nuts3():
pop = pd.read_table(toDataDir('nama_10r_3popgdp.tsv.gz'), na_values=[':'], delimiter=' ?\t', engine='python')
pop = (pop
.set_index(pd.MultiIndex.from_tuples(pop.pop('unit,geo\\time').str.split(','))).loc['THS']
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
.fillna(method='bfill', axis=1))['2014']
gdp = pd.read_table(toDataDir('nama_10r_3gdp.tsv.gz'), na_values=[':'], delimiter=' ?\t', engine='python')
gdp = (gdp
.set_index(pd.MultiIndex.from_tuples(gdp.pop('unit,geo\\time').str.split(','))).loc['EUR_HAB']
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
.fillna(method='bfill', axis=1))['2014']
# Swiss data
cantons = pd.read_csv(toDataDir('ch_cantons.csv'))
cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS']
swiss = pd.read_excel(toDataDir('je-e-21.03.02.xls'), skiprows=3, index_col=0)
swiss.columns = swiss.columns.to_series().map(cantons)
pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH04':]))
gdp = gdp.append(pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH04':]))
return gdp, pop
def timeseries_shapes(shapes, countries, years=slice("2011", "2015"), weights=None, load=None):
if load is None:
load = timeseries_opsd(years)
if weights is None:
weights = _upsampling_weights(load=load)
gdp, pop = gdppop_nuts3()
nuts3 = pd.Series(vshapes.nuts3(tolerance=None, minarea=0.))
mapping = vmapping.countries_to_nuts3()
def normed(x): return x.divide(x.sum())
def upsample(cntry, group):
l = load[cntry]
if len(group) == 1:
return pd.DataFrame({group.index[0]: l})
else:
nuts3_inds = mapping.index[mapping == cntry]
transfer = vtransfer.Shapes2Shapes(group, nuts3.reindex(nuts3_inds), normed=False).T.tocsr()
gdp_n = pd.Series(transfer.dot(gdp.reindex(nuts3_inds, fill_value=1.).values), index=group.index)
pop_n = pd.Series(transfer.dot(pop.reindex(nuts3_inds, fill_value=1.).values), index=group.index)
factors = normed(_upsampling_fitfunc(weights, normed(gdp_n), normed(pop_n)))
return pd.DataFrame(factors.values * l.values[:,np.newaxis], index=l.index, columns=factors.index)
return pd.concat([upsample(cntry, group)
for cntry, group in shapes.groupby(countries)], axis=1)
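# Usage sketch (assumes `regions` is a pd.Series of shapely geometries indexed by
# region id and `cntry_of_region` maps each region to its ISO-2 country code):
#
#   regional_load = timeseries_shapes(regions, cntry_of_region, years=slice("2012", "2013"))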
| FRESNA/vresutils | vresutils/load.py | Python | gpl-3.0 | 10,161 |
import sys
if __name__=="__main__":
freq=float(sys.argv[1])*1E6
m_refclk=14.4E6
m_prescale_divide=40
m_r=14.4E6/12.5E3
#m_r=14.4E6/10.0E3
if len(sys.argv)>2:
freq=freq+21.4E6
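# The computation below appears to split the total divide ratio x into a
# dual-modulus prescaler count n (full divide-by-40 cycles) and a swallow
# count a, then packs both into one register word; this reading of the
# register layout is an assumption.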
x = (freq * m_r)/m_refclk
n = int(x/m_prescale_divide)
a = int(round(x-n*m_prescale_divide))
encoded = ((n<<7) + a)*2
print "%x" % encoded
print "%d %d" % (((encoded & 0xFFFF) >> 16),(encoded & 0xFFFF))
| johngumb/danphone | junkbox/freq.py | Python | gpl-3.0 | 440 |
#!/usr/bin/env python
try:
import gi
gi.require_version('NumCosmo', '1.0')
gi.require_version('NumCosmoMath', '1.0')
except:
pass
import scipy.stats as ss
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
from tqdm import tqdm
import time
import math
import sys
import math
from gi.repository import GObject
from gi.repository import NumCosmo as Nc
from gi.repository import NumCosmoMath as Ncm
#
# Initializing the library objects, this must be called before
# any other library function.
#
Ncm.cfg_init ()
#
# New homogeneous and isotropic cosmological model NcHICosmoQGRW
#
cosmo = Nc.HICosmo.new_from_name (Nc.HICosmo, "NcHICosmoQGRW")
if len (sys.argv) != 3:
print ("twofluids_wkb_mode.py mode_k w")
sys.exit (0)
w = float (sys.argv[2])
prec = 1.0e-6
mode_k = float (sys.argv[1])
cosmo.props.w = w
cosmo.props.Omegar = 2.0 * (1.0e-5)
cosmo.props.Omegaw = 2.0 * (1.0 - 1.0e-5)
cosmo.props.xb = 1.e30
pert = Nc.HIPertTwoFluids.new ()
pert.props.reltol = prec
pert.set_mode_k (mode_k);
wkb_prec = prec
cross_size = 1.0e-5
alpha_try = -cosmo.abs_alpha (1.0e-12 * mode_k**2)
alpha_mode1main = pert.get_cross_time (cosmo, Nc.HIPertTwoFluidsCross.MODE1MAIN, alpha_try, cross_size)
alpha_mode1sub = pert.get_cross_time (cosmo, Nc.HIPertTwoFluidsCross.MODE1SUB, alpha_try, cross_size)
alpha_mode2main = pert.get_cross_time (cosmo, Nc.HIPertTwoFluidsCross.MODE2MAIN, alpha_try, cross_size)
alpha_mode2sub = pert.get_cross_time (cosmo, Nc.HIPertTwoFluidsCross.MODE2SUB, alpha_try, cross_size)
alphai = alpha_mode1sub
alphaf = +cosmo.abs_alpha (1.0e20)
print ("# Mode k = % 21.15g" % (mode_k))
pert.set_stiff_solver (False)
alpha_a = []
gammabar11_a = []
gammabar22_a = []
gammabar12_a = []
taubar12_a = []
nu1_a = []
nu2_a = []
for alpha in np.linspace (alphai, alphaf, 10000):
eom = pert.eom (cosmo, alpha)
alpha_a.append (alpha)
gammabar11_a.append (math.fabs (eom.gammabar11))
gammabar22_a.append (math.fabs (eom.gammabar22))
gammabar12_a.append (math.fabs (eom.gammabar12))
taubar12_a.append (math.fabs (eom.taubar))
nu1_a.append (eom.nu1)
nu2_a.append (eom.nu2)
print ("# Calculating mode 1, initial time % 20.15f [%8.2e]: " % (alphai, cosmo.x_alpha (alphai)))
ci = Ncm.Vector.new (8)
pert.get_init_cond_zetaS (cosmo, alphai, 1, 0.25 * math.pi, ci)
pert.set_init_cond (cosmo, alphai, 3, False, ci)
Ps_zeta1 = []
Ps_S1 = []
Ps_Pzeta1 = []
Ps_PS1 = []
Ps_zeta1.append (math.hypot (ci.get (Nc.HIPertITwoFluidsVars.ZETA_R), 0.0*ci.get (Nc.HIPertITwoFluidsVars.ZETA_I))**2)
Ps_S1.append (math.hypot (ci.get (Nc.HIPertITwoFluidsVars.S_R), 0.0*ci.get (Nc.HIPertITwoFluidsVars.S_I))**2)
Ps_Pzeta1.append (math.hypot (ci.get (Nc.HIPertITwoFluidsVars.PZETA_R), 0.0*ci.get (Nc.HIPertITwoFluidsVars.PZETA_I))**2)
Ps_PS1.append (math.hypot (ci.get (Nc.HIPertITwoFluidsVars.PS_R), 0.0*ci.get (Nc.HIPertITwoFluidsVars.PS_I))**2)
for alpha in tqdm (alpha_a[1:]):
#for alpha in alpha_a[1:]:
pert.evolve (cosmo, alpha)
v, alphac = pert.peek_state (cosmo)
Ps_zeta1.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.ZETA_R), 0.0*v.get (Nc.HIPertITwoFluidsVars.ZETA_I))**2)
Ps_S1.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.S_R), 0.0*v.get (Nc.HIPertITwoFluidsVars.S_I))**2)
Ps_Pzeta1.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.PZETA_R), 0.0*v.get (Nc.HIPertITwoFluidsVars.PZETA_I))**2)
Ps_PS1.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.PS_R), 0.0*v.get (Nc.HIPertITwoFluidsVars.PS_I))**2)
print ("norm = % 8.2e % 21.15f [%8.2e]" % (pert.get_state_mod () - 1.0, alpha, cosmo.x_alpha (alpha)))
"""
alphai = alpha_mode2main
print ("# Calculating mode 2, initial time % 20.15f [%8.2e]: " % (alphai, cosmo.x_alpha (alphai)))
pert.get_init_cond_zetaS (cosmo, alphai, 2, 0.25 * math.pi, ci)
pert.set_init_cond (cosmo, alphai, 2, False, ci)
Ps_zeta2 = []
Ps_S2 = []
Ps_Pzeta2 = []
Ps_PS2 = []
alpha_a_pre = np.linspace (alphai, alpha_mode1main, 1000, endpoint = False)
for alpha in tqdm (alpha_a_pre[1:]):
pert.evolve (cosmo, alpha)
for alpha in tqdm (alpha_a):
#for alpha in alpha_a:
pert.evolve (cosmo, alpha)
v, alphac = pert.peek_state (cosmo)
Ps_zeta2.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.ZETA_R), v.get (Nc.HIPertITwoFluidsVars.ZETA_I))**2)
Ps_S2.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.S_R), v.get (Nc.HIPertITwoFluidsVars.S_I))**2)
Ps_Pzeta2.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.PZETA_R), v.get (Nc.HIPertITwoFluidsVars.PZETA_I))**2)
Ps_PS2.append (math.hypot (v.get (Nc.HIPertITwoFluidsVars.PS_R), v.get (Nc.HIPertITwoFluidsVars.PS_I))**2)
"""
"""
plt.plot (alpha_a, gammabar11_a, label = r'$\bar\gamma_{11}$')
plt.plot (alpha_a, gammabar22_a, label = r'$\bar\gamma_{22}$')
plt.plot (alpha_a, gammabar12_a, label = r'$\bar\gamma_{12}$')
plt.plot (alpha_a, taubar12_a, label = r'$\bar\tau_{12}$')
plt.plot (alpha_a, nu1_a, label = r'$\nu_{1}$')
plt.plot (alpha_a, nu2_a, label = r'$\nu_{2}$')
"""
plt.plot (alpha_a, Ps_zeta1, label = r'$P^1_\zeta$')
plt.plot (alpha_a, Ps_S1, label = r'$P^1_S$')
#plt.plot (alpha_a, Ps_zeta2, label = r'$P^2_\zeta$')
#plt.plot (alpha_a, Ps_S2, label = r'$P^2_S$')
plt.plot (alpha_a, Ps_Pzeta1, label = r'$P^1_{P_\zeta}$')
plt.plot (alpha_a, Ps_PS1, label = r'$P^1_{P_S}$')
#plt.plot (alpha_a, Ps_Pzeta2, label = r'$P^2_{P_\zeta}$')
#plt.plot (alpha_a, Ps_PS2, label = r'$P^2_{P_S}$')
plt.grid ()
plt.legend (loc="upper left")
#plt.xscale('log')
plt.yscale('log')
Delta_zeta1 = mode_k**3 * Ps_zeta1.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
#Delta_zeta2 = mode_k**3 * Ps_zeta2.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
Delta_S1 = mode_k**3 * Ps_S1.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
#Delta_S2 = mode_k**3 * Ps_S2.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
Delta_Pzeta1 = mode_k**3 * Ps_Pzeta1.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
#Delta_Pzeta2 = mode_k**3 * Ps_Pzeta2.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
Delta_PS1 = mode_k**3 * Ps_PS1.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
#Delta_PS2 = mode_k**3 * Ps_PS2.pop () / (2.0 * math.pi**2 * cosmo.RH_planck ()**2)
#print ("# Final values k= % 20.15g Ps_zeta1 = % 21.15e Ps_zeta2 = % 21.15e Ps_S1 = % 21.15e Ps_S2 = % 21.15e" % (mode_k, Delta_zeta1, Delta_zeta2, Delta_S1, Delta_S2))
#print ("# Final values k= % 20.15g Ps_Pzeta1= % 21.15e Ps_Pzeta2= % 21.15e Ps_PS1= % 21.15e Ps_PS2= % 21.15e" % (mode_k, Delta_Pzeta1, Delta_Pzeta2, Delta_PS1, Delta_PS2))
print ("# Final values k= % 20.15g Ps_zeta1 = % 21.15e Ps_Pzeta1 = % 21.15e Ps_S1 = % 21.15e Ps_PS1 = % 21.15e" % (mode_k, Delta_zeta1, Delta_Pzeta1, Delta_S1, Delta_PS1))
#print ("# Final values k= % 20.15g Ps_zeta2 = % 21.15e Ps_Pzeta2 = % 21.15e Ps_S2 = % 21.15e Ps_PS2 = % 21.15e" % (mode_k, Delta_zeta2, Delta_Pzeta2, Delta_S2, Delta_PS2))
plt.show ()
plt.clf ()
|
NumCosmo/NumCosmo
|
examples/twofluids_wkb_mode.py
|
Python
|
gpl-3.0
| 7,072 |
# vim: set ts=8 sw=4 sts=4 et:
#=======================================================================
# Copyright (C) 2008, OSSO B.V.
# This file is part of LightCount.
#
# LightCount is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LightCount is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LightCount. If not, see <http://www.gnu.org/licenses/>.
#=======================================================================
def inet_atol(ip):
''' Converts the Internet host address IP from the standard numbers-and-dots notation into a long integer. '''
ip_long = 0L
for byte in [int(byte) << (8 * (3 - pos)) for pos, byte in enumerate(ip.split('.'))]:
ip_long |= byte
return ip_long
def inet_ltoa(ip):
    ''' Converts an unsigned 32-bit integer to standard numbers-and-dots notation. '''
ip = long(ip)
return '%u.%u.%u.%u' % (ip >> 24, (ip >> 16) & 0xff, (ip >> 8) & 0xff, ip & 0xff)
def bitfloor(number):
''' Rounds down to the nearest number with only one active bit. '''
number = long(number)
for i in range(32):
if (number >> (i + 1)) == 0:
return (number >> i) << i
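# Editor's note: a small usage sketch added for illustration (not part of the
# original module); the values below were worked out by hand from the
# functions above.
#   inet_atol('10.0.0.1')  -> 167772161   (10 * 2**24 + 0 * 2**16 + 0 * 2**8 + 1)
#   inet_ltoa(167772161)   -> '10.0.0.1'
#   bitfloor(300)          -> 256         (largest single-bit value not exceeding 300)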
|
ossobv/lightcount
|
interface/lightcount/bits.py
|
Python
|
gpl-3.0
| 1,599 |
"""
Copyright (C) 2008 Leonard Norrgard <leonard.norrgard@refactor.fi>
This file is part of Geohash.
Geohash is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Geohash is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
License for more details.
You should have received a copy of the GNU Affero General Public
License along with Geohash. If not, see
<http://www.gnu.org/licenses/>.
"""
from math import log10
# Note: the alphabet in geohash differs from the common base32
# alphabet described in IETF's RFC 4648
# (http://tools.ietf.org/html/rfc4648)
__base32 = '0123456789bcdefghjkmnpqrstuvwxyz'
__decodemap = { }
for i in range(len(__base32)):
__decodemap[__base32[i]] = i
del i
def decode_exactly(geohash):
"""
Decode the geohash to its exact values, including the error
margins of the result. Returns four float values: latitude,
longitude, the plus/minus error for latitude (as a positive
number) and the plus/minus error for longitude (as a positive
number).
"""
lat_interval, lon_interval = (-90.0, 90.0), (-180.0, 180.0)
lat_err, lon_err = 90.0, 180.0
is_even = True
for c in geohash:
cd = __decodemap[c]
for mask in [16, 8, 4, 2, 1]:
if is_even: # adds longitude info
lon_err /= 2
if cd & mask:
lon_interval = ((lon_interval[0]+lon_interval[1])/2, lon_interval[1])
else:
lon_interval = (lon_interval[0], (lon_interval[0]+lon_interval[1])/2)
else: # adds latitude info
lat_err /= 2
if cd & mask:
lat_interval = ((lat_interval[0]+lat_interval[1])/2, lat_interval[1])
else:
lat_interval = (lat_interval[0], (lat_interval[0]+lat_interval[1])/2)
is_even = not is_even
lat = (lat_interval[0] + lat_interval[1]) / 2
lon = (lon_interval[0] + lon_interval[1]) / 2
return lat, lon, lat_err, lon_err
def decode(geohash):
"""
Decode geohash, returning two strings with latitude and longitude
containing only relevant digits and with trailing zeroes removed.
"""
lat, lon, lat_err, lon_err = decode_exactly(geohash)
# Format to the number of decimals that are known
lats = "%.*f" % (max(1, int(round(-log10(lat_err)))) - 1, lat)
lons = "%.*f" % (max(1, int(round(-log10(lon_err)))) - 1, lon)
if '.' in lats: lats = lats.rstrip('0')
if '.' in lons: lons = lons.rstrip('0')
return lats, lons
def encode(latitude, longitude, precision=12):
"""
Encode a position given in float arguments latitude, longitude to
a geohash which will have the character count precision.
"""
lat_interval, lon_interval = (-90.0, 90.0), (-180.0, 180.0)
geohash = []
bits = [ 16, 8, 4, 2, 1 ]
bit = 0
ch = 0
even = True
while len(geohash) < precision:
if even:
mid = (lon_interval[0] + lon_interval[1]) / 2
if longitude > mid:
ch |= bits[bit]
lon_interval = (mid, lon_interval[1])
else:
lon_interval = (lon_interval[0], mid)
else:
mid = (lat_interval[0] + lat_interval[1]) / 2
if latitude > mid:
ch |= bits[bit]
lat_interval = (mid, lat_interval[1])
else:
lat_interval = (lat_interval[0], mid)
even = not even
if bit < 4:
bit += 1
else:
geohash += __base32[ch]
bit = 0
ch = 0
return ''.join(geohash)
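# Editor's note: a brief usage sketch added for illustration (not part of the
# original module), using the commonly cited geohash example near 42.6, -5.6:
#   encode(42.6, -5.6, precision=5)  -> 'ezs42'
#   decode('ezs42')                  -> ('42.6', '-5.6')
#   decode_exactly('ezs42')          -> (~42.605, ~-5.603, ~0.022, ~0.022)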
def bounding_box_hashes(min_lat, min_lon, max_lat, max_lon, length):
top_left = encode(min_lat, min_lon, precision=length)
decoded = decode_exactly(top_left)
delta = decoded[2]
hashes = set()
curr_lat = min_lat
curr_lon = min_lon
while curr_lat <= max_lat:
while curr_lon <= max_lon:
hashes.add(encode(curr_lat, curr_lon, precision=length))
curr_lon += delta
curr_lat += delta
curr_lon = min_lon
return list(hashes)
def bounding_box_hash(min_lat, min_lon, max_lat, max_lon):
center_lat = ((max_lat - min_lat)/2) + min_lat
center_lon = ((max_lon - min_lon)/2) + min_lon
width = abs(max_lat - min_lat)
for geohash_length in range(5, 0, -1):
geohash = encode(center_lat, center_lon, precision=geohash_length)
geohash_lat, geohash_lon, geohash_width_err, geohash_height_err = decode_exactly(geohash)
if (2 * geohash_width_err) > width:
break
return geohash
|
sibsibsib/pressureNET-server
|
utils/geohash.py
|
Python
|
gpl-3.0
| 4,955 |
# -*- coding: utf-8 -*-
"""Threading module, used to launch games while monitoring them"""
import os
import sys
import threading
import subprocess
from gi.repository import GLib
from textwrap import dedent
from lutris import settings
from lutris.util.log import logger
from lutris.util.process import Process
from lutris.util.system import find_executable
HEARTBEAT_DELAY = 1500 # Number of milliseconds between each heartbeat
class LutrisThread(threading.Thread):
"""Runs the game in a separate thread"""
debug_output = True
def __init__(self, command, runner=None, env={}, rootpid=None, term=None):
"""Thread init"""
threading.Thread.__init__(self)
self.env = env
self.command = command
self.runner = runner
self.game_process = None
self.return_code = None
self.rootpid = rootpid or os.getpid()
self.terminal = term
self.is_running = True
self.stdout = ''
self.attached_threads = []
self.cycles_without_children = 0
self.max_cycles_without_children = 40
if self.runner:
self.path = runner.working_dir
else:
self.path = '/tmp/'
self.env_string = ''
for (k, v) in self.env.iteritems():
self.env_string += '%s="%s" ' % (k, v)
self.command_string = ' '.join(
['"%s"' % token for token in self.command]
)
def attach_thread(self, thread):
"""Attach child process that need to be killed on game exit"""
self.attached_threads.append(thread)
def run(self):
"""Run the thread"""
logger.debug("Command env: " + self.env_string)
logger.debug("Running command: " + self.command_string)
GLib.timeout_add(HEARTBEAT_DELAY, self.watch_children)
if self.terminal and find_executable(self.terminal):
self.run_in_terminal()
else:
env = os.environ.copy()
env.update(self.env)
self.game_process = subprocess.Popen(self.command, bufsize=1,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=self.path, env=env)
os.chdir(os.path.expanduser('~'))
for line in iter(self.game_process.stdout.readline, ''):
self.stdout += line
if self.debug_output:
sys.stdout.write(line)
def run_in_terminal(self):
# Write command in a script file.
'''Running it from a file is likely the only way to set env vars only
for the command (not for the terminal app).
It also permits the only reliable way to keep the term open when the
game is exited.'''
file_path = os.path.join(settings.CACHE_DIR, 'run_in_term.sh')
with open(file_path, 'w') as f:
f.write(dedent(
"""\
#!/bin/sh
cd "%s"
%s %s
exec sh # Keep term open
""" % (self.path, self.env_string, self.command_string)
))
os.chmod(file_path, 0744)
term_command = [self.terminal, '-e', file_path]
self.game_process = subprocess.Popen(term_command, bufsize=1,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=self.path)
os.chdir(os.path.expanduser('~'))
def iter_children(self, process, topdown=True, first=True):
if self.runner and self.runner.name.startswith('wine') and first:
pids = self.runner.get_pids()
for pid in pids:
wineprocess = Process(pid)
if wineprocess.name not in self.runner.core_processes:
process.children.append(wineprocess)
for child in process.children:
if topdown:
yield child
subs = self.iter_children(child, topdown=topdown, first=False)
for sub in subs:
yield sub
if not topdown:
yield child
def set_stop_command(self, func):
self.stop_func = func
def stop(self, killall=False):
for thread in self.attached_threads:
logger.debug("Stopping thread %s", thread)
thread.stop()
if hasattr(self, 'stop_func'):
logger.debug("Calling custom stop function %s", self.stop_func)
self.stop_func()
if not killall:
return
for process in self.iter_children(Process(self.rootpid),
topdown=False):
logger.debug("Killing process %s", process)
process.kill()
def watch_children(self):
"""pokes at the running process"""
process = Process(self.rootpid)
num_children = 0
num_watched_children = 0
terminated_children = 0
for child in self.iter_children(process):
num_children += 1
if child.name in ('steamwebhelper', 'steam', 'sh', 'tee', 'bash',
'Steam.exe', 'steamwebhelper.',
'steamerrorrepor'):
continue
num_watched_children += 1
logger.debug("{}\t{}\t{}".format(child.pid,
child.state,
child.name))
if child.state == 'Z':
terminated_children += 1
if terminated_children and terminated_children == num_watched_children:
logger.debug("All children terminated")
self.game_process.wait()
if num_watched_children == 0:
self.cycles_without_children += 1
if(num_children == 0
or self.cycles_without_children >= self.max_cycles_without_children):
logger.debug("No children left in thread, exiting")
self.is_running = False
self.return_code = self.game_process.returncode
return False
return True
|
GoeGaming/lutris
|
lutris/thread.py
|
Python
|
gpl-3.0
| 6,218 |
"""
This file is part of the geometry module.
This module contains functions to create vertex lists of basic
geometrical shapes.
"""
__author__ = 'Florian Krause <florian@expyriment.org>, \
Oliver Lindemann <oliver@expyriment.org>'
__version__ = ''
__revision__ = ''
__date__ = ''
import math as _math
from ._geometry import XYPoint, points2vertices
def _angular_vertex(angle, length):
"""Helper function.
Calculates the vertex coordinates of a line with a particular
length and angle (relative to the horizontal).
"""
angle = _math.radians(angle)
return -1*_math.cos(angle)*float(length), -1*_math.sin(angle)*float(length)
def vertices_rectangle(size):
"""Returns a list of vertices describing a rectangle.
Notes
-----
The resulting vertices can be plotted with the class
stimuli.Shape(vertex_list=...).
Parameters
----------
size : (int, int)
size (width, height) of the rectangle
Returns
-------
vtx : list of vertices
"""
return [ (size[0]-1, 0),
(0, -size[1]+1),
(-size[0]+1, 0),
(0, size[1]-1)]
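# Editor's note: illustrative values added here (not part of the original
# module). For a 10 x 5 rectangle the relative vertex list is:
#   vertices_rectangle((10, 5))  ->  [(9, 0), (0, -4), (-9, 0), (0, 4)]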
def vertices_cross(size, line_width):
"""Returns a list of vertices describing a cross.
Notes
-----
The resulting vertices can be plotted with the class
stimuli.Shape(vertex_list=...).
Parameters
----------
size : (int, int)
xy, length of the horizontal (x) and vertical (y) line
line_width : int
width of the lines
Returns
-------
vtx : list of vertices
See Also
--------
expyriment.stimuli.FixCross
"""
x_a = (size[0] - line_width) // 2
x_b = x_a
y_a = (size[1] - line_width) // 2
y_b = y_a
    if (size[0] - line_width) % 2: # width and line_width have different parities
x_b = x_a + 1
# to ensure that Shape behaves like two crossed surfaces plotted on each other
    if line_width % 2: # odd line width: swap x_a and x_b
x_b, x_a = x_a, x_b
    if (size[1] - line_width) % 2: # height and line_width have different parities
y_b = y_a + 1
        if line_width % 2 == 0: # even line width: swap y_a and y_b
y_b, y_a = y_a, y_b
return [(line_width - 1, 0),
(0, -y_a),
(x_a, 0),
(0, -line_width + 1),
(-x_a, 0),
(0, -y_b),
(-line_width + 1, 0),
(0, y_b),
(-x_b, 0),
(0, line_width - 1),
(x_b, 0)]
def vertices_trapezoid(width_top, width_bottom, height):
"""Returns a list of vertices describing a trapezoid
Notes
-----
The resulting vertices can be plotted with the class
stimuli.Shape(vertex_list=...).
Parameters
----------
width_top: int
width of the top edge
width_bottom: int
width of the bottom edge
height : int
height of the trapezoid
Returns
-------
vtx : list of vertices
"""
left_bottom =XYPoint(x = 0, y = 0)
right_bottom = XYPoint(x = width_bottom, y = 0)
left_top = XYPoint(x = 0 + (width_bottom-width_top)/2.0, y = height)
right_top = XYPoint(x = width_bottom - (width_bottom-width_top)/2.0, y = height)
return list(map(lambda xy: (int(xy[0]), int(xy[1])),
points2vertices((left_top, right_top, right_bottom, left_bottom))))
def vertices_triangle(angle, length1, length2):
"""Returns a list of vertices describing a triangle A, B, C.
::
A --- B
.
.
C
Notes
-----
The resulting vertices can be plotted with the class
stimuli.Shape(vertex_list=...).
Parameters
----------
angle : float
the angle between the lines AB and BC in degrees
length1 : float
the length between AB
length2 : float
the length between BC
Returns
-------
vtx : list of vertices
"""
xy = _angular_vertex(angle, length2)
return [(length1-1, 0), (int(xy[0]), int(xy[1]))]
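# Editor's note: illustrative values added here (not part of the original
# module). With a right angle at B, AB = 10 and BC = 5:
#   vertices_triangle(90, 10, 5)  ->  [(9, 0), (0, -5)]   (up to float rounding)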
def vertices_parallelogram(angle, length1, length2):
"""Returns a list of vertices describing a parallelogram A, B, C, D.
::
A --- B
. .
. .
D --- C
Notes
-----
The resulting vertices can be plotted with the class
stimuli.Shape(vertex_list=...).
Parameters
----------
angle : float
the angle between the lines AB and BC in degrees
length1 : float
the length between AB
length2 : float
the length between BC
Returns
-------
vtx : list of vertices
"""
vtx = vertices_triangle(angle=angle, length1=length1, length2=length2)
vtx.append((-length1+1, 0))
return vtx
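# Editor's note: illustrative values added here (not part of the original
# module); this is the triangle example above plus the returning edge C -> D:
#   vertices_parallelogram(90, 10, 5)  ->  [(9, 0), (0, -5), (-9, 0)]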
def vertices_regular_polygon(n_edges, length):
"""Returns a list of vertices describing a regular polygon.
Notes
-----
The resulting vertices can be plotted with the class
stimuli.Shape(vertex_list=...).
Parameters
----------
n_edges : int
the number of edges
length : float
the length of one side of the polygon
Returns
-------
vtx : list of vertices
"""
sum_of_angle = (n_edges - 2) * 180.0
angle = 180 - (sum_of_angle / n_edges)
x = 180
vtx = []
for _ in range(n_edges - 1):
v = _angular_vertex(x, length=length)
vtx.append((int(v[0]), int(v[1])))
x += angle
return vtx
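# Editor's note: illustrative values added here (not part of the original
# module). A square (4 edges) of side 10 yields three relative vertices; the
# Shape closes the polygon with the implicit final edge:
#   vertices_regular_polygon(4, 10)  ->  [(10, 0), (0, 10), (-10, 0)]
# (individual values may shift by one unit due to int() truncation)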
def vertices_frame(size, frame_thickness):
"""Returns a list of vertices describing a frame
Notes
-----
The resulting vertices can be plotted with the class
stimuli.Shape(vertex_list=...).
Parameters
----------
size : (int, int)
size (width, height) of the rectangle
frame_thickness : int
the thickness of the frame
Returns
-------
vtx : list of vertices
"""
return [(size[0] - frame_thickness - 1, 0),
(0, -size[1]+1),
(-size[0]+1, 0),
(0, size[1]-1),
(frame_thickness - 1, 0),
(0, -(size[1] - frame_thickness - 1)),
(size[0] - 2 * frame_thickness - 1, 0),
(0, size[1] - 2 * frame_thickness - 1),
(-(size[0] - 2 * frame_thickness - 2), 0)]
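# Editor's note: illustrative values added here (not part of the original
# module). A 10 x 8 frame with a thickness of 2 gives:
#   vertices_frame((10, 8), 2)
#     -> [(7, 0), (0, -7), (-9, 0), (0, 7), (1, 0), (0, -5), (5, 0), (0, 3), (-4, 0)]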
|
expyriment/expyriment
|
expyriment/misc/geometry/_basic_shapes.py
|
Python
|
gpl-3.0
| 6,280 |
import pkgutil
from importlib import import_module
TYPE_AMOUNT = 0 # This is used pretty much only in tests
__path__ = pkgutil.extend_path(__path__, __name__)
for _,modname,_ in pkgutil.walk_packages(path=__path__, prefix=__name__+"."):
import_module(modname)
TYPE_AMOUNT = TYPE_AMOUNT + 1
|
nyxxxie/spade
|
spade/typesystem/types/__init__.py
|
Python
|
gpl-3.0
| 299 |
from tkinter import *
from PlotTemp import *
from PlotLight import *
import time
class View():
def __init__(self, canvas, unit):
self.canvas = canvas
self.unit = unit
###--- drawGraph ---###
self.canvas.create_line(450,300,930,300, width=2) # x-axis
        self.canvas.create_line(450,300,450,50, width=2) # y-axis: light intensity
        self.canvas.create_line(930,300,930,50, width=2) # y2-axis: temperature
self.canvas.create_line(0,450,1000,450, width=2) # bottom GUI
# x-axis
for i in range(7):
            x = 450 + (i * 80) # offset from the left screen edge = 450, plus a grid line every 80 px step
self.canvas.create_line(x,300,x,50, width=1, dash=(2,5))
self.canvas.create_text(x,300, text='%d'% (10*(i)), anchor=N)
self.canvas.create_text(x-50,320, text='Time in seconds', font = "Helvetica 16 bold", anchor=N)
        # y-axis: light intensity
for i in range(6):
y = 300 - (i * 50)
self.canvas.create_line(450,y,930,y, width=1, dash=(2,5))
if (i == 0):
self.canvas.create_text(440,y, text='0', anchor=E)
else:
self.canvas.create_text(440,y, text='%d00'% (i*2), anchor=E)
self.canvas.create_text(480,35, text='Lichtintensiteit', font = "Helvetica 16 bold", anchor=E, fill='red')
# y-axis temp (-40, 60)
for i in range(6):
            y = 300 - (i * 50) # a grid line every 50 px step
self.canvas.create_text(960,y, text='%d'% (20*(i-2)), anchor=E)
self.canvas.create_text(990,35, text='Degrees Celsius', font = "Helvetica 16 bold", anchor=E, fill='blue')
###---- drawLinesInGraph ----####
s = 1
x2 = 450
yTemp = (((self.unit.getTemp() * -1) / 20) * 50) + 200
yLight = (((self.unit.getLight() * -1) / 200) * 50) + 300
        # Update the temperature and light intensity lines
plotTemp = PlotTemp(s, x2, yTemp, 1000, self.unit, self.canvas)
plotLight = PlotLight(s, x2, yLight, 1000, self.unit, self.canvas)
plotTemp.keepPlotting()
plotLight.keepPlotting()
|
femkeh/project2-1
|
python/View.py
|
Python
|
gpl-3.0
| 1,974 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# kate: space-indent on; indent-width 4; mixedindent off; indent-mode python;
from ..plugin import *
from arsoft.filelist import *
from arsoft.utils import which
from arsoft.sshutils import SudoSessionException
import hashlib
import sys
class SlapdBackupPluginConfig(BackupPluginConfig):
class ServerInstance(object):
def __init__(self, name, hostname, port=22, username='root', password=''):
self.name = name
self.hostname = hostname
self.port = port
self.username = username
self.password = password
def __str__(self):
return '%s (%s:***@%s:%i)' % (self.name, self.username, self.hostname, self.port)
def __init__(self, parent):
BackupPluginConfig.__init__(self, parent, 'slapd')
self._server_list = []
@property
def server_list(self):
return self._server_list
@server_list.setter
def server_list(self, value):
self._server_list = value
def _read_conf(self, inifile):
for sect in inifile.sections:
hostname = inifile.get(sect, 'host', None)
port = inifile.getAsInteger(sect, 'port', 22)
username = inifile.get(sect, 'username', None)
password = inifile.get(sect, 'password', None)
if hostname:
self._server_list.append(SlapdBackupPluginConfig.ServerInstance(sect,
hostname=hostname, port=port,
username=username, password=password))
return True
def _write_conf(self, inifile):
return True
def __str__(self):
ret = BackupPluginConfig.__str__(self)
ret = ret + 'servers:\n'
if self._server_list:
for item in self._server_list:
ret = ret + ' %s:\n' % item.name
ret = ret + ' server: %s:%i\n' % (item.hostname, item.port)
ret = ret + ' username: %s\n' % item.username
ret = ret + ' password: %s\n' % item.password
return ret
class SlapdBackupPlugin(BackupPlugin):
def __init__(self, backup_app):
self.config = SlapdBackupPluginConfig(backup_app)
BackupPlugin.__init__(self, backup_app, 'slapd')
self.slapcat_exe = which('slapcat', only_first=True)
def _update_dump_file(self, dest_file, dump_data):
m = hashlib.md5()
m.update(dump_data)
new_checksum = m.hexdigest()
old_checksum = None
try:
f = open(dest_file + '.md5', 'r')
old_checksum = f.read().strip()
f.close()
except IOError:
pass
ret = True
if old_checksum != new_checksum:
try:
f = open(dest_file, 'w')
f.write(dump_data.decode())
# protect the file content (includes passwords and other sensitive information)
# from the rest of the world.
os.fchmod(f.fileno(), 0o600)
f.close()
f = open(dest_file + '.md5', 'w')
f.write(new_checksum)
                # same for the checksum file; not strictly necessary, but consistent.
os.fchmod(f.fileno(), 0o600)
f.close()
except IOError:
ret = False
return ret
def perform_backup(self, **kwargs):
ret = True
backup_dir = self.config.intermediate_backup_directory
if not self._mkdir(backup_dir):
ret = False
if ret:
slapd_backup_filelist = FileListItem(base_directory=self.config.base_directory)
for server in self.config.server_list:
if self.backup_app._verbose:
print('backup LDAP server %s' % str(server))
slapd_dumpfile = os.path.join(backup_dir, server.name + '.ldif')
slapd_checksumfile = slapd_dumpfile + '.md5'
slapd_dump_data = None
exe = 'slapcat'
if self.backup_app.is_localhost(server.hostname):
if self.slapcat_exe is None:
sys.stderr.write('Unable to find slapcat executable for local LDAP backup of server %s.\n' % str(server))
ret = False
else:
exe = self.slapcat_exe
if ret:
if self.backup_app._verbose:
print('backup remote LDAP server %s' % server.hostname)
server_item = self.backup_app.find_remote_server_entry(hostname=server.hostname)
if self.backup_app._verbose:
print('use remote server %s' % str(server_item))
if server_item:
cxn = server_item.connection
try:
(sts, stdout_data, stderr_data) = cxn.runcmdAndGetData(args=[exe], sudo=True, outputStdErr=False, outputStdOut=False)
if sts != 0:
sys.stderr.write('slapcat failed, error %s\n' % stderr_data)
ret = False
else:
slapd_dump_data = stdout_data
except SudoSessionException as e:
sys.stderr.write('slapcat failed, because sudo failed: %s.\n' % str(e))
ret = False
if ret and slapd_dump_data:
ret = self._update_dump_file(slapd_dumpfile, slapd_dump_data)
if ret:
slapd_backup_filelist.append(slapd_dumpfile)
slapd_backup_filelist.append(slapd_checksumfile)
self.backup_app.append_to_filelist(slapd_backup_filelist)
return ret
|
aroth-arsoft/arsoft-python
|
python3/arsoft/backup/plugins/slapd.py
|
Python
|
gpl-3.0
| 6,014 |
#!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_image
short_description: Manage docker images.
version_added: "1.5"
description:
- Build, load or pull an image, making the image available for creating containers. Also supports tagging an
image into a repository and archiving an image to a .tar file.
options:
archive_path:
description:
- Use with state C(present) to archive an image to a .tar file.
required: false
version_added: "2.1"
load_path:
description:
- Use with state C(present) to load an image from a .tar file.
required: false
version_added: "2.2"
dockerfile:
description:
- Use with state C(present) to provide an alternate name for the Dockerfile to use when building an image.
default: Dockerfile
required: false
version_added: "2.0"
force:
description:
- Use with state I(absent) to un-tag and remove all images matching the specified name. Use with state
C(present) to build, load or pull an image when the image already exists.
default: false
required: false
version_added: "2.1"
http_timeout:
description:
- Timeout for HTTP requests during the image build operation. Provide a positive integer value for the number of
seconds.
required: false
version_added: "2.1"
name:
description:
- "Image name. Name format will be one of: name, repository/name, registry_server:port/name.
When pushing or pulling an image the name can optionally include the tag by appending ':tag_name'."
required: true
path:
description:
- Use with state 'present' to build an image. Will be the path to a directory containing the context and
Dockerfile for building an image.
aliases:
- build_path
required: false
pull:
description:
- When building an image downloads any updates to the FROM image in Dockerfile.
default: true
required: false
version_added: "2.1"
push:
description:
- Push the image to the registry. Specify the registry as part of the I(name) or I(repository) parameter.
default: false
required: false
version_added: "2.2"
rm:
description:
- Remove intermediate containers after build.
default: true
required: false
version_added: "2.1"
nocache:
description:
- Do not use cache when building an image.
default: false
required: false
repository:
description:
- Full path to a repository. Use with state C(present) to tag the image into the repository. Expects
format I(repository:tag). If no tag is provided, will use the value of the C(tag) parameter or I(latest).
required: false
version_added: "2.1"
state:
description:
- Make assertions about the state of an image.
- When C(absent) an image will be removed. Use the force option to un-tag and remove all images
matching the provided name.
- When C(present) check if an image exists using the provided name and tag. If the image is not found or the
force option is used, the image will either be pulled, built or loaded. By default the image will be pulled
from Docker Hub. To build the image, provide a path value set to a directory containing a context and
Dockerfile. To load an image, specify load_path to provide a path to an archive file. To tag an image to a
repository, provide a repository path. If the name contains a repository path, it will be pushed.
- "NOTE: C(build) is DEPRECATED and will be removed in release 2.3. Specifying C(build) will behave the
same as C(present)."
required: false
default: present
choices:
- absent
- present
- build
tag:
description:
- Used to select an image when pulling. Will be added to the image when pushing, tagging or building. Defaults to
I(latest).
- If C(name) parameter format is I(name:tag), then tag value from C(name) will take precedence.
default: latest
required: false
buildargs:
description:
- Provide a dictionary of C(key:value) build arguments that map to Dockerfile ARG directive.
- Docker expects the value to be a string. For convenience any non-string values will be converted to strings.
- Requires Docker API >= 1.21 and docker-py >= 1.7.0.
required: false
version_added: "2.2"
container_limits:
description:
- A dictionary of limits applied to each container created by the build process.
required: false
version_added: "2.1"
suboptions:
memory:
description: Set memory limit for build
memswap:
description: Total memory (memory + swap), -1 to disable swap
cpushares:
description: CPU shares (relative weight)
cpusetcpus:
description: CPUs in which to allow execution, e.g., "0-3", "0,1"
use_tls:
description:
- "DEPRECATED. Whether to use tls to connect to the docker server. Set to C(no) when TLS will not be used. Set to
C(encrypt) to use TLS. And set to C(verify) to use TLS and verify that the server's certificate is valid for the
server. NOTE: If you specify this option, it will set the value of the tls or tls_verify parameters."
choices:
- no
- encrypt
- verify
default: no
required: false
version_added: "2.0"
extends_documentation_fragment:
- docker
requirements:
- "python >= 2.6"
- "docker-py >= 1.7.0"
- "Docker API >= 1.20"
author:
- Pavel Antonov (@softzilla)
- Chris Houseknecht (@chouseknecht)
- James Tanner (@jctanner)
'''
EXAMPLES = '''
- name: pull an image
docker_image:
name: pacur/centos-7
- name: Tag and push to docker hub
docker_image:
name: pacur/centos-7
repository: dcoppenhagan/myimage
tag: 7.0
push: yes
- name: Tag and push to local registry
docker_image:
name: centos
repository: localhost:5000/centos
tag: 7
push: yes
- name: Remove image
docker_image:
state: absent
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
- name: Build an image and push it to a private repo
docker_image:
path: ./sinatra
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
push: yes
- name: Archive image
docker_image:
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
archive_path: my_sinatra.tar
- name: Load image from archive and push to a private registry
docker_image:
name: localhost:5000/myimages/sinatra
tag: v1
push: yes
load_path: my_sinatra.tar
- name: Build image and with buildargs
docker_image:
path: /path/to/build/dir
name: myimage
buildargs:
log_volume: /var/log/myapp
listen_port: 8080
'''
RETURN = '''
image:
description: Image inspection results for the affected image.
returned: success
type: complex
sample: {}
'''
from ansible.module_utils.docker_common import *
try:
if HAS_DOCKER_PY_2:
from docker.auth import resolve_repository_name
else:
from docker.auth.auth import resolve_repository_name
from docker.utils.utils import parse_repository_tag
except ImportError:
# missing docker-py handled in docker_common
pass
class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
self.client = client
self.results = results
parameters = self.client.module.params
self.check_mode = self.client.check_mode
self.archive_path = parameters.get('archive_path')
self.container_limits = parameters.get('container_limits')
self.dockerfile = parameters.get('dockerfile')
self.force = parameters.get('force')
self.load_path = parameters.get('load_path')
self.name = parameters.get('name')
self.nocache = parameters.get('nocache')
self.path = parameters.get('path')
self.pull = parameters.get('pull')
self.repository = parameters.get('repository')
self.rm = parameters.get('rm')
self.state = parameters.get('state')
self.tag = parameters.get('tag')
self.http_timeout = parameters.get('http_timeout')
self.push = parameters.get('push')
self.buildargs = parameters.get('buildargs')
# If name contains a tag, it takes precedence over tag parameter.
repo, repo_tag = parse_repository_tag(self.name)
if repo_tag:
self.name = repo
self.tag = repo_tag
if self.state in ['present', 'build']:
self.present()
elif self.state == 'absent':
self.absent()
def fail(self, msg):
self.client.fail(msg)
def present(self):
'''
Handles state = 'present', which includes building, loading or pulling an image,
depending on user provided parameters.
:returns None
'''
image = self.client.find_image(name=self.name, tag=self.tag)
if not image or self.force:
if self.path:
# Build the image
if not os.path.isdir(self.path):
self.fail("Requested build path %s could not be found or you do not have access." % self.path)
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.log("Building image %s" % image_name)
self.results['actions'].append("Built image %s from %s" % (image_name, self.path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.build_image()
elif self.load_path:
# Load the image from an archive
if not os.path.isfile(self.load_path):
self.fail("Error loading image %s. Specified path %s does not exist." % (self.name,
self.load_path))
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.results['actions'].append("Loaded image %s from %s" % (image_name, self.load_path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.load_image()
else:
# pull the image
self.results['actions'].append('Pulled image %s:%s' % (self.name, self.tag))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.client.pull_image(self.name, tag=self.tag)
if self.archive_path:
self.archive_image(self.name, self.tag)
if self.push and not self.repository:
self.push_image(self.name, self.tag)
elif self.repository:
self.tag_image(self.name, self.tag, self.repository, force=self.force, push=self.push)
def absent(self):
'''
Handles state = 'absent', which removes an image.
:return None
'''
image = self.client.find_image(self.name, self.tag)
if image:
name = self.name
if self.tag:
name = "%s:%s" % (self.name, self.tag)
if not self.check_mode:
try:
self.client.remove_image(name, force=self.force)
except Exception as exc:
self.fail("Error removing image %s - %s" % (name, str(exc)))
self.results['changed'] = True
self.results['actions'].append("Removed image %s" % (name))
self.results['image']['state'] = 'Deleted'
def archive_image(self, name, tag):
'''
Archive an image to a .tar file. Called when archive_path is passed.
:param name - name of the image. Type: str
:return None
'''
if not tag:
tag = "latest"
image = self.client.find_image(name=name, tag=tag)
if not image:
self.log("archive image: image %s:%s not found" % (name, tag))
return
image_name = "%s:%s" % (name, tag)
self.results['actions'].append('Archived image %s to %s' % (image_name, self.archive_path))
self.results['changed'] = True
if not self.check_mode:
self.log("Getting archive of image %s" % image_name)
try:
image = self.client.get_image(image_name)
except Exception as exc:
self.fail("Error getting image %s - %s" % (image_name, str(exc)))
try:
with open(self.archive_path, 'w') as fd:
for chunk in image.stream(2048, decode_content=False):
fd.write(chunk)
except Exception as exc:
self.fail("Error writing image archive %s - %s" % (self.archive_path, str(exc)))
image = self.client.find_image(name=name, tag=tag)
if image:
self.results['image'] = image
def push_image(self, name, tag=None):
'''
If the name of the image contains a repository path, then push the image.
:param name Name of the image to push.
:param tag Use a specific tag.
:return: None
'''
repository = name
if not tag:
repository, tag = parse_repository_tag(name)
registry, repo_name = resolve_repository_name(repository)
self.log("push %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
if registry:
self.results['actions'].append("Pushed image %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
self.results['changed'] = True
if not self.check_mode:
status = None
try:
for line in self.client.push(repository, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('errorDetail'):
raise Exception(line['errorDetail']['message'])
status = line.get('status')
except Exception as exc:
if re.search('unauthorized', str(exc)):
if re.search('authentication required', str(exc)):
self.fail("Error pushing image %s/%s:%s - %s. Try logging into %s first." %
(registry, repo_name, tag, str(exc), registry))
else:
self.fail("Error pushing image %s/%s:%s - %s. Does the repository exist?" %
(registry, repo_name, tag, str(exc)))
self.fail("Error pushing image %s: %s" % (repository, str(exc)))
self.results['image'] = self.client.find_image(name=repository, tag=tag)
if not self.results['image']:
self.results['image'] = dict()
self.results['image']['push_status'] = status
def tag_image(self, name, tag, repository, force=False, push=False):
'''
Tag an image into a repository.
:param name: name of the image. required.
:param tag: image tag.
:param repository: path to the repository. required.
:param force: bool. force tagging, even it image already exists with the repository path.
:param push: bool. push the image once it's tagged.
:return: None
'''
repo, repo_tag = parse_repository_tag(repository)
if not repo_tag:
repo_tag = "latest"
if tag:
repo_tag = tag
image = self.client.find_image(name=repo, tag=repo_tag)
found = 'found' if image else 'not found'
self.log("image %s was %s" % (repo, found))
if not image or force:
self.log("tagging %s:%s to %s:%s" % (name, tag, repo, repo_tag))
self.results['changed'] = True
self.results['actions'].append("Tagged image %s:%s to %s:%s" % (name, tag, repo, repo_tag))
if not self.check_mode:
try:
# Finding the image does not always work, especially running a localhost registry. In those
# cases, if we don't set force=True, it errors.
image_name = name
if tag and not re.search(tag, name):
image_name = "%s:%s" % (name, tag)
tag_status = self.client.tag(image_name, repo, tag=repo_tag, force=True)
if not tag_status:
raise Exception("Tag operation failed.")
except Exception as exc:
self.fail("Error: failed to tag image - %s" % str(exc))
self.results['image'] = self.client.find_image(name=repo, tag=repo_tag)
if push:
self.push_image(repo, repo_tag)
def build_image(self):
'''
Build an image
:return: image dict
'''
params = dict(
path=self.path,
tag=self.name,
rm=self.rm,
nocache=self.nocache,
stream=True,
timeout=self.http_timeout,
pull=self.pull,
forcerm=self.rm,
dockerfile=self.dockerfile,
decode=True
)
build_output = []
if self.tag:
params['tag'] = "%s:%s" % (self.name, self.tag)
if self.container_limits:
params['container_limits'] = self.container_limits
if self.buildargs:
for key, value in self.buildargs.items():
if not isinstance(value, basestring):
self.buildargs[key] = str(value)
params['buildargs'] = self.buildargs
for line in self.client.build(**params):
# line = json.loads(line)
self.log(line, pretty_print=True)
if "stream" in line:
build_output.append(line["stream"])
if line.get('error'):
if line.get('errorDetail'):
errorDetail = line.get('errorDetail')
self.fail(
"Error building %s - code: %s, message: %s, logs: %s" % (
self.name,
errorDetail.get('code'),
errorDetail.get('message'),
build_output))
else:
self.fail("Error building %s - message: %s, logs: %s" % (
self.name, line.get('error'), build_output))
return self.client.find_image(name=self.name, tag=self.tag)
def load_image(self):
'''
Load an image from a .tar archive
:return: image dict
'''
try:
self.log("Opening image %s" % self.load_path)
image_tar = open(self.load_path, 'r')
except Exception as exc:
self.fail("Error opening image %s - %s" % (self.load_path, str(exc)))
try:
self.log("Loading image from %s" % self.load_path)
self.client.load_image(image_tar)
except Exception as exc:
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
try:
image_tar.close()
except Exception as exc:
self.fail("Error closing image %s - %s" % (self.name, str(exc)))
return self.client.find_image(self.name, self.tag)
def main():
argument_spec = dict(
archive_path=dict(type='path'),
container_limits=dict(type='dict'),
dockerfile=dict(type='str'),
force=dict(type='bool', default=False),
http_timeout=dict(type='int'),
load_path=dict(type='path'),
name=dict(type='str', required=True),
        nocache=dict(type='bool', default=False),
path=dict(type='path', aliases=['build_path']),
pull=dict(type='bool', default=True),
push=dict(type='bool', default=False),
repository=dict(type='str'),
rm=dict(type='bool', default=True),
state=dict(type='str', choices=['absent', 'present', 'build'], default='present'),
tag=dict(type='str', default='latest'),
use_tls=dict(type='str', default='no', choices=['no', 'encrypt', 'verify']),
buildargs=dict(type='dict', default=None),
)
client = AnsibleDockerClient(
argument_spec=argument_spec,
supports_check_mode=True,
)
results = dict(
changed=False,
actions=[],
image={}
)
ImageManager(client, results)
client.module.exit_json(**results)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
Inspq/ansible
|
lib/ansible/modules/cloud/docker/docker_image.py
|
Python
|
gpl-3.0
| 21,821 |
# UrbanFootprint v1.5
# Copyright (C) 2017 Calthorpe Analytics
#
# This file is part of UrbanFootprint version 1.5
#
# UrbanFootprint is distributed under the terms of the GNU General
# Public License version 3, as published by the Free Software Foundation. This
# code is distributed WITHOUT ANY WARRANTY, without implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License v3 for more details; see <http://www.gnu.org/licenses/>.
from footprint.main.managers.geo_inheritance_manager import GeoInheritanceManager
from footprint.main.models.built_form.client_land_use_definition import ClientLandUseDefinition
__author__ = 'calthorpe_analytics'
from django.db import models
class SacogLandUseDefinition(ClientLandUseDefinition):
objects = GeoInheritanceManager()
@property
def label(self):
return self.land_use
land_use = models.CharField(max_length=100, null=True, blank=True)
min_du_ac = models.DecimalField(max_digits=9, decimal_places=2, default=0)
max_du_ac = models.DecimalField(max_digits=9, decimal_places=2, default=0)
max_emp_ac = models.DecimalField(max_digits=9, decimal_places=2, default=0)
rural_flag = models.BooleanField(default=False)
detached_flag = models.BooleanField(default=False)
attached_flag = models.BooleanField(default=False)
pct_ret_rest = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_ret_ret = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_ret_svc = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_off_gov = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_off_off = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_off_svc = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_off_med = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_ind = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_pub_edu = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_pub_med = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_pub_gov = models.DecimalField(max_digits=9, decimal_places=2, default=0)
pct_other = models.DecimalField(max_digits=9, decimal_places=2, default=0)
class Meta(object):
abstract = False
app_label = 'main'
|
CalthorpeAnalytics/urbanfootprint
|
footprint/client/configuration/sacog/built_form/sacog_land_use_definition.py
|
Python
|
gpl-3.0
| 2,408 |
'''
Author: Caleb Moses
Date: 04-06-2017
This file trains a character-level multi-layer RNN on text data.
Code is based on Andrej Karpathy's implementation in Torch at:
https://github.com/karpathy/char-rnn/blob/master/train.lua
I modified the model to run using TensorFlow and Keras. Supports GPUs,
as well as many other common model/optimization bells and whistles.
TO DO:
- Add learning rate
- Improve TensorBoard logs
'''
import os, re, random
import sys, argparse, codecs
import itertools as it
import numpy as np
from keras.models import Sequential, load_model
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import LSTM
from keras.callbacks import ModelCheckpoint
from keras.utils import np_utils
from keras.callbacks import TensorBoard
def parse_args():
'''Parses all keyword arguments for model and returns them.
    Returns:
        - data_dir: (str) The directory to the text file(s) for training.
        - seq_length: (int) The length of sequences to be used for training.
        - batch_size: (int) The number of minibatches to be used for training.
        - rnn_size: (int) The number of cells in each hidden layer in
        the network.
        - num_layers: (int) The number of hidden layers in the network.
        - dropout: (float) Dropout value (between 0, 1 exclusive).
        - epochs: (int) Number of epochs for training.
        - tensorboard: (int) If 1, save model statistics to TensorBoard.'''
# initialise parser
parser = argparse.ArgumentParser()
# add arguments, set default values and expected types
parser.add_argument("-data_dir",
help="The directory to the text file(s) for training.")
parser.add_argument("-seq_length", type=int, default=25,
help="The length of sequences to be used for training")
parser.add_argument("-batch_size", type=int, default=100,
help="The number of minibatches to be used for training")
parser.add_argument("-rnn_size", type=int, default=128,
help="The number of cells in each hidden layer in the network")
parser.add_argument("-num_layers", type=int, default=2,
help="The number of hidden layers in the network")
parser.add_argument("-dropout", type=float, default=0.1,
help="Dropout value (between 0, 1 exclusive)")
parser.add_argument("-epochs", type=int, default=1,
help="Number of epochs for training")
parser.add_argument("-tensorboard", type=int, default=1,
help="Save model statistics to TensorBoard")
# parse arguments and return their values
args = parser.parse_args()
return args.data_dir, args.seq_length, args.batch_size, args.rnn_size, \
args.num_layers, args.dropout, args.epochs, args.tensorboard
def print_data(text):
'''Re-encodes text so that it can be printed to command line
without raising a UnicodeEncodeError, and then prints it.
Incompatible characters are simply dropped before printing.
Args:
- text: (str) The text to be printed'''
print(text.encode(sys.stdout.encoding, errors='replace'))
def load_data(data_dir, encoding='utf-8'):
'''Appends all text files in data_dir into a single string and returns it.
All files are assumed to be utf-8 encoded, and of type '.txt'.
Args:
- data_dir: (str) The directory to text files for training.
- encoding: (str) The type of encoding to use when decoding each file.
Returns:
- text_data: (str) Appended files as a single string.'''
print("Loading data from %s" % os.path.abspath(data_dir))
# Initialise text string
text_data = ''
# select .txt files from data_dir
for filename in filter(lambda s: s.endswith(".txt"), os.listdir(data_dir)):
# open file with default encoding
print("Loading file: %s" % filename)
filepath = os.path.abspath(os.path.join(data_dir, filename))
with open(filepath,'r', encoding = encoding) as f:
text_data += f.read() + "\n"
return text_data
def get_text_data(text_data):
# create mapping of unique chars to integers, and a reverse mapping
chars = sorted(set(text_data))
char_to_int = {c: i for i, c in enumerate(chars)}
int_to_char = {i: c for i, c in enumerate(chars)}
# summarize the loaded data
n_text = len(text_data)
n_chars = len(chars)
print("n_text:", n_text)
print("n_chars:", n_chars)
return char_to_int, n_text, n_chars
def pre_processing(text_data, seq_length, char_to_int, n_text, n_chars):
    '''Preprocesses text_data for the RNN model.
    Args:
        - text_data: (str) text to be processed.
        - seq_length: (int) length of character sequences to be considered
        in the training set.
        - char_to_int: (dict) Maps characters in the character set to ints.
        - n_text: (int) The number of characters in the text.
        - n_chars: (int) The number of unique characters in the text.
    Returns:
        - X: (numpy.ndarray) normalised input sequences of shape
        (n_text - seq_length, seq_length, 1).
        - y: (numpy.ndarray) one-hot encoded next-character targets.'''
# prepare the dataset of input to output pairs encoded as integers
dataX = []
dataY = []
for start in range(n_text - seq_length):
seq_in = text_data[start:start + seq_length]
seq_out = text_data[start + seq_length]
dataX.append([char_to_int[char] for char in seq_in])
dataY.append(char_to_int[seq_out])
X = np.reshape(dataX, (n_text - seq_length, seq_length, 1))
# normalise X to [0, 1]
X = X / n_chars
# one hot encode the output variable
y = np_utils.to_categorical(dataY, num_classes=n_chars)
return X, y
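# Editor's note: a small worked example of the sliding-window encoding above,
# added for illustration (not part of the original script). With
# text_data = "hello" and seq_length = 3, the loop builds two training pairs:
#   "hel" -> 'l'   and   "ell" -> 'o'
# X is then reshaped to (2, 3, 1) and scaled by 1 / n_chars, while y one-hot
# encodes the two target characters.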
def build_model(seq_length, n_text, n_chars, rnn_size, num_layers, drop_prob):
'''Defines the RNN LSTM model.
Args:
- seq_length: (int) The length of each sequence for the model.
- rnn_size: (int) The number of cells in each hidden layer.
- num_layers: (int) The number of hidden layers in the network.
- drop_prob: (float) The proportion of cells to drop in each dropout
layer.
Returns:
- model: (keras.models.Sequential) The constructed Keras model.'''
model = Sequential()
for i in range(num_layers):
if i == num_layers - 1:
# add last hidden layer
model.add(LSTM(rnn_size, return_sequences=False))
elif i == 0:
# add first hidden layer
model.add(LSTM(rnn_size,
input_shape=(seq_length, 1),
return_sequences=True))
else:
# add middle hidden layer
model.add(LSTM(rnn_size, return_sequences=True))
model.add(Dropout(drop_prob))
# add output layer
model.add(Dense(n_chars, activation='softmax'))
# compile model
    model.compile(loss='categorical_crossentropy', optimizer='adam',
                  metrics=['accuracy'])
return model
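# Editor's note (added for illustration): with the defaults (num_layers=2,
# rnn_size=128) the loop above produces the stack
#   LSTM(128, return_sequences=True) -> Dropout -> LSTM(128) -> Dropout -> Dense(softmax)
# i.e. only the last LSTM collapses the sequence dimension before the output layer.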
def set_callbacks(tensorboard):
'''Set callbacks for Keras model.
Args:
- tensorboard: (int) Add tensorboard callback if tensorboard == 1
Returns:
- callbacks: (list) list of callbacks for model'''
callbacks = [ModelCheckpoint(
'checkpoints\\weights.{epoch:02d}-{val_loss:.2f}.hdf5')]
if tensorboard:
tb_callback = TensorBoard(log_dir=r'..\logs', histogram_freq=0.01,
write_grads=True, write_images=True)
callbacks.append(tb_callback)
return callbacks
# def fit_model(model, X, y, text_data, seq_length, batch_size, char_to_int,
# n_text, n_chars):
# '''Trains the model on the training data.
# Args:
# - model:
# - text_data:
# - seq_length:
# - batch_size:
# - char_to_int:'''
# model.fit(X, y, validation_split = 0.3)
# return model
def Main():
# load text data to memory
text_data = load_data(data_dir)
    # build the character-to-integer mapping and basic corpus statistics
char_to_int, n_text, n_chars = get_text_data(text_data)
# preprocess the text - construct character dictionaries etc
X, y = pre_processing(text_data, seq_length, char_to_int, n_text, n_chars)
# build and compile Keras model
model = build_model(seq_length, n_text, n_chars,
rnn_size, num_layers, drop_prob)
model.fit(X, y, validation_split = 0.3, verbose=2)
# # fit model using generator
# model = fit_model(model, X, y, text_data, seq_length, batch_size,
# char_to_int, n_text, n_chars)
if __name__ == "__main__":
# parse keyword arguments
data_dir, seq_length, batch_size, rnn_size, \
num_layers, drop_prob, epochs, tensorboard = parse_args()
Main()
|
mathematiguy/welcome-to-night-vale
|
scripts/test/model.py
|
Python
|
gpl-3.0
| 8,363 |
from aiohttp import web
from prometheus_client import (
REGISTRY,
Counter,
Gauge,
Histogram,
generate_latest,
)
from prometheus_client.core import GaugeMetricFamily
CLIENT_CONNECTIONS = Gauge(
'hpfeeds_broker_client_connections',
'Number of clients connected to broker',
)
CONNECTION_MADE = Counter(
'hpfeeds_broker_connection_made',
'Number of connections established',
)
CONNECTION_READY = Counter(
'hpfeeds_broker_connection_ready',
'Number of connections established + authenticated',
['ident'],
)
CONNECTION_ERROR = Counter(
'hpfeeds_broker_connection_error',
'Number of connections that experienced a protocol error',
['ident', 'category'],
)
CONNECTION_LOST = Counter(
'hpfeeds_broker_connection_lost',
'Number of connections lost',
['ident'],
)
CLIENT_SEND_BUFFER_SIZE = Gauge(
'hpfeeds_broker_connection_send_buffer_size',
'Number of bytes queued for transmission',
['ident'],
)
CLIENT_RECEIVE_BUFFER_SIZE = Gauge(
'hpfeeds_broker_connection_receive_buffer_size',
'Number of bytes received but not yet parsed',
['ident'],
)
CLIENT_RECEIVE_BUFFER_FILL = Counter(
'hpfeeds_broker_connection_receive_buffer_fill',
'Number of bytes queued in the parsing buffer',
['ident'],
)
CLIENT_SEND_BUFFER_FILL = Counter(
'hpfeeds_broker_connection_send_buffer_fill',
'Number of bytes queued in the send buffer',
['ident'],
)
CLIENT_SEND_BUFFER_DRAIN = Counter(
'hpfeeds_broker_connection_send_buffer_drain',
'Number of bytes drained from the send buffer and sent',
['ident'],
)
CLIENT_SEND_BUFFER_DEADLINE_START = Counter(
'hpfeeds_broker_connection_send_buffer_deadline_start',
'High watermark was exceeded and this connection was put on a deadline timer',
['ident'],
)
CLIENT_SEND_BUFFER_DEADLINE_RECOVER = Counter(
'hpfeeds_broker_connection_send_buffer_deadline_recover',
'Buffer recovered to low watermark or better and deadline timer was cancelled',
['ident'],
)
SUBSCRIPTIONS = Gauge(
'hpfeeds_broker_subscriptions',
'Number of subscriptions to a channel',
['ident', 'chan'],
)
RECEIVE_PUBLISH_COUNT = Counter(
'hpfeeds_broker_receive_publish_count',
'Number of events received by broker for a channel',
['ident', 'chan'],
)
RECEIVE_PUBLISH_SIZE = Histogram(
'hpfeeds_broker_receive_publish_size',
'Sizes of messages received by broker for a channel',
['ident', 'chan'],
buckets=[1024, 2048, 4096, 8192, 16384, 32768, 65536, 131072, 262144, 524288, 1048576, 2097152, 4194304],
)
def reset():
''' Reset the metrics to 0. This is intended for tests **only**. '''
CLIENT_CONNECTIONS._value.set(0)
SUBSCRIPTIONS._metrics = {}
RECEIVE_PUBLISH_SIZE._metrics = {}
RECEIVE_PUBLISH_COUNT._metrics = {}
CLIENT_RECEIVE_BUFFER_FILL._metrics = {}
CLIENT_SEND_BUFFER_FILL._metrics = {}
CLIENT_SEND_BUFFER_DRAIN._metrics = {}
CONNECTION_ERROR._metrics = {}
CONNECTION_LOST._metrics = {}
CONNECTION_MADE._value.set(0)
CONNECTION_READY._metrics = {}
def collect_metrics(broker):
CLIENT_SEND_BUFFER_SIZE._metrics = {}
CLIENT_RECEIVE_BUFFER_SIZE._metrics = {}
send_buffer_size = {}
receive_buffer_size = {}
for conn in broker.connections:
if not conn.ak:
continue
send_buffer_size[conn.ak] = send_buffer_size.get(conn.ak, 0) + conn.transport.get_write_buffer_size()
receive_buffer_size[conn.ak] = receive_buffer_size.get(conn.ak, 0) + len(conn.unpacker.buf)
for ak in send_buffer_size.keys():
CLIENT_SEND_BUFFER_SIZE.labels(ak).set(send_buffer_size[ak])
CLIENT_RECEIVE_BUFFER_SIZE.labels(ak).set(receive_buffer_size[ak])
class CustomCollector:
def __init__(self, server):
self._server = server
def collect(self):
g = GaugeMetricFamily('hpfeeds_client_authenticated_connections', 'Authenticated connections', labels=['ident', 'owner'])
metrics = {}
for conn in self._server.connections:
if not conn.ak:
continue
key = (conn.ak, conn.uid)
metrics[key] = metrics.get(key, 0) + 1
for (ident, owner), count in metrics.items():
g.add_metric([ident, owner or ''], count)
yield g
async def metrics(request):
collect_metrics(request.app.broker)
data = generate_latest(REGISTRY)
return web.Response(text=data.decode('utf-8'), content_type='text/plain', charset='utf-8')
async def healthz(request):
return web.Response(text='{}', content_type='application/json', charset='utf-8')
async def start_metrics_server(server, host, port):
collector = CustomCollector(server)
REGISTRY.register(collector)
app = web.Application()
app.broker = server
app.router.add_get('/metrics', metrics)
app.router.add_get('/healthz', healthz)
runner = web.AppRunner(app, access_log=None)
await runner.setup()
site = web.TCPSite(runner, host, port)
await site.start()
async def close():
try:
await runner.cleanup()
finally:
REGISTRY.unregister(collector)
return close
|
rep/hpfeeds
|
hpfeeds/broker/prometheus.py
|
Python
|
gpl-3.0
| 5,199 |
import modelx as mx
import pytest
@pytest.fixture
def param_formula_sample():
"""
m---SpaceA[a]---SpaceB---x
+-bar
"""
def param(a):
refs = {"y": SpaceB.x,
"z": SpaceB.bar()}
return {"refs": refs}
m = mx.new_model()
A = m.new_space("SpaceA", formula=param)
B = A.new_space("SpaceB")
B.x = 3
@mx.defcells(B)
def bar():
return 5
@mx.defcells(A)
def foo():
return y * z
return A
def test_change_ref_in_param_formula(
param_formula_sample
):
A = param_formula_sample
assert A[1].foo() == 3 * 5
A.SpaceB.x = 7
assert A[1].foo() == 7 * 5
def test_assign_value_to_cells_in_param_formula(
param_formula_sample
):
A = param_formula_sample
assert A[1].foo() == 3 * 5
A.SpaceB.bar = 11
assert A[1].foo() == 3 * 11
def test_change_cells_in_param_formula(
param_formula_sample
):
A = param_formula_sample
assert A[1].foo() == 3 * 5
A.SpaceB.bar.formula = lambda : 13
assert A[1].foo() == 3 * 13
|
fumitoh/modelx
|
modelx/tests/core/space/dynamic_spaces/test_param_formula.py
|
Python
|
gpl-3.0
| 1,105 |
import socket, threading
DEFAULT_ROBOT_IP = "172.16.0.2"
ROBOT_TCP_PORT = 80
CONNEXION_TIMEOUT = 2 # seconds
class LowLevelCom:
def __init__(self):
self.tcp_ip_interface = None
self.messageBuffer = []
self.rBuffer = bytearray()
self.readingMsg = False
self.currentMsgId = None
self.currentMsgLength = None
self.currentMgsData = []
self.connectionSuccess = None
    def connect(self, ip=DEFAULT_ROBOT_IP):
        self.tcp_ip_interface = TCPIP_interface()
        self.connectionSuccess = None
        connectThread = threading.Thread(target=self.bg_connect, args=(ip,))
        connectThread.start()
    def bg_connect(self, ip):
        # Runs in a background thread; 'ip' is the robot's IP address (the TCP
        # port is fixed to ROBOT_TCP_PORT inside TCPIP_interface.open).
        self.connectionSuccess = self.tcp_ip_interface.open(ip)
        if not self.connectionSuccess:
            self.tcp_ip_interface = None
def disconnect(self):
if self.tcp_ip_interface is not None:
self.tcp_ip_interface.close()
self.tcp_ip_interface = None
self.connectionSuccess = None
def sendMessage(self, message):
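        # Frame layout: 0xFF | id | length (0xFF marks a "non-standard" frame,
        # whose payload must end with a 0x00 terminator) | payload bytes.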
if self.tcp_ip_interface is not None:
b = bytearray([0xFF, message.id])
if message.standard:
b += bytearray([len(message.data)])
else:
b += bytearray([0xFF])
b += bytearray(message.data)
self.tcp_ip_interface.sendBytes(b)
def available(self):
return len(self.messageBuffer)
def getLastMessage(self):
message = self.messageBuffer[0]
self.messageBuffer = self.messageBuffer[1:]
return message
def communicate(self):
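        # Incremental parser: drain the TCP receive buffer one byte at a time and
        # rebuild frames (start byte, id, length, payload) across successive calls.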
if self.tcp_ip_interface is not None:
try:
avail = self.tcp_ip_interface.available()
if avail > 0:
self.rBuffer += bytearray(self.tcp_ip_interface.read(avail))
while len(self.rBuffer) > 0:
byte = self.rBuffer[0]
self.rBuffer = self.rBuffer[1:]
endReached = False
if not self.readingMsg:
if byte == 0xFF:
self.readingMsg = True
else:
print("Received incorrect byte:", byte)
elif self.currentMsgId is None:
self.currentMsgId = byte
elif self.currentMsgLength is None:
self.currentMsgLength = byte
if byte == 0:
endReached = True
else:
self.currentMgsData.append(byte)
if self.currentMsgLength == 0xFF and byte == 0x00:
endReached = True
elif self.currentMsgLength != 0xFF and len(self.currentMgsData) == self.currentMsgLength:
endReached = True
if endReached:
try:
message = Message(self.currentMsgId, bytes(self.currentMgsData), self.currentMsgLength != 0xFF)
self.messageBuffer.append(message)
except ValueError:
print("Incoherent frame received")
self.readingMsg = False
self.currentMsgId = None
self.currentMsgLength = None
self.currentMgsData = []
except IOError:
self.disconnect()
raise
class Message:
def __init__(self, ID, data=bytes(), standard=True):
if isinstance(ID, int) and 0 <= ID < 256 and isinstance(data, bytes) and isinstance(standard, bool):
self.id = ID
self.data = data
self.standard = standard
if not standard and data[len(data) - 1] != 0:
raise ValueError
else:
print("ERR - id=", ID, "data=", data, "std=", standard)
raise ValueError
def __str__(self):
return "id=" + str(self.id) + " data=" + self.data.decode('utf-8', errors='ignore') + " std=" + str(self.standard)
class TCPIP_interface:
def __init__(self):
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.rBuffer = bytearray()
self.receptionThread = threading.Thread(target=self._backgroundReception)
self.isOpen = False
def open(self, ip):
if self.isOpen:
return False
else:
try:
self.socket.settimeout(CONNEXION_TIMEOUT)
self.socket.connect((ip, ROBOT_TCP_PORT))
self.isOpen = True
self.socket.setblocking(True)
self.receptionThread.start()
return True
except (socket.timeout, OSError) as e:
print("[CONNEXION ERROR]", e)
return False
def close(self):
if self.isOpen:
self.isOpen = False
self.socket.close()
self.receptionThread.join()
def sendBytes(self, b):
if len(b) > 0:
nbSent = self.socket.send(b)
if nbSent == 0:
raise OSError
elif nbSent < len(b):
self.sendBytes(b[nbSent:])
def available(self):
return len(self.rBuffer)
def read(self, nbBytes):
ret = bytes(self.rBuffer[0:nbBytes])
self.rBuffer = self.rBuffer[nbBytes:]
return ret
def _backgroundReception(self):
print("TCP/IP _backgroundReception start")
while self.isOpen:
try:
b = bytearray(self.socket.recv(4096))
self.rBuffer += b
except OSError:
pass
print("TCP/IP _backgroundReception end")
|
INTechSenpai/eurobotruck
|
debug_tools/wiimote_controller/low_level_com.py
|
Python
|
gpl-3.0
| 5,895 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# scikit-tensor documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 20 14:28:17 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.pngmath',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'numpydoc'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'scikit-tensor'
copyright = '2016, Maximilian Nickel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '**tests**', '**setup**', '**extern**',
'**data**']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'dspydoc'
# -- Options for LaTeX output ---------------------------------------------
pngmath_latex_preamble = (
'\\usepackage{amsmath}\n'
'\\usepackage{amssymb}\n'
'\\newcommand{\\unfold}[2]{{#1}_{(#2)}}\n'
'\\newcommand{\\ten}[1]{\\mathcal{#1}}\n'
'\\newcommand{\\kr}{\\otimes}'
)
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
'preamble': pngmath_latex_preamble,
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'index.tex', 'scikit-tensor Documentation',
'Maximilian Nickel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'scikit-tensor', 'scikit-tensor Documentation',
['Maximilian Nickel'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'scikit-tensor', 'scikit-tensor Documentation',
'Maximilian Nickel', 'scikit-tensor',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
|
mnick/scikit-tensor
|
docs/conf.py
|
Python
|
gpl-3.0
| 8,802 |
"""
WSGI config for gamesDB project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamesDB.settings")
application = get_wsgi_application()
|
pbalaguer19/GamesDB-SistemesWeb2016
|
gamesDB/wsgi.py
|
Python
|
gpl-3.0
| 391 |
# uses scipy's sparse linear algebra module to do LU factorization
import sys
import time
import numpy as np
import numpy.linalg as nla
import scipy.io as scio
import scipy.sparse.linalg as sla
def LUsparse(matfile):
# start timing computations
start_time = time.time()
# import A and b from file given on command line after script name
mat_contents = scio.loadmat(matfile)
A = mat_contents['A']
b = mat_contents['b']
b = b.toarray()
# now we simply use the module to solve the equation Ax = b:
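    # splu() returns a SuperLU factorization object; its solve() method applies
    # the factorization to the right-hand side, so "Ainv" holds a factorization
    # rather than an explicit inverse.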
Ainv = sla.splu(A)
X = Ainv.solve(b)
# stop timing computations
end_time = time.time()
comp_time = end_time - start_time
X_and_comptime = [X, comp_time]
return(X_and_comptime)
# to run this script on its own, un-comment the following lines
#ans = LUsparse(sys.argv[1])
#
#print(ans[0])
#print(ans[1])
|
OliverEvans96/rte_matrix
|
factorizations/LUsparse.py
|
Python
|
gpl-3.0
| 886 |
__author__ = "Horea Christian"
import argh
from pyvote.utils.votes import one
def main():
argh.dispatch_commands([one])
if __name__ == '__main__':
main()
|
TheChymera/PyVote
|
pyvote/cli.py
|
Python
|
gpl-3.0
| 159 |
import os, datetime, urlparse, string, urllib, re
import time, functools, cgi
import json
from netlib import http
def timestamp():
"""
Returns a serializable UTC timestamp.
"""
return time.time()
def format_timestamp(s):
s = time.localtime(s)
d = datetime.datetime.fromtimestamp(time.mktime(s))
return d.strftime("%Y-%m-%d %H:%M:%S")
def isBin(s):
"""
        Does this string contain any binary (non-printable or non-ASCII) characters?
"""
for i in s:
i = ord(i)
if i < 9:
return True
elif i > 13 and i < 32:
return True
elif i > 126:
return True
return False
def isXML(s):
for i in s:
if i in "\n \t":
continue
elif i == "<":
return True
else:
return False
def pretty_json(s):
try:
p = json.loads(s)
except ValueError:
return None
return json.dumps(p, sort_keys=True, indent=4).split("\n")
def urldecode(s):
"""
Takes a urlencoded string and returns a list of (key, value) tuples.
"""
return cgi.parse_qsl(s, keep_blank_values=True)
def urlencode(s):
"""
Takes a list of (key, value) tuples and returns a urlencoded string.
"""
s = [tuple(i) for i in s]
return urllib.urlencode(s, False)
def del_all(dict, keys):
for key in keys:
if key in dict:
del dict[key]
def pretty_size(size):
suffixes = [
("B", 2**10),
("kB", 2**20),
("MB", 2**30),
]
for suf, lim in suffixes:
if size >= lim:
continue
else:
x = round(size/float(lim/2**10), 2)
if x == int(x):
x = int(x)
return str(x) + suf
class Data:
def __init__(self, name):
m = __import__(name)
dirname, _ = os.path.split(m.__file__)
self.dirname = os.path.abspath(dirname)
def path(self, path):
"""
Returns a path to the package data housed at 'path' under this
            module. Path can be a path to a file, or to a directory.
This function will raise ValueError if the path does not exist.
"""
fullpath = os.path.join(self.dirname, path)
if not os.path.exists(fullpath):
raise ValueError, "dataPath: %s does not exist."%fullpath
return fullpath
pkg_data = Data(__name__)
class LRUCache:
"""
A decorator that implements a self-expiring LRU cache for class
methods (not functions!).
Cache data is tracked as attributes on the object itself. There is
therefore a separate cache for each object instance.
"""
def __init__(self, size=100):
self.size = size
def __call__(self, f):
cacheName = "_cached_%s"%f.__name__
cacheListName = "_cachelist_%s"%f.__name__
size = self.size
@functools.wraps(f)
def wrap(self, *args):
if not hasattr(self, cacheName):
setattr(self, cacheName, {})
setattr(self, cacheListName, [])
cache = getattr(self, cacheName)
cacheList = getattr(self, cacheListName)
if cache.has_key(args):
cacheList.remove(args)
cacheList.insert(0, args)
return cache[args]
else:
ret = f(self, *args)
cacheList.insert(0, args)
cache[args] = ret
if len(cacheList) > size:
d = cacheList.pop()
cache.pop(d)
return ret
return wrap
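# Illustrative use of the LRUCache decorator above (Resolver is a hypothetical
# example class, not part of this module):
#
#   class Resolver:
#       @LRUCache(size=32)
#       def lookup(self, host):
#           return expensive_dns_query(host)
#
# Each Resolver instance then keeps its own cache of the 32 most recent results.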
def parse_proxy_spec(url):
p = http.parse_url(url)
if not p or not p[1]:
return None
return p[:3]
def parse_content_type(c):
"""
A simple parser for content-type values. Returns a (type, subtype,
parameters) tuple, where type and subtype are strings, and parameters
is a dict. If the string could not be parsed, return None.
E.g. the following string:
text/html; charset=UTF-8
Returns:
("text", "html", {"charset": "UTF-8"})
"""
parts = c.split(";", 1)
ts = parts[0].split("/", 1)
if len(ts) != 2:
return None
d = {}
if len(parts) == 2:
for i in parts[1].split(";"):
clause = i.split("=", 1)
if len(clause) == 2:
d[clause[0].strip()] = clause[1].strip()
return ts[0].lower(), ts[1].lower(), d
def hostport(scheme, host, port):
"""
        Returns the host component, with a port specification if needed.
"""
if (port, scheme) in [(80, "http"), (443, "https")]:
return host
else:
return "%s:%s"%(host, port)
def unparse_url(scheme, host, port, path=""):
"""
        Returns a URL string, constructed from the specified components.
"""
return "%s://%s%s"%(scheme, hostport(scheme, host, port), path)
def clean_hanging_newline(t):
"""
Many editors will silently add a newline to the final line of a
document (I'm looking at you, Vim). This function fixes this common
problem at the risk of removing a hanging newline in the rare cases
where the user actually intends it.
"""
if t and t[-1] == "\n":
return t[:-1]
return t
def parse_size(s):
"""
Parses a size specification. Valid specifications are:
123: bytes
123k: kilobytes
123m: megabytes
123g: gigabytes
"""
if not s:
return None
mult = None
if s[-1].lower() == "k":
mult = 1024**1
elif s[-1].lower() == "m":
mult = 1024**2
elif s[-1].lower() == "g":
mult = 1024**3
if mult:
s = s[:-1]
else:
mult = 1
try:
return int(s) * mult
except ValueError:
raise ValueError("Invalid size specification: %s"%s)
def safe_subn(pattern, repl, target, *args, **kwargs):
"""
There are Unicode conversion problems with re.subn. We try to smooth
that over by casting the pattern and replacement to strings. We really
        need a better solution that is aware of the actual content encoding.
"""
return re.subn(str(pattern), str(repl), target, *args, **kwargs)
|
win0x86/Lab
|
mitm/libmproxy/utils.py
|
Python
|
gpl-3.0
| 6,325 |
from gpiozero import LEDBoard
from gpiozero.tools import random_values
from signal import pause
tree = LEDBoard(*range(2,28),pwm=True)
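# Drive every LED from an endless stream of random PWM values, refreshed every
# 0.1 s, to produce a twinkling effect.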
for led in tree:
led.source_delay = 0.1
led.source = random_values()
pause()
|
markcarline/CoderDojo
|
project04/xmas-tree-random.py
|
Python
|
gpl-3.0
| 214 |
from pupa.scrape import Scraper, Person
from .utils import MDBMixin
class NJPersonScraper(Scraper, MDBMixin):
def scrape(self, session=None):
if not session:
session = self.jurisdiction.legislative_sessions[-1]['name']
self.info('no session specified, using %s', session)
year_abr = session[0:4]
self._init_mdb(year_abr)
roster_csv = self.access_to_csv('Roster')
bio_csv = self.access_to_csv('LegBio')
photos = {}
for rec in bio_csv:
photos[rec['Roster Key']] = rec['URLPicture']
for rec in roster_csv:
first_name = rec["Firstname"]
middle_name = rec["MidName"]
last_name = rec["LastName"]
suffix = rec["Suffix"]
full_name = first_name + " " + middle_name + " " + last_name + " " + suffix
            full_name = full_name.replace('  ', ' ')  # collapse the double space left by an empty middle name
full_name = full_name[0: len(full_name) - 1]
district = str(int(rec["District"]))
party = rec["Party"]
if party == 'R':
party = "Republican"
elif party == 'D':
party = "Democratic"
else:
party = party
chamber = rec["House"]
if chamber == 'A':
chamber = "lower"
elif chamber == 'S':
chamber = "upper"
leg_status = rec["LegStatus"]
# skip Deceased/Retired members
if leg_status != 'Active':
continue
phone = rec["Phone"] or None
email = None
if rec["Email"]:
email = rec["Email"]
try:
photo_url = photos[rec['Roster Key']]
except KeyError:
photo_url = ''
self.warning('no photo url for %s', rec['Roster Key'])
url = ('http://www.njleg.state.nj.us/members/bio.asp?Leg=' +
str(int(rec['Roster Key'])))
address = '{0}\n{1}, {2} {3}'.format(rec['Address'], rec['City'],
rec['State'], rec['Zipcode'])
gender = {'M': 'Male', 'F': 'Female'}[rec['Sex']]
person = Person(
name=full_name,
district=district,
primary_org=chamber,
party=party,
image=photo_url,
gender=gender,
)
person.add_link(url)
person.add_source(url)
person.add_source('http://www.njleg.state.nj.us/downloads.asp')
person.add_contact_detail(type='address', value=address, note='District Office')
if phone is not None:
person.add_contact_detail(type='voice', value=phone, note='District Office')
if email is not None:
person.add_contact_detail(type='email', value=email, note='District Office')
yield person
|
cliftonmcintosh/openstates
|
openstates/nj/people.py
|
Python
|
gpl-3.0
| 2,988 |
#!/usr/bin/env python
# -*-coding: utf-8 -*-
import wx
import os
from ll_global import *
import ll_menubar
import ll_textwin
import ll_filterwin
class LovelyLogUI(wx.Frame):
def __init__(self, parent, id):
wx.Frame.__init__(self, parent, id, 'lovely log', style=wx.MAXIMIZE |wx.DEFAULT_FRAME_STYLE)
self.title = "Lovely log"
self._handlers = list()
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
#---- Menus ----#
menuBar = ll_menubar.LlMenuBar()
self.SetMenuBar(menuBar)
#---- main window ----#
self.createWindow()
#---- Actions to take on menu events ----#
self._handlers.extend([ # File Menu
(ID_OPEN, self.OnOpen),
(ID_RELOAD, self.OnReload),
(ID_LOAD_FILTERS, self.OnLoadFilters),
(ID_SAVE_FILTERS, self.OnSaveFilters),
(ID_EXIT, self.OnCloseWindow),
# Edit Menu
(ID_COPY, self.OnCopy),
(ID_SELECTALL, self.OnSelectAll),
(ID_FONT, self.OnFont),
(ID_FIND, self.OnFind),
(ID_FIND_PREVIOUS, self.OnFindPrevious),
(ID_FIND_NEXT, self.OnFindNext),
# About Menu
(ID_ABOUT, self.OnAbout)])
self.bindHandlerForId()
def createWindow(self):
self.win = wx.SplitterWindow(self)
self.textWin = ll_textwin.LlTextWin(self.win)
self.filterWin = ll_filterwin.LlFilterWin(self.win)
self.win.SplitHorizontally(self.textWin, self.filterWin)
self.win.SetSashGravity(0.7)
def bindHandlerForId(self):
for tId, handler in self._handlers:
self.Bind(wx.EVT_MENU, handler, id=tId)
def DoOpen(self, evt, fname=u'', lunm=-1):
print ("do open")
dlg = wx.FileDialog(self, "Open", os.getcwd(),
style=wx.OPEN | wx.MULTIPLE | wx.CHANGE_DIR)
print "open dlg"
if dlg.ShowModal() == wx.ID_OK:
print "OK"
self.filename = dlg.GetPath()
print "Next set title"
self.SetTitle(self.title + ' -- ' + self.filename)
print self.filename
dlg.Destroy()
def OnOpen(self, event):
if event.GetId() == ID_OPEN:
self.DoOpen(event)
else:
pass
def OnReload(self, event): pass
def OnLoadFilters(self, event): pass
def OnSaveFilters(self, event): pass
def OnCopy(self, event): pass
def OnSelectAll(self, event): pass
def OnFont(self, event): pass
def OnFind(self, event): pass
def OnFindPrevious(self, event): pass
def OnFindNext(self, event): pass
def OnAbout(self, event): pass
def OnCloseWindow(self, event):
self.Destroy()
# main function
if __name__ == '__main__':
app = wx.PySimpleApp()
frame = LovelyLogUI(parent=None, id=-1)
frame.Show()
app.MainLoop()
|
sdphome/LovelyLog
|
src/lovely_log.py
|
Python
|
gpl-3.0
| 3,182 |
import struct
from socket import error
from .exceptions import ProtocolError
from .exceptions import WebSocketError
from .exceptions import FrameTooLargeException
from .utf8validator import Utf8Validator
MSG_SOCKET_DEAD = "Socket is dead"
MSG_ALREADY_CLOSED = "Connection is already closed"
class WebSocket(object):
"""
Base class for supporting websocket operations.
:ivar environ: The http environment referenced by this connection.
:ivar closed: Whether this connection is closed/closing.
:ivar stream: The underlying file like object that will be read from /
written to by this WebSocket object.
"""
__slots__ = ('utf8validator', 'utf8validate_last', 'environ', 'closed',
'stream', 'raw_write', 'raw_read', 'handler')
OPCODE_CONTINUATION = 0x00
OPCODE_TEXT = 0x01
OPCODE_BINARY = 0x02
OPCODE_CLOSE = 0x08
OPCODE_PING = 0x09
OPCODE_PONG = 0x0a
def __init__(self, environ, stream, handler):
self.environ = environ
self.closed = False
self.stream = stream
self.raw_write = stream.write
self.raw_read = stream.read
self.utf8validator = Utf8Validator()
self.handler = handler
def __del__(self):
try:
self.close()
except:
# close() may fail if __init__ didn't complete
pass
def _decode_bytes(self, bytestring):
"""
Internal method used to convert the utf-8 encoded bytestring into
unicode.
If the conversion fails, the socket will be closed.
"""
if not bytestring:
return u''
try:
return bytestring.decode('utf-8')
except UnicodeDecodeError:
self.close(1007)
raise
def _encode_bytes(self, text):
"""
:returns: The utf-8 byte string equivalent of `text`.
"""
if isinstance(text, str):
return text
if not isinstance(text, unicode):
text = unicode(text or '')
return text.encode('utf-8')
def _is_valid_close_code(self, code):
"""
:returns: Whether the returned close code is a valid hybi return code.
"""
if code < 1000:
return False
if 1004 <= code <= 1006:
return False
if 1012 <= code <= 1016:
return False
if code == 1100:
# not sure about this one but the autobahn fuzzer requires it.
return False
if 2000 <= code <= 2999:
return False
return True
@property
def current_app(self):
if hasattr(self.handler.server.application, 'current_app'):
return self.handler.server.application.current_app
else:
# For backwards compatibility reasons
class MockApp():
def on_close(self, *args):
pass
return MockApp()
@property
def origin(self):
if not self.environ:
return
return self.environ.get('HTTP_ORIGIN')
@property
def protocol(self):
if not self.environ:
return
return self.environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL')
@property
def version(self):
if not self.environ:
return
return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION')
@property
def path(self):
if not self.environ:
return
return self.environ.get('PATH_INFO')
@property
def logger(self):
return self.handler.logger
def handle_close(self, header, payload):
"""
Called when a close frame has been decoded from the stream.
:param header: The decoded `Header`.
:param payload: The bytestring payload associated with the close frame.
"""
if not payload:
self.close(1000, None)
return
if len(payload) < 2:
raise ProtocolError('Invalid close frame: {0} {1}'.format(
header, payload))
code = struct.unpack('!H', str(payload[:2]))[0]
payload = payload[2:]
if payload:
validator = Utf8Validator()
val = validator.validate(payload)
if not val[0]:
raise UnicodeError
if not self._is_valid_close_code(code):
raise ProtocolError('Invalid close code {0}'.format(code))
self.close(code, payload)
def handle_ping(self, header, payload):
self.send_frame(payload, self.OPCODE_PONG)
def handle_pong(self, header, payload):
pass
def read_frame(self):
"""
Block until a full frame has been read from the socket.
        This is an internal method: calling it directly will not clean up
        correctly if an exception is raised. Use `receive` instead.
:return: The header and payload as a tuple.
"""
header = Header.decode_header(self.stream)
if header.flags:
raise ProtocolError
if not header.length:
return header, ''
try:
payload = self.raw_read(header.length)
except error:
payload = ''
except Exception:
# TODO log out this exception
payload = ''
if len(payload) != header.length:
raise WebSocketError('Unexpected EOF reading frame payload')
if header.mask:
payload = header.unmask_payload(payload)
return header, payload
def validate_utf8(self, payload):
# Make sure the frames are decodable independently
self.utf8validate_last = self.utf8validator.validate(payload)
if not self.utf8validate_last[0]:
raise UnicodeError("Encountered invalid UTF-8 while processing "
"text message at payload octet index "
"{0:d}".format(self.utf8validate_last[3]))
def read_message(self):
"""
Return the next text or binary message from the socket.
        This is an internal method: calling it directly will not clean up
        correctly if an exception is raised. Use `receive` instead.
"""
opcode = None
message = ""
while True:
header, payload = self.read_frame()
f_opcode = header.opcode
if f_opcode in (self.OPCODE_TEXT, self.OPCODE_BINARY):
# a new frame
if opcode:
raise ProtocolError("The opcode in non-fin frame is "
"expected to be zero, got "
"{0!r}".format(f_opcode))
# Start reading a new message, reset the validator
self.utf8validator.reset()
self.utf8validate_last = (True, True, 0, 0)
opcode = f_opcode
elif f_opcode == self.OPCODE_CONTINUATION:
if not opcode:
raise ProtocolError("Unexpected frame with opcode=0")
elif f_opcode == self.OPCODE_PING:
self.handle_ping(header, payload)
continue
elif f_opcode == self.OPCODE_PONG:
self.handle_pong(header, payload)
continue
elif f_opcode == self.OPCODE_CLOSE:
self.handle_close(header, payload)
return
else:
raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode))
if opcode == self.OPCODE_TEXT:
self.validate_utf8(payload)
message += payload
if header.fin:
break
if opcode == self.OPCODE_TEXT:
self.validate_utf8(message)
return message
else:
return bytearray(message)
def receive(self):
"""
Read and return a message from the stream. If `None` is returned, then
the socket is considered closed/errored.
"""
if self.closed:
self.current_app.on_close(MSG_ALREADY_CLOSED)
raise WebSocketError(MSG_ALREADY_CLOSED)
try:
return self.read_message()
except UnicodeError:
self.close(1007)
except ProtocolError:
self.close(1002)
except error:
self.current_app.on_close(MSG_SOCKET_DEAD)
return None
def send_frame(self, message, opcode):
"""
Send a frame over the websocket with message as its payload
"""
if self.closed:
self.current_app.on_close(MSG_ALREADY_CLOSED)
raise WebSocketError(MSG_ALREADY_CLOSED)
if opcode == self.OPCODE_TEXT:
message = self._encode_bytes(message)
elif opcode == self.OPCODE_BINARY:
message = str(message)
header = Header.encode_header(True, opcode, '', len(message), 0)
try:
self.raw_write(header + message)
except error:
raise WebSocketError("Socket is dead")
def send(self, message, binary=None):
"""
Send a frame over the websocket with message as its payload
"""
if binary is None:
binary = not isinstance(message, (str, unicode))
opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT
try:
self.send_frame(message, opcode)
except WebSocketError:
self.current_app.on_close(MSG_SOCKET_DEAD)
raise WebSocketError(MSG_SOCKET_DEAD)
def close(self, code=1000, message=''):
"""
Close the websocket and connection, sending the specified code and
message. The underlying socket object is _not_ closed, that is the
responsibility of the initiator.
"""
if self.closed:
self.current_app.on_close(MSG_ALREADY_CLOSED)
try:
message = self._encode_bytes(message)
self.send_frame(
struct.pack('!H%ds' % len(message), code, message),
opcode=self.OPCODE_CLOSE)
except WebSocketError:
# Failed to write the closing frame but it's ok because we're
# closing the socket anyway.
self.logger.debug("Failed to write closing frame -> closing socket")
finally:
self.logger.debug("Closed WebSocket")
self.closed = True
self.stream = None
self.raw_write = None
self.raw_read = None
self.environ = None
self.current_app.on_close("Connection closed")
class Stream(object):
"""
    Wraps the handler's socket/rfile attributes and makes them into a file-like
object that can be read from/written to by the lower level websocket api.
"""
__slots__ = ('handler', 'read', 'write')
def __init__(self, handler):
self.handler = handler
self.read = handler.rfile.read
self.write = handler.socket.sendall
class Header(object):
__slots__ = ('fin', 'mask', 'opcode', 'flags', 'length')
FIN_MASK = 0x80
OPCODE_MASK = 0x0f
MASK_MASK = 0x80
LENGTH_MASK = 0x7f
RSV0_MASK = 0x40
RSV1_MASK = 0x20
RSV2_MASK = 0x10
# bitwise mask that will determine the reserved bits for a frame header
HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK
def __init__(self, fin=0, opcode=0, flags=0, length=0):
self.mask = ''
self.fin = fin
self.opcode = opcode
self.flags = flags
self.length = length
def mask_payload(self, payload):
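        # XOR every payload byte with the 4-byte masking key (RFC 6455 masking);
        # applying the same operation a second time unmasks the data.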
payload = bytearray(payload)
mask = bytearray(self.mask)
for i in xrange(self.length):
payload[i] ^= mask[i % 4]
return str(payload)
# it's the same operation
unmask_payload = mask_payload
def __repr__(self):
return ("<Header fin={0} opcode={1} length={2} flags={3} at "
"0x{4:x}>").format(self.fin, self.opcode, self.length,
self.flags, id(self))
@classmethod
def decode_header(cls, stream):
"""
Decode a WebSocket header.
:param stream: A file like object that can be 'read' from.
:returns: A `Header` instance.
"""
read = stream.read
data = read(2)
if len(data) != 2:
raise WebSocketError("Unexpected EOF while decoding header")
first_byte, second_byte = struct.unpack('!BB', data)
header = cls(
fin=first_byte & cls.FIN_MASK == cls.FIN_MASK,
opcode=first_byte & cls.OPCODE_MASK,
flags=first_byte & cls.HEADER_FLAG_MASK,
length=second_byte & cls.LENGTH_MASK)
has_mask = second_byte & cls.MASK_MASK == cls.MASK_MASK
if header.opcode > 0x07:
if not header.fin:
raise ProtocolError(
"Received fragmented control frame: {0!r}".format(data))
# Control frames MUST have a payload length of 125 bytes or less
if header.length > 125:
raise FrameTooLargeException(
"Control frame cannot be larger than 125 bytes: "
"{0!r}".format(data))
if header.length == 126:
# 16 bit length
data = read(2)
if len(data) != 2:
raise WebSocketError('Unexpected EOF while decoding header')
header.length = struct.unpack('!H', data)[0]
elif header.length == 127:
# 64 bit length
data = read(8)
if len(data) != 8:
raise WebSocketError('Unexpected EOF while decoding header')
header.length = struct.unpack('!Q', data)[0]
if has_mask:
mask = read(4)
if len(mask) != 4:
raise WebSocketError('Unexpected EOF while decoding header')
header.mask = mask
return header
@classmethod
def encode_header(cls, fin, opcode, mask, length, flags):
"""
Encodes a WebSocket header.
:param fin: Whether this is the final frame for this opcode.
:param opcode: The opcode of the payload, see `OPCODE_*`
        :param mask: The masking key (4 bytes), or '' if the payload is not masked.
:param length: The length of the frame.
:param flags: The RSV* flags.
:return: A bytestring encoded header.
"""
first_byte = opcode
second_byte = 0
extra = ''
if fin:
first_byte |= cls.FIN_MASK
if flags & cls.RSV0_MASK:
first_byte |= cls.RSV0_MASK
if flags & cls.RSV1_MASK:
first_byte |= cls.RSV1_MASK
if flags & cls.RSV2_MASK:
first_byte |= cls.RSV2_MASK
# now deal with length complexities
if length < 126:
second_byte += length
elif length <= 0xffff:
second_byte += 126
extra = struct.pack('!H', length)
elif length <= 0xffffffffffffffff:
second_byte += 127
extra = struct.pack('!Q', length)
else:
raise FrameTooLargeException
if mask:
second_byte |= cls.MASK_MASK
extra += mask
return chr(first_byte) + chr(second_byte) + extra
|
hatnote/barnsworth
|
barnsworth/geventwebsocket/websocket.py
|
Python
|
gpl-3.0
| 15,384 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""""
/***************************************************************************
least_cost.py
 Perform a least-cost path search on a raster converted into a graph
Need : OsGeo library
-------------------
begin : 2017-07-07
git sha : 2017-07-07
copyright : (C) 2017 by Peillet Sebastien
email : peillet.seb@gmail.com
***************************************************************************/
"""
import os
import sys
from osgeo import gdal
from osgeo import ogr
from osgeo import osr
from osgeo import gdal_array
from osgeo import gdalconst
from collections import defaultdict
from datetime import datetime
import math
#Timer to show processing time
class Timer():
startTimes=dict()
stopTimes=dict()
@staticmethod
def start(key = 0):
Timer.startTimes[key] = datetime.now()
Timer.stopTimes[key] = None
@staticmethod
def stop(key = 0):
Timer.stopTimes[key] = datetime.now()
@staticmethod
def show(key = 0):
if key in Timer.startTimes:
if Timer.startTimes[key] is not None:
if key in Timer.stopTimes:
if Timer.stopTimes[key] is not None:
delta = Timer.stopTimes[key] - Timer.startTimes[key]
print delta
class Graph ():
def __init__(self):
self.nodes=set()
self.edges=defaultdict(list)
self.slope_info=defaultdict(list)
self.length = {}
self.slope = {}
self.weight = {}
def add_nodes(self, id):
self.nodes.add(id)
def add_edge(self, beg, end, w):
self.edges[beg].append(end)
self.weight[(beg,end)] = w
def add_info(self, beg, end, length, slope):
self.slope_info[beg].append(end)
self.length[(beg,end)] = length
self.slope[(beg,end)] = slope
def imp_raster():
print 'ENTER input raster path'
iFile_name=raw_input()
# Open the input raster to retrieve values in an array
data = gdal.Open(iFile_name,1)
proj = data.GetProjection()
scr = data.GetGeoTransform()
resolution = scr[1]
band=data.GetRasterBand(1)
iArray=band.ReadAsArray()
return iArray, scr, proj, resolution
def imp_init_point(gt):
print 'ENTER input init point path'
iFile_name=raw_input()
init_list = []
#Open the init point shapefile to project each feature in pixel coordinates
ds=ogr.Open(iFile_name)
lyr=ds.GetLayer()
src = lyr.GetSpatialRef()
for feat in lyr:
geom = feat.GetGeometryRef()
mx,my=geom.GetX(), geom.GetY()
#Convert from map to pixel coordinates.
px = int(( my - gt[3] + gt[5]/2) / gt[5])
py = int(((mx - gt[0] - gt[1]/2) / gt[1]))
init_list.append((px,py))
#return the list of init point with x,y pixel coordinates
return init_list, src
def output_prep(src):
#Initialize the shapefile output
print 'path output :'
oLineFile_name=raw_input()
oDriver=ogr.GetDriverByName("ESRI Shapefile")
if os.path.exists(oLineFile_name):
oDriver.DeleteDataSource(oLineFile_name)
oDataSource=oDriver.CreateDataSource(oLineFile_name)
#Create a LineString layer
oLayer = oDataSource.CreateLayer("ridge",src,geom_type=ogr.wkbLineString)
#Add two fields to store the col_id and the pic_id
colID_field=ogr.FieldDefn("col_id",ogr.OFTString)
picID_field=ogr.FieldDefn("pic_id",ogr.OFTString)
weight_field=ogr.FieldDefn("weight",ogr.OFTReal)
oLayer.CreateField(colID_field)
oLayer.CreateField(picID_field)
oLayer.CreateField(weight_field)
return oLineFile_name
def out_point_prep(src):
print 'point output :'
oPointFile_name = raw_input()
oDriver=ogr.GetDriverByName("ESRI Shapefile")
if os.path.exists(oPointFile_name):
oDriver.DeleteDataSource(oPointFile_name)
oDataSource=oDriver.CreateDataSource(oPointFile_name)
#Create a LineString layer
oLayer = oDataSource.CreateLayer("point",src,geom_type=ogr.wkbPoint)
ordreID_field=ogr.FieldDefn("ordre_id",ogr.OFTString)
nodeID_field=ogr.FieldDefn("node_id",ogr.OFTString)
weightID_field=ogr.FieldDefn("weight",ogr.OFTReal)
pathID_field=ogr.FieldDefn("path_id",ogr.OFTString)
previous_field = ogr.FieldDefn("previous",ogr.OFTString)
oLayer.CreateField(ordreID_field)
oLayer.CreateField(nodeID_field)
oLayer.CreateField(weightID_field)
oLayer.CreateField(pathID_field)
oLayer.CreateField(previous_field)
return oPointFile_name
def rast_to_graph(rastArray, res, nb_edge, max_slope) :
G= Graph()
[H,W] = rastArray.shape
    #Shifts to get every edge from each node. For now, based on a 48-direction pattern like:
# | | | | 43| | 42| | | |
# ---|---|---|---|---|---|---|---|---|---|---
# | | | | | | | | | |
# ---|---|---|---|---|---|---|---|---|---|---
# | | | 30| 29| | 28| 27| | |
# ---|---|---|---|---|---|---|---|---|---|---
# | | 31| 14| 13| 12| 11| 10| 26| |
# ---|---|---|---|---|---|---|---|---|---|---
# 44| | 32| 15| 3 | 2 | 1 | 9 | 25| | 41
# ---|---|---|---|---|---|---|---|---|---|---
# | | | 16| 4 | 0 | 8 | 24| | |
# ---|---|---|---|---|---|---|---|---|---|---
# 45| | 33| 17| 5 | 6 | 7 | 23| 40| | 48
# ---|---|---|---|---|---|---|---|---|---|---
# | | 34| 18| 19| 20| 21| 22| 39| |
# ---|---|---|---|---|---|---|---|---|---|---
# | | | 35| 36| | 37| 38| | |
# ---|---|---|---|---|---|---|---|---|---|---
# | | | | | | | | | |
# ---|---|---|---|---|---|---|---|---|---|---
# | | | | 46| | 47| | | |
# px py
shift = [( 0, 0), #0
(-1, 1), #1
(-1, 0), #2
(-1, -1), #3
( 0, -1), #4
( 1, -1), #5
( 1, 0), #6
( 1, 1), #7
( 0, 1), #8
(-1, 2), #9
(-2, 2), #10
(-2, 1), #11
(-2, 0), #12
(-2, -1), #13
(-2, -2), #14
(-1, -2), #15
( 0, -2), #16
( 1, -2), #17
( 2, -2), #18
( 2, -1), #19
( 2, 0), #20
( 2, 1), #21
( 2, 2), #22
( 1, 2), #23
( 0, 2), #24
(-1, 3), #25
(-2, 3), #26
(-3, 2), #27
(-3, 1), #28
(-3, -1), #29
(-3, -2), #30
(-2, -3), #31
(-1, -3), #32
( 1, -3), #33
( 2, -3), #34
( 3, -2), #35
( 3, -1), #36
( 3, 1), #37
( 3, 2), #38
( 2, 3), #39
( 1, 3), #40
(-1, 5), #41
(-5, 1), #42
(-5, -1), #43
(-1, -5), #44
( 1, -5), #45
( 5, -1), #46
( 5, 1), #47
( 1, 5) #48
]
slope_calc_coord = [( 0, 0), #0
([ [shift[2] , shift[8]] ]), #1
([ [shift[4] , shift[8]] , [shift[3] , shift[1]] ]), #2
([ [shift[4] , shift[2]] ]), #3
([ [shift[6] , shift[2]] , [shift[5] , shift[3]] ]), #4
([ [shift[4] , shift[6]] ]), #5
([ [shift[8] , shift[4]] , [shift[7] , shift[5]] ]), #6
([ [shift[8] , shift[6]] ]), #7
([ [shift[2] , shift[6]] , [shift[1] , shift[7]] ]), #8
([ [shift[2] , shift[7]] , [shift[11] , shift[24]] , [shift[12], shift[8]] , [shift[1] , shift[23]] ]), #9
([ [shift[11] , shift[9]] , [shift[2] , shift[8]] ]) , #10
([ [shift[3] , shift[8]] , [shift[2] , shift[24]] , [shift[12], shift[9]] , [shift[13] , shift[1]] ]), #11
([ [shift[13] , shift[11]], [shift[3] , shift[1]] , [shift[4] , shift[8]] ]) , #12
([ [shift[4] , shift[1]] , [shift[3] , shift[11]] , [shift[16], shift[2]] , [shift[15] , shift[12]] ]), #13
([ [shift[4] , shift[2]] , [shift[15] , shift[13]] ]), #14
([ [shift[5] , shift[2]] , [shift[4] , shift[12]] , [shift[16], shift[13]] , [shift[17] , shift[3]] ]), #15
([ [shift[17] , shift[15]], [shift[5] , shift[3]] , [shift[6] , shift[2]] ]) , #16
([ [shift[6] , shift[3]] , [shift[20] , shift[4]] , [shift[5] , shift[15]] , [shift[19] , shift[16]] ]), #17
([ [shift[6] , shift[4]] , [shift[19] , shift[17]] ]), #18
([ [shift[7] , shift[4]] , [shift[6] , shift[16]] , [shift[21], shift[5]] , [shift[20] , shift[17]] ]), #19
([ [shift[8] , shift[4]] , [shift[5] , shift[7]] , [shift[21], shift[19]] ]), #20
([ [shift[8] , shift[5]] , [shift[24] , shift[6]] , [shift[7] , shift[19]] , [shift[23] , shift[20]] ]), #21
([ [shift[8] , shift[6]] , [shift[23] , shift[21]] ]), #22
([ [shift[1] , shift[6]] , [shift[8] , shift[20]] , [shift[24], shift[21]] , [shift[9] , shift[7]] ]), #23
([ [shift[2] , shift[6]] , [shift[7] , shift[1]] , [shift[9] , shift[23]] ]), #24
([ [shift[2] , shift[21]] , [shift[12] , shift[7]] , [shift[1], shift[22]] , [shift[11] , shift[23]] ]), #25
([ [shift[2] , shift[22]] , [shift[12] , shift[23]] , [shift[1], shift[39]] , [shift[13] , shift[7]] ]), #26
([ [shift[3] , shift[23]] , [shift[2] , shift[40]] , [shift[13], shift[24]] , [shift[14] , shift[8]] ]), #27
([ [shift[3] , shift[24]] , [shift[15] , shift[8]] , [shift[13], shift[9]] , [shift[14] , shift[1]] ]), #28
([ [shift[4] , shift[9]] , [shift[16] , shift[1]] , [shift[3], shift[10]] , [shift[15] , shift[11]] ]), #29
([ [shift[4] , shift[10]] , [shift[16] , shift[11]] , [shift[3], shift[27]] , [shift[17] , shift[1]] ]), #30
([ [shift[5] , shift[11]] , [shift[4] , shift[28]] , [shift[17], shift[12]] , [shift[18] , shift[2]] ]), #31
([ [shift[5] , shift[12]] , [shift[19] , shift[2]] , [shift[17], shift[13]] , [shift[18] , shift[3]] ]), #32
([ [shift[6] , shift[13]] , [shift[20] , shift[3]] , [shift[5], shift[14]] , [shift[19] , shift[15]] ]), #33
([ [shift[6] , shift[14]] , [shift[20] , shift[15]] , [shift[5], shift[31]] , [shift[21] , shift[3]] ]), #34
([ [shift[6] , shift[32]] , [shift[7] , shift[15]] , [shift[21], shift[16]] , [shift[22] , shift[4]] ]), #35
([ [shift[7] , shift[16]] , [shift[23] , shift[4]] , [shift[21], shift[17]] , [shift[22] , shift[5]] ]), #36
([ [shift[8] , shift[17]] , [shift[24] , shift[5]] , [shift[7], shift[18]] , [shift[23] , shift[19]] ]), #37
([ [shift[8] , shift[18]] , [shift[24] , shift[19]] , [shift[7], shift[35]] , [shift[23] , shift[36]] ]), #38
([ [shift[1] , shift[19]] , [shift[9] , shift[20]] , [shift[8], shift[36]] , [shift[10] , shift[6]] ]), #39
([ [shift[1] , shift[20]] , [shift[9] , shift[21]] , [shift[24], shift[37]] , [shift[11] , shift[6]] ]), #40
([ [shift[12] , shift[37]] , [shift[28] , shift[22]] , [shift[27], shift[39]] ]), #41
([ [shift[14] , shift[25]] , [shift[32] , shift[24]] , [shift[30], shift[26]] ]), #42
([ [shift[16] , shift[25]] , [shift[32] , shift[10]] , [shift[30], shift[26]] ]), #43
([ [shift[18] , shift[29]] , [shift[36] , shift[12]] , [shift[34], shift[30]] ]), #44
([ [shift[20] , shift[29]] , [shift[36] , shift[14]] , [shift[35], shift[31]] ]), #45
([ [shift[22] , shift[33]] , [shift[40] , shift[16]] , [shift[38], shift[34]] ]), #46
([ [shift[24] , shift[33]] , [shift[40] , shift[18]] , [shift[39], shift[35]] ]), #47
([ [shift[10] , shift[37]] , [shift[28] , shift[20]] , [shift[26], shift[38]] ]) #48
]
nb_edge+=1
#Loop over each pixel to convert it into nodes
for i in range(0,H) :
for j in range(0,W) :
#node id based on x and y pixel coordinates
nodeName = "x"+str(i)+"y"+str(j)
G.add_nodes(nodeName)
    #Loop over each pixel again to fill the slope and length dictionaries
for i in range(0,H) :
for j in range(0,W) :
nodeBeg = "x"+str(i)+"y"+str(j)
nodeBegValue= rastArray[i,j]
for index in range(1,nb_edge) :
x,y=shift[index]
nodeEnd="x"+str(i+x)+"y"+str(j+y)
try :
nodeEndValue= rastArray[i+x,j+y]
#Calculate cost on length + addcost based on slope percent
if index in [2,4,6,8] :
length = res
elif index in [1,3,5,7] :
length = res*math.sqrt(2)
elif index in [9,11,13,15,17,19,21,23]:
length = res*math.sqrt(res)
elif index in [10,14,18,22] :
length = 2*res*math.sqrt(2)
elif index in [12,16,20,24] :
length = 2*res
elif index in [25,28,29,32,33,36,37,40] :
length = res*math.sqrt(10)
elif index in [26,27,30,31,34,35,38,39] :
length = res*math.sqrt(13)
else :
length = res*math.sqrt(26)
slope = math.fabs(nodeEndValue-nodeBegValue)/length*100
# #max slope accepted in percent
# max_slope_wanted= 12
# if slope <= max_slope_wanted :
G.add_info(nodeBeg,nodeEnd,length,slope)
except IndexError :
continue
for i in range(0,H) :
for j in range(0,W) :
nodeBeg = "x"+str(i)+"y"+str(j)
for index in range(1,nb_edge) :
x,y=shift[index]
nodeEnd="x"+str(i+x)+"y"+str(j+y)
if (i+x) > 0 and (j+y) > 0 and (i+x) < H and (j+y) < W :
try :
length = G.length[(nodeBeg, nodeEnd)]
slope = G.slope[(nodeBeg, nodeEnd)]
if slope <= max_slope :
coords_list = slope_calc_coord[index]
c_slope_list=[]
c_slope = None
count = 0
for coords in coords_list :
lx,ly = coords[0]
nodeLeft="x"+str(i+lx)+"y"+str(j+ly)
rx,ry = coords[1]
nodeRight="x"+str(i+rx)+"y"+str(j+ry)
if (i+lx) > 0 and (j+ly) > 0 and (i+rx) > 0 and (j+ry) > 0 and\
(i+lx) < H and (j+ly) < W and (i+rx) < H and (j+ry) < W :
c_slope_list.append(G.slope[nodeLeft,nodeRight])
count+=1
if len(c_slope_list) == count and count != 0 :
c_slope = sum(c_slope_list) / len(c_slope_list)
pmax = 25
pmin = 60
larg = 4
if c_slope < pmax :
assise = larg/2
else :
assise = min(round((larg / 2*(1 + ((c_slope - pmax)/(pmin - pmax))**2)),2),larg)
talus = assise**2 *larg * (c_slope/100) / 2 /(larg - (c_slope/100))
addcost = talus
cost = length * addcost + length * 1
G.add_edge(nodeBeg, nodeEnd, cost)
except IndexError :
continue
return G
def dijkstra(graph, init, end_list, scr, method, threshold, out_point, nb_path):
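    # Dijkstra search from 'init' until one of the nodes in 'end_list' is settled.
    # With method 'angle' or 'radius', successor edges are additionally rejected
    # when the direction change (or turning radius) relative to the previous edge
    # violates the given threshold.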
#change the end point coordinates to graph id
end_name=[]
for end_point in end_list :
x,y=end_point
end_id = "x"+str(x)+"y"+str(y)
if end_id != init :
end_name.append(end_id)
#dict to get visited nodes and path
visited = {init: 0}
path = defaultdict(list)
nodes = set(graph.nodes)
#dijkstra algo
min_node = None
while nodes:
if min_node not in end_name:
min_node = None
for node in nodes:
if node in visited:
if node in end_name :
finish = node
if min_node is None:
min_node = node
elif visited[node] < visited[min_node]:
min_node = node
if min_node != None :
current_weight = visited[min_node]
if min_node in path :
pid,w = path[min_node][-1]
else :
pid = ''
if out_point != None :
createPoint(out_point, min_node, scr, current_weight, nb_path, pid)
nodes.remove(min_node)
for edge in graph.edges[min_node]:
if method == 'angle' :
if min_node in path :
pid,w = path[min_node][-1]
x1,y1 = id_to_coord(pid)
x2,y2 = id_to_coord(min_node)
x3,y3 = id_to_coord(edge)
az1 = math.degrees(math.atan2(x2 - x1, y2 - y1))
az2 = math.degrees(math.atan2(x3 - x2, y3 - y2))
if az1 < 0 and az2 > 0 :
angle = math.fabs(az1)+az2
elif az1 > 0 and az2 < 0 :
angle = math.fabs(az2)+az1
else :
angle = math.fabs(az1-az2)
if angle < -180 :
angle = angle + 360
if angle > 180 :
angle = angle - 360
if math.fabs(angle) <= threshold :
weight = current_weight + graph.weight[(min_node, edge)]
if edge not in visited or weight < visited[edge]:
visited[edge] = weight
path[edge].append((min_node,weight))
else :
weight = current_weight + graph.weight[(min_node, edge)]
if edge not in visited or weight < visited[edge]:
visited[edge] = weight
path[edge].append((min_node,weight))
if method == 'radius' :
if min_node in path :
pid,w = path[min_node][-1]
x1,y1 = id_to_coord(pid)
x2,y2 = id_to_coord(min_node)
x3,y3 = id_to_coord(edge)
if min(x1,x3) <= x2 <= max(x1,x3) and min(y1,y3) <= y2 <= max(y1,y3):
mag_v1 = math.sqrt((x1-x2)**2+(y1-y2)**2)
mag_v2 = math.sqrt((x3-x2)**2+(y3-y2)**2)
if mag_v1 < mag_v2 :
x_v2 , y_v2 = (x3 - x2, y3 - y2)
x3,y3 = x2+x_v2/mag_v2*mag_v1 ,y2+y_v2/mag_v2*mag_v1
elif mag_v2 < mag_v1 :
x_v2 , y_v2 = (x1 - x2, y1 - y2)
x1,y1 = x2+x_v2/mag_v1*mag_v2 ,y2+y_v2/mag_v1*mag_v2
x_v1 , y_v1 = (x2 - x1, y2 - y1)
x_v1_ort , y_v1_ort = y_v1 , -x_v1
x_v2 , y_v2 = (x3 - x2, y3 - y2)
x_v2_ort , y_v2_ort = y_v2 , -x_v2
c_v1_ort = y_v1_ort*x1+(-x_v1_ort)*y1
c_v1_ort = -c_v1_ort
c_v2_ort = y_v2_ort*x3+(-x_v2_ort)*y3
c_v2_ort = -c_v2_ort
e = [-y_v1_ort,x_v1_ort,c_v1_ort]
f = [-y_v2_ort,x_v2_ort,c_v2_ort]
x4 , y4, colineaire = equationResolve(e,f)
if (x4 != None and y4 != None) :
dist1 = math.sqrt((x1-x4)**2+(y1-y4)**2)*5
dist2 = math.sqrt((x3-x4)**2+(y3-y4)**2)*5
if dist1 >= threshold :
weight = current_weight + graph.weight[(min_node, edge)]
if edge not in visited or weight < visited[edge]:
visited[edge] = weight
path[edge].append((min_node,weight))
elif colineaire == True :
weight = current_weight + graph.weight[(min_node, edge)]
if edge not in visited or weight < visited[edge]:
visited[edge] = weight
path[edge].append((min_node,weight))
else :
weight = current_weight + graph.weight[(min_node, edge)]
if edge not in visited or weight < visited[edge]:
visited[edge] = weight
path[edge].append((min_node,weight))
else :
print 'no solution'
finish = None
break
else :
break
return path, finish, visited
def equationResolve(e1,e2):
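    # Solve the 2x2 linear system a1*x + b1*y = c1, a2*x + b2*y = c2 (each input
    # is [a, b, c]) by Cramer's rule; 'colineaire' is True when the determinant
    # is zero, i.e. the two lines are parallel.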
determinant=e1[0]*e2[1]-e1[1]*e2[0]
x , y = None,None
colineaire = False
if determinant != 0:
x=(e1[2]*e2[1]-e1[1]*e2[2])/determinant
y=(e1[0]*e2[2]-e1[2]*e2[0])/determinant
else :
colineaire = True
return x, y, colineaire
def id_to_coord(id):
id=id[1:]
px,py=id.split('y')
px,py=int(px),int(py)
return px,py
def ids_to_coord(lcp,gt):
#Reproj pixel coordinates to map coordinates
coord_list = []
for id in lcp :
id=id[1:]
px,py=id.split('y')
px,py=int(px),int(py)
#Convert from pixel to map coordinates.
mx = py * gt[1] + gt[0] + gt[1]/2
my = px * gt[5] + gt[3] + gt[5]/2
coord_list.append((mx,my))
#return the list of end point with x,y map coordinates
return coord_list
def create_ridge(oFile,lcp, col, pic, weight) :
driver= ogr.GetDriverByName("ESRI Shapefile")
#Open the output shapefile
iDataSource = driver.Open(oFile,1)
iLayer = iDataSource.GetLayer()
featDefn = iLayer.GetLayerDefn()
#Initiate feature
feat = ogr.Feature(featDefn)
#Initiate feature geometry
line = ogr.Geometry(ogr.wkbLineString)
for coord in lcp :
x,y = coord
#Add new vertice to the linestring
line.AddPoint(x,y)
feat.SetGeometry(line)
#Update the data field
feat.SetField("col_id",col)
feat.SetField("pic_id",pic)
feat.SetField("weight",weight)
iLayer.CreateFeature(feat)
feature = None
iDataSource = None
def createPoint(oFile, node, gt, weight, nb_path, previous) :
driver= ogr.GetDriverByName("ESRI Shapefile")
#Open the output shapefile
iDataSource = driver.Open(oFile,1)
iLayer = iDataSource.GetLayer()
featDefn = iLayer.GetLayerDefn()
count = iLayer.GetFeatureCount()
#Initiate feature
feat = ogr.Feature(featDefn)
px,py=id_to_coord(node)
#Initiate feature geometry
point = ogr.Geometry(ogr.wkbPoint)
mx = py * gt[1] + gt[0] + gt[1]/2
my = px * gt[5] + gt[3] + gt[5]/2
point.AddPoint(mx,my)
feat.SetGeometry(point)
feat.SetField('ordre_id',count+1)
feat.SetField('node_id',node)
feat.SetField('weight',weight)
feat.SetField('path_id', nb_path)
feat.SetField('previous', previous)
iLayer.CreateFeature(feat)
feature = None
iDataSource = None
def main() :
#Main function
print 'Import raster...'
in_array, scr, proj, res = imp_raster()
print 'Import raster done'
print 'Import vector ...'
beg_list, scr_shp = imp_init_point(scr)
print '%s feature(s)' % len(beg_list)
print 'Import vector done'
print 'Name vector output...'
print 'path :'
out_line=output_prep(scr_shp)
print 'Get points process history ? (y/n)'
point_save = raw_input()
if point_save == 'y' :
out_point = out_point_prep(scr_shp)
else :
out_point = None
print 'Edges model : (8/24/40/48)'
nb_edge = int(input())
if nb_edge not in [8,24,40,48] :
print "Wrong edges model, %s edges model does'nt exist" % str(nb_edge)
print 'Method a/r (angle/radius) :'
method = raw_input()
if method == 'a' or method == 'angle' :
method = 'angle'
print "Angle max (%) :"
threshold = int(input())
elif method == 'r' or method == 'radius' :
method = 'radius'
print "Radius min (m) :"
threshold = int(input())
else :
print "Wrong method"
exit()
print 'Along slope limit : (percent, ex : 10 for 10 %)'
max_slope= int(input())
time=Timer()
time.start()
print 'Convert rast to graph...'
G = rast_to_graph(in_array, res, nb_edge, max_slope)
print 'Convert rast to graph done'
print '%s nodes in the graph' % len(G.nodes)
sum_nodes=0
for node in G.nodes :
sum_nodes += len(G.edges[node])
print '%s edges in the graph' % sum_nodes
#Begin to search least_cost path for each beg point
i=0
for beg_point in beg_list :
x,y = beg_point
beg_id = "x"+str(x)+"y"+str(y)
print 'Searching the least cost path for %s' % beg_id
path, end_id, visited = dijkstra(G,beg_id,beg_list, scr, method, threshold, out_point,i)
i+=1
print 'Searching the least cost path done'
if end_id != None :
act=end_id
leastCostPath=[end_id]
print 'Create the least cost path as OGR LineString...'
while act!=beg_id :
id,w=path[act][-1]
act=id
leastCostPath.append(id)
filename="lcp"+str(i)+".txt"
file = open(filename,"w")
file.write(str(leastCostPath))
file.close()
filename="path"+str(i)+".txt"
file = open(filename,"w")
file.write(str(path))
file.close()
coord_list = ids_to_coord(leastCostPath,scr)
id,w=path[end_id][-1]
create_ridge(out_line,coord_list,beg_id,end_id,w)
print 'Create the least cost path as OGR LineString done'
time.stop()
print 'processing Time :'
time.show()
if __name__ == '__main__':
sys.exit(main())
|
SebastienPeillet/rast_to_graph
|
least_terr_cost.py
|
Python
|
gpl-3.0
| 30,577 |
# -*- coding: utf-8 -*-
############################ Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2014 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2016 Peter Buckley <dx-pbuckley@users.noreply.github.com> #
# Copyright 2018 sfdye <tsfdye@gmail.com> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import Framework
class ExposeAllAttributes(Framework.TestCase):
def testAllClasses(self):
authenticatedUser = self.g.get_user()
namedUser = self.g.get_user("nvie")
repository = authenticatedUser.get_repo("PyGithub")
organization = self.g.get_organization("BeaverSoftware")
plan = authenticatedUser.plan
branch = repository.get_branch("master")
commit = repository.get_commit("1292bf0e22c796e91cc3d6e24b544aece8c21f2a")
commitStats = commit.stats
commitStatus = commit.get_statuses()[0]
milestone = repository.get_milestone(17)
gist = self.g.get_gist("149016")
gistComment = gist.get_comment(4565)
gistFile = gist.files[".gitignore"]
gistHistoryState = gist.history[0]
gitCommit = repository.get_git_commit("be37b8a7f3a68631c32672dcd84d9eba27438ee6")
gitAuthor = gitCommit.author
gitTree = repository.get_git_tree("6f7c2d8c66d78863f7b91792deaead619799a1ce")
gitTreeElement = gitTree.tree[0]
gitBlob = repository.get_git_blob("681fb61f1761743a02f5c790f1c762cbfe8cfad1")
gitRef = repository.get_git_ref("tags/v1.17.0")
gitObject = gitRef.object
issue = repository.get_issue(188)
issueComment = issue.get_comment(22686536)
issueEvent = issue.get_events()[0]
issuePullRequest = issue.pull_request
gitignoreTemplate = self.g.get_gitignore_template("Python")
team = organization.get_team(141487)
label = repository.get_label("Bug")
pullRequest = repository.get_pull(31)
pullRequestComment = pullRequest.get_review_comment(1580134)
pullRequestPart = pullRequest.base
file = pullRequest.get_files()[0]
commitComment = repository.get_comment(3630301)
status = self.g.get_api_status()
statusMessage = self.g.get_last_api_status_message()
rateLimit = self.g.get_rate_limit()
rate = rateLimit.rate
hook = repository.get_hooks()[0]
hookResponse = hook.last_response
hookDescription = self.g.get_hooks()[0]
comparison = repository.compare("master", "develop")
contentFile = repository.get_file_contents("README.rst")
permissions = repository.permissions
event = repository.get_events()[0]
notification = authenticatedUser.get_notification("8406712")
notificationSubject = notification.subject
missingAttributes = self.gatherMissingAttributes([
authenticatedUser,
# authorization, # Security issue if put as-is in ReplayData
# authorizationApplication, # Security issue if put as-is in ReplayData
branch,
commit,
commitComment,
commitStats,
commitStatus,
comparison,
contentFile,
# download, # Deprecated: https://github.com/blog/1302-goodbye-uploads
event,
file,
gist,
gistComment,
gistFile,
gistHistoryState,
gitAuthor,
gitBlob,
gitCommit,
gitignoreTemplate,
gitObject,
gitRef,
# gitTag,
gitTree,
gitTreeElement,
hook,
hookDescription,
hookResponse,
issue,
issueComment,
issueEvent,
issuePullRequest,
label,
milestone,
namedUser,
notification,
notificationSubject,
organization,
permissions,
plan,
pullRequest,
pullRequestComment,
# pullRequestMergeStatus, # Only obtained when merging a pull request through the API
pullRequestPart,
rate,
rateLimit,
repository,
# repositoryKey, # Security issue if put as-is in ReplayData
status,
statusMessage,
# tag,
team,
# userKey, # Security issue if put as-is in ReplayData
])
for className, attributesMissingInClass in sorted(missingAttributes.iteritems()):
for attrName, value in sorted(attributesMissingInClass.iteritems()):
print className, attrName, "->", repr(value)
self.assertEqual(sum(len(attrs) for attrs in missingAttributes.values()), 0)
def findMissingAttributes(self, obj):
if hasattr(obj, "update"):
obj.update()
className = obj.__class__.__name__
missingAttributes = {}
for attribute in obj.raw_data:
if attribute != "_links":
if not hasattr(obj, attribute):
missingAttributes[attribute] = obj.raw_data[attribute]
return (className, missingAttributes)
def gatherMissingAttributes(self, objs):
allMissingAttributes = dict()
for obj in objs:
className, attributesMissingInClass = self.findMissingAttributes(obj)
if len(attributesMissingInClass) > 0:
if className not in allMissingAttributes:
allMissingAttributes[className] = dict()
allMissingAttributes[className].update(attributesMissingInClass)
return allMissingAttributes
|
fernandog/Medusa
|
ext/github/tests/ExposeAllAttributes.py
|
Python
|
gpl-3.0
| 7,340 |
from django.contrib.sitemaps import Sitemap
from .models import Graffiti
class GraffitiSitemap(Sitemap):
changefreq = "daily"
def items(self):
return Graffiti.objects.filter(active=True, checked=True)
def lastmod(self, obj):
return obj.date_updated
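# A minimal sketch of how this sitemap might be hooked into the project's
# URLconf. The module path and URL pattern below are assumptions for
# illustration only, not part of this repository:
#
#   from django.conf.urls import url
#   from django.contrib.sitemaps.views import sitemap
#   from graffities.sitemaps import GraffitiSitemap
#
#   urlpatterns = [
#       url(r'^sitemap\.xml$', sitemap,
#           {'sitemaps': {'graffiti': GraffitiSitemap}}),
#   ]
#
# Each Graffiti object is expected to define get_absolute_url() so the
# sitemap framework can build its <loc> entries.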
|
stleon/graffiti_map
|
graffities/sitemaps.py
|
Python
|
gpl-3.0
| 281 |
from ambition_validators import AmphotericinMissedDosesFormValidator
from ..models import AmphotericinMissedDoses
from .form_mixins import InlineSubjectModelFormMixin
class AmphotericinMissedDosesForm(InlineSubjectModelFormMixin):
form_validator_cls = AmphotericinMissedDosesFormValidator
class Meta:
model = AmphotericinMissedDoses
fields = '__all__'
|
botswana-harvard/ambition-subject
|
ambition_subject/forms/amphotericin_missed_doses_form.py
|
Python
|
gpl-3.0
| 381 |
'''
Created on Aug 3, 2013
Peer manager handles information of peers who have joined/left the swarm.
This file is part of CryptikChaos.
CryptikChaos is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CryptikChaos is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CryptikChaos. If not, see <http://www.gnu.org/licenses/>.
@author: vaizguy
'''
__author__ = "Arun Vaidya"
__version__ = "0.6.1"
from kivy.logger import Logger
from cryptikchaos.core.env.configuration import constants
from cryptikchaos.libs.utilities import md5hash
if constants.NETWORKX_AVAILABLE:
import networkx as nx
import matplotlib.pyplot as plt
from cryptikchaos.storage.manager import StoreManager
from cryptikchaos.core.comm.swarm.peer import Peer
class SwarmManager(StoreManager):
"Manage peers in the swarm."
def __init__(self, peerid, peerkey):
# Authorized keys
self._valid_keys = (
"PEER_ID", "PEER_KEY", "PEER_IP", "PEER_PORT",
"PEER_STATUS", "PEER_COLOR"
)
# Create store
super(SwarmManager, self).__init__(
"{}_SwarmStore".format(peerid),
self._valid_keys,
)
# Client Attributes
self.my_peerid = peerid
self.my_key = peerkey
self.my_msg_rcc = peerid
        # Hold peer connections
self.peer_connections = {}
# Hold peer pending streams
self.peer_stream_buffer = {}
# Create graph
if constants.NETWORKX_AVAILABLE:
self.swarm_graph = nx.Graph()
def __del__(self):
peer_ids = self.list_peer_ids()
# Exit if no connections to clear
if peer_ids:
# Remove peer connections
for pid in peer_ids:
# Delete peer
self.delete_peer(pid)
# Close store
if super(SwarmManager, self):
super(SwarmManager, self).__del__()
def add_peer(self, pid, key, host, port):
"Add peer to database."
# localhost - 127.0.0.1 mapping.
if host == "localhost":
host = constants.LOCAL_TEST_HOST
Logger.debug("SWARM: Adding Peer {} , {}@{}".format(pid, host, port))
if pid in self.keys():
Logger.warn("SWARM: Peer {} already exists. No changes made.".format(pid))
return None
else:
Logger.debug("SWARM: Adding Peer {} , {}@{}".format(pid, host, port))
# Peer dictionary structure defined here
self.add_store(
pid, dictionary=Peer(pid, key, host, port).dict
)
# init stream buffer
self.peer_stream_buffer[pid] = []
# Add peer to swarm graph
if constants.NETWORKX_AVAILABLE:
self.add_swarm_graph_node(pid)
def delete_peer(self, pid):
"Remove unauth peer."
Logger.warn("SWARM: Peer [{}] left swarm.".format(pid))
# remove peer connection
del self.peer_connections[pid]
return self.delete_store(pid)
def get_peer(self, pid):
"Get peer from db."
return self.get_store(pid)
def add_peer_connection(self, pid, conn):
"Add a peer connection."
try:
self.peer_connections[pid] = conn
except KeyError:
Logger.error("SWARM: Invalid Peer ID.")
return False
else:
return True
def connect_to_peer(self, pid):
"Get stored peer connection from pid."
try:
stat = self.get_peer_connection_status(pid)
except KeyError:
Logger.error("SWARM: Invalid Peer ID.")
return None
else:
if stat:
return self.get_peer_connection(pid)
else:
return None
def update_peer_connection_status(self, pid, status):
"Update peer's connection status."
if status in (True, False):
# Set new connection status
self.set_store_item(pid, "PEER_STATUS", status)
else:
raise Exception(
"Invalid Peer Connection Status, must be True or False."
)
def list_peer_ids(self):
"Returns a list of all peer IDs present in swarm."
try:
return self.keys()
except AttributeError:
return []
def list_peers(self):
"Returns a list of all the peers."
peerlist = []
for k in self.keys():
# Get peer attributes
p_info = self.get_store(k)
# Concatenate if key is bigger than 4 chars
if len(p_info["PEER_KEY"]) >= 4:
peer_key = p_info["PEER_KEY"][0:3] + "XXX"
else:
peer_key = p_info["PEER_KEY"]
# Append as tuples (peer id, peer host, peer port, peer status)
peerlist.append(
(p_info["PEER_ID"],
peer_key,
p_info["PEER_IP"],
p_info["PEER_PORT"],
p_info["PEER_STATUS"]))
return peerlist
def list_peer_id_colors(self):
"Returns a list of all the peers."
rcclist = [self.my_msg_rcc]
for sid in self.keys():
# Get peer color attributes
rcclist.append(
self.get_store_item(sid, "PEER_COLOR")
)
return rcclist
def peer_table(self):
"Display all peers"
def pkey_action(val):
val = md5hash(val)
return val
table = self.storage_table(action_dict={"PEER_KEY":pkey_action})
if table:
return """
\nPeers:
{}
""".format(table)
else:
return "No peers in swarm."
def peer_host(self, pid):
"Returns a peer's IPv4 address."
return self.get_store_item(pid, "PEER_IP")
# Need to simplify mapping TODO
def get_peerid_from_ip(self, peer_ip, peer_port=constants.PEER_PORT):
"Get a peerid from stored IP addresses. Assumes 1to1 relation."
for (pid, _, ip, port, _) in self.list_peers():
if ip == peer_ip and port == peer_port:
return pid
return None # Add relevant catch
def get_peer_connection_status(self, pid):
"Get the peer connection status."
return self.get_store_item(pid, "PEER_STATUS")
def get_peer_connection(self, pid):
"Get the peer connection."
return self.peer_connections[pid]
def get_peer_key(self, pid):
"Get the peers key."
return self.get_store_item(pid, "PEER_KEY")
def get_peerid_color(self, pid):
"Return peer's color code."
pid_rcc = self.get_store_item(pid, "PEER_COLOR")
if pid_rcc:
return pid_rcc
else:
return self.my_msg_rcc
def is_peer(self, pid):
"Check if peer got added successfully."
return self.in_store(pid)
def add_stream_buffer(self, pid, stream_id):
"Add pending streams to peer stream buffer"
self.peer_stream_buffer[pid].append(stream_id)
def get_stream_buffer(self, pid):
"Return stream buffer"
return self.peer_stream_buffer[pid]
# Swarm Graphing functions
if constants.NETWORKX_AVAILABLE:
def add_swarm_graph_node(self, pid):
"Add peer node to swarm graph."
self.swarm_graph.add_edge(self.my_peerid, pid)
def plot_swarm_graph(self):
"Visualize the swarm"
# Check if no peers in swarm
if not self.list_peers():
return False
# Plot circular graph
nx.draw_circular(self.swarm_graph)
if not constants.PLATFORM_ANDROID:
# Show graph plot
plt.show()
else:
plt.savefig("graph.pdf")
return True
if __name__ == '__main__':
sm = SwarmManager(1000, "key")
sm.add_peer(123, "k1", 'localhost', 8000)
sm.add_peer(234, "k2", 'localhost', 8001)
sm.add_peer(345, "k3", 'localhost', 8002)
sm.add_peer(456, "k4", 'localhost', 8003)
print sm.list_peers()
|
vaizguy/cryptikchaos
|
src/cryptikchaos/core/comm/swarm/manager.py
|
Python
|
gpl-3.0
| 8,638 |
# coding=utf-8
"""Tests for medusa/search/core.py."""
from __future__ import unicode_literals
import functools
import logging
from medusa.common import HD1080p, Quality
from medusa.search.core import filter_results, pick_result
from mock.mock import Mock
import pytest
from six import iteritems
@pytest.mark.parametrize('p', [
{ # p0 - No results
'results': []
},
{ # p1
'config': {
'IGNORE_WORDS': ['dubbed', 'whatever'],
'REQUIRE_WORDS': [],
},
'series': {
'quality': HD1080p,
'rls_ignore_words': 'BadRobot', # Comma separated
'rls_require_words': 'h264,x265', # Comma separated
},
'provider': {
'minseed': 5,
'minleech': 2,
},
'results': [
{
'expected': True,
'name': 'Show.Name.S03E04.1080p.HDTV.h264-RlsGrp',
'quality': Quality.FULLHDTV,
'seeders': 100,
'leechers': 300,
},
{
'expected': True,
'name': 'Show.Name.S03E04.1080p.BluRay.x265-RlsGrp',
'quality': Quality.FULLHDBLURAY,
'seeders': 5,
'leechers': 5,
},
{
'expected': False, # Global ignored word: dubbed
'name': 'Show.Name.S03E04.DUBBED.1080p.HDTV.h264-RlsGrp',
'quality': Quality.FULLHDTV,
'seeders': 10,
'leechers': 20,
},
{
'expected': False, # Global ignored word: whatever + Series required word: x265
'name': 'Show.Name.S03E04.whatever.1080p.HDTV.x265-RlsGrp',
'quality': Quality.FULLHDTV,
'seeders': 10,
'leechers': 20,
},
{
'expected': False, # result seeders < provider minseed
'name': 'Show.Name.S03E04.1080p.WEB-DL.h264-RlsGrp',
'quality': Quality.FULLHDWEBDL,
'seeders': 2,
'leechers': 7,
},
{
'expected': False, # Series ignored word: BadRobot
'name': 'Show.Name.S03E04.1080p.BluRay.h264-BadRobot',
'quality': Quality.FULLHDBLURAY,
'seeders': 20,
'leechers': 17,
},
{
'expected': False, # Series required words
'name': 'Show.Name.S03E04.1080p.BluRay.h265-RlsGrp',
'quality': Quality.FULLHDBLURAY,
'seeders': 5,
'leechers': 5,
},
{
'expected': False, # Unwanted quality
'name': 'Show.Name.S03E04.720p.HDTV.h264-RlsGrp',
'quality': Quality.HDTV,
'seeders': 10,
'leechers': 5,
}
]
},
])
def test_filter_results(p, app_config, create_search_result, search_provider, create_tvshow, create_tvepisode, caplog):
caplog.set_level(logging.DEBUG, logger='medusa')
# Given
config_attrs = p.get('config', {})
for attr, value in iteritems(config_attrs):
app_config(attr, value)
series_attrs = p.get('series', {})
series = create_tvshow(**series_attrs)
series.want_episode = Mock(return_value=True)
episode = create_tvepisode(series, 3, 4)
provider_attrs = p.get('provider', {})
results = []
expected = []
for item in p['results']:
is_expected = item.pop('expected', False)
result = create_search_result(
provider=search_provider(**provider_attrs),
series=series,
episode=episode,
**item
)
results.append(result)
if is_expected:
expected.append(result)
# When
actual = filter_results(results)
# Then
assert expected == actual
@pytest.mark.parametrize('p', [
{ # p0 - No results
'results': [],
'expected': None
},
{ # p1 - same quality - proper tags / preferred words / undesired words
'config': {
'PREFERRED_WORDS': ['x265', 'h265'],
'UNDESIRED_WORDS': ['internal', 'subbed'],
},
'series': {
'quality': HD1080p,
},
'expected': 3, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.1080p.HDTV.h264-RlsGrp',
'quality': Quality.FULLHDTV
},
{ # 1 - Proper tag: REPACK
'name': 'Show.Name.S03E04.REPACK.1080p.HDTV.h264-RlsGrp',
'quality': Quality.FULLHDTV,
'proper_tags': ['REPACK']
},
{ # 2 - Global undesired word: internal
'name': 'Show.Name.S03E04.iNTERNAL.1080p.HDTV.h264-RlsGrp',
'quality': Quality.FULLHDTV
},
{ # 3 - Global preferred word: x265
'name': 'Show.Name.S03E04.1080p.HDTV.x265-RlsGrp',
'quality': Quality.FULLHDTV
},
]
},
{ # p2 - quality upgrades + proper tags + words
'config': {
'PREFERRED_WORDS': ['x265', 'h265'],
'UNDESIRED_WORDS': ['internal', 'subbed'],
},
'series': {
'quality': HD1080p,
},
'expected': 4, # Index of the expected result
'results': [
{ # 0 - Preferred: x265 + Proper tag: PROPER
'name': 'Show.Name.S03E04.PROPER.1080p.WEB-DL.x265-RlsGrp',
'quality': Quality.FULLHDWEBDL,
'proper_tags': ['PROPER']
},
{ # 1 - Preferred: x265 + Better quality
'name': 'Show.Name.S03E04.1080p.BluRay.x265-RlsGrp',
'quality': Quality.FULLHDBLURAY
},
{ # 2 - Better quality
'name': 'Show.Name.S03E04.1080p.BluRay.h264-RlsGrp',
'quality': Quality.FULLHDBLURAY
},
{ # 3 - Preferred: h265 + Better quality + Undesired: subbed
'name': 'Show.Name.S03E04.1080p.BluRay.h265.SUBBED-RlsGrp',
'quality': Quality.FULLHDBLURAY
},
{ # 4 - Preferred: h265 + Better quality + Proper tag: REPACK
'name': 'Show.Name.S03E04.REPACK.1080p.BluRay.h265-RlsGrp',
'quality': Quality.FULLHDBLURAY,
'proper_tags': ['REPACK']
},
{ # 5 - Preferred: h265 + Undesired: subbed
'name': 'Show.Name.S03E04.1080p.WEB-DL.h265.SUBBED-RlsGrp',
'quality': Quality.FULLHDWEBDL
},
]
},
{ # p3 - everything undesired
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': ['internal', 'subbed'],
},
'series': {
'quality': HD1080p,
},
'expected': 2, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.iNTERNAL.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV
},
{ # 1
'name': 'Show.Name.S03E04.1080p.HDTV.x264.SUBBED-RlsGrp',
'quality': Quality.FULLHDTV
},
{ # 2
'name': 'Show.Name.S03E04.iNTERNAL.1080p.WEB-DL.x264-RlsGrp',
'quality': Quality.FULLHDWEBDL
},
]
},
{ # p4 - preferred lower quality
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': [],
},
'series': {
'quality': Quality.combine_qualities(
[Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY],
[Quality.HDTV]
),
},
'expected': 1, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.1080p.WEB-DL.x264-RlsGrp',
'quality': Quality.FULLHDWEBDL
},
{ # 1
'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV
},
{ # 2
'name': 'Show.Name.S03E04.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV
},
{ # 3
'name': 'Show.Name.S03E04.1080p.BluRay.x264-RlsGrp',
'quality': Quality.FULLHDBLURAY
},
]
},
{ # p5 - higher quality, lower quality and proper lower quality
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': [],
},
'series': {
'quality': HD1080p,
},
'expected': 1, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV
},
{ # 1
'name': 'Show.Name.S03E04.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV
},
{ # 2
'name': 'Show.Name.S03E04.PROPER.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
'proper_tags': ['PROPER']
},
]
},
{ # p6 - higher quality, preferred lower quality and proper lower quality
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': [],
},
'series': {
'quality': Quality.combine_qualities([Quality.FULLHDTV], [Quality.HDTV]),
},
'expected': 2, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.PROPER.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
'proper_tags': ['PROPER']
},
{ # 1
'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
},
{ # 2
'name': 'Show.Name.S03E04.PROPER.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
'proper_tags': ['PROPER']
},
]
},
{ # p7 - higher quality, preferred lower quality, real proper lower quality
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': [],
},
'series': {
'quality': Quality.combine_qualities([Quality.FULLHDTV], [Quality.HDTV]),
},
'expected': 2, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.PROPER.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
'proper_tags': ['PROPER']
},
{ # 1
'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
},
{ # 2
'name': 'Show.Name.S03E04.REAL.PROPER.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
'proper_tags': ['REAL', 'PROPER']
},
{ # 3
'name': 'Show.Name.S03E04.PROPER.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
'proper_tags': ['PROPER']
},
]
},
{ # p8 - real proper higher quality, preferred lower proper quality
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': [],
},
'series': {
'quality': Quality.combine_qualities([Quality.FULLHDTV], [Quality.HDTV]),
},
'expected': 2, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.REAL.PROPER.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
'proper_tags': ['REAL', 'PROPER']
},
{ # 1
'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
},
{ # 2
'name': 'Show.Name.S03E04.PROPER.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV,
'proper_tags': ['PROPER']
},
]
},
{ # p9 - real proper over proper
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': [],
},
'series': {
'quality': HD1080p,
},
'expected': 2, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.PROPER.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
'proper_tags': ['PROPER']
},
{ # 1
'name': 'Show.Name.S03E04.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
},
{ # 2
'name': 'Show.Name.S03E04.REAL.PROPER.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
'proper_tags': ['REAL', 'PROPER']
},
]
},
{ # p10 - higher quality, proper higher quality, preferred quality
'config': {
'PREFERRED_WORDS': [],
'UNDESIRED_WORDS': [],
},
'series': {
'quality': Quality.combine_qualities([Quality.FULLHDTV], [Quality.HDTV]),
},
'expected': 2, # Index of the expected result
'results': [
{ # 0
'name': 'Show.Name.S03E04.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
},
{ # 1
'name': 'Show.Name.S03E04.PROPER.1080p.HDTV.x264-RlsGrp',
'quality': Quality.FULLHDTV,
'proper_tags': ['PROPER']
},
{ # 2
'name': 'Show.Name.S03E04.720p.HDTV.x264-RlsGrp',
'quality': Quality.HDTV
},
]
}
])
def test_pick_result(p, app_config, create_search_result, search_provider, create_tvshow, create_tvepisode, caplog):
caplog.set_level(logging.DEBUG, logger='medusa')
# Given
config_attrs = p.get('config', {})
for attr, value in iteritems(config_attrs):
app_config(attr, value)
series_attrs = p.get('series', {})
series = create_tvshow(**series_attrs)
episode = create_tvepisode(series, 3, 4)
provider_attrs = p.get('provider', {})
make_result = functools.partial(
create_search_result,
provider=search_provider(**provider_attrs),
series=series,
episode=episode
)
results = [make_result(**item) for item in p['results']]
expected = p['expected']
if isinstance(expected, int):
expected = results[expected]
# When
actual = pick_result(results)
# Then
assert expected == actual
|
pymedusa/SickRage
|
tests/test_search_core.py
|
Python
|
gpl-3.0
| 14,970 |
#
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2015, Mostapha Sadeghipour Roudsari <Sadeghipour@gmail.com>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Create an HBZone from HB Surfaces
-
Provided by Honeybee 0.0.57
Args:
_name_: The name of the zone as a string
zoneProgram_: Optional input for the program of this zone
    isConditioned_: True/False value. This value will be applied to the output zone to either condition it with an Ideal Air Loads System (True) or not condition it at all (False). If no value is connected here, all zones will be conditioned with an Ideal Air Loads System by default.
_HBSurfaces: A list of Honeybee Surfaces
Returns:
readMe!:...
HBZone: Honeybee zone as the result
"""
import rhinoscriptsyntax as rs
import Rhino as rc
import scriptcontext as sc
import os
import sys
import System
import Grasshopper.Kernel as gh
import uuid
import math
ghenv.Component.Name = 'Honeybee_createHBZones'
ghenv.Component.NickName = 'createHBZones'
ghenv.Component.Message = 'VER 0.0.57\nSEP_07_2015'
ghenv.Component.Category = "Honeybee"
ghenv.Component.SubCategory = "00 | Honeybee"
#compatibleHBVersion = VER 0.0.56\nFEB_01_2015
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "3"
except: pass
tolerance = sc.doc.ModelAbsoluteTolerance
def main(zoneName, HBZoneProgram, HBSurfaces, isConditioned):
# import the classes
if sc.sticky.has_key('honeybee_release'):
try:
if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component): return -1
except:
warning = "You need a newer version of Honeybee to use this compoent." + \
"Use updateHoneybee component to update userObjects.\n" + \
"If you have already updated userObjects drag Honeybee_Honeybee component " + \
"into canvas and try again."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, warning)
return
# don't customize this part
hb_EPZone = sc.sticky["honeybee_EPZone"]
hb_EPSrf = sc.sticky["honeybee_EPSurface"]
hb_EPZoneSurface = sc.sticky["honeybee_EPSurface"]
else:
print "You should first let Honeybee to fly..."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, "You should first let Honeybee to fly...")
return
# call the surface from the hive
hb_hive = sc.sticky["honeybee_Hive"]()
HBSurfaces = hb_hive.callFromHoneybeeHive(HBSurfaces)
# bldg program
try: bldgProgram, zoneProgram = HBZoneProgram.split("::")
except: bldgProgram, zoneProgram = 'Office', 'OpenOffice'
# initiate the zone
zoneID = str(uuid.uuid4())
# default for isConditioned is True
if isConditioned== None: isConditioned = True
HBZone = hb_EPZone(None, zoneID, zoneName.strip().replace(" ","_"), (bldgProgram, zoneProgram), isConditioned)
for hbSrf in HBSurfaces:
HBZone.addSrf(hbSrf)
# create the zone from the surfaces
HBZone.createZoneFromSurfaces()
if not HBZone.isClosed:
message = "All of your HBSrfs must make a closed volume."
print message
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, message)
HBZone = hb_hive.addToHoneybeeHive([HBZone], ghenv.Component.InstanceGuid.ToString() + str(uuid.uuid4()))
return HBZone
if _name != None and _HBSurfaces and _HBSurfaces[0]!=None:
result= main(_name, zoneProgram_, _HBSurfaces, isConditioned_)
HBZone = result
|
samuto/Honeybee
|
src/Honeybee_createHBZones.py
|
Python
|
gpl-3.0
| 4,480 |
# -*- coding: UTF-8 -*-
# Copyright (C) 2008 Gautier Hayoun <gautier.hayoun@itaapy.com>
# Copyright (C) 2008 Juan David Ibáñez Palomar <jdavid@itaapy.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from the Standard Library
from os.path import join
from rfc822 import Message
# Import from itools
from itools.datatypes import String, LanguageTag, Tokens
from itools.handlers import ConfigFile, TextFile, register_handler_class
from itools.fs import vfs
class SetupFile(ConfigFile):
"""abstract a setup.conf file
"""
schema = {
'name': String(default=''),
'title': String(default=''),
'url': String(default=''),
'author_name': String(default=''),
'author_email': String(default=''),
'license': String(default=''),
'description': String(default=''),
'packages': Tokens,
'requires': Tokens,
'provides': Tokens,
'scripts': Tokens,
'source_language': LanguageTag(default=('en', 'EN')),
'target_languages': Tokens(default=(('en', 'EN'),))
}
class RFC822File(TextFile):
""" holds a rfc822 Message """
attrs = {}
message = None
list_types = (type([]), type(()))
str_types = (type(''),)
def new(self, **kw):
if 'attrs' in kw.keys():
self.set_attrs(kw['attrs'])
def _load_state_from_file(self, file):
self.attrs = {}
self.message = Message(file)
for k in self.message.keys():
if self.schema is None:
if len(self.message.getheaders(k)) == 1:
self.attrs[k] = self.message.getheader(k)
else:
self.attrs[k] = self.message.getheaders(k)
elif k in self.schema:
if issubclass(self.schema[k], String):
self.attrs[k] = self.message.getheader(k)
elif issubclass(self.schema[k], Tokens):
self.attrs[k] = self.message.getheaders(k)
def to_str(self):
data = ''
list_types = (type([]), type(()))
str_types = (type(''),)
for key, val in self.attrs.items():
if type(val) in str_types:
data += '%s: %s\n' % (key, val)
elif type(val) in list_types:
# a new line for each item of the list
for v in val:
data += '%s: %s\n' % (key, v)
return data
#######################################################################
# API
#######################################################################
def set_attrs(self, attrs):
# Check types of values
type_error_msg = 'One of the given values is not compatible'
for key, val in attrs.items():
if type(val) in self.list_types:
for v in val:
if type(v) not in self.str_types:
raise TypeError, type_error_msg
elif self.schema is not None and key not in self.schema:
del attrs[key]
# Now attrs is sure
self.attrs = attrs
self.set_changed()
def get_attrs(self):
if self.schema is not None:
for key in self.schema:
if key not in self.attrs:
self.attrs[key] = self.schema[key].get_default()
return self.attrs
class PKGINFOFile(RFC822File):
class_mimetypes = ['text/x-egg-info']
schema = {
'metadata-version': String(default=''),
'name': String(default=''),
'version': String(default=''),
'summary': String(default=''),
'author-email': String(default=''),
'license': String(default=''),
'download-url': String(default=''),
# Optional
'description': String(default=''),
'keywords': Tokens,
'home-page': String(default=''),
'author': String(default=''),
'platform': String(default=''),
'supported-platform': String(default=''),
'classifiers': Tokens,
'requires': Tokens,
'provides': Tokens,
'obsoletes': Tokens,
}
register_handler_class(PKGINFOFile)
def parse_setupconf(package_dir):
"""Return a dict containing information from setup.conf in a itools package
plus the version of the package
"""
attributes = {}
if not vfs.is_folder(package_dir):
return attributes
if not vfs.exists(join(package_dir, "setup.conf")):
return attributes
config = SetupFile(join(package_dir, "setup.conf"))
for attribute in config.schema:
attributes[attribute] = config.get_value(attribute)
if vfs.exists(join(package_dir, "version.txt")):
attributes['version'] = open(join(package_dir, "version.txt")).read()
else:
attributes['version'] = get_package_version(attributes['name'])
return attributes
def get_package_version(package_name):
try:
mod = __import__(package_name)
except ImportError:
return '?'
for name in ['Version', '__version__', 'version']:
version = getattr(mod, name, None)
if version is not None:
if hasattr(version,'__call__'):
return version()
return version
return '?'
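# A minimal usage sketch for parse_setupconf (the path and values below are
# purely illustrative):
#
#   >>> attrs = parse_setupconf('/path/to/a/package')
#   >>> attrs['name'], attrs['version']
#   ('my_package', '0.1.0')
#
# The function returns an empty dict when the directory or its setup.conf is
# missing, and falls back to get_package_version() when version.txt is absent.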
|
kennym/itools
|
pkg/metadata.py
|
Python
|
gpl-3.0
| 5,870 |
#!/usr/bin/python3
import argparse
import glob
import os
import re
import string
import sys
import unicodedata
from collections import defaultdict
from ucca import layer0
from ucca.ioutil import file2passage
desc = """Prints the unicode general categories of characters in words/punctuation in UCCA passages
"""
UNICODE_ESCAPE_PATTERN = re.compile(r"\\u\d+")
def main():
argparser = argparse.ArgumentParser(description=desc)
argparser.add_argument('directory', help="directory containing XML files to process")
punctuations, words = read_words_and_punctuations(argparser.parse_args())
word_char_categories, punctuation_char_categories, wrong_words, wrong_punctuation = \
group_by_categories(punctuations, words)
print("word character categories: " + ", ".join(sorted(word_char_categories)))
print("punctuation character categories: " + ", ".join(sorted(punctuation_char_categories)))
print("words matching punctuation rule: " + ", ".join(wrong_words))
print("punctuation not matching punctuation rule: " + ", ".join(wrong_punctuation))
print("tokens in both lists: " + ", ".join(set(punctuations).intersection(words)))
sys.exit(0)
def group_by_categories(punctuations, words):
word_char_categories = defaultdict(list)
punctuation_char_categories = defaultdict(list)
wrong_words = []
wrong_punctuation = []
for word in words:
if all(is_punct(c) for c in word):
wrong_words.append(word)
for c in word:
word_char_categories[unicodedata.category(c)].append(word)
for punctuation in punctuations:
if not UNICODE_ESCAPE_PATTERN.match(punctuation):
if not all(is_punct(c) for c in punctuation):
wrong_punctuation.append(punctuation)
for c in punctuation:
punctuation_char_categories[unicodedata.category(c)].append(punctuation)
return word_char_categories, punctuation_char_categories, wrong_words, wrong_punctuation
def is_punct(c):
return c in string.punctuation or c not in string.printable
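# Note: with this rule every character outside string.printable (i.e. any
# non-ASCII character) counts as punctuation, e.g. is_punct(',') and
# is_punct('\u2013') are True while is_punct('a') is False.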
def read_words_and_punctuations(args):
words = set()
punctuations = set()
passages = glob.glob(args.directory + "/*.xml")
words_file_name = os.path.join(args.directory, "words.txt")
punctuations_file_name = os.path.join(args.directory, "punctuations.txt")
if passages:
for filename in passages:
sys.stderr.write("Reading passage '%s'...\n" % filename)
passage = file2passage(filename)
terminals = passage.layer(layer0.LAYER_ID).all
w, p = [[terminal.attrib.get("text") for terminal in terminals if terminal.tag == tag]
for tag in (layer0.NodeTags.Word, layer0.NodeTags.Punct)]
words.update(w)
punctuations.update(p)
words = sorted(words)
punctuations = sorted(punctuations)
with open(words_file_name, "w") as words_file:
words_file.writelines(word + "\n" for word in words)
with open(punctuations_file_name, "w") as punctuations_file:
punctuations_file.writelines(punctuation + "\n" for punctuation in punctuations)
else:
with open(words_file_name) as words_file:
words = [word.rstrip() for word in words_file.readlines()]
with open(punctuations_file_name) as punctuations_file:
punctuations = [punctuation.rstrip() for punctuation in punctuations_file.readlines()]
return punctuations, words
if __name__ == '__main__':
main()
|
borgr/ucca
|
scenes/punctuation_unicode_categories.py
|
Python
|
gpl-3.0
| 3,547 |
# -*- coding: utf-8 -*-
from openerp import models, fields, api, tools, exceptions as ex
class HrEmployeeLicence(models.Model):
_name = 'hr.employee.licence'
employee_id = fields.Many2one('hr.employee', 'Employee', required=True)
licence_type_id = fields.Many2one('hr.licence.type', 'Licence type', required=True)
info = fields.Text('Notes')
valid_from = fields.Date('Valid from', required=True)
valid_to = fields.Date('Valid to')
|
nemanja-d/odoo_project_extensions
|
hr_employee_licences/models/hr_employee_licence.py
|
Python
|
gpl-3.0
| 459 |
import lxml.html
def get_machines():
lvs = lxml.html.parse('http://laundryview.com/lvs.php')
div = lvs.find(".//div[@id='campus1']")
rooms = []
status = []
for a in div.findall('.//a'):
rooms.append(str(a.text).strip().title())
for span in div.findall('.//span'):
status.append(str(span.text).strip())
return dict(zip(rooms, status))
print get_machines()
|
dormbase/dormbase
|
dormbase/data/laundry.py
|
Python
|
gpl-3.0
| 402 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_l3_interface
version_added: "2.4"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Manage L3 interfaces on Juniper JUNOS network devices
description:
- This module provides declarative management of L3 interfaces
on Juniper JUNOS network devices.
options:
name:
description:
- Name of the L3 interface.
ipv4:
description:
- IPv4 of the L3 interface.
ipv6:
description:
- IPv6 of the L3 interface.
unit:
description:
- Logical interface number.
default: 0
aggregate:
    description: List of L3 interface definitions
state:
description:
- State of the L3 interface configuration.
default: present
choices: ['present', 'absent']
active:
description:
- Specifies whether or not the configuration is active or deactivated
default: True
choices: [True, False]
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
"""
EXAMPLES = """
- name: Set ge-0/0/1 IPv4 address
junos_l3_interface:
name: ge-0/0/1
ipv4: 192.168.0.1
- name: Remove ge-0/0/1 IPv4 address
junos_l3_interface:
name: ge-0/0/1
state: absent
- name: Set ipv4 address using aggregate
junos_l3_interface:
aggregate:
- name: ge-0/0/1
ipv4: 1.1.1.1
- name: ge-0/0/2
ipv4: 2.2.2.2
ipv6: fd5d:12c9:2201:2::2
- name: Delete ipv4 address using aggregate
junos_l3_interface:
aggregate:
- name: ge-0/0/1
ipv4: 1.1.1.1
- name: ge-0/0/2
ipv4: 2.2.2.2
state: absent
"""
RETURN = """
diff:
description: Configuration difference before and after applying change.
returned: when configuration is changed and diff option is enabled.
type: string
sample: >
[edit interfaces ge-0/0/1 unit 0 family inet]
+ address 1.1.1.1/32;
[edit interfaces ge-0/0/1 unit 0 family inet6]
+ address fd5d:12c9:2201:1::1/128;
"""
import collections
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network_common import remove_default_spec
from ansible.module_utils.junos import junos_argument_spec, check_args
from ansible.module_utils.junos import load_config, map_params_to_obj, map_obj_to_ele
from ansible.module_utils.junos import commit_configuration, discard_changes, locked_config, to_param_list
try:
from lxml.etree import tostring
except ImportError:
from xml.etree.ElementTree import tostring
USE_PERSISTENT_CONNECTION = True
def main():
""" main entry point for module execution
"""
element_spec = dict(
name=dict(),
ipv4=dict(),
ipv6=dict(),
unit=dict(default=0, type='int'),
state=dict(default='present', choices=['present', 'absent']),
active=dict(default=True, type='bool')
)
aggregate_spec = deepcopy(element_spec)
aggregate_spec['name'] = dict(required=True)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
argument_spec.update(junos_argument_spec)
required_one_of = [['name', 'aggregate']]
mutually_exclusive = [['name', 'aggregate']]
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=mutually_exclusive,
required_one_of=required_one_of)
warnings = list()
check_args(module, warnings)
result = {'changed': False}
if warnings:
result['warnings'] = warnings
top = 'interfaces/interface'
param_to_xpath_map = collections.OrderedDict()
param_to_xpath_map.update([
('name', {'xpath': 'name', 'parent_attrib': False, 'is_key': True}),
('unit', {'xpath': 'name', 'top': 'unit', 'parent_attrib': False, 'is_key': True}),
('ipv4', {'xpath': 'inet/address/name', 'top': 'unit/family', 'is_key': True}),
('ipv6', {'xpath': 'inet6/address/name', 'top': 'unit/family', 'is_key': True})
])
params = to_param_list(module)
requests = list()
for param in params:
# if key doesn't exist in the item, get it from module.params
for key in param:
if param.get(key) is None:
param[key] = module.params[key]
item = param.copy()
if not item['ipv4'] and not item['ipv6']:
module.fail_json(msg="one of the following is required: ipv4,ipv6")
want = map_params_to_obj(module, param_to_xpath_map, param=item)
requests.append(map_obj_to_ele(module, want, top, param=item))
diff = None
with locked_config(module):
for req in requests:
diff = load_config(module, tostring(req), warnings, action='replace')
commit = not module.check_mode
if diff:
if commit:
commit_configuration(module)
else:
discard_changes(module)
result['changed'] = True
if module._diff:
result['diff'] = {'prepared': diff}
module.exit_json(**result)
if __name__ == "__main__":
main()
|
DazWorrall/ansible
|
lib/ansible/modules/network/junos/junos_l3_interface.py
|
Python
|
gpl-3.0
| 5,778 |
# -*- coding: utf-8 -*-
import re
from module.utils import html_unescape, parseFileSize
from module.plugins.Hoster import Hoster
from module.network.RequestFactory import getURL
from module.plugins.Plugin import chunks
from module.plugins.ReCaptcha import ReCaptcha
key = "bGhGMkllZXByd2VEZnU5Y2NXbHhYVlZ5cEE1bkEzRUw=".decode('base64')
def getID(url):
""" returns id from file url"""
m = re.match(r"http://[\w\.-]*?(uploaded\.(to|net)(/file/|/?\?id=|.*?&id=)|ul\.to/)(?P<ID>\w+)", url)
return m.group('ID')
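# Illustrative examples of the IDs extracted by getID() (URLs are made up):
#   getID("http://uploaded.net/file/abc123")  -> "abc123"
#   getID("http://ul.to/abc123")              -> "abc123"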
def getAPIData(urls):
post = {"apikey" : key}
idMap = {}
for i, url in enumerate(urls):
id = getID(url)
post["id_%s" % i] = id
idMap[id] = url
api = unicode(getURL("http://uploaded.net/api/filemultiple", post=post, decode=False), 'iso-8859-1')
result = {}
if api:
for line in api.splitlines():
data = line.split(",", 4)
if data[1] in idMap:
result[data[1]] = (data[0], data[2], data[4], data[3], idMap[data[1]])
return result
def parseFileInfo(self, url = '', html = ''):
if not html and hasattr(self, "html"): html = self.html
name, size, status, found, fileid = url, 0, 3, None, None
if re.search(self.FILE_OFFLINE_PATTERN, html):
# File offline
status = 1
else:
found = re.search(self.FILE_INFO_PATTERN, html)
if found:
name, fileid = html_unescape(found.group('N')), found.group('ID')
size = parseFileSize(found.group('S'))
status = 2
return name, size, status, fileid
def getInfo(urls):
for chunk in chunks(urls, 80):
result = []
api = getAPIData(chunk)
for data in api.itervalues():
if data[0] == "online":
result.append((html_unescape(data[2]), data[1], 2, data[4]))
elif data[0] == "offline":
result.append((data[4], 0, 1, data[4]))
yield result
class UploadedTo(Hoster):
__name__ = "UploadedTo"
__type__ = "hoster"
__pattern__ = r"http://[\w\.-]*?(uploaded\.(to|net)(/file/|/?\?id=|.*?&id=)|ul\.to/)\w+"
__version__ = "0.62"
__description__ = """Uploaded.net Download Hoster"""
__author_name__ = ("spoob", "mkaay", "zoidberg", "netpok")
__author_mail__ = ("spoob@pyload.org", "mkaay@mkaay.de", "zoidberg@mujmail.cz", "netpok@gmail.com")
FILE_INFO_PATTERN = r'<a href="file/(?P<ID>\w+)" id="filename">(?P<N>[^<]+)</a> \s*<small[^>]*>(?P<S>[^<]+)</small>'
FILE_OFFLINE_PATTERN = r'<small class="cL">Error: 404</small>'
def setup(self):
self.html = None
self.multiDL = False
self.resumeDownload = False
self.url = False
self.chunkLimit = 1 # critical problems with more chunks
if self.account:
self.premium = self.account.getAccountInfo(self.user)["premium"]
if self.premium:
self.multiDL = True
self.resumeDownload = True
self.fileID = getID(self.pyfile.url)
self.pyfile.url = "http://uploaded.net/file/%s" % self.fileID
def process(self, pyfile):
self.req.cj.setCookie("uploaded.net", "lang", "en") # doesn't work anymore
self.load("http://uploaded.net/language/en")
api = getAPIData([pyfile.url])
# TODO: fallback to parse from site, because api sometimes delivers wrong status codes
if not api:
self.logWarning("No response for API call")
self.html = unicode(self.load(pyfile.url, decode = False), 'iso-8859-1')
name, size, status, self.fileID = parseFileInfo(self)
self.logDebug(name, size, status, self.fileID)
if status == 1:
self.offline()
elif status == 2:
pyfile.name, pyfile.size = name, size
else:
self.fail('Parse error - file info')
elif api == 'Access denied':
self.fail(_("API key invalid"))
else:
if self.fileID not in api:
self.offline()
self.data = api[self.fileID]
if self.data[0] != "online":
self.offline()
pyfile.name = html_unescape(self.data[2])
# self.pyfile.name = self.get_file_name()
if self.premium:
self.handlePremium()
else:
self.handleFree()
def handlePremium(self):
info = self.account.getAccountInfo(self.user, True)
self.log.debug("%(name)s: Use Premium Account (%(left)sGB left)" % {"name" :self.__name__, "left" : info["trafficleft"]/1024/1024})
if int(self.data[1])/1024 > info["trafficleft"]:
self.log.info(_("%s: Not enough traffic left" % self.__name__))
self.account.empty(self.user)
self.resetAccount()
self.fail(_("Traffic exceeded"))
header = self.load("http://uploaded.net/file/%s" % self.fileID, just_header=True)
if "location" in header:
#Direct download
print "Direct Download: " + header['location']
self.download(header['location'])
else:
#Indirect download
self.html = self.load("http://uploaded.net/file/%s" % self.fileID)
found = re.search(r'<div class="tfree".*\s*<form method="post" action="(.*?)"', self.html)
if not found:
self.fail("Download URL not found. Try to enable direct downloads.")
url = found.group(1)
print "Premium URL: " + url
self.download(url, post={})
def handleFree(self):
self.html = self.load(self.pyfile.url, decode=True)
if 'var free_enabled = false;' in self.html:
self.logError("Free-download capacities exhausted.")
self.retry(24, 300)
found = re.search(r"Current waiting period: <span>(\d+)</span> seconds", self.html)
if not found:
self.fail("File not downloadable for free users")
self.setWait(int(found.group(1)))
js = self.load("http://uploaded.net/js/download.js", decode=True)
challengeId = re.search(r'Recaptcha\.create\("([^"]+)', js)
url = "http://uploaded.net/io/ticket/captcha/%s" % self.fileID
downloadURL = ""
for i in range(5):
#self.req.lastURL = str(self.url)
re_captcha = ReCaptcha(self)
challenge, result = re_captcha.challenge(challengeId.group(1))
options = {"recaptcha_challenge_field" : challenge, "recaptcha_response_field": result}
self.wait()
result = self.load(url, post=options)
self.logDebug("result: %s" % result)
if "limit-size" in result:
self.fail("File too big for free download")
elif "limit-slot" in result: # Temporary restriction so just wait a bit
self.setWait(30 * 60, True)
self.wait()
self.retry()
elif "limit-parallel" in result:
self.fail("Cannot download in parallel")
elif "You have reached the max. number of possible free downloads for this hour" in result: # limit-dl
self.setWait(60 * 60, True)
self.wait()
self.retry()
elif 'err:"captcha"' in result:
self.logError("ul.net captcha is disabled")
self.invalidCaptcha()
elif "type:'download'" in result:
self.correctCaptcha()
downloadURL = re.search("url:'([^']+)", result).group(1)
break
else:
self.fail("Unknown error '%s'")
self.setWait(60 * 60, True)
self.wait()
self.retry()
if not downloadURL:
self.fail("No Download url retrieved/all captcha attempts failed")
self.download(downloadURL)
|
fener06/pyload
|
module/plugins/hoster/UploadedTo.py
|
Python
|
gpl-3.0
| 8,019 |
""" This test only need the JobLoggingDB to be present
"""
# pylint: disable=invalid-name,wrong-import-position
import unittest
import datetime
import sys
from DIRAC.Core.Base.Script import parseCommandLine
parseCommandLine()
from DIRAC.WorkloadManagementSystem.DB.JobLoggingDB import JobLoggingDB
class JobLoggingDBTestCase(unittest.TestCase):
""" Base class for the JobLoggingDB test cases
"""
def setUp(self):
self.jlogDB = JobLoggingDB()
def tearDown(self):
pass
class JobLoggingCase(JobLoggingDBTestCase):
""" TestJobDB represents a test suite for the JobDB database front-end
"""
def test_JobStatus(self):
result = self.jlogDB.addLoggingRecord(1, status="testing",
minor='date=datetime.datetime.utcnow()',
date=datetime.datetime.utcnow(),
source='Unittest')
self.assertTrue(result['OK'], result.get('Message'))
date = '2006-04-25 14:20:17'
result = self.jlogDB.addLoggingRecord(1, status="testing",
minor='2006-04-25 14:20:17',
date=date,
source='Unittest')
self.assertTrue(result['OK'], result.get('Message'))
result = self.jlogDB.addLoggingRecord(1, status="testing",
minor='No date 1',
source='Unittest')
self.assertTrue(result['OK'], result.get('Message'))
result = self.jlogDB.addLoggingRecord(1, status="testing",
minor='No date 2',
source='Unittest')
self.assertTrue(result['OK'], result.get('Message'))
result = self.jlogDB.getJobLoggingInfo(1)
self.assertTrue(result['OK'], result.get('Message'))
result = self.jlogDB.getWMSTimeStamps(1)
self.assertTrue(result['OK'], result.get('Message'))
self.jlogDB.deleteJob(1)
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(JobLoggingCase)
testResult = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(not testResult.wasSuccessful())
|
fstagni/DIRAC
|
tests/Integration/WorkloadManagementSystem/Test_JobLoggingDB.py
|
Python
|
gpl-3.0
| 2,254 |
"""***************************************************
--- FRANCISCO JESÚS JIMÉNEZ HIDALGO ---
--- FUNDAMENTOS DE PROGRAMACIÓN ---
--- Sopa de letras ---
---************************************************"""
vectorDirecciones = [['E', 0, 1], ['O'], ['S', 1, 0], ['N'],
['SE', 1, 1], ['NO'], ['SO', -1, 1], ['NE']]
def buscarPalabra(tablero, palabra, filas, columnas, vectorDirecciones):
"""list[][], str -> list [coincidencias E, O, S, N, SE, NO, SO, NE
+++OBJ: busca la palabra en la sopa de letras
+++PRE: tablero lista de string"""
coincidencias = [0 for i in range(8)]
"""Horizontal/Vertical"""
for i in range(0, 4, 2):
vectorDireccion = vectorDirecciones[i]
"""Veces que tenemos que buscar una palabra en una matriz filas x columnas"""
veces = max(filas, columnas)
x, y, j = 0, 0, 1
while j <= veces:
aux = []
while x >= 0 and y >= 0 and y < filas and x < columnas:
aux += tablero[y][x]
y += vectorDireccion[1]
x += vectorDireccion[2]
if palabra in ''.join(aux):
coincidencias[i] += 1
aux.reverse()
if palabra in ''.join(aux):
coincidencias[i + 1] += 1
y = (y + vectorDireccion[2]) % filas
x = (x + vectorDireccion[1]) % columnas
j += 1
"""DIAGONALES"""
for i in range(4, 8, 2):
vectorDireccion = vectorDirecciones[i]
for j in range(0, filas):
if j == 0:
if i == 4:
delInicio = columnas-1
delFinal = -1
incremento = -1
elif i == 6:
delInicio = 0
delFinal = columnas
incremento = 1
else:
if i == 4:
delInicio = 0
delFinal = 1
incremento = 1
if i == 6:
delInicio = columnas-1
delFinal = columnas
incremento = 1
for k in range(delInicio, delFinal, incremento):
y, x = j, k
aux = []
while x >= 0 and y >= 0 and y < filas and x < columnas:
aux += tablero[y][x]
x += vectorDireccion[1]
y += vectorDireccion[2]
if palabra in ''.join(aux):
coincidencias[i] += 1
aux.reverse()
if palabra in ''.join(aux):
coincidencias[i + 1] += 1
return coincidencias
"""
# TESTER
sopa = [['h', 'o', 'l', 'a', 'x', 'x', 'a', 'l', 'o', 'h'],
['x', 'o', 'i', 'n', 'f', 'o', 'z', 'x', 'o', 'x'],
['x', 'x', 'l', 'm', 'o', 'l', 'a', 'l', 'x', 'x'],
['x', 'x', 'x', 'a', 'x', 'x', 'a', 'x', 'x', 'x'],
['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x'],
['x', 'x', 'x', 'x', 'a', 'x', 'x', 'x', 'x', 'x'],
['h', 'x', 'x', 'l', 'x', 'x', 'a', 'x', 'x', 'a'],
['o', 'x', 'o', 'x', 'x', 'x', 'x', 'l', 'x', 'l'],
['l', 'h', 'x', 'x', 'x', 'x', 'x', 'x', 'o', 'o'],
['a', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'h']]
print(buscarPalabra(sopa, 'hola', 10, 10, vectorDirecciones))
"""
palabra = input('Introduzca la palabra a buscar: ')
filas = int(input('Introduzca el número de filas: '))
columnas = int(input('Introduzca el número de columnas: '))
tablero = [[0 for i in range(filas)] for j in range(columnas)]
"""Pedimos cada elemento al usuario"""
for i in range(columnas):
for j in range(filas):
print('Introduzca el elemento %d, %d: ' % (i + 1, j + 1), end='')
tablero[i][j] = input()
coincidencias = buscarPalabra(tablero, palabra, filas, columnas, vectorDirecciones)
print('\n\n\nHas introducido la siguiente sopa:\n')
for i in range(0, filas):
for j in range(0, columnas):
print('%3s' %tablero[i][j], end='')
print('')
print('')
for i in range(0,8):
print('Se han encontrado', coincidencias[i], 'en sentido', vectorDirecciones[i][0])
|
MrXuso/Fundamentos-de-Computacion
|
SopaDeLetras.py
|
Python
|
gpl-3.0
| 4,245 |
QuestionCategory.objects.all().delete()
Topic.objects.all().delete()
Position.objects.all().delete()
cat = QuestionCategory.objects.create(name=u"#Gênero")
Topic.objects.create(category=cat, description="A prática de aborto é crime no Brasil: mulheres e médicos que praticam aborto ilegal podem ser presos. Uma em cada cinco mulheres já fez aborto no país e, por ano, cerca de 1 milhão de abortos são realizados. Metade das mulheres que praticam aborto ilegal acabam tendo complicações médicas e precisam ser internadas e muitas delas morrem. O Supremo Tribunal Federal fez recentemente uma audiência pública sobre o tema, com participação da sociedade civil e de pesquisadores, e vai decidir se mulheres com gravidez de até 12 semanas podem abortar (ADPF 442).",
label=u"Legalização do aborto")
Topic.objects.create(category=cat, description="Cerca de 12 mulheres são assassinadas por dia no Brasil. O feminicídio (assassinato motivado pela vítima ser mulher) atinge principalmente mulheres negras e de baixa renda. Mesmo depois de três anos da lei do feminicídio ter sido aprovada no Brasil, os seus efeitos ainda não podem ser medidos. A falta de dados oficiais dificulta o combate a este tipo de violência contra mulheres. A aplicação efetiva da lei do feminicídio depende de seu monitoramento pelos órgãos públicos, responsáveis pela coleta e divulgação dos dados.",
label=u"Monitoramento da Lei do feminicídio")
Topic.objects.create(category=cat, description="27% da população: essa é a porcentagem de mulheres negras no Brasil. Mas no Congresso Nacional, elas são apenas 2%. Mulheres brancas também são minoria na política, a diferença é que elas têm 3 vezes mais chances de ganhar uma eleição do que mulheres negras. Dentro dos partidos políticos, apenas 2,5% dos recursos são destinados para candidaturas de mulheres negras. Hoje está no Tribunal Superior Eleitoral uma consulta pública que exige um investimento mínimo de dinheiro dos partidos em candidatas negras.",
label=u"Financiamento público de campanhas de mulheres negras")
cat = QuestionCategory.objects.create(name=u"#Raça")
Topic.objects.create(category=cat, description="A Constituição protege a liberdade religiosa e a Lei de Crimes Ambientais proíbe maus tratos contra animais. Abates religiosos são praticados por judeus, muçulmanos e fiéis de religiões afro-brasileiras de um jeito que provoca morte instantânea, com mínimo de dor. Tribunais do Rio Grande do Sul e São Paulo já decidiram que estes casos não são maus tratos. Agora o Supremo Tribunal Federal dará a palavra final sobre o assunto (RE 494601).",
label=u"Tornar crime o abate religioso de animais")
Topic.objects.create(category=cat, description="Apenas <a src='https://www.cartacapital.com.br/sociedade/ibge-apenas-10-das-mulheres-negras-completam-o-ensino-superior' target='_blank'>10% das mulheres negras e 7% dos homens negros</a> têm acesso à universidade, mostrando que esse espaço ainda não é democrático e não representa a realidade da população brasileira. O valor médio do salário da população negra é quase metade do valor do salário da população branca (60%). As ações afirmativas, como cotas raciais nas universidades, servem para mudar esse cenário. Em 2012, o Supremo Tribunal Federal entendeu que as cotas raciais são um direito e ajudam a corrigir o histórico de racismo e escravidão no Brasil.",
label=u"Cotas raciais nas universidades")
Topic.objects.create(category=cat, description="Mulheres negras sofrem com preconceito racial no atendimento de saúde no SUS. Em relação a mulheres brancas, recebem menos anestesia no parto, esperam mais tempo por atendimento, têm menos acesso a exames médicos, como mamografia, e 60% das vítimas de mortalidade materna no país são negras..",
label=u"Prioridade no atendimento de mulheres negras no SUS ")
cat = QuestionCategory.objects.create(name=u"#LGBTs")
Topic.objects.create(category=cat, description="O Brasil é um dos países que mais mata LGBTs no mundo, em especial travestis e transexuais. Mas não há legislação que considere crime o preconceito contra lésbicas, transexuais e travestis, bissexuais, gays. Propostas para tornar crime o preconceito contra LGBTs estão sendo discutidas no Congresso Nacional (PL 134/2018, PL 515/2017 e 7582/2014).",
label=u"O preconceito contra LGBTs deve ser crime")
Topic.objects.create(category=cat, description="A população transexual e travesti é frequentemente impedida de usar banheiros de acordo com sua identidade de gênero. Não existe uma legislação sobre o assunto. Porém, o Supremo Tribunal Federal iniciou essa discussão em 2015, dizendo que não permitir o uso de banheiros conforme identidade de gênero feriria a dignidade humana, mas o julgamento foi suspenso (RE 845779).",
label=u"Trans e travestis podem usar o banheiro que quiserem")
Topic.objects.create(category=cat, description="“Escola sem partido” é nome dado a uma série de projetos lei que têm sido apresentados nos municípios, estados e também no Congresso Nacional (PL 7180/2014), que querem tirar o ensino sobre raça, classe social, gênero, identidade de gênero e orientação sexual nas escolas. 73% dos estudantes LGBTs já sofreu agressão verbal, 60% se sente inseguro e 36% já sofreu violência física nas escolas.",
label=u"Projeto “Escola sem partido”")
cat = QuestionCategory.objects.create(name=u"#Povos tradicionais & Meio Ambiente")
Topic.objects.create(category=cat, description="A demarcação de terras é uma luta histórica dos povos indígenas e quilombolas, por conta de seus vínculos históricos, culturais e ancestrais com o território. Uma série de iniciativas quer impedir o direito à terra que esses povos têm, seja no Congresso Nacional (PEC 215/2000), seja no judiciário. É o caso do “marco temporal”, argumento usado por alguns juízes para limitar o direito à terra apenas para os povos tradicionais que estivessem vivendo nela em 5 de outubro de 1988. Mas isso ignora que esses povos tradicionais foram expulsos e impedidos de retornar a suas terras.",
label=u"Marco temporal na demarcação de terra ")
Topic.objects.create(category=cat, description="Nos últimos 30 anos de democracia no Brasil, apenas um representante indígena foi eleito para o Congresso Nacional, apesar dos indígenas serem 0,4% da população brasileira. Para mudar esse cenário, recursos públicos para campanhas ou cadeiras no Congresso poderiam ser reservados a candidaturas de indígenas.",
label=u"Cotas para indígenas no Congresso")
Topic.objects.create(category=cat, description="O Brasil é campeão mundial no consumo de agrotóxicos, mercado que gira bilhões de dólares todos os anos. Mais da metade dos alimentos consumidos pelos brasileiros está contaminado por agrotóxicos e milhares de pessoas são atendidas pelo SUS com sintomas de intoxicação. O tema está sendo discutido no Congresso (PL 6299/2002), para permitir ainda mais o uso de alimentos com agrotóxicos.",
label=u"Facilitar uso de agrotóxico")
cat = QuestionCategory.objects.create(name=u"#Trabalho, Saúde e Educação")
Topic.objects.create(category=cat, description="A reforma trabalhista aprovada no atual governo criou algumas formas precárias de contratação, como o contrato intermitente, enfraqueceu os sindicatos ao retirar a contribuição sindical obrigatória, permitiu o trabalho insalubre da mulher gestante e retirou o acesso gratuito do trabalhador à justiça.",
label=u"Reforma trabalhista")
Topic.objects.create(category=cat, description="O governo atual adotou uma política econômica conhecida por “teto de gastos públicos”, para limitar os gastos públicos federais. Essa política é formada por algumas medidas como: congelamento de gastos sociais com políticas para a saúde, educação e seguro desemprego pelos próximos 20 anos. A ONU condenou tais medidas, por afetarem a população mais pobre. Ainda assim, ela foi aprovada no Congresso Nacional pela Emenda Constitucional 95.",
label=u"Teto de gastos públicos")
cat = QuestionCategory.objects.create(name=u"#Segurança e Direitos Humanos")
Topic.objects.create(category=cat, description="“Auto de resistência” era o nome dado pela polícia ao homicídio de pessoas que ofereceram “resistência à prisão”. Na prática, significava dizer que esses homicídios eram praticados por policiais em legítima defesa. O problema é que esses casos acabam não sendo investigados e, dos que são, 98% são arquivados. Hoje essa expressão foi proibida, mas outras parecidas, como 'mortes em decorrência de ação policial', continuam sendo usadas pela polícia.",
label=u"Autos de resistência")
Topic.objects.create(category=cat, description="Nossa Constituição não permite que menores de 18 anos sejam processados e presos como adultos, mas permite que esses adolescentes sejam internados em Fundações Casa. Alguns membros do Congresso Nacional defendem a alteração da Constituição para reduzir a “maioridade penal” (PEC 171/93 e PEC 33/12), ou seja, que possam ser presos como adultos. Como não estão conseguindo, agora tentam outra estratégia: aumentar o tempo que os adolescentes passam internados (PL 7197/02). Estudos comprovam que a redução da maioridade penal em diferentes países não levou à redução da criminalidade.",
label=u"Redução de maioridade penal")
cat = QuestionCategory.objects.create(name=u"#Corrupção")
Topic.objects.create(category=cat, description="Hoje existem 32 deputados federais e 8 senadores no Congresso Nacional que são donos de emissoras de rádio e TV. Assim eles podem influenciar o que a mídia fala sobre eles. Esses veículos de comunicação são concessões públicas que dependem de autorização do próprio Congresso Nacional, ou seja, dos próprios deputados federais e senadores. Duas ações no Supremo Tribunal Federal questionam se essa situação viola a nossa Constituição (ADPF 246 e ADPF 379).",
label=u"Políticos serem donos de emissoras de rádio e TV")
Topic.objects.create(category=cat, description="Todos os partidos políticos recebem dinheiro público do chamado “Fundo Partidário”. Nas eleições de 2018, pela primeira vez, também receberão 1,7 bilhão de reais de dinheiro público para financiar suas campanhas eleitorais de um “Fundo Especial de Financiamento de Campanha”. As lideranças dos partidos têm liberdade para escolher como gastar esse dinheiro e não existe um controle da sociedade sobre esses gastos. A obrigação dos partidos divulgarem seus balanços financeiros e prestações de contas pode ajudar na fiscalização da utilização desses recursos públicos pela sociedade.",
label=u"Transparência nos gastos dos partidos políticos")
cat = QuestionCategory.objects.create(name=u"#Drogas")
Topic.objects.create(category=cat, description="A maconha não seria mais um mercado ilegal se fossem aprovadas leis dizendo como ela deveria ser produzida e vendida, do mesmo jeito que já acontece com outras drogas, como álcool, tabaco e medicamentos. Isso significa que a maconha poderia ser produzida, vendida e utilizada de acordo com o direito. O mercado da maconha seria fiscalizado e não financiaria mais atividades criminosas. A legalização da maconha está sendo discutida no Congresso Nacional (Projetos de Lei 7.270/2014 e 10.549/2018).",
label=u"Legalização da maconha")
Topic.objects.create(category=cat, description="A internação psiquiátrica compulsória ocorre quando uma pessoa é internada contra a sua vontade. Atualmente, ela pode ocorrer por decisão do Judiciário (sem precisar da autorização da família) em casos extremos, quando o paciente não tem mais controle sobre sua condição psicológica e física. Alguns políticos têm tentado combater o uso de drogas com a internação compulsória coletiva de usuários, ou seja, contra sua vontade e sem avaliação da condição psicológica e física de cada um. É o que ocorre atualmente na cidade de São Paulo, na região da Cracolândia.",
label=u"Internação compulsória para usuários de drogas")
cat = QuestionCategory.objects.create(name=u"#Migrantes")
Topic.objects.create(category=cat, description="A Venezuela está passando por uma grave crise econômica e humanitária. O Brasil faz fronteira com a Venezuela, mas é um dos países da América do Sul que menos recebe migrantes de lá. Em Roraima, o governo restringiu o acesso de venezuelanos à saúde e o judiciário chegou a fechar as fronteiras para a entrada de novos migrantes. Casos de xenofobia (ódio e preconceito por causa da origem da pessoa) também têm acontecido.",
label=u"Acolhimento de migrantes venezuelanos no Brasil")
Topic.objects.create(category=cat, description="Cerca de 3 milhões de migrantes residem, trabalham e estudam no Brasil. Porém, eles não podem votar, nem se candidatar. Esse cenário pode mudar caso a nossa Constituição seja alterada e garanta o direito à participação dos migrantes na política do país (PEC 25/2012). Diversos países já garantiram esse direito.",
label=u"Direito a voto de migrantes")
for t in Topic.objects.all():
label_yes = u"Sou a <strong>FAVOR</strong> da %s" % t.label
yes = Position.objects.create(topic=t, label=label_yes)
label_no = u"Sou <strong>CONTRA</strong> a %s" % t.label
no = Position.objects.create(topic=t, label=label_no)
|
ciudadanointeligente/votainteligente-portal-electoral
|
merepresenta/datos_iniciales.py
|
Python
|
gpl-3.0
| 13,550 |
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
from __future__ import print_function, absolute_import
import contextlib
import logging
import dns.exception as dnsexception
import dns.name as dnsname
import os
import shutil
import socket
import sys
import tempfile
import textwrap
import traceback
from pkg_resources import parse_version
import six
from ipaclient.install.client import check_ldap_conf, sssd_enable_ifp
import ipaclient.install.timeconf
from ipalib.install import certstore, sysrestore
from ipalib.install.kinit import kinit_keytab
from ipapython import ipaldap, ipautil
from ipapython.dn import DN
from ipapython.dnsutil import DNSResolver
from ipapython.admintool import ScriptError
from ipapython.ipachangeconf import IPAChangeConf
from ipaplatform import services
from ipaplatform.tasks import tasks
from ipaplatform.paths import paths
from ipalib import api, constants, create_api, errors, rpc, x509
from ipalib.config import Env
from ipalib.facts import is_ipa_configured, is_ipa_client_configured
from ipalib.util import no_matching_interface_for_ip_address_warning
from ipaclient.install.client import configure_krb5_conf, purge_host_keytab
from ipaserver.install import (
adtrust, bindinstance, ca, dns, dsinstance, httpinstance,
installutils, kra, krbinstance, otpdinstance, custodiainstance, service)
from ipaserver.install.installutils import (
ReplicaConfig, load_pkcs12, validate_mask)
from ipaserver.install.replication import (
ReplicationManager, replica_conn_check)
from ipaserver.masters import find_providing_servers, find_providing_server
import SSSDConfig
from subprocess import CalledProcessError
if six.PY3:
unicode = str
NoneType = type(None)
logger = logging.getLogger(__name__)
def get_dirman_password():
return installutils.read_password("Directory Manager (existing master)",
confirm=False, validate=False)
def make_pkcs12_info(directory, cert_name, password_name):
"""Make pkcs12_info
:param directory: Base directory (config.dir)
:param cert_name: Cert filename (e.g. "dscert.p12")
    :param password_name: Password filename (e.g. "dirsrv_pin.txt")
:return: a (full cert path, password) tuple, or None if cert is not found
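
    Example (hypothetical replica-info directory)::

        make_pkcs12_info("/tmp/replica-info", "dscert.p12", "dirsrv_pin.txt")
        # -> ("/tmp/replica-info/dscert.p12", "<contents of dirsrv_pin.txt>"),
        #    or None if dscert.p12 does not exist there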
"""
cert_path = os.path.join(directory, cert_name)
if os.path.isfile(cert_path):
password_file = os.path.join(directory, password_name)
password = open(password_file).read().strip()
return cert_path, password
else:
return None
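# Set up the Directory Server instance for this replica, using either a
# user-provided PKCS#12 bundle or a CA-issued certificate; the CA subject is
# looked up on the remote master when the topology has a CA.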
def install_replica_ds(config, options, ca_is_configured, remote_api,
ca_file, pkcs12_info=None, fstore=None):
dsinstance.check_ports()
# if we have a pkcs12 file, create the cert db from
# that. Otherwise the ds setup will create the CA
# cert
if pkcs12_info is None:
pkcs12_info = make_pkcs12_info(config.dir, "dscert.p12",
"dirsrv_pin.txt")
if ca_is_configured:
ca_subject = ca.lookup_ca_subject(remote_api, config.subject_base)
else:
ca_subject = installutils.default_ca_subject_dn(config.subject_base)
ds = dsinstance.DsInstance(
config_ldif=options.dirsrv_config_file,
fstore=fstore)
ds.create_replica(
realm_name=config.realm_name,
master_fqdn=config.master_host_name,
fqdn=config.host_name,
domain_name=config.domain_name,
dm_password=config.dirman_password,
subject_base=config.subject_base,
ca_subject=ca_subject,
pkcs12_info=pkcs12_info,
ca_is_configured=ca_is_configured,
ca_file=ca_file,
api=remote_api,
setup_pkinit=not options.no_pkinit,
)
return ds
def install_krb(config, setup_pkinit=False, pkcs12_info=None, fstore=None):
krb = krbinstance.KrbInstance(fstore=fstore)
# pkinit files
if pkcs12_info is None:
pkcs12_info = make_pkcs12_info(config.dir, "pkinitcert.p12",
"pkinit_pin.txt")
krb.create_replica(config.realm_name,
config.master_host_name, config.host_name,
config.domain_name, config.dirman_password,
setup_pkinit, pkcs12_info,
subject_base=config.subject_base)
return krb
def install_ca_cert(ldap, base_dn, realm, cafile, destfile=paths.IPA_CA_CRT):
try:
try:
certs = certstore.get_ca_certs(ldap, base_dn, realm, False)
except errors.NotFound:
try:
shutil.copy(cafile, destfile)
except shutil.Error:
# cafile == IPA_CA_CRT
pass
else:
certs = [c[0] for c in certs if c[2] is not False]
x509.write_certificate_list(certs, destfile, mode=0o644)
except Exception as e:
raise ScriptError("error copying files: " + str(e))
return destfile
def install_http(config, auto_redirect, ca_is_configured, ca_file,
pkcs12_info=None, fstore=None):
# if we have a pkcs12 file, create the cert db from
# that. Otherwise the ds setup will create the CA
# cert
if pkcs12_info is None:
pkcs12_info = make_pkcs12_info(config.dir, "httpcert.p12",
"http_pin.txt")
http = httpinstance.HTTPInstance(fstore=fstore)
http.create_instance(
config.realm_name, config.host_name, config.domain_name,
config.dirman_password, pkcs12_info,
auto_redirect=auto_redirect, ca_file=ca_file,
ca_is_configured=ca_is_configured, promote=True,
subject_base=config.subject_base, master_fqdn=config.master_host_name)
return http
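# Add the replica's forward and reverse DNS records on the remote master.
# This is a no-op when the domain has no IPA-managed DNS, and failures are
# only logged so they never abort the installation.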
def install_dns_records(config, options, remote_api, fstore=None):
if not bindinstance.dns_container_exists(
ipautil.realm_to_suffix(config.realm_name)):
return
try:
bind = bindinstance.BindInstance(api=remote_api, fstore=fstore)
for ip in config.ips:
reverse_zone = bindinstance.find_reverse_zone(ip, remote_api)
bind.add_master_dns_records(config.host_name,
[str(ip)],
config.realm_name,
config.domain_name,
reverse_zone)
except errors.NotFound as e:
logger.debug('Replica DNS records could not be added '
'on master: %s', str(e))
# we should not fail here no matter what
except Exception as e:
logger.info('Replica DNS records could not be added '
'on master: %s', str(e))
def create_ipa_conf(fstore, config, ca_enabled, master=None):
"""
Create /etc/ipa/default.conf master configuration
:param fstore: sysrestore file store used for backup and restore of
the server configuration
:param config: replica config
:param ca_enabled: True if the topology includes a CA
:param master: if set, the xmlrpc_uri parameter will use the provided
master instead of this host
"""
# Save client file on Domain Level 1
target_fname = paths.IPA_DEFAULT_CONF
fstore.backup_file(target_fname)
ipaconf = IPAChangeConf("IPA Replica Install")
ipaconf.setOptionAssignment(" = ")
ipaconf.setSectionNameDelimiters(("[", "]"))
if master:
xmlrpc_uri = 'https://{0}/ipa/xml'.format(
ipautil.format_netloc(master))
else:
xmlrpc_uri = 'https://{0}/ipa/xml'.format(
ipautil.format_netloc(config.host_name))
ldapi_uri = ipaldap.realm_to_ldapi_uri(config.realm_name)
# [global] section
gopts = [
ipaconf.setOption('basedn', str(config.basedn)),
ipaconf.setOption('host', config.host_name),
ipaconf.setOption('realm', config.realm_name),
ipaconf.setOption('domain', config.domain_name),
ipaconf.setOption('xmlrpc_uri', xmlrpc_uri),
ipaconf.setOption('ldap_uri', ldapi_uri),
ipaconf.setOption('mode', 'production')
]
if ca_enabled:
gopts.extend([
ipaconf.setOption('enable_ra', 'True'),
ipaconf.setOption('ra_plugin', 'dogtag'),
ipaconf.setOption('dogtag_version', '10')
])
if not config.setup_ca:
gopts.append(ipaconf.setOption('ca_host', config.ca_host_name))
else:
gopts.extend([
ipaconf.setOption('enable_ra', 'False'),
ipaconf.setOption('ra_plugin', 'None')
])
opts = [
ipaconf.setSection('global', gopts),
{'name': 'empty', 'type': 'empty'}
]
ipaconf.newConf(target_fname, opts)
# the new file must be readable for httpd
# Also, umask applies when creating a new file but we want 0o644 here
os.chmod(target_fname, 0o644)
def check_dirsrv():
(ds_unsecure, ds_secure) = dsinstance.check_ports()
if not ds_unsecure or not ds_secure:
msg = ("IPA requires ports 389 and 636 for the Directory Server.\n"
"These are currently in use:\n")
if not ds_unsecure:
msg += "\t389\n"
if not ds_secure:
msg += "\t636\n"
raise ScriptError(msg)
def check_dns_resolution(host_name, dns_servers):
"""Check forward and reverse resolution of host_name using dns_servers
"""
# Point the resolver at specified DNS server
server_ips = []
for dns_server in dns_servers:
try:
server_ips = list(
a[4][0] for a in socket.getaddrinfo(dns_server, None))
except socket.error:
pass
else:
break
if not server_ips:
logger.error(
'Could not resolve any DNS server hostname: %s', dns_servers)
return False
resolver = DNSResolver()
resolver.nameservers = server_ips
logger.debug('Search DNS server %s (%s) for %s',
dns_server, server_ips, host_name)
# Get IP addresses of host_name
addresses = set()
for rtype in 'A', 'AAAA':
try:
result = resolver.resolve(host_name, rtype)
except dnsexception.DNSException:
rrset = []
else:
rrset = result.rrset
if rrset:
addresses.update(r.address for r in result.rrset)
if not addresses:
logger.error(
'Could not resolve hostname %s using DNS. '
'Clients may not function properly. '
'Please check your DNS setup. '
'(Note that this check queries IPA DNS directly and '
'ignores /etc/hosts.)',
host_name)
return False
no_errors = True
# Check each of the IP addresses
checked = set()
for address in addresses:
if address in checked:
continue
checked.add(address)
try:
logger.debug('Check reverse address %s (%s)', address, host_name)
rrset = resolver.resolve_address(address).rrset
except Exception as e:
logger.debug('Check failed: %s %s', type(e).__name__, e)
logger.error(
'Reverse DNS resolution of address %s (%s) failed. '
'Clients may not function properly. '
'Please check your DNS setup. '
'(Note that this check queries IPA DNS directly and '
'ignores /etc/hosts.)',
address, host_name)
no_errors = False
else:
host_name_obj = dnsname.from_text(host_name)
if rrset:
names = [r.target.to_text() for r in rrset]
else:
names = []
logger.debug(
'Address %s resolves to: %s. ', address, ', '.join(names))
if not rrset or not any(
r.target == host_name_obj for r in rrset):
logger.error(
'The IP address %s of host %s resolves to: %s. '
'Clients may not function properly. '
'Please check your DNS setup. '
'(Note that this check queries IPA DNS directly and '
'ignores /etc/hosts.)',
address, host_name, ', '.join(names))
no_errors = False
return no_errors
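# Make sure D-Bus and certmonger are running and enabled; certmonger has to
# start at least once so its CA helper files in /var/lib/certmonger/cas exist
# before certificates are requested later in the installation.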
def configure_certmonger():
dbus = services.knownservices.dbus
if not dbus.is_running():
# some platforms protect dbus with RefuseManualStart=True
try:
dbus.start()
except Exception as e:
raise ScriptError("dbus service unavailable: %s" % str(e),
rval=3)
# Ensure that certmonger has been started at least once to generate the
# cas files in /var/lib/certmonger/cas.
cmonger = services.knownservices.certmonger
try:
cmonger.restart()
except Exception as e:
raise ScriptError("Certmonger service unavailable: %s" % str(e),
rval=3)
try:
cmonger.enable()
except Exception as e:
raise ScriptError("Failed to enable Certmonger: %s" % str(e),
rval=3)
def remove_replica_info_dir(installer):
# always try to remove decrypted replica file
try:
if installer._top_dir is not None:
shutil.rmtree(installer._top_dir)
except OSError:
pass
def common_cleanup(func):
def decorated(installer):
try:
try:
func(installer)
except BaseException:
remove_replica_info_dir(installer)
raise
except KeyboardInterrupt:
raise ScriptError()
except Exception:
print(
"Your system may be partly configured.\n"
"Run /usr/sbin/ipa-server-install --uninstall to clean up.\n")
raise
return decorated
def preserve_enrollment_state(func):
"""
    Makes sure the machine is unenrolled if the decorated function
failed.
"""
def decorated(installer):
try:
func(installer)
except BaseException:
if installer._enrollment_performed:
uninstall_client()
raise
return decorated
def uninstall_client():
"""
Attempts to unenroll the IPA client using the ipa-client-install utility.
An unsuccessful attempt to uninstall is ignored (no exception raised).
"""
print("Removing client side components")
ipautil.run([paths.IPA_CLIENT_INSTALL, "--unattended", "--uninstall"],
raiseonerr=False, redirect_output=True)
print()
def promote_sssd(host_name):
sssdconfig = SSSDConfig.SSSDConfig()
sssdconfig.import_config()
domains = sssdconfig.list_active_domains()
for name in domains:
domain = sssdconfig.get_domain(name)
try:
hostname = domain.get_option('ipa_hostname')
if hostname == host_name:
break
except SSSDConfig.NoOptionError:
continue
else:
raise RuntimeError("Couldn't find IPA domain in sssd.conf")
domain.set_option('ipa_server', host_name)
domain.set_option('ipa_server_mode', True)
sssdconfig.save_domain(domain)
sssd_enable_ifp(sssdconfig)
sssdconfig.write()
sssd = services.service('sssd', api)
try:
sssd.restart()
except CalledProcessError:
logger.warning("SSSD service restart was unsuccessful.")
def promote_openldap_conf(hostname, master):
"""
Reset the URI directive in openldap-client configuration file to point to
newly promoted replica. If this directive was set by third party, then
replace the added comment with the one pointing to replica
:param hostname: replica FQDN
:param master: FQDN of remote master
"""
ldap_conf = paths.OPENLDAP_LDAP_CONF
ldap_change_conf = IPAChangeConf("IPA replica installer")
ldap_change_conf.setOptionAssignment((" ", "\t"))
new_opts = []
with open(ldap_conf, 'r') as f:
old_opts = ldap_change_conf.parse(f)
for opt in old_opts:
if opt['type'] == 'comment' and master in opt['value']:
continue
if (opt['type'] == 'option' and opt['name'] == 'URI' and
master in opt['value']):
continue
new_opts.append(opt)
change_opts = [
{'action': 'addifnotset',
'name': 'URI',
'type': 'option',
'value': 'ldaps://' + hostname}
]
try:
ldap_change_conf.newConf(ldap_conf, new_opts)
ldap_change_conf.changeConf(ldap_conf, change_opts)
except Exception as e:
logger.info("Failed to update %s: %s", ldap_conf, e)
@contextlib.contextmanager
def rpc_client(api):
"""
Context manager for JSON RPC client.
:param api: api to initiate the RPC client
"""
client = rpc.jsonclient(api)
client.finalize()
client.connect()
try:
yield client
finally:
client.disconnect()
def check_remote_fips_mode(client, local_fips_mode):
"""
Verify remote server's fips-mode is the same as this server's fips-mode
:param client: RPC client
:param local_fips_mode: boolean indicating whether FIPS mode is turned on
    :raises: ScriptError: if the check fails
"""
env = client.forward(u'env', u'fips_mode')['result']
remote_fips_mode = env.get('fips_mode', False)
if local_fips_mode != remote_fips_mode:
if local_fips_mode:
raise ScriptError(
"Cannot join FIPS-enabled replica into existing topology: "
"FIPS is not enabled on the master server.")
else:
raise ScriptError(
"Cannot join replica into existing FIPS-enabled topology: "
"FIPS has to be enabled locally first.")
def check_remote_version(client, local_version):
"""
Verify remote server's version is not higher than this server's version
:param client: RPC client
:param local_version: API version of local server
    :raises: ScriptError: if the check fails
"""
env = client.forward(u'env', u'version')['result']
remote_version = parse_version(env['version'])
if remote_version > local_version:
raise ScriptError(
"Cannot install replica of a server of higher version ({}) than "
"the local version ({})".format(remote_version, local_version))
def common_check(no_ntp, skip_mem_check, setup_ca):
if not skip_mem_check:
installutils.check_available_memory(ca=setup_ca)
tasks.check_ipv6_stack_enabled()
tasks.check_selinux_status()
check_ldap_conf()
mask_str = validate_mask()
if mask_str:
raise ScriptError(
"Unexpected system mask: %s, expected 0022" % mask_str)
if is_ipa_configured():
raise ScriptError(
"IPA server is already configured on this system.\n"
"If you want to reinstall the IPA server, please uninstall "
"it first using 'ipa-server-install --uninstall'.")
check_dirsrv()
if not no_ntp:
try:
ipaclient.install.timeconf.check_timedate_services()
except ipaclient.install.timeconf.NTPConflictingService as e:
print("WARNING: conflicting time&date synchronization service "
"'{svc}' will\nbe disabled in favor of chronyd\n"
.format(svc=e.conflicting_service))
except ipaclient.install.timeconf.NTPConfigurationError:
pass
def current_domain_level(api):
"""Return the current domain level.
"""
# Detect the current domain level
try:
return api.Command['domainlevel_get']()['result']
except errors.NotFound:
# If we're joining an older master, domain entry is not
# available
return constants.DOMAIN_LEVEL_0
def check_domain_level_is_supported(current):
"""Check that the given domain level is supported by this server version.
:raises: ScriptError if DL is out of supported range for this IPA version.
"""
under_lower_bound = current < constants.MIN_DOMAIN_LEVEL
above_upper_bound = current > constants.MAX_DOMAIN_LEVEL
if under_lower_bound or above_upper_bound:
message = ("This version of FreeIPA does not support "
"the Domain Level which is currently set for "
"this domain. The Domain Level needs to be "
"raised before installing a replica with "
"this version is allowed to be installed "
"within this domain.")
logger.error("%s", message)
raise ScriptError(message, rval=3)
def enroll_dl0_replica(installer, fstore, remote_api, debug=False):
"""
Do partial host enrollment in DL0:
* add host entry to remote master
* request host keytab from remote master
* configure client-like /etc/krb5.conf to enable GSSAPI auth further
down the replica installation
"""
logger.info("Enrolling host to IPA domain")
config = installer._config
hostname = config.host_name
try:
installer._enrollment_performed = True
host_result = remote_api.Command.host_add(
unicode(config.host_name), force=installer.no_host_dns
)['result']
host_princ = unicode(host_result['krbcanonicalname'][0])
purge_host_keytab(config.realm_name)
getkeytab_args = [
paths.IPA_GETKEYTAB,
'-s', config.master_host_name,
'-p', host_princ,
'-D', unicode(ipaldap.DIRMAN_DN),
'-w', config.dirman_password,
'-k', paths.KRB5_KEYTAB,
'--cacert', os.path.join(config.dir, 'ca.crt')
]
ipautil.run(getkeytab_args, nolog=(config.dirman_password,))
_hostname, _sep, host_domain = hostname.partition('.')
fstore.backup_file(paths.KRB5_CONF)
configure_krb5_conf(
config.realm_name,
config.domain_name,
[config.master_host_name],
[config.master_host_name],
False,
paths.KRB5_CONF,
host_domain,
hostname,
configure_sssd=False
)
except CalledProcessError as e:
raise RuntimeError("Failed to fetch host keytab: {}".format(e))
def ensure_enrolled(installer):
args = [paths.IPA_CLIENT_INSTALL, "--unattended"]
stdin = None
nolog = []
if installer.domain_name:
args.extend(["--domain", installer.domain_name])
if installer.server:
args.extend(["--server", installer.server])
if installer.realm_name:
args.extend(["--realm", installer.realm_name])
if installer.host_name:
args.extend(["--hostname", installer.host_name])
if installer.password:
args.extend(["--password", installer.password])
nolog.append(installer.password)
else:
if installer.admin_password:
# Always set principal if password was set explicitly,
# the password itself gets passed directly via stdin
args.extend(["--principal", installer.principal or "admin"])
stdin = installer.admin_password
if installer.keytab:
args.extend(["--keytab", installer.keytab])
if installer.no_dns_sshfp:
args.append("--no-dns-sshfp")
if installer.ssh_trust_dns:
args.append("--ssh-trust-dns")
if installer.no_ssh:
args.append("--no-ssh")
if installer.no_sshd:
args.append("--no-sshd")
if installer.mkhomedir:
args.append("--mkhomedir")
if installer.force_join:
args.append("--force-join")
if installer.no_ntp:
args.append("--no-ntp")
if installer.ip_addresses:
for ip in installer.ip_addresses:
# installer.ip_addresses is of type [CheckedIPAddress]
args.extend(("--ip-address", str(ip)))
if installer.ntp_servers:
for server in installer.ntp_servers:
args.extend(("--ntp-server", server))
if installer.ntp_pool:
args.extend(("--ntp-pool", installer.ntp_pool))
try:
# Call client install script
service.print_msg("Configuring client side components")
installer._enrollment_performed = True
ipautil.run(args, stdin=stdin, nolog=nolog, redirect_output=True)
print()
except ipautil.CalledProcessError:
raise ScriptError("Configuration of client side components failed!")
def promotion_check_ipa_domain(master_ldap_conn, basedn):
entry = master_ldap_conn.get_entry(basedn, ['associatedDomain'])
if 'associatedDomain' not in entry:
raise RuntimeError('IPA domain not found in LDAP.')
if len(entry['associatedDomain']) > 1:
logger.critical(
"Multiple IPA domains found. We are so sorry :-(, you are "
"probably experiencing this bug "
"https://fedorahosted.org/freeipa/ticket/5976. Please contact us "
"for help.")
raise RuntimeError(
'Multiple IPA domains found in LDAP database ({domains}). '
'Only one domain is allowed.'.format(
domains=u', '.join(entry['associatedDomain'])
))
if entry['associatedDomain'][0] != api.env.domain:
raise RuntimeError(
"Cannot promote this client to a replica. Local domain "
"'{local}' does not match IPA domain '{ipadomain}'. ".format(
local=api.env.domain,
ipadomain=entry['associatedDomain'][0]
))
@common_cleanup
@preserve_enrollment_state
def promote_check(installer):
options = installer
installer._enrollment_performed = False
installer._top_dir = tempfile.mkdtemp("ipa")
# check selinux status, http and DS ports, NTP conflicting services
common_check(options.no_ntp, options.skip_mem_check, options.setup_ca)
if options.setup_ca and any([options.dirsrv_cert_files,
options.http_cert_files,
options.pkinit_cert_files]):
raise ScriptError("--setup-ca and --*-cert-file options are "
"mutually exclusive")
if not is_ipa_client_configured(on_master=True):
# One-step replica installation
if options.password and options.admin_password:
raise ScriptError("--password and --admin-password options are "
"mutually exclusive")
ensure_enrolled(installer)
else:
if (options.domain_name or options.server or options.realm_name or
options.host_name or options.password or options.keytab):
print("IPA client is already configured on this system, ignoring "
"the --domain, --server, --realm, --hostname, --password "
"and --keytab options.")
# Make sure options.server is not used
options.server = None
# The NTP configuration can not be touched on pre-installed client:
if options.no_ntp or options.ntp_servers or options.ntp_pool:
raise ScriptError(
"NTP configuration cannot be updated during promotion")
sstore = sysrestore.StateFile(paths.SYSRESTORE)
fstore = sysrestore.FileStore(paths.SYSRESTORE)
env = Env()
env._bootstrap(context='installer', confdir=paths.ETC_IPA, log=None)
env._finalize_core(**dict(constants.DEFAULT_CONFIG))
# pylint: disable=no-member
xmlrpc_uri = 'https://{}/ipa/xml'.format(ipautil.format_netloc(env.host))
api.bootstrap(in_server=True,
context='installer',
confdir=paths.ETC_IPA,
ldap_uri=ipaldap.realm_to_ldapi_uri(env.realm),
xmlrpc_uri=xmlrpc_uri)
# pylint: enable=no-member
api.finalize()
config = ReplicaConfig()
config.realm_name = api.env.realm
config.host_name = api.env.host
config.domain_name = api.env.domain
config.master_host_name = api.env.server
if not api.env.ca_host or api.env.ca_host == api.env.host:
# ca_host has not been configured explicitly, prefer source master
config.ca_host_name = api.env.server
else:
# default to ca_host from IPA config
config.ca_host_name = api.env.ca_host
config.kra_host_name = config.ca_host_name
config.ca_ds_port = 389
config.setup_ca = options.setup_ca
config.setup_kra = options.setup_kra
config.dir = installer._top_dir
config.basedn = api.env.basedn
config.hidden_replica = options.hidden_replica
http_pkcs12_file = None
http_pkcs12_info = None
http_ca_cert = None
dirsrv_pkcs12_file = None
dirsrv_pkcs12_info = None
dirsrv_ca_cert = None
pkinit_pkcs12_file = None
pkinit_pkcs12_info = None
pkinit_ca_cert = None
if options.http_cert_files:
if options.http_pin is None:
options.http_pin = installutils.read_password(
"Enter Apache Server private key unlock",
confirm=False, validate=False, retry=False)
if options.http_pin is None:
raise ScriptError(
"Apache Server private key unlock password required")
http_pkcs12_file, http_pin, http_ca_cert = load_pkcs12(
cert_files=options.http_cert_files,
key_password=options.http_pin,
key_nickname=options.http_cert_name,
ca_cert_files=options.ca_cert_files,
host_name=config.host_name)
http_pkcs12_info = (http_pkcs12_file.name, http_pin)
if options.dirsrv_cert_files:
if options.dirsrv_pin is None:
options.dirsrv_pin = installutils.read_password(
"Enter Directory Server private key unlock",
confirm=False, validate=False, retry=False)
if options.dirsrv_pin is None:
raise ScriptError(
"Directory Server private key unlock password required")
dirsrv_pkcs12_file, dirsrv_pin, dirsrv_ca_cert = load_pkcs12(
cert_files=options.dirsrv_cert_files,
key_password=options.dirsrv_pin,
key_nickname=options.dirsrv_cert_name,
ca_cert_files=options.ca_cert_files,
host_name=config.host_name)
dirsrv_pkcs12_info = (dirsrv_pkcs12_file.name, dirsrv_pin)
if options.pkinit_cert_files:
if options.pkinit_pin is None:
options.pkinit_pin = installutils.read_password(
"Enter Kerberos KDC private key unlock",
confirm=False, validate=False, retry=False)
if options.pkinit_pin is None:
raise ScriptError(
"Kerberos KDC private key unlock password required")
pkinit_pkcs12_file, pkinit_pin, pkinit_ca_cert = load_pkcs12(
cert_files=options.pkinit_cert_files,
key_password=options.pkinit_pin,
key_nickname=options.pkinit_cert_name,
ca_cert_files=options.ca_cert_files,
realm_name=config.realm_name)
pkinit_pkcs12_info = (pkinit_pkcs12_file.name, pkinit_pin)
if (options.http_cert_files and options.dirsrv_cert_files and
http_ca_cert != dirsrv_ca_cert):
raise RuntimeError("Apache Server SSL certificate and Directory "
"Server SSL certificate are not signed by the same"
" CA certificate")
if (options.http_cert_files and
options.pkinit_cert_files and
http_ca_cert != pkinit_ca_cert):
raise RuntimeError("Apache Server SSL certificate and PKINIT KDC "
"certificate are not signed by the same CA "
"certificate")
installutils.verify_fqdn(config.host_name, options.no_host_dns)
# Inside the container environment master's IP address does not
# resolve to its name. See https://pagure.io/freeipa/issue/6210
container_environment = tasks.detect_container() is not None
installutils.verify_fqdn(config.master_host_name, options.no_host_dns,
local_hostname=not container_environment)
ccache = os.environ['KRB5CCNAME']
kinit_keytab('host/{env.host}@{env.realm}'.format(env=api.env),
paths.KRB5_KEYTAB,
ccache)
cafile = paths.IPA_CA_CRT
if not os.path.isfile(cafile):
raise RuntimeError("CA cert file is not available! Please reinstall"
"the client and try again.")
ldapuri = 'ldaps://%s' % ipautil.format_netloc(config.master_host_name)
xmlrpc_uri = 'https://{}/ipa/xml'.format(
ipautil.format_netloc(config.master_host_name))
remote_api = create_api(mode=None)
remote_api.bootstrap(in_server=True,
context='installer',
confdir=paths.ETC_IPA,
ldap_uri=ldapuri,
xmlrpc_uri=xmlrpc_uri)
remote_api.finalize()
installer._remote_api = remote_api
with rpc_client(remote_api) as client:
check_remote_version(client, parse_version(api.env.version))
check_remote_fips_mode(client, api.env.fips_mode)
conn = remote_api.Backend.ldap2
replman = None
try:
# Try out authentication
conn.connect(ccache=ccache)
replman = ReplicationManager(config.realm_name,
config.master_host_name, None)
promotion_check_ipa_domain(conn, remote_api.env.basedn)
# Make sure that domain fulfills minimal domain level
# requirement
domain_level = current_domain_level(remote_api)
check_domain_level_is_supported(domain_level)
if domain_level < constants.MIN_DOMAIN_LEVEL:
raise RuntimeError(
"Cannot promote this client to a replica. The domain level "
"must be raised to {mindomainlevel} before the replica can be "
"installed".format(
mindomainlevel=constants.MIN_DOMAIN_LEVEL
))
# Check authorization
result = remote_api.Command['hostgroup_find'](
cn=u'ipaservers',
host=[unicode(api.env.host)]
)['result']
add_to_ipaservers = not result
if add_to_ipaservers:
if options.password and not options.admin_password:
raise errors.ACIError(info="Not authorized")
if installer._ccache is None:
del os.environ['KRB5CCNAME']
else:
os.environ['KRB5CCNAME'] = installer._ccache
try:
installutils.check_creds(options, config.realm_name)
installer._ccache = os.environ.get('KRB5CCNAME')
finally:
os.environ['KRB5CCNAME'] = ccache
conn.disconnect()
conn.connect(ccache=installer._ccache)
try:
result = remote_api.Command['hostgroup_show'](
u'ipaservers',
all=True,
rights=True
)['result']
if 'w' not in result['attributelevelrights']['member']:
raise errors.ACIError(info="Not authorized")
finally:
conn.disconnect()
conn.connect(ccache=ccache)
# Check that we don't already have a replication agreement
if replman.get_replication_agreement(config.host_name):
msg = ("A replication agreement for this host already exists. "
"It needs to be removed.\n"
"Run this command:\n"
" %% ipa-replica-manage del {host} --force"
.format(host=config.host_name))
raise ScriptError(msg, rval=3)
# Detect if the other master can handle replication managers
# cn=replication managers,cn=sysaccounts,cn=etc,$SUFFIX
dn = DN(('cn', 'replication managers'),
api.env.container_sysaccounts,
ipautil.realm_to_suffix(config.realm_name))
try:
conn.get_entry(dn)
except errors.NotFound:
msg = ("The Replication Managers group is not available in "
"the domain. Replica promotion requires the use of "
"Replication Managers to be able to replicate data. "
"Upgrade the peer master or use the ipa-replica-prepare "
"command on the master and use a prep file to install "
"this replica.")
logger.error("%s", msg)
raise ScriptError(rval=3)
dns_masters = remote_api.Object['dnsrecord'].get_dns_masters()
if dns_masters:
if not options.no_host_dns:
logger.debug('Check forward/reverse DNS resolution')
resolution_ok = (
check_dns_resolution(config.master_host_name,
dns_masters) and
check_dns_resolution(config.host_name, dns_masters))
if not resolution_ok and installer.interactive:
if not ipautil.user_input("Continue?", False):
raise ScriptError(rval=0)
else:
logger.debug('No IPA DNS servers, '
'skipping forward/reverse resolution check')
entry_attrs = conn.get_ipa_config()
subject_base = entry_attrs.get('ipacertificatesubjectbase', [None])[0]
if subject_base is not None:
config.subject_base = DN(subject_base)
# Find any server with a CA
# The order of preference is
# 1. the first server specified in --server, if any
# 2. the server specified in the config file
# 3. any other
preferred_cas = [config.ca_host_name]
if options.server:
preferred_cas.insert(0, options.server)
ca_host = find_providing_server(
'CA', conn, preferred_cas
)
if ca_host is not None:
config.ca_host_name = ca_host
ca_enabled = True
if options.dirsrv_cert_files:
logger.error("Certificates could not be provided when "
"CA is present on some master.")
raise ScriptError(rval=3)
if options.setup_ca and options.server and \
ca_host != options.server:
# Installer was provided with a specific master
# but this one doesn't provide CA
logger.error("The specified --server %s does not provide CA, "
"please provide a server with the CA role",
options.server)
raise ScriptError(rval=4)
else:
if options.setup_ca:
logger.error("The remote master does not have a CA "
"installed, can't set up CA")
raise ScriptError(rval=3)
ca_enabled = False
if not options.dirsrv_cert_files:
logger.error("Cannot issue certificates: a CA is not "
"installed. Use the --http-cert-file, "
"--dirsrv-cert-file options to provide "
"custom certificates.")
raise ScriptError(rval=3)
# Find any server with a KRA
# The order of preference is
# 1. the first server specified in --server, if any
# 2. the server specified in the config file
# 3. any other
preferred_kras = [config.kra_host_name]
if options.server:
preferred_kras.insert(0, options.server)
kra_host = find_providing_server(
'KRA', conn, preferred_kras
)
if kra_host is not None:
config.kra_host_name = kra_host
kra_enabled = True
if options.setup_kra and options.server and \
kra_host != options.server:
# Installer was provided with a specific master
# but this one doesn't provide KRA
logger.error("The specified --server %s does not provide KRA, "
"please provide a server with the KRA role",
options.server)
raise ScriptError(rval=4)
else:
if options.setup_kra:
logger.error("There is no active KRA server in the domain, "
"can't setup a KRA clone")
raise ScriptError(rval=3)
kra_enabled = False
if ca_enabled:
options.realm_name = config.realm_name
options.host_name = config.host_name
ca.install_check(False, config, options)
if kra_enabled:
try:
kra.install_check(remote_api, config, options)
except RuntimeError as e:
raise ScriptError(e)
if options.setup_dns:
dns.install_check(False, remote_api, True, options,
config.host_name)
config.ips = dns.ip_addresses
else:
config.ips = installutils.get_server_ip_address(
config.host_name, not installer.interactive,
False, options.ip_addresses)
# check addresses here, dns module is doing own check
no_matching_interface_for_ip_address_warning(config.ips)
if options.setup_adtrust:
adtrust.install_check(False, options, remote_api)
except errors.ACIError:
logger.debug("%s", traceback.format_exc())
raise ScriptError("\nInsufficient privileges to promote the server."
"\nPossible issues:"
"\n- A user has insufficient privileges"
"\n- This client has insufficient privileges "
"to become an IPA replica")
except errors.LDAPError:
logger.debug("%s", traceback.format_exc())
raise ScriptError("\nUnable to connect to LDAP server %s" %
config.master_host_name)
finally:
if replman and replman.conn:
replman.conn.unbind()
if conn.isconnected():
conn.disconnect()
# check connection
if not options.skip_conncheck:
if add_to_ipaservers:
# use user's credentials when the server host is not ipaservers
if installer._ccache is None:
del os.environ['KRB5CCNAME']
else:
os.environ['KRB5CCNAME'] = installer._ccache
try:
replica_conn_check(
config.master_host_name, config.host_name, config.realm_name,
options.setup_ca, 389,
options.admin_password, principal=options.principal,
ca_cert_file=cafile)
finally:
if add_to_ipaservers:
os.environ['KRB5CCNAME'] = ccache
installer._ca_enabled = ca_enabled
installer._kra_enabled = kra_enabled
installer._ca_file = cafile
installer._fstore = fstore
installer._sstore = sstore
installer._config = config
installer._add_to_ipaservers = add_to_ipaservers
installer._dirsrv_pkcs12_file = dirsrv_pkcs12_file
installer._dirsrv_pkcs12_info = dirsrv_pkcs12_info
installer._http_pkcs12_file = http_pkcs12_file
installer._http_pkcs12_info = http_pkcs12_info
installer._pkinit_pkcs12_file = pkinit_pkcs12_file
installer._pkinit_pkcs12_info = pkinit_pkcs12_info
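# Second phase of replica promotion: consume the state collected by
# promote_check() and configure DS, Kerberos, HTTP, the OTP daemon,
# Custodia and, where requested, CA, KRA, DNS and AD trust, then enable
# (or hide) the new server's services.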
@common_cleanup
def install(installer):
options = installer
ca_enabled = installer._ca_enabled
kra_enabled = installer._kra_enabled
fstore = installer._fstore
sstore = installer._sstore
config = installer._config
cafile = installer._ca_file
dirsrv_pkcs12_info = installer._dirsrv_pkcs12_info
http_pkcs12_info = installer._http_pkcs12_info
pkinit_pkcs12_info = installer._pkinit_pkcs12_info
remote_api = installer._remote_api
conn = remote_api.Backend.ldap2
ccache = os.environ['KRB5CCNAME']
# Be clear that the installation process is beginning but not done
sstore.backup_state('installation', 'complete', False)
if tasks.configure_pkcs11_modules(fstore):
print("Disabled p11-kit-proxy")
if installer._add_to_ipaservers:
try:
conn.connect(ccache=installer._ccache)
remote_api.Command['hostgroup_add_member'](
u'ipaservers',
host=[unicode(api.env.host)],
)
finally:
if conn.isconnected():
conn.disconnect()
os.environ['KRB5CCNAME'] = ccache
config.dirman_password = ipautil.ipa_generate_password()
# FIXME: allow to use passed in certs instead
if ca_enabled:
configure_certmonger()
try:
conn.connect(ccache=ccache)
        # Update and install the updated CA file
cafile = install_ca_cert(conn, api.env.basedn, api.env.realm, cafile)
install_ca_cert(conn, api.env.basedn, api.env.realm, cafile,
destfile=paths.KDC_CA_BUNDLE_PEM)
install_ca_cert(conn, api.env.basedn, api.env.realm, cafile,
destfile=paths.CA_BUNDLE_PEM)
# Configure dirsrv
ds = install_replica_ds(config, options, ca_enabled,
remote_api,
ca_file=cafile,
pkcs12_info=dirsrv_pkcs12_info,
fstore=fstore)
# Always try to install DNS records
install_dns_records(config, options, remote_api, fstore=fstore)
finally:
if conn.isconnected():
conn.disconnect()
    # Create the management framework config file. Do this regardless
    # of the state of DS installation. Even if it fails,
    # we need to have master-like configuration in order to perform a
    # successful uninstallation.
# The configuration creation has to be here otherwise previous call
# To config certmonger would try to connect to local server
create_ipa_conf(fstore, config, ca_enabled)
krb = install_krb(
config,
setup_pkinit=not options.no_pkinit,
pkcs12_info=pkinit_pkcs12_info,
fstore=fstore)
# We need to point to the master when certmonger asks for
# a DS or HTTP certificate.
# During http installation, the <service>/hostname principal is
# created locally then the installer waits for the entry to appear
# on the master selected for the installation.
# In a later step, the installer requests a SSL certificate through
# Certmonger (and the op adds the principal if it does not exist yet).
# If xmlrpc_uri points to the soon-to-be replica,
# the httpd service is not ready yet to handle certmonger requests
# and certmonger tries to find another master. The master can be
# different from the one selected for the installation, and it is
# possible that the principal has not been replicated yet. This
# may lead to a replication conflict.
# This is why we need to force the use of the same master by
# setting xmlrpc_uri
create_ipa_conf(fstore, config, ca_enabled,
master=config.master_host_name)
# we now need to enable ssl on the ds
ds.enable_ssl()
install_http(
config,
auto_redirect=not options.no_ui_redirect,
pkcs12_info=http_pkcs12_info,
ca_is_configured=ca_enabled,
ca_file=cafile,
fstore=fstore)
# Need to point back to ourself after the cert for HTTP is obtained
create_ipa_conf(fstore, config, ca_enabled)
otpd = otpdinstance.OtpdInstance()
otpd.create_instance('OTPD', config.host_name,
ipautil.realm_to_suffix(config.realm_name))
if kra_enabled:
# A KRA peer always provides a CA, too.
mode = custodiainstance.CustodiaModes.KRA_PEER
elif ca_enabled:
mode = custodiainstance.CustodiaModes.CA_PEER
else:
mode = custodiainstance.CustodiaModes.MASTER_PEER
custodia = custodiainstance.get_custodia_instance(config, mode)
custodia.create_instance()
if ca_enabled:
options.realm_name = config.realm_name
options.domain_name = config.domain_name
options.host_name = config.host_name
options.dm_password = config.dirman_password
ca.install(False, config, options, custodia=custodia)
# configure PKINIT now that all required services are in place
krb.enable_ssl()
# Apply any LDAP updates. Needs to be done after the replica is synced-up
service.print_msg("Applying LDAP updates")
ds.apply_updates()
service.print_msg("Finalize replication settings")
ds.finalize_replica_config()
if kra_enabled:
kra.install(api, config, options, custodia=custodia)
service.print_msg("Restarting the KDC")
krb.restart()
custodia.import_dm_password()
promote_sssd(config.host_name)
promote_openldap_conf(config.host_name, config.master_host_name)
if options.setup_dns:
dns.install(False, True, options, api)
if options.setup_adtrust:
adtrust.install(False, options, fstore, api)
if options.hidden_replica:
# Set services to hidden
service.hide_services(config.host_name)
else:
# Enable configured services
service.enable_services(config.host_name)
# update DNS SRV records. Although it's only really necessary in
# enabled-service case, also perform update in hidden replica case.
api.Command.dns_update_system_records()
if options.setup_adtrust:
dns_help = adtrust.generate_dns_service_records_help(api)
if dns_help:
for line in dns_help:
service.print_msg(line, sys.stdout)
ca_servers = find_providing_servers('CA', api.Backend.ldap2, api=api)
api.Backend.ldap2.disconnect()
# Everything installed properly, activate ipa service.
sstore.delete_state('installation', 'complete')
sstore.backup_state('installation', 'complete', True)
services.knownservices.ipa.enable()
# Print a warning if CA role is only installed on one server
if len(ca_servers) == 1:
msg = textwrap.dedent(u'''
WARNING: The CA service is only installed on one server ({}).
It is strongly recommended to install it on another server.
Run ipa-ca-install(1) on another master to accomplish this.
'''.format(ca_servers[0]))
print(msg, file=sys.stderr)
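# Reset per-run installer state (selected server, host password alias,
# credential cache and temporary-file bookkeeping) before the check and
# install phases run.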
def init(installer):
installer.unattended = not installer.interactive
if installer.servers:
installer.server = installer.servers[0]
else:
installer.server = None
installer.password = installer.host_password
installer._ccache = os.environ.get('KRB5CCNAME')
installer._top_dir = None
installer._config = None
installer._update_hosts_file = False
installer._dirsrv_pkcs12_file = None
installer._http_pkcs12_file = None
installer._pkinit_pkcs12_file = None
installer._dirsrv_pkcs12_info = None
installer._http_pkcs12_info = None
installer._pkinit_pkcs12_info = None
|
encukou/freeipa
|
ipaserver/install/server/replicainstall.py
|
Python
|
gpl-3.0
| 51,681 |
from PyQt5.QtWidgets import QLabel, QLineEdit, QFormLayout
from .abstract_settings_widget import AbstractSettingsWidget
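# Settings pane for the imgur client id/secret and an optional RapidAPI
# (formerly Mashape) key; values are loaded from and saved to the shared
# settings object.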
class ImgurSettingsWidget(AbstractSettingsWidget):
def __init__(self):
super().__init__(init_ui=False)
self.setWindowTitle('Imgur Settings')
self.client_id_line_edit = QLineEdit()
self.client_secret_line_edit = QLineEdit()
self.mashape_key_line_edit = QLineEdit()
layout = QFormLayout()
self.setLayout(layout)
layout.addRow(QLabel('Imgur client id:'), self.client_id_line_edit)
layout.addRow(QLabel('Imgur client secret:'), self.client_secret_line_edit)
layout.addRow(QLabel('RapidAPI key:'), self.mashape_key_line_edit)
@property
def description(self):
        return 'Enter the credentials provided to you by imgur when you registered your client. If you do not ' \
'yet have these credentials, instructions on how to register can be found ' \
'<a href="https://github.com/MalloyDelacroix/DownloaderForReddit#imgur-posts">here.</a>' \
'<br><br>' \
'If the standard API is not enough for your needs, you can get commercial API credentials' \
' from <a href="https://rapidapi.com/imgur/api/imgur-9/pricing">RapidAPI</a>. They have a free tier ' \
               'with 100,000 requests/month and larger paid tiers.'
def load_settings(self):
self.client_id_line_edit.setText(self.settings.imgur_client_id)
self.client_secret_line_edit.setText(self.settings.imgur_client_secret)
self.mashape_key_line_edit.setText(self.settings.imgur_mashape_key)
def apply_settings(self):
self.settings.imgur_client_id = self.client_id_line_edit.text()
self.settings.imgur_client_secret = self.client_secret_line_edit.text()
self.settings.imgur_mashape_key = self.mashape_key_line_edit.text()
|
MalloyDelacroix/DownloaderForReddit
|
DownloaderForReddit/gui/settings/imgur_settings_widget.py
|
Python
|
gpl-3.0
| 1,934 |
from functools import partial
from os import path
import sys
from collections import OrderedDict
import numpy as np
import yaml
from .array import format_vector
from .lp import Problem
from .util import VectorMemory, _name
from .symmetry import parse_symmetries, SymmetryGroup, group_by_symmetry
def detect_prefix(s, prefix, on_prefix):
"""
Check whether ``s`` starts with ``prefix``. If so, call ``on_prefix`` with
the stripped string.
"""
if s.startswith(prefix):
if on_prefix:
on_prefix(s[len(prefix):])
return True
return False
def remove_comments(lines, on_comment=None):
"""Iterate over non-comment lines. Forward comments to ``on_comment``."""
for line in lines:
if not detect_prefix(line.strip(), '#', on_comment):
yield line
def read_system_from_file(file):
lines = list(file)
try:
return read_table_from_file(lines)
except ValueError:
pass
from .parse import parse_text
return parse_text("\n".join(lines))
def expand_symmetries(matrix, cols, symm):
if not symm:
return matrix
sg = SymmetryGroup.load(symm, cols)
seen = VectorMemory()
return np.array([v for r in matrix for v in sg(r)
if not seen(v)])
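# Read a whitespace-separated numeric matrix; column names, symmetry specs
# and substitution rules come from comment lines starting with "#::", "#>>"
# and "#~~" respectively, and the substitutions are expanded right away.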
def read_table_from_file(file):
comments = []
contents = remove_comments(file, comments.append)
matrix = np.loadtxt(contents, ndmin=2)
cols = []
symm = []
subs = []
def add_cols(s):
cols.extend(map(_name, s.split()))
def add_symm(s):
symm.extend(parse_symmetries(s))
def add_subs(s):
spec = parse_symmetries(s)
symm.extend(spec)
subs.extend(spec)
for line in comments:
l = line.strip()
detect_prefix(l, '::', add_cols)
detect_prefix(l, '>>', add_symm)
detect_prefix(l, '~~', add_subs)
matrix = expand_symmetries(matrix, cols, subs)
return matrix, cols or None, symm
def read_system(filename):
"""Read linear system from file, return tuple (matrix, colnames)."""
matrix, cols, symmetries = _read_system(filename)
if '_' in cols:
ccol = cols.index('_')
if np.allclose(matrix[:,ccol], 0):
matrix = np.delete(matrix, ccol, axis=1)
del cols[ccol]
return matrix, cols, symmetries
def _read_system(filename):
if filename == '-':
return read_system_from_file(sys.stdin)
else:
with open(filename) as f:
return read_system_from_file(f)
def _unique(items):
ret = []
seen = set()
for item in items:
if item not in seen:
seen.add(item)
ret.append(item)
return ret
def _name_list(s):
try:
return int(s)
except TypeError:
return s
except ValueError:
pass
if path.exists(s):
with open(s) as f:
return f.read().split()
return s.split()
def _fmt_float(f):
if round(f) == f:
return str(int(f))
return str(f)
def _coef(coef):
if coef < 0:
prefix = '-'
coef = -coef
else:
prefix = '+'
if coef != 1:
prefix += ' ' + _fmt_float(coef)
return prefix
def _sort_col_indices(constraint, columns):
# len() is used as approximation for number of terms involved. For most
# cases this should be fine.
key = lambda i: (constraint[i] > 0, len(columns[i]), abs(constraint[i]))
nonzero = (i for i, c in enumerate(constraint) if c != 0)
return sorted(nonzero, key=key, reverse=True)
def format_human_readable(constraint, columns, indices=None):
if indices is None:
indices = _sort_col_indices(constraint, columns)
rhs = ["{} {}".format(_coef(constraint[i]), columns[i])
for i in indices if columns[i] != '_']
if not rhs:
rhs = ["0"]
try:
lhs = -constraint[columns.index('_')]
except ValueError:
lhs = 0
return "{} ≤ {}".format(lhs, " ".join(rhs).lstrip('+ '))
def format_ineq(constraint, pretty=False, columns=None, indices=None):
if pretty:
return format_human_readable(constraint, columns, indices)
return format_vector(constraint)
class System:
"""
IO utility for systems. Keeps track of column names.
"""
def __init__(self, matrix=None, columns=None, symmetries=None):
self.matrix = matrix
self.columns = columns
self.symmetries = symmetries
@classmethod
def load(cls, filename=None, *, default=sys.stdin, force=True):
if not force:
if not filename or (filename != '-' and not path.exists(filename)):
return cls()
return cls(*read_system(filename))
@property
def dim(self):
return self.matrix.shape[1]
@property
def shape(self):
return self.matrix.shape
def __bool__(self):
return self.matrix is not None
def update_symmetries(self, symmetries):
if symmetries is not None:
self.symmetries = symmetries
return bool(self.symmetries)
def symmetry_group(self):
return SymmetryGroup.load(self.symmetries, self.columns)
def slice(self, columns, fill=False):
"""Return reordered system. ``fill=True`` appends missing columns."""
indices = [self._get_column_index(c) for c in columns]
indices = _unique(indices)
subdim = len(indices)
if fill:
indices += sorted(set(range(self.dim)) - set(indices))
if self.columns:
columns = [self.columns[i] for i in indices]
else:
columns = None
return System(self.matrix[:,indices], columns, self.symmetries), subdim
def merge(self, other):
if not self: return other
if not other: return self
assert self.columns and other.columns, \
"Need to set column names for merge operation!"
columns = self.columns[:]
columns += [c for c in other.columns if c not in columns]
col_idx = [columns.index(c) for c in other.columns]
matrix = np.zeros((self.shape[0]+other.shape[0], len(columns)))
matrix[:self.shape[0],:self.shape[1]] = self.matrix
matrix[self.shape[0]:,col_idx] = other.matrix
return self.__class__(matrix, columns)
def _get_column_index(self, col):
try:
return int(col)
except ValueError:
pass
col = _name(col)
return self.columns.index(col)
def lp(self):
"""Get the LP."""
return Problem(self.matrix)
def prepare_for_projection(self, subspace):
"""
Return a tuple ``(system, subspace_dimension)`` with the subspace
occupying the columns with the lowest indices in the returned system.
The ``subspace`` parameter can be either of:
- integer — subspace dimension, using the leftmost columns
- filename — file containing the subspace column names
- string — string containing the subspace column names
"""
subspace_columns = _name_list(subspace)
if isinstance(subspace_columns, int):
return self, subspace_columns
return self.slice(subspace_columns, fill=True)
class SystemFile:
"""Sink for matrix files."""
def __init__(self, filename=None, *,
default=sys.stdout, append=False, columns=None,
symmetries=None, pretty=False):
self.columns = columns
self.file_columns = columns
self.symm_spec = symmetries
self.pretty = pretty
self._seen = VectorMemory()
self._slice = None
self._started = False
self._matrix = None
if append:
self._read_for_append(filename)
self._print = print_to(filename, default=default, append=append)
def _read_for_append(self, filename):
system = System.load(filename, force=False)
        if system.matrix is not None:
self._matrix = system.matrix
self._seen.add(*system.matrix)
self._started = True
if system.columns:
self.file_columns = file_columns = system.columns
if self.columns:
self._slice = list(map(self.columns.index, file_columns))
else:
self.columns = file_columns
def __call__(self, v):
"""Output the vector ``v``."""
if self._seen(v):
return
self._put_header()
if self._slice:
v = v[self._slice]
self._print(format_ineq(v, self.pretty, self.columns))
def _put_header(self):
if self._started:
return
if self.pretty:
if self.file_columns:
# TODO: add 'columns' statement!
self._print('#::', *self.file_columns)
if self.symm_spec:
self._print('symm', '; '.join(map('<>'.join, self.symm_spec)))
else:
if self.file_columns:
self._print('#::', *self.file_columns)
if self.symm_spec:
self._print('#>>', '; '.join(map('<>'.join, self.symm_spec)))
self._started = True
def pprint_symmetries(self, rows, short=False):
sg = SymmetryGroup.load(self.symm_spec, self.columns)
groups = group_by_symmetry(sg, rows)
representatives = [g[0] for g in groups]
if short:
self.pprint(representatives)
else:
for rep in representatives:
self._pprint_group(sg, rep)
self._print()
return groups
def _pprint_group(self, sg, rep):
indices = _sort_col_indices(rep, self.columns)
for permutation in sg.permutations:
inverted = permutation.inverse()
permuted = permutation(rep)
if self._seen(permuted):
continue
order = [inverted.p[i] for i in indices]
self._print(format_human_readable(permuted, self.columns, order))
def pprint(self, rows):
for row in rows:
self._print(format_human_readable(row, self.columns))
def print_to(filename=None, *default_prefix,
append=False, default=sys.stdout):
"""
Return a print function that prints to filename.
If filename is left empty, prints to STDOUT.
"""
if filename and filename != '-':
mode = 'a' if append else 'w'
file = open(filename, mode, buffering=1)
return partial(print, file=file)
elif default:
return partial(print, *default_prefix, file=default)
else:
return lambda *args, **kwargs: None
def repeat(func, *args, **kwargs):
"""Call the function endlessly."""
while True:
yield func(*args, **kwargs)
def take(count, iterable):
"""Take count elements from iterable."""
for i, v in zip(range(count), iterable):
yield v
def get_bits(num):
"""Return tuple of indices corresponding to 1-bits."""
return tuple(i for i in range(num.bit_length())
if num & (1 << i))
def subsets(sup):
sup = sorted(list(sup))
for i in range(2**len(sup)):
yield set(sup[k] for k in get_bits(i))
def supersets(sub, world):
sub, world = set(sub), set(world)
return map(sub.union, subsets(world - sub))
def default_column_labels(dim):
return ['_'] + ['_'+str(i) for i in range(1, dim)]
def _column_label(index, varnames="ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
return _name({varnames[i] for i in get_bits(index)})
def column_varname_labels(varnames):
if isinstance(varnames, int):
varnames = [chr(ord('A') + i) for i in range(varnames)]
dim = 2**len(varnames)
return ['_'] + [_column_label(i, varnames) for i in range(1, dim)]
class StatusInfo:
def __init__(self, file=sys.stderr):
self.file = file
def write(self, blob):
self.file.write(blob)
self.file.flush()
def cursor_up(self, num_lines=1):
self.write("\033[" + str(num_lines) + "A")
def clear_line(self):
self.write("\r\033[K")
def __call__(self, *args):
if args:
self.clear_line()
self.write(" ".join(args))
else:
self.write("\n")
def yaml_dump(data, stream=None, Dumper=yaml.SafeDumper, **kwds):
class _Dumper(Dumper):
pass
def numpy_scalar_representer(dumper, data):
return dumper.represent_data(np.asscalar(data))
def numpy_array_representer(dumper, data):
return dumper.represent_data([x for x in data])
def complex_representer(dumper, data):
return dumper.represent_data([data.real, data.imag])
def odict_representer(dumper, data):
return dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items())
_Dumper.add_representer(OrderedDict, odict_representer)
_Dumper.add_multi_representer(np.generic, numpy_scalar_representer)
_Dumper.add_representer(np.ndarray, numpy_array_representer)
_Dumper.add_representer(complex, complex_representer)
return yaml.dump(data, stream, _Dumper, **kwds)
def yaml_load(stream, Loader=yaml.SafeLoader, object_pairs_hook=OrderedDict):
class OrderedLoader(Loader):
pass
def construct_mapping(loader, node):
loader.flatten_mapping(node)
return object_pairs_hook(loader.construct_pairs(node))
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
construct_mapping)
return yaml.load(stream, OrderedLoader)
|
coldfix/pystif
|
pystif/core/io.py
|
Python
|
gpl-3.0
| 13,580 |
"""
Implementation of the IEAgent interface for the mechanical engineering and
mechanics website, located here: http://drexel.edu/mem/contact/faculty-directory/
"""
__all__ = ['MemIEAgent']
__version__ = '0.1'
__author__ = 'Tom Amon'
import requests
from bs4 import BeautifulSoup
import abc
from .ieagent import IEAgent
import ttl
class MemIEAgent(IEAgent):
_link = "http://drexel.edu/mem/contact/faculty-directory/"
ttl_filename = "ttl/mem.ttl"
def write_ttl(self):
ttl_file = ttl.TtlFile(self.ttl_filename)
webpage = requests.get(self._link)
try:
webpage.raise_for_status()
except Exception as exc:
print('There was a problem: %s' % (exc))
soup = BeautifulSoup(webpage.text, "html.parser")
elems = soup.select('tr')
for i in range(2, len(elems)):
nameStr = elems[i].find('strong').getText()
titleStr = elems[i].find('br').next_sibling
contact_info = elems[i].select('p')[2].getText().split('\n')
emailStr = contact_info[0]
phoneStr = contact_info[1]
roomStr = contact_info[2]
interestsStr = elems[i].select('p')[3].getText()
prof = ttl.TtlFileEntry()
prof.name = nameStr
prof.property = "faculty"
prof.title = titleStr
prof.email = emailStr
prof.phone = phoneStr
prof.room = roomStr
prof.Interests = interestsStr
prof.write_to(ttl_file)
ttl_file.close()
return ttl_file
|
DrexelChatbotGroup/DrexelChatbot
|
ie/ieagents/mem_ieagent.py
|
Python
|
gpl-3.0
| 1,588 |
# coding=utf-8
from autosubliminal.server.api.items import ItemsApi
from autosubliminal.server.api.logs import LogsApi
from autosubliminal.server.api.movies import MoviesApi
from autosubliminal.server.api.settings import SettingsApi
from autosubliminal.server.api.shows import ShowsApi
from autosubliminal.server.api.subtitles import SubtitlesApi
from autosubliminal.server.api.system import SystemApi
from autosubliminal.server.rest import RestResource
class Api(RestResource):
"""
The Auto-Subliminal REST api.
Rest resource for handling the /api path.
"""
def __init__(self):
super().__init__()
# Set the allowed methods
self.allowed_methods = ('GET',)
# Add all sub paths here: /api/...
self.items = ItemsApi()
self.logs = LogsApi()
self.movies = MoviesApi()
self.settings = SettingsApi()
self.shows = ShowsApi()
self.subtitles = SubtitlesApi()
self.system = SystemApi()
def get(self, *args, **kwargs):
return {'api': 'Welcome to the Auto-Subliminal REST api'}
|
h3llrais3r/Auto-Subliminal
|
autosubliminal/server/api/__init__.py
|
Python
|
gpl-3.0
| 1,092 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from collections import Iterable
import contextlib
import json
from unittest import mock
from django.core.cache import cache
from django.contrib.auth.models import User, Permission
from django.conf import settings
from django.forms import ValidationError
from django.urls import reverse
from django.utils.encoding import smart_text
from markus.testing import MetricsMock
import pyquery
import pytest
from crashstats import productlib
from crashstats.api.views import (
api_models_and_names,
is_valid_model_class,
MultipleStringField,
TYPE_MAP,
)
from crashstats.crashstats.models import (
BugAssociation,
NoOpMiddleware,
ProcessedCrash,
Reprocessing,
RawCrash,
SocorroMiddleware,
UnredactedCrash,
)
from crashstats.crashstats.tests.conftest import BaseTestViews
from crashstats.supersearch.models import (
SuperSearch,
SuperSearchUnredacted,
ESSocorroMiddleware,
)
from crashstats.tokens.models import Token
from socorro.lib.ooid import create_new_ooid
class TestDedentLeft:
def test_dedent_left(self):
from crashstats.api.views import dedent_left
assert dedent_left("Hello", 2) == "Hello"
assert dedent_left(" Hello", 2) == " Hello"
assert dedent_left(" Hello ", 2) == " Hello "
text = """Line 1
Line 2
Line 3
""".rstrip()
# because this code right above is indented with 2 * 4 spaces
assert dedent_left(text, 8) == "Line 1\nLine 2\nLine 3"
class TestIsValidModelClass:
"""Test that is_valid_model_class validates API models."""
@pytest.mark.parametrize("model", (SuperSearch, SuperSearchUnredacted))
def test_valid(self, model):
assert is_valid_model_class(model)
@pytest.mark.parametrize(
"not_model",
("SuperSearch", int, contextlib, SocorroMiddleware, ESSocorroMiddleware),
)
def test_invalid(self, not_model):
assert not is_valid_model_class(not_model)
class TestDocumentationViews(BaseTestViews):
def test_documentation_home_page(self):
from crashstats.api import views
models_and_names = views.api_models_and_names()
valid_names = [pair[1] for pair in models_and_names]
url = reverse("api:documentation")
response = self.client.get(url)
assert response.status_code == 200
doc = pyquery.PyQuery(response.content)
for elt in doc("#mainbody .panel .title h2 a"):
assert elt.text in valid_names
class TestViews(BaseTestViews):
def setUp(self):
super().setUp()
self._middleware = settings.MIDDLEWARE
settings.MIDDLEWARE += (
"crashstats.crashstats.middleware.SetRemoteAddrFromRealIP",
)
def tearDown(self):
super().tearDown()
settings.MIDDLEWARE = self._middleware
def test_invalid_url(self):
url = reverse("api:model_wrapper", args=("BlaBLabla",))
response = self.client.get(url)
assert response.status_code == 404
def test_base_classes_raise_not_found(self):
url = reverse("api:model_wrapper", args=("SocorroMiddleware",))
response = self.client.get(url)
assert response.status_code == 404
url = reverse("api:model_wrapper", args=("ESSocorroMiddleware",))
response = self.client.get(url)
assert response.status_code == 404
def test_option_CORS(self):
"""OPTIONS request for model_wrapper returns CORS headers"""
url = reverse("api:model_wrapper", args=("NoOp",))
response = self.client.options(url, HTTP_ORIGIN="http://example.com")
assert response.status_code == 200
assert response["Access-Control-Allow-Origin"] == "*"
def test_cache_control(self):
"""Verifies Cache-Control header for models that cache results"""
url = reverse("api:model_wrapper", args=("NoOp",))
response = self.client.get(
url, {"product": productlib.get_default_product().name}
)
assert response.status_code == 200
assert response["Cache-Control"]
assert "private" in response["Cache-Control"]
cache_seconds = NoOpMiddleware.cache_seconds
assert f"max-age={cache_seconds}" in response["Cache-Control"]
def test_metrics_gathering(self):
url = reverse("api:model_wrapper", args=("NoOp",))
with MetricsMock() as metrics_mock:
response = self.client.get(url, {"product": "good"})
assert response.status_code == 200
metrics_mock.assert_incr("webapp.api.pageview", tags=["endpoint:apiNoOp"])
def test_param_exceptions(self):
# missing required parameter
url = reverse("api:model_wrapper", args=("NoOp",))
response = self.client.get(url)
assert response.status_code == 400
assert "This field is required." in smart_text(response.content)
response = self.client.get(url, {"product": "bad"})
assert response.status_code == 400
assert "Bad value for parameter(s) 'Bad product'" in smart_text(
response.content
)
def test_hit_or_not_hit_ratelimit(self):
url = reverse("api:model_wrapper", args=("NoOp",))
response = self.client.get(url, {"product": "good"})
assert response.status_code == 200
with self.settings(API_RATE_LIMIT="3/m", API_RATE_LIMIT_AUTHENTICATED="6/m"):
current_limit = 3 # see above mentioned settings override
# Double to avoid
# https://bugzilla.mozilla.org/show_bug.cgi?id=1148470
for i in range(current_limit * 2):
response = self.client.get(
url, {"product": "good"}, HTTP_X_REAL_IP="12.12.12.12"
)
assert response.status_code == 429
# But it'll work if you use a different X-Real-IP
# because the rate limit is based on your IP address
response = self.client.get(
url, {"product": "good"}, HTTP_X_REAL_IP="11.11.11.11"
)
assert response.status_code == 200
user = User.objects.create(username="test")
token = Token.objects.create(
user=user, notes="Just for avoiding rate limit"
)
response = self.client.get(
url, {"product": "good"}, HTTP_AUTH_TOKEN=token.key
)
assert response.status_code == 200
for i in range(current_limit):
response = self.client.get(url, {"product": "good"})
assert response.status_code == 200
# But even being logged in has a limit.
authenticated_limit = 6 # see above mentioned settings override
assert authenticated_limit > current_limit
for i in range(authenticated_limit * 2):
response = self.client.get(url, {"product": "good"})
# Even if you're authenticated - sure the limit is higher -
# eventually you'll run into the limit there too.
assert response.status_code == 429
def test_ProcessedCrash(self):
url = reverse("api:model_wrapper", args=("ProcessedCrash",))
response = self.client.get(url)
assert response.status_code == 400
assert response["Content-Type"] == "application/json"
dump = json.loads(response.content)
assert dump["errors"]["crash_id"]
def mocked_get(**params):
if "datatype" in params and params["datatype"] == "processed":
return {
"client_crash_date": "2012-06-11T06:08:45",
"dump": dump,
"signature": "FakeSignature1",
"user_comments": None,
"uptime": 14693,
"release_channel": "nightly",
"uuid": "11cb72f5-eb28-41e1-a8e4-849982120611",
"flash_version": "[blank]",
"hangid": None,
"process_type": None,
"id": 383569625,
"os_version": "10.6.8 10K549",
"version": "5.0a1",
"build": "20120609030536",
"ReleaseChannel": "nightly",
"addons_checked": None,
"product": "WaterWolf",
"os_name": "Mac OS X",
"last_crash": 371342,
"date_processed": "2012-06-11T06:08:44",
"cpu_arch": "amd64",
"reason": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS",
"address": "0x8",
"completed_datetime": "2012-06-11T06:08:57",
"success": True,
"upload_file_minidump_browser": "a crash",
"upload_file_minidump_flash1": "a crash",
"upload_file_minidump_flash2": "a crash",
"upload_file_minidump_plugin": "a crash",
}
raise NotImplementedError
ProcessedCrash.implementation().get.side_effect = mocked_get
response = self.client.get(url, {"crash_id": "123"})
assert response.status_code == 200
dump = json.loads(response.content)
assert dump["uuid"] == "11cb72f5-eb28-41e1-a8e4-849982120611"
assert "upload_file_minidump_flash2" in dump
assert "url" not in dump
def test_UnredactedCrash(self):
url = reverse("api:model_wrapper", args=("UnredactedCrash",))
response = self.client.get(url)
        # because we don't have sufficient permissions yet to use it
assert response.status_code == 403
user = User.objects.create(username="test")
self._add_permission(user, "view_pii")
self._add_permission(user, "view_exploitability")
view_pii_perm = Permission.objects.get(codename="view_pii")
token = Token.objects.create(user=user, notes="Only PII token")
view_exploitability_perm = Permission.objects.get(
codename="view_exploitability"
)
token.permissions.add(view_pii_perm)
token.permissions.add(view_exploitability_perm)
response = self.client.get(url, HTTP_AUTH_TOKEN=token.key)
assert response.status_code == 400
assert response["Content-Type"] == "application/json"
dump = json.loads(response.content)
assert dump["errors"]["crash_id"]
def mocked_get(**params):
if "datatype" in params and params["datatype"] == "unredacted":
return {
"client_crash_date": "2012-06-11T06:08:45",
"dump": dump,
"signature": "FakeSignature1",
"user_comments": None,
"uptime": 14693,
"release_channel": "nightly",
"uuid": "11cb72f5-eb28-41e1-a8e4-849982120611",
"flash_version": "[blank]",
"hangid": None,
"process_type": None,
"id": 383569625,
"os_version": "10.6.8 10K549",
"version": "5.0a1",
"build": "20120609030536",
"ReleaseChannel": "nightly",
"addons_checked": None,
"product": "WaterWolf",
"os_name": "Mac OS X",
"last_crash": 371342,
"date_processed": "2012-06-11T06:08:44",
"cpu_arch": "amd64",
"reason": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS",
"address": "0x8",
"completed_datetime": "2012-06-11T06:08:57",
"success": True,
"upload_file_minidump_browser": "a crash",
"upload_file_minidump_flash1": "a crash",
"upload_file_minidump_flash2": "a crash",
"upload_file_minidump_plugin": "a crash",
"exploitability": "Unknown Exploitability",
}
raise NotImplementedError
UnredactedCrash.implementation().get.side_effect = mocked_get
response = self.client.get(url, {"crash_id": "123"}, HTTP_AUTH_TOKEN=token.key)
assert response.status_code == 200
dump = json.loads(response.content)
assert dump["uuid"] == "11cb72f5-eb28-41e1-a8e4-849982120611"
assert "upload_file_minidump_flash2" in dump
assert "exploitability" in dump
def test_RawCrash(self):
def mocked_get(**params):
if "uuid" in params and params["uuid"] == "abc123":
return {
"InstallTime": "1366691881",
"AdapterVendorID": "0x8086",
"Theme": "classic/1.0",
"Version": "23.0a1",
"id": "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
"Vendor": "Mozilla",
"EMCheckCompatibility": "true",
"URL": "http://system.gaiamobile.org:8080/",
"version": "23.0a1",
"AdapterDeviceID": "0x 46",
"ReleaseChannel": "nightly",
"submitted_timestamp": "2013-04-29T16:42:28.961187+00:00",
"buildid": "20130422105838",
"Notes": "AdapterVendorID: 0x8086, AdapterDeviceID: ...",
"CrashTime": "1366703112",
"StartupTime": "1366702830",
"Add-ons": "activities%40gaiamobile.org:0.1,%40gaiam...",
"BuildID": "20130422105838",
"SecondsSinceLastCrash": "23484",
"ProductName": "WaterWolf",
"ProductID": "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
"AsyncShutdownTimeout": 12345,
"BIOS_Manufacturer": "abc123",
"Comments": "I visited http://example.com and mail@example.com",
"upload_file_minidump_browser": "a crash",
"upload_file_minidump_flash1": "a crash",
"upload_file_minidump_flash2": "a crash",
"upload_file_minidump_plugin": "a crash",
}
raise NotImplementedError
RawCrash.implementation().get.side_effect = mocked_get
url = reverse("api:model_wrapper", args=("RawCrash",))
response = self.client.get(url)
assert response.status_code == 400
assert response["Content-Type"] == "application/json"
dump = json.loads(response.content)
assert dump["errors"]["crash_id"]
response = self.client.get(url, {"crash_id": "abc123"})
assert response.status_code == 200
dump = json.loads(response.content)
assert "id" in dump
assert "URL" not in dump
assert "AsyncShutdownTimeout" in dump
assert "BIOS_Manufacturer" in dump
assert "upload_file_minidump_browser" in dump
assert "upload_file_minidump_flash1" in dump
assert "upload_file_minidump_flash2" in dump
assert "upload_file_minidump_plugin" in dump
def test_RawCrash_binary_blob(self):
def mocked_get(**params):
if "uuid" in params and params["uuid"] == "abc":
return "\xe0"
raise NotImplementedError
RawCrash.implementation().get.side_effect = mocked_get
url = reverse("api:model_wrapper", args=("RawCrash",))
response = self.client.get(url, {"crash_id": "abc", "format": "raw"})
# because we don't have permission
assert response.status_code == 403
response = self.client.get(url, {"crash_id": "abc", "format": "wrong"}) # note
# invalid format
assert response.status_code == 400
assert response["Content-Type"] == "application/json"
user = self._login()
self._add_permission(user, "view_pii")
response = self.client.get(url, {"crash_id": "abc", "format": "raw"})
# still don't have the right permission
assert response.status_code == 403
self._add_permission(user, "view_rawdump")
response = self.client.get(url, {"crash_id": "abc", "format": "raw"})
# finally!
assert response.status_code == 200
assert response["Content-Disposition"] == 'attachment; filename="abc.dmp"'
assert response["Content-Type"] == "application/octet-stream"
def test_RawCrash_invalid_crash_id(self):
# NOTE(alexisdeschamps): this undoes the mocking of the implementation so we can test
# the implementation code.
RawCrash.implementation = self._mockeries[RawCrash]
url = reverse("api:model_wrapper", args=("RawCrash",))
response = self.client.get(
url, {"crash_id": "821fcd0c-d925-4900-85b6-687250180607docker/as_me.sh"}
)
assert response.status_code == 400
def test_Bugs(self):
BugAssociation.objects.create(bug_id="999999", signature="OOM | small")
url = reverse("api:model_wrapper", args=("Bugs",))
response = self.client.get(url)
assert response.status_code == 400
assert response["Content-Type"] == "application/json"
dump = json.loads(response.content)
assert dump["errors"]["signatures"]
response = self.client.get(url, {"signatures": "OOM | small"})
assert response.status_code == 200
assert json.loads(response.content) == {
"hits": [{"id": 999999, "signature": "OOM | small"}],
"total": 1,
}
def test_SignaturesForBugs(self):
BugAssociation.objects.create(bug_id="999999", signature="OOM | small")
url = reverse("api:model_wrapper", args=("SignaturesByBugs",))
response = self.client.get(url)
assert response.status_code == 400
assert response["Content-Type"] == "application/json"
dump = json.loads(response.content)
assert dump["errors"]["bug_ids"]
response = self.client.get(url, {"bug_ids": "999999"})
assert response.status_code == 200
assert json.loads(response.content) == {
"hits": [{"id": 999999, "signature": "OOM | small"}],
"total": 1,
}
def test_Field(self):
url = reverse("api:model_wrapper", args=("Field",))
response = self.client.get(url)
assert response.status_code == 404
def test_SuperSearch(self):
def mocked_supersearch_get(**params):
assert "exploitability" not in params
restricted_params = ("_facets", "_aggs.signature", "_histogram.date")
for key in restricted_params:
if key in params:
assert "url" not in params[key]
if "product" in params:
assert params["product"] == ["WaterWolf", "NightTrain"]
return {
"hits": [
{
"signature": "abcdef",
"product": "WaterWolf",
"version": "1.0",
"exploitability": "high",
"url": "http://embarassing.website.com",
"user_comments": "hey I am thebig@lebowski.net",
}
],
"facets": {"signature": []},
"total": 0,
}
SuperSearch.implementation().get.side_effect = mocked_supersearch_get
url = reverse("api:model_wrapper", args=("SuperSearch",))
response = self.client.get(url)
assert response.status_code == 200
res = json.loads(response.content)
assert res["hits"]
assert res["facets"]
# Verify forbidden fields are not exposed.
assert "exploitability" not in res["hits"]
assert "url" not in res["hits"]
# Verify it's not possible to use restricted parameters.
response = self.client.get(
url,
{
"exploitability": "high",
"_facets": ["url", "product"],
"_aggs.signature": ["url", "product"],
"_histogram.date": ["url", "product"],
},
)
assert response.status_code == 200
# Verify values can be lists.
response = self.client.get(url, {"product": ["WaterWolf", "NightTrain"]})
assert response.status_code == 200
def test_SuperSearchUnredacted(self):
def mocked_supersearch_get(**params):
assert "exploitability" in params
if "product" in params:
assert params["product"] == ["WaterWolf", "NightTrain"]
return {
"hits": [
{
"signature": "abcdef",
"product": "WaterWolf",
"version": "1.0",
"exploitability": "high",
"url": "http://embarassing.website.com",
"user_comments": "hey I am thebig@lebowski.net",
}
],
"facets": {"signature": []},
"total": 0,
}
SuperSearchUnredacted.implementation().get.side_effect = mocked_supersearch_get
url = reverse("api:model_wrapper", args=("SuperSearchUnredacted",))
response = self.client.get(url, {"exploitability": "high"})
assert response.status_code == 403
assert response["Content-Type"] == "application/json"
error = json.loads(response.content)["error"]
permission = Permission.objects.get(codename="view_exploitability")
assert permission.name in error
# Log in to get permissions.
user = self._login()
self._add_permission(user, "view_pii")
self._add_permission(user, "view_exploitability")
response = self.client.get(url, {"exploitability": "high"})
assert response.status_code == 200
res = json.loads(response.content)
assert res["hits"]
assert res["facets"]
# Verify forbidden fields are exposed.
assert "exploitability" in res["hits"][0]
assert "url" in res["hits"][0]
assert "thebig@lebowski.net" in res["hits"][0]["user_comments"]
# Verify values can be lists.
response = self.client.get(
url, {"exploitability": "high", "product": ["WaterWolf", "NightTrain"]}
)
assert response.status_code == 200
def test_Reprocessing(self):
crash_id = create_new_ooid()
def mocked_publish(queue, crash_ids):
assert queue == "reprocessing"
assert crash_ids == [crash_id]
return True
Reprocessing.implementation().publish = mocked_publish
url = reverse("api:model_wrapper", args=("Reprocessing",))
response = self.client.get(url)
assert response.status_code == 403
params = {"crash_ids": crash_id}
response = self.client.get(url, params, HTTP_AUTH_TOKEN="somecrap")
assert response.status_code == 403
user = User.objects.create(username="test")
self._add_permission(user, "reprocess_crashes")
perm = Permission.objects.get(codename="reprocess_crashes")
# but make a token that only has the 'reprocess_crashes'
# permission associated with it
token = Token.objects.create(user=user, notes="Only reprocessing")
token.permissions.add(perm)
response = self.client.get(url, params, HTTP_AUTH_TOKEN=token.key)
assert response.status_code == 405
response = self.client.post(url, params, HTTP_AUTH_TOKEN=token.key)
assert response.status_code == 200
assert json.loads(response.content) is True
class TestCrashVerify:
def setup_method(self):
cache.clear()
@contextlib.contextmanager
def supersearch_returns_crashes(self, uuids):
"""Mock supersearch implementation to return result with specified crashes"""
def mocked_supersearch_get(**params):
assert sorted(params.keys()) == [
"_columns",
"_fields",
"_results_number",
"uuid",
]
return {
"hits": [{"uuid": uuid} for uuid in uuids],
"facets": {"signature": []},
"total": len(uuids),
}
with mock.patch(
"crashstats.supersearch.models.SuperSearch.implementation"
) as mock_ss:
mock_ss.return_value.get.side_effect = mocked_supersearch_get
yield
def create_s3_buckets(self, boto_helper):
bucket = settings.SOCORRO_CONFIG["resource"]["boto"]["bucket_name"]
boto_helper.create_bucket(bucket)
telemetry_bucket = settings.SOCORRO_CONFIG["telemetrydata"]["bucket_name"]
boto_helper.create_bucket(telemetry_bucket)
def test_bad_uuid(self, client):
url = reverse("api:crash_verify")
resp = client.get(url, {"crash_id": "foo"})
assert resp.status_code == 400
data = json.loads(resp.content)
assert data == {"error": "unknown crash id"}
    def test_elasticsearch_has_crash(self, boto_helper, client):
self.create_s3_buckets(boto_helper)
uuid = create_new_ooid()
with self.supersearch_returns_crashes([uuid]):
url = reverse("api:crash_verify")
resp = client.get(url, {"crash_id": uuid})
assert resp.status_code == 200
data = json.loads(resp.content)
assert data == {
"uuid": uuid,
"elasticsearch_crash": True,
"s3_raw_crash": False,
"s3_processed_crash": False,
"s3_telemetry_crash": False,
}
def test_raw_crash_has_crash(self, boto_helper, client):
self.create_s3_buckets(boto_helper)
uuid = create_new_ooid()
crash_data = {"submitted_timestamp": "2018-03-14-09T22:21:18.646733+00:00"}
bucket = settings.SOCORRO_CONFIG["resource"]["boto"]["bucket_name"]
raw_crash_key = "v2/raw_crash/%s/20%s/%s" % (uuid[0:3], uuid[-6:], uuid)
boto_helper.upload_fileobj(
bucket_name=bucket,
key=raw_crash_key,
data=json.dumps(crash_data).encode("utf-8"),
)
with self.supersearch_returns_crashes([]):
url = reverse("api:crash_verify")
resp = client.get(url, {"crash_id": uuid})
assert resp.status_code == 200
data = json.loads(resp.content)
assert data == {
"uuid": uuid,
"s3_raw_crash": True,
"s3_processed_crash": False,
"elasticsearch_crash": False,
"s3_telemetry_crash": False,
}
def test_processed_has_crash(self, boto_helper, client):
self.create_s3_buckets(boto_helper)
uuid = create_new_ooid()
crash_data = {
"signature": "[@signature]",
"uuid": uuid,
"completed_datetime": "2018-03-14 10:56:50.902884",
}
bucket = settings.SOCORRO_CONFIG["resource"]["boto"]["bucket_name"]
boto_helper.upload_fileobj(
bucket_name=bucket,
key="v1/processed_crash/%s" % uuid,
data=json.dumps(crash_data).encode("utf-8"),
)
with self.supersearch_returns_crashes([]):
url = reverse("api:crash_verify")
resp = client.get(url, {"crash_id": uuid})
assert resp.status_code == 200
data = json.loads(resp.content)
assert data == {
"uuid": uuid,
"s3_processed_crash": True,
"s3_raw_crash": False,
"elasticsearch_crash": False,
"s3_telemetry_crash": False,
}
def test_telemetry_has_crash(self, boto_helper, client):
self.create_s3_buckets(boto_helper)
uuid = create_new_ooid()
crash_data = {
"platform": "Linux",
"signature": "now_this_is_a_signature",
"uuid": uuid,
}
telemetry_bucket = settings.SOCORRO_CONFIG["telemetrydata"]["bucket_name"]
boto_helper.upload_fileobj(
bucket_name=telemetry_bucket,
key="v1/crash_report/20%s/%s" % (uuid[-6:], uuid),
data=json.dumps(crash_data).encode("utf-8"),
)
with self.supersearch_returns_crashes([]):
url = reverse("api:crash_verify")
resp = client.get(url, {"crash_id": uuid})
assert resp.status_code == 200
data = json.loads(resp.content)
assert data == {
"uuid": uuid,
"s3_telemetry_crash": True,
"s3_raw_crash": False,
"s3_processed_crash": False,
"elasticsearch_crash": False,
}
class TestCrashSignature:
# NOTE(willkg): This doesn't test signature generation--just the API wrapper.
def test_no_payload(self, client):
url = reverse("api:crash_signature")
resp = client.post(url, content_type="application/json")
assert resp.status_code == 400
def test_wrong_contenttype(self, client):
url = reverse("api:crash_signature")
resp = client.post(url, content_type="application/multipart-formdata")
assert resp.status_code == 415
def test_basic(self, client):
payload = {
"jobs": [
{
"os": "Linux",
"crashing_thread": 0,
"threads": [
{
"frames": [
{
"frame": 0,
"function": "SomeFunc",
"line": 20,
"file": "somefile.cpp",
"module": "foo.so.5.15.0",
"module_offset": "0x37a92",
"offset": "0x7fc641052a92",
},
{
"frame": 1,
"function": "SomeOtherFunc",
"line": 444,
"file": "someotherfile.cpp",
"module": "bar.so",
"module_offset": "0x39a55",
"offset": "0x7fc641044a55",
},
]
}
],
}
]
}
url = reverse("api:crash_signature")
resp = client.post(url, data=payload, content_type="application/json")
expected_payload = {
"results": [
{
"extra": {
"normalized_frames": ["SomeFunc", "SomeOtherFunc"],
"proto_signature": "SomeFunc | SomeOtherFunc",
},
"notes": [],
"signature": "SomeFunc",
}
]
}
assert resp.json() == expected_payload
assert resp.status_code == 200
class TestMultipleStringField:
"""Test the MultipleStringField class."""
def test_empty_list_required(self):
"""If a field is required, an empty list is a validation error."""
field = MultipleStringField()
with pytest.raises(ValidationError):
field.clean([])
def test_empty_list_optional(self):
"""If a field is optional, an empty list is valid."""
assert MultipleStringField(required=False).clean([]) == []
def test_good_argument(self):
"""A list with one string arguments is valid."""
assert MultipleStringField().clean(["one"]) == ["one"]
def test_null_arg(self):
"""A embedded null character is a validation error."""
field = MultipleStringField()
value = "Embeded_Null_\x00"
with pytest.raises(ValidationError):
field.clean([value])
API_MODEL_NAMES = [
"Bugs",
"CrashSignature",
"NoOp",
"ProcessedCrash",
"RawCrash",
"Reprocessing",
"SignatureFirstDate",
"SignaturesByBugs",
"SuperSearch",
"SuperSearchFields",
"SuperSearchUnredacted",
"UnredactedCrash",
"VersionString",
]
def test_api_model_names():
"""
Verify the expected publicly exposed API model list.
This allows parametrized testing of the API Models, for better failure messages.
"""
names = [name for model, name in api_models_and_names()]
assert names == API_MODEL_NAMES
@pytest.mark.parametrize("name", API_MODEL_NAMES)
class TestAPIModels:
MODEL = {}
def setup_class(cls):
"""Generate the dictionary of model names to model classes."""
for model, model_name in api_models_and_names():
cls.MODEL[model_name] = model
def test_api_required_permissions(self, name):
"""API_REQUIRED_PERMISSIONS is None or an iterable."""
model_obj = self.MODEL[name]()
req_perms = model_obj.API_REQUIRED_PERMISSIONS
assert req_perms is None or (
isinstance(req_perms, Iterable) and not isinstance(req_perms, str)
)
def test_api_binary_permissions(self, name):
"""API_BINARY_PERMISSIONS is None or an iterable."""
model_obj = self.MODEL[name]()
bin_perms = model_obj.API_BINARY_PERMISSIONS
assert bin_perms is None or (
isinstance(bin_perms, Iterable) and not isinstance(bin_perms, str)
)
def test_api_allowlist(self, name):
"""API_ALLOWLIST is defined."""
model = self.MODEL[name]
api_allowlist = model.API_ALLOWLIST
assert (
api_allowlist is None
or isinstance(api_allowlist, Iterable)
or (callable(api_allowlist) and isinstance(api_allowlist(), Iterable))
)
def test_get_annotated_params(self, name):
"""get_annotated_params returns a list suitable for creating the form."""
model_obj = self.MODEL[name]()
params = model_obj.get_annotated_params()
for param in params:
assert "required" in param
assert "name" in param
assert param["type"] in TYPE_MAP
|
lonnen/socorro
|
webapp-django/crashstats/api/tests/test_views.py
|
Python
|
mpl-2.0
| 34,754 |
import os
import pytest
import requests
import time
from pages.list_verification import ListVerificationPage
CLIENT_CHECK_DELAY = 3600
TEST_ENV = os.environ['TEST_ENV']
@pytest.mark.nondestructive
def test_list_verification(base_url, selenium, channel, conf):
"""Test that Firefox Tracking Protection serves correct lists"""
base_url = conf.get(TEST_ENV, 'browser.safebrowsing.provider.mozilla.updateURL')
page = ListVerificationPage(selenium, base_url).open()
results = page.read_lists(conf)
g = conf.get('filelist_{0}'.format(TEST_ENV), 'lists')
s = g.split()
expected = sorted(s)
assert results == expected
@pytest.mark.nondestructive
def test_individual_list_verification(selenium, channel, conf):
"""Test individual list responses"""
update_url = conf.get(TEST_ENV, 'browser.safebrowsing.provider.mozilla.downloads')
    file_list = conf.get('list_index', 'file_list_{0}'.format(TEST_ENV))
    s = file_list.split(',')
for item in s:
resp = requests.post(update_url, data=item)
results = str(resp.text).splitlines()
assert results[0] == 'n:{0}'.format(CLIENT_CHECK_DELAY)
assert results[1]+';' == 'i:{0}'.format(item)
t = results[2].split('/')
assert t[0] == 'u:{0}'.format(conf.get(TEST_ENV, 'base_domain'))
assert t[1]+';' == item
assert t[2].isdigit()
|
rbillings/shavar-e2e-tests
|
tests/test_list_verification.py
|
Python
|
mpl-2.0
| 1,363 |
from tests.support.asserts import assert_success
from tests.support.inline import inline
from . import opener, window_name
def new_window(session, type_hint=None):
return session.transport.send(
"POST", "session/{session_id}/window/new".format(**vars(session)),
{"type": type_hint})
def test_payload(session):
original_handles = session.handles
response = new_window(session, type_hint="window")
value = assert_success(response)
handles = session.handles
assert len(handles) == len(original_handles) + 1
assert value["handle"] in handles
assert value["handle"] not in original_handles
assert value["type"] == "window"
def test_keeps_current_window_handle(session):
original_handle = session.window_handle
response = new_window(session, type_hint="window")
value = assert_success(response)
assert value["type"] == "window"
assert session.window_handle == original_handle
def test_opens_about_blank_in_new_window(session):
url = inline("<p>foo")
session.url = url
response = new_window(session, type_hint="window")
value = assert_success(response)
assert value["type"] == "window"
assert session.url == url
session.window_handle = value["handle"]
assert session.url == "about:blank"
def test_sets_no_window_name(session):
response = new_window(session, type_hint="window")
value = assert_success(response)
assert value["type"] == "window"
session.window_handle = value["handle"]
assert window_name(session) == ""
def test_sets_no_opener(session):
response = new_window(session, type_hint="window")
value = assert_success(response)
assert value["type"] == "window"
session.window_handle = value["handle"]
assert opener(session) is None
|
asajeffrey/servo
|
tests/wpt/web-platform-tests/webdriver/tests/new_window/new_window.py
|
Python
|
mpl-2.0
| 1,798 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tuple functions"""
import data
DIRECTIONS = data.DIRECTIONS
DIRECTIONS = (DIRECTIONS[:-1]) + ('West',)
|
ct3080a/is210-week-06-warmup
|
task_03.py
|
Python
|
mpl-2.0
| 155 |
from django.contrib.auth.models import User
from django.template.loader import render_to_string
from django.core.mail import send_mail
from django.conf import settings
from xbrowse_server.base.models import Project
def add_new_collaborator(email, referrer):
"""
Someone has added a new user to the system - create user and email them
Args:
referrer (User): person that is adding this user; email will reference them.
"""
username = User.objects.make_random_password()
user = User.objects.create_user(username, email=email, last_login='1970-01-01 00:00')
profile = user.profile
profile.set_password_token = User.objects.make_random_password(length=30)
profile.save()
link = settings.BASE_URL + user.profile.get_set_password_link()
email_content = render_to_string('emails/new_collaborator.txt', {'user': user, 'link': link, 'referrer': referrer})
send_mail('Set up your seqr account', email_content, settings.FROM_EMAIL, [user.email,], fail_silently=False )
return user
def get_projects_for_user(user):
"""
Return all projects for which the given user has 'view' access.
"""
all_projects = Project.objects.all()
if user.is_superuser:
return all_projects
if user.is_staff:
return [p for p in all_projects if not p.disable_staff_access or p.can_view(user)]
else:
return [p for p in all_projects if p.can_view(user)]
def get_fellow_collaborators(user):
"""
All the users that collaborate on any project with user
"""
pass
|
macarthur-lab/xbrowse
|
xbrowse_server/user_controls.py
|
Python
|
agpl-3.0
| 1,555 |
# Copyright 2014, Oliver Nagy <olitheolix@gmail.com>
#
# This file is part of Azrael (https://github.com/olitheolix/azrael)
#
# Azrael is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Azrael is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Azrael. If not, see <http://www.gnu.org/licenses/>.
"""
Vector grid engine.
All grids extend to infinity with a default value of zero. The basic usage
pattern is to define a new grid and then set/query values on it.
All grids have a spatial granularity. It is possible to set/query values at any
(floating point) position, but the set/get functions snap each coordinate to its
granularity cell (floor division by the granularity).
Internally, the engine only adds non-zero values to the database, and removes
all those set to zero.
"""
import logging
import numpy as np
import azrael.config as config
from IPython import embed as ipshell
from azrael.types import typecheck, RetVal
# Global database handle.
_DB_Grid = config.getMongoClient()['azrael_grid']
# Create module logger.
logit = logging.getLogger('azrael.' + __name__)
def deleteAllGrids():
"""
Delete all currently defined grids.
:return: Success
"""
global _DB_Grid
client = config.getMongoClient()
name = 'azrael_grid'
client.drop_database(name)
_DB_Grid = client[name]
return RetVal(True, None, None)
def getAllGridNames():
"""
Return all the names of all currently defined grids.
:return: grid names.
:rtype: tuple of strings.
"""
if _DB_Grid is None:
return RetVal(False, 'Not initialised', None)
else:
        # Every grid sits in its own collection. The grid names are hence the
        # collection names, except for 'system.indexes' which Mongo creates
        # internally.
names = set(_DB_Grid.collection_names())
names.discard('system.indexes')
return RetVal(True, None, tuple(names))
@typecheck
def defineGrid(name: str, vecDim: int, granularity: (int, float)):
"""
Define a new grid with ``name``.
Every grid element is a vector with ``vecDim`` elements. The grid has the
spatial ``granularity`` (in meters). The minimum granularity is 1E-9m.
:param str name: grid name
:param int vecDim: number of data dimensions.
:param float granularity: spatial granularity in Meters.
:return: Success
"""
# DB handle must have been initialised.
if _DB_Grid is None:
return RetVal(False, 'Not initialised', None)
# Sanity check.
if granularity < 1E-9:
return RetVal(False, 'Granularity must be >1E-9', None)
# Sanity check.
if vecDim <= 0:
return RetVal(False, 'Vector dimension must be positive integer', None)
# Return with an error if the grid ``name`` is already defined.
if name in _DB_Grid.collection_names():
msg = 'Grid <{}> already exists'.format(name)
logit.info(msg)
return RetVal(False, msg, None)
# Flush the DB (just a pre-caution) and add the admin element.
db = _DB_Grid[name]
db.drop()
db.insert({'admin': 'admin', 'vecDim': vecDim, 'gran': granularity})
# Create indexes for fast lookups.
db.ensure_index([('x', 1), ('y', 1), ('z', 1)])
db.ensure_index([('strPos', 1)])
# All good.
return RetVal(True, None, None)
def getGridDB(name: str):
"""
Return the database handle and admin field of the ``name`` grid.
:param str name: name of grid.
:return: (db, admin)
"""
# DB handle must have been initialised.
if _DB_Grid is None:
return RetVal(False, 'Not initialised', None)
# Return with an error if the grid ``name`` does not exist.
if name not in _DB_Grid.collection_names():
msg = 'Unknown grid <{}>'.format(name)
logit.info(msg)
return RetVal(False, msg, None)
# Ensure the admin element exists.
db = _DB_Grid[name]
admin = db.find_one({'admin': 'admin'})
if admin is None:
return RetVal(False, 'Bug: could not find admin element', None)
return RetVal(True, None, (db, admin))
@typecheck
def resetGrid(name: str):
"""
Reset all values of the grid ``name``.
:param str name: grid name to reset.
:return: Success
"""
# Fetch the database handle.
ret = getGridDB(name)
if not ret.ok:
return ret
db, admin = ret.data
# Resetting a grid equates to deleting all values in the collection so that
# all values assume their default again. We therefore simply drop the
# entire collection and re-insert the admin element.
db.drop()
db.insert(admin)
return RetVal(True, None, None)
@typecheck
def deleteGrid(name: str):
"""
Delete the grid ``name``.
:param str name: grid name.
:return: Success
"""
# Fetch the database handle (we will not use it but this function call does
# all the error checking for us already).
ret = getGridDB(name)
if not ret.ok:
return ret
    # Drop the entire collection; this removes the grid and all its values.
_DB_Grid.drop_collection(name)
# All good.
return RetVal(True, None, None)
def _encodePosition(pos: np.ndarray, granularity: float):
"""
Return the grid index based on ``pos`` and ``granularity``.
:param array pos: 3-element vector.
:param float granularity: positive scalar to specify the grid granularity.
:return: (px, py, pz, strPos)
"""
    # Enforce NumPy types to ensure consistent rounding/truncation behaviour.
pos = np.array(pos, np.float64)
granularity = float(granularity)
# Compute the array index in each dimension.
px, py, pz = [int(_ // granularity) for _ in pos]
# Compute a string representation of the array index.
strPos = '{}:{}:{}'.format(px, py, pz)
# Return the indexes.
return px, py, pz, strPos
def _encodeData(px: int, py: int, pz: int, strPos, val: list):
query = {'strPos': strPos}
data = {'x': px, 'y': py, 'z': pz,
'val': val, 'strPos': strPos}
return query, data
@typecheck
def getValues(name: str, positions: (tuple, list)):
"""
Return the value at ``positions`` in a tuple of NumPy arrays.
:param str name: grid name
:param list positions: grid positions (in string format)
:return: list of grid values at ``positions``.
"""
# Return immediately if we did not get any values.
if len(positions) == 0:
return RetVal(False, '<setValues> received no arguments', None)
# Fetch the database handle.
ret = getGridDB(name)
if not ret.ok:
return ret
db, admin = ret.data
gran, vecDim = admin['gran'], admin['vecDim']
del admin, ret
# Ensure the positions are valid.
strPositions = []
try:
for pos in positions:
assert isinstance(pos, (tuple, list, np.ndarray))
assert len(pos) == 3
px, py, pz, strPos = _encodePosition(pos, gran)
strPositions.append(strPos)
except AssertionError:
return RetVal(False, '<getValues> received invalid positions', None)
# Find all values and compile the output list.
values = {_['strPos']: _['val']
for _ in db.find({'strPos': {'$in': strPositions}})}
    # Put the grid values into the output list. The ``positions`` argument (or
    # ``strPositions``) uniquely specifies their order. Use zeros whenever a
    # grid value was unavailable.
out = np.zeros((len(strPositions), vecDim), np.float64)
for idx, pos in enumerate(strPositions):
if pos in values:
out[idx, :] = np.array(values[pos], np.float64)
return RetVal(True, None, out)
@typecheck
def setValues(name: str, posVals: (tuple, list)):
"""
Update the grid values as specified in ``posVals``.
:param list posVals: list of (pos, val) tuples.
:return: Success
"""
# Return immediately if we did not get any values.
if len(posVals) == 0:
return RetVal(False, '<setValues> received no arguments', None)
# Fetch the database handle.
ret = getGridDB(name)
if not ret.ok:
return ret
db, admin = ret.data
gran, vecDim = admin['gran'], admin['vecDim']
del admin, ret
    # Validate each (pos, val) entry and queue the corresponding bulk update.
bulk = db.initialize_unordered_bulk_op()
try:
for pv in posVals:
assert isinstance(pv, (tuple, list, np.ndarray))
assert len(pv) == 2
assert isinstance(pv[0], (tuple, np.ndarray))
assert isinstance(pv[1], (tuple, np.ndarray))
pos, val = pv
assert len(pos) == 3
assert len(val) == vecDim
# Convert the position to grid indexes.
px, py, pz, strPos = _encodePosition(pos, gran)
            # Get database query and entry.
query, data = _encodeData(px, py, pz, strPos, val.tolist())
# Update the value in the DB, unless it is essentially zero, in
# which case remove it to free up space.
if np.sum(np.abs(val)) < 1E-9:
bulk.find(query).remove()
else:
bulk.find(query).upsert().update({'$set': data})
except AssertionError:
return RetVal(False, '<setValues> received invalid arguments', None)
bulk.execute()
return RetVal(True, None, None)
@typecheck
def getRegion(name: str, ofs: np.ndarray,
regionDim: (np.ndarray, list, tuple)):
"""
Return the grid values starting at 3D position ``ofs``.
    The returned array comprises four dimensions. The first three correspond
to x/y/z position and the fourth contains the data. That data is itself a
vector. The size of that vector was specified when the grid was created.
The dimension of the returned region depends on ``regionDim`` and the
``vecDim`` of the grid. For instance, if regionDim=(1, 2, 3) and the
vecDim=4, then the shape of the returned NumPy array is (1, 2, 3, 4).
:param str name: grid name.
:param 3D-vector ofs: start position in grid from where to read values.
:param 3D-vector regionDim: number of values to read in each dimension.
:return: 4D matrix.
"""
# Fetch the database handle.
ret = getGridDB(name)
if not ret.ok:
return ret
db, admin = ret.data
gran, vecDim = admin['gran'], admin['vecDim']
del admin, ret
# Sanity check: ``ofs`` and ``regionDim`` must have 3 entries each.
if (len(ofs) != 3) or (len(regionDim) != 3):
return RetVal(False, 'Invalid parameter values', None)
# Sanity check: ``regionDim`` must only contain positive integers.
regionDim = np.array(regionDim, np.int64)
if np.amin(regionDim) < 1:
return RetVal(False, 'Dimensions must be positive', None)
# Compute the grid index of ``ofs``.
x0, y0, z0, strPos = _encodePosition(ofs, gran)
# Convenience: the ``regionDim`` parameter uniquely specifies the number of
# grid positions to query in each dimension.
x1 = int(x0 + regionDim[0])
y1 = int(y0 + regionDim[1])
z1 = int(z0 + regionDim[2])
# Query the values of all the specified grid positions.
res = db.find({'x': {'$gte': x0, '$lt': x1},
'y': {'$gte': y0, '$lt': y1},
'z': {'$gte': z0, '$lt': z1}})
# Populate the output data structure.
out = np.zeros(np.hstack((regionDim, vecDim)), np.float64)
for doc in res:
# Convert the grid index to an array index, ie simply compute all grid
# indices relative to the ``ofs`` position.
x = int(doc['x'] - x0)
y = int(doc['y'] - y0)
z = int(doc['z'] - z0)
out[x, y, z, :] = np.array(doc['val'], np.float64)
return RetVal(True, None, out)
@typecheck
def setRegion(name: str, ofs: np.ndarray, gridValues: np.ndarray):
"""
Update the grid values starting at ``ofs`` with ``gridValues``.
:param str name: grid name.
:param 3D-vector ofs: the values are inserted relative to this ``ofs``.
:param 4D-vector gridValues: the data values to set.
:return: Success
"""
# Fetch the database handle.
ret = getGridDB(name)
if not ret.ok:
return ret
db, admin = ret.data
gran, vecDim = admin['gran'], admin['vecDim']
del admin, ret
# Sanity check: ``ofs`` must denote a position in 3D space.
if len(ofs) != 3:
return RetVal(False, 'Invalid parameter values', None)
# Sanity check: every ``gridValues`` must be a 3D matrix where every entry
# is a vector with ``vecDim`` elements.
if (len(gridValues.shape) != 4) or (gridValues.shape[3] != vecDim):
return RetVal(False, 'Invalid gridValues dimension', None)
# Populate the output array.
bulk = db.initialize_unordered_bulk_op()
for x in range(gridValues.shape[0]):
for y in range(gridValues.shape[1]):
for z in range(gridValues.shape[2]):
# Convenience.
val = gridValues[x, y, z, :]
# Compute the grid position of the current data value and
# convert it to integer indexes.
pos = ofs + np.array([x, y, z])
px, py, pz, strPos = _encodePosition(pos, gran)
                # Get database query and entry.
query, data = _encodeData(px, py, pz, strPos, val.tolist())
if np.sum(np.abs(val)) < 1E-9:
bulk.find(query).remove()
else:
bulk.find(query).upsert().update({'$set': data})
# Execute the Mongo query. Don't bother with the return value.
bulk.execute()
return RetVal(True, None, None)
|
daviddeng/azrael
|
azrael/vectorgrid.py
|
Python
|
agpl-3.0
| 14,099 |
# -*- coding: utf-8 -*-
"""
*** RES PARTNER
Created: 26 Aug 2016
Last updated: 20 Sep 2019
"""
from openerp import models, fields, api
from . import partner_vars
class Partner(models.Model):
"""
"""
_inherit = 'res.partner'
_order = 'write_date desc'
# ----------------------------------------------------------- Primitives ---------------------
# Name
name = fields.Char(
'Name',
#'(Generado automaticamente, no tocar)',
select=True,
index=True,
)
# ----------------------------------------------------------- Address -----------------------------
# Address
x_address = fields.Char(
"Dirección",
compute='_compute_x_address',
)
@api.multi
#@api.depends('')
def _compute_x_address(self):
for record in self:
if record.street != False and record.street2 != False and record.city != False:
record.x_address = record.street.title() + ' ' + record.street2.title() + ' - ' + record.city.title()
# ----------------------------------------------------------- Hard wired - With Patient -----------
# Company
x_ruc = fields.Char(
"RUC",
)
x_firm = fields.Char(
"Razón social",
)
# Phones
phone = fields.Char(
'Fijo',
required=False,
)
mobile = fields.Char(
'Celular',
)
email = fields.Char(
string = 'Email',
placeholder = '',
required=False,
)
# Address
country_id = fields.Many2one(
'res.country',
string = 'País',
default = 175, # Peru
#ondelete='restrict',
required=True,
)
city = fields.Selection(
selection = partner_vars._city_list,
string = 'Departamento',
default = 'lima',
required=False,
)
# For patient short card
city_char = fields.Char(
compute='_compute_city_char',
)
#@api.multi
@api.depends('city')
def _compute_city_char(self):
for record in self:
record.city_char = record.city
street2 = fields.Char(
string = "Distrito 2",
#required=True,
required=False,
)
street2_sel = fields.Selection(
selection = partner_vars._street2_list,
string = "Distrito",
required=False,
)
street = fields.Char(
string = "Dirección",
required=False,
)
zip = fields.Char(
#'Zip',
string = 'Código',
size=24,
#change_default=True,
required=False,
)
# Only for foreign addresses
x_foreign = fields.Boolean(
string="Dirección en el extranjero",
)
x_address_foreign = fields.Text(
'Dirección',
#string=".",
)
# Vspace
vspace = fields.Char(
' ',
readonly=True
)
# ----------------------------------------------------------- Indexed ------------------------------------------------------
# Dni
x_dni = fields.Char(
"DNI",
index=True,
required=False,
)
# ----------------------------------------------------------- My Company ------------------------------------------------------
# My company
x_my_company = fields.Boolean(
'Mi compañía ?',
)
# Series
x_series = fields.Char(
string='Serie',
)
# Autorization
x_authorization = fields.Char(
string='Autorización',
)
# Warning Sales
x_warning = fields.Text(
'Condiciones de Venta',
)
# Warning Purchase
x_warning_purchase = fields.Text(
'Condiciones de Compra',
)
# ----------------------------------------------------------- Vip ------------------------------------------------------
# Vip
x_vip = fields.Boolean(
string="VIP",
default=False,
compute='_compute_x_vip',
)
@api.multi
#@api.depends('x_card')
def _compute_x_vip(self):
# Does he have a Vip card ?
for record in self:
x_card = record.env['openhealth.card'].search([
('patient_name','=', record.name),
],
#order='appointment_date desc',
limit=1,)
if x_card.name != False:
record.x_vip = True
#pricelist_name = 'VIP'
record.action_ppl_vip()
else:
record.x_vip = False
#pricelist_name = 'Public Pricelist'
record.action_ppl_public()
# ----------------------------------------------------------- Pricelist ------------------------------------------------------
# PPL - Public
@api.multi
def action_ppl_public(self):
pricelist_name = 'Public Pricelist'
# Pricelist
pricelist = self.env['product.pricelist'].search([
('name','=', pricelist_name),
],
#order='appointment_date desc',
limit=1,)
self.property_product_pricelist = pricelist
# action_ppl_public
# PPL - Vip
@api.multi
def action_ppl_vip(self):
pricelist_name = 'VIP'
# Pricelist
pricelist = self.env['product.pricelist'].search([
('name','=', pricelist_name),
],
#order='appointment_date desc',
limit=1,)
self.property_product_pricelist = pricelist
# action_ppl_vip
# ----------------------------------------------------------- On Changes ------------------------------------------------------
# ----------------------------------------------------------- Validate - DNI RUC ------------------------------------------------------
# Ternary If
#isApple = True if fruit == 'Apple' else False
# Name
@api.onchange('name')
def _onchange_name(self):
if self.name != False:
name = self.name.strip().upper()
self.name = " ".join(name.split())
# Foreign
@api.onchange('x_foreign')
def _onchange_x_foreign(self):
self.city = "" if self.x_foreign == True else 'don'
# ----------------------------------------------------------- CRUD ------------------------------------------------------
# Create
@api.model
def create(self,vals):
#print
#print 'CRUD - Partner - Create'
#print vals
#print
        # Compact and uppercase the name
        if 'name' in vals:
            # Uppercase; strip leading/trailing spaces; collapse inner whitespace
            name = vals['name']
            name = name.strip().upper()
            name = " ".join(name.split())
            vals['name'] = name
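            # Hypothetical example: '  juan   perez ' would be stored as 'JUAN PEREZ'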
# Put your logic here
res = super(Partner, self).create(vals)
# Put your logic here
return res
# CRUD - Create
|
gibil5/openhealth
|
models/patient/partner.py
|
Python
|
agpl-3.0
| 6,152 |
from comics.aggregator.crawler import CrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Something of that Ilk"
language = "en"
url = "http://www.somethingofthatilk.com/"
start_date = "2011-02-19"
end_date = "2013-11-06"
active = False
rights = "Ty Devries"
class Crawler(CrawlerBase):
def crawl(self, pub_date):
pass # Comic no longer published
|
jodal/comics
|
comics/comics/somethingofthatilk.py
|
Python
|
agpl-3.0
| 441 |
#
# Copyright (C) 2019 pengjian.uestc @ gmail.com
#
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
import pytest
import redis
import logging
import re
import time
from util import random_string, connect
logger = logging.getLogger('redis-test')
def test_set_get_delete(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = random_string(10)
assert r.set(key, val) == True
assert r.get(key) == val
assert r.delete(key) == 1
assert r.get(key) == None
def test_get(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
r.delete(key)
assert r.get(key) == None
def test_del_existent_key(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = random_string(10)
r.set(key, val)
assert r.get(key) == val
assert r.delete(key) == 1
@pytest.mark.xfail(reason="DEL command does not support to return number of deleted keys")
def test_del_non_existent_key(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
r.delete(key)
assert r.delete(key) == 0
def test_set_empty_string(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = ""
r.set(key, val)
assert r.get(key) == val
r.delete(key)
def test_set_large_string(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = random_string(4096)
r.set(key, val)
assert r.get(key) == val
r.delete(key)
def test_ping(redis_host, redis_port):
r = connect(redis_host, redis_port)
assert r.ping() == True
def test_echo(redis_host, redis_port):
r = connect(redis_host, redis_port)
assert r.echo('hello world') == 'hello world'
def test_select(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = random_string(4096)
r.set(key, val)
assert r.get(key) == val
logger.debug('Switch to database 1')
assert r.execute_command('SELECT 1') == 'OK'
assert r.get(key) == None
logger.debug('Switch back to default database 0')
assert r.execute_command('SELECT 0') == 'OK'
assert r.get(key) == val
r.delete(key)
assert r.get(key) == None
def test_select_invalid_db(redis_host, redis_port):
r = connect(redis_host, redis_port)
    logger.debug('Assume that the user will not set redis_database_count to be bigger than 100')
invalid_db_idx = 100
logger.debug('Try to switch to invalid database {}'.format(invalid_db_idx))
try:
query = 'SELECT {}'.format(invalid_db_idx)
r.execute_command(query)
raise Exception('Expect that `{}` does not work'.format(query))
except redis.exceptions.ResponseError as ex:
assert str(ex) == 'DB index is out of range'
def test_exists_existent_key(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = random_string(10)
r.set(key, val)
assert r.get(key) == val
assert r.exists(key) == 1
def test_exists_non_existent_key(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
r.delete(key)
assert r.exists(key) == 0
def test_exists_multiple_existent_key(redis_host, redis_port):
r = connect(redis_host, redis_port)
key1 = random_string(10)
val1 = random_string(10)
key2 = random_string(10)
val2 = random_string(10)
key3 = random_string(10)
val3 = random_string(10)
key4 = random_string(10)
r.set(key1, val1)
r.set(key2, val2)
r.set(key3, val3)
r.delete(key4)
assert r.get(key1) == val1
assert r.get(key2) == val2
assert r.get(key3) == val3
assert r.get(key4) == None
assert r.exists(key1, key2, key3, key4) == 3
def test_exists_lots_of_keys(redis_host, redis_port):
r = connect(redis_host, redis_port)
keys = []
for i in range(0, 30):
k = random_string(11)
v = random_string(10)
r.set(k, v)
keys.append(k)
assert r.exists(*keys) == len(keys)
def test_setex_ttl(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = random_string(10)
assert r.setex(key, 100, val) == True
time.sleep(1)
assert r.ttl(key) == 99
def test_set_ex(redis_host, redis_port):
r = connect(redis_host, redis_port)
key = random_string(10)
val = random_string(10)
assert r.execute_command('SET', key, val, 'EX', 100)
time.sleep(1)
assert r.ttl(key) == 99
def test_lolwut(redis_host, redis_port):
pattern1 = r'''
^⠀⡤⠤⠤⠤⠤⠤⠤⠤⡄
⠀⡇⠀⠀⠀⠀⠀⠀⠀⡇
⠀⡇⠀⠀⠀⠀⠀⠀⠀⡇
⠀⡇⠀⠀⠀⠀⠀⠀⠀⡇
⠀⡧⠤⠤⠤⠤⠤⠤⠤⡇
⠀⡇⠀⠀⠀⠀⠀⠀⠀⡇
⠀⡇⠀⠀⠀⠀⠀⠀⠀⡇
⠀⡇⠀⠀⠀⠀⠀⠀⠀⡇
⠀⠧⠤⠤⠤⠤⠤⠤⠤⠇
Georg Nees - schotter, plotter on paper, 1968\. Redis ver\. [0-9]+\.[0-9]+\.[0-9]+
'''[1:-1]
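    # The [1:-1] slice strips the newline right after the opening ''' and the one
    # right before the closing ''', so the regex anchors at the first drawn line.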
pattern2 = r'''
^⠀⡤⠤⠤⠤⡤⠤⠤⠤⡄
⠀⡇⠀⠀⠀⡇⠀⠀⠀⡇
⠀⡧⠤⠤⠤⡧⠤⠤⠤⡇
⠀⡇⠀⠀⠀⡇⠀⠀⠀⡇
⠀⠧⠤⠤⠤⠧⠤⠤⠤⠇
Georg Nees - schotter, plotter on paper, 1968\. Redis ver\. [0-9]+\.[0-9]+\.[0-9]+
'''[1:-1]
pattern3 = r'''
^⠀⡤⠤⡤⠤⡤⠤⡤⠤⡄
⠀⡧⠤⡧⠤⡧⠤⡧⠤⡇
⠀⠧⠤⠧⠤⠧⠤⠧⠤⠇
Georg Nees - schotter, plotter on paper, 1968\. Redis ver\. [0-9]+\.[0-9]+\.[0-9]+
'''[1:-1]
r = connect(redis_host, redis_port)
res = r.execute_command('LOLWUT', 10, 1, 2)
assert re.match(pattern1, res)
res = r.execute_command('LOLWUT', 10, 2, 2)
assert re.match(pattern2, res)
res = r.execute_command('LOLWUT', 10, 4, 2)
assert re.match(pattern3, res)
def test_strlen(redis_host, redis_port):
r = connect(redis_host, redis_port)
key1 = random_string(10)
val1 = random_string(10)
key2 = random_string(10)
val2 = random_string(1000)
key3 = random_string(10)
assert r.set(key1, val1) == True
assert r.set(key2, val2) == True
r.delete(key3)
assert r.strlen(key1) == 10
assert r.strlen(key2) == 1000
assert r.strlen(key3) == 0
@pytest.mark.xfail(reason="types on redis does not implemented yet")
def test_strlen_wrongtype(redis_host, redis_port):
r = connect(redis_host, redis_port)
key1 = random_string(10)
val1 = random_string(10)
val2 = random_string(10)
assert r.rpush(key1, val1)
assert r.rpush(key1, val2)
try:
r.strlen(key1)
except redis.exceptions.ResponseError as ex:
assert str(ex) == 'WRONGTYPE Operation against a key holding the wrong kind of value'
|
scylladb/scylla
|
test/redis/test_strings.py
|
Python
|
agpl-3.0
| 6,704 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('account', '0002_email_max_length'),
('badgeuser', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CachedEmailAddress',
fields=[
],
options={
'proxy': True,
},
bases=('account.emailaddress', models.Model),
),
]
|
concentricsky/badgr-server
|
apps/badgeuser/migrations/0002_cachedemailaddress.py
|
Python
|
agpl-3.0
| 491 |
"""
Django settings for tumuli project on Heroku. For more info, see:
https://github.com/heroku/heroku-django-template
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(PROJECT_ROOT)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "^tzy2jws(kgjxwx__!8&#xmxpas83dcp*p5q$_0n7r0(xtcvvx"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
SITE_ID = 1
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.sites',
# Disable Django's own staticfiles handling in favour of WhiteNoise, for
# greater consistency between gunicorn and `./manage.py runserver`. See:
# http://whitenoise.evans.io/en/stable/django.html#using-whitenoise-in-development
'whitenoise.runserver_nostatic',
'django.contrib.staticfiles',
'storages',
'photologue',
'sortedm2m',
'biography',
'rest_framework',
'drf_multiple_model',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tumuli.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'debug': DEBUG,
},
},
]
WSGI_APPLICATION = 'tumuli.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'he'
LANGUAGES = (
('en-us', 'English'),
('he', 'Hebrew'),
)
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Change 'default' database configuration with $DATABASE_URL.
DATABASES['default'].update(dj_database_url.config(conn_max_age=500))
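# dj_database_url reads the DATABASE_URL environment variable, e.g. (hypothetical value)
# DATABASE_URL=postgres://user:password@host:5432/dbname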
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = [
os.path.join(PROJECT_ROOT, 'static'),
]
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
if 'AWS_SECRET_ACCESS_KEY' in os.environ:
# S3 media
    AWS_DEFAULT_ACL = "public-read"
    AWS_STORAGE_BUCKET_NAME = os.environ['MEDIA_S3_BUCKET']
    AWS_S3_SIGNATURE_VERSION = "s3v4"
    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
    AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
    AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
    AWS_S3_REGION_NAME = os.environ['AWS_REGION_NAME']
else:
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_URL = '/media/'
|
daonb/tumulus
|
tumuli/settings.py
|
Python
|
agpl-3.0
| 4,745 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# License: GNU General Public License v3. See license.txt
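# Data patch: flag GL Entries whose source voucher was cancelled (docstatus = 2)
# by setting is_cancelled = 'Yes'; per-voucher errors are silently ignored.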
def execute():
import webnotes
entries = webnotes.conn.sql("""select voucher_type, voucher_no
from `tabGL Entry` group by voucher_type, voucher_no""", as_dict=1)
for entry in entries:
try:
docstatus = webnotes.conn.sql("""select docstatus from `tab%s` where name = %s
and docstatus=2""" % (entry['voucher_type'], "%s"), entry['voucher_no'])
is_cancelled = docstatus and 'Yes' or None
if is_cancelled:
print entry['voucher_type'], entry['voucher_no']
webnotes.conn.sql("""update `tabGL Entry` set is_cancelled = 'Yes'
where voucher_type = %s and voucher_no = %s""",
(entry['voucher_type'], entry['voucher_no']))
except Exception, e:
pass
|
gangadhar-kadam/sapphire_app
|
patches/october_2012/fix_cancelled_gl_entries.py
|
Python
|
agpl-3.0
| 795 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields, _
class ICPProperty(models.AbstractModel):
""" An ICP property is a class linked to projects to describe
the project hobbies/activities/etc... in several languages. """
_name = 'icp.property'
_inherit = 'connect.multipicklist'
res_model = 'compassion.project'
value = fields.Char(translate=True)
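# The concrete property models below subclass icp.property and override
# res_field (and, for the lifecycle properties, res_model) to bind each
# picklist to the field it feeds on the project or lifecycle record.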
class ProjectInvolvement(models.Model):
_inherit = 'icp.property'
_name = 'icp.involvement'
_description = 'ICP Involvement'
res_field = 'involvement_ids'
class ChurchMinistry(models.Model):
_name = 'icp.church.ministry'
_inherit = 'icp.property'
_description = 'ICP Church ministry'
res_field = 'ministry_ids'
class ICPProgram(models.Model):
_name = 'icp.program'
_inherit = 'icp.property'
_description = 'ICP Program'
res_field = 'implemented_program_ids'
class ICPChurchFacility(models.Model):
_name = 'icp.church.facility'
_inherit = 'icp.property'
_description = 'ICP Church facility'
res_field = 'facility_ids'
class ICPMobileDevice(models.Model):
_name = 'icp.mobile.device'
_inherit = 'icp.property'
_description = 'ICP mobile device'
res_field = 'mobile_device_ids'
class ICPChurchUtility(models.Model):
_name = 'icp.church.utility'
_inherit = 'icp.property'
_description = 'ICP Church utility'
res_field = 'utility_ids'
class ICPSpiritualActivity(models.Model):
_name = 'icp.spiritual.activity'
_inherit = 'icp.property'
_description = 'ICP spiritual activity'
_order = 'name'
class ICPCognitiveActivity(models.Model):
_name = 'icp.cognitive.activity'
_inherit = 'icp.property'
_description = 'ICP cognitive activity'
_order = 'name'
class ICPPhysicalActivity(models.Model):
_name = 'icp.physical.activity'
_inherit = 'icp.property'
_description = 'ICP physical activity'
_order = 'name'
class ICPSociologicalActivity(models.Model):
_name = 'icp.sociological.activity'
_inherit = 'icp.property'
_description = 'ICP sociological activity'
_order = 'name'
class ICPCommunityOccupation(models.Model):
_name = 'icp.community.occupation'
_inherit = 'icp.property'
_description = 'ICP Community occupation'
res_field = 'primary_adults_occupation_ids'
class ICPSchoolCost(models.Model):
_name = 'icp.school.cost'
_inherit = 'icp.property'
_description = 'ICP School costs'
res_field = 'school_cost_paid_ids'
class ConnectMonth(models.Model):
_name = 'connect.month'
_inherit = 'connect.multipicklist'
_description = 'Connect month'
name = fields.Char(translate=True)
@api.model
def get_months_selection(self):
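        """Return the month selection used by GMC Connect fields.

        Both the abbreviated keys ('Jan', 'Feb', ...) and the full-name keys
        ('January', ...) are listed; presumably Connect data can carry either
        spelling (assumption based on the duplicated entries below).
        """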
return [
('Jan', _('January')),
('Feb', _('February')),
('Mar', _('March')),
('Apr', _('April')),
('May', _('May')),
('Jun', _('June')),
('Jul', _('July')),
('Aug', _('August')),
('Sep', _('September')),
('Oct', _('October')),
('Nov', _('November')),
('Dec', _('December')),
('January', _('January')),
('February', _('February')),
('March', _('March')),
('April', _('April')),
('May', _('May')),
('June', _('June')),
('July', _('July')),
('August', _('August')),
('September', _('September')),
('October', _('October')),
('November', _('November')),
('December', _('December')),
]
class ICPDiet(models.Model):
_name = 'icp.diet'
_inherit = 'icp.property'
_description = 'ICP Diet'
res_field = 'primary_diet_ids'
class ICPLifecycleReason(models.Model):
_name = 'icp.lifecycle.reason'
_inherit = 'icp.property'
_description = 'ICP Lifecycle Reason'
res_model = 'compassion.project.ile'
res_field = 'suspension_reason_ids'
class ICPSuspensionExtensionReason(models.Model):
_name = 'icp.suspension.extension.reason'
_inherit = 'icp.property'
_description = 'ICP Suspension Reason'
res_model = 'compassion.project.ile'
res_field = 'extension_1_reason_ids'
|
maxime-beck/compassion-modules
|
child_compassion/models/icp_property.py
|
Python
|
agpl-3.0
| 4,638 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
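        # Backfill fixed_at for reports already in status 3 (presumably "solved")
        # that are missing it, using the timestamp of their latest event_type 2
        # entry in the report event log.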
reports_no_fixed_at = orm['fixmystreet.Report'].objects.filter(fixed_at__isnull=True, status=3)
for report in reports_no_fixed_at:
event = orm['fixmystreet.ReportEventLog'].objects.filter(report_id=report.id, event_type=2).latest("event_at")
report.fixed_at = event.event_at
report.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'default': "'!'", 'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'fixmystreet.faqentry': {
'Meta': {'ordering': "['order']", 'object_name': 'FaqEntry'},
'a_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'a_nl': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'q_fr': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'q_nl': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.fmsuser': {
'Meta': {'ordering': "['last_name']", 'object_name': 'FMSUser', '_ormbases': [u'auth.User']},
'agent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'applicant': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'contractor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fmsuser_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'last_used_language': ('django.db.models.fields.CharField', [], {'default': "'FR'", 'max_length': '10', 'null': 'True'}),
'leader': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'logical_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fmsuser_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'quality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
u'fixmystreet.groupmailconfig': {
'Meta': {'object_name': 'GroupMailConfig'},
'digest_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'digest_created': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'digest_inprogress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'digest_other': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fixmystreet.OrganisationEntity']", 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notify_group': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'notify_members': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'fixmystreet.historicalfmsuser': {
'Meta': {'ordering': "(u'-history_date', u'-history_id')", 'object_name': 'HistoricalFMSUser'},
'agent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'applicant': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'contractor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'created_by_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
u'history_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'history_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'history_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_used_language': ('django.db.models.fields.CharField', [], {'default': "'FR'", 'max_length': '10', 'null': 'True'}),
'leader': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'logical_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'modified_by_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
u'organisation_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'default': "'!'", 'max_length': '128'}),
'quality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
u'user_ptr_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '75', 'db_index': 'True'})
},
u'fixmystreet.historicalorganisationentity': {
'Meta': {'ordering': "(u'-history_date', u'-history_id')", 'object_name': 'HistoricalOrganisationEntity'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'created_by_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
u'dependency_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'feature_id': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
u'history_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'history_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'history_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'modified_by_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'slug_fr': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'slug_nl': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1'})
},
u'fixmystreet.historicalpage': {
'Meta': {'ordering': "(u'-history_date', u'-history_id')", 'object_name': 'HistoricalPage'},
'content_fr': ('ckeditor.fields.RichTextField', [], {}),
'content_nl': ('ckeditor.fields.RichTextField', [], {'null': 'True', 'blank': 'True'}),
u'history_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'history_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'history_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
'slug_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.historicalreport': {
'Meta': {'ordering': "(u'-history_date', u'-history_id')", 'object_name': 'HistoricalReport'},
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'address_fr': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address_nl': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address_number': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address_number_as_int': ('django.db.models.fields.IntegerField', [], {'max_length': '255'}),
'address_regional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'category_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
u'citizen_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'close_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'contractor_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'created_by_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_planned': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'false_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fixed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gravity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'hash_code': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'history_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'history_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'history_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
u'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
u'merged_with_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'modified_by_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'photo': ('django.db.models.fields.TextField', [], {'max_length': '100', 'blank': 'True'}),
'point': ('django.contrib.gis.db.models.fields.PointField', [], {'srid': '31370', 'null': 'True', 'blank': 'True'}),
'postalcode': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'probability': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'quality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'responsible_department_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
u'responsible_entity_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
u'secondary_category_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.TextField', [], {'default': "'web'"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'terms_of_use_validated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'third_party_responsibility': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'thumbnail': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'thumbnail_pro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'fixmystreet.listitem': {
'Meta': {'object_name': 'ListItem'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'label_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'model_class': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'model_field': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'fixmystreet.mailnotificationtemplate': {
'Meta': {'object_name': 'MailNotificationTemplate'},
'content_fr': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'content_nl': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'title_fr': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'title_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.organisationentity': {
'Meta': {'ordering': "['name_fr']", 'object_name': 'OrganisationEntity'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'organisationentity_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'dependency': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'associates'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'dispatch_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'assigned_to_department'", 'blank': 'True', 'to': u"orm['fixmystreet.ReportCategory']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'feature_id': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'organisationentity_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'slug_fr': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'slug_nl': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1'})
},
u'fixmystreet.organisationentitysurface': {
'Meta': {'object_name': 'OrganisationEntitySurface'},
'geom': ('django.contrib.gis.db.models.fields.GeometryField', [], {'srid': '31370'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fixmystreet.OrganisationEntity']"}),
'urbis_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'version_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'fixmystreet.page': {
'Meta': {'object_name': 'Page'},
'content_fr': ('ckeditor.fields.RichTextField', [], {}),
'content_nl': ('ckeditor.fields.RichTextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.report': {
'Meta': {'object_name': 'Report'},
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'address_fr': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address_nl': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address_number': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address_number_as_int': ('django.db.models.fields.IntegerField', [], {'max_length': '255'}),
'address_regional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fixmystreet.ReportMainCategoryClass']", 'null': 'True', 'blank': 'True'}),
'citizen': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'citizen_reports'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'close_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'contractor': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'assigned_reports'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'report_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'date_planned': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'false_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fixed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gravity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'hash_code': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'merged_with': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'merged_reports'", 'null': 'True', 'to': u"orm['fixmystreet.Report']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'report_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'photo': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'point': ('django.contrib.gis.db.models.fields.PointField', [], {'srid': '31370', 'null': 'True', 'blank': 'True'}),
'postalcode': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'previous_managers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'previous_reports'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['fixmystreet.FMSUser']"}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'probability': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'quality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'responsible_department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports_in_department'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'responsible_entity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'reports_in_charge'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'secondary_category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fixmystreet.ReportCategory']", 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.TextField', [], {'default': "'web'"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'terms_of_use_validated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'third_party_responsibility': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'thumbnail': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'thumbnail_pro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'fixmystreet.reportattachment': {
'Meta': {'object_name': 'ReportAttachment'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportattachment_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logical_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportattachment_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': u"orm['fixmystreet.Report']"}),
'security_level': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'fixmystreet.reportcategory': {
'Meta': {'object_name': 'ReportCategory'},
'category_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'categories'", 'to': u"orm['fixmystreet.ReportMainCategoryClass']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportcategory_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportcategory_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'organisation_communal': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'categories_communal'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'organisation_regional': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'categories_regional'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'secondary_category_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'categories'", 'to': u"orm['fixmystreet.ReportSecondaryCategoryClass']"}),
'slug_fr': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'slug_nl': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.reportcategoryhint': {
'Meta': {'object_name': 'ReportCategoryHint'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label_fr': ('django.db.models.fields.TextField', [], {}),
'label_nl': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'fixmystreet.reportcomment': {
'Meta': {'object_name': 'ReportComment', '_ormbases': [u'fixmystreet.ReportAttachment']},
u'reportattachment_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['fixmystreet.ReportAttachment']", 'unique': 'True', 'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'fixmystreet.reporteventlog': {
'Meta': {'ordering': "['event_at']", 'object_name': 'ReportEventLog'},
'event_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'merged_with_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activities'", 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'related_content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True'}),
'related_new_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'related_old_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activities'", 'to': u"orm['fixmystreet.Report']"}),
'status_new': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'status_old': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activities'", 'null': 'True', 'to': u"orm['auth.User']"}),
'value_old': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
},
u'fixmystreet.reportfile': {
'Meta': {'object_name': 'ReportFile', '_ormbases': [u'fixmystreet.ReportAttachment']},
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'file_creation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'file_type': ('django.db.models.fields.IntegerField', [], {}),
'image': ('django_fixmystreet.fixmystreet.utils.FixStdImageField', [], {'max_length': '100', 'name': "'image'", 'blank': 'True'}),
u'reportattachment_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['fixmystreet.ReportAttachment']", 'unique': 'True', 'primary_key': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.reportmaincategoryclass': {
'Meta': {'object_name': 'ReportMainCategoryClass'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportmaincategoryclass_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'hint': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fixmystreet.ReportCategoryHint']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportmaincategoryclass_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'slug_fr': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'slug_nl': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.reportnotification': {
'Meta': {'object_name': 'ReportNotification'},
'content_template': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'error_msg': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'notifications'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'recipient_mail': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'related_content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'related_object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'reply_to': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'sent_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'success': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'fixmystreet.reportreopenreason': {
'Meta': {'object_name': 'ReportReopenReason', '_ormbases': [u'fixmystreet.ReportComment']},
'reason': ('django.db.models.fields.IntegerField', [], {}),
u'reportcomment_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['fixmystreet.ReportComment']", 'unique': 'True', 'primary_key': 'True'})
},
u'fixmystreet.reportsecondarycategoryclass': {
'Meta': {'object_name': 'ReportSecondaryCategoryClass'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportsecondarycategoryclass_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reportsecondarycategoryclass_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'slug_fr': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'slug_nl': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'fixmystreet.reportsubscription': {
'Meta': {'unique_together': "(('report', 'subscriber'),)", 'object_name': 'ReportSubscription'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptions'", 'to': u"orm['fixmystreet.Report']"}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptions'", 'to': u"orm['fixmystreet.FMSUser']"})
},
u'fixmystreet.streetsurface': {
'Meta': {'object_name': 'StreetSurface'},
'administrator': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'geom': ('django.contrib.gis.db.models.fields.GeometryField', [], {'srid': '31370'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pw_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'ssft': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
'sslv': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
'urbis_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'version_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'fixmystreet.userorganisationmembership': {
'Meta': {'unique_together': "(('user', 'organisation'),)", 'object_name': 'UserOrganisationMembership'},
'contact_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'userorganisationmembership_created'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'userorganisationmembership_modified'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'memberships'", 'null': 'True', 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'memberships'", 'null': 'True', 'to': u"orm['fixmystreet.FMSUser']"})
},
u'fixmystreet.zipcode': {
'Meta': {'object_name': 'ZipCode'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'commune': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'zipcode'", 'to': u"orm['fixmystreet.OrganisationEntity']"}),
'hide': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_nl': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['fixmystreet']
symmetrical = True
|
IMIO/django-fixmystreet
|
django_fixmystreet/fixmystreet/migrations/0058_restore_fixed_at.py
|
Python
|
agpl-3.0
| 43,915 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
#update SqlAlchemy to work with elixir
import sqlalchemy.orm
sqlalchemy.orm.ScopedSession = sqlalchemy.orm.scoped_session
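# The alias above restores the old ScopedSession name that Elixir still imports;
# newer SQLAlchemy releases only expose it as scoped_session.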
from elixir import Entity, Field, Integer, DateTime, Text, \
ManyToOne, OneToOne, using_options, BLOB
class ResultsComputedIndicators(Entity):
using_options(tablename='computed_indicators')
indicator_name = Field(Text)
dataset_name = Field(Text)
expression = Field(Text)
run_id = Field(Integer)
data_path = Field(Text)
processor_name = Field(Text)
date_time = Field(DateTime)
project_name = Field(Text)
class RunsRunActivity(Entity):
using_options(tablename='run_activity')
run_id = Field(Integer, primary_key = True)
run_name = Field(Text)
run_description = Field(Text)
scenario_name = Field(Text)
cache_directory = Field(Text)
processor_name = Field(Text)
date_time = Field(DateTime)
status = Field(Text)
resources = Field(BLOB)
project_name = Field(Text)
#class ResultsVisualizations(Entity):
# using_options(tablename='visualizations')
#
# years = List
# indicators = ManyToMany('computed_indicators')
# data_path = Field(String)
# visualization_type = Integer
|
apdjustino/DRCOG_Urbansim
|
src/opus_core/services/services_tables.py
|
Python
|
agpl-3.0
| 1,396 |
"""
Instructor Dashboard API views
JSON views which the instructor dashboard requests.
Many of these GETs may become PUTs in the future.
"""
import csv
import json
import logging
import random
import re
import string
import six
import unicodecsv
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, PermissionDenied, ValidationError
from django.core.validators import validate_email
from django.db import IntegrityError, transaction
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.html import strip_tags
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_http_methods, require_POST
from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser
from edx_when.api import get_date_for_block
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from rest_framework import status
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView
from six import text_type
from six.moves import map, range
from submissions import api as sub_api # installed from the edx-submissions repository
import instructor_analytics.basic
import instructor_analytics.csvs
import instructor_analytics.distributions
from bulk_email.api import is_bulk_email_feature_enabled
from bulk_email.models import CourseEmail
from course_modes.models import CourseMode
from lms.djangoapps.certificates import api as certs_api
from lms.djangoapps.certificates.models import (
CertificateInvalidation,
CertificateStatuses,
CertificateWhitelist,
GeneratedCertificate
)
from lms.djangoapps.courseware.access import has_access
from lms.djangoapps.courseware.courses import get_course_by_id, get_course_with_access
from lms.djangoapps.courseware.models import StudentModule
from lms.djangoapps.discussion.django_comment_client.utils import (
get_course_discussion_settings,
get_group_id_for_user,
get_group_name,
has_forum_access
)
from lms.djangoapps.instructor import enrollment
from lms.djangoapps.instructor.access import ROLES, allow_access, list_with_level, revoke_access, update_forum_role
from lms.djangoapps.instructor.enrollment import (
enroll_email,
get_email_params,
get_user_email_language,
send_beta_role_email,
send_mail_to_student,
unenroll_email
)
from lms.djangoapps.instructor.views import INVOICE_KEY
from lms.djangoapps.instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from lms.djangoapps.instructor_task import api as task_api
from lms.djangoapps.instructor_task.api_helper import AlreadyRunningError, QueueConnectionError
from lms.djangoapps.instructor_task.models import ReportStore
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
from openedx.core.djangoapps.django_comment_common.models import (
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_GROUP_MODERATOR,
FORUM_ROLE_MODERATOR,
Role
)
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference
from openedx.core.djangolib.markup import HTML, Text
from openedx.core.lib.api.authentication import BearerAuthenticationAllowInactiveUser
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin
from student import auth
from student.models import (
ALLOWEDTOENROLL_TO_ENROLLED,
ALLOWEDTOENROLL_TO_UNENROLLED,
DEFAULT_TRANSITION_STATE,
ENROLLED_TO_ENROLLED,
ENROLLED_TO_UNENROLLED,
UNENROLLED_TO_ALLOWEDTOENROLL,
UNENROLLED_TO_ENROLLED,
UNENROLLED_TO_UNENROLLED,
CourseEnrollment,
CourseEnrollmentAllowed,
EntranceExamConfiguration,
ManualEnrollmentAudit,
Registration,
UserProfile,
anonymous_id_for_user,
get_user_by_username_or_email,
is_email_retired,
unique_id_for_user
)
from student.roles import CourseFinanceAdminRole, CourseSalesAdminRole
from util.file import (
FileValidationException,
UniversalNewlineIterator,
course_and_time_based_filename_generator,
store_uploaded_file
)
from util.json_request import JsonResponse, JsonResponseBadRequest
from util.views import require_global_staff
from xmodule.modulestore.django import modulestore
from .. import permissions
from .tools import (
dump_module_extensions,
dump_student_extensions,
find_unit,
get_student_from_identifier,
handle_dashboard_error,
parse_datetime,
require_student_from_identifier,
set_due_date_extension,
strip_if_string
)
log = logging.getLogger(__name__)
TASK_SUBMISSION_OK = 'created'
SUCCESS_MESSAGE_TEMPLATE = _(u"The {report_type} report is being created. "
"To view the status of the report, see Pending Tasks below.")
def common_exceptions_400(func):
"""
Catches common exceptions and renders matching 400 errors.
(decorator without arguments)
"""
def wrapped(request, *args, **kwargs):
use_json = (request.is_ajax() or
request.META.get("HTTP_ACCEPT", "").startswith("application/json"))
try:
return func(request, *args, **kwargs)
except User.DoesNotExist:
message = _('User does not exist.')
except MultipleObjectsReturned:
message = _('Found a conflict with given identifier. Please try an alternative identifier')
except (AlreadyRunningError, QueueConnectionError, AttributeError) as err:
message = six.text_type(err)
if use_json:
return JsonResponseBadRequest(message)
else:
return HttpResponseBadRequest(message)
return wrapped
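# Illustrative use of common_exceptions_400 (a sketch; ``example_view`` is a
# hypothetical name, not a view in this module). Wrapping a view this way turns
# User.DoesNotExist, MultipleObjectsReturned and task-queue errors into 400
# responses -- JSON for AJAX/JSON requests, plain HttpResponseBadRequest otherwise:
#
#     @common_exceptions_400
#     def example_view(request, course_id):
#         user = get_student_from_identifier(request.POST.get('unique_student_identifier'))
#         ...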
def require_post_params(*args, **kwargs):
"""
Checks for required parameters or renders a 400 error.
(decorator with arguments)
`args` is a *list of required POST parameter names.
`kwargs` is a **dict of required POST parameter names
to string explanations of the parameter
"""
required_params = []
required_params += [(arg, None) for arg in args]
required_params += [(key, kwargs[key]) for key in kwargs]
# required_params = e.g. [('action', 'enroll or unenroll'), ['emails', None]]
def decorator(func):
def wrapped(*args, **kwargs):
request = args[0]
error_response_data = {
'error': 'Missing required query parameter(s)',
'parameters': [],
'info': {},
}
for (param, extra) in required_params:
default = object()
if request.POST.get(param, default) == default:
error_response_data['parameters'].append(param)
error_response_data['info'][param] = extra
if error_response_data['parameters']:
return JsonResponse(error_response_data, status=400)
else:
return func(*args, **kwargs)
return wrapped
return decorator
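# Illustrative use of require_post_params (a sketch; the view name is hypothetical).
# The same pattern appears on real views below, e.g. students_update_enrollment:
#
#     @require_post_params(action="enroll or unenroll",
#                          identifiers="stringified list of emails and/or usernames")
#     def example_view(request, course_id):
#         ...
#
# A POST missing either parameter receives a JSON 400 listing the missing
# parameter names and their explanations instead of reaching the view body.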
def require_course_permission(permission):
"""
Decorator with argument that requires a specific permission of the requesting
user. If the requirement is not satisfied, returns an
HttpResponseForbidden (403).
Assumes that request is in args[0].
Assumes that course_id is in kwargs['course_id'].
"""
def decorator(func):
def wrapped(*args, **kwargs):
request = args[0]
course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))
if request.user.has_perm(permission, course):
return func(*args, **kwargs)
else:
return HttpResponseForbidden()
return wrapped
return decorator
def require_sales_admin(func):
"""
Decorator for checking sales administrator access before executing an HTTP endpoint. This decorator
is designed to be used for a request based action on a course. It assumes that there will be a
request object as well as a course_id attribute to leverage to check course level privileges.
If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
"""
def wrapped(request, course_id):
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
log.error(u"Unable to find course with course key %s", course_id)
return HttpResponseNotFound()
access = auth.user_has_role(request.user, CourseSalesAdminRole(course_key))
if access:
return func(request, course_id)
else:
return HttpResponseForbidden()
return wrapped
def require_finance_admin(func):
"""
Decorator for checking finance administrator access before executing an HTTP endpoint. This decorator
is designed to be used for a request based action on a course. It assumes that there will be a
request object as well as a course_id attribute to leverage to check course level privileges.
If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
"""
def wrapped(request, course_id):
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
log.error(u"Unable to find course with course key %s", course_id)
return HttpResponseNotFound()
access = auth.user_has_role(request.user, CourseFinanceAdminRole(course_key))
if access:
return func(request, course_id)
else:
return HttpResponseForbidden()
return wrapped
EMAIL_INDEX = 0
USERNAME_INDEX = 1
NAME_INDEX = 2
COUNTRY_INDEX = 3
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
def register_and_enroll_students(request, course_id): # pylint: disable=too-many-statements
"""
Create new account and Enroll students in this course.
Passing a csv file that contains a list of students.
Order in csv should be the following email = 0; username = 1; name = 2; country = 3.
Requires staff access.
-If the email address and username already exists and the user is enrolled in the course,
do nothing (including no email gets sent out)
-If the email address already exists, but the username is different,
match on the email address only and continue to enroll the user in the course using the email address
as the matching criteria. Note the change of username as a warning message (but not a failure).
Send a standard enrollment email which is the same as the existing manual enrollment
-If the username already exists (but not the email), assume it is a different user and fail
to create the new account.
The failure will be messaged in a response in the browser.
"""
if not configuration_helpers.get_value(
'ALLOW_AUTOMATED_SIGNUPS',
settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False),
):
return HttpResponseForbidden()
course_id = CourseKey.from_string(course_id)
warnings = []
row_errors = []
general_errors = []
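    # A minimal example of the expected upload (a sketch; the values are made up),
    # in the column order described in the docstring above -- email, username, name, country:
    #
    #     jdoe@example.com,jdoe,Jane Doe,US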
# for white labels we use 'shopping cart' which uses CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG as
# course mode for creating course enrollments.
if CourseMode.is_white_label(course_id):
course_mode = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG
else:
course_mode = None
if 'students_list' in request.FILES:
students = []
try:
upload_file = request.FILES.get('students_list')
if upload_file.name.endswith('.csv'):
students = [row for row in csv.reader(upload_file.read().decode('utf-8').splitlines())]
course = get_course_by_id(course_id)
else:
general_errors.append({
'username': '', 'email': '',
'response': _(
'Make sure that the file you upload is in CSV format with no extraneous characters or rows.')
})
except Exception: # pylint: disable=broad-except
general_errors.append({
'username': '', 'email': '', 'response': _('Could not read uploaded file.')
})
finally:
upload_file.close()
generated_passwords = []
row_num = 0
for student in students:
row_num = row_num + 1
# verify that we have exactly four columns in every row but allow for blank lines
if len(student) != 4:
if student:
error = _(u'Data in row #{row_num} must have exactly four columns: '
'email, username, full name, and country').format(row_num=row_num)
general_errors.append({
'username': '',
'email': '',
'response': error
})
continue
# Iterate each student in the uploaded csv file.
email = student[EMAIL_INDEX]
username = student[USERNAME_INDEX]
name = student[NAME_INDEX]
country = student[COUNTRY_INDEX][:2]
email_params = get_email_params(course, True, secure=request.is_secure())
try:
validate_email(email) # Raises ValidationError if invalid
except ValidationError:
row_errors.append({
'username': username,
'email': email,
'response': _(u'Invalid email {email_address}.').format(email_address=email)
})
else:
if User.objects.filter(email=email).exists():
                    # Email address already exists; assume it is the correct user
# and just register the user in the course and send an enrollment email.
user = User.objects.get(email=email)
# see if it is an exact match with email and username
# if it's not an exact match then just display a warning message, but continue onwards
if not User.objects.filter(email=email, username=username).exists():
warning_message = _(
u'An account with email {email} exists but the provided username {username} '
u'is different. Enrolling anyway with {email}.'
).format(email=email, username=username)
warnings.append({
'username': username, 'email': email, 'response': warning_message
})
                        log.warning(u'email %s already exists', email)
else:
log.info(
u"user already exists with username '%s' and email '%s'",
username,
email
)
# enroll a user if it is not already enrolled.
if not CourseEnrollment.is_enrolled(user, course_id):
# Enroll user to the course and add manual enrollment audit trail
create_manual_course_enrollment(
user=user,
course_id=course_id,
mode=course_mode,
enrolled_by=request.user,
reason='Enrolling via csv upload',
state_transition=UNENROLLED_TO_ENROLLED,
)
enroll_email(course_id=course_id,
student_email=email,
auto_enroll=True,
email_students=True,
email_params=email_params)
elif is_email_retired(email):
# We are either attempting to enroll a retired user or create a new user with an email which is
# already associated with a retired account. Simply block these attempts.
row_errors.append({
'username': username,
'email': email,
'response': _(u'Invalid email {email_address}.').format(email_address=email),
})
log.warning(u'Email address %s is associated with a retired user, so course enrollment was ' +
u'blocked.', email)
else:
# This email does not yet exist, so we need to create a new account
# If username already exists in the database, then create_and_enroll_user
# will raise an IntegrityError exception.
password = generate_unique_password(generated_passwords)
errors = create_and_enroll_user(
email, username, name, country, password, course_id, course_mode, request.user, email_params
)
row_errors.extend(errors)
else:
general_errors.append({
'username': '', 'email': '', 'response': _('File is not attached.')
})
results = {
'row_errors': row_errors,
'general_errors': general_errors,
'warnings': warnings
}
return JsonResponse(results)
def generate_random_string(length):
"""
Create a string of random characters of specified length
"""
chars = [
char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
if char not in 'aAeEiIoOuU1l'
]
return ''.join((random.choice(chars) for i in range(length)))
def generate_unique_password(generated_passwords, password_length=12):
"""
    Generate a unique password for each student.
"""
password = generate_random_string(password_length)
while password in generated_passwords:
password = generate_random_string(password_length)
generated_passwords.append(password)
return password
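# Illustrative call (a sketch): the caller owns the running list, so repeated
# calls never return the same password twice.
#
#     passwords = []
#     first = generate_unique_password(passwords)        # 12 characters by default
#     second = generate_unique_password(passwords, 16)   # explicit length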
def create_user_and_user_profile(email, username, name, country, password):
"""
    Create a new user, add a Registration instance so the user can verify their identity, and create a user profile.
:param email: user's email address
:param username: user's username
:param name: user's name
:param country: user's country
:param password: user's password
:return: User instance of the new user.
"""
user = User.objects.create_user(username, email, password)
reg = Registration()
reg.register(user)
profile = UserProfile(user=user)
profile.name = name
profile.country = country
profile.save()
return user
def create_manual_course_enrollment(user, course_id, mode, enrolled_by, reason, state_transition):
"""
Create course enrollment for the given student and create manual enrollment audit trail.
:param user: User who is to enroll in course
:param course_id: course identifier of the course in which to enroll the user.
:param mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
:param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
:param reason: Reason behind manual enrollment
:param state_transition: state transition denoting whether student enrolled from un-enrolled,
un-enrolled from enrolled etc.
:return CourseEnrollment instance.
"""
enrollment_obj = CourseEnrollment.enroll(user, course_id, mode=mode)
ManualEnrollmentAudit.create_manual_enrollment_audit(
enrolled_by, user.email, state_transition, reason, enrollment_obj
)
log.info(u'user %s enrolled in the course %s', user.username, course_id)
return enrollment_obj
def create_and_enroll_user(email, username, name, country, password, course_id, course_mode, enrolled_by, email_params):
"""
    Create a new user, enroll them in the given course, and return a list of errors in the following format
    Error format:
        each error is a key-value dict with the following key-value pairs.
        1. username: username of the user to enroll
        2. email: email of the user to enroll
        3. response: readable error message
:param email: user's email address
:param username: user's username
:param name: user's name
:param country: user's country
:param password: user's password
:param course_id: course identifier of the course in which to enroll the user.
:param course_mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
:param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
:param email_params: information to send to the user via email
:return: list of errors
"""
errors = list()
try:
with transaction.atomic():
# Create a new user
user = create_user_and_user_profile(email, username, name, country, password)
# Enroll user to the course and add manual enrollment audit trail
create_manual_course_enrollment(
user=user,
course_id=course_id,
mode=course_mode,
enrolled_by=enrolled_by,
reason='Enrolling via csv upload',
state_transition=UNENROLLED_TO_ENROLLED,
)
except IntegrityError:
errors.append({
'username': username,
'email': email,
'response': _(u'Username {user} already exists.').format(user=username)
})
except Exception as ex: # pylint: disable=broad-except
log.exception(type(ex).__name__)
errors.append({
'username': username, 'email': email, 'response': type(ex).__name__,
})
else:
try:
            # It's a new user; an email will be sent to each newly created user.
email_params.update({
'message_type': 'account_creation_and_enrollment',
'email_address': email,
'password': password,
'platform_name': configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME),
})
send_mail_to_student(email, email_params)
except Exception as ex: # pylint: disable=broad-except
log.exception(
u"Exception '{exception}' raised while sending email to new user.".format(exception=type(ex).__name__)
)
errors.append({
'username': username,
'email': email,
'response':
_(u"Error '{error}' while sending email to new user (user email={email}). "
                      u"Without the email, the student would not be able to log in. "
u"Please contact support for further information.").format(error=type(ex).__name__, email=email),
})
else:
            log.info(u'email sent to newly created user at %s', email)
return errors
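# Shape of the error list returned by create_and_enroll_user (a sketch with
# made-up values; an empty list means the user was created and enrolled):
#
#     [{'username': 'jdoe', 'email': 'jdoe@example.com',
#       'response': u'Username jdoe already exists.'}]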
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
@require_post_params(action="enroll or unenroll", identifiers="stringified list of emails and/or usernames")
def students_update_enrollment(request, course_id):
"""
Enroll or unenroll students by email.
Requires staff access.
Query Parameters:
- action in ['enroll', 'unenroll']
- identifiers is string containing a list of emails and/or usernames separated by anything split_input_list can handle.
- auto_enroll is a boolean (defaults to false)
If auto_enroll is false, students will be allowed to enroll.
If auto_enroll is true, students will be enrolled as soon as they register.
- email_students is a boolean (defaults to false)
If email_students is true, students will be sent email notification
If email_students is false, students will not be sent email notification
Returns an analog to this JSON structure: {
"action": "enroll",
"auto_enroll": false,
"results": [
{
"email": "testemail@test.org",
"before": {
"enrollment": false,
"auto_enroll": false,
"user": true,
"allowed": false
},
"after": {
"enrollment": true,
"auto_enroll": false,
"user": true,
"allowed": false
}
}
]
}
"""
course_id = CourseKey.from_string(course_id)
action = request.POST.get('action')
identifiers_raw = request.POST.get('identifiers')
identifiers = _split_input_list(identifiers_raw)
auto_enroll = _get_boolean_param(request, 'auto_enroll')
email_students = _get_boolean_param(request, 'email_students')
reason = request.POST.get('reason')
role = request.POST.get('role')
allowed_role_choices = configuration_helpers.get_value('MANUAL_ENROLLMENT_ROLE_CHOICES',
settings.MANUAL_ENROLLMENT_ROLE_CHOICES)
if role and role not in allowed_role_choices:
return JsonResponse(
{
'action': action,
'results': [{'error': True, 'message': 'Not a valid role choice'}],
'auto_enroll': auto_enroll,
}, status=400)
enrollment_obj = None
state_transition = DEFAULT_TRANSITION_STATE
email_params = {}
if email_students:
course = get_course_by_id(course_id)
email_params = get_email_params(course, auto_enroll, secure=request.is_secure())
results = []
for identifier in identifiers:
        # First try to get a user object from the identifier
user = None
email = None
language = None
try:
user = get_student_from_identifier(identifier)
except User.DoesNotExist:
email = identifier
else:
email = user.email
language = get_user_email_language(user)
try:
# Use django.core.validators.validate_email to check email address
# validity (obviously, cannot check if email actually /exists/,
# simply that it is plausibly valid)
validate_email(email) # Raises ValidationError if invalid
if action == 'enroll':
before, after, enrollment_obj = enroll_email(
course_id, email, auto_enroll, email_students, email_params, language=language
)
before_enrollment = before.to_dict()['enrollment']
before_user_registered = before.to_dict()['user']
before_allowed = before.to_dict()['allowed']
after_enrollment = after.to_dict()['enrollment']
after_allowed = after.to_dict()['allowed']
if before_user_registered:
if after_enrollment:
if before_enrollment:
state_transition = ENROLLED_TO_ENROLLED
else:
if before_allowed:
state_transition = ALLOWEDTOENROLL_TO_ENROLLED
else:
state_transition = UNENROLLED_TO_ENROLLED
else:
if after_allowed:
state_transition = UNENROLLED_TO_ALLOWEDTOENROLL
elif action == 'unenroll':
before, after = unenroll_email(
course_id, email, email_students, email_params, language=language
)
before_enrollment = before.to_dict()['enrollment']
before_allowed = before.to_dict()['allowed']
enrollment_obj = CourseEnrollment.get_enrollment(user, course_id) if user else None
if before_enrollment:
state_transition = ENROLLED_TO_UNENROLLED
else:
if before_allowed:
state_transition = ALLOWEDTOENROLL_TO_UNENROLLED
else:
state_transition = UNENROLLED_TO_UNENROLLED
else:
return HttpResponseBadRequest(strip_tags(
u"Unrecognized action '{}'".format(action)
))
except ValidationError:
# Flag this email as an error if invalid, but continue checking
# the remaining in the list
results.append({
'identifier': identifier,
'invalidIdentifier': True,
})
except Exception as exc: # pylint: disable=broad-except
# catch and log any exceptions
# so that one error doesn't cause a 500.
            log.exception(u"Error while {}ing student".format(action))
log.exception(exc)
results.append({
'identifier': identifier,
'error': True,
})
else:
ManualEnrollmentAudit.create_manual_enrollment_audit(
request.user, email, state_transition, reason, enrollment_obj, role
)
results.append({
'identifier': identifier,
'before': before.to_dict(),
'after': after.to_dict(),
})
response_payload = {
'action': action,
'results': results,
'auto_enroll': auto_enroll,
}
return JsonResponse(response_payload)
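# Example POST body for students_update_enrollment (a sketch; the addresses are
# made up). identifiers may mix emails and usernames in one string:
#
#     action=enroll
#     identifiers=jdoe@example.com, asmith
#     auto_enroll=true
#     email_students=true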
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_BETATEST)
@common_exceptions_400
@require_post_params(
identifiers="stringified list of emails and/or usernames",
action="add or remove",
)
def bulk_beta_modify_access(request, course_id):
"""
Enroll or unenroll users in beta testing program.
Query parameters:
- identifiers is string containing a list of emails and/or usernames separated by
anything split_input_list can handle.
- action is one of ['add', 'remove']
"""
course_id = CourseKey.from_string(course_id)
action = request.POST.get('action')
identifiers_raw = request.POST.get('identifiers')
identifiers = _split_input_list(identifiers_raw)
email_students = _get_boolean_param(request, 'email_students')
auto_enroll = _get_boolean_param(request, 'auto_enroll')
results = []
rolename = 'beta'
course = get_course_by_id(course_id)
email_params = {}
if email_students:
secure = request.is_secure()
email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure)
for identifier in identifiers:
try:
error = False
user_does_not_exist = False
user = get_student_from_identifier(identifier)
user_active = user.is_active
if action == 'add':
allow_access(course, user, rolename)
elif action == 'remove':
revoke_access(course, user, rolename)
else:
return HttpResponseBadRequest(strip_tags(
u"Unrecognized action '{}'".format(action)
))
except User.DoesNotExist:
error = True
user_does_not_exist = True
user_active = None
# catch and log any unexpected exceptions
# so that one error doesn't cause a 500.
except Exception as exc: # pylint: disable=broad-except
            log.exception(u"Error while {}ing student".format(action))
log.exception(exc)
error = True
else:
# If no exception thrown, see if we should send an email
if email_students:
send_beta_role_email(action, user, email_params)
# See if we should autoenroll the student
if auto_enroll:
# Check if student is already enrolled
if not CourseEnrollment.is_enrolled(user, course_id):
CourseEnrollment.enroll(user, course_id)
finally:
# Tabulate the action result of this email address
results.append({
'identifier': identifier,
'error': error,
'userDoesNotExist': user_does_not_exist,
'is_active': user_active
})
response_payload = {
'action': action,
'results': results,
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EDIT_COURSE_ACCESS)
@require_post_params(
unique_student_identifier="email or username of user to change access",
rolename="'instructor', 'staff', 'beta', or 'ccx_coach'",
action="'allow' or 'revoke'"
)
@common_exceptions_400
def modify_access(request, course_id):
"""
Modify staff/instructor access of other user.
Requires instructor access.
NOTE: instructors cannot remove their own instructor access.
Query parameters:
    unique_student_identifier is the target user's username or email
rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
action is one of ['allow', 'revoke']
"""
course_id = CourseKey.from_string(course_id)
course = get_course_with_access(
request.user, 'instructor', course_id, depth=None
)
try:
user = get_student_from_identifier(request.POST.get('unique_student_identifier'))
except User.DoesNotExist:
response_payload = {
'unique_student_identifier': request.POST.get('unique_student_identifier'),
'userDoesNotExist': True,
}
return JsonResponse(response_payload)
# Check that user is active, because add_users
# in common/djangoapps/student/roles.py fails
# silently when we try to add an inactive user.
if not user.is_active:
response_payload = {
'unique_student_identifier': user.username,
'inactiveUser': True,
}
return JsonResponse(response_payload)
rolename = request.POST.get('rolename')
action = request.POST.get('action')
if rolename not in ROLES:
error = strip_tags(u"unknown rolename '{}'".format(rolename))
log.error(error)
return HttpResponseBadRequest(error)
# disallow instructors from removing their own instructor access.
if rolename == 'instructor' and user == request.user and action != 'allow':
response_payload = {
'unique_student_identifier': user.username,
'rolename': rolename,
'action': action,
'removingSelfAsInstructor': True,
}
return JsonResponse(response_payload)
if action == 'allow':
allow_access(course, user, rolename)
elif action == 'revoke':
revoke_access(course, user, rolename)
else:
return HttpResponseBadRequest(strip_tags(
            u"unrecognized action '{}'".format(action)
))
response_payload = {
'unique_student_identifier': user.username,
'rolename': rolename,
'action': action,
'success': 'yes',
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EDIT_COURSE_ACCESS)
@require_post_params(rolename="'instructor', 'staff', or 'beta'")
def list_course_role_members(request, course_id):
"""
List instructors and staff.
Requires instructor access.
rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
Returns JSON of the form {
"course_id": "some/course/id",
"staff": [
{
"username": "staff1",
"email": "staff1@example.org",
"first_name": "Joe",
"last_name": "Shmoe",
}
]
}
"""
course_id = CourseKey.from_string(course_id)
course = get_course_with_access(
request.user, 'instructor', course_id, depth=None
)
rolename = request.POST.get('rolename')
if rolename not in ROLES:
return HttpResponseBadRequest()
def extract_user_info(user):
""" convert user into dicts for json view """
return {
'username': user.username,
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
}
response_payload = {
'course_id': text_type(course_id),
rolename: list(map(extract_user_info, list_with_level(
course, rolename
))),
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def get_problem_responses(request, course_id):
"""
Initiate generation of a CSV file containing all student answers
to a given problem.
Responds with JSON
{"status": "... status message ...", "task_id": created_task_UUID}
if initiation is successful (or generation task is already running).
Responds with BadRequest if problem location is faulty.
"""
course_key = CourseKey.from_string(course_id)
problem_location = request.POST.get('problem_location', '')
report_type = _('problem responses')
try:
problem_key = UsageKey.from_string(problem_location)
# Are we dealing with an "old-style" problem location?
run = problem_key.run
if not run:
problem_key = UsageKey.from_string(problem_location).map_into_course(course_key)
if problem_key.course_key != course_key:
raise InvalidKeyError(type(problem_key), problem_key)
except InvalidKeyError:
return JsonResponseBadRequest(_("Could not find problem with this location."))
task = task_api.submit_calculate_problem_responses_csv(
request, course_key, problem_location
)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status, "task_id": task.task_id})
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
def get_grading_config(request, course_id):
"""
    Respond with JSON which contains an HTML-formatted grade summary.
"""
course_id = CourseKey.from_string(course_id)
# course = get_course_with_access(
# request.user, 'staff', course_id, depth=None
# )
course = get_course_by_id(course_id)
grading_config_summary = instructor_analytics.basic.dump_grading_context(course)
response_payload = {
'course_id': text_type(course_id),
'grading_config_summary': grading_config_summary,
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.VIEW_ISSUED_CERTIFICATES)
def get_issued_certificates(request, course_id):
"""
    Responds with JSON if CSV is not required; the response contains a list of issued certificates.
Arguments:
course_id
Returns:
{"certificates": [{course_id: xyz, mode: 'honor'}, ...]}
"""
course_key = CourseKey.from_string(course_id)
csv_required = request.GET.get('csv', 'false')
query_features = ['course_id', 'mode', 'total_issued_certificate', 'report_run_date']
query_features_names = [
('course_id', _('CourseID')),
('mode', _('Certificate Type')),
('total_issued_certificate', _('Total Certificates Issued')),
('report_run_date', _('Date Report Run'))
]
certificates_data = instructor_analytics.basic.issued_certificates(course_key, query_features)
if csv_required.lower() == 'true':
__, data_rows = instructor_analytics.csvs.format_dictlist(certificates_data, query_features)
return instructor_analytics.csvs.create_csv_response(
'issued_certificates.csv',
[col_header for __, col_header in query_features_names],
data_rows
)
else:
response_payload = {
'certificates': certificates_data,
'queried_features': query_features,
'feature_names': dict(query_features_names)
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def get_students_features(request, course_id, csv=False): # pylint: disable=redefined-outer-name
"""
    Respond with JSON which contains a summary of all enrolled students' profile information.
Responds with JSON
{"students": [{-student-info-}, ...]}
    TODO: accept requests for different attribute sets.
"""
course_key = CourseKey.from_string(course_id)
course = get_course_by_id(course_key)
report_type = _('enrolled learner profile')
available_features = instructor_analytics.basic.AVAILABLE_FEATURES
# Allow for sites to be able to define additional columns.
# Note that adding additional columns has the potential to break
# the student profile report due to a character limit on the
# asynchronous job input which in this case is a JSON string
# containing the list of columns to include in the report.
# TODO: Refactor the student profile report code to remove the list of columns
# that should be included in the report from the asynchronous job input.
# We need to clone the list because we modify it below
query_features = list(configuration_helpers.get_value('student_profile_download_fields', []))
if not query_features:
query_features = [
'id', 'username', 'name', 'email', 'language', 'location',
'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
'goals', 'enrollment_mode', 'verification_status',
'last_login', 'date_joined',
]
# Provide human-friendly and translatable names for these features. These names
# will be displayed in the table generated in data_download.js. It is not (yet)
# used as the header row in the CSV, but could be in the future.
query_features_names = {
'id': _('User ID'),
'username': _('Username'),
'name': _('Name'),
'email': _('Email'),
'language': _('Language'),
'location': _('Location'),
'year_of_birth': _('Birth Year'),
'gender': _('Gender'),
'level_of_education': _('Level of Education'),
'mailing_address': _('Mailing Address'),
'goals': _('Goals'),
'enrollment_mode': _('Enrollment Mode'),
'verification_status': _('Verification Status'),
'last_login': _('Last Login'),
'date_joined': _('Date Joined'),
}
if is_course_cohorted(course.id):
# Translators: 'Cohort' refers to a group of students within a course.
query_features.append('cohort')
query_features_names['cohort'] = _('Cohort')
if course.teams_enabled:
query_features.append('team')
query_features_names['team'] = _('Team')
# For compatibility reasons, city and country should always appear last.
query_features.append('city')
query_features_names['city'] = _('City')
query_features.append('country')
query_features_names['country'] = _('Country')
if not csv:
student_data = instructor_analytics.basic.enrolled_students_features(course_key, query_features)
response_payload = {
'course_id': six.text_type(course_key),
'students': student_data,
'students_count': len(student_data),
'queried_features': query_features,
'feature_names': query_features_names,
'available_features': available_features,
}
return JsonResponse(response_payload)
else:
task_api.submit_calculate_students_features_csv(
request,
course_key,
query_features
)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def get_students_who_may_enroll(request, course_id):
"""
Initiate generation of a CSV file containing information about
students who may enroll in a course.
Responds with JSON
{"status": "... status message ..."}
"""
course_key = CourseKey.from_string(course_id)
query_features = ['email']
report_type = _('enrollment')
task_api.submit_calculate_may_enroll_csv(request, course_key, query_features)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status})
def _cohorts_csv_validator(file_storage, file_to_validate):
"""
Verifies that the expected columns are present in the CSV used to add users to cohorts.
"""
with file_storage.open(file_to_validate) as f:
if six.PY2:
reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')
else:
reader = csv.reader(f.read().decode('utf-8').splitlines())
try:
fieldnames = next(reader)
except StopIteration:
fieldnames = []
msg = None
if "cohort" not in fieldnames:
msg = _("The file must contain a 'cohort' column containing cohort names.")
elif "email" not in fieldnames and "username" not in fieldnames:
msg = _("The file must contain a 'username' column, an 'email' column, or both.")
if msg:
raise FileValidationException(msg)
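# A CSV accepted by the validator above needs a 'cohort' column plus a
# 'username' column, an 'email' column, or both; for example (made-up rows):
#
#     email,cohort
#     jdoe@example.com,blue-team
#     asmith@example.com,red-team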
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@require_course_permission(permissions.ASSIGN_TO_COHORTS)
@common_exceptions_400
def add_users_to_cohorts(request, course_id):
"""
View method that accepts an uploaded file (using key "uploaded-file")
containing cohort assignments for users. This method spawns a celery task
to do the assignments, and a CSV file with results is provided via data downloads.
"""
course_key = CourseKey.from_string(course_id)
try:
__, filename = store_uploaded_file(
request, 'uploaded-file', ['.csv'],
course_and_time_based_filename_generator(course_key, "cohorts"),
max_file_size=2000000, # limit to 2 MB
validator=_cohorts_csv_validator
)
# The task will assume the default file storage.
task_api.submit_cohort_students(request, course_key, filename)
except (FileValidationException, PermissionDenied) as err:
return JsonResponse({"error": six.text_type(err)}, status=400)
return JsonResponse()
# The non-atomic decorator is required because this view calls a celery
# task which uses the 'outer_atomic' context manager.
@method_decorator(transaction.non_atomic_requests, name='dispatch')
class CohortCSV(DeveloperErrorViewMixin, APIView):
"""
**Use Cases**
Submit a CSV file to assign users to cohorts
**Example Requests**:
POST /api/cohorts/v1/courses/{course_id}/users/
**Response Values**
* Empty as this is executed asynchronously.
"""
authentication_classes = (
JwtAuthentication,
BearerAuthenticationAllowInactiveUser,
SessionAuthenticationAllowInactiveUser,
)
permission_classes = (IsAuthenticated, IsAdminUser)
def post(self, request, course_key_string):
"""
View method that accepts an uploaded file (using key "uploaded-file")
containing cohort assignments for users. This method spawns a celery task
to do the assignments, and a CSV file with results is provided via data downloads.
"""
course_key = CourseKey.from_string(course_key_string)
try:
__, file_name = store_uploaded_file(
request, 'uploaded-file', ['.csv'],
course_and_time_based_filename_generator(course_key, 'cohorts'),
max_file_size=2000000, # limit to 2 MB
validator=_cohorts_csv_validator
)
task_api.submit_cohort_students(request, course_key, file_name)
except (FileValidationException, ValueError) as e:
raise self.api_error(status.HTTP_400_BAD_REQUEST, str(e), 'failed-validation')
return Response(status=status.HTTP_204_NO_CONTENT)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.ENROLLMENT_REPORT)
@common_exceptions_400
def get_course_survey_results(request, course_id):
"""
    Get the survey results report for the particular course.
"""
course_key = CourseKey.from_string(course_id)
report_type = _('survey')
task_api.submit_course_survey_report(request, course_key)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EXAM_RESULTS)
@common_exceptions_400
def get_proctored_exam_results(request, course_id):
"""
    Get the proctored exam results report for the particular course.
"""
course_key = CourseKey.from_string(course_id)
report_type = _('proctored exam results')
task_api.submit_proctored_exam_results_report(request, course_key)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
def get_anon_ids(request, course_id):
"""
Respond with 2-column CSV output of user-id, anonymized-user-id
"""
# TODO: the User.objects query and CSV generation here could be
# centralized into instructor_analytics. Currently instructor_analytics
# has similar functionality but not quite what's needed.
course_id = CourseKey.from_string(course_id)
def csv_response(filename, header, rows):
"""Returns a CSV http response for the given header and rows (excel/utf-8)."""
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = u'attachment; filename={0}'.format(
text_type(filename).encode('utf-8') if six.PY2 else text_type(filename)
)
writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
# In practice, there should not be non-ascii data in this query,
# but trying to do the right thing anyway.
encoded = [text_type(s) for s in header]
writer.writerow(encoded)
for row in rows:
encoded = [text_type(s) for s in row]
writer.writerow(encoded)
return response
students = User.objects.filter(
courseenrollment__course_id=course_id,
).order_by('id')
header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
rows = [[s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, course_id, save=False)]
for s in students]
return csv_response(text_type(course_id).replace('/', '-') + '-anon-ids.csv', header, rows)
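# The response built above is a three-column, fully quoted CSV
# (a sketch with made-up identifiers):
#
#     "User ID","Anonymized User ID","Course Specific Anonymized User ID"
#     "42","0123abcd...","4567ef01..."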
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
@require_post_params(
unique_student_identifier="email or username of student for whom to get enrollment status"
)
def get_student_enrollment_status(request, course_id):
"""
Get the enrollment status of a student.
Limited to staff access.
Takes query parameter unique_student_identifier
"""
error = ''
user = None
mode = None
is_active = None
course_id = CourseKey.from_string(course_id)
unique_student_identifier = request.POST.get('unique_student_identifier')
try:
user = get_student_from_identifier(unique_student_identifier)
mode, is_active = CourseEnrollment.enrollment_mode_for_user(user, course_id)
except User.DoesNotExist:
# The student could have been invited to enroll without having
# registered. We'll also look at CourseEnrollmentAllowed
# records, so let the lack of a User slide.
pass
enrollment_status = _(u'Enrollment status for {student}: unknown').format(student=unique_student_identifier)
if user and mode:
if is_active:
enrollment_status = _(u'Enrollment status for {student}: active').format(student=user)
else:
enrollment_status = _(u'Enrollment status for {student}: inactive').format(student=user)
else:
email = user.email if user else unique_student_identifier
allowed = CourseEnrollmentAllowed.may_enroll_and_unenrolled(course_id)
if allowed and email in [cea.email for cea in allowed]:
enrollment_status = _(u'Enrollment status for {student}: pending').format(student=email)
else:
enrollment_status = _(u'Enrollment status for {student}: never enrolled').format(student=email)
response_payload = {
'course_id': text_type(course_id),
'error': error,
'enrollment_status': enrollment_status
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.ENROLLMENT_REPORT)
@require_post_params(
unique_student_identifier="email or username of student for whom to get progress url"
)
@common_exceptions_400
def get_student_progress_url(request, course_id):
"""
Get the progress url of a student.
Limited to staff access.
Takes query parameter unique_student_identifier and if the student exists
returns e.g. {
'progress_url': '/../...'
}
"""
course_id = CourseKey.from_string(course_id)
user = get_student_from_identifier(request.POST.get('unique_student_identifier'))
progress_url = reverse('student_progress', kwargs={'course_id': text_type(course_id), 'student_id': user.id})
response_payload = {
'course_id': text_type(course_id),
'progress_url': progress_url,
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params(
problem_to_reset="problem urlname to reset"
)
@common_exceptions_400
def reset_student_attempts(request, course_id):
"""
Resets a students attempts counter or starts a task to reset all students
attempts counters. Optionally deletes student state for a problem. Limited
to staff access. Some sub-methods limited to instructor access.
    Takes some of the following query parameters
- problem_to_reset is a urlname of a problem
- unique_student_identifier is an email or username
- all_students is a boolean
requires instructor access
mutually exclusive with delete_module
- delete_module is a boolean
requires instructor access
mutually exclusive with all_students
"""
course_id = CourseKey.from_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
all_students = _get_boolean_param(request, 'all_students')
if all_students and not has_access(request.user, 'instructor', course):
return HttpResponseForbidden("Requires instructor access.")
problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
student_identifier = request.POST.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
delete_module = _get_boolean_param(request, 'delete_module')
# parameter combinations
if all_students and student:
return HttpResponseBadRequest(
"all_students and unique_student_identifier are mutually exclusive."
)
if all_students and delete_module:
return HttpResponseBadRequest(
"all_students and delete_module are mutually exclusive."
)
try:
module_state_key = UsageKey.from_string(problem_to_reset).map_into_course(course_id)
except InvalidKeyError:
return HttpResponseBadRequest()
response_payload = {}
response_payload['problem_to_reset'] = problem_to_reset
if student:
try:
enrollment.reset_student_attempts(
course_id,
student,
module_state_key,
requesting_user=request.user,
delete_module=delete_module
)
except StudentModule.DoesNotExist:
return HttpResponseBadRequest(_("Module does not exist."))
except sub_api.SubmissionError:
# Trust the submissions API to log the error
error_msg = _("An error occurred while deleting the score.")
return HttpResponse(error_msg, status=500)
response_payload['student'] = student_identifier
elif all_students:
task_api.submit_reset_problem_attempts_for_all_students(request, module_state_key)
response_payload['task'] = TASK_SUBMISSION_OK
response_payload['student'] = 'All Students'
else:
return HttpResponseBadRequest()
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@common_exceptions_400
def reset_student_attempts_for_entrance_exam(request, course_id):
"""
Resets a students attempts counter or starts a task to reset all students
attempts counters for entrance exam. Optionally deletes student state for
entrance exam. Limited to staff access. Some sub-methods limited to instructor access.
Following are possible query parameters
- unique_student_identifier is an email or username
- all_students is a boolean
requires instructor access
mutually exclusive with delete_module
- delete_module is a boolean
requires instructor access
mutually exclusive with all_students
"""
course_id = CourseKey.from_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
if not course.entrance_exam_id:
return HttpResponseBadRequest(
_("Course has no entrance exam section.")
)
student_identifier = request.POST.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = _get_boolean_param(request, 'all_students')
delete_module = _get_boolean_param(request, 'delete_module')
# parameter combinations
if all_students and student:
return HttpResponseBadRequest(
_("all_students and unique_student_identifier are mutually exclusive.")
)
if all_students and delete_module:
return HttpResponseBadRequest(
_("all_students and delete_module are mutually exclusive.")
)
# instructor authorization
if all_students or delete_module:
if not has_access(request.user, 'instructor', course):
return HttpResponseForbidden(_("Requires instructor access."))
try:
entrance_exam_key = UsageKey.from_string(course.entrance_exam_id).map_into_course(course_id)
if delete_module:
task_api.submit_delete_entrance_exam_state_for_student(
request,
entrance_exam_key,
student
)
else:
task_api.submit_reset_problem_attempts_in_entrance_exam(
request,
entrance_exam_key,
student
)
except InvalidKeyError:
return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
response_payload = {'student': student_identifier or _('All Students'), 'task': TASK_SUBMISSION_OK}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.OVERRIDE_GRADES)
@require_post_params(problem_to_reset="problem urlname to reset")
@common_exceptions_400
def rescore_problem(request, course_id):
"""
    Starts a background task to rescore a problem for one student or for all students.
    Rescore for all students is limited to instructor access.
    Takes the following query parameters
- problem_to_reset is a urlname of a problem
- unique_student_identifier is an email or username
- all_students is a boolean
all_students and unique_student_identifier cannot both be present.
"""
course_id = CourseKey.from_string(course_id)
course = get_course_with_access(request.user, 'staff', course_id)
all_students = _get_boolean_param(request, 'all_students')
if all_students and not has_access(request.user, 'instructor', course):
return HttpResponseForbidden("Requires instructor access.")
only_if_higher = _get_boolean_param(request, 'only_if_higher')
problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
student_identifier = request.POST.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
if not (problem_to_reset and (all_students or student)):
return HttpResponseBadRequest("Missing query parameters.")
if all_students and student:
return HttpResponseBadRequest(
"Cannot rescore with all_students and unique_student_identifier."
)
try:
module_state_key = UsageKey.from_string(problem_to_reset).map_into_course(course_id)
except InvalidKeyError:
return HttpResponseBadRequest("Unable to parse problem id")
response_payload = {'problem_to_reset': problem_to_reset}
if student:
response_payload['student'] = student_identifier
try:
task_api.submit_rescore_problem_for_student(
request,
module_state_key,
student,
only_if_higher,
)
except NotImplementedError as exc:
return HttpResponseBadRequest(text_type(exc))
elif all_students:
try:
task_api.submit_rescore_problem_for_all_students(
request,
module_state_key,
only_if_higher,
)
except NotImplementedError as exc:
return HttpResponseBadRequest(text_type(exc))
else:
return HttpResponseBadRequest()
response_payload['task'] = TASK_SUBMISSION_OK
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.OVERRIDE_GRADES)
@require_post_params(problem_to_reset="problem urlname to reset", score='overriding score')
@common_exceptions_400
def override_problem_score(request, course_id):
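    """
    Override a problem score for a single student.
    Takes the following POST parameters:
        - problem_to_reset is a urlname of a problem
        - score is the overriding score
        - unique_student_identifier is an email or username
    Submits a background task that applies the score override.
    """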
course_key = CourseKey.from_string(course_id)
score = strip_if_string(request.POST.get('score'))
problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
student_identifier = request.POST.get('unique_student_identifier', None)
if not problem_to_reset:
return HttpResponseBadRequest("Missing query parameter problem_to_reset.")
if not student_identifier:
return HttpResponseBadRequest("Missing query parameter student_identifier.")
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
else:
return _create_error_response(request, u"Invalid student ID {}.".format(student_identifier))
try:
usage_key = UsageKey.from_string(problem_to_reset).map_into_course(course_key)
except InvalidKeyError:
return _create_error_response(request, u"Unable to parse problem id {}.".format(problem_to_reset))
# check the user's access to this specific problem
if not has_access(request.user, "staff", modulestore().get_item(usage_key)):
        return _create_error_response(request, u"User {} does not have permission to override scores for problem {}.".format(
request.user.id,
problem_to_reset
))
response_payload = {
'problem_to_reset': problem_to_reset,
'student': student_identifier
}
try:
task_api.submit_override_score(
request,
usage_key,
student,
score,
)
except NotImplementedError as exc: # if we try to override the score of a non-scorable block, catch it here
return _create_error_response(request, text_type(exc))
except ValueError as exc:
return _create_error_response(request, text_type(exc))
response_payload['task'] = TASK_SUBMISSION_OK
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.RESCORE_EXAMS)
@common_exceptions_400
def rescore_entrance_exam(request, course_id):
"""
    Starts a background task to rescore the entrance exam for one student or for all students.
    Limited to instructor access.
Takes either of the following query parameters
- unique_student_identifier is an email or username
- all_students is a boolean
all_students and unique_student_identifier cannot both be present.
"""
course_id = CourseKey.from_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
student_identifier = request.POST.get('unique_student_identifier', None)
only_if_higher = request.POST.get('only_if_higher', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = _get_boolean_param(request, 'all_students')
if not course.entrance_exam_id:
return HttpResponseBadRequest(
_("Course has no entrance exam section.")
)
if all_students and student:
return HttpResponseBadRequest(
_("Cannot rescore with all_students and unique_student_identifier.")
)
try:
entrance_exam_key = UsageKey.from_string(course.entrance_exam_id).map_into_course(course_id)
except InvalidKeyError:
return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
response_payload = {}
if student:
response_payload['student'] = student_identifier
else:
response_payload['student'] = _("All Students")
task_api.submit_rescore_entrance_exam_for_student(
request, entrance_exam_key, student, only_if_higher,
)
response_payload['task'] = TASK_SUBMISSION_OK
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EMAIL)
def list_background_email_tasks(request, course_id):
"""
List background email tasks.
"""
course_id = CourseKey.from_string(course_id)
task_type = 'bulk_course_email'
# Specifying for the history of a single task type
tasks = task_api.get_instructor_task_history(
course_id,
task_type=task_type
)
response_payload = {
'tasks': list(map(extract_task_features, tasks)),
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EMAIL)
def list_email_content(request, course_id):
"""
List the content of bulk emails sent
"""
course_id = CourseKey.from_string(course_id)
task_type = 'bulk_course_email'
# First get tasks list of bulk emails sent
emails = task_api.get_instructor_task_history(course_id, task_type=task_type)
response_payload = {
'emails': list(map(extract_email_features, emails)),
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.SHOW_TASKS)
def list_instructor_tasks(request, course_id):
"""
List instructor tasks.
    Takes optional query parameters.
- With no arguments, lists running tasks.
- `problem_location_str` lists task history for problem
- `problem_location_str` and `unique_student_identifier` lists task
history for problem AND student (intersection)
"""
course_id = CourseKey.from_string(course_id)
problem_location_str = strip_if_string(request.POST.get('problem_location_str', False))
student = request.POST.get('unique_student_identifier', None)
if student is not None:
student = get_student_from_identifier(student)
if student and not problem_location_str:
return HttpResponseBadRequest(
"unique_student_identifier must accompany problem_location_str"
)
if problem_location_str:
try:
module_state_key = UsageKey.from_string(problem_location_str).map_into_course(course_id)
except InvalidKeyError:
return HttpResponseBadRequest()
if student:
# Specifying for a single student's history on this problem
tasks = task_api.get_instructor_task_history(course_id, module_state_key, student)
else:
# Specifying for single problem's history
tasks = task_api.get_instructor_task_history(course_id, module_state_key)
else:
# If no problem or student, just get currently running tasks
tasks = task_api.get_running_instructor_tasks(course_id)
response_payload = {
'tasks': list(map(extract_task_features, tasks)),
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.SHOW_TASKS)
def list_entrance_exam_instructor_tasks(request, course_id):
"""
List entrance exam related instructor tasks.
Takes either of the following query parameters
- unique_student_identifier is an email or username
- all_students is a boolean
"""
course_id = CourseKey.from_string(course_id)
course = get_course_by_id(course_id)
student = request.POST.get('unique_student_identifier', None)
if student is not None:
student = get_student_from_identifier(student)
try:
entrance_exam_key = UsageKey.from_string(course.entrance_exam_id).map_into_course(course_id)
except InvalidKeyError:
return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
if student:
# Specifying for a single student's entrance exam history
tasks = task_api.get_entrance_exam_instructor_task_history(
course_id,
entrance_exam_key,
student
)
else:
        # Specifying for all students' entrance exam history
tasks = task_api.get_entrance_exam_instructor_task_history(
course_id,
entrance_exam_key
)
response_payload = {
'tasks': list(map(extract_task_features, tasks)),
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
def list_report_downloads(request, course_id):
"""
List grade CSV files that are available for download for this course.
Takes the following query parameters:
- (optional) report_name - name of the report
"""
course_id = CourseKey.from_string(course_id)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_name = request.POST.get("report_name", None)
response_payload = {
'downloads': [
dict(name=name, url=url, link=HTML(u'<a href="{}">{}</a>').format(HTML(url), Text(name)))
for name, url in report_store.links_for(course_id) if report_name is None or name == report_name
]
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@require_finance_admin
def list_financial_report_downloads(_request, course_id):
"""
List grade CSV files that are available for download for this course.
"""
course_id = CourseKey.from_string(course_id)
report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
response_payload = {
'downloads': [
dict(name=name, url=url, link=HTML(u'<a href="{}">{}</a>').format(HTML(url), Text(name)))
for name, url in report_store.links_for(course_id)
]
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def export_ora2_data(request, course_id):
"""
Pushes a Celery task which will aggregate ora2 responses for a course into a .csv
"""
course_key = CourseKey.from_string(course_id)
report_type = _('ORA data')
task_api.submit_export_ora2_data(request, course_key)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def calculate_grades_csv(request, course_id):
"""
AlreadyRunningError is raised if the course's grades are already being updated.
"""
report_type = _('grade')
course_key = CourseKey.from_string(course_id)
task_api.submit_calculate_grades_csv(request, course_key)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def problem_grade_report(request, course_id):
"""
Request a CSV showing students' grades for all problems in the
course.
AlreadyRunningError is raised if the course's grades are already being
updated.
"""
course_key = CourseKey.from_string(course_id)
report_type = _('problem grade')
task_api.submit_problem_grade_report(request, course_key)
success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
return JsonResponse({"status": success_status})
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
@require_post_params('rolename')
def list_forum_members(request, course_id):
"""
Lists forum members of a certain rolename.
Limited to staff access.
The requesting user must be at least staff.
Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
which is limited to instructors.
Takes query parameter `rolename`.
"""
course_id = CourseKey.from_string(course_id)
course = get_course_by_id(course_id)
has_instructor_access = has_access(request.user, 'instructor', course)
has_forum_admin = has_forum_access(
request.user, course_id, FORUM_ROLE_ADMINISTRATOR
)
rolename = request.POST.get('rolename')
# default roles require either (staff & forum admin) or (instructor)
if not (has_forum_admin or has_instructor_access):
return HttpResponseBadRequest(
"Operation requires staff & forum admin or instructor access"
)
# EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
return HttpResponseBadRequest("Operation requires instructor access.")
    # filter out unsupported roles
if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_GROUP_MODERATOR,
FORUM_ROLE_COMMUNITY_TA]:
return HttpResponseBadRequest(strip_tags(
u"Unrecognized rolename '{}'.".format(rolename)
))
try:
role = Role.objects.get(name=rolename, course_id=course_id)
users = role.users.all().order_by('username')
except Role.DoesNotExist:
users = []
course_discussion_settings = get_course_discussion_settings(course_id)
def extract_user_info(user):
""" Convert user to dict for json rendering. """
group_id = get_group_id_for_user(user, course_discussion_settings)
group_name = get_group_name(group_id, course_discussion_settings)
return {
'username': user.username,
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
'group_name': group_name,
}
response_payload = {
'course_id': text_type(course_id),
rolename: list(map(extract_user_info, users)),
'division_scheme': course_discussion_settings.division_scheme,
}
return JsonResponse(response_payload)
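# Illustrative shape of the JSON returned above (all values are hypothetical; the middle key
# is whatever rolename was requested in the POST):
#   {
#       "course_id": "course-v1:Org+Course+Run",
#       "Moderator": [{"username": "jane", "email": "jane@example.com", "first_name": "",
#                      "last_name": "", "group_name": "Cohort A"}],
#       "division_scheme": "cohort"
#   }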
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EMAIL)
@require_post_params(send_to="sending to whom", subject="subject line", message="message text")
@common_exceptions_400
def send_email(request, course_id):
"""
Send an email to self, staff, cohorts, or everyone involved in a course.
Query Parameters:
- 'send_to' specifies what group the email should be sent to
Options are defined by the CourseEmail model in
lms/djangoapps/bulk_email/models.py
- 'subject' specifies email's subject
- 'message' specifies email's content
"""
course_id = CourseKey.from_string(course_id)
if not is_bulk_email_feature_enabled(course_id):
log.warning(u'Email is not enabled for course %s', course_id)
return HttpResponseForbidden("Email is not enabled for this course.")
targets = json.loads(request.POST.get("send_to"))
subject = request.POST.get("subject")
message = request.POST.get("message")
# allow two branding points to come from Site Configuration: which CourseEmailTemplate should be used
# and what the 'from' field in the email should be
#
    # If these are None (there is no site configuration enabled for the current site) then
# the system will use normal system defaults
course_overview = CourseOverview.get_from_id(course_id)
from_addr = configuration_helpers.get_value('course_email_from_addr')
if isinstance(from_addr, dict):
# If course_email_from_addr is a dict, we are customizing
# the email template for each organization that has courses
# on the site. The dict maps from addresses by org allowing
# us to find the correct from address to use here.
from_addr = from_addr.get(course_overview.display_org_with_default)
template_name = configuration_helpers.get_value('course_email_template_name')
if isinstance(template_name, dict):
# If course_email_template_name is a dict, we are customizing
# the email template for each organization that has courses
# on the site. The dict maps template names by org allowing
# us to find the correct template to use here.
template_name = template_name.get(course_overview.display_org_with_default)
# Create the CourseEmail object. This is saved immediately, so that
# any transaction that has been pending up to this point will also be
# committed.
try:
email = CourseEmail.create(
course_id,
request.user,
targets,
subject, message,
template_name=template_name,
from_addr=from_addr
)
except ValueError as err:
log.exception(u'Cannot create course email for course %s requested by user %s for targets %s',
course_id, request.user, targets)
return HttpResponseBadRequest(repr(err))
# Submit the task, so that the correct InstructorTask object gets created (for monitoring purposes)
task_api.submit_bulk_course_email(request, course_id, email.id)
response_payload = {
'course_id': text_type(course_id),
'success': True,
}
return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EDIT_FORUM_ROLES)
@require_post_params(
unique_student_identifier="email or username of user to change access",
rolename="the forum role",
action="'allow' or 'revoke'",
)
@common_exceptions_400
def update_forum_role_membership(request, course_id):
"""
Modify user's forum role.
The requesting user must be at least staff.
Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
which is limited to instructors.
    No one can revoke an instructor's FORUM_ROLE_ADMINISTRATOR status.
Query parameters:
    - `unique_student_identifier` is the target user's email or username
- `rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_GROUP_MODERATOR,
FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]
- `action` is one of ['allow', 'revoke']
"""
course_id = CourseKey.from_string(course_id)
course = get_course_by_id(course_id)
has_instructor_access = has_access(request.user, 'instructor', course)
has_forum_admin = has_forum_access(
request.user, course_id, FORUM_ROLE_ADMINISTRATOR
)
unique_student_identifier = request.POST.get('unique_student_identifier')
rolename = request.POST.get('rolename')
action = request.POST.get('action')
# default roles require either (staff & forum admin) or (instructor)
if not (has_forum_admin or has_instructor_access):
return HttpResponseBadRequest(
"Operation requires staff & forum admin or instructor access"
)
# EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
return HttpResponseBadRequest("Operation requires instructor access.")
if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_GROUP_MODERATOR,
FORUM_ROLE_COMMUNITY_TA]:
return HttpResponseBadRequest(strip_tags(
u"Unrecognized rolename '{}'.".format(rolename)
))
user = get_student_from_identifier(unique_student_identifier)
try:
update_forum_role(course_id, user, rolename, action)
except Role.DoesNotExist:
return HttpResponseBadRequest("Role does not exist.")
response_payload = {
'course_id': text_type(course_id),
'action': action,
}
return JsonResponse(response_payload)
@require_POST
def get_user_invoice_preference(request, course_id):
"""
Gets invoice copy user's preferences.
"""
invoice_copy_preference = True
invoice_preference_value = get_user_preference(request.user, INVOICE_KEY)
if invoice_preference_value is not None:
invoice_copy_preference = invoice_preference_value == 'True'
return JsonResponse({
'invoice_copy': invoice_copy_preference
})
def _display_unit(unit):
"""
Gets string for displaying unit to user.
"""
name = getattr(unit, 'display_name', None)
if name:
return u'{0} ({1})'.format(name, text_type(unit.location))
else:
return text_type(unit.location)
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('student', 'url', 'due_datetime')
def change_due_date(request, course_id):
"""
Grants a due date extension to a student for a particular unit.
"""
course = get_course_by_id(CourseKey.from_string(course_id))
student = require_student_from_identifier(request.POST.get('student'))
unit = find_unit(course, request.POST.get('url'))
due_date = parse_datetime(request.POST.get('due_datetime'))
reason = strip_tags(request.POST.get('reason', ''))
set_due_date_extension(course, unit, student, due_date, request.user, reason=reason)
return JsonResponse(_(
u'Successfully changed due date for student {0} for {1} '
u'to {2}').format(student.profile.name, _display_unit(unit),
due_date.strftime(u'%Y-%m-%d %H:%M')))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('student', 'url')
def reset_due_date(request, course_id):
"""
Rescinds a due date extension for a student on a particular unit.
"""
course = get_course_by_id(CourseKey.from_string(course_id))
student = require_student_from_identifier(request.POST.get('student'))
unit = find_unit(course, request.POST.get('url'))
reason = strip_tags(request.POST.get('reason', ''))
original_due_date = get_date_for_block(course_id, unit.location)
set_due_date_extension(course, unit, student, None, request.user, reason=reason)
if not original_due_date:
# It's possible the normal due date was deleted after an extension was granted:
return JsonResponse(
_("Successfully removed invalid due date extension (unit has no due date).")
)
original_due_date_str = original_due_date.strftime(u'%Y-%m-%d %H:%M')
return JsonResponse(_(
u'Successfully reset due date for student {0} for {1} '
u'to {2}').format(student.profile.name, _display_unit(unit),
original_due_date_str))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('url')
def show_unit_extensions(request, course_id):
"""
Shows all of the students which have due date extensions for the given unit.
"""
course = get_course_by_id(CourseKey.from_string(course_id))
unit = find_unit(course, request.POST.get('url'))
return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('student')
def show_student_extensions(request, course_id):
"""
Shows all of the due date extensions granted to a particular student in a
particular course.
"""
student = require_student_from_identifier(request.POST.get('student'))
course = get_course_by_id(CourseKey.from_string(course_id))
return JsonResponse(dump_student_extensions(course, student))
def _split_input_list(str_list):
"""
    Separate out individual student emails from a comma- or space-separated string.
e.g.
in: "Lorem@ipsum.dolor, sit@amet.consectetur\nadipiscing@elit.Aenean\r convallis@at.lacus\r, ut@lacinia.Sed"
out: ['Lorem@ipsum.dolor', 'sit@amet.consectetur', 'adipiscing@elit.Aenean', 'convallis@at.lacus', 'ut@lacinia.Sed']
`str_list` is a string coming from an input text area
returns a list of separated values
"""
new_list = re.split(r'[\n\r\s,]', str_list)
new_list = [s.strip() for s in new_list]
new_list = [s for s in new_list if s != '']
return new_list
def _instructor_dash_url(course_key, section=None):
"""Return the URL for a section in the instructor dashboard.
Arguments:
course_key (CourseKey)
Keyword Arguments:
section (str): The name of the section to load.
Returns:
unicode: The URL of a section in the instructor dashboard.
"""
url = reverse('instructor_dashboard', kwargs={'course_id': six.text_type(course_key)})
if section is not None:
url += u'#view-{section}'.format(section=section)
return url
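# Illustrative results (the concrete path depends on the project's URL configuration; only
# the '#view-<section>' suffix is defined here):
#   _instructor_dash_url(course_key)                          -> u'/courses/<course_id>/instructor'
#   _instructor_dash_url(course_key, section='certificates')  -> u'/courses/<course_id>/instructor#view-certificates'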
@require_global_staff
@require_POST
def generate_example_certificates(request, course_id=None):
"""Start generating a set of example certificates.
Example certificates are used to verify that certificates have
been configured correctly for the course.
    Redirects back to the instructor dashboard once certificate
generation has begun.
"""
course_key = CourseKey.from_string(course_id)
certs_api.generate_example_certificates(course_key)
return redirect(_instructor_dash_url(course_key, section='certificates'))
@require_course_permission(permissions.ENABLE_CERTIFICATE_GENERATION)
@require_POST
def enable_certificate_generation(request, course_id=None):
"""Enable/disable self-generated certificates for a course.
Once self-generated certificates have been enabled, students
who have passed the course will be able to generate certificates.
    Redirects back to the instructor dashboard once the
setting has been updated.
"""
course_key = CourseKey.from_string(course_id)
is_enabled = (request.POST.get('certificates-enabled', 'false') == 'true')
certs_api.set_cert_generation_enabled(course_key, is_enabled)
return redirect(_instructor_dash_url(course_key, section='certificates'))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.ALLOW_STUDENT_TO_BYPASS_ENTRANCE_EXAM)
@require_POST
def mark_student_can_skip_entrance_exam(request, course_id):
"""
Mark a student to skip entrance exam.
Takes `unique_student_identifier` as required POST parameter.
"""
course_id = CourseKey.from_string(course_id)
student_identifier = request.POST.get('unique_student_identifier')
student = get_student_from_identifier(student_identifier)
__, created = EntranceExamConfiguration.objects.get_or_create(user=student, course_id=course_id)
if created:
message = _(u'This student (%s) will skip the entrance exam.') % student_identifier
else:
message = _(u'This student (%s) is already allowed to skip the entrance exam.') % student_identifier
response_payload = {
'message': message,
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
@common_exceptions_400
def start_certificate_generation(request, course_id):
"""
Start generating certificates for all students enrolled in given course.
"""
course_key = CourseKey.from_string(course_id)
task = task_api.generate_certificates_for_students(request, course_key)
message = _('Certificate generation task for all students of this course has been started. '
'You can view the status of the generation task in the "Pending Tasks" section.')
response_payload = {
'message': message,
'task_id': task.task_id
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
@common_exceptions_400
def start_certificate_regeneration(request, course_id):
"""
    Start regenerating certificates for students whose certificate statuses lie within the 'certificate_statuses'
entry in POST data.
"""
course_key = CourseKey.from_string(course_id)
certificates_statuses = request.POST.getlist('certificate_statuses', [])
if not certificates_statuses:
return JsonResponse(
{'message': _('Please select one or more certificate statuses that require certificate regeneration.')},
status=400
)
# Check if the selected statuses are allowed
allowed_statuses = [
CertificateStatuses.downloadable,
CertificateStatuses.error,
CertificateStatuses.notpassing,
CertificateStatuses.audit_passing,
CertificateStatuses.audit_notpassing,
]
if not set(certificates_statuses).issubset(allowed_statuses):
return JsonResponse(
{'message': _('Please select certificate statuses from the list only.')},
status=400
)
task_api.regenerate_certificates(request, course_key, certificates_statuses)
response_payload = {
'message': _('Certificate regeneration task has been started. '
'You can view the status of the generation task in the "Pending Tasks" section.'),
'success': True
}
return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_exception_view(request, course_id):
"""
Add/Remove students to/from certificate white list.
:param request: HttpRequest object
:param course_id: course identifier of the course for whom to add/remove certificates exception.
:return: JsonResponse object with success/error message or certificate exception data.
"""
course_key = CourseKey.from_string(course_id)
# Validate request data and return error response in case of invalid data
try:
certificate_exception, student = parse_request_data_and_get_user(request, course_key)
except ValueError as error:
return JsonResponse({'success': False, 'message': text_type(error)}, status=400)
# Add new Certificate Exception for the student passed in request data
if request.method == 'POST':
try:
exception = add_certificate_exception(course_key, student, certificate_exception)
except ValueError as error:
return JsonResponse({'success': False, 'message': text_type(error)}, status=400)
return JsonResponse(exception)
# Remove Certificate Exception for the student passed in request data
elif request.method == 'DELETE':
try:
remove_certificate_exception(course_key, student)
except ValueError as error:
return JsonResponse({'success': False, 'message': text_type(error)}, status=400)
return JsonResponse({}, status=204)
def add_certificate_exception(course_key, student, certificate_exception):
"""
Add a certificate exception to CertificateWhitelist table.
Raises ValueError in case Student is already white listed.
:param course_key: identifier of the course whose certificate exception will be added.
:param student: User object whose certificate exception will be added.
:param certificate_exception: A dict object containing certificate exception info.
:return: CertificateWhitelist item in dict format containing certificate exception info.
"""
if CertificateWhitelist.get_certificate_white_list(course_key, student):
raise ValueError(
_(u"Student (username/email={user}) already in certificate exception list.").format(user=student.username)
)
certificate_white_list, __ = CertificateWhitelist.objects.get_or_create(
user=student,
course_id=course_key,
defaults={
'whitelist': True,
'notes': certificate_exception.get('notes', '')
}
)
log.info(u'%s has been added to the whitelist in course %s', student.username, course_key)
generated_certificate = GeneratedCertificate.eligible_certificates.filter(
user=student,
course_id=course_key,
status=CertificateStatuses.downloadable,
).first()
exception = dict({
'id': certificate_white_list.id,
'user_email': student.email,
'user_name': student.username,
'user_id': student.id,
'certificate_generated': generated_certificate and generated_certificate.created_date.strftime(u"%B %d, %Y"),
'created': certificate_white_list.created.strftime(u"%A, %B %d, %Y"),
})
return exception
def remove_certificate_exception(course_key, student):
"""
Remove certificate exception for given course and student from CertificateWhitelist table and
invalidate its GeneratedCertificate if present.
Raises ValueError in case no exception exists for the student in the given course.
:param course_key: identifier of the course whose certificate exception needs to be removed.
:param student: User object whose certificate exception needs to be removed.
:return:
"""
try:
certificate_exception = CertificateWhitelist.objects.get(user=student, course_id=course_key)
except ObjectDoesNotExist:
raise ValueError(
_(u'Certificate exception (user={user}) does not exist in certificate white list. '
'Please refresh the page and try again.').format(user=student.username)
)
try:
generated_certificate = GeneratedCertificate.objects.get(
user=student,
course_id=course_key
)
generated_certificate.invalidate()
log.info(
u'Certificate invalidated for %s in course %s when removed from certificate exception list',
student.username,
course_key
)
except ObjectDoesNotExist:
# Certificate has not been generated yet, so just remove the certificate exception from white list
pass
log.info(u'%s has been removed from the whitelist in course %s', student.username, course_key)
certificate_exception.delete()
def parse_request_data_and_get_user(request, course_key):
"""
Parse request data into Certificate Exception and User object.
Certificate Exception is the dict object containing information about certificate exception.
:param request:
:param course_key: Course Identifier of the course for whom to process certificate exception
:return: key-value pairs containing certificate exception data and User object
"""
certificate_exception = parse_request_data(request)
user = certificate_exception.get('user_name', '') or certificate_exception.get('user_email', '')
if not user:
raise ValueError(_('Student username/email field is required and can not be empty. '
'Kindly fill in username/email and then press "Add to Exception List" button.'))
db_user = get_student(user, course_key)
return certificate_exception, db_user
def parse_request_data(request):
"""
Parse and return request data, raise ValueError in case of invalid JSON data.
:param request: HttpRequest request object.
:return: dict object containing parsed json data.
"""
try:
data = json.loads(request.body.decode('utf8') or u'{}')
except ValueError:
raise ValueError(_('The record is not in the correct format. Please add a valid username or email address.'))
return data
def get_student(username_or_email, course_key):
"""
Retrieve and return User object from db, raise ValueError
    if the user does not exist or is not enrolled in the given course.
:param username_or_email: String containing either user name or email of the student.
:param course_key: CourseKey object identifying the current course.
:return: User object
"""
try:
student = get_user_by_username_or_email(username_or_email)
except ObjectDoesNotExist:
raise ValueError(_(u"{user} does not exist in the LMS. Please check your spelling and retry.").format(
user=username_or_email
))
# Make Sure the given student is enrolled in the course
if not CourseEnrollment.is_enrolled(student, course_key):
raise ValueError(_(u"{user} is not enrolled in this course. Please check your spelling and retry.")
.format(user=username_or_email))
return student
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GENERATE_CERTIFICATE_EXCEPTIONS)
@require_POST
@common_exceptions_400
def generate_certificate_exceptions(request, course_id, generate_for=None):
"""
Generate Certificate for students in the Certificate White List.
:param request: HttpRequest object,
:param course_id: course identifier of the course for whom to generate certificates
:param generate_for: string to identify whether to generate certificates for 'all' or 'new'
additions to the certificate white-list
:return: JsonResponse object containing success/failure message and certificate exception data
"""
course_key = CourseKey.from_string(course_id)
if generate_for == 'all':
# Generate Certificates for all white listed students
students = 'all_whitelisted'
elif generate_for == 'new':
students = 'whitelisted_not_generated'
else:
        # Invalid data: generate_for must be either 'new' or 'all'
return JsonResponse(
{
'success': False,
'message': _('Invalid data, generate_for must be "new" or "all".'),
},
status=400
)
task_api.generate_certificates_for_students(request, course_key, student_set=students)
response_payload = {
'success': True,
'message': _('Certificate generation started for white listed students.'),
}
return JsonResponse(response_payload)
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GENERATE_BULK_CERTIFICATE_EXCEPTIONS)
@require_POST
def generate_bulk_certificate_exceptions(request, course_id):
"""
Add Students to certificate white list from the uploaded csv file.
:return response in dict format.
{
general_errors: [errors related to csv file e.g. csv uploading, csv attachment, content reading etc. ],
row_errors: {
data_format_error: [users/data in csv file that are not well formatted],
            user_not_exist: [rows with users that do not exist in the LMS],
            user_already_white_listed: [users that are already white listed],
            user_not_enrolled: [rows with users not enrolled in the given course]
},
success: [list of successfully added users to the certificate white list model]
}
"""
user_index = 0
notes_index = 1
row_errors_key = ['data_format_error', 'user_not_exist', 'user_already_white_listed', 'user_not_enrolled']
course_key = CourseKey.from_string(course_id)
students, general_errors, success = [], [], []
row_errors = {key: [] for key in row_errors_key}
def build_row_errors(key, _user, row_count):
"""
inner method to build dict of csv data as row errors.
"""
row_errors[key].append(_(u'user "{user}" in row# {row}').format(user=_user, row=row_count))
if 'students_list' in request.FILES:
try:
upload_file = request.FILES.get('students_list')
if upload_file.name.endswith('.csv'):
students = [row for row in csv.reader(upload_file.read().decode('utf-8').splitlines())]
else:
general_errors.append(_('Make sure that the file you upload is in CSV format with no '
'extraneous characters or rows.'))
except Exception: # pylint: disable=broad-except
general_errors.append(_('Could not read uploaded file.'))
finally:
upload_file.close()
row_num = 0
for student in students:
row_num += 1
        # verify that every non-blank row has exactly two columns (email or username, and notes);
        # blank lines are allowed
if len(student) != 2:
if student:
build_row_errors('data_format_error', student[user_index], row_num)
log.info(u'invalid data/format in csv row# %s', row_num)
continue
user = student[user_index]
try:
user = get_user_by_username_or_email(user)
except ObjectDoesNotExist:
build_row_errors('user_not_exist', user, row_num)
log.info(u'student %s does not exist', user)
else:
if CertificateWhitelist.get_certificate_white_list(course_key, user):
build_row_errors('user_already_white_listed', user, row_num)
                log.warning(u'student %s already exists.', user.username)
# make sure user is enrolled in course
elif not CourseEnrollment.is_enrolled(user, course_key):
build_row_errors('user_not_enrolled', user, row_num)
log.warning(u'student %s is not enrolled in course.', user.username)
else:
CertificateWhitelist.objects.create(
user=user,
course_id=course_key,
whitelist=True,
notes=student[notes_index]
)
success.append(_(u'user "{username}" in row# {row}').format(username=user.username, row=row_num))
else:
general_errors.append(_('File is not attached.'))
results = {
'general_errors': general_errors,
'row_errors': row_errors,
'success': success
}
return JsonResponse(results)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_invalidation_view(request, course_id):
"""
    Invalidate or re-validate a student's certificate for the given course.
:param request: HttpRequest object
:param course_id: course identifier of the course for whom to add/remove certificates exception.
:return: JsonResponse object with success/error message or certificate invalidation data.
"""
course_key = CourseKey.from_string(course_id)
# Validate request data and return error response in case of invalid data
try:
certificate_invalidation_data = parse_request_data(request)
certificate = validate_request_data_and_get_certificate(certificate_invalidation_data, course_key)
except ValueError as error:
return JsonResponse({'message': text_type(error)}, status=400)
    # Invalidate the given student's certificate for this course
if request.method == 'POST':
try:
certificate_invalidation = invalidate_certificate(request, certificate, certificate_invalidation_data)
except ValueError as error:
return JsonResponse({'message': text_type(error)}, status=400)
return JsonResponse(certificate_invalidation)
    # Re-validate the student's certificate for this course
elif request.method == 'DELETE':
try:
re_validate_certificate(request, course_key, certificate)
except ValueError as error:
return JsonResponse({'message': text_type(error)}, status=400)
return JsonResponse({}, status=204)
def invalidate_certificate(request, generated_certificate, certificate_invalidation_data):
"""
Invalidate given GeneratedCertificate and add CertificateInvalidation record for future reference or re-validation.
:param request: HttpRequest object
:param generated_certificate: GeneratedCertificate object, the certificate we want to invalidate
:param certificate_invalidation_data: dict object containing data for CertificateInvalidation.
:return: dict object containing updated certificate invalidation data.
"""
if CertificateInvalidation.get_certificate_invalidations(
generated_certificate.course_id,
generated_certificate.user,
):
raise ValueError(
_(u"Certificate of {user} has already been invalidated. Please check your spelling and retry.").format(
user=generated_certificate.user.username,
)
)
    # Verify that the certificate the user wants to invalidate is currently valid.
if not generated_certificate.is_valid():
raise ValueError(
_(u"Certificate for student {user} is already invalid, kindly verify that certificate was generated "
"for this student and then proceed.").format(user=generated_certificate.user.username)
)
# Add CertificateInvalidation record for future reference or re-validation
certificate_invalidation, __ = CertificateInvalidation.objects.update_or_create(
generated_certificate=generated_certificate,
defaults={
'invalidated_by': request.user,
'notes': certificate_invalidation_data.get("notes", ""),
'active': True,
}
)
# Invalidate GeneratedCertificate
generated_certificate.invalidate()
return {
'id': certificate_invalidation.id,
'user': certificate_invalidation.generated_certificate.user.username,
'invalidated_by': certificate_invalidation.invalidated_by.username,
'created': certificate_invalidation.created.strftime(u"%B %d, %Y"),
'notes': certificate_invalidation.notes,
}
@common_exceptions_400
def re_validate_certificate(request, course_key, generated_certificate):
"""
Remove certificate invalidation from db and start certificate generation task for this student.
    Raises ValueError if no certificate invalidation exists for the certificate.
:param request: HttpRequest object
:param course_key: CourseKey object identifying the current course.
:param generated_certificate: GeneratedCertificate object of the student for the given course
"""
try:
# Fetch CertificateInvalidation object
certificate_invalidation = CertificateInvalidation.objects.get(generated_certificate=generated_certificate)
except ObjectDoesNotExist:
raise ValueError(_("Certificate Invalidation does not exist, Please refresh the page and try again."))
else:
# Deactivate certificate invalidation if it was fetched successfully.
certificate_invalidation.deactivate()
# We need to generate certificate only for a single student here
student = certificate_invalidation.generated_certificate.user
task_api.generate_certificates_for_students(
request, course_key, student_set="specific_student", specific_student_id=student.id
)
def validate_request_data_and_get_certificate(certificate_invalidation, course_key):
"""
Fetch and return GeneratedCertificate of the student passed in request data for the given course.
Raises ValueError in case of missing student username/email or
if student does not have certificate for the given course.
:param certificate_invalidation: dict containing certificate invalidation data
:param course_key: CourseKey object identifying the current course.
:return: GeneratedCertificate object of the student for the given course
"""
user = certificate_invalidation.get("user")
if not user:
raise ValueError(
_('Student username/email field is required and can not be empty. '
'Kindly fill in username/email and then press "Invalidate Certificate" button.')
)
student = get_student(user, course_key)
certificate = GeneratedCertificate.certificate_for_student(student, course_key)
if not certificate:
raise ValueError(_(
u"The student {student} does not have certificate for the course {course}. Kindly verify student "
"username/email and the selected course are correct and try again."
).format(student=student.username, course=course_key.course))
return certificate
def _get_boolean_param(request, param_name):
"""
Returns the value of the boolean parameter with the given
name in the POST request. Handles translation from string
values to boolean values.
"""
return request.POST.get(param_name, False) in ['true', 'True', True]
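# For illustration: only the string values 'true'/'True' (or the boolean True) map to True;
# anything else, including a missing parameter, yields False:
#   _get_boolean_param(request, 'all_students')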
def _create_error_response(request, msg):
"""
Creates the appropriate error response for the current request,
in JSON form.
"""
return JsonResponse({"error": msg}, 400)
|
msegado/edx-platform
|
lms/djangoapps/instructor/views/api.py
|
Python
|
agpl-3.0
| 115,590 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# GNU General Public Licence (GPL)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
#
# Miguel Colom
# http://mcolom.info
__author__ = '''Miguel Colom'''
__docformat__ = 'plaintext'
import optparse
import sys
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from cycler import cycler
plt.switch_backend('Agg')
# Set color cycle
mpl.rcParams['axes.prop_cycle'] = cycler('color', ['r', 'g', 'b', 'c', 'm', 'y', 'k', '#67d100'])
# Parse program arguments
parser = optparse.OptionParser()
#
const_NO_VALUE = -9E9
#
parser.add_option('--output', help='output PNG file', default='curve.png')
parser.add_option('--title', help='title', default='Title')
parser.add_option('--xName', help='X-axis name', default='X-axis')
parser.add_option('--yName', help='Y-axis name', default='Y-axis')
parser.add_option('--x0', help='X-axis first value', default=const_NO_VALUE)
parser.add_option('--x1', help='X-axis last value', default=const_NO_VALUE)
parser.add_option('--y0', help='Y-axis first value', default=const_NO_VALUE)
parser.add_option('--y1', help='Y-axis last value', default=const_NO_VALUE)
parser.add_option('--legend', help='Legend for each data channel', default='')
parser.add_option('--grid', help='use grid', default=1)
parser.add_option('--markers', help='use markers', default=1)
parser.add_option('--markersize', help='marker size', default=5)
parser.add_option('--style', help='use custom line style', default='')
(opts, args) = parser.parse_args()
if len(args) < 1:
print("Error: no input files specified!\n")
parser.print_help()
sys.exit(-1)
# Read parameters
outputName = opts.output
title = opts.title
x0 = float(opts.x0)
x1 = float(opts.x1)
y0 = float(opts.y0)
y1 = float(opts.y1)
grid = (int(opts.grid) > 0)
xName = opts.xName
yName = opts.yName
legend = opts.legend
useMarkers = (int(opts.markers) > 0)
markersize = float(opts.markersize)
lines_style = opts.style
if not outputName.lower().endswith('.png'):
    # The strict PNG-only check is disabled here; other extensions are simply passed
    # through to matplotlib, which infers the output format from the file name.
    pass
# Init plot
plt.close('all')
fig = plt.figure()
plt.grid(b=grid)
plt.xlabel(xName)
plt.ylabel(yName)
if title == '':
plt.title('Noise curve')
else:
plt.title(title)
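# Expected layout of each input file, inferred from the parsing below (shown only as an
# illustrative sketch): every non-blank, non-comment line carries 2*numChannels
# whitespace-separated values, first the X value of each channel, then the Y value of each
# channel; a literal 'X' marks a missing sample and '#' starts a comment line. For two
# channels, a file could look like:
#   # sigma   std
#   0.0  0.0   1.52 1.48
#   8.5  8.5   3.10 X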
# Read all input files
is_first_loop = True
for filename in args:
# Read data
f = open(filename, 'r')
EOF = False
lines = []
while not EOF:
line = f.readline()
s = line.split()
EOF = (line == '')
lineStrip = line.strip()
isWhiteLine = (not EOF and lineStrip == '')
isComment = lineStrip.startswith('#')
if not (EOF or isWhiteLine or isComment):
lines.append(s)
#
f.close()
# Guess number of channels
numBins = len(lines)
if numBins > 0:
numChannels = len(lines[0])//2
else:
numChannels = 0
# Check number of channels
for i in range(numBins):
if len(lines[i])/2 != numChannels:
print('Error: in line ' + str(i+1) + ': number of channels doesn\'t match!')
sys.exit(-2)
#
# Check if number of channels keeps the same for all input files
if is_first_loop:
num_channels_all = numChannels
else: # Num channels check
if numChannels != num_channels_all:
print('Error: number of channels mismatch for file ' + filename)
exit(-2)
# Read data values
X = np.zeros((numChannels, numBins))
Y = np.zeros((numChannels, numBins))
indexes = np.zeros(numChannels, dtype=int)
for bin in range(numBins):
line = lines[bin]
for ch in range(numChannels):
x_value = line[ch].strip().upper()
y_value = line[ch+numChannels].strip().upper()
if x_value.upper() != 'X' and y_value.upper() != 'X':
index = indexes[ch]
X[ch, index] = x_value
Y[ch, index] = y_value
indexes[ch] += 1
if is_first_loop:
if legend != '':
legendNames = legend.split(',')
if len(legendNames) != numChannels:
print('Error: number of legends doesn\'t match number of channels!')
sys.exit(-3)
#
if lines_style != '':
lines_style_split = lines_style.split(',')
if len(lines_style_split)/2 != numChannels:
print('Error: number of parameters in styles doesn\'t match number of channels!')
sys.exit(-4)
#
lines_colors = []
lines_sty = []
for i in range(len(lines_style_split)//2):
lines_colors.append(lines_style_split[2*i])
lines_sty.append(lines_style_split[2*i+1])
# Plot curves
for ch in range(numChannels):
kwargs = {}
if useMarkers:
kwargs['marker'] = 'o'
kwargs['markersize'] = markersize
if lines_style != '':
kwargs['color'] = lines_colors[ch]
kwargs['linestyle'] = lines_sty[ch]
if legend != '':
chName = legendNames[ch]
kwargs['label'] = chName
plt.plot(X[ch,0:indexes[ch]], Y[ch,0:indexes[ch]], **kwargs)
# Horizontal and vertical limits
l0, l1 = plt.xlim()
if x0 != const_NO_VALUE:
l0 = x0
if x1 != const_NO_VALUE:
l1 = x1
plt.xlim((l0, l1))
l0, l1 = plt.ylim()
if y0 != const_NO_VALUE:
l0 = y0
if y1 != const_NO_VALUE:
l1 = y1
plt.ylim((l0, l1))
if is_first_loop:
if legend != '':
leg = plt.legend(loc='best', fancybox=True)
leg.get_frame().set_alpha(0.7)
leg.get_frame().set_facecolor('0.85')
is_first_loop = False
# Save result
fig.savefig(outputName)
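# Illustrative invocation (the input file name is hypothetical); three channels, so the legend
# holds three names and --style holds three color/linestyle pairs:
#   python3 draw2Dcurve.py --output curve.png --title "Noise curve" \
#       --legend "R,G,B" --style "r,-,g,--,b,:" noise_estimation.txt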
|
mcolom/ipolDevel
|
ipol_demo/modules/demorunner/Tools/PythonTools/draw2Dcurve.py
|
Python
|
agpl-3.0
| 6,072 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 OpenERP S.A. http://www.openerp.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Query object
def _quote(to_quote):
if '"' not in to_quote:
return '"%s"' % to_quote
return to_quote
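# For example: _quote('res_partner') returns '"res_partner"', while an already-quoted
# name such as '"res_partner"' is returned unchanged.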
class Query(object):
"""
Dumb implementation of a Query object, using 3 string lists so far
for backwards compatibility with the (table, where_clause, where_params) previously used.
TODO: To be improved after v6.0 to rewrite part of the ORM and add support for:
- auto-generated multiple table aliases
- multiple joins to the same table with different conditions
- dynamic right-hand-side values in domains (e.g. a.name = a.description)
- etc.
"""
def __init__(self, tables=None, where_clause=None, where_clause_params=None, joins=None):
# holds the list of tables joined using default JOIN.
# the table names are stored double-quoted (backwards compatibility)
self.tables = tables or []
# holds the list of WHERE clause elements, to be joined with
# 'AND' when generating the final query
self.where_clause = where_clause or []
# holds the parameters for the formatting of `where_clause`, to be
# passed to psycopg's execute method.
self.where_clause_params = where_clause_params or []
# holds table joins done explicitly, supporting outer joins. The JOIN
# condition should not be in `where_clause`. The dict is used as follows:
# self.joins = {
# 'table_a': [
# ('table_b', 'table_a_col1', 'table_b_col', 'LEFT JOIN'),
# ('table_c', 'table_a_col2', 'table_c_col', 'LEFT JOIN'),
# ('table_d', 'table_a_col3', 'table_d_col', 'JOIN'),
# ]
# }
# which should lead to the following SQL:
# SELECT ... FROM "table_a" LEFT JOIN "table_b" ON ("table_a"."table_a_col1" = "table_b"."table_b_col")
# LEFT JOIN "table_c" ON ("table_a"."table_a_col2" = "table_c"."table_c_col")
self.joins = joins or {}
def _get_table_aliases(self):
from openerp.osv.expression import get_alias_from_query
return [get_alias_from_query(from_statement)[1] for from_statement in self.tables]
def _get_alias_mapping(self):
from openerp.osv.expression import get_alias_from_query
mapping = {}
for table in self.tables:
alias, statement = get_alias_from_query(table)
mapping[statement] = table
return mapping
def add_join(self, connection, implicit=True, outer=False):
""" Join a destination table to the current table.
        :param implicit: False if the join is an explicit join. This allows
            falling back on the previous implementation of ``join`` used before
            OpenERP 7.0; it adds the JOIN specified in ``connection``.
            If True, the join is done implicitly, by adding the table alias
            in the from clause and the join condition in the where clause
            of the query. Implicit joins do not handle the outer parameter.
:param connection: a tuple ``(lhs, table, lhs_col, col, link)``.
The join corresponds to the SQL equivalent of::
(lhs.lhs_col = table.col)
Note that all connection elements are strings. Please refer to expression.py for more details about joins.
:param outer: True if a LEFT OUTER JOIN should be used, if possible
(no promotion to OUTER JOIN is supported in case the JOIN
was already present in the query, as for the moment
implicit INNER JOINs are only connected from NON-NULL
columns so it would not be correct (e.g. for
``_inherits`` or when a domain criterion explicitly
                      adds filtering))
"""
from openerp.osv.expression import generate_table_alias
(lhs, table, lhs_col, col, link) = connection
alias, alias_statement = generate_table_alias(lhs, [(table, link)])
if implicit:
if alias_statement not in self.tables:
self.tables.append(alias_statement)
condition = '("%s"."%s" = "%s"."%s")' % (lhs, lhs_col, alias, col)
self.where_clause.append(condition)
else:
# already joined
pass
return alias, alias_statement
else:
aliases = self._get_table_aliases()
assert lhs in aliases, "Left-hand-side table %s must already be part of the query tables %s!" % (lhs, str(self.tables))
if alias_statement in self.tables:
# already joined, must ignore (promotion to outer and multiple joins not supported yet)
pass
else:
# add JOIN
self.tables.append(alias_statement)
self.joins.setdefault(lhs, []).append((alias, lhs_col, col, outer and 'LEFT JOIN' or 'JOIN'))
return alias, alias_statement
def get_sql(self):
""" Returns (query_from, query_where, query_params). """
from openerp.osv.expression import get_alias_from_query
query_from = ''
tables_to_process = list(self.tables)
alias_mapping = self._get_alias_mapping()
def add_joins_for_table(table, query_from):
for (dest_table, lhs_col, col, join) in self.joins.get(table, []):
tables_to_process.remove(alias_mapping[dest_table])
query_from += ' %s %s ON ("%s"."%s" = "%s"."%s")' % \
(join, alias_mapping[dest_table], table, lhs_col, dest_table, col)
query_from = add_joins_for_table(dest_table, query_from)
return query_from
for table in tables_to_process:
query_from += table
table_alias = get_alias_from_query(table)[1]
if table_alias in self.joins:
query_from = add_joins_for_table(table_alias, query_from)
query_from += ','
query_from = query_from[:-1] # drop last comma
return (query_from, " AND ".join(self.where_clause), self.where_clause_params)
def __str__(self):
return '<osv.Query: "SELECT ... FROM %s WHERE %s" with params: %r>' % self.get_sql()
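# A minimal usage sketch (table, column and alias names are hypothetical):
#   q = Query(tables=['"res_partner"'])
#   q.where_clause.append('"res_partner"."active" = %s')
#   q.where_clause_params.append(True)
#   q.add_join(('res_partner', 'res_users', 'user_id', 'id', 'user_rel'),
#              implicit=False, outer=True)
#   query_from, query_where, query_params = q.get_sql()
#   # query_from would contain the LEFT JOIN on res_users; query_where would be
#   # '"res_partner"."active" = %s' and query_params [True]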
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ovnicraft/openerp-server
|
openerp/osv/query.py
|
Python
|
agpl-3.0
| 7,544 |
from patients.forms import *
from patients.models import *
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse
import json
from django.contrib.auth.decorators import login_required
@login_required
def registerPatient(request):
if request.method == 'POST':
form = PatientForm(request.POST)
if form.is_valid():
form.save()
else:
form = PatientForm()
return render(request, "register_patient.html", {
"form": form,
})
@login_required
def patientObservation(request):
if request.method == 'POST':
form = NumericObservationForm(request.POST)
if form.is_valid():
form.save()
else:
form = NumericObservationForm()
return render(request, "numeric_observation.html", {
"form": form,
})
@login_required
def getObs(request, mrn, obs):
patient = get_object_or_404(Patient, mrn = mrn)
numericobservationtype = get_object_or_404(NumericObservationType, name = obs)
obs = NumericObservation.objects.filter(patient = patient, observation_type = numericobservationtype)
    response = HttpResponse()  # note: content_type (e.g. 'text/json') is left unset
response.write(json.dumps([(o.datetime.isoformat(), o.value) for o in obs]))
return response
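# The body written above is a JSON list of (ISO-8601 datetime, value) pairs, e.g.
# (hypothetical observations): [["2014-03-01T08:30:00", 37.9], ["2014-03-01T12:00:00", 38.4]]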
@login_required
def g(request, mrn, obs, start, end, compass, height, width, min_, max_, refmin, refmax):
import random
import django
import datetime
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.dates import DateFormatter
import matplotlib.pyplot as plt
start = datetime.datetime.strptime(start, "%Y-%m-%d-%H-%M")
end = datetime.datetime.strptime(end, "%Y-%m-%d-%H-%M")
min_ = float(min_)
max_ = float(max_)
refmin = float(refmin)
refmax = float(refmax)
patient = get_object_or_404(Patient, mrn = mrn)
fig=Figure(figsize=(float(width) / 80., float(height) / 80.))
ax=fig.add_subplot(111)
c = compass.lower()
fig.subplots_adjust(left={True: 0.2, False:0}["w" in c],
right={True: 0.9, False:1}["e" in c],
bottom={True: 0.2, False:0}["s" in c],
top={True: 0.9, False:1}["n" in c])
#ax.set_frame_on(False)
x=[]
y=[]
if obs == "bp":
sbpt = get_object_or_404(NumericObservationType, name = "Systolic Blood Pressure")
dbpt = get_object_or_404(NumericObservationType, name = "Diastolic Blood Pressure")
sbp = NumericObservation.objects.filter(patient = patient, observation_type = sbpt)
#dbp = NumericObservation.objects.filter(patient = patient, observation_type = dbpt)
        for s in sbp:  # HACK: pair each systolic reading with the diastolic reading taken at the same time
            try:
                d = NumericObservation.objects.get(patient = patient, observation_type = dbpt, datetime = s.datetime)
                ax.plot_date([s.datetime, d.datetime], [s.value, d.value], "b-")
            except:
                # no (or no unique) matching diastolic observation; skip this sample
                pass
else:
numericobservationtype = get_object_or_404(NumericObservationType, name = obs)
nos = NumericObservation.objects.filter(patient = patient, observation_type = numericobservationtype)
ax.plot_date([no.datetime for no in nos], [no.value for no in nos], '.')
startday = datetime.date(start.year, start.month, start.day)
for d in range(20):
#try:HACK
ax.plot_date([startday + datetime.timedelta(d), startday + datetime.timedelta(d)], [refmin, refmax], "y-")
#except:
# pass
ax.set_xlim( (start, end) )
ax.set_ylim( (min_, max_) )
ax.xaxis.set_ticks([start, end])
ax.yaxis.set_ticks([min_, refmin, refmax, max_])
ax.yaxis.set_ticks_position("both")
rect = plt.Rectangle((start, refmin), end, refmax - refmin, facecolor="#dddddd", edgecolor="white")
fig.gca().add_patch(rect)
#ax.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
#fig.autofmt_xdate()
#fig.tight_layout(pad=0.5)
canvas=FigureCanvas(fig)
response=django.http.HttpResponse(content_type='image/png')
canvas.print_png(response, )
return response
|
martinspeleo/graphgrid
|
patients/views.py
|
Python
|
agpl-3.0
| 4,224 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Comunitea All Rights Reserved
# $Omar Castiñeira Saavedra <omar@comunitea.com>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
from openerp.exceptions import Warning
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
import time
class StockPicking(models.Model):
_inherit = "stock.picking"
pending_invoice_move_id = fields.Many2one('account.move',
'Account pending move',
readonly=True,
copy=False)
pending_stock_reverse_move_id = \
fields.Many2one('account.move', 'Account pending stock reverse move',
readonly=True, copy=False)
pending_stock_move_id = \
fields.Many2one('account.move', 'Account pending stock move',
readonly=True, copy=False)
@api.multi
def action_done(self):
res = super(StockPicking, self).action_done()
for pick in self:
if not pick.date_done:
pick.date_done = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
return res
@api.multi
def account_pending_invoice(self, debit_account, credit_account, date):
self.ensure_one()
period_obj = self.env['account.period']
move_obj = self.env['account.move']
move_line_obj = self.env['account.move.line']
lines = {}
period_id = period_obj.find(date)
origin = self.name
if self.origin:
origin += ':' + self.origin
stock_journal_id = self.company_id.property_pending_stock_journal.id
move = {
'ref': origin,
'journal_id': stock_journal_id,
'period_id': period_id.id,
'date': date,
}
move_id = move_obj.create(move)
obj_precision = self.env['decimal.precision']
for move_line in self.move_lines:
name = move_line.name or origin
            amount_line = round(
                move_line.price_unit,
                obj_precision.precision_get('Account')) * move_line.product_qty
vals = {
'name': name,
'ref': origin,
'partner_id': move_line.partner_id.commercial_partner_id.id,
'product_id': move_line.product_id.id,
'account_id': debit_account.id,
'debit': amount_line,
'credit': 0,
'quantity': move_line.product_qty,
'move_id': move_id.id,
'journal_id': stock_journal_id,
'period_id': period_id.id,
}
move_line_obj.create(vals)
if move_line.partner_id.commercial_partner_id.id in lines:
lines[move_line.partner_id.commercial_partner_id.id] += \
amount_line
else:
lines[move_line.partner_id.commercial_partner_id.id] = \
amount_line
for partner_id in lines:
vals = {
'name': name,
'ref': origin,
'partner_id': partner_id,
'account_id': credit_account.id,
'debit': 0,
'credit': round(lines[partner_id], obj_precision.
precision_get('Account')),
'move_id': move_id.id,
'journal_id': stock_journal_id,
'period_id': period_id.id,
}
move_line_obj.create(vals)
move_id.post()
return move_id
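    # Illustrative outcome (hypothetical picking with two moves: 10 units at 2.0 for
    # partner A and 5 units at 3.0 for partner B): the posted move carries two debit
    # lines of 20.0 and 15.0 on `debit_account` plus per-partner credit lines of 20.0
    # and 15.0 on `credit_account`, all within the pending stock journal.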
@api.multi
def write(self, vals):
res = super(StockPicking, self).write(vals)
if vals.get('date_done', False):
journal_obj = self.env['account.journal']
journal_id = journal_obj.search([('type', '=', 'sale')])[0].id
inv_type = 'out_invoice'
ctx = dict(self._context or {})
ctx['date_inv'] = False
ctx['inv_type'] = inv_type
templates = []
validate = True
for pick in self:
if (pick.picking_type_id.code == "incoming" and pick.move_lines
and pick.move_lines[0].purchase_line_id and
pick.invoice_state in ['invoiced', '2binvoiced'] and
pick.company_id.required_invoice_pending_move and
not pick.pending_stock_reverse_move_id):
pick.refresh()
if not pick.company_id.\
property_pending_variation_account or not \
pick.company_id.property_pending_stock_account:
raise Warning(_("You need to configure the accounts "
"in the company for pending invoices"))
if not pick.company_id.property_pending_stock_journal:
raise Warning(_("You need to configure an account "
"journal in the company for pending "
"invoices"))
debit_account = pick.company_id.\
property_pending_variation_account
credit_account = pick.company_id.\
property_pending_stock_account
change_date = vals['date_done']
if pick.backorder_id:
change_date = pick.backorder_id.date_done
move_id = pick.account_pending_invoice(debit_account,
credit_account,
change_date)
pick.pending_stock_reverse_move_id = move_id.id
if pick.state == 'done' and pick.invoice_state == '2binvoiced' and not pick.tests and \
pick.invoice_type_id.name == 'Diaria' and pick.picking_type_id.code == 'outgoing':
# Create invoice
res = pick.with_context(ctx).action_invoice_create(journal_id=journal_id, group=False, type=inv_type)
invoice_created = self.env['account.invoice'].browse(res)
if not invoice_created:
templates.append(self.env.ref('picking_invoice_pending.alert_picking_autocreate_invoices', False))
validate = False
elif not invoice_created.invoice_line:
# Invoice created without lines
templates.append(
self.env.ref('picking_invoice_pending.alert_picking_autocreate_invoices_empty_lines', False))
# Do not validate it because it will generate an error
validate = False
if validate:
# Validate invoice
invoice_created.signal_workflow('invoice_open')
if invoice_created.state in ('draft', 'cancel', 'proforma', 'proforma2'):
templates.append(self.env.ref('picking_invoice_pending.alert_picking_autovalidate_invoices', False))
for tmpl in templates:
ctx.update({
'default_model': 'stock.picking',
'default_res_id': pick.id,
'default_use_template': bool(tmpl.id),
'default_template_id': tmpl.id,
'default_composition_mode': 'comment',
'mark_so_as_sent': True
})
composer_id = self.env['mail.compose.message'].with_context(ctx).create({})
composer_id.with_context(ctx).send_mail()
return res
@api.multi
def action_confirm(self):
res = super(StockPicking, self).action_confirm()
pick = self[0]
if not pick.company_id.\
property_pending_variation_account or not \
pick.company_id.property_pending_stock_account or not \
pick.company_id.property_pending_supplier_invoice_account:
raise Warning(_("You need to configure the accounts "
"in the company for pending invoices"))
if not pick.company_id.property_pending_stock_journal:
raise Warning(_("You need to configure an account "
"journal in the company for pending "
"invoices"))
for pick in self:
if pick.picking_type_id.code == "incoming" and pick.move_lines \
and pick.move_lines[0].purchase_line_id and \
pick.invoice_state in ['invoiced', '2binvoiced'] and \
pick.company_id.required_invoice_pending_move and \
not pick.backorder_id and \
not pick.pending_invoice_move_id and \
not pick.pending_stock_move_id:
debit_account = pick.company_id.\
property_pending_expenses_account
credit_account = pick.company_id.\
property_pending_supplier_invoice_account
move_id = pick.account_pending_invoice(debit_account,
credit_account,
pick.create_date[:10])
pick.pending_invoice_move_id = move_id.id
debit_account = pick.company_id.\
property_pending_stock_account
credit_account = pick.company_id.\
property_pending_variation_account
move_id = pick.account_pending_invoice(debit_account,
credit_account,
pick.create_date[:10])
pick.pending_stock_move_id = move_id.id
return res
@api.multi
def action_cancel(self):
res = super(StockPicking, self).action_cancel()
for pick in self:
if pick.pending_stock_move_id:
pick.pending_stock_move_id.button_cancel()
pick.pending_stock_move_id.unlink()
if pick.pending_invoice_move_id:
pick.pending_invoice_move_id.button_cancel()
pick.pending_invoice_move_id.unlink()
if pick.pending_stock_reverse_move_id:
pick.pending_stock_reverse_move_id.button_cancel()
pick.pending_stock_reverse_move_id.unlink()
return res
@api.multi
def unlink(self):
for pick in self:
if pick.pending_stock_move_id:
pick.pending_stock_move_id.button_cancel()
pick.pending_stock_move_id.unlink()
if pick.pending_invoice_move_id:
pick.pending_invoice_move_id.button_cancel()
pick.pending_invoice_move_id.unlink()
if pick.pending_stock_reverse_move_id:
pick.pending_stock_reverse_move_id.button_cancel()
pick.pending_stock_reverse_move_id.unlink()
res = super(StockPicking, self).unlink()
return res
@api.model
def cron_create_invoices(self):
picking_obj = self.env['stock.picking']
journal_obj = self.env['account.journal']
journal_id = journal_obj.search([('type', '=', 'sale')])[0].id
inv_type = 'out_invoice'
ctx = dict(self._context or {})
ctx['date_inv'] = False
ctx['inv_type'] = inv_type
templates = []
validate = True
# Deliveries to Invoice
pickings = picking_obj.with_context(ctx).search([('state', '=', 'done'),
('invoice_state', '=', '2binvoiced'),
('invoice_type_id.name', '=', 'Diaria'),
('picking_type_id.code', '=', 'outgoing'),
('tests', '=', False)],
order='date_done')
# Create invoice
res = pickings.action_invoice_create(journal_id=journal_id, group=False, type=inv_type)
invoices_created = self.env['account.invoice'].browse(res)
if len(pickings) != len(res):
templates.append(self.env.ref('picking_invoice_pending.alert_cron_create_invoices', False))
if len(res) != len(invoices_created.mapped('invoice_line.invoice_id.id')):
# There are invoices created without lines
templates.append(self.env.ref('picking_invoice_pending.alert_cron_create_invoices_empty_lines', False))
# Do not validate them because it will generate an error
validate = False
if validate:
# Validate invoice
invoices_created.signal_workflow('invoice_open')
invoice_states = invoices_created.mapped('state')
if 'draft' in invoice_states or 'cancel' in invoice_states or \
'proforma' in invoice_states or 'proforma2' in invoice_states:
templates.append(self.env.ref('picking_invoice_pending.alert_cron_validate_invoices', False))
for tmpl in templates:
ctx.update({
'default_model': 'account.invoice',
'default_res_id': invoices_created[0].id,
'default_use_template': bool(tmpl.id),
'default_template_id': tmpl.id,
'default_composition_mode': 'comment',
'mark_so_as_sent': True
})
composer_id = self.env['mail.compose.message'].with_context(ctx).create({})
composer_id.with_context(ctx).send_mail()
return True
|
jgmanzanas/CMNT_004_15
|
project-addons/picking_invoice_pending/stock_picking.py
|
Python
|
agpl-3.0
| 14,897 |
# -*- coding: utf-8 -*-
# Copyright 2017 OpenSynergy Indonesia
# Copyright 2022 PT. Simetri Sinergi Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Scrap Operation",
"version": "8.0.1.0.4",
"website": "https://simetri-sinergi.id",
"author": "PT. Simetri Sinergi Indonesia, OpenSynergy Indonesia",
"category": "Stock Management",
"depends": [
"stock_warehouse_technical_information",
"stock_operation_type_location",
],
"data": ["views/stock_warehouse_view.xml"],
"images": [
"static/description/banner.png",
],
"installable": True,
"license": "AGPL-3",
}
|
open-synergy/opnsynid-stock-logistics-warehouse
|
stock_scrap_operation/__openerp__.py
|
Python
|
agpl-3.0
| 669 |
#! /usr/bin/env python
from __future__ import division
from timeside.plugins.decoder.file import FileDecoder
from timeside.plugins.analyzer.level import Level
from timeside.core.processor import ProcessPipe
import unittest
from unit_timeside import TestRunner
from timeside.core.tools.test_samples import samples
#from glib import GError as GST_IOError
# HINT : to use later with Gnonlin only
class TestDecodingFromStack(unittest.TestCase):
"Test decoder stack"
def setUp(self):
self.samplerate, self.channels, self.blocksize = None, None, None
self.start = 0
self.duration = None
self.expected_samplerate = 44100
self.expected_channels = 2
self.expected_totalframes = 352800
self.test_exact_duration = True
self.source_duration = 8
self.expected_mime_type = 'audio/x-wav'
self.source = samples["sweep.wav"]
def testProcess(self):
"Test decoder stack: test process"
decoder = FileDecoder(uri=self.source,
start=self.start,
duration=self.duration,
stack=True)
self.assertTrue(decoder.stack)
self.assertFalse(decoder.from_stack)
pipe = ProcessPipe(decoder)
pipe.run()
self.assertFalse(decoder.stack)
self.assertTrue(decoder.from_stack)
self.assertEqual(len(pipe.frames_stack), 44)
pipe.run()
def testResults(self):
"Test decoder stack: test frames content"
decoder = FileDecoder(uri=self.source,
start=self.start,
duration=self.duration,
stack=True)
level = Level()
pipe = (decoder | level)
pipe.run()
self.assertIsInstance(pipe.frames_stack, list)
results_on_file = level.results['level.rms'].data.copy()
# If the pipe is used for a second run, the processed frames stored
# in the stack are passed to the other processors
# without decoding the audio source again.
pipe.results = {} # to be sure the previous results are deleted
pipe.run()
# to assert that the frames passed to the two analyzers are the same,
# we check that the results of these analyzers are equivalent:
results_on_stack = level.results['level.rms'].data
self.assertEqual(results_on_stack,
results_on_file)
if __name__ == '__main__':
unittest.main(testRunner=TestRunner())
|
Parisson/TimeSide
|
tests/test_decoding_stack.py
|
Python
|
agpl-3.0
| 2,580 |
class Colour:
"""
Courtesy of:
http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
"""
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
@classmethod
def info(cls, string):
return cls._wrap(string, cls.OKBLUE)
@classmethod
def success(cls, string):
return cls._wrap(string, cls.OKGREEN)
@classmethod
def warning(cls, string):
return cls._wrap(string, cls.WARNING)
@classmethod
def danger(cls, string):
return cls._wrap(string, cls.FAIL)
@classmethod
def _wrap(cls, string, colour):
return colour + string + cls.ENDC
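# Illustrative usage:
# print(Colour.success("all checks passed"))
# print(Colour.danger("build failed"))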
|
danielquinn/isaac
|
isaac/colours.py
|
Python
|
agpl-3.0
| 790 |
from django import forms
from django.utils.translation import ugettext_lazy as _
from apps.grid.fields import MultiCharField, TitleField
from apps.grid.forms.base_form import BaseForm
from apps.grid.widgets import CommentInput, NumberInput
class DealLocalCommunitiesForm(BaseForm):
RECOGNITION_STATUS_CHOICES = (
(
"Indigenous Peoples traditional or customary rights recognized by government",
_(
"Indigenous Peoples traditional or customary rights recognized by government"
),
),
(
"Indigenous Peoples traditional or customary rights not recognized by government",
_(
"Indigenous Peoples traditional or customary rights not recognized by government"
),
),
(
"Community traditional or customary rights recognized by government",
_("Community traditional or customary rights recognized by government"),
),
(
"Community traditional or customary rights not recognized by government",
_("Community traditional or customary rights not recognized by government"),
),
)
COMMUNITY_CONSULTATION_CHOICES = (
("Not consulted", _("Not consulted")),
("Limited consultation", _("Limited consultation")),
(
"Free prior and informed consent",
_("Free, Prior and Informed Consent (FPIC)"),
),
(
"Certified Free, Prior and Informed Consent (FPIC)",
_("Certified Free, Prior and Informed Consent (FPIC)"),
),
("Other", _("Other")),
)
COMMUNITY_REACTION_CHOICES = (
("Consent", _("Consent")),
("Mixed reaction", _("Mixed reaction")),
("Rejection", _("Rejection")),
)
# TODO: convert to booleanfield?
BOOLEAN_CHOICES = (("Yes", _("Yes")), ("No", _("No")))
NEGATIVE_IMPACTS_CHOICES = (
("Environmental degradation", _("Environmental degradation")),
("Socio-economic", _("Socio-economic")),
("Cultural loss", _("Cultural loss")),
("Eviction", _("Eviction")),
("Displacement", _("Displacement")),
("Violence", _("Violence")),
("Other", _("Other")),
)
BENEFITS_CHOICES = (
("Health", _("Health")),
("Education", _("Education")),
(
"Productive infrastructure",
_("Productive infrastructure (e.g. irrigation, tractors, machinery...)"),
),
("Roads", _("Roads")),
("Capacity Building", _("Capacity Building")),
("Financial Support", _("Financial Support")),
(
"Community shares in the investment project",
_("Community shares in the investment project"),
),
("Other", _("Other")),
)
form_title = _("Local communities / indigenous peoples")
# Names of affected communities and indigenous peoples
tg_names_of_affected = TitleField(
required=False,
label="",
initial=_("Names of communities / indigenous peoples affected"),
)
name_of_community = MultiCharField(
required=False, label=_("Name of community"), widget=forms.TextInput
)
name_of_indigenous_people = MultiCharField(
required=False, label=_("Name of indigenous people"), widget=forms.TextInput
)
tg_affected_comment = forms.CharField(
required=False,
label=_("Comment on communities / indigenous peoples affected"),
widget=CommentInput,
)
# Recognitions status of community land tenure
tg_recognition_status = TitleField(
required=False,
label="",
initial=_("Recognitions status of community land tenure"),
)
recognition_status = forms.MultipleChoiceField(
required=False,
label=_("Recognition status of community land tenure"),
choices=RECOGNITION_STATUS_CHOICES,
widget=forms.CheckboxSelectMultiple,
)
tg_recognition_status_comment = forms.CharField(
required=False,
label=_("Comment on recognitions status of community land tenure"),
widget=CommentInput,
)
# Consultation of local community
tg_community_consultation = TitleField(
required=False, label="", initial=_("Consultation of local community")
)
community_consultation = forms.ChoiceField(
required=False,
label=_("Community consultation"),
choices=COMMUNITY_CONSULTATION_CHOICES,
widget=forms.RadioSelect,
)
tg_community_consultation_comment = forms.CharField(
required=False,
label=_("Comment on consultation of local community"),
widget=CommentInput,
)
# How did community react?
tg_community_reaction = TitleField(
required=False, label="", initial=_("How did the community react?")
)
community_reaction = forms.ChoiceField(
required=False,
label=_("Community reaction"),
choices=COMMUNITY_REACTION_CHOICES,
widget=forms.RadioSelect,
)
tg_community_reaction_comment = forms.CharField(
required=False, label=_("Comment on community reaction"), widget=CommentInput
)
# Land conflicts
tg_land_conflicts = TitleField(
required=False, label="", initial=_("Presence of land conflicts")
)
land_conflicts = forms.ChoiceField(
required=False,
label=_("Presence of land conflicts"),
choices=BOOLEAN_CHOICES,
widget=forms.RadioSelect,
)
tg_land_conflicts_comment = forms.CharField(
required=False,
label=_("Comment on presence of land conflicts"),
widget=CommentInput,
)
# Displacement of people
tg_displacement_of_people = TitleField(
required=False, label="", initial=_("Displacement of people")
)
displacement_of_people = forms.ChoiceField(
required=False,
label=_("Displacement of people"),
choices=BOOLEAN_CHOICES,
widget=forms.RadioSelect,
)
number_of_displaced_people = forms.IntegerField(
required=False,
label=_("Number of people actually displaced"),
widget=NumberInput,
)
number_of_displaced_households = forms.IntegerField(
required=False,
label=_("Number of households actually displaced"),
widget=NumberInput,
)
number_of_people_displaced_from_community_land = forms.IntegerField(
required=False,
label=_("Number of people displaced out of their community land"),
widget=NumberInput,
)
number_of_people_displaced_within_community_land = forms.IntegerField(
required=False,
label=_("Number of people displaced staying on community land"),
widget=NumberInput,
)
number_of_households_displaced_from_fields = forms.IntegerField(
required=False,
label=_('Number of households displaced "only" from their agricultural fields'),
widget=NumberInput,
)
number_of_people_displaced_on_completion = forms.IntegerField(
required=False,
label=_(
"Number of people facing displacement once project is fully implemented"
),
widget=NumberInput,
)
tg_number_of_displaced_people_comment = forms.CharField(
required=False,
label=_("Comment on displacement of people"),
widget=CommentInput,
)
tg_negative_impacts = TitleField(
required=False, label="", initial=_("Negative impacts for local communities")
)
negative_impacts = forms.MultipleChoiceField(
required=False,
label=_("Negative impacts for local communities"),
choices=NEGATIVE_IMPACTS_CHOICES,
widget=forms.CheckboxSelectMultiple,
)
tg_negative_impacts_comment = forms.CharField(
required=False,
label=_("Comment on negative impacts for local communities"),
widget=CommentInput,
)
# Promised compensation
tg_promised_compensation = TitleField(
required=False, label="", initial=_("Promised or received compensation")
)
promised_compensation = forms.CharField(
required=False,
label=_("Promised compensation (e.g. for damages or resettlements)"),
widget=CommentInput,
)
received_compensation = forms.CharField(
required=False,
label=_("Received compensation (e.g. for damages or resettlements)"),
widget=CommentInput,
)
# Promised benefits for local communities
tg_promised_benefits = TitleField(
required=False, label="", initial=_("Promised benefits for local communities")
)
promised_benefits = forms.MultipleChoiceField(
required=False,
label=_("Promised benefits for local communities"),
choices=BENEFITS_CHOICES,
widget=forms.CheckboxSelectMultiple,
)
tg_promised_benefits_comment = forms.CharField(
required=False,
label=_("Comment on promised benefits for local communities"),
widget=CommentInput,
)
# Materialized benefits for local communities
tg_materialized_benefits = TitleField(
required=False,
label="",
initial=_("Materialized benefits for local communities"),
)
materialized_benefits = forms.MultipleChoiceField(
required=False,
label=_("Materialized benefits for local communities"),
choices=BENEFITS_CHOICES,
widget=forms.CheckboxSelectMultiple,
)
tg_materialized_benefits_comment = forms.CharField(
required=False,
label=_("Comment on materialized benefits for local communities"),
widget=CommentInput,
)
# Presence of organizations and actions taken (e.g. farmer organizations, NGOs, etc.)
tg_presence_of_organizations = TitleField(
required=False,
initial=_(
"Presence of organizations and actions taken (e.g. farmer organizations, NGOs, etc.)"
),
)
presence_of_organizations = forms.CharField(
required=False,
label=_(
"Presence of organizations and actions taken (e.g. farmer organizations, NGOs, etc.)"
),
widget=CommentInput,
)
class Meta:
name = "local_communities"
|
sinnwerkstatt/landmatrix
|
apps/grid/forms/deal_local_communities_form.py
|
Python
|
agpl-3.0
| 10,279 |
"""
This config file follows the devstack enviroment, but adds the
requirement of a celery worker running in the background to process
celery tasks.
When testing locally, run lms/cms with this settings file as well, to test queueing
of tasks onto the appropriate workers.
In two separate processes on devstack:
paver devstack studio --settings=devstack_with_worker
DJANGO_SETTINGS_MODULE=cms.envs.devstack_with_worker celery worker --app=cms.celery:APP
"""
import os
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
if 'BOK_CHOY_HOSTNAME' in os.environ:
from cms.envs.devstack_docker import *
else:
from cms.envs.devstack import *
# Require a separate celery worker
CELERY_ALWAYS_EAGER = False
# Disable transaction management because we are using a worker. Views
# that request a task and wait for the result will deadlock otherwise.
for database_name in DATABASES:
DATABASES[database_name]['ATOMIC_REQUESTS'] = False
|
cpennington/edx-platform
|
cms/envs/devstack_with_worker.py
|
Python
|
agpl-3.0
| 1,077 |
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from django.contrib.auth.models import User
from scraper.spider.spiders import thingiverse
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-u', '--user',
action='store',
dest='user',
help='Attach to partiular user name'),
make_option('-i', '--user-id',
action='store',
dest='user_id',
help='Attach to partiular user by PK'),
)
def handle(self, *args, **options):
if all([options['user'], options['user_id']]):
print('You can\'t specify both a user name and a user ID at the same time')
return
if options['user']:
user = User.objects.get(username=options['user'])
if options['user_id']:
user = User.objects.get(pk=options['user_id'])
thingiverse.runScraper(urls=args)
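# Illustrative invocation (hypothetical user name and thing URL):
#   python manage.py thingiverse --user=alice http://www.thingiverse.com/thing:12345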
|
Rhombik/rhombik-object-repository
|
scraper/management/commands/thingiverse.py
|
Python
|
agpl-3.0
| 986 |
"""
Helper methods for testing cohorts.
"""
from factory import post_generation, Sequence
from factory.django import DjangoModelFactory
import json
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
from ..cohorts import set_course_cohort_settings
from ..models import CourseUserGroup, CourseCohort, CourseCohortsSettings, CohortMembership
class CohortFactory(DjangoModelFactory):
"""
Factory for constructing mock cohorts.
"""
class Meta(object):
model = CourseUserGroup
name = Sequence("cohort{}".format)
course_id = SlashSeparatedCourseKey("dummy", "dummy", "dummy")
group_type = CourseUserGroup.COHORT
@post_generation
def users(self, create, extracted, **kwargs): # pylint: disable=unused-argument
"""
Returns the users associated with the cohort.
"""
if extracted:
self.users.add(*extracted)
for user in self.users.all():
CohortMembership.objects.create(
user=user,
course_user_group=self,
)
class CourseCohortFactory(DjangoModelFactory):
"""
Factory for constructing mock course cohort.
"""
class Meta(object):
model = CourseCohort
class CourseCohortSettingsFactory(DjangoModelFactory):
"""
Factory for constructing mock course cohort settings.
"""
class Meta(object):
model = CourseCohortsSettings
is_cohorted = False
course_id = SlashSeparatedCourseKey("dummy", "dummy", "dummy")
cohorted_discussions = json.dumps([])
# pylint: disable=invalid-name
always_cohort_inline_discussions = False
def topic_name_to_id(course, name):
"""
Given a discussion topic name, return an id for that name (includes
course and url_name).
"""
return "{course}_{run}_{name}".format(
course=course.location.course,
run=course.url_name,
name=name
)
def config_course_cohorts_legacy(
course,
discussions,
cohorted,
cohorted_discussions=None,
auto_cohort_groups=None,
always_cohort_inline_discussions=None
):
"""
Given a course with no discussion set up, add the discussions and set
the cohort config on the course descriptor.
Since cohort settings are now stored in models.CourseCohortSettings,
this is only used for testing data migration from the CourseDescriptor
to the table.
Arguments:
course: CourseDescriptor
discussions: list of topic names strings. Picks ids and sort_keys
automatically.
cohorted: bool.
cohorted_discussions: optional list of topic names. If specified,
converts them to use the same ids as topic names.
auto_cohort_groups: optional list of strings
(names of groups to put students into).
Returns:
Nothing -- modifies course in place.
"""
def to_id(name):
"""
Helper method to convert a discussion topic name to a database identifier
"""
return topic_name_to_id(course, name)
topics = dict((name, {"sort_key": "A",
"id": to_id(name)})
for name in discussions)
course.discussion_topics = topics
config = {"cohorted": cohorted}
if cohorted_discussions is not None:
config["cohorted_discussions"] = [to_id(name)
for name in cohorted_discussions]
if auto_cohort_groups is not None:
config["auto_cohort_groups"] = auto_cohort_groups
if always_cohort_inline_discussions is not None:
config["always_cohort_inline_discussions"] = always_cohort_inline_discussions
course.cohort_config = config
try:
# Not implemented for XMLModulestore, which is used by test_cohorts.
modulestore().update_item(course, ModuleStoreEnum.UserID.test)
except NotImplementedError:
pass
# pylint: disable=dangerous-default-value
def config_course_cohorts(
course,
is_cohorted,
auto_cohorts=[],
manual_cohorts=[],
discussion_topics=[],
cohorted_discussions=[],
always_cohort_inline_discussions=False
):
"""
Set discussions and configure cohorts for a course.
Arguments:
course: CourseDescriptor
is_cohorted (bool): Is the course cohorted?
auto_cohorts (list): Names of auto cohorts to create.
manual_cohorts (list): Names of manual cohorts to create.
discussion_topics (list): Discussion topic names. Picks ids and
sort_keys automatically.
cohorted_discussions: Discussion topics to cohort. Converts the
list to use the same ids as discussion topic names.
always_cohort_inline_discussions (bool): Whether inline discussions
should be cohorted by default.
Returns:
Nothing -- modifies course in place.
"""
def to_id(name):
"""Convert name to id."""
return topic_name_to_id(course, name)
set_course_cohort_settings(
course.id,
is_cohorted=is_cohorted,
cohorted_discussions=[to_id(name) for name in cohorted_discussions],
always_cohort_inline_discussions=always_cohort_inline_discussions
)
for cohort_name in auto_cohorts:
cohort = CohortFactory(course_id=course.id, name=cohort_name)
CourseCohortFactory(course_user_group=cohort, assignment_type=CourseCohort.RANDOM)
for cohort_name in manual_cohorts:
cohort = CohortFactory(course_id=course.id, name=cohort_name)
CourseCohortFactory(course_user_group=cohort, assignment_type=CourseCohort.MANUAL)
course.discussion_topics = dict((name, {"sort_key": "A", "id": to_id(name)})
for name in discussion_topics)
try:
# Not implemented for XMLModulestore, which is used by test_cohorts.
modulestore().update_item(course, ModuleStoreEnum.UserID.test)
except NotImplementedError:
pass
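# Illustrative usage in a test (hypothetical `course` created via CourseFactory):
# config_course_cohorts(
#     course,
#     is_cohorted=True,
#     auto_cohorts=["AutoGroup1"],
#     manual_cohorts=["ManualGroup1"],
#     discussion_topics=["General"],
#     cohorted_discussions=["General"],
# )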
|
romain-li/edx-platform
|
openedx/core/djangoapps/course_groups/tests/helpers.py
|
Python
|
agpl-3.0
| 6,163 |
# -*- coding: utf-8 -*-
import controllers
from . import models
|
jmankiewicz/odooAddons
|
stock_delivery_note/__init__.py
|
Python
|
agpl-3.0
| 64 |
# op_return_dogecoin.py
#
# Python script to generate and retrieve OP_RETURN dogecoin transactions
#
# Based on bitcoin python-OP_RETURN,
# Copyright (c) Coin Sciences Ltd
# (https://github.com/coinspark/python-OP_RETURN)
# Adaptions and changes for Dogecoin by Ingo Keck, see
# https://github.com/kubrik-engineering/dogecoin_py_OP_RETURN for details
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import subprocess, json, time, random, os.path, binascii, struct, string, re, hashlib
# Python 2-3 compatibility logic
try:
basestring
except NameError:
basestring = str
try:
from urllib2 import HTTPError
except ImportError:
from urllib.request import HTTPError
# User-defined quasi-constants
OP_RETURN_DOGECOIN_IP = '127.0.0.1' # IP address of your dogecoin node
OP_RETURN_DOGECOIN_USE_CMD = False # use command-line instead of JSON-RPC?
if OP_RETURN_DOGECOIN_USE_CMD:
OP_RETURN_DOGECOIN_PATH = '/usr/bin/dogecoin-cli' # path to dogecoin-cli executable on this server
else:
OP_RETURN_DOGECOIN_PORT = '' # leave empty to use default port for mainnet/testnet
OP_RETURN_DOGECOIN_USER = '' # leave empty to read from ~/.dogecoin/dogecoin.conf (Unix only)
OP_RETURN_DOGECOIN_PASSWORD = '' # leave empty to read from ~/.dogecoin/dogecoin.conf (Unix only)
OP_RETURN_BTC_FEE = 1 # DOGE fee to pay per transaction
OP_RETURN_BTC_DUST = 1 # omit DOGE outputs smaller than this
OP_RETURN_MAX_BYTES = 80 # maximum bytes in an OP_RETURN (80 as of Dogecoin 0.10)
OP_RETURN_MAX_BLOCKS = 10 # maximum number of blocks to try when retrieving data
OP_RETURN_NET_TIMEOUT = 10 # how long to time out (in seconds) when communicating with dogecoin node
# User-facing functions
def OP_RETURN_send(send_address, send_amount, metadata, testnet=False):
"""
Sends send_amount coins to send_address including metadata as op_return data
:param send_address: address where to send the coins
:param send_amount: amount of coins to send
:param metadata: string with the data to be embedded in transaction
:param testnet: True for testnet, false for real net
:return: signed raw transaction
"""
# Validate some parameters
    if send_amount > 10:
        return {'error': 'Refusing to send more than 10 DOGE in a single call'}
if not OP_RETURN_dogecoin_check(testnet):
return {'error': 'Please check Dogecoin Core is running and OP_RETURN_DOGECOIN_* constants are set correctly'}
result = OP_RETURN_dogecoin_cmd('validateaddress', testnet, send_address)
if not ('isvalid' in result and result['isvalid']):
return {'error': 'Send address could not be validated: ' + send_address}
if isinstance(metadata, basestring):
try:
metadata = metadata.encode('latin1') # convert to binary string
        except UnicodeEncodeError:
metadata = metadata.encode('utf-8') # will make message much longer
metadata_len = len(metadata)
#if metadata_len > 65536:
# return {'error': 'This library only supports metadata up to 65536 bytes in size'}
if metadata_len > OP_RETURN_MAX_BYTES:
return {'error': 'Metadata has ' + str(metadata_len) + ' bytes but is limited to ' + str(
OP_RETURN_MAX_BYTES) + ' (see OP_RETURN_MAX_BYTES)'}
# Calculate amounts and choose inputs
output_amount = send_amount + OP_RETURN_BTC_FEE
inputs_spend = OP_RETURN_select_inputs(output_amount, testnet)
if 'error' in inputs_spend:
return {'error': inputs_spend['error']}
change_amount = inputs_spend['total'] - output_amount
# Build the raw transaction
change_address = OP_RETURN_dogecoin_cmd('getrawchangeaddress', testnet)
outputs = {send_address: send_amount}
if change_amount >= OP_RETURN_BTC_DUST:
outputs[change_address] = change_amount
raw_txn = OP_RETURN_create_txn(inputs_spend['inputs'], outputs, metadata, len(outputs), testnet)
# Sign and send the transaction, return result
return OP_RETURN_sign_send_txn(raw_txn, testnet)
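# Illustrative usage (hypothetical testnet address; assumes a reachable node with a funded wallet):
# result = OP_RETURN_send('nExampleTestnetAddress', 1, 'hello dogecoin', testnet=True)
# if 'error' in result:
#     print(result['error'])
# else:
#     print('sent in txid ' + result['txid'])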
def OP_RETURN_store(data, testnet=False):
# Data is stored in OP_RETURNs within a series of chained transactions.
# If the OP_RETURN is followed by another output, the data continues in the transaction spending that output.
# When the OP_RETURN is the last output, this also signifies the end of the data.
# Validate parameters and get change address
if not OP_RETURN_dogecoin_check(testnet):
return {'error': 'Please check Dogecoin Core is running and OP_RETURN_DOGECOIN_* constants are set correctly'}
if isinstance(data, basestring):
data = data.encode('utf-8') # convert to binary string
data_len = len(data)
if data_len == 0:
return {'error': 'Some data is required to be stored'}
change_address = OP_RETURN_dogecoin_cmd('getrawchangeaddress', testnet)
# Calculate amounts and choose first inputs to use
output_amount = OP_RETURN_BTC_FEE * int(
(data_len + OP_RETURN_MAX_BYTES - 1) / OP_RETURN_MAX_BYTES) # number of transactions required
inputs_spend = OP_RETURN_select_inputs(output_amount, testnet)
if 'error' in inputs_spend:
return {'error': inputs_spend['error']}
inputs = inputs_spend['inputs']
input_amount = inputs_spend['total']
# Find the current blockchain height and mempool txids
height = int(OP_RETURN_dogecoin_cmd('getblockcount', testnet))
avoid_txids = OP_RETURN_dogecoin_cmd('getrawmempool', testnet)
# Loop to build and send transactions
result = {'txids': []}
for data_ptr in range(0, data_len, OP_RETURN_MAX_BYTES):
# Some preparation for this iteration
last_txn = ((data_ptr + OP_RETURN_MAX_BYTES) >= data_len) # is this the last tx in the chain?
change_amount = input_amount - OP_RETURN_BTC_FEE
metadata = data[data_ptr:data_ptr + OP_RETURN_MAX_BYTES]
# Build and send this transaction
outputs = {}
if change_amount >= OP_RETURN_BTC_DUST: # might be skipped for last transaction
outputs[change_address] = change_amount
raw_txn = OP_RETURN_create_txn(inputs, outputs, metadata, len(outputs) if last_txn else 0, testnet)
send_result = OP_RETURN_sign_send_txn(raw_txn, testnet)
# Check for errors and collect the txid
if 'error' in send_result:
result['error'] = send_result['error']
break
result['txids'].append(send_result['txid'])
if data_ptr == 0:
result['ref'] = OP_RETURN_calc_ref(height, send_result['txid'], avoid_txids)
# Prepare inputs for next iteration
inputs = [{
'txid': send_result['txid'],
'vout': 1,
}]
input_amount = change_amount
# Return the final result
return result
def OP_RETURN_get(txid, testnet=False):
"""
Retrieve a transaction by its ID and return the op_return data as dictionary
with the keys op_return: data and index:output-position, or None if no op_return is found
:param txid: transaction id
:param testnet: True if testnet
:return: op_return dict or none if no op_return is found.
"""
if not OP_RETURN_dogecoin_check(testnet):
return {'error': 'Please check Dogecoin Core is running and OP_RETURN_DOGECOIN_* constants are set correctly'}
try:
rawtx = OP_RETURN_dogecoin_cmd('getrawtransaction', testnet, txid)
except HTTPError as e:
print(e.reason)
return None
unpackedtx = OP_RETURN_unpack_txn(OP_RETURN_hex_to_bin(rawtx))
op_return = OP_RETURN_find_txn_data(unpackedtx)
return op_return
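# Illustrative usage (hypothetical 64-hex-character txid):
# found = OP_RETURN_get('<64-hex-character txid>', testnet=True)
# if found:
#     print(found['index'], found['op_return'])  # output position and embedded bytes
# else:
#     print('no OP_RETURN output in that transaction')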
def OP_RETURN_retrieve(ref, max_results=1, testnet=False):
# Validate parameters and get status of Dogecoin Core
if not OP_RETURN_dogecoin_check(testnet):
return {'error': 'Please check Dogecoin Core is running and OP_RETURN_DOGECOIN_* constants are set correctly'}
max_height = int(OP_RETURN_dogecoin_cmd('getblockcount', testnet))
heights = OP_RETURN_get_ref_heights(ref, max_height)
if not isinstance(heights, list):
return {'error': 'Ref is not valid'}
# Collect and return the results
results = []
for height in heights:
if height == 0:
txids = OP_RETURN_list_mempool_txns(testnet) # if mempool, only get list for now (to save RPC calls)
txns = None
else:
txns = OP_RETURN_get_block_txns(height, testnet) # if block, get all fully unpacked
txids = txns.keys()
for txid in txids:
if OP_RETURN_match_ref_txid(ref, txid):
if height == 0:
txn_unpacked = OP_RETURN_get_mempool_txn(txid, testnet)
else:
txn_unpacked = txns[txid]
found = OP_RETURN_find_txn_data(txn_unpacked)
if found:
# Collect data from txid which matches ref and contains an OP_RETURN
result = {
'txids': [str(txid)],
'data': found['op_return'],
}
key_heights = {height: True}
# Work out which other block heights / mempool we should try
if height == 0:
try_heights = [] # nowhere else to look if first still in mempool
else:
result['ref'] = OP_RETURN_calc_ref(height, txid, txns.keys())
try_heights = OP_RETURN_get_try_heights(height + 1, max_height, False)
# Collect the rest of the data, if appropriate
if height == 0:
this_txns = OP_RETURN_get_mempool_txns(testnet) # now retrieve all to follow chain
else:
this_txns = txns
last_txid = txid
this_height = height
while found['index'] < (len(txn_unpacked['vout']) - 1): # this means more data to come
next_txid = OP_RETURN_find_spent_txid(this_txns, last_txid, found['index'] + 1)
# If we found the next txid in the data chain
if next_txid:
result['txids'].append(str(next_txid))
txn_unpacked = this_txns[next_txid]
found = OP_RETURN_find_txn_data(txn_unpacked)
if found:
result['data'] += found['op_return']
key_heights[this_height] = True
else:
result['error'] = 'Data incomplete - missing OP_RETURN'
break
last_txid = next_txid
# Otherwise move on to the next height to keep looking
else:
if len(try_heights):
this_height = try_heights.pop(0)
if this_height == 0:
this_txns = OP_RETURN_get_mempool_txns(testnet)
else:
this_txns = OP_RETURN_get_block_txns(this_height, testnet)
else:
result['error'] = 'Data incomplete - could not find next transaction'
break
# Finish up the information about this result
result['heights'] = list(key_heights.keys())
results.append(result)
if len(results) >= max_results:
break # stop if we have collected enough
return results
# Utility functions
def OP_RETURN_select_inputs(total_amount, testnet):
# List and sort unspent inputs by priority
unspent_inputs = OP_RETURN_dogecoin_cmd('listunspent', testnet, 0)
if not isinstance(unspent_inputs, list):
return {'error': 'Could not retrieve list of unspent inputs'}
unspent_inputs.sort(key=lambda unspent_input: unspent_input['amount'] * unspent_input['confirmations'],
reverse=True)
# Identify which inputs should be spent
inputs_spend = []
input_amount = 0
for unspent_input in unspent_inputs:
if not unspent_input['spendable']:
# amount is not spendable
continue
inputs_spend.append(unspent_input)
input_amount += unspent_input['amount']
if input_amount >= total_amount:
break # stop when we have enough
if input_amount < total_amount:
return {'error': 'Not enough funds are available to cover the amount and fee'}
# Return the successful result
return {
'inputs': inputs_spend,
'total': input_amount,
}
def OP_RETURN_create_txn(inputs, outputs, metadata, metadata_pos, testnet):
"""
    Create a raw transaction based on inputs, outputs and the data to be embedded
:param inputs: list of coin inputs
:param outputs: list of coin outputs
:param metadata: string with data to be embedded
:param metadata_pos: position of op_return in the outputs
:param testnet: True for testnet
:return: raw transaction
"""
raw_txn = OP_RETURN_dogecoin_cmd('createrawtransaction', testnet, inputs, outputs)
txn_unpacked = OP_RETURN_unpack_txn(OP_RETURN_hex_to_bin(raw_txn))
metadata_len = len(metadata)
if metadata_len <= 75:
        payload = bytearray((metadata_len,)) + metadata  # length byte + data (https://en.bitcoin.it/wiki/Script)
    elif metadata_len <= 255:  # OP_PUSHDATA1 length must fit in one byte
payload = b"\x4c" + bytearray((metadata_len,)) + metadata # OP_PUSHDATA1 format
else:
payload = b"\x4d" + bytearray((metadata_len % 256,)) + bytearray(
(int(metadata_len / 256),)) + metadata # OP_PUSHDATA2 format
metadata_pos = min(max(0, metadata_pos), len(txn_unpacked['vout'])) # constrain to valid values
txn_unpacked['vout'][metadata_pos:metadata_pos] = [{
'value': 0,
'scriptPubKey': '6a' + OP_RETURN_bin_to_hex(payload) # here's the OP_RETURN
}]
return OP_RETURN_bin_to_hex(OP_RETURN_pack_txn(txn_unpacked))
def OP_RETURN_sign_send_txn(raw_txn, testnet):
"""
Sign and send the raw transaction
:param raw_txn: tranaction to be signed
:param testnet: True if testnet
:return: transaction ID or error
"""
signed_txn = OP_RETURN_dogecoin_cmd('signrawtransaction', testnet, raw_txn)
if not ('complete' in signed_txn and signed_txn['complete']):
return {'error': 'Could not sign the transaction'}
send_txid = OP_RETURN_dogecoin_cmd('sendrawtransaction', testnet, signed_txn['hex'])
if not (isinstance(send_txid, basestring) and len(send_txid) == 64):
return {'error': 'Could not send the transaction'}
return {'txid': str(send_txid)}
def OP_RETURN_list_mempool_txns(testnet):
return OP_RETURN_dogecoin_cmd('getrawmempool', testnet)
def OP_RETURN_get_mempool_txn(txid, testnet):
"""
retrieve and unpack a transaction by its id
:param txid: transaction id
:param testnet: True if testnet
    :return: unpacked transaction
"""
raw_txn = OP_RETURN_dogecoin_cmd('getrawtransaction', testnet, txid)
return OP_RETURN_unpack_txn(OP_RETURN_hex_to_bin(raw_txn))
def OP_RETURN_get_mempool_txns(testnet):
txids = OP_RETURN_list_mempool_txns(testnet)
txns = {}
for txid in txids:
txns[txid] = OP_RETURN_get_mempool_txn(txid, testnet)
return txns
def OP_RETURN_get_raw_block(height, testnet):
"""
Get a raw block at height height
:param height: height of the raw block
:param testnet: True if testnet
:return: raw block or error
"""
block_hash = OP_RETURN_dogecoin_cmd('getblockhash', testnet, height)
if not (isinstance(block_hash, basestring) and len(block_hash) == 64):
return {'error': 'Block at height ' + str(height) + ' not found'}
return {
'block': OP_RETURN_hex_to_bin(OP_RETURN_dogecoin_cmd('getblock', testnet, block_hash, False))
}
def OP_RETURN_get_block_txns(height, testnet):
"""
Retrieve the raw block at height height and get back all transactions
:param height: height of block
    :param testnet: True if testnet
:return: list of tranactions
"""
raw_block = OP_RETURN_get_raw_block(height, testnet)
if 'error' in raw_block:
return {'error': raw_block['error']}
block = OP_RETURN_unpack_block(raw_block['block'])
return block['txs']
# Talking to dogecoin-cli
def OP_RETURN_dogecoin_check(testnet):
info = OP_RETURN_dogecoin_cmd('getinfo', testnet)
return isinstance(info, dict) and 'balance' in info
def OP_RETURN_dogecoin_cmd(command, testnet, *args): # more params are read from here
"""
Send a command to the dogecoin daemon via RPC
:param command: command to be sent
:param testnet: True if testnet
:param args: arguments for the command
:return: command result
"""
if OP_RETURN_DOGECOIN_USE_CMD:
sub_args = [OP_RETURN_DOGECOIN_PATH]
if testnet:
sub_args.append('-testnet')
sub_args.append(command)
for arg in args:
sub_args.append(json.dumps(arg) if isinstance(arg, (dict, list, tuple)) else str(arg))
raw_result = subprocess.check_output(sub_args).decode("utf-8").rstrip("\n")
try: # decode JSON if possible
result = json.loads(raw_result)
except ValueError:
result = raw_result
else:
request = {
'id': str(time.time()) + '-' + str(random.randint(100000, 999999)),
'method': command,
'params': args,
}
port = OP_RETURN_DOGECOIN_PORT
user = OP_RETURN_DOGECOIN_USER
password = OP_RETURN_DOGECOIN_PASSWORD
        if not (port and user and password):
            conf_lines = []
            if os.path.exists(os.path.expanduser('~') + '/.dogecoin/dogecoin.conf'):
                conf_lines = open(os.path.expanduser('~') + '/.dogecoin/dogecoin.conf').readlines()
            elif os.path.exists(os.path.expanduser('~') +
                                '/Library/Application Support/Dogecoin/dogecoin.conf'):
                conf_lines = open(os.path.expanduser('~') +
                                  '/Library/Application Support/Dogecoin/dogecoin.conf').readlines()
for conf_line in conf_lines:
parts = conf_line.strip().split('=', 1) # up to 2 parts
if (parts[0] == 'rpcport') and not len(port):
port = int(parts[1])
if (parts[0] == 'rpcuser') and not len(user):
user = parts[1]
if (parts[0] == 'rpcpassword') and not len(password):
password = parts[1]
            if not port:
                port = 44555 if testnet else 22555  # default RPC ports (testnet 44555, mainnet 22555)
            if not (user and password):
return None # no point trying in this case
url = 'http://' + OP_RETURN_DOGECOIN_IP + ':' + str(port) + '/'
try:
from urllib2 import HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, build_opener, install_opener, \
urlopen, HTTPError
except ImportError:
from urllib.request import HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, build_opener, \
install_opener, urlopen, HTTPError
passman = HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, url, user, password)
auth_handler = HTTPBasicAuthHandler(passman)
opener = build_opener(auth_handler)
install_opener(opener)
raw_result = urlopen(url, json.dumps(request).encode('utf-8'), OP_RETURN_NET_TIMEOUT).read()
result_array = json.loads(raw_result.decode('utf-8'))
result = result_array['result']
return result
# Working with data references
# The format of a data reference is: [estimated block height]-[partial txid] - where:
# [estimated block height] is the block where the first transaction might appear and following
# which all subsequent transactions are expected to appear. In the event of a weird blockchain
# reorg, it is possible the first transaction might appear in a slightly earlier block. When
# embedding data, we set [estimated block height] to 1+(the current block height).
# [partial txid] contains 2 adjacent bytes from the txid, at a specific position in the txid:
# 2*([partial txid] div 65536) gives the offset of the 2 adjacent bytes, between 0 and 28.
# ([partial txid] mod 256) is the byte of the txid at that offset.
# (([partial txid] mod 65536) div 256) is the byte of the txid at that offset plus one.
# Note that the txid is ordered according to user presentation, not raw data in the block.
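# Worked example (hypothetical values): for a txid whose hex form starts "a1b2...",
# and whose first two bytes do not clash with any txid in avoid_txids, txid_offset
# is 0, so tx_ref = 0xa1 + 256*0xb2 + 65536*0 = 45729; with next_height 123456 the
# reference produced by OP_RETURN_calc_ref() is "123456-045729".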
def OP_RETURN_calc_ref(next_height, txid, avoid_txids):
"""
Calculate internal reference
:param next_height:
:param txid:
:param avoid_txids:
:return:
"""
txid_binary = OP_RETURN_hex_to_bin(txid)
for txid_offset in range(15):
sub_txid = txid_binary[2 * txid_offset:2 * txid_offset + 2]
clashed = False
for avoid_txid in avoid_txids:
avoid_txid_binary = OP_RETURN_hex_to_bin(avoid_txid)
if (
(avoid_txid_binary[2 * txid_offset:2 * txid_offset + 2] == sub_txid) and
(txid_binary != avoid_txid_binary)
):
clashed = True
break
if not clashed:
break
if clashed: # could not find a good reference
return None
tx_ref = ord(txid_binary[2 * txid_offset:1 + 2 * txid_offset]) + 256 * ord(
txid_binary[1 + 2 * txid_offset:2 + 2 * txid_offset]) + 65536 * txid_offset
return '%06d-%06d' % (next_height, tx_ref)
def OP_RETURN_get_ref_parts(ref):
    if not re.search(r'^[0-9]+\-[0-9A-Fa-f]+$', ref):  # also support partial txid for second half
return None
parts = ref.split('-')
if re.search('[A-Fa-f]', parts[1]):
if len(parts[1]) >= 4:
txid_binary = OP_RETURN_hex_to_bin(parts[1][0:4])
parts[1] = ord(txid_binary[0:1]) + 256 * ord(txid_binary[1:2]) + 65536 * 0
else:
return None
parts = list(map(int, parts))
if parts[1] > 983039: # 14*65536+65535
return None
return parts
def OP_RETURN_get_ref_heights(ref, max_height):
parts = OP_RETURN_get_ref_parts(ref)
if not parts:
return None
return OP_RETURN_get_try_heights(parts[0], max_height, True)
def OP_RETURN_get_try_heights(est_height, max_height, also_back):
forward_height = est_height
back_height = min(forward_height - 1, max_height)
heights = []
mempool = False
try_height = 0
while True:
if also_back and ((try_height % 3) == 2): # step back every 3 tries
heights.append(back_height)
back_height -= 1
else:
if forward_height > max_height:
if not mempool:
heights.append(0) # indicates to try mempool
mempool = True
elif not also_back:
break # nothing more to do here
else:
heights.append(forward_height)
forward_height += 1
if len(heights) >= OP_RETURN_MAX_BLOCKS:
break
try_height += 1
return heights
def OP_RETURN_match_ref_txid(ref, txid):
parts = OP_RETURN_get_ref_parts(ref)
if not parts:
return None
txid_offset = int(parts[1] / 65536)
txid_binary = OP_RETURN_hex_to_bin(txid)
txid_part = txid_binary[2 * txid_offset:2 * txid_offset + 2]
txid_match = bytearray([parts[1] % 256, int((parts[1] % 65536) / 256)])
return txid_part == txid_match # exact binary comparison
# Unpacking and packing dogecoin blocks and transactions
def OP_RETURN_unpack_block(binary):
"""
Unpack a block into single transactions
:param binary: raw block
"""
buffer = OP_RETURN_buffer(binary)
block = {}
block['version'] = buffer.shift_unpack(4, '<L')
block['hashPrevBlock'] = OP_RETURN_bin_to_hex(buffer.shift(32)[::-1])
block['hashMerkleRoot'] = OP_RETURN_bin_to_hex(buffer.shift(32)[::-1])
block['time'] = buffer.shift_unpack(4, '<L')
block['bits'] = buffer.shift_unpack(4, '<L')
block['nonce'] = buffer.shift_unpack(4, '<L')
block['tx_count'] = buffer.shift_varint()
block['txs'] = {}
old_ptr = buffer.used()
while buffer.remaining():
transaction = OP_RETURN_unpack_txn_buffer(buffer)
new_ptr = buffer.used()
size = new_ptr - old_ptr
raw_txn_binary = binary[old_ptr:old_ptr + size]
txid = OP_RETURN_bin_to_hex(hashlib.sha256(hashlib.sha256(raw_txn_binary).digest()).digest()[::-1])
old_ptr = new_ptr
transaction['size'] = size
block['txs'][txid] = transaction
return block
def OP_RETURN_unpack_txn(binary):
return OP_RETURN_unpack_txn_buffer(OP_RETURN_buffer(binary))
def OP_RETURN_unpack_txn_buffer(buffer):
# see: https://en.bitcoin.it/wiki/Transactions
txn = {
'vin': [],
'vout': [],
}
txn['version'] = buffer.shift_unpack(4, '<L') # small-endian 32-bits
inputs = buffer.shift_varint()
if inputs > 100000: # sanity check
return None
for _ in range(inputs):
input = {}
input['txid'] = OP_RETURN_bin_to_hex(buffer.shift(32)[::-1])
input['vout'] = buffer.shift_unpack(4, '<L')
length = buffer.shift_varint()
input['scriptSig'] = OP_RETURN_bin_to_hex(buffer.shift(length))
input['sequence'] = buffer.shift_unpack(4, '<L')
txn['vin'].append(input)
outputs = buffer.shift_varint()
if outputs > 100000: # sanity check
return None
for _ in range(outputs):
output = {}
output['value'] = float(buffer.shift_uint64()) / 100000000
length = buffer.shift_varint()
output['scriptPubKey'] = OP_RETURN_bin_to_hex(buffer.shift(length))
txn['vout'].append(output)
txn['locktime'] = buffer.shift_unpack(4, '<L')
return txn
def OP_RETURN_find_spent_txid(txns, spent_txid, spent_vout):
for txid, txn_unpacked in txns.items():
for input in txn_unpacked['vin']:
if (input['txid'] == spent_txid) and (input['vout'] == spent_vout):
return txid
return None
def OP_RETURN_find_txn_data(txn_unpacked):
"""
find and return op_return in unpacked transaction, else None
:param txn_unpacked: unpacked transaction
:return: None or op_return data
"""
for index, output in enumerate(txn_unpacked['vout']):
op_return = OP_RETURN_get_script_data(OP_RETURN_hex_to_bin(output['scriptPubKey']))
if op_return:
return {
'index': index,
'op_return': op_return,
}
return None
def OP_RETURN_get_script_data(scriptPubKeyBinary):
"""
Return op_return data from binary coin script (tests if it starts with b'\x6a' and extracts content)
:param scriptPubKeyBinary: binary coin script
:return: op_return data
"""
op_return = None
if scriptPubKeyBinary[0:1] == b'\x6a':
first_ord = ord(scriptPubKeyBinary[1:2])
if first_ord <= 75:
op_return = scriptPubKeyBinary[2:2 + first_ord]
elif first_ord == 0x4c:
op_return = scriptPubKeyBinary[3:3 + ord(scriptPubKeyBinary[2:3])]
elif first_ord == 0x4d:
op_return = scriptPubKeyBinary[4:4 + ord(scriptPubKeyBinary[2:3]) + 256 * ord(scriptPubKeyBinary[3:4])]
return op_return
def OP_RETURN_pack_txn(txn):
"""
Binary pack a transaction
:param txn: transaction
:return: binary packed version of transaction
"""
binary = b''
binary += struct.pack('<L', txn['version'])
binary += OP_RETURN_pack_varint(len(txn['vin']))
for input in txn['vin']:
binary += OP_RETURN_hex_to_bin(input['txid'])[::-1]
binary += struct.pack('<L', input['vout'])
binary += OP_RETURN_pack_varint(int(len(input['scriptSig']) / 2)) # divide by 2 because it is currently in hex
binary += OP_RETURN_hex_to_bin(input['scriptSig'])
binary += struct.pack('<L', input['sequence'])
binary += OP_RETURN_pack_varint(len(txn['vout']))
for output in txn['vout']:
binary += OP_RETURN_pack_uint64(int(round(output['value'] * 100000000)))
binary += OP_RETURN_pack_varint(
int(len(output['scriptPubKey']) / 2)) # divide by 2 because it is currently in hex
binary += OP_RETURN_hex_to_bin(output['scriptPubKey'])
binary += struct.pack('<L', txn['locktime'])
return binary
def OP_RETURN_pack_varint(integer):
    if integer > 0xFFFFFFFF:
        packed = b"\xFF" + OP_RETURN_pack_uint64(integer)
    elif integer > 0xFFFF:
        packed = b"\xFE" + struct.pack('<L', integer)
    elif integer > 0xFC:
        packed = b"\xFD" + struct.pack('<H', integer)
    else:
        packed = struct.pack('B', integer)
return packed
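# Worked examples of the Bitcoin-style varint encoding implemented above:
#   OP_RETURN_pack_varint(0x20)       -> b'\x20'                  (single byte)
#   OP_RETURN_pack_varint(0x1234)     -> b'\xfd\x34\x12'          (0xFD prefix + uint16 LE)
#   OP_RETURN_pack_varint(0x12345678) -> b'\xfe\x78\x56\x34\x12'  (0xFE prefix + uint32 LE)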
def OP_RETURN_pack_uint64(integer):
upper = int(integer / 4294967296)
lower = integer - upper * 4294967296
return struct.pack('<L', lower) + struct.pack('<L', upper)
class OP_RETURN_buffer():
"""
Helper class for unpacking dogecoin binary data
"""
def __init__(self, data, ptr=0):
self.data = data
self.len = len(data)
self.ptr = ptr
def shift(self, chars):
prefix = self.data[self.ptr:self.ptr + chars]
self.ptr += chars
return prefix
def shift_unpack(self, chars, format):
try:
unpack = struct.unpack(format, self.shift(chars))
except:
unpack = (0,)
return unpack[0]
def shift_varint(self):
value = self.shift_unpack(1, 'B')
if value == 0xFF:
value = self.shift_uint64()
elif value == 0xFE:
value = self.shift_unpack(4, '<L')
elif value == 0xFD:
value = self.shift_unpack(2, '<H')
return value
def shift_uint64(self):
return self.shift_unpack(4, '<L') + 4294967296 * self.shift_unpack(4, '<L')
def used(self):
return min(self.ptr, self.len)
def remaining(self):
return max(self.len - self.ptr, 0)
# Converting binary <-> hexadecimal
def OP_RETURN_hex_to_bin(hex):
"""
Converts hex to binary
:param hex: hex string
:return: binary form
"""
try:
raw = binascii.a2b_hex(hex)
except Exception:
return None
return raw
def OP_RETURN_bin_to_hex(string):
"""
Converts from binary to hex
:param string: binary string
:return: hex string
"""
return binascii.b2a_hex(string).decode('utf-8')
|
kubrik-engineering/opendocumentrepository
|
op_return_dogecoin.py
|
Python
|
agpl-3.0
| 31,994 |
from django.conf.urls import patterns, include, url
from django.conf.urls.defaults import *
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from recruit import views as recruit_views
urlpatterns = patterns('',
url(
r'^$',
recruit_views.homeViews,
),
url(
r'^registration$',
recruit_views.registrationViews,
),
url(
r'^response$',
recruit_views.responseViews,
),
url(
r'^mobile',
recruit_views.homeMobileViews,
),
url(
r'^responseMobile',
recruit_views.responseMobileViews,
),
)
urlpatterns += staticfiles_urlpatterns()
|
EducationAdministrationSystem/EducationSystem
|
recruit/urls.py
|
Python
|
agpl-3.0
| 666 |
# -*- coding: utf-8 -*-
# Copyright Puzzlebox Productions, LLC (2010-2014)
#
# This code is released under the GNU Public License (GPL) version 2
# For more information please refer to http://www.gnu.org/copyleft/gpl.html
__changelog__ = """\
Last Update: 2014.02.23
"""
__todo__ = """
"""
### IMPORTS ###
import os, sys, time
import Puzzlebox.Synapse.Configuration as configuration
if configuration.ENABLE_PYSIDE:
try:
import PySide as PyQt4
from PySide import QtCore, QtGui
except Exception, e:
print "ERROR: [Synapse:Session] Exception importing PySide:",
print e
configuration.ENABLE_PYSIDE = False
else:
print "INFO: [Synapse:Session] Using PySide module"
if not configuration.ENABLE_PYSIDE:
print "INFO: [Synapse:Session] Using PyQt4 module"
from PyQt4 import QtCore, QtGui
try:
import cPickle as pickle
except:
import pickle
#####################################################################
# Globals
#####################################################################
DEBUG = configuration.DEBUG
DEFAULT_SIGNAL_LEVEL_MESSAGE = \
{"poorSignalLevel": 0}
# A quantifier of the quality of the brainwave signal.
# This is an integer value that is generally in the
# range of 0 to 200, with 0 indicating a
# good signal and 200 indicating an off-head state.
DEFAULT_EEG_POWER_MESSAGE = \
{"eegPower": { \
'delta': 0, \
'theta': 0, \
'lowAlpha': 0, \
'highAlpha': 0, \
'lowBeta': 0, \
'highBeta': 0, \
'lowGamma': 0, \
'highGamma': 0, \
}, \
} # A container for the EEG powers. These may
# be either integer or floating-point values.
# Maximum values are undocumented but assumed to be 65535
DEFAULT_ESENSE_MESSAGE = \
{"eSense": { \
'attention': 0, \
'meditation': 0, \
}, \
} # A container for the eSense™ attributes.
# These are integer values between 0 and 100,
# where 0 is perceived as a lack of that attribute
# and 100 is an excess of that attribute.
DEFAULT_BLINK_MESSAGE = {"blinkStrength": 255}
# The strength of a detected blink. This is
# an integer in the range of 0-255.
DEFAULT_RAWEEG_MESSAGE = {"rawEeg": 255}
# The raw data reading off the forehead sensor.
# This may be either an integer or a floating-point value.
DEFAULT_PACKET = {}
DEFAULT_PACKET.update(DEFAULT_EEG_POWER_MESSAGE)
DEFAULT_PACKET.update(DEFAULT_SIGNAL_LEVEL_MESSAGE)
DEFAULT_PACKET.update(DEFAULT_ESENSE_MESSAGE)
PACKET_MINIMUM_TIME_DIFFERENCE_THRESHOLD = 0.75
#####################################################################
# Classes
#####################################################################
class puzzlebox_synapse_session(QtGui.QWidget):
def __init__(self, log, \
DEBUG=DEBUG, \
parent=None, \
):
self.log = log
self.DEBUG = DEBUG
self.parent=parent
if self.parent == None:
QtGui.QWidget.__init__(self, parent)
#self.setupUi(self)
self.configureSettings()
self.connectWidgets()
self.name = "Synapse:Session"
if (sys.platform == 'win32'):
self.homepath = os.path.join( \
os.environ['HOMEDRIVE'], \
os.environ['HOMEPATH'], \
'Desktop')
elif (sys.platform == 'darwin'):
desktop = os.path.join(os.environ['HOME'], 'Documents')
if os.path.exists(desktop):
self.homepath = desktop
else:
self.homepath = os.environ['HOME']
else:
desktop = os.path.join(os.environ['HOME'], 'Desktop')
if os.path.exists(desktop):
self.homepath = desktop
else:
self.homepath = os.environ['HOME']
if not os.path.exists(self.homepath):
if self.DEBUG:
print "WARNING: [Synapse:Session] User default path not found"
self.homepath = os.getcwd()
##################################################################
def configureSettings(self):
pass
##################################################################
def connectWidgets(self):
pass
##################################################################
def updateProfileSessionStatus(self, source=None, target=None):
session_time = self.calculateSessionTime()
#if source == None:
#if self.parent == None:
#source = self
#else:
#source = self.parent
#if target == None:
#if self.parent == None:
#target = self
#else:
#target = self.parent
#target.textLabelSessionTime.setText(session_time)
self.textLabelSessionTime.setText(session_time)
#self.parent.packet_count)
#self.synapseServer.protocol.packet_count)
try:
packet_count = self.parent.plugin_eeg.getPacketCount()
except:
try:
packet_count = self.synapseServer.protocol.packet_count
except:
packet_count = 0
self.textLabelPacketsReceived.setText( "%i" % packet_count)
try:
bad_packets = self.parent.plugin_eeg.getBadPackets()
except:
try:
bad_packets = self.synapseServer.protocol.bad_packets
except:
bad_packets = 0
self.textLabelPacketsDropped.setText( "%i" % bad_packets)
##################################################################
def calculateSessionTime(self):
session_time = self.getSessionTime()
session_time = time.time() - session_time
session_time = int(session_time)
session_time = self.convert_seconds_to_datetime(session_time)
return (session_time)
##################################################################
def getSessionTime(self):
return (self.synapseServer.session_start_timestamp)
##################################################################
def collectData(self, source=None, target=None):
if source == None:
if self.parent == None:
source = self
else:
source = self.parent
if target == None:
if self.parent == None:
target = self
else:
target = self.parent
data = {}
data['rawEeg'] = source.packets['rawEeg']
data['signals'] = source.packets['signals']
data['sessionTime'] = self.calculateSessionTime()
data['profileName'] = str(target.lineEditSessionProfile.text())
return(data)
##################################################################
def parseTimeStamp(self, timestamp, local_version=False, truncate_time_zone=False):
try:
decimal = '%f' % timestamp
decimal = decimal.split('.')[1]
except:
decimal = '0'
localtime = time.localtime(timestamp)
if local_version:
date = time.strftime('%x', localtime)
localtime = time.strftime('%X', localtime)
elif truncate_time_zone:
date = time.strftime('%Y-%m-%d', localtime)
localtime = time.strftime('%H:%M:%S', localtime)
localtime = '%s.%s' % (localtime, decimal[:3])
else:
date = time.strftime('%Y-%m-%d', localtime)
localtime = time.strftime('%H:%M:%S', localtime)
localtime = '%s.%s %s' % (localtime, decimal, \
time.strftime('%Z', time.localtime(timestamp)))
return(date, localtime)
##################################################################
def saveData(self, source=None, target=None, output_file=None, use_default=False):
if source == None:
if self.parent == None:
source = self
else:
source = self.parent
if target == None:
if self.parent == None:
target = self
else:
target = self.parent
data = self.collectData(source=source, target=target)
(date, localtime) = self.parseTimeStamp(time.time())
default_filename = '%s %s.synapse' % (date, \
target.lineEditSessionProfile.text())
default_filename = os.path.join(self.homepath, default_filename)
if output_file == None:
            # use_default controls whether or not a file is automatically saved using the
# default name and path (as opposed to raising a GUI file selection menu)
# whenever an explicit filepath is not defined
if use_default:
output_file = default_filename
else:
output_file = QtGui.QFileDialog.getSaveFileName(parent=target, \
caption="Save Session Data to File", \
directory=default_filename, \
filter="Puzzlebox Synapse Data File (*.synapse)")
# TODO 2014-02-09 Disabling due to failure with Puzzlebox Orbit
# TODO 2014-02-23 Re-enabled due to failure to write files
try:
output_file = output_file[0]
except:
#output_file = ''
# TODO 2014-02-23 Attempted compromise
pass
if output_file == '':
return
file = open(str(output_file), 'w')
pickle.dump(data, file)
file.close()
##################################################################
def exportData(self, parent=None, source=None, target=None, output_file=None, use_default=False):
if parent == None:
if self.parent == None:
parent = self
else:
parent = self.parent
if source == None:
if self.parent == None:
source = self
else:
source = self.parent
if target == None:
if self.parent == None:
target = self
else:
target = self.parent
try:
export_csv_raw = target.configuration.EXPORT_CSV_RAW_DATA
except:
export_csv_raw = False
(date, localtime) = self.parseTimeStamp(time.time())
default_filename = '%s %s.csv' % (date, \
target.lineEditSessionProfile.text())
default_filename = os.path.join(target.homepath, default_filename)
if output_file == None:
            # use_default controls whether or not a file is automatically saved using the
# default name and path (as opposed to raising a GUI file selection menu)
# whenever an explicit filepath is not defined
if use_default:
output_file = default_filename
else:
output_file = QtGui.QFileDialog.getSaveFileName(parent=target, \
caption="Export Session Data to File", \
directory=default_filename, \
filter="CSV File (*.csv);;Text File (*.txt)")
# TODO 2014-02-09 Disabling due to failure with Puzzlebox Orbit
# TODO 2014-02-23 Re-enabled due to failure to write files
try:
output_file = output_file[0]
except:
#output_file = ''
# TODO 2014-02-23 Attempted compromise
pass
if output_file == '':
return
if str(output_file).endswith('.csv'):
outputData = self.exportDataToCSV(parent=parent, source=source, target=target)
else:
try:
outputData = self.textEditDebugConsole.toPlainText()
except:
outputData = self.exportDataToCSV(parent=parent, source=source, target=target)
if self.DEBUG:
print "Writing file:",
print output_file
file = open(os.path.join(str(output_file)), 'w')
file.write(outputData)
file.close()
if export_csv_raw:
output_file = output_file.replace('.csv', '-rawEeg.csv')
outputData = self.exportRawDataToCSV(parent=parent, source=source, target=target)
if outputData != None:
file = open(str(output_file), 'w')
file.write(outputData)
file.close()
##################################################################
def exportDataToCSV(self, parent=None, source=None, target=None):
# handle importing class from multiple sources
if parent == None:
if self.parent == None:
parent = self
else:
parent = self.parent
if source == None:
if self.parent == None:
source = self
else:
source = self.parent
if target == None:
if self.parent == None:
target = self
else:
target = self.parent
try:
truncate_csv_timezone = target.configuration.EXPORT_CSV_TRUNCATE_TIMEZONE
except:
truncate_csv_timezone = False
#print source.name
#print source.packets['signals']
# NOTE: no need to scrub emulated data
try:
scrub_data = target.configuration.EXPORT_CSV_SCRUB_DATA
except:
scrub_data = False
try:
if self.parent.plugin_eeg.emulate_headset_data:
scrub_data = False
except:
pass
headers = 'Date,Time'
customDataHeaders = []
for header in parent.customDataHeaders:
customDataHeaders.append(header)
for plugin in parent.activePlugins:
#print plugin.name
for header in plugin.customDataHeaders:
customDataHeaders.append(header)
for each in customDataHeaders:
headers = headers + ',%s' % each
headers = headers + '\n'
csv = {}
for packet in source.packets['signals']:
# NOTE: Move this to ThinkGear Server object
#if 'rawEeg' in packet.keys():
#continue
if 'timestamp' not in packet.keys() and len(packet.keys()) == 1:
if self.DEBUG:
print "WARN: Skipping empty packet:",
print packet
# skip empty packets
continue
print "packet:",
print packet
timestamp = packet['timestamp']
#(date, localtime) = self.parseTimeStamp(timestamp, \
#truncate_time_zone=truncate_csv_timezone)
(date, localtime) = source.parseTimeStamp(timestamp, \
truncate_time_zone=truncate_csv_timezone)
if timestamp not in csv.keys():
#if 'blinkStrength' in packet.keys():
## Skip any blink packets from log
#continue
#timestamp = packet['timestamp']
##(date, localtime) = self.parseTimeStamp(timestamp, \
##truncate_time_zone=truncate_csv_timezone)
#(date, localtime) = source.parseTimeStamp(timestamp, \
#truncate_time_zone=truncate_csv_timezone)
csv[timestamp] = {}
csv[timestamp]['Date'] = date
csv[timestamp]['Time'] = localtime
for plugin in parent.activePlugins:
if plugin.customDataHeaders != []:
if self.DEBUG > 2:
print "INFO: [Synapse:Session] Exporting:",
print plugin.name
try:
csv[timestamp] = plugin.processPacketForExport(packet=packet, output=csv[timestamp])
if self.DEBUG > 2:
print "INFO [Synapse:Session]: Export Successful"
print plugin.name
except Exception, e:
if self.DEBUG:
print "ERROR: [Synapse:Session] Exception calling processPacketForExport on",
print plugin.name
for header in customDataHeaders:
if 'custom' in packet.keys() and \
header in packet['custom'].keys():
timestamp = packet['timestamp']
(date, localtime) = source.parseTimeStamp(timestamp, \
truncate_time_zone=truncate_csv_timezone)
if timestamp not in csv.keys():
csv[timestamp] = {}
csv[timestamp]['Date'] = date
csv[timestamp]['Time'] = localtime
if self.DEBUG:
print "WARN: Unmatched custom packet:",
print packet
csv[timestamp][header] = packet['custom'][header]
if scrub_data:
csv = self.scrubData(csv, truncate_csv_timezone, source=source)
output = headers
timestamps = csv.keys()
timestamps.sort()
for timestamp in timestamps:
row = '%s,%s' % \
(csv[timestamp]['Date'], \
csv[timestamp]['Time'])
for header in customDataHeaders:
if header in csv[timestamp].keys():
row = row + ',%s' % csv[timestamp][header]
else:
#row = row + ','
row = ''
if self.DEBUG > 1:
print "WARN: empty signals packet:",
print csv[timestamp]
break
if row != '':
row = row + '\n'
output = output + row
return(output)
##################################################################
def exportRawDataToCSV(self, parent=None, source=None, target=None):
# handle importing class from multiple sources
if parent == None:
if self.parent == None:
parent = self
else:
parent = self.parent
if source == None:
if self.parent == None:
source = self
else:
source = self.parent
if target == None:
if self.parent == None:
target = self
else:
target = self.parent
try:
truncate_csv_timezone = target.configuration.EXPORT_CSV_TRUNCATE_TIMEZONE
except:
truncate_csv_timezone = False
if source.packets['rawEeg'] == []:
return(None)
headers = 'Date,Time,Raw EEG'
headers = headers + '\n'
csv = {}
for packet in source.packets['rawEeg']:
# NOTE: Move this to ThinkGear Server object
if 'rawEeg' in packet.keys():
if packet['timestamp'] not in csv.keys():
timestamp = packet['timestamp']
(date, localtime) = source.parseTimeStamp(timestamp, \
truncate_time_zone=truncate_csv_timezone)
csv[timestamp] = {}
csv[timestamp]['Date'] = date
csv[timestamp]['Time'] = localtime
csv[timestamp]['rawEeg'] = packet['rawEeg']
output = headers
timestamps = csv.keys()
# Don't sort timestamps in order to better preserve the original raw signal
#timestamps.sort()
for timestamp in timestamps:
row = '%s,%s,%s' % \
(csv[timestamp]['Date'], \
csv[timestamp]['Time'], \
csv[timestamp]['rawEeg'])
row = row + '\n'
output = output + row
return(output)
#################################################################
def scrubData(self, csv, truncate_csv_timezone=False, source=None):
# If there are missing packets, repeat a given packet once per missing
# second until there is a gap between 1 and 2 seconds, in which case
# produce a final duplicate packet at the mid-point between the packets
if self.DEBUG:
print "INFO: Scrubbing Data"
if source == None:
if self.parent == None:
source = self
else:
source = self.parent
last_time = None
last_recorded_time = None
output = {}
csv_keys = csv.keys()
csv_keys.sort()
for key in csv_keys:
timestamp = key
if last_time == None:
# First entry in log
last_time = timestamp
last_recorded_time = timestamp
#output[key] = csv[key]
if key not in output.keys():
output[key] = DEFAULT_PACKET.copy()
output[key].update(csv[key])
continue
else:
#time_difference = timestamp - last_time
#time_difference = timestamp - last_recorded_time
time_difference = abs(timestamp - last_recorded_time)
if (time_difference <= 1) and \
(time_difference >= PACKET_MINIMUM_TIME_DIFFERENCE_THRESHOLD):
# Skip packets within the correct time threshold
last_time = timestamp
last_recorded_time = timestamp
#output[key] = csv[key]
if key not in output.keys():
output[key] = DEFAULT_PACKET.copy()
output[key].update(csv[key])
#print "<=1 and >=min"
continue
else:
if self.DEBUG > 1:
print "time_difference:",
print time_difference
print "timestamp:",
print source.parseTimeStamp(timestamp)[-1].split(' ')[0]
print "last_time:",
print source.parseTimeStamp(last_time)[-1].split(' ')[0]
print "last_recorded_time:",
print source.parseTimeStamp(last_recorded_time)[-1].split(' ')[0]
#new_packet = csv[key].copy()
if key not in output.keys():
new_packet = DEFAULT_PACKET.copy()
new_packet.update(csv[key])
if time_difference >= 2:
##new_time = last_time + 1
#new_time = last_recorded_time + 1
count = int(time_difference)
while count >= 1:
#new_packet = csv[key].copy()
if key not in output.keys():
new_packet = DEFAULT_PACKET.copy()
new_packet.update(csv[key])
new_time = last_recorded_time + 1
(date, formatted_new_time) = source.parseTimeStamp(new_time, \
truncate_time_zone=truncate_csv_timezone)
new_packet['Time'] = formatted_new_time
last_recorded_time = new_time
last_time = timestamp
if key not in output.keys():
output[new_time] = new_packet
else:
output[new_time].update(new_packet)
count = count - 1
continue
#print ">=2"
elif time_difference < PACKET_MINIMUM_TIME_DIFFERENCE_THRESHOLD:
# Spread out "bunched up" packets
#new_time = last_time + 1
new_time = last_recorded_time + 1
#new_time = last_recorded_time
#print "<min"
elif (time_difference < 2) and (time_difference > 1):
#new_time = last_time + ((last_time - timestamp) / 2)
#new_time = last_recorded_time + ((last_recorded_time - timestamp) / 2)
#new_time = last_time + 1
#new_time = last_recorded_time + 1
new_time = last_recorded_time
#print "<2"
(date, formatted_new_time) = source.parseTimeStamp(new_time, \
truncate_time_zone=truncate_csv_timezone)
new_packet['Time'] = formatted_new_time
#last_time = new_time
last_recorded_time = new_time
#last_time = timestamp
last_time = new_time
try:
output[new_time].update(new_packet)
except Exception, e:
output[new_time] = new_packet
#print e
if self.DEBUG > 1:
print "WARN: Scrubbing new packet:",
print new_packet
print
return(output)
##################################################################
def resetData(self, source=None):
if source == None:
if self.parent == None:
source = self
else:
source = self.parent
source.packets['rawEeg'] = []
source.packets['signals'] = []
if self.synapseServer != None:
self.synapseServer.protocol.resetSessionStartTime()
else:
self.resetSessionStartTime()
if self.synapseServer != None:
source.synapseServer.protocol.packet_count = 0
source.synapseServer.protocol.bad_packets = 0
else:
source.packet_count = 0
source.bad_packets = 0
self.updateProfileSessionStatus()
try:
source.textEditDebugConsole.setText("")
except:
pass
#####################################################################
def resetSessionStartTime(self, source=None):
self.session_start_timestamp = time.time()
#####################################################################
def convert_seconds_to_datetime(self, duration):
duration_hours = duration / (60 * 60)
duration_minutes = (duration - (duration_hours * (60 * 60))) / 60
duration_seconds = (duration - (duration_hours * (60 * 60)) - (duration_minutes * 60))
duration_hours = '%i' % duration_hours
if (len(duration_hours) == 1):
duration_hours = "0%s" % duration_hours
duration_minutes = '%i' % duration_minutes
if (len(duration_minutes) == 1):
duration_minutes = "0%s" % duration_minutes
duration_seconds = '%i' % duration_seconds
if (len(duration_seconds) == 1):
duration_seconds = "0%s" % duration_seconds
datetime = '%s:%s:%s' % (duration_hours, duration_minutes, duration_seconds)
return(datetime)
|
PuzzleboxIO/synapse-python
|
Puzzlebox/Synapse/Session.py
|
Python
|
agpl-3.0
| 22,816 |
import random
from collections import Counter, defaultdict
import itertools
# always starts at (0,0) -> (0,1)
# A path has the form [2, l, r]*: each step either extends straight by 2, goes left, or goes right.
T, L, R = range(3)
class Path:
def __init__(self, steps):
self.steps = steps
def xys(self, dx=0, dy=1):
""" Yields all positions on path """
x, y = 0, 0
yield (x, y)
for step in self.steps:
x, y = x + dx, y + dy
yield (x, y)
if step == L:
dx, dy = -dy, dx
if step == R:
dx, dy = dy, -dx
elif step == T:
x, y = x + dx, y + dy
yield (x, y)
def test(self):
""" Tests path is non-overlapping. """
ps = list(self.xys())
return len(set(ps)) == len(ps)
def test_loop(self):
""" Tests path is non-overlapping, except for first and last. """
ps = list(self.xys())
seen = set(ps)
return len(ps) == len(seen) or len(ps) == len(seen) + 1 and ps[0] == ps[-1]
def winding(self):
return self.steps.count(R) - self.steps.count(L)
def __repr__(self):
""" Path to string """
return ''.join({T: '2', R: 'R', L: 'L'}[x] for x in self.steps)
def show(self):
import matplotlib.pyplot as plt
xs, ys = zip(*self.xys())
plt.plot(xs, ys)
plt.axis('scaled')
plt.show()
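# A small, hypothetical sketch (not called anywhere in this module) illustrating
# the step encoding described above: T extends straight by two cells, L and R
# turn left and right respectively.
def _path_example():
    p = Path([T, L, T])
    # Starting at (0, 0) heading (0, 1), this should visit
    # (0, 0), (0, 1), (0, 2), (0, 3), (-1, 3), (-2, 3).
    positions = list(p.xys())
    assert positions[-1] == (-2, 3)
    assert p.test()  # no self-intersection
    return positions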
def unrotate(x, y, dx, dy):
""" Inverse rotate x, y by (dx,dy), where dx,dy=0,1 means 0 degrees.
Basically rotate(dx,dy, dx,dy) = (0, 1). """
while (dx, dy) != (0, 1):
x, y, dx, dy = -y, x, -dy, dx
return x, y
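# A tiny, hypothetical check (not part of the original module) of the property
# stated in the docstring above: un-rotating a direction by itself yields (0, 1).
def _unrotate_example():
    for dx, dy in ((0, 1), (1, 0), (0, -1), (-1, 0)):
        assert unrotate(dx, dy, dx, dy) == (0, 1)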
class Mitm:
def __init__(self, lr_price, t_price):
self.lr_price = lr_price
self.t_price = t_price
self.inv = defaultdict(list)
self.list = []
def prepare(self, budget):
dx0, dy0 = 0, 1
for path, (x, y, dx, dy) in self._good_paths(0, 0, dx0, dy0, budget):
self.list.append((path, x, y, dx, dy))
self.inv[x, y, dx, dy].append(path)
def rand_path(self, xn, yn, dxn, dyn):
""" Returns a path, starting at (0,0) with dx,dy = (0,1)
and ending at (xn,yn) with direction (dxn, dyn) """
while True:
path, x, y, dx, dy = random.choice(self.list)
path2s = self._lookup(dx, dy, xn - x, yn - y, dxn, dyn)
if path2s:
path2 = random.choice(path2s)
joined = Path(path + path2)
if joined.test():
return joined
def rand_path2(self, xn, yn, dxn, dyn):
""" Like rand_path, but uses a combination of a fresh random walk and
the lookup table. This allows for even longer paths. """
seen = set()
path = []
while True:
seen.clear()
del path[:]
x, y, dx, dy = 0, 0, 0, 1
seen.add((x, y))
for _ in range(2 * (abs(xn) + abs(yn))):
# We sample with weights proportional to what they are in _good_paths()
step, = random.choices(
[L, R, T], [1 / self.lr_price, 1 / self.lr_price, 2 / self.t_price])
path.append(step)
x, y = x + dx, y + dy
if (x, y) in seen:
break
seen.add((x, y))
if step == L:
dx, dy = -dy, dx
if step == R:
dx, dy = dy, -dx
elif step == T:
x, y = x + dx, y + dy
if (x, y) in seen:
break
seen.add((x, y))
if (x, y) == (xn, yn):
return Path(path)
ends = self._lookup(dx, dy, xn - x, yn - y, dxn, dyn)
if ends:
return Path(tuple(path) + random.choice(ends))
def rand_loop(self, clock=0):
""" Set clock = 1 for clockwise, -1 for anti clockwise. 0 for don't care. """
while True:
# The list only contains 0,1 starting directions
path, x, y, dx, dy = random.choice(self.list)
# Look for paths ending with the same direction
path2s = self._lookup(dx, dy, -x, -y, 0, 1)
if path2s:
path2 = random.choice(path2s)
joined = Path(path + path2)
# A clockwise path has 4 R's more than L's.
if clock and joined.winding() != clock * 4:
continue
if joined.test_loop():
return joined
def _good_paths(self, x, y, dx, dy, budget, seen=None):
if seen is None:
seen = set()
if budget >= 0:
yield (), (x, y, dx, dy)
if budget <= 0:
return
seen.add((x, y)) # Remember cleaning this up (A)
x1, y1 = x + dx, y + dy
if (x1, y1) not in seen:
for path, end in self._good_paths(
x1, y1, -dy, dx, budget - self.lr_price, seen):
yield (L,) + path, end
for path, end in self._good_paths(
x1, y1, dy, -dx, budget - self.lr_price, seen):
yield (R,) + path, end
seen.add((x1, y1)) # Remember cleaning this up (B)
x2, y2 = x1 + dx, y1 + dy
if (x2, y2) not in seen:
for path, end in self._good_paths(
x2, y2, dx, dy, budget - self.t_price, seen):
yield (T,) + path, end
seen.remove((x1, y1)) # Clean up (B)
seen.remove((x, y)) # Clean up (A)
def _lookup(self, dx, dy, xn, yn, dxn, dyn):
""" Return cached paths coming out of (0,0) with direction (dx,dy)
and ending up in (xn,yn) with direction (dxn,dyn). """
# Give me a path, pointing in direction (0,1) such that when I rotate
# it to (dx, dy) it ends at xn, yn in direction dxn, dyn.
xt, yt = unrotate(xn, yn, dx, dy)
dxt, dyt = unrotate(dxn, dyn, dx, dy)
return self.inv[xt, yt, dxt, dyt]
if __name__ == '__main__':
mitm = Mitm(1, 1)
mitm.prepare(10)
for i in range(1):
mitm.rand_loop().show()
for i in range(1, 10):
mitm.rand_path2(i, i, 0, 1).show()
for i in range(1, 10):
mitm.rand_path(i, i, 0, 1).show()
|
thomasahle/numberlink
|
gen/mitm.py
|
Python
|
agpl-3.0
| 6,461 |
"""
Badges related signal handlers.
"""
from django.dispatch import receiver
from lms.djangoapps.badges.events.course_meta import award_enrollment_badge
from lms.djangoapps.badges.utils import badges_enabled
from common.djangoapps.student.models import EnrollStatusChange
from common.djangoapps.student.signals import ENROLL_STATUS_CHANGE
@receiver(ENROLL_STATUS_CHANGE)
def award_badge_on_enrollment(sender, event=None, user=None, **kwargs): # pylint: disable=unused-argument
"""
Awards enrollment badge to the given user on new enrollments.
"""
    if badges_enabled() and event == EnrollStatusChange.enroll:
award_enrollment_badge(user)
|
stvstnfrd/edx-platform
|
lms/djangoapps/badges/handlers.py
|
Python
|
agpl-3.0
| 664 |
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# Django
from django.conf.urls import (
include,
url
)
from django.contrib.auth import views
from django.urls import path
from django.views.generic import TemplateView
# wger
from wger.core.forms import UserLoginForm
from wger.core.views import (
languages,
license,
misc,
repetition_units,
user,
weight_units
)
# sub patterns for languages
patterns_language = [
path('list',
languages.LanguageListView.as_view(),
name='overview'),
path('<int:pk>/view',
languages.LanguageDetailView.as_view(),
name='view'),
path('<int:pk>/delete',
languages.LanguageDeleteView.as_view(),
name='delete'),
path('<int:pk>/edit',
languages.LanguageEditView.as_view(),
name='edit'),
path('add',
languages.LanguageCreateView.as_view(),
name='add'),
]
# sub patterns for user
patterns_user = [
path('login',
views.LoginView.as_view(template_name='user/login.html',
authentication_form=UserLoginForm),
name='login'),
path('logout',
user.logout,
name='logout'),
path('delete',
user.delete,
name='delete'),
path('<int:user_pk>/delete',
user.delete,
name='delete'),
path('<int:user_pk>/trainer-login',
user.trainer_login,
name='trainer-login'),
path('registration',
user.registration,
name='registration'),
path('preferences',
user.preferences,
name='preferences'),
path('api-key',
user.api_key,
name='api-key'),
path('demo-entries',
misc.demo_entries,
name='demo-entries'),
path('<int:pk>/activate',
user.UserActivateView.as_view(),
name='activate'),
path('<int:pk>/deactivate',
user.UserDeactivateView.as_view(),
name='deactivate'),
path('<int:pk>/edit',
user.UserEditView.as_view(),
name='edit'),
path('<int:pk>/overview',
user.UserDetailView.as_view(),
name='overview'),
path('list',
user.UserListView.as_view(),
name='list'),
# Password reset is implemented by Django, no need to cook our own soup here
# (besides the templates)
path('password/change',
user.WgerPasswordChangeView.as_view(),
name='change-password'),
path('password/reset/',
user.WgerPasswordResetView.as_view(),
name='password_reset'),
path('password/reset/done/',
views.PasswordResetDoneView.as_view(),
name='password_reset_done'),
url(r'^password/reset/check/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,33})$',
user.WgerPasswordResetConfirmView.as_view(),
name='password_reset_confirm'),
path('password/reset/complete/',
views.PasswordResetCompleteView.as_view(),
name='password_reset_complete'),
]
# sub patterns for licenses
patterns_license = [
path('license/list',
license.LicenseListView.as_view(),
name='list'),
path('license/add',
license.LicenseAddView.as_view(),
name='add'),
    path('license/<int:pk>/edit',
license.LicenseUpdateView.as_view(),
name='edit'),
path('license/<int:pk>/delete',
license.LicenseDeleteView.as_view(),
name='delete'),
]
# sub patterns for setting units
patterns_repetition_units = [
path('list',
repetition_units.ListView.as_view(),
name='list'),
path('add',
repetition_units.AddView.as_view(),
name='add'),
path('<int:pk>/edit',
repetition_units.UpdateView.as_view(),
name='edit'),
path('<int:pk>/delete',
repetition_units.DeleteView.as_view(),
name='delete'),
]
# sub patterns for setting units
patterns_weight_units = [
path('list',
weight_units.ListView.as_view(),
name='list'),
path('add',
weight_units.AddView.as_view(),
name='add'),
    path('<int:pk>/edit',
weight_units.UpdateView.as_view(),
name='edit'),
path('<int:pk>/delete',
weight_units.DeleteView.as_view(),
name='delete'),
]
#
# Actual patterns
#
urlpatterns = [
# The landing page
path('',
misc.index,
name='index'),
# The dashboard
path('dashboard',
misc.dashboard,
name='dashboard'),
# Others
path('about',
TemplateView.as_view(template_name="misc/about.html"),
name='about'),
path('contact',
misc.ContactClassView.as_view(template_name="misc/contact.html"),
name='contact'),
path('feedback',
misc.FeedbackClass.as_view(),
name='feedback'),
path('language/', include((patterns_language, 'language'), namespace="language")),
path('user/', include((patterns_user, 'user'), namespace="user")),
path('license/', include((patterns_license, 'license'), namespace="license")),
path('repetition-unit/', include((patterns_repetition_units, 'repetition-unit'), namespace="repetition-unit")),
path('weight-unit/', include((patterns_weight_units, 'weight-unit'), namespace="weight-unit")),
]
|
rolandgeider/wger
|
wger/core/urls.py
|
Python
|
agpl-3.0
| 5,940 |
import requests
from datetime import datetime, timedelta
from pytz import timezone
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from django.conf import settings
from db.base.models import Satellite, Transmitter, DemodData
class Command(BaseCommand):
help = 'Fetch Satellite data from Network'
def handle(self, *args, **options):
apiurl = settings.NETWORK_API_ENDPOINT
data_url = "{0}data".format(apiurl)
start_date = datetime.utcnow() - timedelta(days=int(settings.DATA_FETCH_DAYS))
start_date = datetime.strftime(start_date, '%Y-%m-%dT%H:%M:%SZ')
params = {'start': start_date}
response = requests.get(data_url, params=params)
satellites = Satellite.objects.all()
for obj in response.json():
norad_cat_id = obj['norad_cat_id']
data_id = obj['id']
station = obj['station_name']
lat = obj['station_lat']
lng = obj['station_lng']
try:
satellite = satellites.get(norad_cat_id=norad_cat_id)
except Satellite.DoesNotExist:
continue
try:
transmitter = Transmitter.objects.get(uuid=obj['transmitter'])
except Transmitter.DoesNotExist:
transmitter = None
DemodData.objects.filter(data_id=data_id).delete()
for demoddata in obj['demoddata']:
payload_url = demoddata['payload_demod']
timestamp = datetime.strptime(payload_url.split('/')[-1].split('_')[0],
'%Y%m%dT%H%M%SZ').replace(tzinfo=timezone('UTC'))
frame = str(requests.get(payload_url).json())
payload_frame = ContentFile(frame, name='network')
DemodData.objects.create(satellite=satellite, transmitter=transmitter,
data_id=data_id, payload_frame=payload_frame,
timestamp=timestamp, source='network',
station=station, lat=lat, lng=lng)
|
Roboneet/satnogs-db
|
db/base/management/commands/fetch_data.py
|
Python
|
agpl-3.0
| 2,165 |
#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" An overcurrent protective device with a circuit opening fusible part that is heated and severed by the passage of overcurrent through it. A fuse is considered a switching device because it breaks current.
"""
# <<< imports
# @generated
from cdpsm.iec61970.wires.switch import Switch
from cdpsm.iec61970.domain import CurrentFlow
from google.appengine.ext import db
# >>> imports
class Fuse(Switch):
""" An overcurrent protective device with a circuit opening fusible part that is heated and severed by the passage of overcurrent through it. A fuse is considered a switching device because it breaks current.
"""
# <<< fuse.attributes
# @generated
# Fault interrupting current rating.
rating_current = CurrentFlow
# >>> fuse.attributes
# <<< fuse.references
# @generated
# >>> fuse.references
# <<< fuse.operations
# @generated
# >>> fuse.operations
# EOF -------------------------------------------------------------------------
|
rwl/openpowersystem
|
cdpsm/iec61970/wires/fuse.py
|
Python
|
agpl-3.0
| 1,864 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# l10n FR FEC module for Odoo
# Copyright (C) 2013-2015 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'France - FEC',
'version': '0.1',
'category': 'French Localization',
'license': 'AGPL-3',
'summary': "Fichier d'Échange Informatisé (FEC) for France",
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'depends': ['account_accountant'],
'description': "",
'external_dependencies': {
'python': ['unicodecsv'],
},
'data': [
'wizard/fec_view.xml',
],
'installable': True,
}
|
noemis-fr/old-custom
|
l10n_fr_fec/__openerp__.py
|
Python
|
agpl-3.0
| 1,536 |
from flask import *
app = Flask(__name__, static_url_path='')
app.config['PROPAGATE_EXCEPTIONS']=True
@app.route('/', methods=['GET'])
def home():
return app.send_static_file('index.html')
@app.route('/whitepaper', methods=['GET'])
def whitepper():
return app.send_static_file('blockfate.pdf')
if __name__ == '__main__':
app.run()
|
barisser/BlockFate_site
|
main.py
|
Python
|
agpl-3.0
| 342 |
# This file is part of OpenHatch.
# Copyright (C) 2010 Parker Phinney
# Copyright (C) 2009, 2010, 2011 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.http import HttpResponse, QueryDict, HttpResponseServerError, HttpResponseRedirect
from django.core import serializers
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl # Python 2.5 on deployment
from mysite.search.models import Project
import mysite.search.controllers
import mysite.base.controllers
import mysite.base.unicode_sanity
from mysite.base.helpers import render_response
import datetime
from dateutil import tz
import pytz
from django.utils import simplejson
import mysite.search.forms
import mysite.base.decorators
# Via http://www.djangosnippets.org/snippets/1435/
def encode_datetime(obj):
if isinstance(obj, datetime.date):
fixed = datetime.datetime(obj.year, obj.month, obj.day, tzinfo=pytz.utc)
obj = fixed
if isinstance(obj, datetime.datetime):
return obj.astimezone(tz.tzutc()).strftime('%Y-%m-%dT%H:%M:%SZ')
raise TypeError("%s" % type(obj) + repr(obj) + " is not JSON serializable")
def fetch_bugs(request, invalid_subscribe_to_alert_form=None):
# Make the query string keys lowercase using a redirect.
if any([k.lower() != k for k in request.GET.keys()]):
new_GET = {}
for key in request.GET.keys():
new_GET[key.lower()] = request.GET[key]
return HttpResponseRedirect(reverse(fetch_bugs) + '?' + mysite.base.unicode_sanity.urlencode(new_GET))
if request.user.is_authenticated():
person = request.user.get_profile()
suggestion_keys = person.get_recommended_search_terms()
else:
suggestion_keys = []
suggestions = [(i, k, False) for i, k in enumerate(suggestion_keys)]
format = request.GET.get('format', None)
start = int(request.GET.get('start', 1))
end = int(request.GET.get('end', 10))
total_bug_count = 0
query = mysite.search.controllers.Query.create_from_GET_data(request.GET)
if query:
bugs = query.get_bugs_unordered()
# Sort
bugs = mysite.search.controllers.order_bugs(bugs)
total_bug_count = bugs.count()
bugs = bugs[start-1:end]
else:
bugs = []
data = {}
data['query'] = query
prev_page_query_str = QueryDict('')
prev_page_query_str = prev_page_query_str.copy()
next_page_query_str = QueryDict('')
next_page_query_str = next_page_query_str.copy()
if query:
prev_page_query_str['q'] = query.terms_string
next_page_query_str['q'] = query.terms_string
if format:
prev_page_query_str['format'] = format
next_page_query_str['format'] = format
for facet_name, selected_option in query.active_facet_options.items():
prev_page_query_str[facet_name] = selected_option
next_page_query_str[facet_name] = selected_option
diff = end - start
prev_page_query_str['start'] = start - diff - 1
prev_page_query_str['end'] = start - 1
next_page_query_str['start'] = end + 1
next_page_query_str['end'] = end + diff + 1
data['start'] = start
data['end'] = min(end, total_bug_count)
data['prev_page_url'] = '/search/?' + prev_page_query_str.urlencode()
data['next_page_url'] = '/search/?' + next_page_query_str.urlencode()
data['this_page_query_str'] = mysite.base.unicode_sanity.urlencode(request.GET)
is_this_page_1 = (start <= 1)
is_this_the_last_page = ( end >= (total_bug_count - 1) )
data['show_prev_page_link'] = not is_this_page_1
data['show_next_page_link'] = not is_this_the_last_page
if request.GET.get('confirm_email_alert_signup', ''):
data['confirm_email_alert_signup'] = 1
# If this the last page of results, display a form allowing user to
# subscribe to a Volunteer Opportunity search alert
if query and is_this_the_last_page:
if invalid_subscribe_to_alert_form:
alert_form = invalid_subscribe_to_alert_form
else:
initial = {
'query_string': request.META['QUERY_STRING'],
'how_many_bugs_at_time_of_request': len(bugs)
}
if request.user.is_authenticated():
initial['email'] = request.user.email
alert_form = mysite.search.forms.BugAlertSubscriptionForm(initial=initial)
data['subscribe_to_alert_form'] = alert_form
# FIXME
# The template has no way of grabbing what URLs to put in the [x]
# So we help it out here by hacking around our fruity list-of-dicts
# data structure.
facet2any_query_string = {}
for facet in query.active_facet_options:
facet2any_query_string[facet] = query.get_facet_options(
facet, [''])[0]['query_string']
Bug = mysite.search.models.Bug
from django.db.models import Q, Count
data['popular_projects'] = list(Project.objects.filter(name__in=['Miro', 'GnuCash', 'brasero', 'Evolution Exchange', 'songbird']).order_by('name').reverse())
data['all_projects'] = Project.objects.values('pk','name').filter(bug__looks_closed=False).annotate(Count('bug')).order_by('name')
Person = mysite.profile.models.Person
import random
random_start = int(random.random() * 700)
data['contributors'] = Person.objects.all()[random_start:random_start+5]
data['contributors2'] = Person.objects.all()[random_start+10:random_start+15]
data['languages'] = Project.objects.all().values_list('language', flat=True).order_by('language').exclude(language='').distinct()[:4]
if format == 'json':
# FIXME: Why `alert`?
return bugs_to_json_response(data, bugs, request.GET.get(
'jsoncallback', 'alert'))
else:
data['user'] = request.user
data['suggestions'] = suggestions
data['bunch_of_bugs'] = bugs
data['url'] = 'http://launchpad.net/'
data['total_bug_count'] = total_bug_count
data['facet2any_query_string'] = facet2any_query_string
data['project_count'] = mysite.search.controllers.get_project_count()
return mysite.base.decorators.as_view(request, 'search/search.html', data, slug=None)
def bugs_to_json_response(data, bunch_of_bugs, callback_function_name=''):
""" The search results page accesses this view via jQuery's getJSON method,
and loads its results into the DOM."""
# Purpose of this code: Serialize the list of bugs
# Step 1: Pull the bugs out of the database, getting them back
# as simple Python objects
obj_serializer = serializers.get_serializer('python')()
bugs = obj_serializer.serialize(bunch_of_bugs)
# Step 2: With a (tragically) large number of database calls,
# loop over these objects, replacing project primary keys with project
# display names.
for bug in bugs:
project = Project.objects.get(pk=int(bug['fields']['project']))
bug['fields']['project'] = project.display_name
# Step 3: Create a JSON-happy list of key-value pairs
data_list = [{'bugs': bugs}]
# Step 4: Create the string form of the JSON
json_as_string = simplejson.dumps(data_list, default=encode_datetime)
# Step 5: Prefix it with the desired callback function name
json_string_with_callback = callback_function_name + '(' + json_as_string + ')'
# Step 6: Return that.
return HttpResponse(json_string_with_callback)
def request_jquery_autocompletion_suggestions(request):
"""
Wraps get_autocompletion_suggestions and
list_to_jquery_autocompletion_format in an
HttpRequest -> HttpResponse loop.
Validates GET parameters. Expected:
?q=[suggestion fodder]
If q is absent or empty, this function
returns an HttpResponseServerError.
"""
partial_query = request.GET.get('q', None)
if (partial_query is None) or (partial_query == ''):
return HttpResponseServerError("Need partial_query in GET")
# jQuery autocomplete also gives us this variable:
# timestamp = request.GET.get('timestamp', None)
suggestions_list = get_autocompletion_suggestions(partial_query)
suggestions_string = list_to_jquery_autocompletion_format(
suggestions_list)
return HttpResponse(suggestions_string)
def list_to_jquery_autocompletion_format(list):
"""Converts a list to the format required by
jQuery's autocomplete plugin."""
return "\n".join(list)
class SearchableField:
"A field in the database you can search."
fields_by_prefix = {}
def __init__(self, _prefix):
self.prefix = _prefix
self.is_queried = False
self.fields_by_prefix[self.prefix] = self
def get_autocompletion_suggestions(input):
"""
This method returns a list of suggested queries.
It checks the query substring against a number of
fields in the database:
- project.display_name
- project.language
Not yet implemented:
- libraries (frameworks? toolkits?) like Django
- search by date
"""
sf_project = SearchableField('project')
sf_language = SearchableField('lang')
sf_dependency = SearchableField('dep')
sf_library = SearchableField('lib')
sf_date_before = SearchableField('before')
sf_date_after = SearchableField('after')
separator = ":"
prefix = ''
partial_query = ''
if separator in input[1:-1]:
prefix = input.split(separator)[0]
partial_query = input.split(separator)[1]
sf = SearchableField.fields_by_prefix.get(prefix, None)
if sf is not None:
sf.is_queried = True
# FIXME: What happens when
# the user enters a bad prefix?
else:
for p in SearchableField.fields_by_prefix:
SearchableField.fields_by_prefix[
p].is_queried = True
partial_query = input
project_max = 5
lang_max = 5
suggestions = []
if sf_project.is_queried:
# Compile list of projects
# XXX: This searches on display_name, as that is what the user is more
# likely to be trying to type. And also because it is display_name that
# search uses to query projects.
projects_by_name = Project.objects.filter(
display_name__istartswith=partial_query)
# FIXME: Is __istartswith faster than
# lowercasing and using startswith?
# Produce a list of names like ['Exaile', 'GNOME-DO', ...]
project_names = projects_by_name.values_list('display_name', flat=True)
# Limit
project_names = project_names[:project_max]
suggestions += [sf_project.prefix + separator + name
for name in project_names]
if sf_language.is_queried:
# For languages, get projects first
projects_by_lang = Project.objects.filter(
language__istartswith=partial_query)
# Then use bugs to compile a list of languages.
langs = projects_by_lang.values_list(
'language', flat=True).order_by(
'language')[:lang_max]
if langs:
suggestions += [sf_language.prefix + separator + lang
for lang in langs]
return suggestions
def subscribe_to_bug_alert_do(request):
confirmation_query_string_fragment = "&confirm_email_alert_signup=1"
alert_form = mysite.search.forms.BugAlertSubscriptionForm(request.POST)
query_string = request.POST.get('query_string', '') # Lacks initial '?'
query_string = query_string.replace(confirmation_query_string_fragment, '')
next = reverse(fetch_bugs) + '?' + query_string
if alert_form.is_valid():
alert = alert_form.save()
if request.user.is_authenticated():
alert.user = request.user
alert.save()
next += confirmation_query_string_fragment
return HttpResponseRedirect(next)
elif query_string:
# We want fetch_bugs to get the right query string but we can't exactly
# do that. What we *can* do is fiddle with the request obj we're about
# to pass to fetch_bugs.
# Commence fiddling.
request.GET = dict(parse_qsl(query_string))
return fetch_bugs(request, alert_form)
else:
# If user tries to do a different bug search after invalid form input
return HttpResponseRedirect(next + request.META['QUERY_STRING'])
def project_has_icon(request, project_name):
p = get_object_or_404(Project, name=project_name)
if p.date_icon_was_fetched_from_ohloh is None:
return HttpResponse("keep polling")
return HttpResponse(p.get_url_of_icon_or_generic())
"""
Ways we could do autocompletion:
Method 1.
Cache languages, search those first.
Ask server to give a list of projects beginning with "c"
Server returns list, cache that.
Method 2.
Ask server to give a list of projects and languages beginning with "c"
Add top 100 fulltext words to the mix.
"""
# vim: set ai ts=4 sw=4 et nu:
|
mzdaniel/oh-mainline
|
mysite/search/views.py
|
Python
|
agpl-3.0
| 13,739 |
#! /usr/bin/python
################################################################################
# This file is part of python_finite_volume_solver
# Copyright (C) 2017 Bert Vandenbroucke (bert.vandenbroucke@gmail.com)
#
# python_finite_volume_solver is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# python_finite_volume_solver is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with python_finite_volume_solver. If not, see
# <http://www.gnu.org/licenses/>.
################################################################################
################################################################################
# @file sodshock_lagrangian.py
#
# @brief Second order Lagrangian finite volume solution for the 1D Sod shock
# problem.
#
# This script is only intended as an example; many improvements are possible,
# and you should definitely try to write your own before looking at this file.
#
# Note also that Lagrangian schemes work best if every cell has a similar mass
# resolution, which is not the case in the specific setup used here. As a result
# the overall accuracy of this particular solution is not really better than the
# corresponding Eulerian version.
#
# @author Bert Vandenbroucke (bv7@st-andrews.ac.uk)
################################################################################
# Import the Riemann solver library. Note that this will only work if the file
# 'riemannsolver.py' is in the same directory as this script.
import riemannsolver as riemann
# Import the Python numerical libraries, as we need them for arange.
import numpy as np
# Import Matplotlib, which we will use to plot the results.
import pylab as pl
################################################################################
# some global definitions
################################################################################
# the constant adiabatic index
GAMMA = 5./3.
# the Riemann solver
solver = riemann.RiemannSolver(GAMMA)
# the constant time step
timestep = 0.001
# number of steps
numstep = 200
# number of cells
numcell = 100
################################################################################
# the actual program
################################################################################
##
# @ brief The Cell class.
##
class Cell:
##
# @brief Constructor.
#
# This method initializes some variables. This is not really necessary for
# most of them, but it provides a nice overview of which variables are
# actually part of the class (because Python does not have nice and clear
# class definitions like C++).
##
def __init__(self):
self._midpoint = 0.
self._volume = 0.
self._mass = 0.
self._momentum = 0.
self._energy = 0.
self._density = 0.
self._velocity = 0.
self._pressure = 0.
self._gradient_density = 0.
self._gradient_velocity = 0.
self._gradient_pressure = 0.
self._right_ngb = None
# Note: the surface area is not really necessary in the 1D case
self._surface_area = 1.
# NEW: velocity with which the cell itself moves
self._cell_velocity = 0.
# set up the cells
cells = []
for i in range(numcell):
cell = Cell()
cell._midpoint = (i + 0.5) / numcell
cell._volume = 1. / numcell
if i < numcell/2:
cell._mass = cell._volume
cell._energy = cell._volume / (GAMMA - 1.)
else:
cell._mass = 0.125 * cell._volume
cell._energy = 0.1 * cell._volume / (GAMMA - 1.)
cell._momentum = 0.
    # set the neighbour of the previous cell (only if there is a previous cell)
if len(cells) > 0:
cells[-1]._right_ngb = cell
cells.append(cell)
# do the actual time integration loop
for i in range(numstep):
# NEW: move the cells
for cell in cells:
cell._midpoint += timestep * cell._cell_velocity
# NEW: recompute cell volumes
for icell in range(numcell):
xmin = -cells[icell]._midpoint
if icell > 0:
xmin = cells[icell - 1]._midpoint
xplu = 2. - cells[icell]._midpoint
if icell < numcell - 1:
xplu = cells[icell + 1]._midpoint
cells[icell]._volume = 0.5 * (xplu - xmin)
# convert conserved into primitive variables
for cell in cells:
volume = cell._volume
mass = cell._mass
momentum = cell._momentum
energy = cell._energy
density = mass / volume
velocity = momentum / mass
pressure = (GAMMA - 1.) * (energy / volume - 0.5 * density * velocity**2)
cell._density = density
cell._velocity = velocity
cell._pressure = pressure
# reset cell velocity
cell._cell_velocity = velocity
# compute gradients for the primitive variables
for icell in range(numcell):
xcell = cells[icell]._midpoint
densitycell = cells[icell]._density
velocitycell = cells[icell]._velocity
pressurecell = cells[icell]._pressure
xmin = -cells[icell]._midpoint
densitymin = cells[icell]._density
velocitymin = -cells[icell]._velocity
pressuremin = cells[icell]._pressure
if icell > 0:
xmin = cells[icell - 1]._midpoint
densitymin = cells[icell - 1]._density
velocitymin = cells[icell - 1]._velocity
pressuremin = cells[icell - 1]._pressure
xplu = 2. - cells[icell]._midpoint
densityplu = cells[icell]._density
velocityplu = -cells[icell]._velocity
pressureplu = cells[icell]._pressure
if icell < numcell - 1:
xplu = cells[icell + 1]._midpoint
densityplu = cells[icell + 1]._density
velocityplu = cells[icell + 1]._velocity
pressureplu = cells[icell + 1]._pressure
dxplu = xplu - xcell
gradient_density_plu = (densityplu - densitycell) / dxplu
gradient_velocity_plu = (velocityplu - velocitycell) / dxplu
gradient_pressure_plu = (pressureplu - pressurecell) / dxplu
dxmin = xcell - xmin
gradient_density_min = (densitycell - densitymin) / dxmin
gradient_velocity_min = (velocitycell - velocitymin) / dxmin
gradient_pressure_min = (pressurecell - pressuremin) / dxmin
if abs(gradient_density_min) < abs(gradient_density_plu):
cells[icell]._gradient_density = gradient_density_min
else:
cells[icell]._gradient_density = gradient_density_plu
if abs(gradient_velocity_min) < abs(gradient_velocity_plu):
cells[icell]._gradient_velocity = gradient_velocity_min
else:
cells[icell]._gradient_velocity = gradient_velocity_plu
if abs(gradient_pressure_min) < abs(gradient_pressure_plu):
cells[icell]._gradient_pressure = gradient_pressure_min
else:
cells[icell]._gradient_pressure = gradient_pressure_plu
# solve the Riemann problem and do the flux exchanges
for cell in cells:
densityL = cell._density
velocityL = cell._velocity
pressureL = cell._pressure
xL = cell._midpoint
gradient_densityL = cell._gradient_density
gradient_velocityL = cell._gradient_velocity
gradient_pressureL = cell._gradient_pressure
# NEW: get the cell velocity
cell_velocityL = cell._cell_velocity
cell_right = cell._right_ngb
if not cell_right:
            # the last cell does not have a right neighbour: impose reflective
# boundary conditions
densityR = densityL
velocityR = -velocityL
pressureR = pressureL
xR = 2. - cell._midpoint
gradient_densityR = -cell._gradient_density
gradient_velocityR = cell._gradient_velocity
gradient_pressureR = -cell._gradient_pressure
cell_velocityR = -cell._cell_velocity
else:
densityR = cell_right._density
velocityR = cell_right._velocity
pressureR = cell_right._pressure
xR = cell_right._midpoint
gradient_densityR = cell_right._gradient_density
gradient_velocityR = cell_right._gradient_velocity
gradient_pressureR = cell_right._gradient_pressure
cell_velocityR = cell_right._cell_velocity
# extrapolate the variables from the cell midpoint position to the position
# of the face
dx = 0.5 * (xR - xL)
densityL_ext = densityL + dx * gradient_densityL
velocityL_ext = velocityL + dx * gradient_velocityL
pressureL_ext = pressureL + dx * gradient_pressureL
densityR_ext = densityR - dx * gradient_densityR
velocityR_ext = velocityR - dx * gradient_velocityR
pressureR_ext = pressureR - dx * gradient_pressureR
# predict variables forward in time for half a time step
densityL_ext -= 0.5 * timestep * (densityL * gradient_velocityL + \
velocityL * gradient_densityL)
velocityL_ext -= 0.5 * timestep * (velocityL * gradient_velocityL + \
gradient_pressureL / densityL)
pressureL_ext -= 0.5 * timestep * (velocityL * gradient_pressureL + \
GAMMA * pressureL * gradient_velocityL)
densityR_ext -= 0.5 * timestep * (densityR * gradient_velocityR + \
velocityR * gradient_densityR)
velocityR_ext -= 0.5 * timestep * (velocityR * gradient_velocityR + \
gradient_pressureR / densityR)
pressureR_ext -= 0.5 * timestep * (velocityR * gradient_pressureR + \
GAMMA * pressureR * gradient_velocityR)
# overwrite the left and right state with the extrapolated values
densityL = densityL_ext
velocityL = velocityL_ext
pressureL = pressureL_ext
densityR = densityR_ext
velocityR = velocityR_ext
pressureR = pressureR_ext
if densityL < 0. or pressureL < 0.:
print "Negative density or pressure!"
print "Density:", densityL
print "Pressure:", pressureL
exit()
if densityR < 0. or pressureR < 0.:
print "Negative density or pressure!"
print "Density:", densityR
print "Pressure:", pressureR
exit()
# NEW: boost to a frame moving with the face velocity
vface = 0.5 * (cell_velocityL + cell_velocityR)
velocityL -= vface
velocityR -= vface
# now feed everything to the Riemann solver (we ignore the last return
# value)
densitysol, velocitysol, pressuresol, _ = \
solver.solve(densityL, velocityL, pressureL,
densityR, velocityR, pressureR)
# NEW: deboost back to the lab frame
velocitysol += vface
# get the fluxes
# NEW: correction terms due to the movement of the face
flux_mass = densitysol * (velocitysol - vface)
flux_momentum = densitysol * velocitysol * (velocitysol - vface) + \
pressuresol
flux_energy = pressuresol * velocitysol + \
(pressuresol / (GAMMA - 1.) + \
0.5 * densitysol * velocitysol**2) * (velocitysol - vface)
# do the flux exchange
A = cell._surface_area
cell._mass -= flux_mass * A * timestep
cell._momentum -= flux_momentum * A * timestep
cell._energy -= flux_energy * A * timestep
if cell_right:
cell_right._mass += flux_mass * A * timestep
cell_right._momentum += flux_momentum * A * timestep
cell_right._energy += flux_energy * A * timestep
# we need to do something special for the left boundary of the first cell
# (we will just impose reflective boundary conditions)
# note that this also means that the face does not move, so no need to do
# anything different here
densityL = cells[0]._density
velocityL = -cells[0]._velocity
pressureL = cells[0]._pressure
xL = -cells[0]._midpoint
gradient_densityL = -cells[0]._gradient_density
gradient_velocityL = cells[0]._gradient_velocity
gradient_pressureL = -cells[0]._gradient_pressure
densityR = cells[0]._density
velocityR = cells[0]._velocity
pressureR = cells[0]._pressure
xR = cells[0]._midpoint
gradient_densityR = cells[0]._gradient_density
gradient_velocityR = cells[0]._gradient_velocity
gradient_pressureR = cells[0]._gradient_pressure
# extrapolate the variables from the cell midpoint position to the position
# of the face
dx = 0.5 * (xR - xL)
densityL_ext = densityL + dx * gradient_densityL
velocityL_ext = velocityL + dx * gradient_velocityL
pressureL_ext = pressureL + dx * gradient_pressureL
densityR_ext = densityR - dx * gradient_densityR
velocityR_ext = velocityR - dx * gradient_velocityR
pressureR_ext = pressureR - dx * gradient_pressureR
# predict variables forward in time for half a time step
densityL_ext -= 0.5 * timestep * (densityL * gradient_velocityL + \
velocityL * gradient_densityL)
velocityL_ext -= 0.5 * timestep * (velocityL * gradient_velocityL + \
gradient_pressureL / densityL)
pressureL_ext -= 0.5 * timestep * (velocityL * gradient_pressureL + \
GAMMA * pressureL * gradient_velocityL)
densityR_ext -= 0.5 * timestep * (densityR * gradient_velocityR + \
velocityR * gradient_densityR)
velocityR_ext -= 0.5 * timestep * (velocityR * gradient_velocityR + \
gradient_pressureR / densityR)
pressureR_ext -= 0.5 * timestep * (velocityR * gradient_pressureR + \
GAMMA * pressureR * gradient_velocityR)
# overwrite the left and right state with the extrapolated values
densityL = densityL_ext
velocityL = velocityL_ext
pressureL = pressureL_ext
densityR = densityR_ext
velocityR = velocityR_ext
pressureR = pressureR_ext
# call the Riemann solver
densitysol, velocitysol, pressuresol, _ = \
solver.solve(densityL, velocityL, pressureL, densityR, velocityR, pressureR)
# get the fluxes
flux_mass = densitysol * velocitysol
flux_momentum = densitysol * velocitysol**2 + pressuresol
flux_energy = (pressuresol * GAMMA / (GAMMA - 1.) + \
0.5 * densitysol * velocitysol**2) * velocitysol
# do the flux exchange
A = cells[0]._surface_area
cells[0]._mass += flux_mass * A * timestep
cells[0]._momentum += flux_momentum * A * timestep
cells[0]._energy += flux_energy * A * timestep
# reference solution: as the Sod shock problem is in fact a Riemann problem,
# this is just the actual solution of the Riemann problem, evaluated at the
# final time of the simulation.
xref = np.arange(0., 1., 0.001)
rhoref = [solver.solve(1., 0., 1., 0.125, 0., 0.1,
(x - 0.5) / (timestep * numstep))[0] \
for x in xref]
# plot the reference solution and the actual solution
pl.plot(xref, rhoref, "r-")
pl.plot([cell._midpoint for cell in cells],
[cell._density for cell in cells],
"k.")
pl.ylim(0., 1.1)
pl.xlabel("Position")
pl.ylabel("Density")
pl.tight_layout()
# save the plot as a PNG image
pl.savefig("sodshock_lagrangian.png")
|
bwvdnbro/python_finite_volume_solver
|
example_solutions/sodshock_lagrangian.py
|
Python
|
agpl-3.0
| 15,137 |
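The gradient selection done per cell above (keep whichever one-sided gradient has the smaller magnitude) can be read as a compact helper. The sketch below is purely illustrative: the function name is not part of the repository, and unlike a strict minmod limiter it does not zero the gradient when the two one-sided estimates disagree in sign, matching the behaviour of the loop above.
def select_limited_gradient(grad_minus, grad_plus):
    """Return whichever one-sided gradient has the smaller magnitude."""
    if abs(grad_minus) < abs(grad_plus):
        return grad_minus
    return grad_plus
# e.g. select_limited_gradient(-0.2, 0.5) returns -0.2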
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-18 07:37
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0157_entitymanager_entity'),
('attribution', '0015_attribution_deleted'),
]
operations = [
migrations.CreateModel(
name='AttributionChargeNew',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('deleted', models.BooleanField(default=False)),
('external_id', models.CharField(blank=True, max_length=100, null=True)),
('allocation_charge', models.DecimalField(blank=True, decimal_places=1, max_digits=6, null=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='AttributionNew',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('deleted', models.BooleanField(default=False)),
('external_id', models.CharField(blank=True, max_length=100, null=True)),
('changed', models.DateTimeField(auto_now=True, null=True)),
('function', models.CharField(blank=True, choices=[('COORDINATOR', 'COORDINATOR'), ('HOLDER', 'HOLDER'), ('CO_HOLDER', 'CO_HOLDER'), ('DEPUTY', 'DEPUTY'), ('DEPUTY_AUTHORITY', 'DEPUTY_AUTHORITY'), ('DEPUTY_SABBATICAL', 'DEPUTY_SABBATICAL'), ('DEPUTY_TEMPORARY', 'DEPUTY_TEMPORARY'), ('PROFESSOR', 'PROFESSOR'), ('INTERNSHIP_SUPERVISOR', 'INTERNSHIP_SUPERVISOR'), ('INTERNSHIP_CO_SUPERVISOR', 'INTERNSHIP_CO_SUPERVISOR')], db_index=True, max_length=35, null=True)),
('start_date', models.DateField(blank=True, null=True)),
('end_date', models.DateField(blank=True, null=True)),
('start_year', models.IntegerField(blank=True, null=True)),
('end_year', models.IntegerField(blank=True, null=True)),
('score_responsible', models.BooleanField(default=False)),
('learning_container_year', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.LearningContainerYear')),
('tutor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.Tutor')),
],
),
migrations.AddField(
model_name='attributionchargenew',
name='attribution',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='attribution.AttributionNew'),
),
migrations.AddField(
model_name='attributionchargenew',
name='learning_component_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.LearningComponentYear'),
),
migrations.AlterUniqueTogether(
name='attributionnew',
unique_together=set([('learning_container_year', 'tutor', 'function')]),
),
]
|
uclouvain/osis_louvain
|
attribution/migrations/0016_auto_20171018_0937.py
|
Python
|
agpl-3.0
| 3,172 |
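The migration above introduces AttributionNew with a (learning_container_year, tutor, function) uniqueness constraint and a charge model pointing at it. A minimal usage sketch, assuming the migrated models are exposed under attribution.models with the names declared here (the helper function is hypothetical):
from attribution.models import AttributionNew, AttributionChargeNew

def attach_charge(container_year, tutor, component_year, charge):
    # get_or_create avoids violating the unique_together constraint added by
    # the migration above; 'HOLDER' is one of the declared function choices.
    attribution, _ = AttributionNew.objects.get_or_create(
        learning_container_year=container_year,
        tutor=tutor,
        function='HOLDER',
    )
    return AttributionChargeNew.objects.create(
        attribution=attribution,
        learning_component_year=component_year,
        allocation_charge=charge,
    )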
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-15 08:03
from __future__ import unicode_literals
from decimal import Decimal
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
replaces = [('event', '0001_initial'), ('event', '0002_event_game'), ('event', '0003_auto_20161005_2231'), ('event', '0004_event_max_ratio_km'), ('event', '0005_auto_20161223_0730'), ('event', '0006_event_pos'), ('event', '0007_auto_20161223_1138'), ('event', '0008_auto_20170114_2009')]
dependencies = [
('player', '0006_remove_player_events'),
('game', '0002_auto_20161005_0750'),
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=200, null=True)),
('start_date', models.DateTimeField(blank=True, null=True)),
('end_date', models.DateTimeField(blank=True, null=True)),
('max_players', models.PositiveIntegerField(default=10)),
('price', models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=5)),
('game', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='events', to='game.Game')),
],
),
migrations.CreateModel(
name='Membership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.CharField(choices=[('registered', 'registered'), ('paying', 'paying'), ('payed', 'payed'), ('cancelled', 'cancelled')], default='registered', max_length=16)),
('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='event.Event')),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='player.Player')),
],
),
migrations.AddField(
model_name='event',
name='players',
field=models.ManyToManyField(through='event.Membership', to='player.Player'),
),
migrations.AddField(
model_name='event',
name='meeting_distance',
            field=models.PositiveIntegerField(blank=True, default=10, help_text='max meeting distance in m', null=True),
),
migrations.AddField(
model_name='event',
name='place',
field=django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326),
),
migrations.AddField(
model_name='event',
name='vision_distance',
            field=models.PositiveIntegerField(blank=True, default=1000, help_text='max vision distance in m', null=True),
),
]
|
wadobo/socializa
|
backend/event/migrations/0001_initial_squashed_0008_auto_20170114_2009.py
|
Python
|
agpl-3.0
| 3,007 |
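Because the squashed migration above gives Event a GeoDjango MultiPolygonField (place) in SRID 4326, a natural query is spatial containment. A minimal sketch, assuming the model is importable as event.models.Event (the helper name is invented for illustration):
from django.contrib.gis.geos import Point
from event.models import Event

def events_covering(lon, lat):
    """Return events whose playing area contains the given lon/lat position."""
    position = Point(lon, lat, srid=4326)
    return Event.objects.filter(place__contains=position)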
"""check builtin data descriptors such as mode and name attributes
on a file are correctly handled
bug notified by Pierre Rouleau on 2005-04-24
"""
__revision__ = None
class File(file): # pylint: disable=file-builtin
""" Testing new-style class inheritance from file"""
#
def __init__(self, name, mode="r", buffering=-1, verbose=False):
"""Constructor"""
self.was_modified = False
self.verbose = verbose
super(File, self).__init__(name, mode, buffering)
if self.verbose:
print("File %s is opened. The mode is: %s" % (self.name,
self.mode))
#
def write(self, a_string):
""" Write a string to the file."""
super(File, self).write(a_string)
self.was_modified = True
#
def writelines(self, sequence):
""" Write a sequence of strings to the file. """
super(File, self).writelines(sequence)
self.was_modified = True
#
def close(self):
"""Close the file."""
if self.verbose:
print("Closing file %s" % self.name)
super(File, self).close()
self.was_modified = False
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/pylint/test/input/func_noerror_new_style_class_py_30.py
|
Python
|
agpl-3.0
| 1,255 |
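This file is a pylint regression-test input rather than library code; the File class only works on Python 2, where the file builtin still exists. A minimal usage sketch (the path is a placeholder):
f = File("example.txt", "w", verbose=True)   # prints the open message
f.write("hello\n")
assert f.was_modified
f.close()                                    # prints the close message
assert not f.was_modified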
"""
CourseGrade Class
"""
from abc import abstractmethod
from collections import OrderedDict, defaultdict
from django.conf import settings
from lazy import lazy
from ccx_keys.locator import CCXLocator
from xmodule import block_metadata_utils
from .config import assume_zero_if_absent
from .subsection_grade import ZeroSubsectionGrade
from .subsection_grade_factory import SubsectionGradeFactory
from .scores import compute_percent
class CourseGradeBase(object):
"""
Base class for Course Grades.
"""
def __init__(self, user, course_data, percent=0.0, letter_grade=None, passed=False, force_update_subsections=False):
self.user = user
self.course_data = course_data
self.percent = percent
self.passed = passed
# Convert empty strings to None when reading from the table
self.letter_grade = letter_grade or None
self.force_update_subsections = force_update_subsections
def __unicode__(self):
return u'Course Grade: percent: {}, letter_grade: {}, passed: {}'.format(
unicode(self.percent),
self.letter_grade,
self.passed,
)
@property
def attempted(self):
"""
Returns whether at least one problem was attempted
by the user in the course.
"""
return False
def subsection_grade(self, subsection_key):
"""
Returns the subsection grade for the given subsection usage key.
Note: does NOT check whether the user has access to the subsection.
Assumes that if a grade exists, the user has access to it. If the
grade doesn't exist then either the user does not have access to
it or hasn't attempted any problems in the subsection.
"""
return self._get_subsection_grade(self.course_data.effective_structure[subsection_key])
@lazy
def graded_subsections_by_format(self):
"""
Returns grades for the subsections in the course in
a dict keyed by subsection format types.
"""
subsections_by_format = defaultdict(OrderedDict)
for chapter in self.chapter_grades.itervalues():
for subsection_grade in chapter['sections']:
if subsection_grade.graded:
graded_total = subsection_grade.graded_total
if graded_total.possible > 0:
subsections_by_format[subsection_grade.format][subsection_grade.location] = subsection_grade
return subsections_by_format
@lazy
def chapter_grades(self):
"""
Returns a dictionary of dictionaries.
The primary dictionary is keyed by the chapter's usage_key.
The secondary dictionary contains the chapter's
subsection grades, display name, and url name.
"""
course_structure = self.course_data.structure
grades = OrderedDict()
for chapter_key in course_structure.get_children(self.course_data.location):
grades[chapter_key] = self._get_chapter_grade_info(course_structure[chapter_key], course_structure)
return grades
@lazy
def subsection_grades(self):
"""
Returns an ordered dictionary of subsection grades,
keyed by subsection location.
"""
subsection_grades = defaultdict(OrderedDict)
for chapter in self.chapter_grades.itervalues():
for subsection_grade in chapter['sections']:
subsection_grades[subsection_grade.location] = subsection_grade
return subsection_grades
@lazy
def problem_scores(self):
"""
Returns a dict of problem scores keyed by their locations.
"""
problem_scores = {}
for chapter in self.chapter_grades.itervalues():
for subsection_grade in chapter['sections']:
problem_scores.update(subsection_grade.problem_scores)
return problem_scores
def chapter_percentage(self, chapter_key):
"""
Returns the rounded aggregate weighted percentage for the given chapter.
Raises:
KeyError if the chapter is not found.
"""
earned, possible = 0.0, 0.0
chapter_grade = self.chapter_grades[chapter_key]
for section in chapter_grade['sections']:
earned += section.graded_total.earned
possible += section.graded_total.possible
return compute_percent(earned, possible)
def score_for_module(self, location):
"""
Calculate the aggregate weighted score for any location in the course.
This method returns a tuple containing (earned_score, possible_score).
If the location is of 'problem' type, this method will return the
possible and earned scores for that problem. If the location refers to a
        composite module (a vertical or section), the scores will be the sums of
all scored problems that are children of the chosen location.
"""
if location in self.problem_scores:
score = self.problem_scores[location]
return score.earned, score.possible
children = self.course_data.structure.get_children(location)
earned, possible = 0.0, 0.0
for child in children:
child_earned, child_possible = self.score_for_module(child)
earned += child_earned
possible += child_possible
return earned, possible
@lazy
def grader_result(self):
"""
Returns the result from the course grader.
"""
course = self._prep_course_for_grading(self.course_data.course)
return course.grader.grade(
self.graded_subsections_by_format,
generate_random_scores=settings.GENERATE_PROFILE_SCORES,
)
@property
def summary(self):
"""
Returns the grade summary as calculated by the course's grader.
DEPRECATED: To be removed as part of TNL-5291.
"""
# TODO(TNL-5291) Remove usages of this deprecated property.
grade_summary = self.grader_result
grade_summary['percent'] = self.percent
grade_summary['grade'] = self.letter_grade
return grade_summary
@classmethod
def get_subsection_type_graders(cls, course):
"""
Returns a dictionary mapping subsection types to their
corresponding configured graders, per grading policy.
"""
course = cls._prep_course_for_grading(course)
return {
subsection_type: subsection_type_grader
for (subsection_type_grader, subsection_type, _)
in course.grader.subgraders
}
@classmethod
def _prep_course_for_grading(cls, course):
"""
Make sure any overrides to the grading policy are used.
This is most relevant for CCX courses.
Right now, we still access the grading policy from the course
object. Once we get the grading policy from the BlockStructure
this will no longer be needed - since BlockStructure correctly
retrieves/uses all field overrides.
"""
if isinstance(course.id, CCXLocator):
# clean out any field values that may have been set from the
# parent course of the CCX course.
course._field_data_cache = {} # pylint: disable=protected-access
# this is "magic" code that automatically retrieves any overrides
# to the grading policy and updates the course object.
course.set_grading_policy(course.grading_policy)
return course
def _get_chapter_grade_info(self, chapter, course_structure):
"""
Helper that returns a dictionary of chapter grade information.
"""
chapter_subsection_grades = self._get_subsection_grades(course_structure, chapter.location)
return {
'display_name': block_metadata_utils.display_name_with_default_escaped(chapter),
'url_name': block_metadata_utils.url_name_for_block(chapter),
'location': block_metadata_utils.location(chapter),
'sections': chapter_subsection_grades,
}
def _get_subsection_grades(self, course_structure, chapter_key):
"""
Returns a list of subsection grades for the given chapter.
"""
return [
self._get_subsection_grade(course_structure[subsection_key], self.force_update_subsections)
for subsection_key in _uniqueify_and_keep_order(course_structure.get_children(chapter_key))
]
@abstractmethod
def _get_subsection_grade(self, subsection, force_update_subsections=False):
"""
Abstract method to be implemented by subclasses for returning
the grade of the given subsection.
"""
raise NotImplementedError
class ZeroCourseGrade(CourseGradeBase):
"""
Course Grade class for Zero-value grades when no problems were
attempted in the course.
"""
def _get_subsection_grade(self, subsection, force_update_subsections=False):
return ZeroSubsectionGrade(subsection, self.course_data)
class CourseGrade(CourseGradeBase):
"""
Course Grade class when grades are updated or read from storage.
"""
def __init__(self, user, course_data, *args, **kwargs):
super(CourseGrade, self).__init__(user, course_data, *args, **kwargs)
self._subsection_grade_factory = SubsectionGradeFactory(user, course_data=course_data)
def update(self):
"""
Updates the grade for the course. Also updates subsection grades
if self.force_update_subsections is true, via the lazy call
to self.grader_result.
"""
# TODO update this code to be more functional and readable.
# Currently, it is hard to follow since there are plenty of
# side-effects. Once functional, force_update_subsections
# can be passed through and not confusingly stored and used
# at a later time.
grade_cutoffs = self.course_data.course.grade_cutoffs
self.percent = self._compute_percent(self.grader_result)
self.letter_grade = self._compute_letter_grade(grade_cutoffs, self.percent)
self.passed = self._compute_passed(grade_cutoffs, self.percent)
return self
@lazy
def attempted(self):
"""
Returns whether any of the subsections in this course
have been attempted by the student.
"""
if assume_zero_if_absent(self.course_data.course_key):
return True
for chapter in self.chapter_grades.itervalues():
for subsection_grade in chapter['sections']:
if subsection_grade.all_total.first_attempted:
return True
return False
def _get_subsection_grade(self, subsection, force_update_subsections=False):
if self.force_update_subsections:
return self._subsection_grade_factory.update(subsection, force_update_subsections=force_update_subsections)
else:
# Pass read_only here so the subsection grades can be persisted in bulk at the end.
return self._subsection_grade_factory.create(subsection, read_only=True)
@staticmethod
def _compute_percent(grader_result):
"""
Computes and returns the grade percentage from the given
result from the grader.
"""
return round(grader_result['percent'] * 100 + 0.05) / 100
def _compute_course_grade_total_raw(self):
"""
Computes and returns the raw grade earned and total for a course
"""
course_total_earned = 0.0
course_total_possible = 0.0
for chapter_key in self.chapter_grades.keys():
chapter_score = self.score_for_chapter(chapter_key)
course_total_earned += chapter_score[0]
course_total_possible += chapter_score[1]
return course_total_earned, course_total_possible
@staticmethod
def _compute_letter_grade(grade_cutoffs, percent):
"""
Computes and returns the course letter grade given the
inputs, as defined in the grading_policy (e.g. 'A' 'B' 'C')
or None if not passed.
"""
letter_grade = None
# Possible grades, sorted in descending order of score
descending_grades = sorted(grade_cutoffs, key=lambda x: grade_cutoffs[x], reverse=True)
for possible_grade in descending_grades:
if percent >= grade_cutoffs[possible_grade]:
letter_grade = possible_grade
break
return letter_grade
@staticmethod
def _compute_passed(grade_cutoffs, percent):
"""
Computes and returns whether the given percent value
is a passing grade according to the given grade cutoffs.
"""
nonzero_cutoffs = [cutoff for cutoff in grade_cutoffs.values() if cutoff > 0]
success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else None
return success_cutoff and percent >= success_cutoff
def _uniqueify_and_keep_order(iterable):
return OrderedDict([(item, None) for item in iterable]).keys()
|
proversity-org/edx-platform
|
lms/djangoapps/grades/course_grade.py
|
Python
|
agpl-3.0
| 13,201 |
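The cutoff handling in _compute_letter_grade and _compute_passed can be exercised in isolation. The sketch below mirrors the descending-cutoff scan above; the cutoff values are invented purely for illustration and do not come from any real grading policy.
grade_cutoffs = {'A': 0.9, 'B': 0.8, 'C': 0.7}  # example values only

def letter_for(percent):
    # scan the possible grades from the highest cutoff down, as above
    for grade in sorted(grade_cutoffs, key=grade_cutoffs.get, reverse=True):
        if percent >= grade_cutoffs[grade]:
            return grade
    return None

print(letter_for(0.85))  # 'B'
print(letter_for(0.50))  # None: below every cutoff, so not passed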