| column | dtype | range / classes |
|---|---|---|
| repo_name | string | lengths 5–92 |
| path | string | lengths 4–232 |
| copies | string | 19 classes |
| size | string | lengths 4–7 |
| content | string | lengths 721–1.04M |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51–99.9 |
| line_max | int64 | 15–997 |
| alpha_frac | float64 | 0.25–0.97 |
| autogenerated | bool | 1 class |
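The columns above describe the per-file metadata carried alongside each source file in this dump. As a rough sketch of how that metadata can be used (the dataset identifier below is a placeholder, not the actual location of this dump, and the Hugging Face `datasets` library is assumed), rows can be filtered on the metadata columns before ever touching the large `content` field:

```python
from datasets import load_dataset  # assumes the Hugging Face `datasets` library

# Placeholder identifier -- substitute the real location of this dump.
ds = load_dataset("my-org/python-source-dump", split="train")

# Keep hand-written, permissively licensed files with a reasonable share of
# alphabetic characters, using only the metadata columns described above.
subset = ds.filter(
    lambda row: row["license"] in {"mit", "bsd-2-clause", "apache-2.0"}
    and not row["autogenerated"]
    and row["alpha_frac"] > 0.4
)
print(len(subset), "files kept")
```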
repo_name: arokem/PyEMMA | path: pyemma/msm/tests/test_its.py | copies: 1 | size: 9069
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Free University
# Berlin, 14195 Berlin, Germany.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
r"""Unit test for the its method
.. moduleauthor:: F.Noe <frank DOT noe AT fu-berlin DOT de>
.. moduleauthor:: B.Trendelkamp-Schroer <benjamin DOT trendelkamp-schroer AT fu-berlin DOT de>
"""
import unittest
import numpy as np
from pyemma import msm
from pyemma.msm.analysis import timescales
from pyemma.msm.api import timescales_msm
class TestITS_MSM(unittest.TestCase):
def setUp(self):
from pyemma.msm.generation import generate_traj
self.dtrajs = []
# simple case
dtraj_simple = [0, 1, 1, 1, 0]
self.dtrajs.append([dtraj_simple])
# as ndarray
self.dtrajs.append([np.array(dtraj_simple)])
dtraj_disc = [0, 1, 1, 0, 0]
self.dtrajs.append([dtraj_disc])
# multitrajectory case
self.dtrajs.append([[0], [1, 1, 1, 1], [0, 1, 1, 1, 0], [0, 1, 0, 1, 0, 1, 0, 1]])
# large-scale case
large_trajs = []
for i in range(10):
large_trajs.append(np.random.randint(10, size=1000))
self.dtrajs.append(large_trajs)
# Markovian timeseries with timescale about 5
self.P2 = np.array([[0.9, 0.1], [0.1, 0.9]])
self.dtraj2 = generate_traj(self.P2, 1000)
self.dtrajs.append([self.dtraj2])
# Markovian timeseries from a four-state chain with a longer timescale, coarse-grained to two states below
self.P4 = np.array([[0.95, 0.05, 0.0, 0.0],
[0.05, 0.93, 0.02, 0.0],
[0.0, 0.02, 0.93, 0.05],
[0.0, 0.0, 0.05, 0.95]])
self.dtraj4_2 = generate_traj(self.P4, 20000)
I = [0, 0, 1, 1] # coarse-graining
for i in range(len(self.dtraj4_2)):
self.dtraj4_2[i] = I[self.dtraj4_2[i]]
self.dtrajs.append([self.dtraj4_2])
# print "T4 ", timescales(self.P4)[1]
def compute_nice(self, reversible):
"""
Tests if standard its estimates run without errors
:return:
"""
for i in range(len(self.dtrajs)):
its = msm.timescales_msm(self.dtrajs[i], reversible=reversible)
# print its.get_lagtimes()
#print its.get_timescales()
def test_nice_sliding_rev(self):
"""
Tests if reversible sliding estimate runs without errors
:return:
"""
self.compute_nice(True)
def test_nice_sliding_nonrev(self):
"""
Tests if nonreversible sliding estimate runs without errors
:return:
"""
self.compute_nice(False)
def test_lag_generation(self):
its = msm.timescales_msm(self.dtraj4_2, lags=1000)
assert np.array_equal(its.lags, [1, 2, 3, 5, 8, 12, 18, 27, 41, 62, 93, 140, 210, 315, 473, 710])
def test_too_large_lagtime(self):
dtraj = [[0, 1, 1, 1, 0]]
lags = [1, 2, 3, 4, 5, 6, 7, 8]
expected_lags = [1, 2] # lags of 3 and above are impossible because there are no finite timescales.
its = msm.timescales_msm(dtraj, lags=lags, reversible=False)
# TODO: should catch warnings!
# with warnings.catch_warnings(record=True) as w:
# warnings.simplefilter("always")
# assert issubclass(w[-1].category, UserWarning)
got_lags = its.lagtimes
assert (np.shape(got_lags) == np.shape(expected_lags))
assert (np.allclose(got_lags, expected_lags))
def test_2(self):
t2 = timescales(self.P2)[1]
lags = [1, 2, 3, 4, 5]
its = msm.timescales_msm([self.dtraj2], lags=lags)
est = its.timescales[0]
assert (np.alltrue(est < t2 + 2.0))
assert (np.alltrue(est > t2 - 2.0))
def test_2_parallel(self):
t2 = timescales(self.P2)[1]
lags = [1, 2, 3, 4, 5]
its = timescales_msm([self.dtraj2], lags=lags, n_jobs=2)
est = its.timescales[0]
assert (np.alltrue(est < t2 + 2.0))
assert (np.alltrue(est > t2 - 2.0))
def test_4_2(self):
t4 = timescales(self.P4)[1]
lags = [int(t4)]
its = msm.timescales_msm([self.dtraj4_2], lags=lags)
est = its.timescales[0]
assert (np.alltrue(est < t4 + 20.0))
assert (np.alltrue(est > t4 - 20.0))
def test_fraction_of_frames(self):
dtrajs = [
[0, 1, 0], # These two will fail for lag >2
[1, 0, 1], # These two will fail for lag >2
[0, 1, 1, 1],
[1, 0, 0, 1],
[0, 1, 0, 1, 0],
[1, 0, 1, 0, 1],
]
lengths = [len(traj) for traj in dtrajs]
lags = [1, 2, 3]
its = msm.timescales_msm(dtrajs, lags=lags)
all_frames = np.sum(lengths)
longer_than_3 = np.sum(lengths[2:])
test_frac = longer_than_3/all_frames
assert np.allclose(its.fraction_of_frames, np.array([1, 1, test_frac]))
class TestITS_AllEstimators(unittest.TestCase):
""" Integration tests for various estimators
"""
@classmethod
def setUpClass(cls):
# load double well data
import pyemma.datasets
cls.double_well_data = pyemma.datasets.load_2well_discrete()
def test_its_msm(self):
estimator = msm.timescales_msm([self.double_well_data.dtraj_T100K_dt10_n6good], lags = [1, 10, 100, 1000])
ref = np.array([[ 174.22244263, 3.98335928, 1.61419816, 1.1214093 , 0.87692952],
[ 285.56862305, 6.66532284, 3.05283223, 2.6525504 , 1.9138432 ],
[ 325.35442195, 24.17388446, 20.52185604, 20.10058217, 17.35451648],
[ 343.53679359, 255.92796581, 196.26969348, 195.56163418, 170.58422303]])
# rough agreement with MLE
assert np.allclose(estimator.timescales, ref, rtol=0.1, atol=10.0)
def test_its_bmsm(self):
estimator = msm.its([self.double_well_data.dtraj_T100K_dt10_n6good], lags = [10, 50, 200],
errors='bayes', nsamples=1000)
ref = np.array([[ 284.87479737, 6.68390402, 3.0375248, 2.65314172, 1.93066562],
[ 320.08583492, 11.14612743, 10.3450663, 9.42799075, 8.2109752 ],
[ 351.41541961, 42.87427869, 41.17841657, 37.35485197, 23.24254608]])
# rough agreement with MLE
assert np.allclose(estimator.timescales, ref, rtol=0.1, atol=10.0)
# within left / right intervals. This test should fail only 1 out of 1000 times.
L, R = estimator.get_sample_conf(conf=0.999)
assert np.alltrue(L < estimator.timescales)
assert np.alltrue(estimator.timescales < R)
def test_its_hmsm(self):
estimator = msm.timescales_hmsm([self.double_well_data.dtraj_T100K_dt10_n6good], 2, lags = [1, 10, 100])
ref = np.array([[ 222.0641768 ],
[ 336.530405 ],
[ 369.57961198]])
assert np.allclose(estimator.timescales, ref, rtol=0.1, atol=10.0) # rough agreement
def test_its_bhmm(self):
estimator = msm.timescales_hmsm([self.double_well_data.dtraj_T100K_dt10_n6good], 2, lags = [1, 10, 100],
errors='bayes', nsamples=100)
ref = np.array([[ 222.0641768 ],
[ 332.57667046],
[ 370.33580404]])
# rough agreement with MLE
assert np.allclose(estimator.timescales, ref, rtol=0.1, atol=10.0)
# within left / right intervals. This test should fail only 1 out of 1000 times.
L, R = estimator.get_sample_conf(conf=0.999)
assert np.alltrue(L < estimator.timescales)
assert np.alltrue(estimator.timescales < R)
if __name__ == "__main__":
unittest.main()
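The `test_lag_generation` case above asserts a default lag series that grows roughly geometrically up to the requested maximum. Below is a minimal sketch of producing a similar progression; the growth factor of 1.5 and the rounding rule are assumptions, not necessarily what `timescales_msm` does internally:

```python
import numpy as np

def generate_lags(maxlag, multiplier=1.5):
    """Roughly geometric lag progression, similar in spirit to the series
    asserted in test_lag_generation (factor and rounding are assumptions)."""
    lags = [1]
    while lags[-1] * multiplier < maxlag:
        nxt = int(round(lags[-1] * multiplier))
        if nxt == lags[-1]:  # always advance, even when rounding stalls
            nxt += 1
        lags.append(nxt)
    return np.array(lags)

print(generate_lags(1000))
```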
license: bsd-2-clause | hash: 1,425,284,439,965,800,200 | line_mean: 40.605505 | line_max: 114 | alpha_frac: 0.595986 | autogenerated: false
repo_name: tflink/testCloud | path: testCloud.py | copies: 1 | size: 9870
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014, Red Hat, Inc.
# License: GPL-2.0+ <http://spdx.org/licenses/GPL-2.0+>
# See the LICENSE file for more details on Licensing
"""
This is a module for downloading fedora cloud images (and probably any other
qcow2) and then booting them locally with qemu.
"""
import os
import glob
import subprocess
import sys
import urllib2
import shutil
import config
def koji_download(urls):
""" Downloads files (qcow2s, specifically) from a list of URLs with an
optional progress bar. Returns a list of raw image files. """
# This code was blatantly stolen from fedimg - but it was deprecated,
# that's the internet version of sitting in front of someone's house with
# a sign saying "FREE." Thanks oddshocks!
# Create the proper local upload directory if it doesn't exist.
if not os.path.exists(config.LOCAL_DOWNLOAD_DIR):
os.makedirs(config.LOCAL_DOWNLOAD_DIR)
print "Local downloads will be stored in {}.".format(
config.LOCAL_DOWNLOAD_DIR)
# When qcow2s are downloaded and converted, they are added here
raw_files = list()
for url in urls:
file_name = url.split('/')[-1]
local_file_name = config.LOCAL_DOWNLOAD_DIR + file_name
u = urllib2.urlopen(url)
try:
with open(local_file_name, 'wb') as f:
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading {0} ({1} bytes)".format(url, file_size)
bytes_downloaded = 0
block_size = 8192
while True:
buff = u.read(block_size) # buffer
if not buff:
raw_files.append(local_file_name)
print "Succeeded at downloading {}".format(file_name)
break
bytes_downloaded += len(buff)
f.write(buff)
# note: this is the fraction downloaded so far, not the bytes remaining
fraction_downloaded = float(bytes_downloaded) / file_size
if config.DOWNLOAD_PROGRESS:
# TODO: Improve this progress indicator by making
# it more readable and user-friendly.
status = r"{0} [{1:.2%}]".format(bytes_downloaded,
fraction_downloaded)
status = status + chr(8) * (len(status) + 1)
sys.stdout.write(status)
return raw_files
except OSError:
print "Problem writing to {}.".format(config.LOCAL_DOWNLOAD_DIR)
def expand_qcow(image, size="+10G"):
"""Expand the storage for a qcow image. Currently only used for Atomic
Hosts."""
subprocess.call(['qemu-img',
'resize',
image,
size])
print "Resized image for Atomic testing..."
return
def create_user_data(path, password, overwrite=False, atomic=False):
"""Save the right password to the 'user-data' file needed to
emulate cloud-init. Default username on cloud images is "fedora"
Will not overwrite an existing user-data file unless
the overwrite kwarg is set to True."""
if atomic:
file_data = config.ATOMIC_USER_DATA % password
else:
file_data = config.USER_DATA % password
if os.path.isfile(path + '/meta/user-data'):
if overwrite:
with open(path + '/meta/user-data', 'w') as user_file:
user_file.write(file_data)
return "user-data file generated."
else:
return "user-data file already exists"
with open(path + '/meta/user-data', 'w') as user_file:
user_file.write(file_data)
return "user-data file generated."
def create_meta_data(path, hostname, overwrite=False):
"""Save the required hostname data to the 'meta-data' file needed to
emulate cloud-init.
Will not overwrite an existing user-data file unless
the overwrite kwarg is set to True."""
file_data = config.META_DATA % hostname
if os.path.isfile(path + '/meta/meta-data'):
if overwrite:
with open(path + '/meta/meta-data', 'w') as meta_data_file:
meta_data_file.write(file_data)
return "meta-data file generated."
else:
return "meta-data file already exists"
with open(path + '/meta/meta-data', 'w') as meta_data_file:
meta_data_file.write(file_data)
return "meta-data file generated."
def create_seed_img(meta_path, img_path):
"""Create a virtual filesystem needed for boot with virt-make-fs on a given
path (it should probably be somewhere in '/tmp')."""
make_image = subprocess.call(['virt-make-fs',
'--type=msdos',
'--label=cidata',
meta_path,
img_path + '/seed.img'])
if make_image == 0:
return "seed.img created at %s" % img_path
return "creation of the seed.img failed."
def download_initrd_and_kernel(qcow2_image, path):
"""Download the necessary kernal and initrd for booting a specified cloud
image. Returns a dict {'kernel': '', 'initrd': ''} after the download
is completed."""
subprocess.call(['virt-builder', '--get-kernel', qcow2_image], cwd=path)
result = {}
try:
result['kernel'] = glob.glob("%s/*vmlinuz*" % path)[0]
result['initrd'] = glob.glob("%s/*initramfs*" % path)[0]
except IndexError:
print "Unable to find kernel or initrd, did they download?"
return
return result
def boot_image(
qcow2, seed, initrd=None, kernel=None, ram=1024, graphics=False,
vnc=False, atomic=False):
"""Boot the cloud image redirecting local port 8888 to 80 on the vm as
well as local port 2222 to 22 on the vm so http and ssh can be accessed."""
boot_args = ['/usr/bin/qemu-kvm',
'-m',
str(ram),
'-drive',
'file=%s,if=virtio' % qcow2,
'-drive',
'file=%s,if=virtio' % seed,
'-redir',
'tcp:2222::22',
'-redir',
'tcp:8888::80',
]
if not atomic:
boot_args.extend(['-kernel',
'%s' % kernel,
'-initrd',
'%s' % initrd,
'-append',
'root=/dev/vda1 ro ds=nocloud-net'
])
if graphics:
boot_args.extend(['-nographic'])
if vnc:
boot_args.extend(['-vnc', '0.0.0.0:1'])
vm = subprocess.Popen(boot_args)
print "Successfully booted your local cloud image!"
print "PID: %d" % vm.pid
return vm
def build_and_run(
image_url, ram=1024, graphics=False, vnc=False, atomic=False):
"""Run through all the steps."""
print "cleaning and creating dirs..."
clean_dirs()
create_dirs()
base_path = '/tmp/testCloud'
# Create cloud-init data
print "Creating meta-data..."
create_user_data(base_path, "passw0rd", atomic=atomic)
create_meta_data(base_path, "testCloud")
create_seed_img(base_path + '/meta', base_path)
# Download image and get kernel/initrd
image_file = '/tmp/' + image_url.split('/')[-1]
if not os.path.isfile(image_file):
print "downloading new image..."
image = koji_download([image_url])[0]
if atomic:
expand_qcow(image)
else:
print "using existing image..."
image = image_file
if not atomic:
external = download_initrd_and_kernel(image, base_path)
if atomic:
vm = boot_image(image,
base_path + '/seed.img',
ram=ram,
graphics=graphics,
vnc=vnc,
atomic=atomic)
else:
vm = boot_image(image,
base_path + '/seed.img',
external['initrd'],
external['kernel'],
ram=ram,
graphics=graphics,
vnc=vnc)
return vm
def create_dirs():
"""Create the dirs in /tmp that we need to store things."""
os.makedirs('/tmp/testCloud/meta')
return "Created tmp directories."
def clean_dirs():
"""Remove dirs after a test run."""
if os.path.exists('/tmp/testCloud'):
shutil.rmtree('/tmp/testCloud')
return "All cleaned up!"
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("url",
help="URL to qcow2 image is required.",
type=str)
parser.add_argument("--ram",
help="Specify the amount of ram for the VM.",
type=int,
default=512)
parser.add_argument("--no-graphic",
help="Turn off graphical display.",
action="store_true")
parser.add_argument("--vnc",
help="Turns on vnc at :1 to the instance.",
action="store_true")
parser.add_argument("--atomic",
help="Use this flag if you're booting an Atomic Host.",
action="store_true")
args = parser.parse_args()
gfx = False
vnc = False
atomic = False
if args.no_graphic:
gfx = True
if args.vnc:
vnc = True
if args.atomic:
atomic = True
build_and_run(args.url, args.ram, graphics=gfx, vnc=vnc, atomic=atomic)
if __name__ == '__main__':
main()
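The module above is normally driven through its argparse CLI in `main()`, but `build_and_run` is the real entry point. Here is a rough sketch of calling it programmatically, assuming a valid `config` module is importable and using a placeholder image URL:

```python
# Rough sketch of driving testCloud programmatically (not part of the original file).
# The qcow2 URL below is a placeholder, not a real download location.
import testCloud

IMAGE_URL = "https://example.org/images/Fedora-Cloud-Base.qcow2"

vm = testCloud.build_and_run(IMAGE_URL, ram=1024, vnc=True)
vm.wait()               # build_and_run returns the qemu subprocess.Popen handle
testCloud.clean_dirs()  # remove the /tmp/testCloud working directory afterwards
```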
license: gpl-2.0 | hash: -165,019,787,094,557,220 | line_mean: 29 | line_max: 79 | alpha_frac: 0.539716 | autogenerated: false
repo_name: Jitrixis/2ARC-Network-stack | path: TVpy/api.py | copies: 1 | size: 1265
__author__ = 'jitrixis'
from Factory.machinery import *
class Api:
def __init__(self, device="wlan0"):
self.__engine = Engine(device)
pass
def getIP(self, mac):
r = self.__engine.getArpIP(mac)
return r
def getMAC(self, ip):
r = self.__engine.getArpMAC(ip)
return r
def sendPing(self, ip):
r = self.__engine.ping(ip, 1)
return r
def sendManyPing(self, ip, salve):
r = self.__engine.ping(ip, salve)
return r
def listen(self, ip, dport):
self.__dport = dport
self.__sport = randint(0x0, 0xffff)
self.__ipdest = ip
def connect(self):
synack = self.__engine.sendSYNConn(self.__ipdest, self.__dport, self.__sport)
self.__engine.sendACKConn(synack)
def accept(self):
f = "tcp and host "+self.__ipdest
syn = sniff(filter=f, count=1)
self.__engine.sendSYNACKConn(syn)
def send(self, data):
data = self.__engine.sendPSHACKData(self.__ipdest, self.__dport, self.__sport, data)
return data
def recv(self):
f = "tcp and host "+self.__ipdest
pshack = sniff(filter=f, count=1)
self.__engine.sendACKData(pshack)
def close(self):
pass
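The `Api` class above wraps the `Engine` from `Factory.machinery` behind a small socket-like interface. A rough usage sketch, assuming the `TVpy` package is importable and using placeholder interface and peer values:

```python
# Rough usage sketch for the Api wrapper above; device name and addresses are placeholders.
from TVpy.api import Api

api = Api(device="wlan0")
peer_ip = "192.168.1.10"

print(api.getMAC(peer_ip))       # ARP lookup
api.sendPing(peer_ip)            # single ICMP echo

api.listen(peer_ip, dport=8080)  # remember the peer address and pick a source port
api.connect()                    # SYN -> SYN/ACK -> ACK handshake via the engine
api.send("hello")                # PSH/ACK carrying the payload
api.close()
```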
license: mit | hash: -7,564,539,796,375,667,000 | line_mean: 25.32 | line_max: 92 | alpha_frac: 0.564427 | autogenerated: false
repo_name: SpamExperts/trac-slack | path: application.py | copies: 1 | size: 18428
#! /usr/bin/env python
# -*- coding: utf-8 -*-
""""""
import os
import pwd
import json
import logging
import calendar
import urlparse
import functools
try:
import configparser
except ImportError:
import ConfigParser as configparser
try:
from xmlrpc import client
except ImportError:
import xmlrpclib as client
try:
import raven
import raven.transport
from raven.contrib.flask import Sentry
from raven.handlers.logging import SentryHandler
_has_raven = True
except ImportError:
_has_raven = False
import flask
import flask.views
from flask import jsonify
from mimerender import FlaskMimeRender
import slackclient
import tracxml
import natural
import trac_to_markdown
from core import load_configuration
CONF = load_configuration()
# This is the WSGI application that we are creating.
application = flask.Flask(__name__)
mimerender = FlaskMimeRender()(default='json', json=jsonify)
slack_client = slackclient.SlackClient(CONF.get("slack", "bot_token"))
def setup_logging(logger):
user = CONF.get("logging", "user")
filename = CONF.get("logging", "file")
sentry = CONF.get("logging", "sentry")
level = getattr(logging, CONF.get("logging", "level").upper())
if user and pwd.getpwuid(os.getuid()).pw_name != user:
return
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
logger.setLevel(logging.DEBUG)
if filename:
file_handler = logging.FileHandler(filename)
file_handler.setFormatter(formatter)
file_handler.setLevel(level)
logger.addHandler(file_handler)
if sentry and _has_raven:
client = raven.Client(sentry,
enable_breadcrumbs=False,
transport=raven.transport.HTTPTransport)
# Wrap the application in Sentry middleware.
Sentry(application, client=client, logging=True,
level=logging.WARN)
# Add Sentry handle to application logger.
sentry_handler = SentryHandler(client)
sentry_handler.setLevel(logging.WARNING)
logger.addHandler(sentry_handler)
null_loggers = [
logging.getLogger("sentry.errors"),
logging.getLogger("sentry.errors.uncaught")
]
for null_logger in null_loggers:
null_logger.handlers = [logging.NullHandler()]
setup_logging(application.logger)
setup_logging(natural.logger)
def verify_token():
try:
token = flask.request.form["token"]
except KeyError:
token = json.loads(flask.request.form["payload"])["token"]
conf_token = CONF.get("slack", "token")
if token != conf_token:
return "Invalid token"
application.before_request(verify_token)
try:
import jsonrpclib.config
import jsonrpclib.jsonrpc
except ImportError:
trac_proxy = client.ServerProxy(
"https://%s:%s@%s/login/rpc" %
(CONF.get("trac", "user"), CONF.get("trac", "password"),
CONF.get("trac", "host")), transport=tracxml.SafeRequestsTransport()
)
else:
jsonrpclib.config.use_jsonclass = False
trac_proxy = jsonrpclib.jsonrpc.ServerProxy(
"https://%s:%s@%s/login/rpc" %
(CONF.get("trac", "user"), CONF.get("trac", "password"),
CONF.get("trac", "host")),
transport=tracxml.SafeJSONRequestsTransport()
)
INTRO = (u"Trac slash command allows you to query Trac tickets from slack. The "
u"Trac slash command will do its best to interpret your query from "
u"english, and translate to a Trac query. Some examples include:")
BODY_TEXT = u"""
• `/trac 12354` - to view detailed specifications of a ticket
• `/trac query status=new&type=task&priority=normal` - to view a list of tickets matching this query
• `/trac show my tickets` - to list all tickets assigned to you
• `/trac show my normal or higher bug tickets`
• `/trac list task tickets opened in the last three weeks`
• `/trac list tickets where I'm in the cc`
• `/trac list feature tickets that need testing modified in the last week`
• `/trac text like "Some error I know", opened two months ago`
"""
HELP_TEXT = INTRO + BODY_TEXT
QUERY_TEMPLATE = (u" • <https://%(host)s/ticket/%(number)s|#%(number)s> "
u"- %(summary)s")
BUG_DIALOG = {
"title": "Create a Trac bug ticket",
"elements": [
{
"label": "Description",
"type": "textarea",
"name": "description",
"hint": "Describe the bug",
},
{
"label": "Version",
"type": "text",
"subtype": "number",
"name": "version",
"hint": CONF.get("misc", "bug_dialog_version_hint"),
},
{
"label": "Steps to Reproduce",
"type": "textarea",
"name": "reproduce",
"hint": "Explain how to reproduce the bug",
"optional": True,
},
{
"label": "Log",
"type": "textarea",
"name": "log",
"hint": "Add any available log data",
"optional": True,
},
{
"label": "Link",
"type": "text",
"subtype": "url",
"name": "link",
"placeholder": CONF.get("misc", "bug_dialog_link_hint"),
"optional": True,
},
],
}
class QueryTrac(flask.views.MethodView):
_to_md = functools.partial(trac_to_markdown.convert,
base="https://%s" % CONF.get("trac", "host"),
flavour="mrkdwn")
@staticmethod
def _escape(value):
return value.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
@classmethod
def _get_tick_attributes(cls, ticket):
escape = cls._escape
to_md = cls._to_md
attributes = dict(trac_proxy.ticket.get(ticket)[3])
stamp = calendar.timegm(attributes['time'].timetuple())
attributes["stamp"] = stamp
attributes["host"] = CONF.get("trac", "host")
attributes["number"] = str(ticket)
attributes["summary"] = escape(attributes["summary"])
attributes["description"] = to_md(escape(attributes["description"]))
attributes["keywords"] = escape(attributes["keywords"])
if attributes["status"] == "closed":
attributes["status+"] = "%s: %s" % (attributes["status"],
attributes["resolution"])
else:
attributes["status+"] = attributes["status"]
return attributes
def _handle_query(self, query):
limit = int(CONF.get("trac", "limit"))
result = []
try:
tickets = trac_proxy.ticket.query(query)
except Exception:
return {"text": ("Oops, something went wrong! :sweat:\n"
"The query might not be valid?")}
total_tickets = len(tickets)
for ticket in tickets[:limit]:
attr = self._get_tick_attributes(ticket)
result.append(QUERY_TEMPLATE % attr)
if total_tickets > limit:
result.append("")
result.append("_%s tickets not shown!_" % (total_tickets - limit))
result.append("_The rest of the results available "
"<https://%s/query?%s|here>_" %
(CONF.get("trac", "host"), query))
elif not total_tickets:
result.append("No tickets found")
result.append("_See in <https://%s/query?%s|trac>_" %
(CONF.get("trac", "host"), query))
else:
result.append("_See in <https://%s/query?%s|trac>_" %
(CONF.get("trac", "host"), query))
return {"text": "\n".join(result), "response_type": "in_channel"}
@classmethod
def handle_describe(cls, query):
try:
ticket = trac_proxy.ticket.query(query)[0]
except IndexError:
# This should be ephemeral
return {"text": "No such ticket"}
attr = cls._get_tick_attributes(ticket)
color = "#f5f5ef"
if attr["type"] == "feature":
color = "good"
elif attr["type"] == "task":
color = "warning"
elif attr["type"] == "bug":
color = "danger"
fields = [
{
"title": field.title(),
"value": attr.get(field, "_(unknown)_"),
"short": True,
}
for field in CONF.get("trac", "describe_fields").split(",")
if attr[field]
]
return {
"response_type": "in_channel",
"attachments": [
{
"fallback": attr["summary"],
"color": color,
"title": attr["summary"],
"author_name": attr["owner"],
"title_link": "https://%(host)s/ticket/%(number)s" % attr,
"text": cls._to_md(attr["description"]),
"fields": fields,
"footer": "#%(number)s" % attr,
"ts": attr["stamp"],
"mrkdwn_in": ["text"],
}
]
}
def handle_help(self):
return {"text": HELP_TEXT}
def handle_adjust(self, user, query):
possible_fields = CONF.get("trac", "adjust_fields")
if not possible_fields:
return {"text": "Sorry, I'm not set up for this yet."}
possible_fields = possible_fields.split(",")
try:
ticket_id, field, value, details = query.split(None, 3)
except ValueError:
return {"text": "Sorry, I didn't understand that."}
if field not in possible_fields:
if len(possible_fields) == 1:
possible = possible_fields[0]
elif len(possible_fields) == 2:
possible = " or ".join(possible_fields)
else:
possible = ", ".join(possible_fields[:-1])
possible = "%s, or %s" % (possible, possible_fields[-1])
return {"text":
"Sorry, I don't know how to do that yet. Try %s." %
possible}
try:
value = float(value)
except ValueError:
# We probably could figure out an appropriate way to handle
# non-numeric fields.
return {"text": "Sorry, I can only increase numeric fields."}
try:
ticket_id = int(ticket_id.lstrip("#"))
except ValueError:
example_details = CONF.get("trac", "example_adjust_details")
return {
"text": "Sorry, I didn't understand that. I'm expecting "
"`adjust [ticket id] [field] [value] [details]`, like "
"`adjust #12345 %s 5 %s`" %
(possible_fields[0], example_details)}
try:
template = CONF.get("trac", "adjust_template_%s" % field)
except configparser.NoOptionError:
pass
else:
details = template % {"details": details, "value": value}
attributes = dict(trac_proxy.ticket.get(ticket_id)[3])
if attributes[field]:
try:
new_value = float(attributes[field]) + value
except ValueError:
return {"text": "Sorry, %s's %s is not a number." %
(ticket_id, field)}
else:
new_value = value
changes = {
"description": "%s\n%s" % (attributes["description"], details),
field: str(new_value),
}
trac_proxy.ticket.update(ticket_id, "", changes, True, user)
return {"text": "Done! New %s for #%s is %s" %
(field, ticket_id, new_value)}
def handle_new_bug(self):
return {
"text": "Create a bug ticket for which component?",
"response_type": "ephemeral",
"attachments": [
{
"fallback": "Upgrade your Slack client to use messages like these.",
"callback_id": "new_bug",
"actions": [
{
"name": "component",
"text": "Component",
"type": "select",
"data_source": "external",
"min_query_length": 3,
},
],
},
],
}
@mimerender
def post(self):
text = flask.request.form["text"]
user = flask.request.form["user_name"]
if text.lower() == "help":
return self.handle_help()
if text.lower() == "bug":
return self.handle_new_bug()
try:
command, query = text.split(None, 1)
assert command.lower() in ("describe", "show", "query", "adjust")
except (ValueError, AssertionError):
# Try to figure out what the user wants
try:
command, query = "describe", int(text.lstrip('#'))
except (ValueError, TypeError):
query = text
if "=" in text or "&" in text:
command = "query"
else:
command = "show"
command = command.lower()
if command == "describe":
return self.handle_describe("id=%s" % query)
if command == "adjust":
return self.handle_adjust(user, query)
if command == "show":
query = natural.natural_to_query(query, user)
if not query:
# Might be nice to have random responses.
return {
"text": ("Didn't quite get that :thinking_face: \n"
"Have you tried quoting your text searches?")}
return self._handle_query(query)
if command == "query":
return self._handle_query(query)
return {"text": "Invalid command: %s" % command}
application.add_url_rule(
CONF.get("slack", "endpoint"),
view_func=QueryTrac.as_view('trac_query')
)
# This could get loaded from the configuration / a file, and be less generic.
BUG_TEMPLATE = """%(description)s
=== Version ===
[%(version)s]
=== How to Reproduce ===
''Steps:''
%(steps)s
=== Error log output ===
%(log)s
=== Other notes ===
- [%(link)s %(link_name)s]
"""
def new_bug_ticket(user, data, component):
team = data["channel"]["name"].title()
form = data["submission"]
application.logger.info("Bug ticket: %r", form)
summary, description = form["description"].split(".", 1)
reporter = user
ticket_type = "bug"
priority = "normal"
if form.get("link"):
link_name = urlparse.urlparse(form["link"]).netloc
try:
link_name = CONF.get("misc", "link_%s" % link_name)
except (configparser.NoSectionError, configparser.NoOptionError):
link_name = link_name.split(".", 1)[0].title()
else:
link_name = ""
description = BUG_TEMPLATE % {
"description": description.strip(),
"version": form["version"],
"steps": form.get("reproduce") or "",
"log": form.get("log") or "",
"link": form.get("link") or "",
"link_name": link_name,
}
ticket_id = trac_proxy.ticket.create(
summary,
description,
{
"team": team,
"reporter": user,
"type": ticket_type,
"priority": priority,
"component": component,
},
True)
# Post a message to show the ticket was created.
response = QueryTrac.handle_describe("id=%s" % ticket_id)
slack_client.api_call(
"chat.postMessage", channel=data["channel"]["id"],
attachments=response["attachments"])
return ""
@application.route(CONF.get("slack", "action-endpoint"), methods=['POST'])
def slack_action():
"""Route the action to the appropriate method."""
data = json.loads(flask.request.form["payload"])
user = data["user"]["name"]
if data["type"] == "dialog_submission":
if data["callback_id"].startswith("new_bug_"):
component = data["callback_id"].split("_")[2]
return new_bug_ticket(user, data, component)
callback_id = data["callback_id"]
if callback_id.startswith("adjust_"):
field, tickets = callback_id.split("_")[1:]
tickets = tickets.split(",")
if len(tickets) == 1:
ticket_desc = "#%s" % tickets[0]
else:
ticket_desc = ("tickets %s" %
", ".join(["#%s" % ticket for ticket in tickets]))
# We only support one action.
action = data["actions"][0]
# We only support one option.
option = action["selected_options"][0]["value"]
for ticket in tickets:
changes = {field: option}
trac_proxy.ticket.update(int(ticket), "", changes, True, user)
return ("@%s set %s to %s for %s" %
(user, field, option, ticket_desc))
elif callback_id.startswith("new_bug"):
dialog = BUG_DIALOG.copy()
component = data["actions"][0]["selected_options"][0]["value"]
trigger_id = data["trigger_id"]
dialog["callback_id"] = "new_bug_%s" % component
open_dialog = slack_client.api_call("dialog.open", trigger_id=trigger_id,
dialog=dialog)
return "Unknown action."
@application.route(CONF.get("slack", "options-endpoint"), methods=['POST'])
@mimerender
def slack_options():
"""Provide options when users invoke message menus."""
data = json.loads(flask.request.form["payload"])
if data["name"] == "component":
try:
typeahead = data["value"]
except KeyError:
typeahead = ""
response = []
for component in trac_proxy.ticket.component.getAll():
if not typeahead or component.lower().startswith(typeahead):
response.append({"text": component, "value": component})
return {"options": response}
# Testing code.
if __name__ == "__main__":
application.run(debug=True)
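The application above registers the `QueryTrac` view at the endpoint named in the configuration and checks a shared `token` on every request. A rough sketch of exercising the slash-command handler with Flask's built-in test client; the endpoint path and token below are placeholders that must match the values in the loaded configuration:

```python
# Rough sketch; "/trac" and "dummy-token" must match the configured endpoint/token.
from application import application

client = application.test_client()
resp = client.post("/trac", data={
    "token": "dummy-token",         # checked by verify_token()
    "user_name": "alice",
    "text": "show my bug tickets",  # free text routed through natural.natural_to_query
})
print(resp.status_code, resp.get_data(as_text=True))
```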
license: gpl-2.0 | hash: 6,069,118,977,178,579,000 | line_mean: 33.092593 | line_max: 101 | alpha_frac: 0.533351 | autogenerated: false
repo_name: RobinQuetin/CAIRIS-web | path: cairis/cairis/controllers/UserController.py | copies: 1 | size: 2731
import httplib
from flask.ext.restful_swagger import swagger
from flask import request, make_response, session
from flask.ext.restful import Resource
from jsonpickle import encode
from Borg import Borg
from CairisHTTPError import MissingParameterHTTPError, MalformedJSONHTTPError
from tools.ModelDefinitions import UserConfigModel
from tools.SessionValidator import validate_proxy, get_logger
__author__ = 'Robin Quetin'
def set_dbproxy(conf):
b = Borg()
db_proxy = validate_proxy(None, -1, conf=conf)
pSettings = db_proxy.getProjectSettings()
id = b.init_settings()
db_proxy.close()
session['session_id'] = id
b.settings[id]['dbProxy'] = db_proxy
b.settings[id]['dbUser'] = conf['user']
b.settings[id]['dbPasswd'] = conf['passwd']
b.settings[id]['dbHost'] = conf['host']
b.settings[id]['dbPort'] = conf['port']
b.settings[id]['dbName'] = conf['db']
b.settings[id]['fontSize'] = pSettings['Font Size']
b.settings[id]['apFontSize'] = pSettings['AP Font Size']
b.settings[id]['fontName'] = pSettings['Font Name']
b.settings[id]['jsonPrettyPrint'] = conf.get('jsonPrettyPrint', False)
return b.settings[id]
class UserConfigAPI(Resource):
# region Swagger Doc
@swagger.operation(
notes='Sets up the user session',
nickname='user-config-post',
responseClass=str.__name__,
parameters=[
{
'name': 'body',
"description": "The configuration settings for the user's session",
"required": True,
"allowMultiple": False,
'type': UserConfigModel.__name__,
'paramType': 'body'
}
],
responseMessages=[
{
'code': httplib.BAD_REQUEST,
'message': 'The method is not callable without setting up a database connection'
},
{
'code': httplib.BAD_REQUEST,
'message': 'The provided parameters are invalid'
}
]
)
# endregion
def post(self):
try:
dict_form = request.get_json(silent=True)
if dict_form is False or dict_form is None:
raise MalformedJSONHTTPError(data=request.get_data())
logger = get_logger()
logger.info(dict_form)
s = set_dbproxy(dict_form)
resp_dict = {'session_id': s['session_id'], 'message': 'Configuration successfully applied'}
resp = make_response(encode(resp_dict), httplib.OK)
resp.headers['Content-type'] = 'application/json'
return resp
except KeyError:
return MissingParameterHTTPError()
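`set_dbproxy` above stashes the per-session database settings on a `Borg` object so every handler sees the same state. As a reminder of the pattern, here is a minimal monostate sketch; it is not the actual CAIRIS `Borg` class:

```python
# Minimal Borg (monostate) sketch -- not the actual CAIRIS implementation.
class Borg(object):
    _shared_state = {}

    def __init__(self):
        # Every instance re-binds its __dict__ to the same dictionary,
        # so attribute writes are visible through all instances.
        self.__dict__ = self._shared_state

a = Borg()
a.settings = {"session-1": {"dbUser": "cairis"}}
b = Borg()
print(b.settings)  # {'session-1': {'dbUser': 'cairis'}}
```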
license: apache-2.0 | hash: -2,372,690,068,079,911,400 | line_mean: 31.915663 | line_max: 104 | alpha_frac: 0.596851 | autogenerated: false
repo_name: cstorey/clap-rs | path: clap-tests/run_tests.py | copies: 1 | size: 8808
#!/usr/bin/env python
import sys
import subprocess
import re
import difflib
failed = False
_ansi = re.compile(r'\x1b[^m]*m')
_help = '''claptests v1.4.8
Kevin K. <kbknapp@gmail.com>
tests clap library
USAGE:
\tclaptests [FLAGS] [OPTIONS] [ARGS] [SUBCOMMAND]
FLAGS:
-f, --flag tests flags
-F tests flags with exclusions
-h, --help Prints help information
-V, --version Prints version information
OPTIONS:
-O, --Option <option3> tests options with specific value sets [values: fast slow]
--long-option-2 <option2> tests long options with exclusions
--maxvals3 <maxvals>... Tests 3 max vals
--minvals2 <minvals>... Tests 2 min vals
--multvals <one> <two> Tests mutliple values, not mult occs
--multvalsmo <one> <two> Tests mutliple values, and mult occs
-o, --option <opt>... tests options
ARGS:
positional tests positionals
positional2 tests positionals with exclusions
positional3... tests positionals with specific values
SUBCOMMANDS:
help Prints this message
subcmd tests subcommands'''
_version = "claptests v1.4.8"
_sc_dym_usage = '''error: The subcommand 'subcm' wasn't recognized
\tDid you mean 'subcmd' ?
If you believe you received this message in error, try re-running with 'claptests -- subcm'
USAGE:
\tclaptests [FLAGS] [OPTIONS] [ARGS] [SUBCOMMAND]
For more information try --help'''
_arg_dym_usage = '''error: Found argument '--optio' which wasn't expected, or isn't valid in this context
\tDid you mean --option ?
USAGE:
\tclaptests --option <opt>...
For more information try --help'''
_pv_dym_usage = '''error: 'slo' isn't a valid value for '--Option <option3>'
\t[valid values: fast slow]
Did you mean 'slow' ?
USAGE:
\tclaptests --Option <option3>
For more information try --help'''
_excluded = '''error: The argument '--flag' cannot be used with '-F'
USAGE:
\tclaptests [positional2] -F --long-option-2 <option2>
For more information try --help'''
_excluded_l = '''error: The argument '-f' cannot be used with '-F'
USAGE:
\tclaptests [positional2] -F --long-option-2 <option2>
For more information try --help'''
_required = '''error: The following required arguments were not provided:
\t[positional2]
\t--long-option-2 <option2>
USAGE:
\tclaptests [positional2] -F --long-option-2 <option2>
For more information try --help'''
_fop = '''flag present 1 times
option present 1 times with value: some
An option: some
positional present with value: value
flag2 NOT present
option2 maybe present with value of: Nothing
positional2 maybe present with value of: Nothing
option3 NOT present
positional3 NOT present
option present 1 times with value: some
An option: some
positional present with value: value
subcmd NOT present'''
_f2op = '''flag present 2 times
option present 1 times with value: some
An option: some
positional present with value: value
flag2 NOT present
option2 maybe present with value of: Nothing
positional2 maybe present with value of: Nothing
option3 NOT present
positional3 NOT present
option present 1 times with value: some
An option: some
positional present with value: value
subcmd NOT present'''
_o2p = '''flag NOT present
option present 2 times with value: some
An option: some
An option: other
positional present with value: value
flag2 NOT present
option2 maybe present with value of: Nothing
positional2 maybe present with value of: Nothing
option3 NOT present
positional3 NOT present
option present 2 times with value: some
An option: some
An option: other
positional present with value: value
subcmd NOT present'''
_schelp = '''claptests-subcmd 0.1
Kevin K. <kbknapp@gmail.com>
tests subcommands
USAGE:
\tclaptests subcmd [FLAGS] [OPTIONS] [--] [ARGS]
FLAGS:
-f, --flag tests flags
-h, --help Prints help information
-V, --version Prints version information
OPTIONS:
-o, --option <scoption>... tests options
ARGS:
scpositional tests positionals'''
_scfop = '''flag NOT present
option NOT present
positional NOT present
flag2 NOT present
option2 maybe present with value of: Nothing
positional2 maybe present with value of: Nothing
option3 NOT present
positional3 NOT present
option NOT present
positional NOT present
subcmd present
flag present 1 times
scoption present with value: some
An scoption: some
scpositional present with value: value'''
_scf2op = '''flag NOT present
option NOT present
positional NOT present
flag2 NOT present
option2 maybe present with value of: Nothing
positional2 maybe present with value of: Nothing
option3 NOT present
positional3 NOT present
option NOT present
positional NOT present
subcmd present
flag present 2 times
scoption present with value: some
An scoption: some
scpositional present with value: value'''
_bin = './target/release/claptests'
cmds = {'help short: ': ['{} -h'.format(_bin), _help, 0],
'help long: ': ['{} --help'.format(_bin), _help, 0],
'version short: ': ['{} -V'.format(_bin), _version, 0],
'version long: ': ['{} --version'.format(_bin), _version, 0],
'help subcmd: ': ['{} help'.format(_bin), _help, 0],
'missing required: ': ['{} -F'.format(_bin), _required, 1],
'F2(ss),O(s),P: ': ['{} value -f -f -o some'.format(_bin), _f2op, 0],
'arg dym: ': ['{} --optio=foo'.format(_bin), _arg_dym_usage, 1],
'O2(ll)P: ': ['{} value --option some --option other'.format(_bin), _o2p, 0],
'O2(l=l=)P: ': ['{} value --option=some --option=other'.format(_bin), _o2p, 0],
'O2(ss)P: ': ['{} value -o some -o other'.format(_bin), _o2p, 0],
'F2(s2),O(s),P: ': ['{} value -ff -o some'.format(_bin), _f2op, 0],
'F(s),O(s),P: ': ['{} value -f -o some'.format(_bin), _fop, 0],
'F(l),O(l),P: ': ['{} value --flag --option some'.format(_bin), _fop, 0],
'F(l),O(l=),P: ': ['{} value --flag --option=some'.format(_bin), _fop, 0],
'sc dym: ': ['{} subcm'.format(_bin), _sc_dym_usage, 1],
'sc help short: ': ['{} subcmd -h'.format(_bin), _schelp, 0],
'sc help long: ': ['{} subcmd --help'.format(_bin), _schelp, 0],
'scF(l),O(l),P: ': ['{} subcmd value --flag --option some'.format(_bin), _scfop, 0],
'scF(l),O(s),P: ': ['{} subcmd value --flag -o some'.format(_bin), _scfop, 0],
'scF(l),O(l=),P: ': ['{} subcmd value --flag --option=some'.format(_bin), _scfop, 0],
'scF(s),O(l),P: ': ['{} subcmd value -f --option some'.format(_bin), _scfop, 0],
'scF(s),O(s),P: ': ['{} subcmd value -f -o some'.format(_bin), _scfop, 0],
'scF(s),O(l=),P: ': ['{} subcmd value -f --option=some'.format(_bin), _scfop, 0],
'scF2(s),O(l),P: ': ['{} subcmd value -ff --option some'.format(_bin), _scf2op, 0],
'scF2(s),O(s),P: ': ['{} subcmd value -ff -o some'.format(_bin), _scf2op, 0],
'scF2(s),O(l=),P: ': ['{} subcmd value -ff --option=some'.format(_bin), _scf2op, 0],
'scF2(l2),O(l),P: ': ['{} subcmd value --flag --flag --option some'.format(_bin), _scf2op, 0],
'scF2(l2),O(s),P: ': ['{} subcmd value --flag --flag -o some'.format(_bin), _scf2op, 0],
'scF2(l2),O(l=),P: ': ['{} subcmd value --flag --flag --option=some'.format(_bin), _scf2op, 0],
'scF2(s2),O(l),P: ': ['{} subcmd value -f -f --option some'.format(_bin), _scf2op, 0],
'scF2(s2),O(s),P: ': ['{} subcmd value -f -f -o some'.format(_bin), _scf2op, 0],
'scF2(s2),O(l=),P: ': ['{} subcmd value -f -f --option=some'.format(_bin), _scf2op, 0]
}
def pass_fail(name, cmd, check, good):
sys.stdout.write(name)
if check == good:
print('Pass')
return 0
print('Fail')
print('\n\n{}\n# Should be:\n$ {}\n{}\n\n{}\n# But is:\n$ {}\n{}\n\n'.format('#'*25, cmd, good, '#'*25, cmd, check))
for line in difflib.context_diff(good.splitlines(), check.splitlines(), fromfile="Should Be", tofile="Currently Is", lineterm=""):
print(line)
print()
return 1
def main():
num_failed = 0
total = len(cmds)
for cmd, cmd_v in cmds.items():
proc = subprocess.Popen(cmd_v[0], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
out, err = proc.communicate()
out = _ansi.sub('', out.strip())
err = _ansi.sub('', err.strip())
rc = proc.returncode
if rc != cmd_v[-1]:
print('{}Fail (Exit Code={}; Should be={})'.format(cmd, rc, cmd_v[-1]))
num_failed += 1
continue
if out and rc == cmd_v[-1]:
num_failed += pass_fail(cmd, cmd_v[0], out, cmd_v[1])
elif rc == cmd_v[-1]:
num_failed += pass_fail(cmd, cmd_v[0], err, cmd_v[1])
print()
if num_failed:
print('{}/{} tests failed'.format(num_failed, total))
return 1
print('{}/{} tests passed!'.format(total, total))
if __name__ == '__main__':
sys.exit(main())
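Before diffing, the harness above strips ANSI colour codes with the `_ansi` pattern so coloured clap output compares equal to the plain expected strings. A standalone illustration of that stripping step (the sample string is made up):

```python
import re

# Same pattern run_tests.py uses to strip ANSI colour codes before comparing.
_ansi = re.compile(r'\x1b[^m]*m')

coloured = "\x1b[31merror:\x1b[0m something went wrong"
print(_ansi.sub('', coloured))  # -> "error: something went wrong"
```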
license: mit | hash: -5,824,107,350,463,208,000 | line_mean: 32.363636 | line_max: 131 | alpha_frac: 0.63238 | autogenerated: false
repo_name: aiyappaganesh/OP | path: handlers/request_handler.py | copies: 1 | size: 1093
from util import http_util
import webapp2
import webapp2_extras
from webapp2_extras import sessions
webapp2_extras.sessions.default_config['secret_key'] = 'asdfasdf'
webapp2_extras.sessions.default_config['cookie_name'] = 'underdogs'
APP_JSON = "application/json"
class RequestHandlerMixin(object):
def write(self,text=None, status=None, content_type = None):
http_util.write(self.response, text, status, content_type)
def set_status(self,value):
self.response.set_status(value)
def __getitem__(self, name):
return self.request.get(name, default_value=None)
def get_all(self, name):
return self.request.get_all(name, default_value=None)
class RequestHandler(webapp2.RequestHandler, RequestHandlerMixin):
def dispatch(self):
self.session_store = sessions.get_store(request=self.request)
try:
webapp2.RequestHandler.dispatch(self)
finally:
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
return self.session_store.get_session()
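The mixin and base class above add a session store plus small `write`/`__getitem__` helpers on top of webapp2. A minimal sketch of a handler built on them; the route, parameter name, and import path are hypothetical:

```python
# Hypothetical handler built on the RequestHandler base class above.
import json

import webapp2

from handlers.request_handler import RequestHandler

class HelloHandler(RequestHandler):
    def get(self):
        name = self['name'] or 'world'     # __getitem__ helper over request args
        self.session['last_name'] = name   # webapp2_extras session, saved in dispatch()
        self.write(json.dumps({'hello': name}), content_type='application/json')

app = webapp2.WSGIApplication([('/hello', HelloHandler)], debug=True)
```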
license: gpl-2.0 | hash: 7,885,501,371,286,362,000 | line_mean: 31.176471 | line_max: 69 | alpha_frac: 0.705398 | autogenerated: false
repo_name: ConvergenceDev/apiai-selvycure-booking-webhook | path: action_processor.py | copies: 1 | size: 5739
# -*- coding: utf-8 -*-
import os
from datetime import datetime, timedelta
import json
from apscheduler.schedulers.background import BackgroundScheduler
import requests
FACEBOOK_SEND_URL = "https://graph.facebook.com/v2.6/me/messages"
class ActionProcessor(object):
action = None
request = None
sender_id = None
page_access_token = None
def __init__(self, action, request):
super(ActionProcessor, self).__init__()
self.action = action
self.request = request
self.__init_sender_id()
self.__init_page_access_token()
def __init_page_access_token(self):
try:
self.page_access_token = os.environ["PAGE_ACCESS_TOKEN"]
except AttributeError:
print("can't extract page token access.")
def __init_sender_id(self):
try:
self.sender_id = self.request.get("originalRequest").get("data").get("sender").get("id")
except AttributeError:
print("can't extract sender id.")
def process_request(self):
pass
class BookingProcessor(ActionProcessor):
def __init__(self, action, request):
super(BookingProcessor, self).__init__(action, request)
def process_request(self):
super(BookingProcessor, self).process_request()
try:
params = self.request.get("result").get("contexts")[0].get("parameters")
booking_date = params.get("date")
symptoms = ",".join(symptom.encode('utf-8') for symptom in params.get("cold-symptom"))
department = params.get("department").encode('utf-8')
self.__reserve_message(booking_date)
self.__send_medical_certificate(symptoms, booking_date, department)
except AttributeError as e:
print(e.message)
return {}
def __reserve_message(self, booking_date):
message = BookingProcessor.get_message(booking_date)
time = BookingProcessor.get_message_reservation_time()
scheduler = BackgroundScheduler()
scheduler.add_job(BookingProcessor.send_message, 'date', run_date=time,
args=(self.page_access_token, self.sender_id, message))
scheduler.start()
@staticmethod
def get_message(booking_date):
return "{0} 병원 예약되어 있습니다.".format(booking_date)
@staticmethod
def get_message_reservation_time():
# use timedelta: replace(second=second + 30) raises ValueError when the current second is 30 or more
time = datetime.today() + timedelta(seconds=30)
return time
@staticmethod
def send_message(page_access_token, sender_id, message):
params = {
"access_token": page_access_token
}
headers = {
"Content-Type": "application/json"
}
data = json.dumps({
"recipient": {
"id": sender_id
},
"message": {
"text": message
}
})
r = requests.post(FACEBOOK_SEND_URL, params=params, headers=headers, data=data)
if r.status_code != 200:
print(r.status_code)
print(r.text)
def __send_medical_certificate(self, symptom, booking_date, department):
params = {
"access_token": self.page_access_token
}
headers = {
"Content-Type": "application/json"
}
data = json.dumps({
"recipient": {
"id": self.sender_id
},
"message": {
"attachment": {
"type": "template",
"payload": {
"template_type": "generic",
"elements": [
{
"title": "예약 확인",
# "image_url": "https://cdn.pixabay.com/photo/2013/07/13/13/34/diagnostics-161140_960_720.png",
"image_url": "http://dbscthumb.phinf.naver.net/2765_000_1/20131013154801108_XDA9SDAD9.jpg/60050.jpg?type=m250&wm=N",
"subtitle": "[환자 증상] {0} [진료 예약 날짜] {1} [진료과] {2}".format(symptom, booking_date,
department),
},
{
"title": "감기 원인",
"subtitle": "200여개 이상의 서로 다른 종류의 바이러스가 감기를 일으킨다. 감기 바이러스는 사람의 코나 목을 통해 들어와 감염을 일으킨다.",
},
{
"title": "감기 관련 증상",
"subtitle": "기침, 인두통 및 인후통, 비루, 비폐색, 재채기, 근육통, 발열",
},
{
"title": "진료과",
"subtitle": "가정의학과, 감염내과, 호흡기내과, 소아청소년과, 이비인후과",
},
{
"title": "예방방법",
"subtitle": "감기 바이러스 접촉 기회를 차단해야 한다. 손을 자주 씻어 감기 바이러스를 없애고 손으로 눈이나 코, 입을 비비지 않도록 한다.",
}
]
}
}
}
})
r = requests.post(FACEBOOK_SEND_URL, params=params, headers=headers, data=data)
if r.status_code != 200:
print(r.status_code)
print(r.text)
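`BookingProcessor.__reserve_message` above delays the reminder by registering a one-shot `'date'` job with APScheduler. A stripped-down sketch of that pattern on its own; the message text and five-second delay are placeholders:

```python
# Stripped-down sketch of the one-shot scheduling used by __reserve_message.
import time
from datetime import datetime, timedelta

from apscheduler.schedulers.background import BackgroundScheduler

def send_reminder(text):
    print("reminder:", text)

scheduler = BackgroundScheduler()
run_at = datetime.now() + timedelta(seconds=5)  # placeholder delay
scheduler.add_job(send_reminder, 'date', run_date=run_at,
                  args=("your appointment is coming up",))
scheduler.start()

time.sleep(6)        # keep the process alive long enough for the job to fire
scheduler.shutdown()
```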
license: apache-2.0 | hash: -8,093,412,793,826,818,000 | line_mean: 33.56129 | line_max: 148 | alpha_frac: 0.48404 | autogenerated: false
repo_name: cosbynator/NPSGD | path: npsgd_queue.py | copies: 1 | size: 18139
#!/usr/bin/python
# Author: Thomas Dimson [tdimson@gmail.com]
# Date: January 2011
# For distribution details, see LICENSE
"""Queue server for npsgd modelling tasks.
The queue server is the data backend for NPSGD. It listens to both workers and
the web interface. The web interface populates it with requests while the workers
poll for requests and pull them off the queue. Additionally, the queue is
responsible for sending out confirmation code e-mail messages.
"""
import os
import sys
import time
import pickle
import anydbm
import shelve
import logging
import tornado.web
import tornado.ioloop
import tornado.escape
import tornado.httpserver
import threading
from datetime import datetime
from optparse import OptionParser
import npsgd.email_manager
from npsgd.email_manager import Email
from npsgd import model_manager
from npsgd.config import config
from npsgd.task_queue import TaskQueue
from npsgd.task_queue import TaskQueueException
from npsgd.confirmation_map import ConfirmationMap
from npsgd.model_manager import modelManager
glb = None
"""Queue globals object - assigned at startup."""
class QueueGlobals(object):
"""Queue state objects along with disk serialization mechanisms for them."""
def __init__(self, shelve):
self.shelve = shelve
self.shelveLock = threading.RLock()
self.idLock = threading.RLock()
self.taskQueue = TaskQueue()
self.confirmationMap = ConfirmationMap()
if shelve.has_key("idCounter"):
self.idCounter = shelve["idCounter"]
else:
self.idCounter = 0
self.loadDiskTaskQueue()
self.loadConfirmationMap()
self.expireWorkerTaskThread = ExpireWorkerTaskThread(self.taskQueue)
self.lastWorkerCheckin = datetime(1,1,1)
def loadDiskTaskQueue(self):
"""Load task queue from disk using the shelve reserved for the queue."""
if not self.shelve.has_key("taskQueue"):
logging.info("Unable to read task queue from disk db, starting fresh")
return
logging.info("Reading task queue from disk")
readTasks = 0
failedTasks = 0
taskDicts = self.shelve["taskQueue"]
for taskDict in taskDicts:
try:
task = modelManager.getModelFromTaskDict(taskDict)
except model_manager.InvalidModelError, e:
emailAddress = taskDict["emailAddress"]
subject = config.lostTaskEmailSubject.generate(full_name=taskDict["modelFullName"],
visibleId=taskDict["visibleId"])
body = config.lostTaskEmailTemplate.generate()
emailObject = Email(emailAddress, subject, body)
logging.info("Invalid model-version pair, notifying %s", emailAddress)
npsgd.email_manager.backgroundEmailSend(Email(emailAddress, subject, body))
failedTasks += 1
continue
readTasks += 1
self.taskQueue.putTask(task)
logging.info("Read %s tasks, failed while reading %s tasks", readTasks, failedTasks)
def loadConfirmationMap(self):
"""Load confirmation map ([code, modelDict] pairs) from shelve reserved for the queue."""
if not self.shelve.has_key("confirmationMap"):
logging.info("Unable to read confirmation map from disk db, starting fresh")
return
logging.info("Reading confirmation map from disk")
confirmationMapEntries = self.shelve["confirmationMap"]
readCodes = 0
failedCodes = 0
for code, taskDict in confirmationMapEntries.iteritems():
try:
task = modelManager.getModelFromTaskDict(taskDict)
except model_manager.InvalidModelError, e:
emailAddress = taskDict["emailAddress"]
subject = config.confirmationFailedEmailSubject.generate(full_name=taskDict["modelFullName"],
visibleId=taskDict["visibleId"])
body = config.confirmationFailedEmailTemplate.generate(code=code)
emailObject = Email(emailAddress, subject, body)
logging.info("Invalid model-version pair, notifying %s", emailAddress)
npsgd.email_manager.backgroundEmailSend(Email(emailAddress, subject, body))
failedCodes += 1
continue
readCodes += 1
self.confirmationMap.putRequestWithCode(task, code)
logging.info("Read %s codes, failed while reading %s codes", readCodes, failedCodes)
def syncShelve(self):
"""Serializes the task queue, confirmation map and id counter to disk using the queue shelve."""
try:
with self.shelveLock:
self.shelve["taskQueue"] = [e.asDict() \
for e in self.taskQueue.allRequests()]
self.shelve["confirmationMap"] = dict( (code, task.asDict())\
for (code, task) in self.confirmationMap.getRequestsWithCodes())
with self.idLock:
self.shelve["idCounter"] = self.idCounter
except pickle.PicklingError, e:
logging.warning("Unable sync task queue and confirmation error to disk due to a pickling (serialization error): %s", e)
return
logging.info("Synced queue and confirmation map to disk")
def touchWorkerCheckin(self):
self.lastWorkerCheckin = datetime.now()
def newTaskId(self):
with self.idLock:
self.idCounter += 1
return self.idCounter
class ExpireWorkerTaskThread(threading.Thread):
"""Task Expiration Thread
Moves tasks back into the queue whenever we haven't
heard from a worker in a while.
"""
def __init__(self, taskQueue):
threading.Thread.__init__(self)
self.daemon = True
self.taskQueue = taskQueue
self.done = threading.Event()
def run(self):
logging.info("Expire worker task thread booting up...")
while True:
self.done.wait(config.keepAliveInterval)
if self.done.isSet():
break
badTasks = self.taskQueue.pullProcessingTasksOlderThan(
time.time() - config.keepAliveTimeout)
if len(badTasks) > 0:
logging.info("Found %d tasks to expire", len(badTasks))
for task in badTasks:
task.failureCount += 1
logging.warning("Task '%s' failed due to timeout (failure #%d)", task.taskId, task.failureCount)
if task.failureCount > config.maxJobFailures:
logging.warning("Exceeded max job failures, sending fail email")
npsgd.email_manager.backgroundEmailSend(task.failureEmail())
else:
logging.warning("Inserting task back in to queue with new taskId")
task.taskId = glb.newTaskId()
self.taskQueue.putTask(task)
class QueueRequestHandler(tornado.web.RequestHandler):
"""Superclass to all queue request methods."""
def checkSecret(self):
"""Checks the request for a 'secret' parameter that matches the queue's own."""
if self.get_argument("secret") == config.requestSecret:
return True
else:
self.write(tornado.escape.json_encode({"error": "bad_secret"}))
return False
class ClientModelCreate(QueueRequestHandler):
"""HTTP handler for clients creating a model request (before confirmation)."""
def post(self):
"""Post handler for model requests from the web daemon.
Attempts to build a model from its known models (essentially performing
parameter verification) then places a request in the queue if it succeeds.
Additionally, it will send out an e-mail to the user for confirmation of
the request
"""
if not self.checkSecret():
return
task_json = tornado.escape.json_decode(self.get_argument("task_json"))
task = modelManager.getModelFromTaskDict(task_json)
task.taskId = glb.newTaskId()
code = glb.confirmationMap.putRequest(task)
emailAddress = task.emailAddress
logging.info("Generated a request for %s, confirmation %s required", emailAddress, code)
subject = config.confirmEmailSubject.generate(task=task)
body = config.confirmEmailTemplate.generate(code=code, task=task, expireDelta=config.confirmTimeout)
emailObject = Email(emailAddress, subject, body)
npsgd.email_manager.backgroundEmailSend(emailObject)
glb.syncShelve()
self.write(tornado.escape.json_encode({
"response": {
"task" : task.asDict(),
"code" : code
}
}))
class ClientQueueHasWorkers(QueueRequestHandler):
"""Request handler for the web daemon to check if workers are available.
We keep track of the last time workers checked into the queue in order
to ensure that all requests can be processed.
"""
def get(self):
if not self.checkSecret():
return
td = datetime.now() - glb.lastWorkerCheckin
hasWorkers = (td.seconds + td.days * 24 * 3600) < config.keepAliveTimeout
self.write(tornado.escape.json_encode({
"response": {
"has_workers" : hasWorkers
}
}))
previouslyConfirmed = set()
class ClientConfirm(QueueRequestHandler):
"""HTTP handler for clients confirming a model request.
This handler moves requests from the confirmation map to the general
request queue for processing.
"""
def get(self, code):
global previouslyConfirmed
if not self.checkSecret():
return
try:
#Expire old confirmations first, just in case
glb.confirmationMap.expireConfirmations()
confirmedRequest = glb.confirmationMap.getRequest(code)
previouslyConfirmed.add(code)
except KeyError, e:
if code in previouslyConfirmed:
self.write(tornado.escape.json_encode({
"response": "already_confirmed"
}))
return
else:
raise tornado.web.HTTPError(404)
glb.taskQueue.putTask(confirmedRequest)
glb.syncShelve()
self.write(tornado.escape.json_encode({
"response": "okay"
}))
class WorkerInfo(QueueRequestHandler):
"""HTTP handler for workers checking into the queue."""
def get(self):
if not self.checkSecret():
return
glb.touchWorkerCheckin()
self.write("{}")
class WorkerTaskKeepAlive(QueueRequestHandler):
"""HTTP handler for workers pinging the queue while working on a task.
Having this request makes sure that we don't time out any jobs that
are currently being handled by some worker. If a worker goes down,
we will put the job back into the queue because this request won't have
been made.
"""
def get(self, taskIdString):
if not self.checkSecret():
return
glb.touchWorkerCheckin()
taskId = int(taskIdString)
logging.info("Got heartbeat for task id '%s'", taskId)
try:
task = glb.taskQueue.touchProcessingTaskById(taskId)
except TaskQueueException, e:
logging.info("Bad keep alive request: no such task id '%s' exists" % taskId)
self.write(tornado.escape.json_encode({
"error": {"type" : "bad_id" }
}))
self.write("{}")
class WorkerSucceededTask(QueueRequestHandler):
"""HTTP handler for workers telling the queue that they have succeeded processing.
After this request, the queue no longer needs to keep track of the job in any way
and declares it complete.
"""
def get(self, taskIdString):
if not self.checkSecret():
return
glb.touchWorkerCheckin()
taskId = int(taskIdString)
try:
task = glb.taskQueue.pullProcessingTaskById(taskId)
except TaskQueueException, e:
logging.info("Bad succeed request: no task id exists")
self.write(tornado.escape.json_encode({
"error": {"type" : "bad_id" }
}))
return
glb.syncShelve()
self.write(tornado.escape.json_encode({
"status": "okay"
}))
class WorkerHasTask(QueueRequestHandler):
"""HTTP handler for workers ensuring that a job still exists.
This handler helps eliminate certain race conditions in NPSGD. Before a
worker sends an e-mail with job results, it checks back with the queue to
make sure that the job hasn't already been handler by another worker
(this could happen if the queue declares that the first worker had timed out).
If there is no task with that id still in the processing list then
an e-mail being sent out would be a duplicate.
"""
def get(self, taskIdString):
if not self.checkSecret():
return
glb.touchWorkerCheckin()
taskId = int(taskIdString)
logging.info("Got 'has task' request for task of id '%d'", taskId)
if glb.taskQueue.hasProcessingTaskById(taskId):
self.write(tornado.escape.json_encode({
"response": "yes"
}))
else:
self.write(tornado.escape.json_encode({
"response": "no"
}))
class WorkerFailedTask(QueueRequestHandler):
"""HTTP handler for workers reporting failure to complete a job.
Upon failure, we will either recycle the request into the queue or we will
report a failure (with an e-mail message to the user).
"""
def get(self, taskIdString):
if not self.checkSecret():
return
glb.touchWorkerCheckin()
taskId = int(taskIdString)
try:
task = glb.taskQueue.pullProcessingTaskById(taskId)
except TaskQueueException, e:
logging.info("Bad failed request: no such task id exists, ignoring request")
self.write(tornado.escape.json_encode({
"error": {"type" : "bad_id" }
}))
return
task.failureCount += 1
logging.warning("Worker had a failure while processing task '%s' (failure #%d)",\
task.taskId, task.failureCount)
if task.failureCount >= config.maxJobFailures:
logging.warning("Max job failures found, sending failure email")
npsgd.email_manager.backgroundEmailSend(task.failureEmail())
else:
logging.warning("Returning task to queue for another attempt")
glb.taskQueue.putTask(task)
self.write(tornado.escape.json_encode({
"status": "okay"
}))
class WorkerTaskRequest(QueueRequestHandler):
"""HTTP handler for workers grabbings tasks off the queue."""
def post(self):
if not self.checkSecret():
return
modelVersions = tornado.escape.json_decode(self.get_argument("model_versions_json"))
glb.touchWorkerCheckin()
logging.info("Received worker task request with models %s", modelVersions)
if glb.taskQueue.isEmpty():
self.write(tornado.escape.json_encode({
"status": "empty_queue"
}))
else:
task = glb.taskQueue.pullNextVersioned(modelVersions)
            if task is None:
logging.info("Found no models in queue matching worker's supported versions")
self.write(tornado.escape.json_encode({
"status": "no_version"
}))
else:
glb.taskQueue.putProcessingTask(task)
self.write(tornado.escape.json_encode({
"task": task.asDict()
}))
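# Typical worker lifecycle against the endpoints wired up in main() below:
# POST /worker_work_task to claim a job, GET /worker_keep_alive_task/<id>
# while processing, then /worker_succeed_task/<id> or /worker_failed_task/<id>,
# with /worker_has_task/<id> as a final duplicate-email guard before results
# are mailed out.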
def main():
global glb
parser = OptionParser()
parser.add_option("-c", "--config", dest="config",
help="Config file", default="config.cfg")
parser.add_option("-p", "--port", dest="port",
help="Queue port number", default=9000)
parser.add_option('-l', '--log-filename', dest='log',
help="Log filename (use '-' for stderr)", default="-")
(options, args) = parser.parse_args()
config.loadConfig(options.config)
config.setupLogging(options.log)
model_manager.setupModels()
model_manager.startScannerThread()
if not os.path.exists(os.path.dirname(config.queueFile)):
logging.warning("Queue directory does not exist, attempting to create")
os.makedirs(os.path.dirname(config.queueFile))
try:
queueShelve = shelve.open(config.queueFile)
except anydbm.error:
logging.warning("Queue file '%s' is corrupt, removing and starting afresh", config.queueFile)
os.remove(config.queueFile)
queueShelve = shelve.open(config.queueFile)
try:
glb = QueueGlobals(queueShelve)
queueHTTP = tornado.httpserver.HTTPServer(tornado.web.Application([
(r"/worker_info", WorkerInfo),
(r"/client_model_create", ClientModelCreate),
(r"/client_queue_has_workers", ClientQueueHasWorkers),
(r"/client_confirm/(\w+)", ClientConfirm),
(r"/worker_failed_task/(\d+)", WorkerFailedTask),
(r"/worker_succeed_task/(\d+)", WorkerSucceededTask),
(r"/worker_has_task/(\d+)", WorkerHasTask),
(r"/worker_keep_alive_task/(\d+)", WorkerTaskKeepAlive),
(r"/worker_work_task", WorkerTaskRequest)
]))
queueHTTP.listen(options.port)
logging.info("NPSGD Queue Booted up, serving on port %d", options.port)
print >>sys.stderr, "NPSGD queue server listening on %d" % options.port
tornado.ioloop.IOLoop.instance().start()
finally:
queueShelve.close()
if __name__ == "__main__":
main()
|
bsd-3-clause
| -3,869,175,241,003,585,000 | 36.554865 | 131 | 0.621423 | false |
Lincoln-Cybernetics/Explore-
|
player.py
|
1
|
21476
|
import pygame
import random
class Player(pygame.sprite.Sprite):
def __init__(self, level, *groups):
super(Player, self).__init__(*groups)
#the game level
self.level = level
        #sprite groups
self.unpassable = pygame.sprite.Group()
#base image
self.gender = "M"
self.complexion = "B"
self.set_Image('R')
self.orient = 'R'
self.scrnx = 0
self.mapx = 0
self.scrny = 0
self.mapy = 0
#self.myloc = self.level.mymap[self.mapx][self.mapy]
#reference of old location data
self.pxs = self.scrnx
self.pys = self.scrny
self.pmx = self.mapx
self.pmy = self.mapy
self.bgsig = ""
#item inventory
self.inventory = {'axe': 0, 'wood': 0, 'telescope': 0, 'canteen':0, 'bearskin':0, 'squatchhide':0, 'yetiskin':0, 'coin':0, 'binoculars':0}
self.score = 0
#player stats
self.visibility = 1
self.vision = 1
self.televis = 0
self.screen_border = 1
self.AP_max = 10
self.AP_c = 10
self.APcost = {"U": 1, "D": 1, "L": 1, "R": 1, "UL":2, "UR": 2, "LL": 2, "LR":2, "Chop":3, "Plant": 3}
self.perks = []
#fighting
self.HP_max = 10
self.HP_c = 10
self.ATT = 3
self.DEF = 2
self.DMG = 2
self.enemy = 0
self.regeneration = 0
self.FOYflag = False
#status
self.Alive = True
self.HYD_max = 10
self.HYD_c = 10
self.skipflag = False
def spawn(self,x,y):
self.mapx = x
self.mapy = y
self.myloc = self.level.mymap[self.mapx][self.mapy]
def position_scrn(self,x,y):
self.scrnx = x
self.scrny = y
self.rect = pygame.rect.Rect((x * self.level.tilex, y * self.level.tiley), self.image.get_size())
self.prevrect = self.rect.copy()
def add_Perk(self,perk):
#must be called AFTER the map is created
self.perks.append(perk)
if perk == "Early Bird":
self.AP_max += 4
if perk == "Eagle Eye":
self.vision += 1
if perk == "Hearty":
self.HP_max += 5
self.HP_c += 5
if perk == "Strong":
self.DMG += 2
self.APcost["Chop"] -= 1
if perk == "Fighter":
self.ATT += 2
self.DEF += 2
if perk == "Swimmer":
for land in self.unpassable:
if land.flavor == "Ocean" or land.flavor == "Whirlpool":
self.unpassable.remove(land)
if perk == "Runner":
pass
if perk == "Mountaineer":
for land in self.unpassable:
if land.flavor == "Active Volcano":
self.unpassable.remove(land)
if perk == "Resilient":
self.regeneration += 1
def AP_check(self, biome, com):
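        # Total AP for an action = base command cost (self.APcost) plus the
        # target tile's AP_cost, discounted by movement perks (Swimmer, Runner,
        # Mountaineer), never below 1.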
tot = 0
tot += self.APcost[com]
tot += biome.AP_cost
if "Swimmer" in self.perks:
if biome.flavor == "Water" or biome.flavor == "Ocean" or biome.flavor == "Whirlpool":
if biome in self.unpassable:
self.unpassable.remove(biome)
tot -= 1
if "Runner" in self.perks:
if biome.flavor != "Water" or biome.flavor != "Ocean" or biome.flavor != "Whirlpool":
tot -= 1
if "Mountaineer" in self.perks:
if biome.flavnum == 12 or biome.flavnum == 11 or biome.flavnum == 10:
tot -= 1
if tot < 1:
tot = 1
return tot
def command(self, cmd):
#print cmd
#reference of old location data
self.myloc = self.level.mymap[self.mapx][self.mapy]
self.prevrect = self.rect.copy()
self.pxs = self.scrnx
self.pys = self.scrny
self.pmx = self.mapx
self.pmy = self.mapy
self.bgsig = ""
self.skipflag = False
#Move Up
if cmd == "U":
target = self.level.mymap[self.mapx][self.mapy-1]
APnum = self.AP_check(target, cmd)
if "Runner" in self.perks and target.flavnum != 13 and target.flavnum != 14 and target.flavnum != 15:
APnum -= 1
if APnum < 1:
APnum = 1
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapy -= 1
if self.scrny*self.level.tiley <= self.level.tiley*self.screen_border:
self.bgsig = "D"
self.level.move_BG(self.bgsig)
else:
self.move("U")
else:
self.skipflag = True
#Move Down
if cmd == "D":
target = self.level.mymap[self.mapx][self.mapy+1]
APnum = self.AP_check(target, cmd)
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapy += 1
if self.scrny*self.level.tiley >= self.level.winy-((self.level.tiley*(self.screen_border+1))):
self.bgsig = "U"
self.level.move_BG(self.bgsig)
else:
self.move("D")
else:
self.skipflag = True
#Move Left
if cmd == "L":
target = self.level.mymap[self.mapx-1][self.mapy]
APnum = self.AP_check(target, cmd)
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapx -= 1
if self.scrnx*self.level.tilex <= self.level.tilex*self.screen_border:
self.bgsig = "R"
self.level.move_BG(self.bgsig)
else:
self.move("L")
else:
self.skipflag = True
#Move Right
if cmd == "R":
target = self.level.mymap[self.mapx+1][self.mapy]
APnum = self.AP_check(target, cmd)
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapx += 1
if self.scrnx*self.level.tilex >= self.level.winx-((self.level.tilex*(self.screen_border+1))):
self.bgsig = "L"
self.level.move_BG(self.bgsig)
else:
self.move("R")
else:
self.skipflag = True
#Move Up and Left
if cmd == "UL":
target = self.level.mymap[self.mapx-1][self.mapy-1]
APnum = self.AP_check(target, cmd)
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapx -= 1
self.mapy -= 1
if self.scrny*self.level.tiley <= self.level.tiley*self.screen_border or self.scrnx*self.level.tilex <= self.level.tilex*self.screen_border:
self.bgsig = "LR"
self.level.move_BG(self.bgsig)
else:
self.move("UL")
else:
self.skipflag = True
#Move Up and Right
if cmd == "UR":
target = self.level.mymap[self.mapx+1][self.mapy-1]
APnum = self.AP_check(target, cmd)
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapx += 1
self.mapy -= 1
if self.scrny*self.level.tiley <= self.level.tiley*self.screen_border or self.scrnx*self.level.tilex >= self.level.winx-((self.level.tilex*(self.screen_border+1))):
self.bgsig = "LL"
self.level.move_BG(self.bgsig)
else:
self.move("UR")
else:
self.skipflag = True
#Move Down and Left
if cmd == "LL":
target = self.level.mymap[self.mapx-1][self.mapy+1]
APnum = self.AP_check(target, cmd)
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapx -= 1
self.mapy += 1
if self.scrny*self.level.tiley >= self.level.winy-((self.level.tiley*(self.screen_border+1))) or self.scrnx*self.level.tilex <= self.level.tilex*self.screen_border:
self.bgsig = "UR"
self.level.move_BG(self.bgsig)
else:
self.move("LL")
else:
self.skipflag = True
#Move Down and Right
if cmd == "LR":
target = self.level.mymap[self.mapx+1][self.mapy+1]
APnum = self.AP_check(target, cmd)
if target in self.unpassable:
pass
else:
if self.reckonAP(APnum):
self.mapx += 1
self.mapy += 1
if self.scrny*self.level.tiley >= self.level.winy-((self.level.tiley*(self.screen_border+1))) or self.scrnx*self.level.tilex >= self.level.winx-((self.level.tilex*(self.screen_border+1))):
self.bgsig = "UL"
self.level.move_BG(self.bgsig)
else:
self.move("LR")
else:
self.skipflag = True
#Chop Trees
if cmd == "Chop":
choppable = { "Dense Woods":4, "Medium Woods":3, "Light Woods": 2, "Grass and Trees":1, "Cactus":8 }
if self.inventory['axe'] > 0:
if self.level.mymap[self.mapx][self.mapy].flavor in choppable:
if self.reckonAP(self.APcost[cmd]):
if self.myloc.flavor == "Cactus":
self.HYD_c += 5
if self.HYD_c > self.HYD_max:
self.HYD_c = self.HYD_max
else:
self.inventory['wood'] += self.level.mymap[self.mapx][self.mapy].wood_level
self.level.mymap[self.mapx][self.mapy].set_type(choppable[self.level.mymap[self.mapx][self.mapy].flavor])
self.level.mymap[self.mapx][self.mapy].reset()
else:
self.skipflag = True
#Plant Trees
if cmd == "Plant":
if self.level.mymap[self.mapx][self.mapy].flavor == "Grassland":
if self.inventory['wood'] > 0:
if self.reckonAP(self.APcost[cmd]):
self.level.mymap[self.mapx][self.mapy].set_type(2)
self.inventory['wood'] -= 1
self.level.mymap[self.mapx][self.mapy].reset()
else:
self.skipflag = True
#Do Nothing
if cmd == "":
pass
if self.mobcheck() == False:
self.rect = self.prevrect
self.scrnx = self.pxs
self.scrny = self.pys
self.mapx = self.pmx
self.mapy = self.pmy
if self.bgsig != "":
bs = {"U":"D", "D":"U", "L":"R", "R":"L", "UL":"LR", "LR":"UL", "UR":"LL", "LL":"UR"}
self.level.move_BG(bs[self.bgsig])
self.enemy.set_Image("Fight")
self.level.display()
pygame.time.wait(500)
self.enemy.remember('img')
self.level.display()
self.myloc = self.level.mymap[self.mapx][self.mapy]
if self.skipflag == False:
self.mountainview()
self.spacecheck()
self.itemcheck()
self.hydrate()
self.TOcheck()
def move(self, vec):
if vec == "U":
self.scrny -= 1
if vec == "D":
self.scrny += 1
if vec == "L":
self.set_Image('L')
self.scrnx -= 1
if vec == "R":
self.set_Image('R')
self.scrnx += 1
if vec == "UL":
self.set_Image('L')
self.scrny -= 1
self.scrnx -= 1
if vec == "UR":
self.set_Image('R')
self.scrny -= 1
self.scrnx += 1
if vec == "LL":
self.set_Image('L')
self.scrny += 1
self.scrnx -= 1
if vec == "LR":
self.set_Image('R')
self.scrny += 1
            self.scrnx += 1
self.rect.x = self.scrnx*self.level.tilex
self.rect.y = self.scrny*self.level.tiley
def spacecheck(self):
for space in pygame.sprite.spritecollide(self, self.level.space, False):
self.skipflag = True
self.level.Game_Over = 1
def itemcheck(self):
for item in pygame.sprite.spritecollide(self, self.level.items, True):
self.score += item.points
if item.flavor == 'gem':
#self.level.Game_Over = 2
pass
if item.flavor == 'axe':
self.inventory['axe'] += 1
if item.flavor == 'sammich':
self.HP_c = self.HP_max
if item.flavor == 'telescope':
#self.visibility += 1
#if self.visibility > 4:
# self.visibility = 4
self.televis = 2
self.screen_border = 3
self.inventory['telescope'] += 1
if item.flavor == 'binoculars':
if self.inventory['telescope'] <= 0:
self.televis = 1
self.screen_border = 2
self.inventory['binoculars'] += 1
if item.flavor == 'canteen':
if self.inventory['canteen'] >= 4:
pass
else:
self.HYD_max += 10
self.inventory['canteen'] += 1
if item.flavor == 'coin':
self.inventory['coin'] += 1
def hydrate(self):
for mon in pygame.sprite.spritecollide(self, self.level.landmarks, False):
if mon.flavor == "FoY":
self.HYD_max = 1000
self.HYD_c = 1000
if self.FOYflag == False:
self.regeneration = 1
self.FOYflag = True
#self.HP_max = 1000
#self.HP_c = 1000
for land in pygame.sprite.spritecollide(self, self.level.terrain, False):
#print land.wood_level, land.wp
if land.flavor == "Scrub":
self.HYD_c -= 1
elif land.flavor == "Dunes":
self.HYD_c -= 2
elif land.flavor == "Cactus":
self.HYD_c -= 2
self.damage(1)
elif land.flavor == "Lava":
self.HYD_c -= 2
self.damage(2)
elif land.flavor == "Water":
self.HYD_c = self.HYD_max
elif land.flavor == "Oasis":
self.HYD_c += 10
if self.HYD_c <= 0:
self.HP_c -= 1
if self.HP_c <= 0:
self.level.Game_Over = 4
if self.HYD_c > self.HYD_max:
self.HYD_c = self.HYD_max
def TOcheck(self):
if self.skipflag == False:
APnums = []
APmin = self.AP_max
APnums.append(self.AP_check( self.level.mymap[self.mapx][self.mapy-1] , "U") )
APnums.append(self.AP_check( self.level.mymap[self.mapx][self.mapy+1] , "D") )
APnums.append(self.AP_check( self.level.mymap[self.mapx-1][self.mapy] , "L") )
APnums.append(self.AP_check( self.level.mymap[self.mapx+1][self.mapy] , "R") )
APnums.append(self.AP_check( self.level.mymap[self.mapx-1][self.mapy-1] , "UL") )
APnums.append(self.AP_check( self.level.mymap[self.mapx+1][self.mapy-1] , "UR") )
APnums.append(self.AP_check( self.level.mymap[self.mapx-1][self.mapy+1] , "LL") )
APnums.append(self.AP_check( self.level.mymap[self.mapx+1][self.mapy+1] , "LR") )
if self.inventory['axe'] > 0:
APnums.append( self.APcost["Chop"] )
if self.inventory['wood'] > 0:
APnums.append( self.APcost["Plant"] )
for pos in APnums:
if pos < APmin:
APmin = pos
if self.AP_c < APmin:
self.level.Turn_Over = 1
def mountainview(self, aug= 0):
if self.myloc.flavor == "Mountain" or self.myloc.flavor == "Extinct Volcano" or self.myloc.flavor == "Active Volcano":
self.visibility = self.vision + 2 + aug
for mon in pygame.sprite.spritecollide(self, self.level.landmarks, False):
if mon.flavor == "Peak":
self.visibility = self.vision + 3 + aug
elif self.myloc.flavor == "Hills":
self.visibility = self.vision + 1 + aug
else:
self.visibility = self.vision + aug
def set_Image(self, name):
xind = 7
yind = 2
if name == 'L':
self.orient = 'L'
if self.complexion == "B":
yind = 3
elif self.complexion == "W":
yind = 1
if self.gender == "M":
xind = 7
elif self.gender == "F":
xind = 5
if name == 'R':
self.orient = 'R'
if self.complexion == "B":
yind = 2
if self.complexion == "W":
yind = 0
if self.gender == "M":
xind = 7
elif self.gender == "F":
xind = 5
self.level.animator.set_Img(xind,yind)
self.image = self.level.animator.get_Img().convert()
self.image.set_colorkey((255,0,0))
if name == "Fight":
xind = random.randrange(4)
yind = 5
self.level.mobdraw.set_Img(xind,yind)
self.image = self.level.mobdraw.get_Img().convert()
self.image.set_colorkey((255,0,0))
def remember(self,parameter):
if parameter == 'img':
self.set_Image(self.orient)
else:
pass
def reckonAP(self, cost):
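        # Spend `cost` action points if affordable; returns False without
        # spending anything, so callers treat the move as skipped.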
if self.AP_c >= cost:
self.AP_c -= cost
return True
else:
return False
def mobcheck(self):
for mob in pygame.sprite.spritecollide(self, self.level.fightable, False):
self.enemy = 0
if mob == self:
pass
else:
self.enemy = mob
if mob.Alive == True:
self.fight(mob)
return False
if mob.Alive == False:
for item in mob.inventory.keys():
self.inventory[item] += mob.inventory[item]
mob.kill()
return True
return True
def fight(self, opponent):
if self.ATT > opponent.DEF:
opponent.damage(self.DMG, self)
def damage(self, dmg, source= None):
sanctuary = False
for mon in pygame.sprite.spritecollide(self, self.level.landmarks, False):
if mon.flavor == "Henge":
sanctuary = True
if sanctuary:
pass
else:
self.HP_c -= dmg
if self.HP_c <= 0:
self.level.Game_Over = 3
|
unlicense
| -5,346,653,016,161,406,000 | 37.145648 | 212 | 0.429037 | false |
Microsoft/PTVS
|
Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/conda_env/env.py
|
1
|
5704
|
from __future__ import absolute_import, print_function
from collections import OrderedDict
from copy import copy
from itertools import chain
import os
from conda.base.context import context
from conda.cli import common  # TODO: this should never have to import from conda.cli
from conda.common.serialize import yaml_load_standard
from conda.core.prefix_data import linked
from conda.models.match_spec import MatchSpec
from conda_env.yaml import dump
from . import compat, exceptions, yaml
from .pip_util import add_pip_installed
def load_from_directory(directory):
"""Load and return an ``Environment`` from a given ``directory``"""
files = ['environment.yml', 'environment.yaml']
while True:
for f in files:
try:
return from_file(os.path.join(directory, f))
except exceptions.EnvironmentFileNotFound:
pass
old_directory = directory
directory = os.path.dirname(directory)
if directory == old_directory:
break
raise exceptions.EnvironmentFileNotFound(files[0])
# TODO This should lean more on conda instead of divining it from the outside
# TODO tests!!!
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
"""
Get environment object from prefix
Args:
name: The name of environment
prefix: The path of prefix
        no_builds: Whether to omit build strings from the dependency specs
        ignore_channels: Whether to ignore channels when collecting packages
Returns: Environment object
"""
installed = linked(prefix, ignore_channels=ignore_channels)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
if no_builds:
dependencies = ['='.join((a.name, a.version)) for a in sorted(conda_pkgs)]
else:
dependencies = ['='.join((a.name, a.version, a.build)) for a in sorted(conda_pkgs)]
if len(pip_pkgs) > 0:
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
# conda uses ruamel_yaml which returns a ruamel_yaml.comments.CommentedSeq
# this doesn't dump correctly using pyyaml
channels = list(context.channels)
if not ignore_channels:
for dist in conda_pkgs:
if dist.channel not in channels:
channels.insert(0, dist.channel)
return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
def from_yaml(yamlstr, **kwargs):
"""Load and return a ``Environment`` from a given ``yaml string``"""
data = yaml_load_standard(yamlstr)
if kwargs is not None:
for key, value in kwargs.items():
data[key] = value
return Environment(**data)
def from_file(filename):
if not os.path.exists(filename):
raise exceptions.EnvironmentFileNotFound(filename)
with open(filename, 'r') as fp:
yamlstr = fp.read()
return from_yaml(yamlstr, filename=filename)
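# Minimal usage sketch (hypothetical file name): load an environment spec,
# add a channel and write it back to the same file:
#
#   env = from_file('environment.yml')
#   env.add_channels(['conda-forge'])
#   env.save()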
# TODO test explicitly
class Dependencies(OrderedDict):
def __init__(self, raw, *args, **kwargs):
super(Dependencies, self).__init__(*args, **kwargs)
self.raw = raw
self.parse()
def parse(self):
if not self.raw:
return
self.update({'conda': []})
for line in self.raw:
if isinstance(line, dict):
self.update(line)
else:
self['conda'].append(common.arg2spec(line))
if 'pip' in self:
if not self['pip']:
del self['pip']
if not any(MatchSpec(s).name == 'pip' for s in self['conda']):
self['conda'].append('pip')
# TODO only append when it's not already present
def add(self, package_name):
self.raw.append(package_name)
self.parse()
def unique(seq, key=None):
""" Return only unique elements of a sequence
>>> tuple(unique((1, 2, 3)))
(1, 2, 3)
>>> tuple(unique((1, 2, 1, 3)))
(1, 2, 3)
Uniqueness can be defined by key keyword
>>> tuple(unique(['cat', 'mouse', 'dog', 'hen'], key=len))
('cat', 'mouse')
"""
seen = set()
seen_add = seen.add
if key is None:
for item in seq:
if item not in seen:
seen_add(item)
yield item
else: # calculate key
for item in seq:
val = key(item)
if val not in seen:
seen_add(val)
yield item
class Environment(object):
def __init__(self, name=None, filename=None, channels=None,
dependencies=None, prefix=None):
self.name = name
self.filename = filename
self.prefix = prefix
self.dependencies = Dependencies(dependencies)
if channels is None:
channels = []
self.channels = channels
def add_channels(self, channels):
self.channels = list(unique(chain.from_iterable((channels, self.channels))))
def remove_channels(self):
self.channels = []
def to_dict(self):
d = yaml.dict([('name', self.name)])
if self.channels:
d['channels'] = self.channels
if self.dependencies:
d['dependencies'] = self.dependencies.raw
if self.prefix:
d['prefix'] = self.prefix
return d
def to_yaml(self, stream=None):
d = self.to_dict()
out = compat.u(dump(d))
if stream is None:
return out
stream.write(compat.b(out, encoding="utf-8"))
def save(self):
with open(self.filename, "wb") as fp:
self.to_yaml(stream=fp)
|
apache-2.0
| -9,188,469,645,229,537,000 | 30.688889 | 94 | 0.606417 | false |
telefonicaid/perseo-fe
|
test/acceptance/integration/__init__.py
|
1
|
1548
|
# -*- coding: utf-8 -*-
#
# Copyright 2015 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of perseo-fe
#
# perseo-fe is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# perseo-fe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with perseo-fe.
# If not, see http://www.gnu.org/licenses/.
#
# For those usages not covered by the GNU Affero General Public License
# please contact with:
# iot_support at tid.es
#
__author__ = 'Iván Arias León (ivan.ariasleon@telefonica.com)'
from lettuce import world
import json
import os
import sys
"""
Parse the JSON configuration file located in the src folder and
store the resulting dictionary in the lettuce world global variable.
"""
with open("properties.json") as config_file:
try:
world.config = json.load(config_file)
except Exception, e:
print 'Error parsing config file: %s' % (e)
sys.exit(1)
"""
Make sure the logs path exists and create it otherwise.
"""
if not os.path.exists(world.config["environment"]["logs_path"]):
os.makedirs(world.config["environment"]["logs_path"])
|
agpl-3.0
| 2,361,031,582,711,251,000 | 31.1875 | 77 | 0.72945 | false |
balle/chaosmap
|
chaosmap.py
|
1
|
15829
|
#!/usr/bin/python2
#
# Chaosmap
#
# Chaosmap is an information gathering tool and
# dns / whois / web server scanner.
# For wider description and example usages see the README
#
# Coded by Balle
# http://www.datenterrorist.de
# License GPLv3
version = "1.2"
###[ Import modules
import sys
import getopt
import re
import socket
from random import randint
from time import sleep, strftime
import urllib2
sys.path.append('lib')
import httplib2
import socks
from cymruwhois import Client as WhoisClient
import google
#httplib2.debuglevel=4
###[ Globals
domains = None
dict_files = None
start_ip = None
stop_ip = None
web_proxy = None
web_port = "80"
base_url = ""
web_user = None
web_pass = None
proxy_user = None
proxy_pass = None
delay = 0
name_lookup = False
salt = False
backup_salt = False
urlencode = False
shut_up = False
web_lookup = False
email_lookup = False
google_dict_search = False
google_query_dict = False
whois = False
whois_client = WhoisClient()
web_client = httplib2.Http()
###[ Subroutines
def usage():
"""
Guess what ;)
"""
print "Chaosmap " + version
print "Coded by Bastian Ballmann"
print "http://www.datenterrorist.de\n"
print "Usage: " + sys.argv[0] + """
-b <base_url>
-B(ackup salt)
-c <web_user:password>
-C <proxy_user:password>
    -d <domain1,domain2,domain3>
    -D <delay_in_sec>
    -e(mail_search)
    -f <dict_file1,dict_file2,dict_file3>
-g(oogle_dict_search)
-G(oogle_only)
-h(elp)
-i <start_ip>-<stop_ip>
-n(ame_lookup)
-p <webserver_port>
-P <proxy_ip:port>
-q(uiet)
-Q (input in dict are google hack queries)
-s(alt)
-u(rlencode)
-v(ersion)
-w(eb)
-W(hois)"""
print "\nFor examples see the README"
sys.exit(1)
def do_dns_lookup(lookup_name):
"""
do the actual dns lookup or print error
"""
try:
print lookup_name + ": " + socket.gethostbyname(lookup_name)
except socket.gaierror, e:
print lookup_name + ": " + str(e)
def do_url_encoding(path):
hex = '%'.join(["%02x" % ord(x) for x in path])
return '%' + hex
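# e.g. do_url_encoding("ab") returns "%61%62" (every byte percent-hex-encoded)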
def dns_dict_lookup():
"""
    perform dns dictionary lookups
    if salt is true, also construct salted names like www2 www-2 www02
"""
for file in dict_files.split(","):
try:
fh = open(file, "r")
for word in fh.readlines():
            salted_dns = []
            for domain in domains.split(","):
                do_dns_lookup(word.strip() + "." + domain)
                # build the salted host name variants for this domain
                if salt == True:
                    salt_chars = ["", "0", "-", "-0", "_", "_0"]
                    for chars in salt_chars:
                        for i in range(1, 9):
                            salted_dns.append(word.strip() + chars + str(i) + "." + domain)
while len(salted_dns) > 0:
i = randint(0, len(salted_dns) - 1)
do_dns_lookup(salted_dns[i])
del salted_dns[i]
if delay > 0:
sleep(delay)
fh.close()
except IOError:
print "Cannot read dictionary " + file
def get_ips(start_ip, stop_ip):
"""
    return a list of all ip addresses from start_ip to stop_ip
"""
ips = []
start_dec = long(''.join(["%02X" % long(i) for i in start_ip.split('.')]), 16)
stop_dec = long(''.join(["%02X" % long(i) for i in stop_ip.split('.')]), 16)
while(start_dec < stop_dec + 1):
bytes = []
bytes.append(str(int(start_dec / 16777216)))
rem = start_dec % 16777216
bytes.append(str(int(rem / 65536)))
rem = rem % 65536
bytes.append(str(int(rem / 256)))
rem = rem % 256
bytes.append(str(rem))
ips.append(".".join(bytes))
start_dec += 1
return ips
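# e.g. get_ips("10.0.0.1", "10.0.0.3") -> ["10.0.0.1", "10.0.0.2", "10.0.0.3"]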
def dns_reverse_lookup():
"""
do a dns reverse lookup in random order
"""
ips = get_ips(start_ip, stop_ip)
while len(ips) > 0:
i = randint(0, len(ips) - 1)
lookup_ip = str(ips[i])
try:
print lookup_ip + ": " + str(socket.gethostbyaddr(lookup_ip)[0])
except socket.herror, e:
print lookup_ip + ": " + str(e)
except socket.error, e:
print lookup_ip + ": " + str(e)
if whois:
info = whois_client.lookup(lookup_ip)
print info.owner
if delay > 0:
sleep(delay)
del ips[i]
def do_web_lookup(host, path):
"""
do the actual web lookup, maybe mixin salt and
search the path on host with google, print the result
"""
url = ""
got_google_result = False
chars = ["/"]
if salt == True:
chars = ["/", "//", "/mooh/../", "/./"]
# if base_url != "" and re.search("/$", base_url) == None:
# base_url += "/"
if google_dict_search:
if not shut_up: print "Google dict search " + path + " on " + host
google_search_string = "+site:" + host + " inurl:" + base_url + "/" + path
if google_query_dict: google_search_string = "+site:" + host + " " + path
results = google.search(google_search_string, stop = 3)
try:
for link in results:
if re.match("^https?://" + host, link):
print "FOUND with Google:" + link
got_google_result = True
break
except KeyError:
pass
except urllib2.HTTPError, e:
print "Google search failed: " + str(e)
if not got_google_result:
if not shut_up: print "No result"
if web_lookup == True and (google_dict_search == False or (google_dict_search == True and got_google_result == False)):
for char in chars:
if web_port == "80":
url = "http://" + host + char + base_url + path
elif web_port == "443":
url = "https://" + host + char + base_url + path
else:
url = "http://" + host + ":" + web_port + char + base_url + path
try:
if not shut_up: print "GET " + url
response, content = web_client.request(url)
if response.status == 200:
print "FOUND " + url + " got " + response['content-location']
if delay > 0:
sleep(delay)
except httplib2.ServerNotFoundError:
print "Got error for " + url + ": Server not found"
def do_backup_salted_web_lookup(domain, path):
"""
Search for web backup files
"""
prefix_chars = ('_', '#')
suffix_chars = ('~', '~1', '.back', '.bak', '.old', '.orig', '_backup')
date_formats = ('%Y-%m-%d', '%Y%m%d', '%Y%d%m', '%Y-%d-%m', '%d%m%Y', '%d%m%Y',
'%y-%m-%d', '%y%m%d', '%y%d%m', '%y-%d-%m', '%d%m%y', '%d%m%y')
if re.search("%DATE%", path) != None:
for date_format in date_formats:
for date_glue in ('', '-', '_'):
path_copy = re.sub("%DATE%", date_glue + strftime(date_format), path)
do_web_lookup(domain, path_copy)
path_copy = re.sub("%DATE%", "", path)
if re.search("^%DATE%", path) == None:
do_web_lookup(domain, strftime(date_format) + date_glue + path_copy)
if re.search("%DATE%$", path):
do_web_lookup(domain, path_copy + date_glue + strftime(date_format))
if re.search("%DATE%", path) != None:
path = re.sub("%DATE%", "", path)
for char in prefix_chars:
do_web_lookup(domain, char + path)
for char in suffix_chars:
do_web_lookup(domain, path + char)
for prefix in prefix_chars:
for suffix in suffix_chars:
do_web_lookup(domain, prefix + path + suffix)
def do_google_mail_search(site):
"""
search google for site and parse a list of emails
"""
emails = set()
if not shut_up: print "Google search for emails on " + site
results = google.search("+site:" + site, num = 100, tld = "de", stop = 23)
try:
for link in results:
if link.find("youtube") > 0 or re.search("[html?|phtml|php|asp|jsp|txt|/][\\?$]", link) == None:
continue
if not shut_up: print "GET " + link
response, content = web_client.request(link)
if response.status == 200:
matches = re.findall(".*?([a-zA-Z0-9\\._\\-\\+]+@.+?\\.\w{2,4})", content)
if matches != None:
for match in matches:
emails.add(match)
except KeyError:
pass
except urllib2.HTTPError, e:
print "Google search failed: " + str(e)
if len(emails) == 0:
if not shut_up: print "No emails found for " + site
else:
print "Emails found for " + site + ":"
for email in emails:
print email
def scan_webserver():
"""
scan a web server for hidden paths based on a dictionary
"""
for file in dict_files.split(","):
try:
fh = open(file, "r")
for word in fh.readlines():
path = word.strip()
if urlencode:
path = do_url_encoding(path)
if domains != None:
for domain in domains.split(","):
if backup_salt:
do_backup_salted_web_lookup(domain, path)
else:
path = re.sub("%DATE%","", path)
do_web_lookup(domain, path)
else:
ips = get_ips(start_ip, stop_ip)
while len(ips) > 0:
i = randint(0, len(ips) - 1)
lookup_ip = str(ips[i])
del ips[i]
                    if backup_salt:
                        do_backup_salted_web_lookup(lookup_ip, path)
                    else:
                        path = re.sub("%DATE%","", path)
                        do_web_lookup(lookup_ip, path)
fh.close()
except IOError:
print "Cannot read dictionary " + file
###[ MAIN PART
if(len(sys.argv) < 2):
usage()
try:
cmd_opts = "b:Bc:C:d:D:ef:gi:np:P:qQsuvwW"
opts, args = getopt.getopt(sys.argv[1:], cmd_opts)
except getopt.GetoptError:
usage()
for opt in opts:
if opt[0] == "-b":
base_url = opt[1]
elif opt[0] == "-B":
backup_salt = True
elif opt[0] == "-c":
web_user, web_pass = opt[1].split(":")
elif opt[0] == "-C":
proxy_user, proxy_pass = opt[1].split(":")
elif opt[0] == "-d":
domains = opt[1]
elif opt[0] == "-D":
delay = int(opt[1])
elif opt[0] == "-e":
email_lookup = True
elif opt[0] == "-f":
dict_files = opt[1]
elif opt[0] == "-g":
google_dict_search = True
elif opt[0] == "-h":
usage()
elif opt[0] == "-i":
start_ip, stop_ip = opt[1].split('-')
elif opt[0] == "-n":
name_lookup = True
elif opt[0] == "-p":
web_port = opt[1]
elif opt[0] == "-P":
web_proxy = opt[1]
elif opt[0] == "-q":
shut_up = True
elif opt[0] == "-Q":
google_query_dict = True
elif opt[0] == "-s":
salt = True
elif opt[0] == "-u":
urlencode = True
elif opt[0] == "-v":
print version
sys.exit(1)
elif opt[0] == "-w":
web_lookup = True
elif opt[0] == "-W":
whois = True
if web_proxy != None:
proxy_ip, proxy_port = web_proxy.split(":")
if proxy_ip != "" and proxy_port != "":
proxy_info = httplib2.ProxyInfo(
proxy_type=socks.PROXY_TYPE_HTTP,
proxy_host=proxy_ip,
proxy_port=int(proxy_port),
proxy_rdns=True,
proxy_username=proxy_user,
proxy_password=proxy_pass
)
web_client = httplib2.Http(proxy_info=proxy_info)
else:
print "Proxy settings should be proxy_ip:port"
sys.exit(1)
if web_user != None and web_pass != None:
web_client.add_credentials(web_user, web_pass)
if(start_ip != None and stop_ip != None):
if name_lookup:
dns_reverse_lookup()
elif web_lookup == True and dict_files != None:
scan_webserver()
else:
print "You need to either specify -n for dns or -w for web server mapping"
sys.exit(1)
elif(domains != None and dict_files != None):
if name_lookup:
dns_dict_lookup()
elif web_lookup:
scan_webserver()
elif google_dict_search:
scan_webserver()
else:
print "You need to either specify -n for dns or -w for web server mapping"
sys.exit(1)
elif(domains != None and email_lookup):
do_google_mail_search(domains)
else:
usage()
# EOF dude ;)
|
gpl-3.0
| -7,577,405,450,876,749,000 | 32.82265 | 127 | 0.415187 | false |
okfn/rtei
|
rtei/settings/production.py
|
1
|
2263
|
import sys
import os
from .base import *
DEBUG = False
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config()
DATABASES['default'].update(db_from_env)
SECRET_KEY = os.environ.get('SECRET_KEY')
# AWS S3 settings
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
# Necessary to overcome broken pipe error if not default US location
# (https://github.com/boto/boto/issues/621).
if os.environ.get('AWS_S3_HOST', False):
AWS_S3_HOST = os.environ.get('AWS_S3_HOST')
MEDIA_URL = "https://%s/" % (AWS_S3_CUSTOM_DOMAIN)
ALLOWED_HOSTS = [
'localhost',
'rtei.herokuapp.com',
'rtei-production.herokuapp.com',
'www.rtei.org',
'rtei.org',
]
# Email to receive contact requests from the form on /about/contact-us/
RTEI_CONTACT_FORM_EMAIL = os.environ.get('RTEI_CONTACT_FORM_EMAIL')
EMAIL_HOST = os.environ.get('EMAIL_HOST')
EMAIL_PORT = os.environ.get('EMAIL_PORT', 587)
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD')
EMAIL_USE_TLS = True
# Force HTTPS on Heroku
SECURE_SSL_REDIRECT = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'stream': sys.stdout
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
'rtei': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
},
}
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.search.backends.elasticsearch2',
'URLS': [os.environ.get('ELASTICSEARCH_URL')],
'INDEX': 'wagtail',
'TIMEOUT': 5,
}
}
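# Environment variables this settings module expects (per the lookups above):
# SECRET_KEY, DATABASE_URL, AWS_* credentials and bucket, optional AWS_S3_HOST,
# EMAIL_* SMTP settings, RTEI_CONTACT_FORM_EMAIL and ELASTICSEARCH_URL.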
try:
from .local import *
except ImportError:
pass
|
agpl-3.0
| 669,437,752,501,034,400 | 25.940476 | 71 | 0.633672 | false |
akretion/odoo
|
addons/mrp/models/stock_warehouse.py
|
4
|
14216
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError, UserError
class StockWarehouse(models.Model):
_inherit = 'stock.warehouse'
manufacture_to_resupply = fields.Boolean(
'Manufacture to Resupply', default=True,
help="When products are manufactured, they can be manufactured in this warehouse.")
manufacture_pull_id = fields.Many2one(
'stock.rule', 'Manufacture Rule')
pbm_mto_pull_id = fields.Many2one(
'stock.rule', 'Picking Before Manufacturing MTO Rule')
sam_rule_id = fields.Many2one(
'stock.rule', 'Stock After Manufacturing Rule')
manu_type_id = fields.Many2one(
'stock.picking.type', 'Manufacturing Operation Type',
domain=[('code', '=', 'mrp_operation')])
pbm_type_id = fields.Many2one('stock.picking.type', 'Picking Before Manufacturing Operation Type')
sam_type_id = fields.Many2one('stock.picking.type', 'Stock After Manufacturing Operation Type')
manufacture_steps = fields.Selection([
('mrp_one_step', 'Manufacture (1 step)'),
('pbm', 'Pick components and then manufacture (2 steps)'),
('pbm_sam', 'Pick components, manufacture and then store products (3 steps)')],
'Manufacture', default='mrp_one_step', required=True,
help="Produce : Move the raw materials to the production location\
directly and start the manufacturing process.\nPick / Produce : Unload\
the raw materials from the Stock to Input location first, and then\
transfer it to the Production location.")
pbm_route_id = fields.Many2one('stock.location.route', 'Picking Before Manufacturing Route', ondelete='restrict')
pbm_loc_id = fields.Many2one('stock.location', 'Picking before Manufacturing Location')
sam_loc_id = fields.Many2one('stock.location', 'Stock after Manufacturing Location')
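    # Routing per manufacture_steps: 'mrp_one_step' manufactures straight from
    # stock, 'pbm' first pulls components into pbm_loc_id, and 'pbm_sam' also
    # pushes finished products from sam_loc_id back into stock.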
def get_rules_dict(self):
result = super(StockWarehouse, self).get_rules_dict()
production_location_id = self._get_production_location()
for warehouse in self:
result[warehouse.id].update({
'mrp_one_step': [],
'pbm': [
self.Routing(warehouse.lot_stock_id, warehouse.pbm_loc_id, warehouse.pbm_type_id, 'pull'),
self.Routing(warehouse.pbm_loc_id, production_location_id, warehouse.manu_type_id, 'pull'),
],
'pbm_sam': [
self.Routing(warehouse.lot_stock_id, warehouse.pbm_loc_id, warehouse.pbm_type_id, 'pull'),
self.Routing(warehouse.pbm_loc_id, production_location_id, warehouse.manu_type_id, 'pull'),
self.Routing(warehouse.sam_loc_id, warehouse.lot_stock_id, warehouse.sam_type_id, 'push'),
],
})
return result
@api.model
def _get_production_location(self):
location = self.env.ref('stock.location_production', raise_if_not_found=False)
if not location:
location = self.env['stock.location'].search([('usage', '=', 'production')], limit=1)
if not location:
raise UserError(_('Can\'t find any production location.'))
return location
def _get_routes_values(self):
routes = super(StockWarehouse, self)._get_routes_values()
routes.update({
'pbm_route_id': {
'routing_key': self.manufacture_steps,
'depends': ['manufacture_steps', 'manufacture_to_resupply'],
'route_update_values': {
'name': self._format_routename(route_type=self.manufacture_steps),
'active': self.manufacture_steps != 'mrp_one_step',
},
'route_create_values': {
'product_categ_selectable': True,
'warehouse_selectable': True,
'product_selectable': False,
'company_id': self.company_id.id,
'sequence': 10,
},
'rules_values': {
'active': True,
}
}
})
return routes
def _get_route_name(self, route_type):
names = {
'mrp_one_step': _('Manufacture (1 step)'),
'pbm': _('Pick components and then manufacture'),
'pbm_sam': _('Pick components, manufacture and then store products (3 steps)'),
}
if route_type in names:
return names[route_type]
else:
return super(StockWarehouse, self)._get_route_name(route_type)
def _get_global_route_rules_values(self):
rules = super(StockWarehouse, self)._get_global_route_rules_values()
location_id = self.manufacture_steps == 'pbm_sam' and self.sam_loc_id or self.lot_stock_id
rules.update({
'manufacture_pull_id': {
'depends': ['manufacture_steps', 'manufacture_to_resupply'],
'create_values': {
'action': 'manufacture',
'procure_method': 'make_to_order',
'company_id': self.company_id.id,
'picking_type_id': self.manu_type_id.id,
'route_id': self._find_global_route('mrp.route_warehouse0_manufacture', _('Manufacture')).id
},
'update_values': {
'active': self.manufacture_to_resupply,
'name': self._format_rulename(location_id, False, 'Production'),
'location_id': location_id.id,
}
},
'pbm_mto_pull_id': {
'depends': ['manufacture_steps', 'manufacture_to_resupply'],
'create_values': {
'procure_method': 'make_to_order',
'company_id': self.company_id.id,
'action': 'pull',
'auto': 'manual',
'propagate': True,
'route_id': self._find_global_route('stock.route_warehouse0_mto', _('Make To Order')).id,
'name': self._format_rulename(self.lot_stock_id, self.pbm_loc_id, 'MTO'),
'location_id': self.pbm_loc_id.id,
'location_src_id': self.lot_stock_id.id,
'picking_type_id': self.pbm_type_id.id
},
'update_values': {
'active': self.manufacture_steps != 'mrp_one_step' and self.manufacture_to_resupply,
}
},
# The purpose to move sam rule in the manufacture route instead of
# pbm_route_id is to avoid conflict with receipt in multiple
# step. For example if the product is manufacture and receipt in two
# step it would conflict in WH/Stock since product could come from
# WH/post-prod or WH/input. We do not have this conflict with
# manufacture route since it is set on the product.
'sam_rule_id': {
'depends': ['manufacture_steps', 'manufacture_to_resupply'],
'create_values': {
'procure_method': 'make_to_order',
'company_id': self.company_id.id,
'action': 'pull',
'auto': 'manual',
'propagate': True,
'route_id': self._find_global_route('mrp.route_warehouse0_manufacture', _('Manufacture')).id,
'name': self._format_rulename(self.sam_loc_id, self.lot_stock_id, False),
'location_id': self.lot_stock_id.id,
'location_src_id': self.sam_loc_id.id,
'picking_type_id': self.sam_type_id.id
},
'update_values': {
'active': self.manufacture_steps == 'pbm_sam' and self.manufacture_to_resupply,
}
}
})
return rules
def _get_locations_values(self, vals):
values = super(StockWarehouse, self)._get_locations_values(vals)
def_values = self.default_get(['manufacture_steps'])
manufacture_steps = vals.get('manufacture_steps', def_values['manufacture_steps'])
code = vals.get('code') or self.code or ''
code = code.replace(' ', '').upper()
company_id = vals.get('company_id', self.company_id.id)
values.update({
'pbm_loc_id': {
'name': _('Pre-Production'),
'active': manufacture_steps in ('pbm', 'pbm_sam'),
'usage': 'internal',
'barcode': self._valid_barcode(code + '-PREPRODUCTION', company_id)
},
'sam_loc_id': {
'name': _('Post-Production'),
'active': manufacture_steps == 'pbm_sam',
'usage': 'internal',
'barcode': self._valid_barcode(code + '-POSTPRODUCTION', company_id)
},
})
return values
def _get_sequence_values(self):
values = super(StockWarehouse, self)._get_sequence_values()
values.update({
'pbm_type_id': {'name': self.name + ' ' + _('Sequence picking before manufacturing'), 'prefix': self.code + '/PC/', 'padding': 5},
'sam_type_id': {'name': self.name + ' ' + _('Sequence stock after manufacturing'), 'prefix': self.code + '/SFP/', 'padding': 5},
'manu_type_id': {'name': self.name + ' ' + _('Sequence production'), 'prefix': self.code + '/MO/', 'padding': 5},
})
return values
def _get_picking_type_create_values(self, max_sequence):
data, next_sequence = super(StockWarehouse, self)._get_picking_type_create_values(max_sequence)
data.update({
'pbm_type_id': {
'name': _('Pick Components'),
'code': 'internal',
'use_create_lots': True,
'use_existing_lots': True,
'default_location_src_id': self.lot_stock_id.id,
'default_location_dest_id': self.pbm_loc_id.id,
'sequence': next_sequence + 1
},
'sam_type_id': {
'name': _('Store Finished Product'),
'code': 'internal',
'use_create_lots': True,
'use_existing_lots': True,
'default_location_src_id': self.sam_loc_id.id,
'default_location_dest_id': self.lot_stock_id.id,
'sequence': next_sequence + 3
},
'manu_type_id': {
'name': _('Manufacturing'),
'code': 'mrp_operation',
'use_create_lots': True,
'use_existing_lots': True,
'sequence': next_sequence + 2
},
})
return data, max_sequence + 4
def _get_picking_type_update_values(self):
data = super(StockWarehouse, self)._get_picking_type_update_values()
data.update({
'pbm_type_id': {'active': self.manufacture_to_resupply and self.manufacture_steps in ('pbm', 'pbm_sam') and self.active},
'sam_type_id': {'active': self.manufacture_to_resupply and self.manufacture_steps == 'pbm_sam' and self.active},
'manu_type_id': {
'active': self.manufacture_to_resupply and self.active,
'default_location_src_id': self.manufacture_steps in ('pbm', 'pbm_sam') and self.pbm_loc_id.id or self.lot_stock_id.id,
'default_location_dest_id': self.manufacture_steps == 'pbm_sam' and self.sam_loc_id.id or self.lot_stock_id.id,
},
})
return data
@api.multi
def write(self, vals):
if any(field in vals for field in ('manufacture_steps', 'manufacture_to_resupply')):
for warehouse in self:
warehouse._update_location_manufacture(vals.get('manufacture_steps', warehouse.manufacture_steps))
return super(StockWarehouse, self).write(vals)
@api.multi
def _get_all_routes(self):
routes = super(StockWarehouse, self)._get_all_routes()
routes |= self.filtered(lambda self: self.manufacture_to_resupply and self.manufacture_pull_id and self.manufacture_pull_id.route_id).mapped('manufacture_pull_id').mapped('route_id')
return routes
def _update_location_manufacture(self, new_manufacture_step):
switch_warehouses = self.filtered(lambda wh: wh.manufacture_steps != new_manufacture_step)
loc_warehouse = switch_warehouses.filtered(lambda wh: not wh._location_used(wh.pbm_loc_id))
if loc_warehouse:
loc_warehouse.mapped('pbm_loc_id').write({'active': False})
loc_warehouse = switch_warehouses.filtered(lambda wh: not wh._location_used(wh.sam_loc_id))
if loc_warehouse:
loc_warehouse.mapped('sam_loc_id').write({'active': False})
if new_manufacture_step != 'mrp_one_step':
self.mapped('pbm_loc_id').write({'active': True})
if new_manufacture_step == 'pbm_sam':
self.mapped('sam_loc_id').write({'active': True})
@api.multi
def _update_name_and_code(self, name=False, code=False):
res = super(StockWarehouse, self)._update_name_and_code(name, code)
# change the manufacture stock rule name
for warehouse in self:
if warehouse.manufacture_pull_id and name:
warehouse.manufacture_pull_id.write({'name': warehouse.manufacture_pull_id.name.replace(warehouse.name, name, 1)})
return res
class Orderpoint(models.Model):
_inherit = "stock.warehouse.orderpoint"
@api.constrains('product_id')
def check_product_is_not_kit(self):
if self.env['mrp.bom'].search(['|', ('product_id', 'in', self.product_id.ids),
'&', ('product_id', '=', False), ('product_tmpl_id', 'in', self.product_id.product_tmpl_id.ids),
('type', '=', 'phantom')], count=True):
raise ValidationError(_("A product with a kit-type bill of materials can not have a reordering rule."))
|
agpl-3.0
| 7,094,540,157,854,976,000 | 48.361111 | 190 | 0.557681 | false |
simgislab/universiade2013
|
univer_parse.py
|
1
|
7397
|
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# univer_parse.py
# Author: Maxim Dubinin (sim@gis-lab.info)
# About: Grab kazan2013.ru data on Universiade 2013 participants, creating one CSV of athletes per country.
# Created: 13:26 07.05.2013
# Usage example: python univer_parse.py RUS
# ---------------------------------------------------------------------------
import urllib2
from bs4 import BeautifulSoup
import sys
import os
import ucsv as csv
from datetime import datetime
def download_list(link,cntry):
try:
u = urllib2.urlopen(link)
except urllib2.URLError, e:
if hasattr(e, 'reason'):
print 'We failed to reach a server.'
print 'Reason: ', e.reason
elif hasattr(e, 'code'):
print 'The server couldn\'t fulfill the request.'
print 'Error code: ', e.code
f_errors.write(cntry + "," + link + "\n")
success = False
else:
f = open("countries/" + cntry + ".html","wb")
f.write(u.read())
f.close()
print("Listing for " + cntry + " was downloaded")
success = True
return success
def get_country_codes(link):
country_codes = []
u = urllib2.urlopen(link)
soup = BeautifulSoup(''.join(u.read()))
sel = soup.find("select", { "name" : "countryId" })
options = sel.findAll('option')
for option in options:
optval = option['value']
if optval != '':
country_codes.append(optval)
return country_codes
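# Country codes come from the <select name="countryId"> dropdown on the
# participant list page; passing a single code on the command line (e.g. RUS)
# skips this lookup.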
def download_person(link,cntry,name):
try:
u = urllib2.urlopen(link)
except urllib2.URLError, e:
if hasattr(e, 'reason'):
print 'We failed to reach a server.'
print 'Reason: ', e.reason
elif hasattr(e, 'code'):
print 'The server couldn\'t fulfill the request.'
print 'Error code: ', e.code
f_errors.write(cntry + "," + link + "," + name + "\n")
success = False
else:
f = open("peoples/" + cntry + "_" + name + ".html","wb")
f.write(u.read())
f.close()
print("Listing for " + name.encode("utf-8") + " was downloaded")
success = True
return success
def parse_list(cntry):
cntry_list = open("countries/" + cntry + ".html")
soup = BeautifulSoup(''.join(cntry_list.read()))
maintable = soup.find("table", { "class" : "participantList sortable" })
trs = maintable.findAll('tr')
del trs[0]
for tr in trs:
tds = tr.findAll('td')
name = list(tds[0].find("span", { "class" : "sortValue hidden" }).strings)[0]
link = "http://kazan2013.ru" + tds[0].find('a')['href']
nameru = list(tds[0].find('a').strings)[0]
#if len(list(tds[1].strings)) != 0:
# gender = list(tds[1].strings)[0]
#else:
# gender = "not set"
if tds[2].find('a') != None:
sports = list(tds[2].find('a').strings)[0]
sports = sports.replace("\r\n","").strip()
sportslink = "http://kazan2013.ru" + tds[2].find('a')['href']
else:
sports = ""
sportslink = ""
#cntry = list(tds[3].find('a').strings)[0]
cntrylink = "http://kazan2013.ru" + tds[3].find('a')['href']
success = download_person(link.replace("/ru/","/en/"),cntry,name)
if success == True:
lastname,firstname,gender,dob,day_b,month_b,year_b,height,weight,uniname,unicity,team = parse_person(cntry,name)
else:
lastname = firstname = gender = dob = day_b = month_b = year_b = height = weight = uniname = unicity = team = "error"
#write to man file
csvwriter.writerow(dict(NAME=name,
LINK=link,
NAMERU=nameru,
GENDER=gender,
SPORTS=sports,
SPORTSLINK=sportslink,
CNTRY=cntry,
CNTRYLINK=cntrylink,
LASTNAME=lastname,
FIRSTNAME=firstname,
DOB=dob,
DOB_DAY=day_b,
DOB_MNTH=month_b,
DOB_YEAR=year_b,
HEIGHT=height,
WEIGHT=weight,
UNINAME=uniname,
UNICITY=unicity,
TEAM=team))
def parse_person(cntry,name):
f_person = open("peoples/" + cntry + "_" + name + ".html",'rb')
soup = BeautifulSoup(''.join(f_person.read()))
persinfotable = soup.findAll('table')[0]
trs = persinfotable.findAll('tr')
del trs[0]
    lastname = firstname = gender = dob = day_b = month_b = year_b = height = weight = uniname = unicity = team = ""
for tr in trs:
tds = tr.findAll('td')
trname = list(tds[0].strings)[0].strip()
if trname == "Family name":
lastname = list(tds[1].strings)[0].strip()
elif trname == "Given name":
firstname = list(tds[1].strings)[0].strip()
elif trname == "Gender":
gender = list(tds[1].find('div').strings)[0].strip()
elif trname == "Birthdate":
dob = list(tds[1].findAll('div')[0].strings)[0].strip()
date_object = datetime.strptime(dob, '%d %B %Y')
day_b = date_object.day
month_b = date_object.month
year_b = date_object.year
elif trname == "Height (cm)":
height = list(tds[1].strings)[0].strip()
elif trname == "Weight (kg)":
weight = list(tds[1].strings)[0].strip()
elif trname == "University":
uniname = list(tds[1].strings)[0].strip()
elif trname == "University City":
unicity = list(tds[1].strings)[0].strip()
elif trname == "Teams":
team = list(tds[1].find("span").strings)[0].strip()
return lastname,firstname,gender,dob,day_b,month_b,year_b,height,weight,uniname,unicity,team
if __name__ == '__main__':
args = sys.argv[1:]
if len(args) == 1:
country_codes = [args[0]] #use RUS for RUSSIA
else:
country_codes = get_country_codes(link = "http://kazan2013.ru/hide/ru/-240/Participant/List?isRelay=False&isAnimal=False&lastNameStarts=&sportId=&countryId=RUS")
f_errors = open("errors.log","a")
fieldnames_data = ("NAME","LINK","NAMERU","SPORTS","SPORTSLINK","CNTRY","CNTRYLINK","LASTNAME","FIRSTNAME","GENDER","DOB","DOB_DAY","DOB_MNTH","DOB_YEAR","HEIGHT","WEIGHT","UNINAME","UNICITY","TEAM")
for cntry in country_codes:
link = "http://kazan2013.ru/hide/ru/-240/Participant/List?isRelay=False&isAnimal=False&lastNameStarts=&sportId=&countryId=" + cntry
data_name = cntry + ".csv"
f_data = open("countries/" + data_name,"wb")
csvwriter = csv.DictWriter(f_data, fieldnames=fieldnames_data)
success = download_list(link,cntry)
if success == True:
parse_list(cntry)
else:
f_errors.write(cntry + "\n")
f_data.close()
f_errors.close()
|
bsd-2-clause
| -682,727,570,312,697,100 | 39.152174 | 203 | 0.5155 | false |
machinegun/SALSA
|
correct.py
|
1
|
3402
|
import sys
#reads the input assembly and the breakpoints given by the method, and outputs a new contig file with lengths
#plus an offset-corrected bed file as well
def parse_fasta(fh):
fa = {}
current_short_name = None
# Part 1: compile list of lines per sequence
for ln in fh:
if ln[0] == '>':
# new name line; remember current sequence's short name
long_name = ln[1:].rstrip()
current_short_name = long_name.split()[0]
fa[current_short_name] = []
else:
# append nucleotides to current sequence
fa[current_short_name].append(ln.rstrip())
# Part 2: join lists into strings
for short_name, nuc_list in fa.iteritems():
# join this sequence's lines into one long string
fa[short_name] = ''.join(nuc_list)
return fa
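# fa maps each sequence's short name (first token of its FASTA header line)
# to the full nucleotide string with newlines stripped.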
#read fasta first
input_seqs = parse_fasta(open(sys.argv[1],'r'))
#read breakpoints
contig2breakpoints = {}
with open(sys.argv[2],'r') as f:
for line in f:
attrs = line.split()
contig2breakpoints[attrs[0]] = int(attrs[1])
#first breake the input assembly and store the mapping of old to new names of contigs
contig2new = {}
contig2newseq = {}
contig_id = 1
for seq in input_seqs:
if seq not in contig2breakpoints:
contig2new[seq] = seq
contig2newseq[seq] = input_seqs[seq]
contig_id += 1
else:
first = input_seqs[seq][:contig2breakpoints[seq]]
second = input_seqs[seq][contig2breakpoints[seq]:]
first_id = seq+'_1'
contig_id += 1
second_id = seq+'_2'
contig_id += 1
contig2new[seq] = [first_id,second_id]
contig2newseq[first_id] = first
contig2newseq[second_id] = second
#now update the bed file in streaming fashion
oline = ""
count = 0
ofile = open(sys.argv[4]+'/alignment_iteration_1.tmp.bed','w')
with open(sys.argv[3],'r') as f:
for line in f:
attrs = line.split()
if attrs[0] not in contig2breakpoints:
oline += str(contig2new[attrs[0]] +'\t'+attrs[1]+'\t'+attrs[2]+'\t'+attrs[3]+'\n')
count += 1
else:
pos1 = int(attrs[1])
pos2 = int(attrs[2])
breakpoint_loc = contig2breakpoints[attrs[0]]
if pos1 < breakpoint_loc and pos2 > breakpoint_loc:
continue
else:
if pos2 < breakpoint_loc:
oline += str(contig2new[attrs[0]][0]+'\t'+attrs[1]+'\t'+attrs[2]+'\t'+attrs[3]+'\n')
count += 1
else:
new_start = pos1 - breakpoint_loc
new_end = pos2 - breakpoint_loc
oline += str(contig2new[attrs[0]][1]+'\t'+str(new_start)+'\t'+str(new_end)+'\t'+attrs[3]+'\n')
count += 1
if count >= 1000000:
ofile.write(oline)
oline = ""
count = 0
ofile.close()
#write fasta file
ofasta = open(sys.argv[4]+'/asm.cleaned.fasta','w')
for seq in contig2newseq:
contig_seq = contig2newseq[seq]
chunks = [contig_seq[i:i+80] for i in xrange(0,len(contig_seq),80)]
ofasta.write('>'+seq+'\n')
for chunk in chunks:
ofasta.write(chunk+'\n')
ofasta.close()
#write lengths
olens = open(sys.argv[4]+'/scaffold_length_iteration_1','w')
for seq in contig2newseq:
olens.write(seq+'\t'+str(len(contig2newseq[seq]))+'\n')
olens.close()
|
mit
| -8,328,874,914,861,466,000 | 29.927273 | 114 | 0.57231 | false |
PEAT-AI/Crampy
|
inverse_kinematics/torben_ik.py
|
1
|
7206
|
import serial
import math
import time
import csv
import thread
import numpy
from multiprocessing import Process
#global variables
#serial connection to mini Maestro
ser = serial.Serial('/dev/ttyACM1')
#leg length in cm
lega=3
legb=8
legc=10
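# presumably lega is the coxa (hip) offset and legb/legc the femur/tibia of a
# standard 3-DOF hexapod leg; the IK functions below solve that geometry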
def leg1(WinkelA,WinkelB,WinkelC,ser):
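    # each command triplet looks like the Pololu Mini SSC protocol:
    # 0xFF start byte, servo channel, target angle (0-254)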
WinkelA=180-WinkelA
WinkelC=170-WinkelC
if WinkelB <70:
WinkelB=70
cmd_dreh=chr(0xFF)+chr(0)+chr(WinkelA)
cmd_hoch=chr(0xFF)+chr(1)+chr(WinkelB)
cmd_fuss=chr(0xFF)+chr(2)+chr(WinkelC)
ser.write(cmd_dreh+cmd_fuss+cmd_hoch)
ser.flush()
def leg2(WinkelA,WinkelB,WinkelC,ser):
WinkelA=180-WinkelA
#WinkelA=90-WinkelA
WinkelB=170-WinkelB
cmd_dreh=chr(0xFF)+chr(3)+chr(WinkelA)
cmd_hoch=chr(0xFF)+chr(4)+chr(WinkelB)
cmd_fuss=chr(0xFF)+chr(5)+chr(WinkelC)
ser.write(cmd_dreh+cmd_hoch+cmd_fuss)
ser.flush()
def leg3(WinkelA,WinkelB,WinkelC,ser):
WinkelC=170-WinkelC
cmd_dreh=chr(0xFF)+chr(6)+chr(WinkelA)
cmd_hoch=chr(0xFF)+chr(7)+chr(WinkelB)
cmd_fuss=chr(0xFF)+chr(8)+chr(WinkelC)
ser.write(cmd_dreh+cmd_hoch+cmd_fuss)
ser.flush()
def leg4(WinkelA,WinkelB,WinkelC,ser):
WinkelB=170-WinkelB
cmd_dreh=chr(0xFF)+chr(9)+chr(WinkelA)
cmd_hoch=chr(0xFF)+chr(10)+chr(WinkelB)
cmd_fuss=chr(0xFF)+chr(11)+chr(WinkelC)
ser.write(cmd_dreh+cmd_hoch+cmd_fuss)
ser.flush()
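#The IK1..IK4 routines below share the same closed-form solution (a reading of the math, not stated by
#the author): angle_a rotates the hip toward the target in the x/y plane, and angle2/angle3 solve the
#remaining two-link chain (legb, legc) in the vertical plane before being mapped onto servo degrees.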
def IK1(x,y,z,ser):
angle_a=math.atan2(y,x)
A=-z
B=lega-(x*math.cos(angle_a)+y*math.sin(angle_a))
D=(2*lega*(x*math.cos(angle_a)+y*math.sin(angle_a)) +math.pow(legc,2)-math.pow(legb,2)-math.pow(z,2)-math.pow((x*math.cos(angle_a)+y*math.sin(angle_a)),2) )/(2*legb)
angle2=-math.atan2(B,A)+math.atan2(D,math.sqrt(math.pow(A,2)+math.pow(B,2)-math.pow(D,2)))
angle3=math.atan2(z-legb*math.sin(angle2),x*math.cos(angle_a)+y*math.sin(angle_a))
angle1=angle_a*180/math.pi
angle2=angle2*180/math.pi
angle3=angle3*180/math.pi
angle2=angle2
angle3=90+angle3
angle2=90+angle2
#print(angle1)
#print(angle2)
#print(angle3)
angle_a=angle_a*180/math.pi+45
leg1(int(angle_a),int(angle2),int(angle3),ser)
def IK2(x,y,z,ser):
angle_a=math.atan2(y,x)
A=-z
B=lega-(x*math.cos(angle_a)+y*math.sin(angle_a))
D=(2*lega*(x*math.cos(angle_a)+y*math.sin(angle_a)) +math.pow(legc,2)-math.pow(legb,2)-math.pow(z,2)-math.pow((x*math.cos(angle_a)+y*math.sin(angle_a)),2) )/(2*legb)
angle2=-math.atan2(B,A)+math.atan2(D,math.sqrt(math.pow(A,2)+math.pow(B,2)-math.pow(D,2)))
angle3=math.atan2(z-legb*math.sin(angle2),x*math.cos(angle_a)+y*math.sin(angle_a))
angle1=angle_a*180/math.pi
angle2=angle2*180/math.pi
angle3=angle3*180/math.pi
angle2=angle2
angle3=90+angle3
angle2=90+angle2
#print(angle1)
#print(angle2)
#print(angle3)
angle_a=angle_a*180/math.pi+45
leg2(int(angle_a),int(angle2),int(angle3),ser)
def IK3(x,y,z,ser):
angle_a=math.atan2(y,x)
A=-z
B=lega-(x*math.cos(angle_a)+y*math.sin(angle_a))
D=(2*lega*(x*math.cos(angle_a)+y*math.sin(angle_a)) +math.pow(legc,2)-math.pow(legb,2)-math.pow(z,2)-math.pow((x*math.cos(angle_a)+y*math.sin(angle_a)),2) )/(2*legb)
angle2=-math.atan2(B,A)+math.atan2(D,math.sqrt(math.pow(A,2)+math.pow(B,2)-math.pow(D,2)))
angle3=math.atan2(z-legb*math.sin(angle2),x*math.cos(angle_a)+y*math.sin(angle_a))
angle1=angle_a*180/math.pi
angle2=angle2*180/math.pi
angle3=angle3*180/math.pi
angle2=angle2
angle3=90+angle3
angle2=90+angle2
#print(angle1)
#print(angle2)
#print(angle3)
angle_a=angle_a*180/math.pi+45
leg3(int(angle_a),int(angle2),int(angle3),ser)
def IK4(x,y,z,ser):
angle_a=math.atan2(y,x)
A=-z
B=lega-(x*math.cos(angle_a)+y*math.sin(angle_a))
D=(2*lega*(x*math.cos(angle_a)+y*math.sin(angle_a)) +math.pow(legc,2)-math.pow(legb,2)-math.pow(z,2)-math.pow((x*math.cos(angle_a)+y*math.sin(angle_a)),2) )/(2*legb)
angle2=-math.atan2(B,A)+math.atan2(D,math.sqrt(math.pow(A,2)+math.pow(B,2)-math.pow(D,2)))
angle3=math.atan2(z-legb*math.sin(angle2),x*math.cos(angle_a)+y*math.sin(angle_a))
angle1=angle_a*180/math.pi
angle2=angle2*180/math.pi
angle3=angle3*180/math.pi
angle2=angle2
angle3=90+angle3
angle2=90+angle2
print(angle1)
#print(angle2)
#print(angle3)
angle_a=angle_a*180/math.pi+45
leg4(int(angle_a),int(angle2),int(angle3),ser)
# normal position is 90 90 90 (all legs away and middle)
#for WinkelB and WinkelC a higher angle means to extrude the leg
#leg1(90,90,90,ser)
#time.sleep(1.)
#leg1(90,120,170,ser)
#time.sleep(1)
#leg1(90,90,120,ser)
#time.sleep(1)
#leg1(90,120,90,ser)
#leg2(90,90,90,ser)
#leg3(90,90,90,ser)
#leg4(90,90,90,ser)
def doleg(String,speed,leg,modulo):
koords=[0]
with open(String,'rb') as csvfile:
reader=csv.reader(csvfile,delimiter=',',quotechar='|')
for row in reader:
koords.append(row)
for i in range(0,100):
x=koords[1][i]
y=koords[2][i]
z=koords[3][i]
if i%modulo==0:
time.sleep(speed)
if leg==1:
IK1(float(x),float(y),float(z),ser)
if leg==2:
IK2(float(x),float(y),float(z),ser)
if leg==3:
IK3(float(x),float(y),float(z),ser)
if leg==4:
IK4(float(x),float(y),float(z),ser)
def testgait(number):
doleg('testgait.csv',0.005,number,1)
doleg('testgait_down.csv',0.005,number,1)
doleg('testgait_back.csv',0.005,number,1)
def func1():
number=1
testgait(number)
def func2():
number=2
testgait(number)
def func3():
number=3
testgait(number)
def func4():
number=4
testgait(number)
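#calctrajectory below sweeps the foot from position v1 to v2 along a circular arc of radius |v1|
#(v1*cos(t) + v3*sin(t) with v3 orthogonal to v1 in their common plane), returning `resolution`
#intermediate points; this description is inferred from the vector math rather than stated by the author.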
def calctrajectory(v1,v2,resolution):
r=numpy.linalg.norm(v1)
v3=numpy.cross(numpy.cross(v1,v2),v1)
v3=r*v3/numpy.linalg.norm(v3)
t=numpy.linspace(0,math.atan2(numpy.linalg.norm(numpy.cross(v1,v2)),numpy.dot(v1,v2)),num=resolution)
v1_result=[[0 for x in range(len(t))] for x in range(len(t))]
v3_result=[[0 for x in range(len(t))] for x in range(len(t))]
result=[[0 for x in range(len(t))] for x in range(len(t))]
for x in range(len(t)):
v1_result[x][0]=v1[0]*math.cos(t[x])
v1_result[x][1]=v1[1]*math.cos(t[x])
v1_result[x][2]=v1[2]*math.cos(t[x])
v3_result[x][0]=v3[0]*math.sin(t[x])
v3_result[x][1]=v3[1]*math.sin(t[x])
v3_result[x][2]=v3[2]*math.sin(t[x])
result[x][0]=v1_result[x][0]+v3_result[x][0]
result[x][1]=v1_result[x][1]+v3_result[x][1]
result[x][2]=v1_result[x][2]+v3_result[x][2]
return result
leg1(0,0,0,ser)
if __name__ == '__main__':
test=calctrajectory([0,15,-10],[11,11,-10],100)
for x in range(len(test)):
print test[x][1]
# p1 = Process(target=func1)
# p1.start()
# p2 = Process(target=func2)
# p2.start()
# p3 = Process(target=func3)
# p3.start()
# p4 = Process(target=func4)
# p4.start()
# p1.join()
# p2.join()
# p3.join()
# p4.join()
#try:
# thread.start_new_thread(testgait(1))
# thread.start_new_thread(testgait(2))
# #thread.start_new_thread(testgait(3))
# #thread.start_new_thread(testgait(4))
#except:
# print "error"
#IK1(13,13,0,ser)
#time.sleep(0.5)
#IK1(0,15,-10,ser)
#time.sleep(0.5)
#IK1(11,11,-10,ser)
#IK2(11,11,-10,ser)
#IK3(11,11,-10,ser)
#IK4(11,11,-10,ser)
######## Old Test Stuff##########
#cmd=chr(0xFF)+chr(11)+chr(70)
#ser.write(cmd)
#ser.write(chr(0xAA))
#compact protocol
#ser.write(chr(0x84)+chr(0x00)+chr(0x70)+chr(0x2E))
#pololu protocol
#ser.write(chr(0xAA)+chr(0x0C)+chr(0x04)+chr(0x00)+chr(0x50)+chr(0x2E))
#mini SSC Protocol
#ser.write(chr(0xFF)+chr(0x00)+chr(0x70))
ser.flush()
|
gpl-3.0
| 4,077,228,614,053,299,700 | 21.949045 | 166 | 0.67305 | false |
ircwaves/gips
|
gips/data/landsat/landsat.py
|
1
|
92875
|
#!/usr/bin/env python
################################################################################
# GIPS: Geospatial Image Processing System
#
# AUTHOR: Matthew Hanson
# EMAIL: matt.a.hanson@gmail.com
#
# Copyright (C) 2014-2018 Applied Geosolutions
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
################################################################################
from __future__ import print_function
from contextlib import contextmanager
import sys
import os
import re
from datetime import datetime, date, timedelta
import shutil
import glob
import traceback
from copy import deepcopy
import commands # TODO unused?
import subprocess
import tempfile
import tarfile
import json
from xml.etree import ElementTree
from backports.functools_lru_cache import lru_cache
import numpy
# once gippy==1.0, switch to GeoRaster.erode
from scipy.ndimage import binary_dilation
import osr
import gippy
from gips import __version__ as __gips_version__
from gips.core import SpatialExtent, TemporalExtent
from gippy.algorithms import ACCA, Fmask, LinearTransform, Indices, AddShadowMask
from gips.data.core import Repository, Data
import gips.data.core
from gips.atmosphere import SIXS, MODTRAN
import gips.atmosphere
from gips.inventory import DataInventory
from gips.utils import RemoveFiles, basename, settings, verbose_out
from gips import utils
from shapely.geometry import Polygon
from shapely.wkt import loads as wkt_loads
import requests
import homura
requirements = ['Py6S>=1.5.0']
def path_row(tile_id):
"""Converts the given landsat tile string into (path, row)."""
return (tile_id[:3], tile_id[3:])
def binmask(arr, bit):
""" Return boolean array indicating which elements as binary have a 1 in
a specified bit position. Input is Numpy array.
"""
return arr & (1 << (bit - 1)) == (1 << (bit - 1))
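# Illustrative use (not in the original module): for arr = numpy.array([5]) (0b101),
# binmask(arr, 1) yields array([ True]) and binmask(arr, 2) yields array([False]).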
class NoSentinelError(Exception):
pass
class CantAlignError(Exception):
pass
class landsatRepository(Repository):
""" Singleton (all class methods) to be overridden by child data classes """
name = 'Landsat'
description = 'Landsat 5 (TM), 7 (ETM+), 8 (OLI)'
_tile_attribute = 'PR'
default_settings = {
'source': 'usgs',
'asset-preference': ('C1', 'C1S3', 'C1GS', 'DN'),
}
@classmethod
def feature2tile(cls, feature):
tile = super(landsatRepository, cls).feature2tile(feature)
return tile.zfill(6)
class landsatAsset(gips.data.core.CloudCoverAsset,
gips.data.core.GoogleStorageMixin,
gips.data.core.S3Mixin):
""" Landsat asset (original raw tar file) """
Repository = landsatRepository
gs_bucket_name = 'gcp-public-data-landsat'
# tassled cap coefficients for L5 and L7
_tcapcoef = [
[0.3561, 0.3972, 0.3904, 0.6966, 0.2286, 0.1596],
[-0.3344, -0.3544, -0.4556, 0.6966, -0.0242, -0.2630],
[0.2626, 0.2141, 0.0926, 0.0656, -0.7629, -0.5388],
[0.0805, -0.0498, 0.1950, -0.1327, 0.5752, -0.7775],
[-0.7252, -0.0202, 0.6683, 0.0631, -0.1494, -0.0274],
[0.4000, -0.8172, 0.3832, 0.0602, -0.1095, 0.0985]
]
# combine sensormeta with sensor
_sensors = {
#'LT4': {
# 'description': 'Landsat 4',
#},
'LT5': {
'code': 'LT05',
'description': 'Landsat 5',
'ee_dataset': 'LANDSAT_TM_C1',
'startdate': date(1984, 3, 1),
'enddate': date(2013, 1, 1),
'bands': ['1', '2', '3', '4', '5', '6', '7'],
'oldbands': ['1', '2', '3', '4', '5', '6', '7'],
'colors': ["BLUE", "GREEN", "RED", "NIR", "SWIR1", "LWIR", "SWIR2"],
# TODO - update bands with actual L5 values (these are L7)
'bandlocs': [0.4825, 0.565, 0.66, 0.825, 1.65, 11.45, 2.22],
'bandwidths': [0.065, 0.08, 0.06, 0.15, 0.2, 2.1, 0.26],
'E': [1983, 1796, 1536, 1031, 220.0, 0, 83.44],
'K1': [0, 0, 0, 0, 0, 607.76, 0],
'K2': [0, 0, 0, 0, 0, 1260.56, 0],
'tcap': _tcapcoef,
},
'LE7': {
'code': 'LE07',
'description': 'Landsat 7',
'ee_dataset': 'LANDSAT_ETM_C1',
'startdate': date(1999, 4, 15),
#bands = ['1','2','3','4','5','6_VCID_1','6_VCID_2','7','8']
'bands': ['1', '2', '3', '4', '5', '6_VCID_1', '7'],
'oldbands': ['1', '2', '3', '4', '5', '61', '7'],
'colors': ["BLUE", "GREEN", "RED", "NIR", "SWIR1", "LWIR", "SWIR2"],
'bandlocs': [0.4825, 0.565, 0.66, 0.825, 1.65, 11.45, 2.22],
'bandwidths': [0.065, 0.08, 0.06, 0.15, 0.2, 2.1, 0.26],
'E': [1997, 1812, 1533, 1039, 230.8, 0, 84.90],
'K1': [0, 0, 0, 0, 0, 666.09, 0],
'K2': [0, 0, 0, 0, 0, 1282.71, 0],
'tcap': _tcapcoef,
},
'LC8': {
'code': 'LC08',
'description': 'Landsat 8',
'ee_dataset': 'LANDSAT_8_C1',
'startdate': date(2013, 4, 1),
# as normal for Landsat 8 but with panchromatic band left out, CF:
# https://landsat.usgs.gov/what-are-band-designations-landsat-satellites
'bands': ['1', '2', '3', '4', '5', '6', '7', '9', '10', '11'],
'oldbands': ['1', '2', '3', '4', '5', '6', '7', '9', '10', '11'],
'colors': ("COASTAL", "BLUE", "GREEN", "RED", "NIR",
"SWIR1", "SWIR2", "CIRRUS", "LWIR", "LWIR2"),
'bandlocs': [0.443, 0.4825, 0.5625, 0.655, 0.865,
1.610, 2.2, 1.375, 10.8, 12.0],
'bandwidths': [0.01, 0.0325, 0.0375, 0.025, 0.02,
0.05, 0.1, 0.015, 0.5, 0.5],
'E': [2638.35, 2031.08, 1821.09, 2075.48, 1272.96,
246.94, 90.61, 369.36, 0, 0],
'K1': [0, 0, 0, 0, 0,
0, 0, 0, 774.89, 480.89],
'K2': [0, 0, 0, 0, 0,
0, 0, 0, 1321.08, 1201.14],
'tcap': [
[0.3029, 0.2786, 0.4733, 0.5599, 0.508, 0.1872],
[-0.2941, -0.243, -0.5424, 0.7276, 0.0713, -0.1608],
[0.1511, 0.1973, 0.3283, 0.3407, -0.7117, -0.4559],
[-0.8239, 0.0849, 0.4396, -0.058, 0.2013, -0.2773],
[-0.3294, 0.0557, 0.1056, 0.1855, -0.4349, 0.8085],
[0.1079, -0.9023, 0.4119, 0.0575, -0.0259, 0.0252],
]
},
'LC8SR': {
'description': 'Landsat 8 Surface Reflectance',
'startdate': date(2013, 4, 1),
}
}
# filename minus extension so that C1 & C1S3 both use the same pattern
# example: LC08_L1TP_013030_20151225_20170224_01_T1
_c1_base_pattern = (
r'^L(?P<sensor>\w)(?P<satellite>\d{2})_'
r'(?P<correction_level>.{4})_(?P<pathrow>\d{6})_(?P<acq_date>\d{8})_'
r'(?P<processing_date>\d{8})_'
r'(?P<coll_num>\d{2})_(?P<coll_cat>.{2})')
cloud_storage_a_types = ('C1S3', 'C1GS') # in order of current preference
_assets = {
# DN & SR assets are no longer fetchable
'DN': {
'sensors': ['LT5', 'LE7', 'LC8'],
'enddate': date(2017, 4, 30),
'pattern': (
r'^L(?P<sensor>[A-Z])(?P<satellite>\d)'
r'(?P<pathrow>\d{6})(?P<acq_date>\d{7})'
r'(?P<gsi>[A-Z]{3})(?P<version>\d{2})\.tar\.gz$'
),
},
'SR': {
'sensors': ['LC8SR'],
'enddate': date(2017, 4, 30),
'pattern': r'^L.*?-SC.*?\.tar\.gz$',
},
# landsat setting 'source' decides which asset type is downloaded:
# source == usgs -> fetch C1 assets from USGS
# source == s3 -> fetch C1S3 assets from AWS S3
'C1': {
'sensors': ['LT5', 'LE7', 'LC8'],
'pattern': _c1_base_pattern + r'\.tar\.gz$',
'latency': 12,
},
'C1S3': {
'sensors': ['LC8'],
'pattern': _c1_base_pattern + r'_S3\.json$',
'latency': 12,
},
'C1GS': {
'sensors': ['LT5', 'LE7', 'LC8'],
'pattern': _c1_base_pattern + r'_gs\.json$',
'latency': 12,
},
}
# Field ids are retrieved with `api.dataset_fields()` call
_ee_datasets = None
# Set the startdate to the min date of the asset's sensors
for asset, asset_info in _assets.iteritems():
asset_info['startdate'] = min(
[_sensors[sensor]['startdate']
for sensor in asset_info['sensors']]
)
_defaultresolution = [30.0, 30.0]
def __init__(self, filename):
""" Inspect a single file and get some metadata """
super(landsatAsset, self).__init__(filename)
fname = os.path.basename(filename)
verbose_out("Attempting to load " + fname, 2)
# determine asset type
match = None
for at, ad in self._assets.items():
match = re.match(ad['pattern'], fname)
if match:
break
if match is None:
raise RuntimeError(
"No valid landsat asset type for '{}'".format(fname), filename)
self.asset = at
# set attribs according to asset type
if at == 'SR':
self.sensor = 'LC8SR'
self.tile = fname[3:9]
self.date = datetime.strptime(fname[9:16], "%Y%j")
self.version = int(fname[20:22])
elif at == 'DN':
self.sensor = fname[0:3]
self.date = datetime.strptime(match.group('acq_date'), "%Y%j")
self.version = int(fname[19:21])
else: # C1 flavors
self.sensor = "L{}{}".format(match.group('sensor'),
int(match.group('satellite')))
self.date = datetime.strptime(match.group('acq_date'), "%Y%m%d")
self.collection_number = match.group('coll_num')
self.collection_category = match.group('coll_cat')
processing_date = datetime.strptime(match.group('processing_date'),
'%Y%m%d')
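# build a sortable version number: the collection number dominates, then days since 2017-01-01
# of the processing date, then a tier bonus (T1 best, then T2, then RT)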
self.version = 1e6 * int(self.collection_number) + \
(processing_date - datetime(2017, 1, 1)).days + \
{'RT': 0, 'T2': 0.5, 'T1': 0.9}[self.collection_category]
if self.asset != 'SR':
self.tile = match.group('pathrow')
smeta = self._sensors[self.sensor]
self.meta = {}
self.meta['bands'] = {}
for i, band in enumerate(smeta['colors']):
wvlen = smeta['bandlocs'][i]
self.meta['bands'][band] = {
'bandnum': i + 1,
'wvlen': wvlen,
'wvlen1': wvlen - smeta['bandwidths'][i] / 2.0,
'wvlen2': wvlen + smeta['bandwidths'][i] / 2.0,
'E': smeta['E'][i],
'K1': smeta['K1'][i],
'K2': smeta['K2'][i],
}
self.visbands = [col for col in smeta['colors'] if col[0:4] != "LWIR"]
self.lwbands = [col for col in smeta['colors'] if col[0:4] == "LWIR"]
if self.sensor not in self._sensors.keys():
raise Exception("Sensor %s not supported: %s" % (self.sensor, filename))
self._version = self.version
def band_paths(self):
if not self.in_cloud_storage():
raise NotImplementedError(
'porting local files to this method is a TODO')
spectral_bands = self.load_c1_json()[
{'C1S3': '30m-bands', 'C1GS': 'spectral-bands'}[self.asset]]
# json module insists on returning unicode, which gippy no likey
return [p.encode('ascii','ignore') for p in spectral_bands]
@classmethod
def cloud_cover_from_mtl_text(cls, text):
"""Reads the text and returns the cloud cover percentage."""
cc_pattern = r".*CLOUD_COVER = (\d+.?\d*)"
cloud_cover = re.match(cc_pattern, text, flags=re.DOTALL)
if not cloud_cover:
raise ValueError("No match for '{}' found in MTL text".format(
cc_pattern))
return float(cloud_cover.group(1))
def cloud_cover(self):
"""Returns the cloud cover for the current asset.
Caches and returns the value found in self.meta['cloud-cover']."""
if 'cloud-cover' in self.meta:
return self.meta['cloud-cover']
# first attempt to find or download an MTL file and get the CC value
text = None
if self.in_cloud_storage():
if os.path.exists(self.filename):
c1json_content = self.load_c1_json()
utils.verbose_out('requesting ' + c1json_content['mtl'], 4)
text = self.gs_backoff_get(c1json_content['mtl']).text
else:
query_results = self.query_gs(self.tile, self.date)
if query_results is None:
raise IOError('Could not locate metadata for'
' ({}, {})'.format(self.tile, self.date))
url = self.gs_object_url_base() + query_results['keys']['mtl']
utils.verbose_out('requesting ' + url, 4)
text = self.gs_backoff_get(url).text
elif os.path.exists(self.filename):
mtlfilename = self.extract(
[f for f in self.datafiles() if f.endswith('MTL.txt')]
)[0]
err_msg = 'Error reading metadata file ' + mtlfilename
with utils.error_handler(err_msg):
with open(mtlfilename, 'r') as mtlfile:
text = mtlfile.read()
if text is not None:
self.meta['cloud-cover'] = self.cloud_cover_from_mtl_text(text)
return self.meta['cloud-cover']
# the MTL file didn't work out; attempt USGS API search instead
api_key = self.ee_login()
self.load_ee_search_keys()
dataset_name = self._sensors[self.sensor]['ee_dataset']
path_field = self._ee_datasets[dataset_name]['WRS Path']
row_field = self._ee_datasets[dataset_name]['WRS Row']
date_string = datetime.strftime(self.date, "%Y-%m-%d")
from usgs import api
response = api.search(
dataset_name, 'EE',
where={path_field: self.tile[0:3], row_field: self.tile[3:]},
start_date=date_string, end_date=date_string, api_key=api_key)
metadata = requests.get(
response['data']['results'][0]['metadataUrl']).text
xml = ElementTree.fromstring(metadata)
xml_magic_string = (".//{http://earthexplorer.usgs.gov/eemetadata.xsd}"
"metadataField[@name='Scene Cloud Cover']")
# Indexing an Element instance returns its children
self.meta['cloud-cover'] = float(xml.find(xml_magic_string)[0].text)
return self.meta['cloud-cover']
def load_c1_json(self):
"""Load the content from a C1 json asset and return it."""
if not self.in_cloud_storage():
return None
with open(self.filename) as aof:
return json.load(aof)
@classmethod
def ee_login(cls):
if not hasattr(cls, '_ee_key'):
username = settings().REPOS['landsat']['username']
password = settings().REPOS['landsat']['password']
from usgs import api
cls._ee_key = api.login(username, password)['data']
return cls._ee_key
@classmethod
def load_ee_search_keys(cls):
if cls._ee_datasets:
return
api_key = cls.ee_login()
from usgs import api
cls._ee_datasets = {
ds: {
r['name']: r['fieldId']
for r in api.dataset_fields(ds, 'EE', api_key)['data']
if r['name'] in [u'WRS Path', u'WRS Row']
}
for ds in ['LANDSAT_8_C1', 'LANDSAT_ETM_C1', 'LANDSAT_TM_C1']
}
_s3_bucket_name = 'landsat-pds'
_s3_url = 'https://landsat-pds.s3.amazonaws.com/'
@classmethod
def query_s3(cls, tile, date, pclouds=100):
"""Handles AWS S3 queries for landsat data.
Returns a filename suitable for naming the constructed asset,
and a list of S3 keys. Returns None if no asset found for the
given scene. Filters by the given cloud percentage.
"""
# for finding assets matching the tile
key_prefix = 'c1/L8/{}/{}/'.format(*path_row(tile))
# match something like: 'LC08_L1TP_013030_20170402_20170414_01_T1'
# filters for date and also tier
# TODO all things not just T1 ----------------vv
fname_fragment = r'L..._...._{}_{}_\d{{8}}_.._T1'.format(
tile, date.strftime('%Y%m%d'))
re_string = key_prefix + fname_fragment
filter_re = re.compile(re_string)
keys = cls.s3_prefix_search(key_prefix)
_30m_tifs = []
_15m_tif = mtl_txt = qa_tif = None
for key in keys:
if not filter_re.match(key):
continue
if key.endswith('B8.TIF'):
_15m_tif = key
elif key.endswith('MTL.txt'):
mtl_txt = key
elif key.endswith('BQA.TIF'):
qa_tif = key
elif key.endswith('.TIF'):
_30m_tifs.append(key)
if len(_30m_tifs) != 10 or None in (_15m_tif, mtl_txt, qa_tif):
verbose_out('Found no complete S3 asset for'
' (C1S3, {}, {})'.format(tile, date), 4)
return None
if pclouds < 100:
mtl_content = requests.get(cls._s3_url + mtl_txt).text
cc = cls.cloud_cover_from_mtl_text(mtl_content)
if cc > pclouds:
cc_msg = ('C1S3 asset found for ({}, {}), but cloud cover'
' percentage ({} %) fails to meet threshold ({} %)')
verbose_out(cc_msg.format(tile, date, cc, pclouds), 3)
return None
else:
cc_msg = ('C1S3 asset found for ({}, {}); cloud cover'
' percentage ({} %) meets threshold ({} %)')
verbose_out(cc_msg.format(tile, date, cc, pclouds), 3)
else:
verbose_out('Found complete C1S3 asset for'
' ({}, {})'.format(tile, date), 3)
# have to custom sort thanks to 'B1.TIF' instead of 'B01.TIF':
def sort_key_f(key):
match = re.search(r'B(\d+).TIF$', key)
# sort by band number; anything weird goes last
return int(match.group(1)) if match else 99
_30m_tifs.sort(key=sort_key_f)
filename = re.search(fname_fragment, _15m_tif).group(0) + '_S3.json'
verbose_out("Constructed S3 asset filename: " + filename, 5)
return {'basename': filename,
'_30m_tifs': _30m_tifs, '_15m_tif': _15m_tif,
'qa_tif': qa_tif, 'mtl_txt': mtl_txt}
@classmethod
def gs_prefix_search(cls, tile, acq_date):
"""Locates the best prefix for the given arguments.
Docs: https://cloud.google.com/storage/docs/json_api/v1/objects/list
"""
# we identify a sensor as eg 'LC8' but in the filename it's 'LC08';
# prefer the latest sensor that has data for the scene
sensors = reversed([s for s in cls._assets['C1GS']['sensors']])
ads = acq_date.strftime('%Y%m%d')
path, row = path_row(tile)
# sample full key (highly redundant, unfortunately):
# we're searching up to here ---------------v
# ('LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/
# 'LC08_L1GT_044034_20130330_20170310_01_T2_MTL.txt')
p_template = '{{}}/01/{}/{}/{{}}_{{}}_{}_{}_'.format(path, row, tile, ads)
for s in sensors:
c = cls._sensors[s]['code']
# find best correction level in desc order of preference
for cl in ('L1TP', 'L1GT', 'L1GS'):
search_prefix = p_template.format(c, c, cl)
full_prefixes = cls.gs_api_search(search_prefix).get('prefixes', [])
for t in ('T1', 'T2', 'RT'): # get best C1 tier available
for p in full_prefixes:
if p.endswith(t + '/'):
return s, p
return None, None
@classmethod
def query_gs(cls, tile, date, pclouds=100):
"""Query for assets in google cloud storage.
Returns {'basename': '...', 'urls': [...]}, else None.
"""
sensor, prefix = cls.gs_prefix_search(tile, date)
if prefix is None:
return None
raw_keys = [i['name'] for i in cls.gs_api_search(prefix)['items']]
# sort and organize the URLs, and check for missing ones
keys = {'spectral-bands': []}
missing_suffixes = []
band_suffixes = ['B{}.TIF'.format(b)
for b in cls._sensors[sensor]['bands']]
for bs in band_suffixes:
try:
keys['spectral-bands'].append(
next(u for u in raw_keys if u.endswith(bs)))
except StopIteration:
missing_suffixes.append(bs)
for k, s in [('mtl', 'MTL.txt'), ('qa-band', 'BQA.TIF')]:
try:
keys[k] = next(u for u in raw_keys if u.endswith(s))
except StopIteration:
missing_suffixes.append(s)
# sanity check that we have all the band keys & metadata key
if missing_suffixes:
err_msg = ("Found GS asset wasn't complete for (C1GS, {}, {});"
" missing files with these suffixes: {}")
verbose_out(err_msg.format(tile, date, missing_suffixes), 2)
return None
# handle pclouds
if pclouds < 100:
r = cls.gs_backoff_get(cls.gs_object_url_base() + keys['mtl'])
cc = cls.cloud_cover_from_mtl_text(r.text)
if cc > pclouds:
cc_msg = ('C1GS asset found for ({}, {}), but cloud cover'
' percentage ({}%) fails to meet threshold ({}%)')
verbose_out(cc_msg.format(tile, date, cc, pclouds), 3)
return None
verbose_out('Found complete C1GS asset for'
' ({}, {})'.format(tile, date), 3)
return dict(basename=(prefix.split('/')[-2] + '_gs.json'), keys=keys)
@classmethod
def query_c1(cls, tile, date, pcover):
"""Query for C1 assets by incquiring of the USGS API"""
path, row = path_row(tile)
fdate = date.strftime('%Y-%m-%d')
cls.load_ee_search_keys()
api_key = cls.ee_login()
from usgs import api
for dataset in cls._ee_datasets.keys():
response = api.search(
dataset, 'EE',
start_date=fdate, end_date=fdate,
where={
cls._ee_datasets[dataset]['WRS Path']: path,
cls._ee_datasets[dataset]['WRS Row']: row,
},
api_key=api_key
)['data']
for result in response['results']:
metadata = requests.get(result['metadataUrl']).text
xml = ElementTree.fromstring(metadata)
# Indexing an Element instance returns its children
scene_cloud_cover = xml.find(
".//{http://earthexplorer.usgs.gov/eemetadata.xsd}metadataField[@name='Scene Cloud Cover']"
)[0].text
land_cloud_cover = xml.find(
".//{http://earthexplorer.usgs.gov/eemetadata.xsd}metadataField[@name='Land Cloud Cover']"
)[0].text
if float(scene_cloud_cover) < pcover:
return {
# actually used
'scene_id': result['entityId'],
'dataset': dataset,
# ignored but required
'basename': result['displayId'] + '.tar.gz',
# ignored
#'scene_cloud_cover': float(scene_cloud_cover),
#'land_cloud_cover': float(land_cloud_cover),
}
return None
@classmethod
@lru_cache(maxsize=100) # cache size chosen arbitrarily
def query_service(cls, asset, tile, date, pclouds=90.0, **ignored):
"""As superclass with optional argument:
Finds assets matching the arguments, where pclouds is the maximum
permitted cloud cover %.
"""
# start with pre-query checks
if not cls.available(asset, date):
return None
if asset in ['DN', 'SR']:
verbose_out('Landsat "{}" assets are no longer fetchable'.format(
asset), 6)
return None
data_src = cls.get_setting('source')
c1_sources = ('s3', 'usgs', 'gs')
if data_src not in c1_sources:
raise ValueError("Invalid data source '{}'; valid sources:"
" {}".format(data_src, c1_sources))
# perform the query, but on a_type-source mismatch, do nothing
rv = {
('C1', 'usgs'): cls.query_c1,
('C1S3', 's3'): cls.query_s3,
('C1GS', 'gs'): cls.query_gs,
}.get((asset, data_src), lambda *_: None)(tile, date, pclouds)
if rv is not None:
rv['a_type'] = asset
return rv
@classmethod
def download(cls, a_type, download_fp, **kwargs):
"""Downloads the asset defined by the kwargs to the full path."""
methods = {'C1': cls.download_c1,
'C1S3': cls.download_s3,
'C1GS': cls.download_gs,
}
if a_type not in methods:
raise ValueError('Unfetchable asset type: {}'.format(a_type))
return methods[a_type](download_fp, **kwargs)
@classmethod
def download_c1(cls, download_fp, scene_id, dataset, **ignored):
"""Fetches the C1 asset defined by the arguments."""
stage_dir = cls.Repository.path('stage')
api_key = cls.ee_login()
from usgs import api
url = api.download(
dataset, 'EE', [str(scene_id)], 'STANDARD', api_key)['data'][0]['url']
with utils.make_temp_dir(prefix='dwnld', dir=stage_dir) as dldir:
homura.download(url, dldir)
granules = os.listdir(dldir)
if len(granules) == 0:
raise Exception("Download didn't seem to"
" produce a file: {}".format(str(granules)))
os.rename(os.path.join(dldir, granules[0]), download_fp)
return True
@classmethod
def download_s3(cls, download_fp, _30m_tifs, _15m_tif, qa_tif, mtl_txt,
**ignored):
"""Fetches AWS S3 assets; currently only 'C1S3' assets.
Doesn't fetch much; instead it constructs VRT-based json.
"""
# construct VSI paths; sample (split into lines):
# /vsis3_streaming/landsat-pds/c1/L8/013/030
# /LC08_L1TP_013030_20171128_20171207_01_T1
# /LC08_L1TP_013030_20171128_20171207_01_T1_B10.TIF
# see also: http://www.gdal.org/gdal_virtual_file_systems.html
asset_content = {
# can/should add metadata/versioning info here
'30m-bands': [cls.s3_vsi_prefix(t) for t in _30m_tifs],
'15m-band': cls.s3_vsi_prefix(_15m_tif),
'qa-band': cls.s3_vsi_prefix(qa_tif),
'mtl': cls._s3_url + mtl_txt,
}
utils.json_dump(asset_content, download_fp)
return True
@classmethod
def download_gs(cls, download_fp, keys, **ignored):
"""Assembles C1 assets that link into Google Cloud Storage.
Constructs a json file containing /vsicurl_streaming/ paths,
similarly to S3 assets.
"""
content = {
'mtl': cls.gs_object_url_base() + keys['mtl'],
'qa-band': cls.gs_vsi_prefix() + keys['qa-band'],
'spectral-bands': [cls.gs_vsi_prefix() + u
for u in keys['spectral-bands']],
}
utils.json_dump(content, download_fp)
return True
def unitless_bands(*bands):
return [{'name': b, 'units': Data._unitless} for b in bands]
class landsatData(gips.data.core.CloudCoverData):
name = 'Landsat'
version = '1.0.1'
inline_archive = True
Asset = landsatAsset
_lt5_startdate = date(1984, 3, 1)
_lc8_startdate = date(2013, 5, 30)
# Group products belong to ('Standard' if not specified)
_productgroups = {
'Index': ['bi', 'evi', 'lswi', 'msavi2', 'ndsi', 'ndvi', 'ndwi',
'satvi', 'vari'],
'Tillage': ['ndti', 'crc', 'sti', 'isti'],
'LC8SR': ['ndvi8sr'],
# 'rhoam', # Dropped for the moment due to issues in ACOLITE
'ACOLITE': ['rhow', 'oc2chl', 'oc3chl', 'fai',
'spm', 'spm2016', 'turbidity', 'acoflags'],
}
__toastring = 'toa: use top of the atmosphere reflectance'
__visible_bands_union = [color for color in Asset._sensors['LC8']['colors'] if 'LWIR' not in color]
# note C1 products can be made with multiple asset types; see below
# TODO don't use manual tables of repeated information in the first place
_products = {
#'Standard':
'rad': {
'assets': ['DN', 'C1'],
'description': 'Surface-leaving radiance',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
# units given by https://landsat.usgs.gov/landsat-8-l8-data-users-handbook-section-5
'bands': [{'name': n, 'units': 'W/m^2/sr/um'} for n in __visible_bands_union],
},
'ref': {
'assets': ['DN', 'C1'],
'description': 'Surface reflectance',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands(*__visible_bands_union)
},
'temp': {
'assets': ['DN', 'C1'],
'description': 'Brightness (apparent) temperature',
'toa': True,
'startdate': _lt5_startdate,
'latency': 1,
# units given by https://landsat.usgs.gov/landsat-8-l8-data-users-handbook-section-5
'bands': [{'name': n, 'units': 'degree Kelvin'} for n in ['LWIR', 'LWIR2']],
},
'acca': {
'assets': ['DN', 'C1'],
'description': 'Automated Cloud Cover Assessment',
'arguments': [
'X: erosion kernel diameter in pixels (default: 5)',
'Y: dilation kernel diameter in pixels (default: 10)',
'Z: cloud height in meters (default: 4000)'
],
'nargs': '*',
'toa': True,
'startdate': _lt5_startdate,
'latency': 1,
# percentage, so unitless, per landsat docs:
# https://landsat.usgs.gov/how-percentage-cloud-cover-calculated
'bands': unitless_bands('finalmask', 'cloudmask', 'ambclouds', 'pass1'),
},
'fmask': {
'assets': ['DN', 'C1'],
'description': 'Fmask cloud cover',
'nargs': '*',
'toa': True,
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('finalmask', 'cloudmask',
'PCP', 'clearskywater', 'clearskyland'),
},
'cloudmask': {
'assets': ['C1'],
'description': ('Cloud (and shadow) mask product based on cloud '
'bits of the quality band'),
'toa': True,
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('cloudmask'),
},
'tcap': {
'assets': ['DN', 'C1'],
'description': 'Tassled cap transformation',
'toa': True,
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('Brightness', 'Greenness', 'Wetness', 'TCT4', 'TCT5', 'TCT6'),
},
'dn': {
'assets': ['DN', 'C1'],
'description': 'Raw digital numbers',
'toa': True,
'startdate': _lt5_startdate,
'latency': 1,
'bands': [{'name': n, 'units': 'W/m^2/sr/um'} for n in __visible_bands_union],
},
'volref': {
'assets': ['DN', 'C1'],
'description': 'Volumetric water reflectance - valid for water only',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
# reflectance is unitless therefore volref should be unitless
'bands': unitless_bands(*__visible_bands_union),
},
'wtemp': {
'assets': ['DN', 'C1'],
'description': 'Water temperature (atmospherically correct) - valid for water only',
# It's not really TOA, but the product code will take care of atm correction itself
'toa': True,
'startdate': _lt5_startdate,
'latency': 1,
'bands': [{'name': n, 'units': 'degree Kelvin'} for n in ['LWIR', 'LWIR2']],
},
'bqashadow': {
'assets': ['DN', 'C1'],
'description': 'LC8 QA + Shadow Smear',
'arguments': [
'X: erosion kernel diameter in pixels (default: 5)',
'Y: dilation kernel diameter in pixels (default: 10)',
'Z: cloud height in meters (default: 4000)'
],
'nargs': '*',
'toa': True,
'startdate': _lc8_startdate,
'latency': 1,
'bands': unitless_bands('bqashadow'),
},
#'Indices': {
'bi': {
'assets': ['DN', 'C1'],
'description': 'Brightness Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('bi'),
},
'evi': {
'assets': ['DN', 'C1'],
'description': 'Enhanced Vegetation Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('evi'),
},
'lswi': {
'assets': ['DN', 'C1'],
'description': 'Land Surface Water Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('lswi'),
},
'msavi2': {
'assets': ['DN', 'C1'],
'description': 'Modified Soil-Adjusted Vegetation Index (revised)',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('msavi2'),
},
'ndsi': {
'assets': ['DN', 'C1'],
'description': 'Normalized Difference Snow Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('ndsi'),
},
'ndvi': {
'assets': ['DN', 'C1'],
'description': 'Normalized Difference Vegetation Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('ndvi'),
},
'ndwi': {
'assets': ['DN', 'C1'],
'description': 'Normalized Difference Water Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('ndwi'),
},
'satvi': {
'assets': ['DN', 'C1'],
'description': 'Soil-Adjusted Total Vegetation Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('satvi'),
},
'vari': {
'assets': ['DN', 'C1'],
'description': 'Visible Atmospherically Resistant Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('vari'),
},
#'Tillage Indices': {
'ndti': {
'assets': ['DN', 'C1'],
'description': 'Normalized Difference Tillage Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('ndti'),
},
'crc': {
'assets': ['DN', 'C1'],
'description': 'Crop Residue Cover',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('crc'),
},
'sti': {
'assets': ['DN', 'C1'],
'description': 'Standard Tillage Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('sti'),
},
'isti': {
'assets': ['DN', 'C1'],
'description': 'Inverse Standard Tillage Index',
'arguments': [__toastring],
'startdate': _lt5_startdate,
'latency': 1,
'bands': unitless_bands('isti'),
},
'ndvi8sr': {
'assets': ['SR'],
'description': 'Normalized Difference Vegetation from LC8SR',
'startdate': _lc8_startdate,
'latency': 1,
'bands': unitless_bands('ndvi8sr'),
},
'landmask': {
'assets': ['SR'],
'description': 'Land mask from LC8SR',
'startdate': _lc8_startdate,
'latency': 1,
'bands': unitless_bands('landmask'),
},
}
gips.atmosphere.add_acolite_product_dicts(_products, 'DN', 'C1')
for pname, pdict in _products.items():
if 'C1' in pdict['assets']:
pdict['assets'] += ['C1S3', 'C1GS']
for product, product_info in _products.iteritems():
product_info['startdate'] = min(
[landsatAsset._assets[asset]['startdate']
for asset in product_info['assets']]
)
if 'C1' in product_info['assets']:
product_info['latency'] = landsatAsset._assets['C1']['latency']
else:
product_info['latency'] = float("inf")
def _process_indices(self, image, asset_fn, metadata, sensor, indices,
coreg_shift=None):
"""Process the given indices and add their files to the inventory.
Image is a GeoImage suitable for generating the indices.
Metadata is passed in to the gippy Indices() call. Sensor is
used to generate index filenames and saving info about the
product to self. Indices is a dict of desired keys; keys and
values are the same as requested products in process(). Coreg_shift
is a dict with keys `x` and `y` used to make affine
transformation for `-coreg` products.
"""
gippy_input = {} # map prod types to temp output filenames for feeding to gippy
tempfps_to_ptypes = {} # map temp output filenames to prod types, for AddFile
for prod_type, pt_split in indices.items():
temp_fp = self.temp_product_filename(sensor, prod_type)
gippy_input[pt_split[0]] = temp_fp
tempfps_to_ptypes[temp_fp] = prod_type
self._time_report("Running Indices")
prodout = Indices(image, gippy_input,
self.prep_meta(asset_fn, metadata))
self._time_report("Finshed running Indices")
if coreg_shift:
for key, val in prodout.iteritems():
self._time_report("coregistering index")
xcoreg = coreg_shift.get('x', 0.0)
ycoreg = coreg_shift.get('y', 0.0)
self._time_report("coreg (x, y) = ({:.3f}, {:.3f})"
.format(xcoreg, ycoreg))
img = gippy.GeoImage(val, True)
coreg_mag = (xcoreg ** 2 + ycoreg ** 2) ** 0.5
insane = coreg_mag > 75 # TODO: actual fix
img.SetMeta("COREG_MAGNITUDE", str(coreg_mag))
if not insane:
affine = img.Affine()
affine[0] += xcoreg
affine[3] += ycoreg
img.SetAffine(affine)
img.Process()
img = None
for temp_fp in prodout.values():
archived_fp = self.archive_temp_path(temp_fp)
self.AddFile(sensor, tempfps_to_ptypes[temp_fp], archived_fp)
def _download_gcs_bands(self, output_dir):
if 'C1GS' not in self.assets:
raise Exception("C1GS asset not found for {} on {}".format(
self.id, self.date
))
band_files = []
for path in self.assets['C1GS'].band_paths():
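# strip the leading GDAL VSI prefix (e.g. /vsicurl_streaming/) to recover the plain URL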
match = re.match("/[\w_]+/(.+)", path)
url = match.group(1)
output_path = os.path.join(
output_dir, os.path.basename(url)
)
self.Asset.gs_backoff_downloader(url, output_path)
band_files.append(output_path)
return band_files
@property
def preferred_asset(self):
if getattr(self, '_preferred_asset', None):
return self._preferred_asset
# figure out which asset should be used for processing
self._preferred_asset = self.assets.keys()[0] # really an asset type string, eg 'SR'
if len(self.assets) > 1:
# if there's more than one, have to choose:
# prefer local over fetching from the cloud, and prefer C1 over DN
at_pref = self.get_setting('asset-preference')
try:
self._preferred_asset = next(at for at in at_pref if at in self.assets)
except StopIteration:
verbose_out('No preferred asset types ({}) found in'
' available assets ({})'.format(self.assets, at_pref),
2, sys.stderr)
self._preferred_asset = None
if 'SR' in self.assets:
# this method is structured poorly; handle an odd error case:
p_types = set(v[0] for v in products.requested.values())
if p_types & {'landmask', 'ndvi8sr'}:
raise NotImplementedError(
"Can't process SR alongside non-SR")
return self._preferred_asset
@Data.proc_temp_dir_manager
def process(self, products=None, overwrite=False, **kwargs):
""" Make sure all products have been processed """
products = super(landsatData, self).process(products, overwrite, **kwargs)
if len(products) == 0:
verbose_out("Skipping processing; no products requested.", 5)
return
if len(self.assets) == 0:
verbose_out("Skipping processing; no assets found.", 5)
return
start = datetime.now()
asset = self.preferred_asset
# TODO: De-hack this to loop over products & handle the SR case --^
if asset == 'SR':
datafiles = self.assets['SR'].datafiles()
imgpaths = dict()
for datafile in datafiles:
key = datafile.partition('_')[2].split('.')[0]
path = os.path.join('/vsitar/' + self.assets['SR'].filename, datafile)
imgpaths[key] = path
# print imgpaths
sensor = 'LC8SR'
for key, val in products.requested.items():
fname = self.temp_product_filename(sensor, key)
if val[0] == "ndvi8sr":
img = gippy.GeoImage([imgpaths['sr_band4'], imgpaths['sr_band5']])
missing = float(img[0].NoDataValue())
red = img[0].Read().astype('float32')
nir = img[1].Read().astype('float32')
wvalid = numpy.where((red != missing) & (nir != missing) & (red + nir != 0.0))
red[wvalid] *= 1.E-4
nir[wvalid] *= 1.E-4
# TODO: change this so that these pixels become missing
red[(red != missing) & (red < 0.0)] = 0.0
red[red > 1.0] = 1.0
nir[(nir != missing) & (nir < 0.0)] = 0.0
nir[nir > 1.0] = 1.0
ndvi = missing + numpy.zeros_like(red)
ndvi[wvalid] = ((nir[wvalid] - red[wvalid]) /
(nir[wvalid] + red[wvalid]))
verbose_out("writing " + fname, 2)
imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32, 1)
imgout.SetNoData(-9999.)
imgout.SetOffset(0.0)
imgout.SetGain(1.0)
imgout.SetBandName('NDVI', 1)
imgout[0].Write(ndvi)
if val[0] == "landmask":
img = gippy.GeoImage([imgpaths['cfmask'], imgpaths['cfmask_conf']])
cfmask = img[0].Read()
# array([ 0, 1, 2, 3, 4, 255], dtype=uint8)
# 0 means clear! but I want 1 to mean clear
cfmask[cfmask > 0] = 2
cfmask[cfmask == 0] = 1
cfmask[cfmask == 2] = 0
verbose_out("writing " + fname, 2)
imgout = gippy.GeoImage(fname, img, gippy.GDT_Byte, 1)
imgout.SetBandName('Land mask', 1)
imgout[0].Write(cfmask)
archive_fp = self.archive_temp_path(fname)
self.AddFile(sensor, key, archive_fp)
else: # C1 types & DN
# Add the sensor for this date to the basename
self.basename = self.basename + '_' + self.sensors[asset]
# Read the assets
with utils.error_handler('Error reading ' + basename(self.assets[asset].filename)):
img = self._readraw(asset)
# This is landsat, so always just one sensor for a given date
sensor = self.sensors[asset]
if asset.startswith('C1'):
# BQA in C1 defines value 1 as "designated fill", in addition to any
# no data value defined for a band. As BQA value 0 is
# undefined, and has not been seen in any assets thus far -- so
# also excluding 0 is OK.
# N.B. the label "designated fill" is mutually exclusive with
# all other bqa labels.
# See https://landsat.usgs.gov/collectionqualityband
qaimg = self._readqa(asset)
img.AddMask(qaimg[0] > 1)
qaimg = None
asset_fn = self.assets[asset].filename
meta = self.assets[asset].meta['bands']
visbands = self.assets[asset].visbands
lwbands = self.assets[asset].lwbands
md = {}
product_is_coreg = [(v and 'coreg' in v) for v in products.requested.values()]
coreg = all(product_is_coreg)
if not coreg and any(product_is_coreg):
# Disallow coreg and non-coreg products in same processing
# call both to avoid having to check each if each product
# needs to be shifted as well as a hint to users who will
# likely only do this as an accident anyway.
raise ValueError("Mixing coreg and non-coreg products is not allowed")
if coreg:
# If possible, use AROP 'ortho' command to co-register this landsat scene
# against a reference Sentinel2 scene. When AROP is successful it creates
# a text file with parameters in it that is needed to apply an offset.
# That text file will get reused if it exists. Otherwise, we will attempt
# to create a new one. This might fail because it cannot find a S2 scene
# within a specified window; in this case simply use the Landsat data as
# it is. This might also fail for mathematical reasons, in which case
# do still create a product? Note S2 is a new sensor so for most years
# the expected situation is not finding a matching scene.
# TODO: call fetch on the landsat scene boundary, thus eliminating the
# case where S2 exists but is not found by GIPS.
# TODO: question: why are we using glob here?
if not glob.glob(os.path.join(self.path, "*coreg_args.txt")):
with utils.error_handler('Problem with running AROP'):
tmpdir_fp = self.generate_temp_path('arop')
utils.mkdir(tmpdir_fp)
try:
# on error, use the unshifted image
s2_export = self.sentinel2_coreg_export(tmpdir_fp)
self.run_arop(s2_export, img['NIR'].Filename())
except NoSentinelError:
verbose_out(
'No Sentinel found for co-registration', 4)
except CantAlignError as cae:
verbose_out('Co-registration error '
'(FALLBACK): {}'.format(cae), 4)
try:
coreg_xshift, coreg_yshift = self.parse_coreg_coefficients()
md['COREG_STATUS'] = 'AROP'
except IOError:
coreg_xshift, coreg_yshift = (0.0, 0.0)
md['COREG_STATUS'] = 'FALLBACK'
# running atmosphere if any products require it
toa = True
for val in products.requested.values():
toa = toa and (self._products[val[0]].get('toa', False) or 'toa' in val)
if not toa:
start = datetime.now()
if not settings().REPOS[self.Repository.name.lower()]['6S']:
raise ValueError("atmospheric correction requested but"
" settings.REPOS['landsat']['6S'] is False.")
with utils.error_handler('Problem running 6S atmospheric model'):
wvlens = [(meta[b]['wvlen1'], meta[b]['wvlen2']) for b in visbands]
geo = self.metadata['geometry']
atm6s = SIXS(visbands, wvlens, geo, self.metadata['datetime'],
sensor=self.sensor_set[0])
md["AOD Source"] = str(atm6s.aod[0])
md["AOD Value"] = str(atm6s.aod[1])
# Break down by group
groups = products.groups()
# ^--- has the info about what products the user requested
# create non-atmospherically corrected apparent reflectance and temperature image
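# in the loops below, TOA reflectance is L * pi * d^2 / (E * cos(theta_s)) with d the Earth-Sun
# distance approximated from the day of year, and brightness temperature comes from inverting the
# Planck relation via the per-band K1/K2 constants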
reflimg = gippy.GeoImage(img)
theta = numpy.pi * self.metadata['geometry']['solarzenith'] / 180.0
sundist = (1.0 - 0.016728 * numpy.cos(numpy.pi * 0.9856 * (float(self.day) - 4.0) / 180.0))
for col in self.assets[asset].visbands:
reflimg[col] = img[col] * (1.0 /
((meta[col]['E'] * numpy.cos(theta)) / (numpy.pi * sundist * sundist)))
for col in self.assets[asset].lwbands:
reflimg[col] = (((img[col].pow(-1)) * meta[col]['K1'] + 1).log().pow(-1)
) * meta[col]['K2'] - 273.15
# Process standard products (this is in the 'C1'/'DN' block)
for key, val in groups['Standard'].items():
p_type = val[0]
if asset not in self._products[p_type]['assets']:
verbose_out("{} not supported for {} assets".format(p_type, asset), 5)
continue
start = datetime.now()
# TODO - update if no atmos desired for others
toa = self._products[val[0]].get('toa', False) or 'toa' in val
# Create product
with utils.error_handler(
'Error creating product {} for {}'
.format(key, basename(self.assets[asset].filename)),
continuable=True):
fname = self.temp_product_filename(sensor, key)
if val[0] == 'acca':
s_azim = self.metadata['geometry']['solarazimuth']
s_elev = 90 - self.metadata['geometry']['solarzenith']
erosion, dilation, cloudheight = 5, 10, 4000
if len(val) >= 4:
erosion, dilation, cloudheight = [int(v) for v in val[1:4]]
resset = set(
[(reflimg[band].Resolution().x(),
reflimg[band].Resolution().y())
for band in (self.assets[asset].visbands +
self.assets[asset].lwbands)]
)
if len(resset) > 1:
raise Exception(
'ACCA requires all bands to have the same '
'spatial resolution. Found:\n\t' + str(resset)
)
imgout = ACCA(reflimg, fname, s_elev, s_azim, erosion, dilation, cloudheight)
elif val[0] == 'fmask':
tolerance, dilation = 3, 5
if len(val) >= 3:
tolerance, dilation = [int(v) for v in val[1:3]]
imgout = Fmask(reflimg, fname, tolerance, dilation)
elif val[0] == 'cloudmask':
qaimg = self._readqa(asset)
npqa = qaimg.Read() # read image file into numpy array
qaimg = None
# https://landsat.usgs.gov/collectionqualityband
# cloudmask = (cloud and
# (cc_low or cc_med or cc_high)
# ) or csc_high
# cloud iff bit 4
# (cc_low or cc_med or cc_high) iff bit 5 or bit 6
# (csc_high) iff bit 8 ***
# NOTE: from USGS tables as of 2018-05-22, cloud
# shadow confidence is either high(3) or low(1).
# No pixels get medium (2). And only no-data pixels
# ever get no (0) confidence.
# GIPPY 1.0 note: rewrite this whole product after
# adding get_bit method to GeoRaster
def get_bit(np_array, i):
"""Return an array with the ith bit extracted from each cell."""
return (np_array >> i) & 0b1
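# worked example (illustrative values): a QA word with bit 4 (cloud) and bit 5 (low cloud
# confidence) set, e.g. 0b110000 == 48, gives get_bit(48, 4) & (get_bit(48, 5) | get_bit(48, 6)) == 1,
# so that pixel ends up in np_cloudmask below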
np_cloudmask = (
get_bit(npqa, 8) # shadow
| (get_bit(npqa, 4) & # cloud
( # with at least low(1) confidence
get_bit(npqa, 5) | get_bit(npqa, 6)
)
)
).astype('uint8')
dilation_width = 20
elem = numpy.ones((dilation_width,) * 2, dtype='uint8')
np_cloudmask_dilated = binary_dilation(
np_cloudmask, structure=elem,
).astype('uint8')
np_cloudmask_dilated *= (npqa != 1)
#
imgout = gippy.GeoImage(fname, img, gippy.GDT_Byte, 1)
verbose_out("writing " + fname, 2)
imgout.SetBandName(
self._products[val[0]]['bands'][0]['name'], 1
)
md.update(
{
'GIPS_LANDSAT_VERSION': self.version,
'GIPS_C1_DILATED_PIXELS': str(dilation_width),
'GIPS_LANDSAT_CLOUDMASK_CLOUD_VALUE': '1',
'GIPS_LANDSAT_CLOUDMASK_CLEAR_OR_NODATA_VALUE': '0',
}
)
####################
# GIPPY1.0 note: replace this block with
# imgout[0].set_nodata(0.)
# imout[0].write_raw(np_cloudmask_dilated)
imgout[0].Write(
np_cloudmask_dilated
)
imgout = None
imgout = gippy.GeoImage(fname, True)
imgout[0].SetNoData(0.)
####################
elif val[0] == 'rad':
imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(visbands))
for i in range(0, imgout.NumBands()):
imgout.SetBandName(visbands[i], i + 1)
imgout.SetNoData(-32768)
imgout.SetGain(0.1)
if toa:
for col in visbands:
img[col].Process(imgout[col])
else:
for col in visbands:
((img[col] - atm6s.results[col][1]) / atm6s.results[col][0]
).Process(imgout[col])
# Mask out any pixel for which any band is nodata
#imgout.ApplyMask(img.DataMask())
elif val[0] == 'ref':
imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(visbands))
for i in range(0, imgout.NumBands()):
imgout.SetBandName(visbands[i], i + 1)
imgout.SetNoData(-32768)
imgout.SetGain(0.0001)
if toa:
for c in visbands:
reflimg[c].Process(imgout[c])
else:
for c in visbands:
(((img[c] - atm6s.results[c][1]) / atm6s.results[c][0])
* (1.0 / atm6s.results[c][2])).Process(imgout[c])
# Mask out any pixel for which any band is nodata
#imgout.ApplyMask(img.DataMask())
elif val[0] == 'tcap':
tmpimg = gippy.GeoImage(reflimg)
tmpimg.PruneBands(['BLUE', 'GREEN', 'RED', 'NIR', 'SWIR1', 'SWIR2'])
arr = numpy.array(self.Asset._sensors[self.sensor_set[0]]['tcap']).astype('float32')
imgout = LinearTransform(tmpimg, fname, arr)
imgout.SetMeta('AREA_OR_POINT', 'Point')
outbands = ['Brightness', 'Greenness', 'Wetness', 'TCT4', 'TCT5', 'TCT6']
for i in range(0, imgout.NumBands()):
imgout.SetBandName(outbands[i], i + 1)
elif val[0] == 'temp':
imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(lwbands))
for i in range(0, imgout.NumBands()):
imgout.SetBandName(lwbands[i], i + 1)
imgout.SetNoData(-32768)
imgout.SetGain(0.1)
[reflimg[col].Process(imgout[col]) for col in lwbands]
elif val[0] == 'dn':
rawimg = self._readraw(asset)
rawimg.SetGain(1.0)
rawimg.SetOffset(0.0)
imgout = rawimg.Process(fname)
rawimg = None
elif val[0] == 'volref':
bands = deepcopy(visbands)
bands.remove("SWIR1")
imgout = gippy.GeoImage(fname, reflimg, gippy.GDT_Int16, len(bands))
[imgout.SetBandName(band, i + 1) for i, band in enumerate(bands)]
imgout.SetNoData(-32768)
imgout.SetGain(0.0001)
r = 0.54 # Water-air reflection
p = 0.03 # Internal Fresnel reflectance
pp = 0.54 # Water-air Fresnel reflectance
n = 1.34 # Refractive index of water
Q = 1.0 # Downwelled irradiance / upwelled radiance
A = ((1 - p) * (1 - pp)) / (n * n)
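# per-band volumetric reflectance below follows (R_band - R_SWIR1) / (A + r*Q*(R_band - R_SWIR1)),
# using SWIR1 as the dark/reference band (a reading of the code, not an authoritative citation)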
srband = reflimg['SWIR1'].Read()
nodatainds = srband == reflimg['SWIR1'].NoDataValue()
for band in bands:
bimg = reflimg[band].Read()
diffimg = bimg - srband
diffimg = diffimg / (A + r * Q * diffimg)
diffimg[bimg == reflimg[band].NoDataValue()] = imgout[band].NoDataValue()
diffimg[nodatainds] = imgout[band].NoDataValue()
imgout[band].Write(diffimg)
elif val[0] == 'wtemp':
raise NotImplementedError('See https://gitlab.com/appliedgeosolutions/gips/issues/155')
imgout = gippy.GeoImage(fname, img, gippy.GDT_Int16, len(lwbands))
[imgout.SetBandName(lwbands[i], i + 1) for i in range(0, imgout.NumBands())]
imgout.SetNoData(-32768)
imgout.SetGain(0.1)
tmpimg = gippy.GeoImage(img)
for col in lwbands:
band = tmpimg[col]
m = meta[col]
lat = self.metadata['geometry']['lat']
lon = self.metadata['geometry']['lon']
dt = self.metadata['datetime']
atmos = MODTRAN(m['bandnum'], m['wvlen1'], m['wvlen2'], dt, lat, lon, True)
e = 0.95
band = (tmpimg[col] - (atmos.output[1] + (1 - e) * atmos.output[2])
) / (atmos.output[0] * e)
band = (((band.pow(-1)) * meta[col]['K1'] + 1).log().pow(-1)
) * meta[col]['K2'] - 273.15
band.Process(imgout[col])
elif val[0] == 'bqashadow':
if 'LC8' not in self.sensor_set:
continue
imgout = gippy.GeoImage(fname, img, gippy.GDT_UInt16, 1)
imgout[0].SetNoData(0)
qaimg = self._readqa(asset)
qadata = qaimg.Read()
qaimg = None
fill = binmask(qadata, 1)
dropped = binmask(qadata, 2)
terrain = binmask(qadata, 3)
cirrus = binmask(qadata, 14)
othercloud = binmask(qadata, 16)
cloud = (cirrus + othercloud) + 2 * (fill + dropped + terrain)
abfn = fname + '-intermediate'
abimg = gippy.GeoImage(abfn, img, gippy.GDT_UInt16, 1)
abimg[0].SetNoData(2)
abimg[0].Write(cloud.astype(numpy.uint16))
abimg.Process()
abimg = None
abimg = gippy.GeoImage(abfn + '.tif')
s_azim = self.metadata['geometry']['solarazimuth']
s_elev = 90 - self.metadata['geometry']['solarzenith']
erosion, dilation, cloudheight = 5, 10, 4000
if len(val) >= 4:
erosion, dilation, cloudheight = [int(v) for v in val[1:4]]
imgout = AddShadowMask(
abimg, imgout, 0, s_elev, s_azim, erosion,
dilation, cloudheight, {'notes': 'dev-version'}
)
imgout.Process()
abimg = None
os.remove(abfn + '.tif')
fname = imgout.Filename()
imgout.SetMeta(self.prep_meta(asset_fn, md))
if coreg:
coreg_mag = (coreg_xshift ** 2 + coreg_yshift ** 2) ** 0.5
insane = coreg_mag > 75 # TODO: actual fix
imgout.SetMeta("COREG_MAGNITUDE", str(coreg_mag))
if not insane:
self._time_report("Setting affine of product")
affine = imgout.Affine()
affine[0] += coreg_xshift
affine[3] += coreg_yshift
imgout.SetAffine(affine)
imgout.Process()
imgout = None
archive_fp = self.archive_temp_path(fname)
self.AddFile(sensor, key, archive_fp)
product_finished_msg = ' -> {}: processed in {}'.format(
os.path.basename(archive_fp), datetime.now() - start)
utils.verbose_out(product_finished_msg, level=2)
# Process Indices (this is in the 'C1'/'DN' block)
indices0 = dict(groups['Index'], **groups['Tillage'])
if len(indices0) > 0:
start = datetime.now()
indices = {}
indices_toa = {}
for key, val in indices0.items():
if 'toa' in val:
indices_toa[key] = val
else:
indices[key] = val
coreg_shift = {}
if coreg:
coreg_shift['x'] = coreg_xshift
coreg_shift['y'] = coreg_yshift
# Run TOA
if len(indices_toa) > 0:
self._process_indices(reflimg, asset_fn, md, sensor,
indices_toa, coreg_shift)
# Run atmospherically corrected
if len(indices) > 0:
for col in visbands:
img[col] = ((img[col] - atm6s.results[col][1]) / atm6s.results[col][0]
) * (1.0 / atm6s.results[col][2])
self._process_indices(img, asset_fn, md, sensor, indices,
coreg_shift)
verbose_out(' -> %s: processed %s in %s' % (
self.basename, indices0.keys(), datetime.now() - start), 1)
img = None
# cleanup scene directory by removing (most) extracted files
with utils.error_handler('Error removing extracted files', continuable=True):
if settings().REPOS[self.Repository.name.lower()]['extract']:
for bname in self.assets[asset].datafiles():
if bname[-7:] != 'MTL.txt':
files = glob.glob(os.path.join(self.path, bname) + '*')
RemoveFiles(files)
# TODO only wtemp uses MODTRAN; do the dir removal there?
modtran_path = os.path.join(self.path, 'modtran')
if os.path.exists(modtran_path):
shutil.rmtree(modtran_path)
if groups['ACOLITE']:
start = datetime.now()
aco_dn = self.generate_temp_path('acolite')
os.mkdir(aco_dn)
a_obj = self.assets[asset]
err_msg = 'Error creating ACOLITE products {} for {}'.format(
groups['ACOLITE'].keys(), os.path.basename(a_obj.filename))
with utils.error_handler(err_msg, continuable=True):
# TODO use self.temp_product_filename(sensor, prod_type):
# then copy to self.path using methods
p_spec = {p: os.path.join(self.path, self.basename + '_' + p + '.tif')
for p in groups['ACOLITE']}
prodout = gips.atmosphere.process_acolite(a_obj, aco_dn,
p_spec, self.prep_meta(asset_fn, md.copy()), reflimg)
endtime = datetime.now()
for k, fn in prodout.items():
self.AddFile(sensor, k, fn)
verbose_out(' -> {}: processed {} in {}'.format(
self.basename, prodout.keys(), endtime - start), 1)
## end ACOLITE
reflimg = None
def filter(self, pclouds=100, sensors=None, **kwargs):
"""Check if Data object passes filter.
User can't enter pclouds, but can pass in --sensors. kwargs
isn't used.
"""
if not super(landsatData, self).filter(pclouds, **kwargs):
return False
if sensors:
if type(sensors) is str:
sensors = [sensors]
sensors = set(sensors)
# ideally, the data class would be trimmed by
if not sensors.intersection(self.sensor_set):
return False
return True
def meta(self, asset_type):
"""Read in Landsat metadata file and return it as a dict.
Also saves it to self.metadata."""
# TODO this belongs in landsatAsset
# test if metadata already read in, if so, return
if hasattr(self, 'metadata'):
return self.metadata
asset_obj = self.assets[asset_type]
c1_json = asset_obj.load_c1_json()
if c1_json:
r = self.Asset.gs_backoff_get(c1_json['mtl'])
r.raise_for_status()
text = r.text
qafn = c1_json['qa-band'].encode('ascii', 'ignore')
else:
datafiles = asset_obj.datafiles()
# save for later; defaults to None
qafn = next((f for f in datafiles if '_BQA.TIF' in f), None)
# locate MTL file and save it to disk if it isn't saved already
mtlfilename = next(f for f in datafiles if 'MTL.txt' in f)
if os.path.exists(mtlfilename) and os.stat(mtlfilename).st_size == 0:
os.remove(mtlfilename)
if not os.path.exists(mtlfilename):
mtlfilename = asset_obj.extract([mtlfilename])[0]
# Read MTL file
with utils.error_handler(
'Error reading metadata file ' + mtlfilename):
text = open(mtlfilename, 'r').read()
if len(text) < 10:
raise IOError('MTL file is too short. {}'.format(mtlfilename))
sensor = asset_obj.sensor
smeta = asset_obj._sensors[sensor]
# Process MTL text - replace old metadata tags with new
# NOTE This is not comprehensive, there may be others
text = text.replace('ACQUISITION_DATE', 'DATE_ACQUIRED')
text = text.replace('SCENE_CENTER_SCAN_TIME', 'SCENE_CENTER_TIME')
for (ob, nb) in zip(smeta['oldbands'], smeta['bands']):
text = re.sub(r'\WLMIN_BAND' + ob, 'RADIANCE_MINIMUM_BAND_' + nb, text)
text = re.sub(r'\WLMAX_BAND' + ob, 'RADIANCE_MAXIMUM_BAND_' + nb, text)
text = re.sub(r'\WQCALMIN_BAND' + ob, 'QUANTIZE_CAL_MIN_BAND_' + nb, text)
text = re.sub(r'\WQCALMAX_BAND' + ob, 'QUANTIZE_CAL_MAX_BAND_' + nb, text)
text = re.sub(r'\WBAND' + ob + '_FILE_NAME', 'FILE_NAME_BAND_' + nb, text)
for l in ('LAT', 'LON', 'MAPX', 'MAPY'):
for c in ('UL', 'UR', 'LL', 'LR'):
text = text.replace('PRODUCT_' + c + '_CORNER_' + l, 'CORNER_' + c + '_' + l + '_PRODUCT')
text = text.replace('\x00', '')
# Remove junk
lines = text.split('\n')
mtl = dict()
for l in lines:
meta = l.replace('\"', "").strip().split('=')
if len(meta) > 1:
key = meta[0].strip()
item = meta[1].strip()
if key != "GROUP" and key != "END_GROUP":
mtl[key] = item
# Extract useful metadata
lats = (float(mtl['CORNER_UL_LAT_PRODUCT']), float(mtl['CORNER_UR_LAT_PRODUCT']),
float(mtl['CORNER_LL_LAT_PRODUCT']), float(mtl['CORNER_LR_LAT_PRODUCT']))
lons = (float(mtl['CORNER_UL_LON_PRODUCT']), float(mtl['CORNER_UR_LON_PRODUCT']),
float(mtl['CORNER_LL_LON_PRODUCT']), float(mtl['CORNER_LR_LON_PRODUCT']))
lat = (min(lats) + max(lats)) / 2.0
lon = (min(lons) + max(lons)) / 2.0
dt = datetime.strptime(mtl['DATE_ACQUIRED'] + ' ' + mtl['SCENE_CENTER_TIME'][:-2], '%Y-%m-%d %H:%M:%S.%f')
clouds = 0.0
with utils.error_handler('Error reading CLOUD_COVER metadata', continuable=True):
# CLOUD_COVER isn't trusted for unknown reasons; previously errors were silenced, but
# now maybe explicit error reports will reveal something.
clouds = float(mtl['CLOUD_COVER'])
filenames = []
gain = []
offset = []
dynrange = []
for i, b in enumerate(smeta['bands']):
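# derive a per-band linear DN-to-radiance rescale: gain = (Lmax - Lmin) / (Qcalmax - Qcalmin), offset = Lmin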
minval = int(float(mtl['QUANTIZE_CAL_MIN_BAND_' + b]))
maxval = int(float(mtl['QUANTIZE_CAL_MAX_BAND_' + b]))
minrad = float(mtl['RADIANCE_MINIMUM_BAND_' + b])
maxrad = float(mtl['RADIANCE_MAXIMUM_BAND_' + b])
gain.append((maxrad - minrad) / (maxval - minval))
offset.append(minrad)
dynrange.append((minval, maxval))
filenames.append(mtl['FILE_NAME_BAND_' + b].strip('\"'))
_geometry = {
'solarzenith': (90.0 - float(mtl['SUN_ELEVATION'])),
'solarazimuth': float(mtl['SUN_AZIMUTH']),
'zenith': 0.0,
'azimuth': 180.0,
'lat': lat,
'lon': lon,
}
self.metadata = {
'filenames': filenames,
'gain': gain,
'offset': offset,
'dynrange': dynrange,
'geometry': _geometry,
'datetime': dt,
'clouds': clouds,
}
if qafn is not None:
self.metadata['qafilename'] = qafn
#self.metadata.update(smeta)
return self.metadata
def _readqa(self, asset_type):
"""Returns a gippy GeoImage containing a QA band.
The QA band belongs to the asset corresponding to the given asset type.
"""
md = self.meta(asset_type)
if self.assets[asset_type].in_cloud_storage():
qafilename = self.Asset._cache_if_vsicurl(
[md['qafilename']],
self._temp_proc_dir
)
return gippy.GeoImage(qafilename)
if settings().REPOS[self.Repository.name.lower()]['extract']:
# Extract files
qadatafile = self.assets[asset_type].extract([md['qafilename']])
else:
# Use tar.gz directly using GDAL's virtual filesystem
qadatafile = os.path.join(
'/vsitar/' + self.assets[asset_type].filename,
md['qafilename'])
qaimg = gippy.GeoImage(qadatafile)
return qaimg
def _readraw(self, asset_type):
"""Return a gippy GeoImage containing raster bands.
The bands are read from the asset corresponding to the given
asset type.
"""
# TODO this belongs in landsatAsset
start = datetime.now()
asset_obj = self.assets[asset_type]
md = self.meta(asset_type)
try:
self._time_report("Gathering band files")
if asset_type == 'C1GS':
paths = self._download_gcs_bands(self._temp_proc_dir)
else:
paths = asset_obj.band_paths()
self._time_report("Finished gathering band files")
except NotImplementedError:
# Extract files, use tarball directly via GDAL's virtual filesystem?
if self.get_setting('extract'):
paths = self.extract(md['filenames'])
else:
paths = [os.path.join('/vsitar/' + asset_obj.filename, f)
for f in md['filenames']]
self._time_report("reading bands")
image = gippy.GeoImage(paths)
image.SetNoData(0)
# TODO - set appropriate metadata
#for key,val in meta.iteritems():
# image.SetMeta(key,str(val))
# Geometry used for calculating incident irradiance
# colors = self.assets['DN']._sensors[self.sensor_set[0]]['colors']
sensor = asset_obj.sensor
colors = asset_obj._sensors[sensor]['colors']
for bi in range(0, len(md['filenames'])):
image.SetBandName(colors[bi], bi + 1)
# need to do this or can we index correctly?
band = image[bi]
gain = md['gain'][bi]
band.SetGain(gain)
band.SetOffset(md['offset'][bi])
dynrange = md['dynrange'][bi]
# #band.SetDynamicRange(dynrange[0], dynrange[1])
# dynrange[0] was used internally for conversion to radiance
# from DN in GeoRaster.Read:
# img = Gain() * (img-_minDC) + Offset(); # (1)
# and with the removal of _minDC and _maxDC it is now:
# img = Gain() * img + Offset(); # (2)
# And 1 can be re-written as:
# img = Gain() * img - Gain() *
# _minDC + Offset(); # (3)
# = Gain() * img + Offset
# - _min * Gain() ; # (4)
# So, since the gippy now has line (2), we can add
# the final term of (4) [as below] to keep that functionality.
image[bi] = band - dynrange[0] * gain
# I verified this by example. With old gippy.GeoRaster:
# In [8]: a.min()
# Out[8]: -64.927711
# with new version.
# In [20]: ascale.min() - 1*0.01298554277169103
# Out[20]: -64.927711800095906
self._time_report("done reading bands")
verbose_out('%s: read in %s' % (image.Basename(), datetime.now() - start), 2)
return image
def _s2_tiles_for_coreg(self, inventory, date_found, landsat_footprint):
if len(inventory) == 0:
verbose_out("No S2 assets found on {}".format(date_found), 3)
return None
raster_vsi_paths = []
s2_footprint = Polygon()
tiles = inventory[date_found].tiles.keys()
for tile in tiles:
s2ao = inventory[date_found][tile].current_asset()
if s2ao.tile[:2] != self.utm_zone():
continue
band_8 = next(f for f in s2ao.datafiles()
if f.endswith('B08.jp2') and tile in basename(f))
vsi_str = (band_8 if s2ao.asset == 'L1CGS' else
'/vsizip/' + os.path.join(s2ao.filename, band_8))
raster_vsi_paths.append(vsi_str)
s2_footprint = s2_footprint.union(wkt_loads(s2ao.footprint()))
if len(raster_vsi_paths) == 0:
verbose_out("No S2 assets found in UTM zone {}".format(self.utm_zone()), 3)
return None
percent_cover = (s2_footprint.intersection(landsat_footprint).area) / landsat_footprint.area
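# only use the S2 mosaic if it covers at least 20% of the Landsat footprint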
if percent_cover > .2:
return raster_vsi_paths
verbose_out("S2 assets do not cover enough of Landsat data.", 3)
return None
def sentinel2_coreg_export(self, tmpdir):
"""
Grabs closest (temporally) sentinel2 tiles and stitches them together
to match this landsat tile's footprint.
tmpdir is a directory name
"""
from gips.data.sentinel2 import sentinel2Asset, sentinel2Data
landsat_shp = self.get_setting('tiles')
spatial_extent = SpatialExtent.factory(
sentinel2Data, site=landsat_shp,
where="pr = '{}'".format(self.id),
ptile=20.0)[0]
fetch = False
# If there is no available sentinel2 scene on that day, search before and after
# until one is found.
delta = timedelta(1)
if self.date < date(2017, 1, 1):
date_found = starting_date = date(2017, 1, 1) + (
self.date - date(self.date.year, 1, 1))
else:
date_found = starting_date = self.date
temporal_extent = TemporalExtent(starting_date.strftime("%Y-%j"))
self._time_report("querying for most recent sentinel2 images")
# TODO: DRY the following statement which is repeated 3 times here
inventory = DataInventory(sentinel2Data, spatial_extent, temporal_extent, fetch=fetch, pclouds=33)
landsat_footprint = wkt_loads(self.assets[next(iter(self.assets))].get_geometry())
geo_images = self._s2_tiles_for_coreg(
inventory, starting_date, landsat_footprint
)
if geo_images:
geo_images = self.Asset._cache_if_vsicurl(geo_images, tmpdir)
date_found = starting_date
while not geo_images:
if delta > timedelta(90):
raise NoSentinelError(
"didn't find s2 images in this utm zone {}, (pathrow={},date={})"
.format(self.utm_zone(), self.id, self.date)
)
temporal_extent = TemporalExtent((starting_date + delta).strftime("%Y-%j"))
inventory = DataInventory(
sentinel2Data, spatial_extent, temporal_extent,
fetch=fetch, pclouds=33
)
geo_images = self._s2_tiles_for_coreg(
inventory, (starting_date + delta), landsat_footprint
)
if geo_images:
geo_images = self.Asset._cache_if_vsicurl(geo_images, tmpdir)
date_found = starting_date + delta
break
temporal_extent = TemporalExtent((starting_date - delta).strftime("%Y-%j"))
inventory = DataInventory(
sentinel2Data, spatial_extent, temporal_extent,
fetch=fetch, pclouds=33
)
geo_images = self._s2_tiles_for_coreg(
inventory, (starting_date - delta), landsat_footprint
)
if geo_images:
geo_images = self.Asset._cache_if_vsicurl(geo_images, tmpdir)
date_found = starting_date - delta
break
delta += timedelta(1)
self._time_report("merge sentinel images to bin")
merge_args = ["gdal_merge.py", "-o", tmpdir + "/sentinel_mosaic.bin",
"-of", "ENVI", "-a_nodata", "0"]
# only use images that are in the same proj as landsat tile
merge_args.extend(geo_images)
subprocess.call(merge_args, env={"GDAL_NUM_THREADS": "1"}, )
self._time_report("done with s2 export")
return tmpdir + '/sentinel_mosaic.bin'
def run_arop(self, base_band_filename, warp_band_filename):
"""
Runs AROP's `ortho` program.
base_band_filename is the filename of the sentinel2 image you want
to warp to
"""
warp_tile = self.id
warp_date = self.date
asset_type = self.preferred_asset
asset = self.assets[asset_type]
with utils.make_temp_dir() as tmpdir:
nir_band = asset._sensors[asset.sensor]['bands'][
asset._sensors[asset.sensor]['colors'].index('NIR')
]
if asset_type not in ['C1GS', 'C1S3']:
warp_band_filename = '/vsitar/' + os.path.join(asset.filename, warp_band_filename)
# TODO: I believe this is a singleton, so it should go away
warp_bands_bin = []
band_bin = basename(warp_band_filename) + '.bin'
cmd = ["gdal_translate", "-of", "ENVI",
warp_band_filename,
os.path.join(tmpdir, band_bin)]
subprocess.call(args=cmd, cwd=tmpdir)
warp_bands_bin.append(band_bin)
# make parameter file
with open(os.path.join(os.path.dirname(__file__), 'input_file_tmp.inp'), 'r') as input_template:
template = input_template.read()
base_band_img = gippy.GeoImage(base_band_filename)
warp_base_band_filename = [
f for f in warp_bands_bin
if f.endswith("B{}.bin".format(nir_band))
][0]
warp_base_band_img = gippy.GeoImage(os.path.join(tmpdir, warp_base_band_filename))
base_pixel_size = abs(base_band_img.Resolution().x())
warp_pixel_size = abs(warp_base_band_img.Resolution().x())
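# use the coarser of the two resolutions as the common output grid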
out_pixel_size = max(base_pixel_size, warp_pixel_size)
parameters = template.format(
base_satellite='Sentinel2',
base_band=base_band_filename,
base_nsample=base_band_img.XSize(),
base_nline=base_band_img.YSize(),
base_pixel_size=base_pixel_size,
base_upper_left_x=base_band_img.MinXY().x(),
base_upper_left_y=base_band_img.MaxXY().y(),
base_utm=self.utm_zone(),
warp_satellite='Landsat8',
warp_nbands=len(warp_bands_bin),
warp_bands=' '.join([os.path.join(tmpdir, band) for band in warp_bands_bin]),
warp_base_band=os.path.join(tmpdir, warp_base_band_filename),
warp_data_type=' '.join(([
str(warp_base_band_img.DataType())
] * len(warp_bands_bin))),
warp_nsample=warp_base_band_img.XSize(),
warp_nline=warp_base_band_img.YSize(),
warp_pixel_size=warp_pixel_size,
warp_upper_left_x=warp_base_band_img.MinXY().x(),
warp_upper_left_y=warp_base_band_img.MaxXY().y(),
warp_utm=self.utm_zone(),
out_bands=' '.join(
[os.path.join(tmpdir, basename(band) + '_warped.bin')
for band in warp_bands_bin]
),
out_base_band=os.path.join(
tmpdir, basename(warp_base_band_filename)) + '_warped.bin',
out_pixel_size=out_pixel_size,
tmpdir=tmpdir
)
parameter_file = os.path.join(tmpdir, 'parameter_file.inp')
with open(parameter_file, 'w') as param_file:
param_file.write(parameters)
shutil.copyfile(
os.path.join(os.path.dirname(__file__), 'lndortho.cps_par.ini'),
os.path.join(tmpdir, 'lndortho.cps_par.ini')
)
try:
# subprocess has a timeout option as of python 3.3
ORTHO_TIMEOUT = 10 * 60
cmd = ["timeout", str(ORTHO_TIMEOUT),
"ortho", "-r", parameter_file]
returnstatus = subprocess.check_call(args=cmd, cwd=tmpdir)
except subprocess.CalledProcessError as e:
raise CantAlignError(repr((warp_tile, warp_date)))
with open('{}/cp_log.txt'.format(tmpdir), 'r') as log:
xcoef_re = re.compile(r"x' += +([\d\-\.]+) +\+ +[\d\-\.]+ +\* +x +\+ +[\d\-\.]+ +\* y")
ycoef_re = re.compile(r"y' += +([\d\-\.]+) +\+ +[\d\-\.]+ +\* +x +\+ +[\d\-\.]+ +\* y")
xcoef = ycoef = None
for line in log:
x_match = xcoef_re.match(line)
if x_match:
xcoef = float(x_match.group(1))
y_match = ycoef_re.match(line)
if y_match:
ycoef = float(y_match.group(1))
if xcoef is None:
raise CantAlignError('AROP: no coefs found in cp_log --> '
+ repr((warp_tile, warp_date)))
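# translate AROP's pixel-space control-point fit into map-unit x/y shifts between the warp and base image origins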
x_shift = ((base_band_img.MinXY().x() - warp_base_band_img.MinXY().x()) / out_pixel_size - xcoef) * out_pixel_size
y_shift = ((base_band_img.MaxXY().y() - warp_base_band_img.MaxXY().y()) / out_pixel_size + ycoef) * out_pixel_size
with open('{}/{}_{}_coreg_args.txt'.format(self.path, self.id, datetime.strftime(self.date, "%Y%j")), 'w') as coreg_args:
coreg_args.write("x: {}\n".format(x_shift))
coreg_args.write("y: {}".format(y_shift))
def utm_zone(self):
"""
Parse the UTM zone out of the asset's MTL metadata.
"""
if getattr(self, 'utm_zone_number', None):
return self.utm_zone_number
self.utm_zone_number = None
asset = self.assets[self.preferred_asset]
# TODO: stick this somewhere better. Just hacking to make it work now.
if asset.asset in ['C1S3', 'C1GS']:
if os.path.exists(asset.filename):
c1json_content = asset.load_c1_json()
utils.verbose_out('requesting ' + c1json_content['mtl'], 4)
text = self.Asset.gs_backoff_get(c1json_content['mtl']).text
else:
query_results = asset.query_gs(asset.tile, asset.date)
if query_results is None:
raise IOError('Could not locate metadata for'
' ({}, {})'.format(self.tile, self.date))
url = self.Asset.gs_object_url_base() + query_results['keys']['mtl']
utils.verbose_out('requesting ' + url, 4)
text = self.Asset.gs_backoff_get(url).text
else:
print('asset is "{}"'.format(asset.asset))
mtl = asset.extract([f for f in asset.datafiles() if f.endswith("MTL.txt")])[0]
with open(mtl, 'r') as mtl_file:
text = mtl_file.read()
match = re.search(r".*UTM_ZONE = (\d+).*", text)
if match:
self.utm_zone_number = match.group(1)
else:
raise ValueError('MTL file does not contain UTM_ZONE')
print('utm_zone is ' + str(self.utm_zone_number))
return self.utm_zone_number
def parse_coreg_coefficients(self):
"""
Parse out coregistration coefficients from asset's `*_coreg_args.txt`
file.
"""
date = datetime.strftime(self.date, "%Y%j")
cp_log = "{}/{}_{}_coreg_args.txt".format(self.path, self.id, date)
with open(cp_log, 'r') as log:
xcoef_re = re.compile(r"x: (-?\d+\.?\d*)")
ycoef_re = re.compile(r"y: (-?\d+\.?\d*)")
for line in log:
x_match = xcoef_re.match(line)
if x_match:
xcoef = float(x_match.groups()[0])
y_match = ycoef_re.match(line)
if y_match:
ycoef = float(y_match.groups()[0])
return xcoef, ycoef
|
gpl-2.0
| 6,233,482,248,751,617,000 | 42.850331 | 133 | 0.495031 | false |
gunan/tensorflow
|
tensorflow/python/keras/engine/data_adapter_test.py
|
1
|
43158
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""DataAdapter tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.data.experimental.ops import cardinality
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.engine import data_adapter
from tensorflow.python.keras.utils import data_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import test
from tensorflow.python.util import nest
class DummyArrayLike(object):
"""Dummy array-like object."""
def __init__(self, data):
self.data = data
def __len__(self):
return len(self.data)
def __getitem__(self, key):
return self.data[key]
@property
def shape(self):
return self.data.shape
@property
def dtype(self):
return self.data.dtype
def fail_on_convert(x, **kwargs):
_ = x
_ = kwargs
raise TypeError('Cannot convert DummyArrayLike to a tensor')
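# Register a conversion that always fails so the tests below catch any attempt to convert a DummyArrayLike to a tensor wholesale.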
ops.register_tensor_conversion_function(DummyArrayLike, fail_on_convert)
class DataAdapterTestBase(keras_parameterized.TestCase):
def setUp(self):
super(DataAdapterTestBase, self).setUp()
self.batch_size = 5
self.numpy_input = np.zeros((50, 10))
self.numpy_target = np.ones(50)
self.tensor_input = constant_op.constant(2.0, shape=(50, 10))
self.tensor_target = array_ops.ones((50,))
self.arraylike_input = DummyArrayLike(self.numpy_input)
self.arraylike_target = DummyArrayLike(self.numpy_target)
self.dataset_input = dataset_ops.DatasetV2.from_tensor_slices(
(self.numpy_input, self.numpy_target)).shuffle(50).batch(
self.batch_size)
def generator():
while True:
yield (np.zeros((self.batch_size, 10)), np.ones(self.batch_size))
self.generator_input = generator()
self.iterator_input = data_utils.threadsafe_generator(generator)()
self.sequence_input = TestSequence(batch_size=self.batch_size,
feature_shape=10)
self.model = keras.models.Sequential(
[keras.layers.Dense(8, input_shape=(10,), activation='softmax')])
class TestSequence(data_utils.Sequence):
def __init__(self, batch_size, feature_shape):
self.batch_size = batch_size
self.feature_shape = feature_shape
def __getitem__(self, item):
return (np.zeros((self.batch_size, self.feature_shape)),
np.ones((self.batch_size,)))
def __len__(self):
return 10
class TensorLikeDataAdapterTest(DataAdapterTestBase):
def setUp(self):
super(TensorLikeDataAdapterTest, self).setUp()
self.adapter_cls = data_adapter.TensorLikeDataAdapter
def test_can_handle_numpy(self):
self.assertTrue(self.adapter_cls.can_handle(self.numpy_input))
self.assertTrue(
self.adapter_cls.can_handle(self.numpy_input, self.numpy_target))
self.assertFalse(self.adapter_cls.can_handle(self.dataset_input))
self.assertFalse(self.adapter_cls.can_handle(self.generator_input))
self.assertFalse(self.adapter_cls.can_handle(self.sequence_input))
def test_size_numpy(self):
adapter = self.adapter_cls(
self.numpy_input, self.numpy_target, batch_size=5)
self.assertEqual(adapter.get_size(), 10)
self.assertFalse(adapter.has_partial_batch())
def test_batch_size_numpy(self):
adapter = self.adapter_cls(
self.numpy_input, self.numpy_target, batch_size=5)
self.assertEqual(adapter.batch_size(), 5)
def test_partial_batch_numpy(self):
adapter = self.adapter_cls(
self.numpy_input, self.numpy_target, batch_size=4)
self.assertEqual(adapter.get_size(), 13) # 50/4
self.assertTrue(adapter.has_partial_batch())
self.assertEqual(adapter.partial_batch_size(), 2)
def test_epochs(self):
num_epochs = 3
adapter = self.adapter_cls(
self.numpy_input, self.numpy_target, batch_size=5, epochs=num_epochs)
ds_iter = iter(adapter.get_dataset())
num_batches_per_epoch = self.numpy_input.shape[0] // 5
for _ in range(num_batches_per_epoch * num_epochs):
next(ds_iter)
with self.assertRaises(StopIteration):
next(ds_iter)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training_numpy(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.numpy_input, self.numpy_target, batch_size=5)
def test_can_handle_pandas(self):
try:
import pandas as pd # pylint: disable=g-import-not-at-top
except ImportError:
self.skipTest('Skipping test because pandas is not installed.')
self.assertTrue(self.adapter_cls.can_handle(pd.DataFrame(self.numpy_input)))
self.assertTrue(
self.adapter_cls.can_handle(pd.DataFrame(self.numpy_input)[0]))
self.assertTrue(
self.adapter_cls.can_handle(
pd.DataFrame(self.numpy_input),
pd.DataFrame(self.numpy_input)[0]))
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training_pandas(self):
try:
import pandas as pd # pylint: disable=g-import-not-at-top
except ImportError:
self.skipTest('Skipping test because pandas is not installed.')
input_a = keras.Input(shape=(3,), name='input_a')
input_b = keras.Input(shape=(3,), name='input_b')
input_c = keras.Input(shape=(1,), name='input_b')
x = keras.layers.Dense(4, name='dense_1')(input_a)
y = keras.layers.Dense(3, name='dense_2')(input_b)
z = keras.layers.Dense(1, name='dense_3')(input_c)
model_1 = keras.Model(inputs=input_a, outputs=x)
model_2 = keras.Model(inputs=[input_a, input_b], outputs=[x, y])
model_3 = keras.Model(inputs=input_c, outputs=z)
model_1.compile(optimizer='rmsprop', loss='mse')
model_2.compile(optimizer='rmsprop', loss='mse')
input_a_np = np.random.random((10, 3))
input_b_np = np.random.random((10, 3))
input_a_df = pd.DataFrame(input_a_np)
input_b_df = pd.DataFrame(input_b_np)
output_a_df = pd.DataFrame(np.random.random((10, 4)))
output_b_df = pd.DataFrame(np.random.random((10, 3)))
model_1.fit(input_a_df,
output_a_df)
model_2.fit([input_a_df, input_b_df],
[output_a_df, output_b_df])
model_1.fit([input_a_df],
[output_a_df])
model_1.fit({'input_a': input_a_df},
output_a_df)
model_2.fit({'input_a': input_a_df, 'input_b': input_b_df},
[output_a_df, output_b_df])
model_1.evaluate(input_a_df,
output_a_df)
model_2.evaluate([input_a_df, input_b_df],
[output_a_df, output_b_df])
model_1.evaluate([input_a_df],
[output_a_df])
model_1.evaluate({'input_a': input_a_df},
output_a_df)
model_2.evaluate({'input_a': input_a_df, 'input_b': input_b_df},
[output_a_df, output_b_df])
# Verify predicting on pandas vs numpy returns the same result
predict_1_pandas = model_1.predict(input_a_df)
predict_2_pandas = model_2.predict([input_a_df, input_b_df])
predict_3_pandas = model_3.predict(input_a_df[0])
predict_1_numpy = model_1.predict(input_a_np)
predict_2_numpy = model_2.predict([input_a_np, input_b_np])
predict_3_numpy = model_3.predict(np.asarray(input_a_df[0]))
self.assertAllClose(predict_1_numpy, predict_1_pandas)
self.assertAllClose(predict_2_numpy, predict_2_pandas)
self.assertAllClose(predict_3_numpy, predict_3_pandas)
# Extra ways to pass in dataframes
model_1.predict([input_a_df])
model_1.predict({'input_a': input_a_df})
model_2.predict({'input_a': input_a_df, 'input_b': input_b_df})
def test_can_handle(self):
self.assertTrue(self.adapter_cls.can_handle(self.tensor_input))
self.assertTrue(
self.adapter_cls.can_handle(self.tensor_input, self.tensor_target))
self.assertFalse(self.adapter_cls.can_handle(self.arraylike_input))
self.assertFalse(
self.adapter_cls.can_handle(self.arraylike_input,
self.arraylike_target))
self.assertFalse(self.adapter_cls.can_handle(self.dataset_input))
self.assertFalse(self.adapter_cls.can_handle(self.generator_input))
self.assertFalse(self.adapter_cls.can_handle(self.sequence_input))
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.tensor_input, self.tensor_target, batch_size=5)
def test_size(self):
adapter = self.adapter_cls(
self.tensor_input, self.tensor_target, batch_size=5)
self.assertEqual(adapter.get_size(), 10)
self.assertFalse(adapter.has_partial_batch())
def test_shuffle_correctness(self):
with context.eager_mode():
num_samples = 100
batch_size = 32
x = np.arange(num_samples)
np.random.seed(99)
adapter = self.adapter_cls(
x, y=None, batch_size=batch_size, shuffle=True, epochs=2)
def _get_epoch(ds_iter):
ds_data = []
for _ in range(int(math.ceil(num_samples / batch_size))):
ds_data.append(next(ds_iter)[0].numpy())
return np.concatenate(ds_data)
ds_iter = iter(adapter.get_dataset())
# First epoch.
epoch_data = _get_epoch(ds_iter)
# Check that shuffling occurred.
self.assertNotAllClose(x, epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(epoch_data))
# Second epoch.
second_epoch_data = _get_epoch(ds_iter)
# Check that shuffling occurred.
self.assertNotAllClose(x, second_epoch_data)
# Check that shuffling is different across epochs.
self.assertNotAllClose(epoch_data, second_epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(second_epoch_data))
def test_batch_shuffle_correctness(self):
with context.eager_mode():
num_samples = 100
batch_size = 6
x = np.arange(num_samples)
np.random.seed(99)
adapter = self.adapter_cls(
x, y=None, batch_size=batch_size, shuffle='batch', epochs=2)
def _get_epoch_batches(ds_iter):
ds_data = []
for _ in range(int(math.ceil(num_samples / batch_size))):
ds_data.append(next(ds_iter)[0].numpy())
return ds_data
ds_iter = iter(adapter.get_dataset())
# First epoch.
epoch_batch_data = _get_epoch_batches(ds_iter)
epoch_data = np.concatenate(epoch_batch_data)
def _verify_batch(batch):
# Verify that a batch contains only contiguous data, and that it has
# been shuffled.
shuffled_batch = np.sort(batch)
self.assertNotAllClose(batch, shuffled_batch)
for i in range(1, len(batch)):
self.assertEqual(shuffled_batch[i-1] + 1, shuffled_batch[i])
# Assert that the data within each batch remains contiguous
for batch in epoch_batch_data:
_verify_batch(batch)
# Check that individual batches are unshuffled
# Check that shuffling occurred.
self.assertNotAllClose(x, epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(epoch_data))
# Second epoch.
second_epoch_batch_data = _get_epoch_batches(ds_iter)
second_epoch_data = np.concatenate(second_epoch_batch_data)
# Assert that the data within each batch remains contiguous
for batch in second_epoch_batch_data:
_verify_batch(batch)
# Check that shuffling occurred.
self.assertNotAllClose(x, second_epoch_data)
# Check that shuffling is different across epochs.
self.assertNotAllClose(epoch_data, second_epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(second_epoch_data))
@parameterized.named_parameters(
('batch_size_5', 5, None, 5),
('batch_size_50', 50, 4, 50), # Sanity check: batch_size takes precedence
('steps_1', None, 1, 50),
('steps_4', None, 4, 13),
)
def test_batch_size(self, batch_size_in, steps, batch_size_out):
adapter = self.adapter_cls(
self.tensor_input, self.tensor_target, batch_size=batch_size_in,
steps=steps)
self.assertEqual(adapter.batch_size(), batch_size_out)
@parameterized.named_parameters(
('batch_size_5', 5, None, 10, 0),
('batch_size_4', 4, None, 13, 2),
('steps_1', None, 1, 1, 0),
('steps_5', None, 5, 5, 0),
('steps_4', None, 4, 4, 11),
)
def test_partial_batch(
self, batch_size_in, steps, size, partial_batch_size):
adapter = self.adapter_cls(
self.tensor_input, self.tensor_target, batch_size=batch_size_in,
steps=steps)
self.assertEqual(adapter.get_size(), size) # 50/steps
self.assertEqual(adapter.has_partial_batch(), bool(partial_batch_size))
self.assertEqual(adapter.partial_batch_size(), partial_batch_size or None)
class GenericArrayLikeDataAdapterTest(DataAdapterTestBase):
def setUp(self):
super(GenericArrayLikeDataAdapterTest, self).setUp()
self.adapter_cls = data_adapter.GenericArrayLikeDataAdapter
def test_can_handle_some_numpy(self):
self.assertTrue(self.adapter_cls.can_handle(
self.arraylike_input))
self.assertTrue(
self.adapter_cls.can_handle(self.arraylike_input,
self.arraylike_target))
# Because adapters are mutually exclusive, don't handle cases
# where all the data is numpy or an eagertensor
self.assertFalse(self.adapter_cls.can_handle(self.numpy_input))
self.assertFalse(
self.adapter_cls.can_handle(self.numpy_input,
self.numpy_target))
self.assertFalse(self.adapter_cls.can_handle(self.tensor_input))
self.assertFalse(
self.adapter_cls.can_handle(self.tensor_input, self.tensor_target))
# But do handle mixes that include generic arraylike data
self.assertTrue(
self.adapter_cls.can_handle(self.numpy_input,
self.arraylike_target))
self.assertTrue(
self.adapter_cls.can_handle(self.arraylike_input,
self.numpy_target))
self.assertTrue(
self.adapter_cls.can_handle(self.arraylike_input,
self.tensor_target))
self.assertTrue(
self.adapter_cls.can_handle(self.tensor_input,
self.arraylike_target))
self.assertFalse(self.adapter_cls.can_handle(self.dataset_input))
self.assertFalse(self.adapter_cls.can_handle(self.generator_input))
self.assertFalse(self.adapter_cls.can_handle(self.sequence_input))
def test_size(self):
adapter = self.adapter_cls(
self.arraylike_input,
self.arraylike_target, batch_size=5)
self.assertEqual(adapter.get_size(), 10)
self.assertFalse(adapter.has_partial_batch())
def test_epochs(self):
num_epochs = 3
adapter = self.adapter_cls(
self.arraylike_input,
self.numpy_target, batch_size=5, epochs=num_epochs)
ds_iter = iter(adapter.get_dataset())
num_batches_per_epoch = self.numpy_input.shape[0] // 5
for _ in range(num_batches_per_epoch * num_epochs):
next(ds_iter)
with self.assertRaises(StopIteration):
next(ds_iter)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training(self):
# First verify that DummyArrayLike can't be converted to a Tensor
with self.assertRaises(TypeError):
ops.convert_to_tensor_v2(self.arraylike_input)
# Then train on the array like.
# It should not be converted to a tensor directly (which would force it into
# memory), only the sliced data should be converted.
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.arraylike_input,
self.arraylike_target, batch_size=5)
self.model.fit(self.arraylike_input,
self.arraylike_target,
shuffle=True, batch_size=5)
self.model.fit(self.arraylike_input,
self.arraylike_target,
shuffle='batch', batch_size=5)
self.model.evaluate(self.arraylike_input,
self.arraylike_target, batch_size=5)
self.model.predict(self.arraylike_input, batch_size=5)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training_numpy_target(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.arraylike_input,
self.numpy_target, batch_size=5)
self.model.fit(self.arraylike_input,
self.numpy_target, shuffle=True,
batch_size=5)
self.model.fit(self.arraylike_input,
self.numpy_target, shuffle='batch',
batch_size=5)
self.model.evaluate(self.arraylike_input,
self.numpy_target, batch_size=5)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training_tensor_target(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.arraylike_input,
self.tensor_target, batch_size=5)
self.model.fit(self.arraylike_input,
self.tensor_target, shuffle=True,
batch_size=5)
self.model.fit(self.arraylike_input,
self.tensor_target, shuffle='batch',
batch_size=5)
self.model.evaluate(self.arraylike_input,
self.tensor_target, batch_size=5)
def test_shuffle_correctness(self):
with context.eager_mode():
num_samples = 100
batch_size = 32
x = DummyArrayLike(np.arange(num_samples))
np.random.seed(99)
adapter = self.adapter_cls(
x, y=None, batch_size=batch_size, shuffle=True, epochs=2)
def _get_epoch(ds_iter):
ds_data = []
for _ in range(int(math.ceil(num_samples / batch_size))):
ds_data.append(next(ds_iter)[0].numpy())
return np.concatenate(ds_data)
ds_iter = iter(adapter.get_dataset())
# First epoch.
epoch_data = _get_epoch(ds_iter)
# Check that shuffling occurred.
self.assertNotAllClose(x, epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(epoch_data))
# Second epoch.
second_epoch_data = _get_epoch(ds_iter)
# Check that shuffling occurred.
self.assertNotAllClose(x, second_epoch_data)
# Check that shuffling is different across epochs.
self.assertNotAllClose(epoch_data, second_epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(second_epoch_data))
def test_batch_shuffle_correctness(self):
with context.eager_mode():
num_samples = 100
batch_size = 6
x = DummyArrayLike(np.arange(num_samples))
np.random.seed(99)
adapter = self.adapter_cls(
x, y=None, batch_size=batch_size, shuffle='batch', epochs=2)
def _get_epoch_batches(ds_iter):
ds_data = []
for _ in range(int(math.ceil(num_samples / batch_size))):
ds_data.append(next(ds_iter)[0].numpy())
return ds_data
ds_iter = iter(adapter.get_dataset())
# First epoch.
epoch_batch_data = _get_epoch_batches(ds_iter)
epoch_data = np.concatenate(epoch_batch_data)
def _verify_batch(batch):
# Verify that a batch contains only contiguous data, but that it has
# been shuffled.
shuffled_batch = np.sort(batch)
self.assertNotAllClose(batch, shuffled_batch)
for i in range(1, len(batch)):
self.assertEqual(shuffled_batch[i-1] + 1, shuffled_batch[i])
# Assert that the data within each batch is shuffled contiguous data
for batch in epoch_batch_data:
_verify_batch(batch)
# Check that individual batches are unshuffled
# Check that shuffling occurred.
self.assertNotAllClose(x, epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(epoch_data))
# Second epoch.
second_epoch_batch_data = _get_epoch_batches(ds_iter)
second_epoch_data = np.concatenate(second_epoch_batch_data)
# Assert that the data within each batch remains contiguous
for batch in second_epoch_batch_data:
_verify_batch(batch)
# Check that shuffling occurred.
self.assertNotAllClose(x, second_epoch_data)
# Check that shuffling is different across epochs.
self.assertNotAllClose(epoch_data, second_epoch_data)
# Check that each elements appears, and only once.
self.assertAllClose(x, np.sort(second_epoch_data))
@parameterized.named_parameters(
('batch_size_5', 5, None, 5),
('batch_size_50', 50, 4, 50), # Sanity check: batch_size takes precedence
('steps_1', None, 1, 50),
('steps_4', None, 4, 13),
)
def test_batch_size(self, batch_size_in, steps, batch_size_out):
adapter = self.adapter_cls(
self.arraylike_input,
self.arraylike_target, batch_size=batch_size_in,
steps=steps)
self.assertEqual(adapter.batch_size(), batch_size_out)
@parameterized.named_parameters(
('batch_size_5', 5, None, 10, 0),
('batch_size_4', 4, None, 13, 2),
('steps_1', None, 1, 1, 0),
('steps_5', None, 5, 5, 0),
('steps_4', None, 4, 4, 11),
)
def test_partial_batch(
self, batch_size_in, steps, size, partial_batch_size):
adapter = self.adapter_cls(
self.arraylike_input, self.arraylike_target,
batch_size=batch_size_in,
steps=steps)
self.assertEqual(adapter.get_size(), size) # 50/steps
self.assertEqual(adapter.has_partial_batch(), bool(partial_batch_size))
self.assertEqual(adapter.partial_batch_size(), partial_batch_size or None)
class DatasetAdapterTest(DataAdapterTestBase):
def setUp(self):
super(DatasetAdapterTest, self).setUp()
self.adapter_cls = data_adapter.DatasetAdapter
def test_can_handle(self):
self.assertFalse(self.adapter_cls.can_handle(self.numpy_input))
self.assertFalse(self.adapter_cls.can_handle(self.tensor_input))
self.assertTrue(self.adapter_cls.can_handle(self.dataset_input))
self.assertFalse(self.adapter_cls.can_handle(self.generator_input))
self.assertFalse(self.adapter_cls.can_handle(self.sequence_input))
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training(self):
dataset = self.adapter_cls(self.dataset_input).get_dataset()
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(dataset)
def test_size(self):
adapter = self.adapter_cls(self.dataset_input)
self.assertIsNone(adapter.get_size())
def test_batch_size(self):
adapter = self.adapter_cls(self.dataset_input)
self.assertIsNone(adapter.batch_size())
def test_partial_batch(self):
adapter = self.adapter_cls(self.dataset_input)
self.assertFalse(adapter.has_partial_batch())
self.assertIsNone(adapter.partial_batch_size())
def test_invalid_targets_argument(self):
with self.assertRaisesRegexp(ValueError, r'`y` argument is not supported'):
self.adapter_cls(self.dataset_input, y=self.dataset_input)
def test_invalid_sample_weights_argument(self):
with self.assertRaisesRegexp(ValueError,
r'`sample_weight` argument is not supported'):
self.adapter_cls(self.dataset_input, sample_weights=self.dataset_input)
class GeneratorDataAdapterTest(DataAdapterTestBase):
def setUp(self):
super(GeneratorDataAdapterTest, self).setUp()
self.adapter_cls = data_adapter.GeneratorDataAdapter
def test_can_handle(self):
self.assertFalse(self.adapter_cls.can_handle(self.numpy_input))
self.assertFalse(self.adapter_cls.can_handle(self.tensor_input))
self.assertFalse(self.adapter_cls.can_handle(self.dataset_input))
self.assertTrue(self.adapter_cls.can_handle(self.generator_input))
self.assertFalse(self.adapter_cls.can_handle(self.sequence_input))
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.generator_input, steps_per_epoch=10)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
@test_util.run_v2_only
@data_utils.dont_use_multiprocessing_pool
def test_with_multiprocessing_training(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.iterator_input, workers=1, use_multiprocessing=True,
max_queue_size=10, steps_per_epoch=10)
# Fit twice to ensure there isn't any duplication that prevent the worker
# from starting.
self.model.fit(self.iterator_input, workers=1, use_multiprocessing=True,
max_queue_size=10, steps_per_epoch=10)
def test_size(self):
adapter = self.adapter_cls(self.generator_input)
self.assertIsNone(adapter.get_size())
def test_batch_size(self):
adapter = self.adapter_cls(self.generator_input)
self.assertEqual(adapter.batch_size(), None)
self.assertEqual(adapter.representative_batch_size(), 5)
def test_partial_batch(self):
adapter = self.adapter_cls(self.generator_input)
self.assertFalse(adapter.has_partial_batch())
self.assertIsNone(adapter.partial_batch_size())
def test_invalid_targets_argument(self):
with self.assertRaisesRegexp(ValueError, r'`y` argument is not supported'):
self.adapter_cls(self.generator_input, y=self.generator_input)
def test_invalid_sample_weights_argument(self):
with self.assertRaisesRegexp(ValueError,
r'`sample_weight` argument is not supported'):
self.adapter_cls(
self.generator_input, sample_weights=self.generator_input)
def test_not_shuffled(self):
def generator():
for i in range(10):
yield np.ones((1, 1)) * i
adapter = self.adapter_cls(generator(), shuffle=True)
with context.eager_mode():
for i, data in enumerate(adapter.get_dataset()):
self.assertEqual(i, data[0].numpy().flatten())
class KerasSequenceAdapterTest(DataAdapterTestBase):
def setUp(self):
super(KerasSequenceAdapterTest, self).setUp()
self.adapter_cls = data_adapter.KerasSequenceAdapter
def test_can_handle(self):
self.assertFalse(self.adapter_cls.can_handle(self.numpy_input))
self.assertFalse(self.adapter_cls.can_handle(self.tensor_input))
self.assertFalse(self.adapter_cls.can_handle(self.dataset_input))
self.assertFalse(self.adapter_cls.can_handle(self.generator_input))
self.assertTrue(self.adapter_cls.can_handle(self.sequence_input))
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_training(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.sequence_input)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
@test_util.run_v2_only
@data_utils.dont_use_multiprocessing_pool
def test_with_multiprocessing_training(self):
self.model.compile(loss='sparse_categorical_crossentropy', optimizer='sgd',
run_eagerly=testing_utils.should_run_eagerly())
self.model.fit(self.sequence_input, workers=1, use_multiprocessing=True,
max_queue_size=10, steps_per_epoch=10)
# Fit twice to ensure there isn't any duplication that prevent the worker
# from starting.
self.model.fit(self.sequence_input, workers=1, use_multiprocessing=True,
max_queue_size=10, steps_per_epoch=10)
def test_size(self):
adapter = self.adapter_cls(self.sequence_input)
self.assertEqual(adapter.get_size(), 10)
def test_batch_size(self):
adapter = self.adapter_cls(self.sequence_input)
self.assertEqual(adapter.batch_size(), None)
self.assertEqual(adapter.representative_batch_size(), 5)
def test_partial_batch(self):
adapter = self.adapter_cls(self.sequence_input)
self.assertFalse(adapter.has_partial_batch())
self.assertIsNone(adapter.partial_batch_size())
def test_invalid_targets_argument(self):
with self.assertRaisesRegexp(ValueError, r'`y` argument is not supported'):
self.adapter_cls(self.sequence_input, y=self.sequence_input)
def test_invalid_sample_weights_argument(self):
with self.assertRaisesRegexp(ValueError,
r'`sample_weight` argument is not supported'):
self.adapter_cls(self.sequence_input, sample_weights=self.sequence_input)
class DataHandlerTest(keras_parameterized.TestCase):
def test_finite_dataset_with_steps_per_epoch(self):
data = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3]).batch(1)
# User can choose to only partially consume `Dataset`.
data_handler = data_adapter.DataHandler(
data, initial_epoch=0, epochs=2, steps_per_epoch=2)
self.assertEqual(data_handler.inferred_steps, 2)
self.assertFalse(data_handler._adapter.should_recreate_iterator())
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator).numpy())
returned_data.append(epoch_data)
self.assertEqual(returned_data, [[0, 1], [2, 3]])
def test_finite_dataset_without_steps_per_epoch(self):
data = dataset_ops.Dataset.from_tensor_slices([0, 1, 2]).batch(1)
data_handler = data_adapter.DataHandler(data, initial_epoch=0, epochs=2)
self.assertEqual(data_handler.inferred_steps, 3)
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator).numpy())
returned_data.append(epoch_data)
self.assertEqual(returned_data, [[0, 1, 2], [0, 1, 2]])
def test_finite_dataset_with_steps_per_epoch_exact_size(self):
data = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3]).batch(1)
# If user specifies exact size of `Dataset` as `steps_per_epoch`,
# create a new iterator each epoch.
data_handler = data_adapter.DataHandler(
data, initial_epoch=0, epochs=2, steps_per_epoch=4)
self.assertTrue(data_handler._adapter.should_recreate_iterator())
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator).numpy())
returned_data.append(epoch_data)
self.assertEqual(returned_data, [[0, 1, 2, 3], [0, 1, 2, 3]])
def test_infinite_dataset_with_steps_per_epoch(self):
data = dataset_ops.Dataset.from_tensor_slices([0, 1, 2]).batch(1).repeat()
data_handler = data_adapter.DataHandler(
data, initial_epoch=0, epochs=2, steps_per_epoch=3)
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator).numpy())
returned_data.append(epoch_data)
self.assertEqual(returned_data, [[0, 1, 2], [0, 1, 2]])
def test_unknown_cardinality_dataset_with_steps_per_epoch(self):
ds = dataset_ops.DatasetV2.from_tensor_slices([0, 1, 2, 3, 4, 5, 6])
filtered_ds = ds.filter(lambda x: x < 4)
self.assertEqual(
cardinality.cardinality(filtered_ds).numpy(), cardinality.UNKNOWN)
# User can choose to only partially consume `Dataset`.
data_handler = data_adapter.DataHandler(
filtered_ds, initial_epoch=0, epochs=2, steps_per_epoch=2)
self.assertFalse(data_handler._adapter.should_recreate_iterator())
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator))
returned_data.append(epoch_data)
returned_data = self.evaluate(returned_data)
self.assertEqual(returned_data, [[0, 1], [2, 3]])
self.assertEqual(data_handler.inferred_steps, 2)
def test_unknown_cardinality_dataset_without_steps_per_epoch(self):
ds = dataset_ops.DatasetV2.from_tensor_slices([0, 1, 2, 3, 4, 5, 6])
filtered_ds = ds.filter(lambda x: x < 4)
self.assertEqual(
cardinality.cardinality(filtered_ds).numpy(), cardinality.UNKNOWN)
data_handler = data_adapter.DataHandler(
filtered_ds, initial_epoch=0, epochs=2)
self.assertEqual(data_handler.inferred_steps, None)
self.assertTrue(data_handler._adapter.should_recreate_iterator())
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
with data_handler.catch_stop_iteration():
for _ in data_handler.steps():
epoch_data.append(next(iterator))
returned_data.append(epoch_data)
returned_data = self.evaluate(returned_data)
self.assertEqual(returned_data, [[0, 1, 2, 3], [0, 1, 2, 3]])
self.assertEqual(data_handler.inferred_steps, 4)
def test_insufficient_data(self):
ds = dataset_ops.DatasetV2.from_tensor_slices([0, 1])
ds = ds.filter(lambda *args, **kwargs: True)
data_handler = data_adapter.DataHandler(
ds, initial_epoch=0, epochs=2, steps_per_epoch=3)
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
with data_handler.catch_stop_iteration():
epoch_data.append(next(iterator))
returned_data.append(epoch_data)
returned_data = self.evaluate(returned_data)
self.assertTrue(data_handler._insufficient_data)
self.assertEqual(returned_data, [[0, 1]])
def test_numpy(self):
x = np.array([0, 1, 2])
y = np.array([0, 2, 4])
sw = np.array([0, 4, 8])
data_handler = data_adapter.DataHandler(
x=x, y=y, sample_weight=sw, batch_size=1, epochs=2)
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator))
returned_data.append(epoch_data)
returned_data = self.evaluate(returned_data)
self.assertEqual(returned_data,
[[(0, 0, 0), (1, 2, 4),
(2, 4, 8)], [(0, 0, 0), (1, 2, 4), (2, 4, 8)]])
def test_generator(self):
def generator():
for _ in range(2):
for step in range(3):
yield (ops.convert_to_tensor_v2([step]),)
data_handler = data_adapter.DataHandler(
generator(), epochs=2, steps_per_epoch=3)
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator))
returned_data.append(epoch_data)
returned_data = self.evaluate(returned_data)
self.assertEqual(returned_data, [[([0],), ([1],),
([2],)], [([0],), ([1],), ([2],)]])
def test_composite_tensor(self):
st = sparse_tensor.SparseTensor(
indices=[[0, 0], [1, 0], [2, 0]], values=[0, 1, 2], dense_shape=[3, 1])
data_handler = data_adapter.DataHandler(st, epochs=2, steps_per_epoch=3)
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator))
returned_data.append(epoch_data)
returned_data = self.evaluate(
nest.map_structure(sparse_ops.sparse_tensor_to_dense, returned_data))
self.assertEqual(returned_data, [[([0],), ([1],),
([2],)], [([0],), ([1],), ([2],)]])
def test_list_of_scalars(self):
data_handler = data_adapter.DataHandler([[0], [1], [2]],
epochs=2,
steps_per_epoch=3)
returned_data = []
for _, iterator in data_handler.enumerate_epochs():
epoch_data = []
for _ in data_handler.steps():
epoch_data.append(next(iterator))
returned_data.append(epoch_data)
returned_data = self.evaluate(returned_data)
self.assertEqual(returned_data, [[([0],), ([1],),
([2],)], [([0],), ([1],), ([2],)]])
def test_class_weight_user_errors(self):
with self.assertRaisesRegexp(ValueError, 'to be a dict with keys'):
data_adapter.DataHandler(
x=[[0], [1], [2]],
y=[[2], [1], [0]],
batch_size=1,
sample_weight=[[1.], [2.], [4.]],
class_weight={
0: 0.5,
1: 1.,
3: 1.5 # Skips class `2`.
})
with self.assertRaisesRegexp(ValueError, 'with a single output'):
data_adapter.DataHandler(
x=np.ones((10, 1)),
y=[np.ones((10, 1)), np.zeros((10, 1))],
batch_size=2,
class_weight={
0: 0.5,
1: 1.,
2: 1.5
})
class TestValidationSplit(keras_parameterized.TestCase):
@parameterized.named_parameters(('numpy_arrays', True), ('tensors', False))
def test_validation_split_shuffled(self, use_numpy):
if use_numpy:
x = np.array([0, 1, 2, 3, 4])
y = np.array([0, 2, 4, 6, 8])
sw = np.array([0, 4, 8, 12, 16])
else:
x = ops.convert_to_tensor_v2([0, 1, 2, 3, 4])
y = ops.convert_to_tensor_v2([0, 2, 4, 6, 8])
sw = ops.convert_to_tensor_v2([0, 4, 8, 12, 16])
(train_x, train_y, train_sw), (val_x, val_y, val_sw) = (
data_adapter.train_validation_split((x, y, sw), validation_split=0.2))
self.assertEqual(int(train_x.shape[0]), 4)
self.assertEqual(int(train_y.shape[0]), 4)
self.assertEqual(int(train_sw.shape[0]), 4)
for i in range(4):
# Check that all arrays were shuffled in identical order.
self.assertEqual(2 * train_x[i].numpy(), train_y[i].numpy())
self.assertEqual(2 * train_y[i].numpy(), train_sw[i].numpy())
self.assertEqual(int(val_x.shape[0]), 1)
self.assertEqual(int(val_y.shape[0]), 1)
self.assertEqual(int(val_sw.shape[0]), 1)
for i in range(1):
# Check that all arrays were shuffled in identical order.
self.assertEqual(2 * train_x[i].numpy(), train_y[i].numpy())
self.assertEqual(2 * train_y[i].numpy(), train_sw[i].numpy())
# Check that arrays contain expected values.
self.assertEqual(
sorted(array_ops.concat([train_x, val_x], axis=0).numpy().tolist()),
sorted(ops.convert_to_tensor_v2(x).numpy().tolist()))
self.assertEqual(
sorted(array_ops.concat([train_y, val_y], axis=0).numpy().tolist()),
sorted(ops.convert_to_tensor_v2(y).numpy().tolist()))
self.assertEqual(
sorted(array_ops.concat([train_sw, val_sw], axis=0).numpy().tolist()),
sorted(ops.convert_to_tensor_v2(sw).numpy().tolist()))
@parameterized.named_parameters(('numpy_arrays', True), ('tensors', False))
def test_validation_split_unshuffled(self, use_numpy):
if use_numpy:
x = np.array([0, 1, 2, 3, 4])
y = np.array([0, 2, 4, 6, 8])
sw = np.array([0, 4, 8, 12, 16])
else:
x = ops.convert_to_tensor_v2([0, 1, 2, 3, 4])
y = ops.convert_to_tensor_v2([0, 2, 4, 6, 8])
sw = ops.convert_to_tensor_v2([0, 4, 8, 12, 16])
(train_x, train_y, train_sw), (val_x, val_y, val_sw) = (
data_adapter.train_validation_split((x, y, sw),
validation_split=0.2,
shuffle=False))
self.assertEqual(train_x.numpy().tolist(), [0, 1, 2, 3])
self.assertEqual(train_y.numpy().tolist(), [0, 2, 4, 6])
self.assertEqual(train_sw.numpy().tolist(), [0, 4, 8, 12])
self.assertEqual(val_x.numpy().tolist(), [4])
self.assertEqual(val_y.numpy().tolist(), [8])
self.assertEqual(val_sw.numpy().tolist(), [16])
def test_validation_split_user_error(self):
with self.assertRaisesRegexp(ValueError, 'is only supported for Tensors'):
data_adapter.train_validation_split(
lambda: np.ones((10, 1)), validation_split=0.2)
def test_validation_split_examples_too_few(self):
with self.assertRaisesRegexp(
ValueError, 'not sufficient to split it'):
data_adapter.train_validation_split(
np.ones((1, 10)), validation_split=0.2)
def test_validation_split_none(self):
train_sw, val_sw = data_adapter.train_validation_split(
None, validation_split=0.2)
self.assertIsNone(train_sw)
self.assertIsNone(val_sw)
(_, train_sw), (_, val_sw) = data_adapter.train_validation_split(
(np.ones((10, 1)), None), validation_split=0.2)
self.assertIsNone(train_sw)
self.assertIsNone(val_sw)
class TestUtils(keras_parameterized.TestCase):
def test_expand_1d_sparse_tensors_untouched(self):
st = sparse_tensor.SparseTensor(
indices=[[0], [10]], values=[1, 2], dense_shape=[10])
st = data_adapter.expand_1d(st)
self.assertEqual(st.shape.rank, 1)
if __name__ == '__main__':
ops.enable_eager_execution()
test.main()
|
apache-2.0
| -2,744,430,613,566,517,000 | 38.740331 | 80 | 0.649567 | false |
Pfiver/penview
|
dev/Danny_example.py
|
1
|
9958
|
from Danny.OOo.OOoLib import *
def CalcExample():
# create a new Calc spreadsheet.
oDoc = StarDesktop.loadComponentFromURL( "private:factory/scalc", "_blank", 0, Array() )
#-----
# Use this instead to open an EXISTING calc document,
# and assign it to variable oDoc.
# cFile = "C:\Documents and Settings\danny\Desktop\MyCalc" # Windows
# cFile = "/home/danny/Desktop/MyCalc.sxc" # Linux
# cURL = convertToURL( cFile + ".sxc" )
# oDoc = StarDesktop.loadComponentFromURL( cURL, "_blank", 0, Array() )
#-----
# Here are two ways to get access to one of the various sheets
# in the spreadsheet document.
# Note that these don't make the sheet *vislble*, they merely give
# access to the sheet's content within the program.
oSheet = oDoc.getSheets().getByIndex( 0 ) # get the zero'th sheet
#oSheet = oDoc.getSheets().getByName( "Sheet3" ) # get by name
#-----
# Put some sales figures onto the sheet.
oSheet.getCellByPosition( 0, 0 ).setString( "Month" )
oSheet.getCellByPosition( 1, 0 ).setString( "Sales" )
oSheet.getCellByPosition( 2, 0 ).setString( "End Date" )
oSheet.getCellByPosition( 0, 1 ).setString( "Jan" )
oSheet.getCellByPosition( 0, 2 ).setString( "Feb" )
oSheet.getCellByPosition( 0, 3 ).setString( "Mar" )
oSheet.getCellByPosition( 0, 4 ).setString( "Apr" )
oSheet.getCellByPosition( 0, 5 ).setString( "May" )
oSheet.getCellByPosition( 0, 6 ).setString( "Jun" )
oSheet.getCellByPosition( 0, 7 ).setString( "Jul" )
oSheet.getCellByPosition( 0, 8 ).setString( "Aug" )
oSheet.getCellByPosition( 0, 9 ).setString( "Sep" )
oSheet.getCellByPosition( 0, 10 ).setString( "Oct" )
oSheet.getCellByPosition( 0, 11 ).setString( "Nov" )
oSheet.getCellByPosition( 0, 12 ).setString( "Dec" )
oSheet.getCellByPosition( 1, 1 ).setValue( 3826.37 )
oSheet.getCellByPosition( 1, 2 ).setValue( 3504.21 )
oSheet.getCellByPosition( 1, 3 ).setValue( 2961.45 )
oSheet.getCellByPosition( 1, 4 ).setValue( 2504.12 )
oSheet.getCellByPosition( 1, 5 ).setValue( 2713.98 )
oSheet.getCellByPosition( 1, 6 ).setValue( 2248.17 )
oSheet.getCellByPosition( 1, 7 ).setValue( 1802.13 )
oSheet.getCellByPosition( 1, 8 ).setValue( 2003.22 )
oSheet.getCellByPosition( 1, 9 ).setValue( 1502.54 )
oSheet.getCellByPosition( 1, 10 ).setValue( 1207.68 )
oSheet.getCellByPosition( 1, 11 ).setValue( 1319.71 )
oSheet.getCellByPosition( 1, 12 ).setValue( 786.03 )
oSheet.getCellByPosition( 2, 1 ).setFormula( "=DATE(2004;01;31)" )
oSheet.getCellByPosition( 2, 2 ).setFormula( "=DATE(2004;02;29)" )
oSheet.getCellByPosition( 2, 3 ).setFormula( "=DATE(2004;03;31)" )
oSheet.getCellByPosition( 2, 4 ).setFormula( "=DATE(2004;04;30)" )
oSheet.getCellByPosition( 2, 5 ).setFormula( "=DATE(2004;05;31)" )
oSheet.getCellByPosition( 2, 6 ).setFormula( "=DATE(2004;06;30)" )
oSheet.getCellByPosition( 2, 7 ).setFormula( "=DATE(2004;07;31)" )
oSheet.getCellByPosition( 2, 8 ).setFormula( "=DATE(2004;08;31)" )
oSheet.getCellByPosition( 2, 9 ).setFormula( "=DATE(2004;09;30)" )
# Note that these last three dates are not set as DATE() function calls.
oSheet.getCellByPosition( 2, 10 ).setFormula( "10/31/2004" )
oSheet.getCellByPosition( 2, 11 ).setFormula( "11/30/2004" )
oSheet.getCellRangeByName( "C13" ).setFormula( "12/31/2004" )
#-----
#-----
# Format the date cells as dates.
com_sun_star_util_NumberFormat_DATE = uno.getConstantByName( "com.sun.star.util.NumberFormat.DATE" )
oFormats = oDoc.getNumberFormats()
oLocale = createUnoStruct( "com.sun.star.lang.Locale" )
nDateKey = oFormats.getStandardFormat( com_sun_star_util_NumberFormat_DATE, oLocale )
oCell = oSheet.getCellRangeByName( "C2:C13" )
oCell.NumberFormat = nDateKey
#-----
#-----
# Now add a chart to the spreadsheet.
oCellRangeAddress = oSheet.getCellRangeByName( "A1:B13" ).getRangeAddress()
# oCellRangeAddress = MakeCellRangeAddress( 0, 0, 1, 1, 12 )
# Get the collection of charts from the sheet.
oCharts = oSheet.getCharts()
# Add a new chart with a specific name,
# in a specific rectangle on the drawing page,
# and connected to specific cells of the spreadsheet.
oCharts.addNewByName( "Sales",
makeRectangle( 8000, 1000, 16000, 10000 ),
Array( oCellRangeAddress ),
True, True )
# From the collection of charts, get the new chart we just created.
oChart = oCharts.getByName( "Sales" )
# Get the chart document model.
oChartDoc = oChart.getEmbeddedObject()
# Get the drawing text shape of the title of the chart.
oTitleTextShape = oChartDoc.getTitle()
# Change the title.
oTitleTextShape.String = "Sales Chart"
# Create a diagram.
oDiagram = oChartDoc.createInstance( "com.sun.star.chart.BarDiagram" )
# Set its parameters.
oDiagram.Vertical = True
# Make the chart use this diagram.
oChartDoc.setDiagram( oDiagram )
# Ask the chart what diagram it is using.
# (Unnecessary, since variable oDiagram already contains this value.)
oDiagram = oChartDoc.getDiagram()
# Make more changes to the diagram.
oDiagram.DataCaption = uno.getConstantByName( "com.sun.star.chart.ChartDataCaption.VALUE" )
oDiagram.DataRowSource = uno.getConstantByName( "com.sun.star.chart.ChartDataRowSource.COLUMNS" )
#
#-----
#-----
# Now demonstrate how to manipulate the sheets.
# Insert six more sheets into the document.
nNumSheetsCurrently = oDoc.getSheets().getCount()
oDoc.getSheets().insertNewByName( "Fred", nNumSheetsCurrently+1 )
oDoc.getSheets().insertNewByName( "Joe", nNumSheetsCurrently+2 )
oDoc.getSheets().insertNewByName( "Bill", nNumSheetsCurrently+3 )
oDoc.getSheets().insertNewByName( "Sam", nNumSheetsCurrently+4 )
oDoc.getSheets().insertNewByName( "Tom", nNumSheetsCurrently+5 )
oDoc.getSheets().insertNewByName( "David", nNumSheetsCurrently+6 )
# Now find a sheet named "Sheet2" and get rid of it.
oDoc.getSheets().removeByName( "Sheet2" )
# Now find the sheet named "Sam" and change its name to "Sheet 37"
oDoc.getSheets().getByName( "Sam" ).Name = "Sheet 37"
#
#-----
#-------
# Now print the document -- three different ways.
# Technique 1.
# Now print the document.
# Print two copies.
# Print pages 1 thru 4, and also page 10.
#
# NOTE: we would do it like this, except the word "print"
# has a special meaning in python, and cannot be invoked
# as a method.
#oDoc.print(
# Array(
# makePropertyValue( "CopyCount", 2 ),
# makePropertyValue( "Pages", "1-4;10" ) ) )
uno.invoke( oDoc, "print", ( Array(
makePropertyValue( "CopyCount", 2 ),
makePropertyValue( "Pages", "1-4;10" ) ), ) )
# Technique 2.
# Print the document already, without any arguments.
uno.invoke( oDoc, "print", ( Array(), ) )
#oDoc.print( Array() )
# Using technique 1 or 2, be sure not to close the document
# until printing is completed.
# http://www.oooforum.org/forum/viewtopic.php?p=23144#23144
# Technique 3.
# Print the document by bringing up the Print Job dialog box
# for the user to interact with.
oDocFrame = oDoc.getCurrentController().getFrame()
oDispatchHelper = createUnoService( "com.sun.star.frame.DispatchHelper" )
oDispatchHelper.executeDispatch( oDocFrame, ".uno:Print", "", 0, Array() )
# To learn some more about the dispatcher, see these articles...
# http://www.oooforum.org/forum/viewtopic.php?t=5058
# http://www.oooforum.org/forum/viewtopic.php?t=5057
#
#-------
#-------
# Now save the document
# Prepare the filename to save.
# We're going to save the file in several different formats,
# but all based on the same filename.
cFile = "C:\Documents and Settings\dbrewer\Desktop\MyCalc" # Windows
#cFile = "/home/danny/Desktop/MyCalc.sxc" # Linux
# Now save the spreadsheet in native OOo Calc format.
cURL = convertToURL( cFile + ".sxc" )
oDoc.storeAsURL( cURL, Array() )
# Note the above used storeAsUrl,
# the following use storeToUrl.
# Now save it in Excel format.
cURL = convertToURL( cFile + ".xls" )
oDoc.storeToURL( cURL, Array( makePropertyValue( "FilterName", "MS Excel 97" ) ) )
# Now save a PDF.
cURL = convertToURL( cFile + ".pdf" )
oDoc.storeToURL( cURL, Array( makePropertyValue( "FilterName", "calc_pdf_Export" ) ) )
# Now save it in CSV format.
cURL = convertToURL( cFile + ".csv" )
oDoc.storeToURL( cURL, Array( makePropertyValue( "FilterName", "Text - txt - csv (StarCalc)" ) ) )
# Now save it in DIF format.
cURL = convertToURL( cFile + ".dif" )
oDoc.storeToURL( cURL, Array( makePropertyValue( "FilterName", "DIF" ) ) )
# Now save it in SYLK format.
cURL = convertToURL( cFile + ".sylk" )
oDoc.storeToURL( cURL, Array( makePropertyValue( "FilterName", "SYLK" ) ) )
# Now save as HTML.
cURL = convertToURL( cFile + ".html" )
oDoc.storeToURL( cURL, Array( makePropertyValue( "FilterName", "HTML (StarCalc)" ) ) )
# A list of some filter names you can use for both loading
# and saving a document can be found here...
# http://www.oooforum.org/forum/viewtopic.php?t=3549
#
#-------
#-------
# Now close the document
oDoc.close( True )
#-------
# import Danny.OOo.Examples.CalcExamples
# reload( Danny.OOo.Examples.CalcExamples ); from Danny.OOo.Examples.CalcExamples import *
# CalcExample()
|
gpl-3.0
| -697,155,829,228,361,100 | 40.495833 | 105 | 0.642097 | false |
samstav/mongoexport
|
mongoexport.py
|
1
|
7599
|
"""mongoexport implemented as a python module.
'username' and 'password' for accessing target mongodb
can be stored as environment variables or in python
keyring.
Stored as environment variables:
export MONGOEXPORT_USERNAME=*****
export MONGOEXPORT_PASSWORD=*****
or in python keyring:
$ keyring set mongoexport username
Password for 'username' in 'mongoexport': *****
$ keyring set mongoexport password
Password for 'password' in 'mongoexport': *****
Usage:
me = MongoExport('helperdb.objectrocket.com', 'help', 'advicecollection')
me.run()
# to move each document into its own file...
me.file_per_document()
"""
import itertools
import mmap
import os
import shlex
import subprocess
import sys
from multiprocessing import pool
import arrow
try:
import ujson as json
except ImportError:
import json
import keyring
DAYFMT = '%a_%b_%d_%Y'
SERVICE = 'mongoexport'
class MongoExport(object):
def __init__(self, host, database, collection, port=27017,
username=None, password=None, output=None, query=None,
fields=None, use_ssl=True):
"""Constructor for mongoexport job.
:param fields: Fields as a list which will be selected for export.
Each field may reference a subdocument or value using
dot notation.
"""
now = arrow.now()
todaystr = now.floor('day').strftime(DAYFMT)
filename = "%s_%s" % (collection, now.strftime('%X'))
output = normalized_path(output)
if not output:
output = "%s/%s/%s/%s" % (SERVICE.lower(), collection, todaystr, filename)
elif os.path.isdir(output):
output = "%s/%s" % (output, filename)
elif os.path.isfile(output):
pass
output = normalized_path(output)
self.dirs, self.filename = os.path.split(output)
ensure_dirpath(self.dirs)
self.docspath = os.path.join(self.dirs, 'documents')
ensure_dirpath(self.docspath)
if not username:
username = get_configured_value('username')
if not password:
password = get_configured_value('password')
if query:
query = make_json(query)
if not query.startswith("'"):
query = "'" + query
if not query.endswith("'"):
query = query + "'"
self.host = host
self.port = port
self.database = database
self.collection = collection
self.username = username
self.password = password
self.query = query
self.fields = fields
self.use_ssl = use_ssl
self.output = output
def get_command(self):
command = ("mongoexport --host {host} --port {port} "
"--db {db} --collection {collection} --out {output}")
command = command.format(host=self.host, port=self.port,
db=self.database, collection=self.collection,
output=self.output)
if self.username:
command += " --username %s" % self.username
if self.password:
command += " --password %s" % self.password
if self.query:
command += " --query %s" % self.query
if self.fields:
command += " --fields %s" % ",".join(self.fields)
if self.use_ssl:
command += " --ssl"
return command
def run(self):
command = self.get_command()
return execute(command)
def file_per_document(self):
return _file_per_document(self.output)
def _file_per_document(exportfile):
if not os.path.exists(exportfile):
print "%s doesn't exist!" % exportfile
return
dirs, _ = os.path.split(exportfile)
docspath = os.path.join(dirs, 'documents')
ensure_dirpath(docspath)
expfile = open(exportfile, 'r')
def wat(ammapobject):
x = True
while x:
ablob = ammapobject.readline()
if ablob:
yield ablob
else:
x = False
tpool = pool.ThreadPool(pool.cpu_count()*64)
gettingweird = wat(mmap.mmap(expfile.fileno(), 0, prot=mmap.PROT_READ))
job = tpool.imap_unordered(
_fpd,
itertools.izip_longest(gettingweird, (), fillvalue=docspath))
while True:
try:
job.next()
except Exception:
return
def _fpd(jsonblobdocspath):
jsonblob, docspath = jsonblobdocspath
r = json.loads(jsonblob)
realpath = os.path.join(docspath, r['_id'] + '.json')
with open(realpath, 'w') as document:
document.write(jsonblob)
class SubprocessError(subprocess.CalledProcessError):
def __init__(self, returncode, cmd, output=None, stderr=None):
super(SubprocessError, self).__init__(returncode, cmd, output=output)
self.stderr = stderr
def __str__(self):
line = super(SubprocessError, self).__str__()
if self.stderr:
line += " | %s" % self.stderr
return line
def get_configured_value(valuename):
"""Gets value by valuename, from environment variable or keyring.
If the value is stored in keyring, it should be stored with
service_name equal to the variable SERVICE (lowercased),
defined at the top of this module.
    If the value is stored as an environment variable, it should be
    stored with the prefix SERVICE (uppercased) + "_".
"""
value = keyring.get_password(SERVICE.lower(), valuename)
if not value:
value = os.getenv('%s_%s' % (SERVICE.upper(), valuename.upper()))
return value
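# Illustrative lookup order for get_configured_value (the service and variable
# names follow this module's conventions; the concrete values are hypothetical):
#   get_configured_value('username')
#     1. keyring.get_password('mongoexport', 'username')
#     2. os.getenv('MONGOEXPORT_USERNAME')
#   Returns None if neither source has a value.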
def normalized_path(value, must_exist=False):
"""Normalize and expand a shorthand or relative path."""
if not value:
return
norm = os.path.normpath(value)
norm = os.path.abspath(os.path.expanduser(norm))
if must_exist:
if not os.path.exists(norm):
raise ValueError("%s is not a valid path." % norm)
return norm
def ensure_dirpath(path):
"""Ensures that the directory exists.
Creates the directory structure if necessary.
"""
if not path:
return
try:
os.makedirs(path)
except OSError:
if os.path.isdir(path):
pass
else:
raise
def make_json(something):
"""Return a json-encoded string from a file, path, or standard object."""
if isinstance(something, file):
something = something.read()
elif isinstance(something, (int, tuple, list, dict)):
something = json.dumps(something)
elif os.path.exists(something):
with open(something, 'r') as thing:
something = thing.read()
return something
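# A rough sketch of what make_json does with a few hypothetical inputs
# (exact whitespace depends on whether ujson or the stdlib json is in use):
#   make_json({'size': {'$gt': 100}})  -> '{"size": {"$gt": 100}}'
#   make_json([1, 2, 3])               -> '[1, 2, 3]'
#   make_json('/tmp/query.json')       -> contents of that file, if the path exists
#   make_json('{"a": 1}')              -> returned unchanged (already a string)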
def execute(command):
"""Manages the subprocess and returns a dictionary with the result.
Executes the command in the current working directory.
If the return code is non-zero, raises a SubprocessError.
"""
cmd = shlex.split(command)
try:
result = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError as err:
raise SubprocessError(err.errno, command, stderr=err.strerror)
out, err = result.communicate()
resultdict = {
'exit_code': result.returncode,
'stdout': out.strip(),
'stderr': err.strip(),
}
if resultdict['exit_code'] != 0:
raise SubprocessError(resultdict['exit_code'], command,
output=resultdict['stderr'],
stderr=resultdict['stderr'])
return resultdict
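# Typical usage, as a sketch (the command and its output are illustrative only):
#   info = execute('mongoexport --version')
#   info['exit_code']   # 0 on success
#   info['stdout']      # e.g. 'mongoexport version: r3.6.0'
# A non-zero exit code raises SubprocessError rather than returning a dict.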
|
apache-2.0
| 3,746,758,413,521,380,000 | 28.568093 | 86 | 0.600342 | false |
ronaldoussoren/macholib
|
macholib/mach_o.py
|
1
|
46738
|
"""
Other than changing the load commands in such a way that they do not
contain the load command itself, this is largely a by-hand conversion
of the C headers. Hopefully everything in here should be at least as
obvious as the C headers, and you should be using the C headers as a real
reference because the documentation didn't come along for the ride.
Doing much of anything with the symbol tables or segments is really
not covered at this point.
See /usr/include/mach-o and friends.
"""
import time
from macholib.ptypes import (
Structure,
p_int32,
p_int64,
p_long,
p_short,
p_uint8,
p_uint32,
p_uint64,
p_ulong,
pypackable,
)
_CPU_ARCH_ABI64 = 0x01000000
CPU_TYPE_NAMES = {
-1: "ANY",
1: "VAX",
6: "MC680x0",
7: "i386",
_CPU_ARCH_ABI64 | 7: "x86_64",
8: "MIPS",
10: "MC98000",
11: "HPPA",
12: "ARM",
_CPU_ARCH_ABI64 | 12: "ARM64",
13: "MC88000",
14: "SPARC",
15: "i860",
16: "Alpha",
18: "PowerPC",
_CPU_ARCH_ABI64 | 18: "PowerPC64",
}
INTEL64_SUBTYPE = {
3: "CPU_SUBTYPE_X86_64_ALL",
4: "CPU_SUBTYPE_X86_ARCH1",
8: "CPU_SUBTYPE_X86_64_H",
}
# define CPU_SUBTYPE_INTEL(f, m) ((cpu_subtype_t) (f) + ((m) << 4))
INTEL_SUBTYPE = {
0: "CPU_SUBTYPE_INTEL_MODEL_ALL",
1: "CPU_THREADTYPE_INTEL_HTT",
3: "CPU_SUBTYPE_I386_ALL",
4: "CPU_SUBTYPE_486",
5: "CPU_SUBTYPE_586",
8: "CPU_SUBTYPE_PENTIUM_3",
9: "CPU_SUBTYPE_PENTIUM_M",
10: "CPU_SUBTYPE_PENTIUM_4",
11: "CPU_SUBTYPE_ITANIUM",
12: "CPU_SUBTYPE_XEON",
34: "CPU_SUBTYPE_XEON_MP",
42: "CPU_SUBTYPE_PENTIUM_4_M",
43: "CPU_SUBTYPE_ITANIUM_2",
38: "CPU_SUBTYPE_PENTPRO",
40: "CPU_SUBTYPE_PENTIUM_3_M",
52: "CPU_SUBTYPE_PENTIUM_3_XEON",
102: "CPU_SUBTYPE_PENTII_M3",
132: "CPU_SUBTYPE_486SX",
166: "CPU_SUBTYPE_PENTII_M5",
199: "CPU_SUBTYPE_CELERON",
231: "CPU_SUBTYPE_CELERON_MOBILE",
}
MC680_SUBTYPE = {
1: "CPU_SUBTYPE_MC680x0_ALL",
2: "CPU_SUBTYPE_MC68040",
3: "CPU_SUBTYPE_MC68030_ONLY",
}
MIPS_SUBTYPE = {
0: "CPU_SUBTYPE_MIPS_ALL",
1: "CPU_SUBTYPE_MIPS_R2300",
2: "CPU_SUBTYPE_MIPS_R2600",
3: "CPU_SUBTYPE_MIPS_R2800",
4: "CPU_SUBTYPE_MIPS_R2000a",
5: "CPU_SUBTYPE_MIPS_R2000",
6: "CPU_SUBTYPE_MIPS_R3000a",
7: "CPU_SUBTYPE_MIPS_R3000",
}
MC98000_SUBTYPE = {0: "CPU_SUBTYPE_MC98000_ALL", 1: "CPU_SUBTYPE_MC98601"}
HPPA_SUBTYPE = {0: "CPU_SUBTYPE_HPPA_7100", 1: "CPU_SUBTYPE_HPPA_7100LC"}
MC88_SUBTYPE = {
0: "CPU_SUBTYPE_MC88000_ALL",
1: "CPU_SUBTYPE_MC88100",
2: "CPU_SUBTYPE_MC88110",
}
SPARC_SUBTYPE = {0: "CPU_SUBTYPE_SPARC_ALL"}
I860_SUBTYPE = {0: "CPU_SUBTYPE_I860_ALL", 1: "CPU_SUBTYPE_I860_860"}
POWERPC_SUBTYPE = {
0: "CPU_SUBTYPE_POWERPC_ALL",
1: "CPU_SUBTYPE_POWERPC_601",
2: "CPU_SUBTYPE_POWERPC_602",
3: "CPU_SUBTYPE_POWERPC_603",
4: "CPU_SUBTYPE_POWERPC_603e",
5: "CPU_SUBTYPE_POWERPC_603ev",
6: "CPU_SUBTYPE_POWERPC_604",
7: "CPU_SUBTYPE_POWERPC_604e",
8: "CPU_SUBTYPE_POWERPC_620",
9: "CPU_SUBTYPE_POWERPC_750",
10: "CPU_SUBTYPE_POWERPC_7400",
11: "CPU_SUBTYPE_POWERPC_7450",
100: "CPU_SUBTYPE_POWERPC_970",
}
ARM_SUBTYPE = {
0: "CPU_SUBTYPE_ARM_ALL12",
5: "CPU_SUBTYPE_ARM_V4T",
6: "CPU_SUBTYPE_ARM_V6",
7: "CPU_SUBTYPE_ARM_V5TEJ",
8: "CPU_SUBTYPE_ARM_XSCALE",
9: "CPU_SUBTYPE_ARM_V7",
10: "CPU_SUBTYPE_ARM_V7F",
11: "CPU_SUBTYPE_ARM_V7S",
12: "CPU_SUBTYPE_ARM_V7K",
13: "CPU_SUBTYPE_ARM_V8",
14: "CPU_SUBTYPE_ARM_V6M",
15: "CPU_SUBTYPE_ARM_V7M",
16: "CPU_SUBTYPE_ARM_V7EM",
}
ARM64_SUBTYPE = {0: "CPU_SUBTYPE_ARM64_ALL", 1: "CPU_SUBTYPE_ARM64_V8"}
VAX_SUBTYPE = {
0: "CPU_SUBTYPE_VAX_ALL",
1: "CPU_SUBTYPE_VAX780",
2: "CPU_SUBTYPE_VAX785",
3: "CPU_SUBTYPE_VAX750",
4: "CPU_SUBTYPE_VAX730",
5: "CPU_SUBTYPE_UVAXI",
6: "CPU_SUBTYPE_UVAXII",
7: "CPU_SUBTYPE_VAX8200",
8: "CPU_SUBTYPE_VAX8500",
9: "CPU_SUBTYPE_VAX8600",
10: "CPU_SUBTYPE_VAX8650",
11: "CPU_SUBTYPE_VAX8800",
12: "CPU_SUBTYPE_UVAXIII",
}
def get_cpu_subtype(cpu_type, cpu_subtype):
st = cpu_subtype & 0x0FFFFFFF
if cpu_type == 1:
subtype = VAX_SUBTYPE.get(st, st)
elif cpu_type == 6:
subtype = MC680_SUBTYPE.get(st, st)
elif cpu_type == 7:
subtype = INTEL_SUBTYPE.get(st, st)
elif cpu_type == 7 | _CPU_ARCH_ABI64:
subtype = INTEL64_SUBTYPE.get(st, st)
elif cpu_type == 8:
subtype = MIPS_SUBTYPE.get(st, st)
elif cpu_type == 10:
subtype = MC98000_SUBTYPE.get(st, st)
elif cpu_type == 11:
subtype = HPPA_SUBTYPE.get(st, st)
elif cpu_type == 12:
subtype = ARM_SUBTYPE.get(st, st)
elif cpu_type == 12 | _CPU_ARCH_ABI64:
subtype = ARM64_SUBTYPE.get(st, st)
elif cpu_type == 13:
subtype = MC88_SUBTYPE.get(st, st)
elif cpu_type == 14:
subtype = SPARC_SUBTYPE.get(st, st)
elif cpu_type == 15:
subtype = I860_SUBTYPE.get(st, st)
elif cpu_type == 16:
subtype = MIPS_SUBTYPE.get(st, st)
elif cpu_type == 18:
subtype = POWERPC_SUBTYPE.get(st, st)
elif cpu_type == 18 | _CPU_ARCH_ABI64:
subtype = POWERPC_SUBTYPE.get(st, st)
else:
subtype = str(st)
return subtype
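# A few illustrative lookups (values chosen for the example, not read from a
# real binary); note that the top capability nibble is masked off first:
#   get_cpu_subtype(7, 3)                     -> 'CPU_SUBTYPE_I386_ALL'
#   get_cpu_subtype(_CPU_ARCH_ABI64 | 7, 3)   -> 'CPU_SUBTYPE_X86_64_ALL'
#   get_cpu_subtype(_CPU_ARCH_ABI64 | 12, 0)  -> 'CPU_SUBTYPE_ARM64_ALL'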
_MH_EXECUTE_SYM = "__mh_execute_header"
MH_EXECUTE_SYM = "_mh_execute_header"
_MH_BUNDLE_SYM = "__mh_bundle_header"
MH_BUNDLE_SYM = "_mh_bundle_header"
_MH_DYLIB_SYM = "__mh_dylib_header"
MH_DYLIB_SYM = "_mh_dylib_header"
_MH_DYLINKER_SYM = "__mh_dylinker_header"
MH_DYLINKER_SYM = "_mh_dylinker_header"
(
MH_OBJECT,
MH_EXECUTE,
MH_FVMLIB,
MH_CORE,
MH_PRELOAD,
MH_DYLIB,
MH_DYLINKER,
MH_BUNDLE,
MH_DYLIB_STUB,
MH_DSYM,
) = range(0x1, 0xB)
MH_FILESET = 0xC
(
MH_NOUNDEFS,
MH_INCRLINK,
MH_DYLDLINK,
MH_BINDATLOAD,
MH_PREBOUND,
MH_SPLIT_SEGS,
MH_LAZY_INIT,
MH_TWOLEVEL,
MH_FORCE_FLAT,
MH_NOMULTIDEFS,
MH_NOFIXPREBINDING,
MH_PREBINDABLE,
MH_ALLMODSBOUND,
MH_SUBSECTIONS_VIA_SYMBOLS,
MH_CANONICAL,
MH_WEAK_DEFINES,
MH_BINDS_TO_WEAK,
MH_ALLOW_STACK_EXECUTION,
MH_ROOT_SAFE,
MH_SETUID_SAFE,
MH_NO_REEXPORTED_DYLIBS,
MH_PIE,
MH_DEAD_STRIPPABLE_DYLIB,
MH_HAS_TLV_DESCRIPTORS,
MH_NO_HEAP_EXECUTION,
MH_APP_EXTENSION_SAFE,
) = map((1).__lshift__, range(26))
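# The names above receive successive single-bit values, so a couple of spot
# checks (these should match the usual <mach-o/loader.h> constants):
#   MH_NOUNDEFS == 0x1, MH_TWOLEVEL == 0x80, MH_PIE == 0x200000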
MH_MAGIC = 0xFEEDFACE
MH_CIGAM = 0xCEFAEDFE
MH_MAGIC_64 = 0xFEEDFACF
MH_CIGAM_64 = 0xCFFAEDFE
integer_t = p_int32
cpu_type_t = integer_t
cpu_subtype_t = p_uint32
MH_FILETYPE_NAMES = {
MH_OBJECT: "relocatable object",
MH_EXECUTE: "demand paged executable",
MH_FVMLIB: "fixed vm shared library",
MH_CORE: "core",
MH_PRELOAD: "preloaded executable",
MH_DYLIB: "dynamically bound shared library",
MH_DYLINKER: "dynamic link editor",
MH_BUNDLE: "dynamically bound bundle",
MH_DYLIB_STUB: "shared library stub for static linking",
MH_DSYM: "symbol information",
MH_FILESET: "fileset object",
}
MH_FILETYPE_SHORTNAMES = {
MH_OBJECT: "object",
MH_EXECUTE: "execute",
MH_FVMLIB: "fvmlib",
MH_CORE: "core",
MH_PRELOAD: "preload",
MH_DYLIB: "dylib",
MH_DYLINKER: "dylinker",
MH_BUNDLE: "bundle",
MH_DYLIB_STUB: "dylib_stub",
MH_DSYM: "dsym",
}
MH_FLAGS_NAMES = {
MH_NOUNDEFS: "MH_NOUNDEFS",
MH_INCRLINK: "MH_INCRLINK",
MH_DYLDLINK: "MH_DYLDLINK",
MH_BINDATLOAD: "MH_BINDATLOAD",
MH_PREBOUND: "MH_PREBOUND",
MH_SPLIT_SEGS: "MH_SPLIT_SEGS",
MH_LAZY_INIT: "MH_LAZY_INIT",
MH_TWOLEVEL: "MH_TWOLEVEL",
MH_FORCE_FLAT: "MH_FORCE_FLAT",
MH_NOMULTIDEFS: "MH_NOMULTIDEFS",
MH_NOFIXPREBINDING: "MH_NOFIXPREBINDING",
MH_PREBINDABLE: "MH_PREBINDABLE",
MH_ALLMODSBOUND: "MH_ALLMODSBOUND",
MH_SUBSECTIONS_VIA_SYMBOLS: "MH_SUBSECTIONS_VIA_SYMBOLS",
MH_CANONICAL: "MH_CANONICAL",
MH_WEAK_DEFINES: "MH_WEAK_DEFINES",
MH_BINDS_TO_WEAK: "MH_BINDS_TO_WEAK",
MH_ALLOW_STACK_EXECUTION: "MH_ALLOW_STACK_EXECUTION",
MH_ROOT_SAFE: "MH_ROOT_SAFE",
MH_SETUID_SAFE: "MH_SETUID_SAFE",
MH_NO_REEXPORTED_DYLIBS: "MH_NO_REEXPORTED_DYLIBS",
MH_PIE: "MH_PIE",
MH_DEAD_STRIPPABLE_DYLIB: "MH_DEAD_STRIPPABLE_DYLIB",
MH_HAS_TLV_DESCRIPTORS: "MH_HAS_TLV_DESCRIPTORS",
MH_NO_HEAP_EXECUTION: "MH_NO_HEAP_EXECUTION",
MH_APP_EXTENSION_SAFE: "MH_APP_EXTENSION_SAFE",
}
MH_FLAGS_DESCRIPTIONS = {
MH_NOUNDEFS: "no undefined references",
MH_INCRLINK: "output of an incremental link",
MH_DYLDLINK: "input for the dynamic linker",
MH_BINDATLOAD: "undefined references bound dynamically when loaded",
MH_PREBOUND: "dynamic undefined references prebound",
MH_SPLIT_SEGS: "split read-only and read-write segments",
MH_LAZY_INIT: "(obsolete)",
MH_TWOLEVEL: "using two-level name space bindings",
    MH_FORCE_FLAT: "forcing all images to use flat name space bindings",
MH_NOMULTIDEFS: "umbrella guarantees no multiple definitions",
MH_NOFIXPREBINDING: "do not notify prebinding agent about this executable",
MH_PREBINDABLE: "the binary is not prebound but can have its prebinding redone",
MH_ALLMODSBOUND: "indicates that this binary binds to all "
"two-level namespace modules of its dependent libraries",
MH_SUBSECTIONS_VIA_SYMBOLS: "safe to divide up the sections into "
"sub-sections via symbols for dead code stripping",
MH_CANONICAL: "the binary has been canonicalized via the unprebind operation",
MH_WEAK_DEFINES: "the final linked image contains external weak symbols",
MH_BINDS_TO_WEAK: "the final linked image uses weak symbols",
MH_ALLOW_STACK_EXECUTION: "all stacks in the task will be given "
"stack execution privilege",
MH_ROOT_SAFE: "the binary declares it is safe for use in processes with uid zero",
MH_SETUID_SAFE: "the binary declares it is safe for use in processes "
"when issetugid() is true",
MH_NO_REEXPORTED_DYLIBS: "the static linker does not need to examine dependent "
"dylibs to see if any are re-exported",
MH_PIE: "the OS will load the main executable at a random address",
MH_DEAD_STRIPPABLE_DYLIB: "the static linker will automatically not create a "
"LC_LOAD_DYLIB load command to the dylib if no symbols are being "
"referenced from the dylib",
MH_HAS_TLV_DESCRIPTORS: "contains a section of type S_THREAD_LOCAL_VARIABLES",
MH_NO_HEAP_EXECUTION: "the OS will run the main executable with a "
"non-executable heap even on platforms that don't require it",
MH_APP_EXTENSION_SAFE: "the code was linked for use in an application extension.",
}
class mach_version_helper(Structure):
_fields_ = (("_version", p_uint32),)
@property
def major(self):
return self._version >> 16 & 0xFFFF
@major.setter
def major(self, v):
self._version = (self._version & 0xFFFF) | (v << 16)
@property
def minor(self):
return self._version >> 8 & 0xFF
@minor.setter
def minor(self, v):
self._version = (self._version & 0xFFFF00FF) | (v << 8)
@property
def rev(self):
return self._version & 0xFF
@rev.setter
def rev(self, v):
        self._version = (self._version & 0xFFFFFF00) | v
def __str__(self):
return "%s.%s.%s" % (self.major, self.minor, self.rev)
class mach_timestamp_helper(p_uint32):
def __str__(self):
return time.ctime(self)
def read_struct(f, s, **kw):
return s.from_fileobj(f, **kw)
class mach_header(Structure):
_fields_ = (
("magic", p_uint32),
("cputype", cpu_type_t),
("cpusubtype", cpu_subtype_t),
("filetype", p_uint32),
("ncmds", p_uint32),
("sizeofcmds", p_uint32),
("flags", p_uint32),
)
def _describe(self):
bit = 1
flags = self.flags
dflags = []
while flags and bit < (1 << 32):
if flags & bit:
dflags.append(
{
"name": MH_FLAGS_NAMES.get(bit, str(bit)),
"description": MH_FLAGS_DESCRIPTIONS.get(bit, str(bit)),
}
)
flags = flags ^ bit
bit <<= 1
return (
("magic", int(self.magic)),
("cputype_string", CPU_TYPE_NAMES.get(self.cputype, self.cputype)),
("cputype", int(self.cputype)),
("cpusubtype_string", get_cpu_subtype(self.cputype, self.cpusubtype)),
("cpusubtype", int(self.cpusubtype)),
("filetype_string", MH_FILETYPE_NAMES.get(self.filetype, self.filetype)),
("filetype", int(self.filetype)),
("ncmds", self.ncmds),
("sizeofcmds", self.sizeofcmds),
("flags", dflags),
("raw_flags", int(self.flags)),
)
class mach_header_64(mach_header):
_fields_ = mach_header._fields_ + (("reserved", p_uint32),)
class load_command(Structure):
_fields_ = (("cmd", p_uint32), ("cmdsize", p_uint32))
def get_cmd_name(self):
return LC_NAMES.get(self.cmd, self.cmd)
LC_REQ_DYLD = 0x80000000
(
LC_SEGMENT,
LC_SYMTAB,
LC_SYMSEG,
LC_THREAD,
LC_UNIXTHREAD,
LC_LOADFVMLIB,
LC_IDFVMLIB,
LC_IDENT,
LC_FVMFILE,
LC_PREPAGE,
LC_DYSYMTAB,
LC_LOAD_DYLIB,
LC_ID_DYLIB,
LC_LOAD_DYLINKER,
LC_ID_DYLINKER,
LC_PREBOUND_DYLIB,
LC_ROUTINES,
LC_SUB_FRAMEWORK,
LC_SUB_UMBRELLA,
LC_SUB_CLIENT,
LC_SUB_LIBRARY,
LC_TWOLEVEL_HINTS,
LC_PREBIND_CKSUM,
) = range(0x1, 0x18)
LC_LOAD_WEAK_DYLIB = LC_REQ_DYLD | 0x18
LC_SEGMENT_64 = 0x19
LC_ROUTINES_64 = 0x1A
LC_UUID = 0x1B
LC_RPATH = 0x1C | LC_REQ_DYLD
LC_CODE_SIGNATURE = 0x1D
LC_CODE_SEGMENT_SPLIT_INFO = 0x1E
LC_REEXPORT_DYLIB = 0x1F | LC_REQ_DYLD
LC_LAZY_LOAD_DYLIB = 0x20
LC_ENCRYPTION_INFO = 0x21
LC_DYLD_INFO = 0x22
LC_DYLD_INFO_ONLY = 0x22 | LC_REQ_DYLD
LC_LOAD_UPWARD_DYLIB = 0x23 | LC_REQ_DYLD
LC_VERSION_MIN_MACOSX = 0x24
LC_VERSION_MIN_IPHONEOS = 0x25
LC_FUNCTION_STARTS = 0x26
LC_DYLD_ENVIRONMENT = 0x27
LC_MAIN = 0x28 | LC_REQ_DYLD
LC_DATA_IN_CODE = 0x29
LC_SOURCE_VERSION = 0x2A
LC_DYLIB_CODE_SIGN_DRS = 0x2B
LC_ENCRYPTION_INFO_64 = 0x2C
LC_LINKER_OPTION = 0x2D
LC_LINKER_OPTIMIZATION_HINT = 0x2E
LC_VERSION_MIN_TVOS = 0x2F
LC_VERSION_MIN_WATCHOS = 0x30
LC_NOTE = 0x31
LC_BUILD_VERSION = 0x32
LC_DYLD_EXPORTS_TRIE = 0x33 | LC_REQ_DYLD
LC_DYLD_CHAINED_FIXUPS = 0x34 | LC_REQ_DYLD
LC_FILESET_ENTRY = 0x35 | LC_REQ_DYLD
# this is really a union.. but whatever
class lc_str(p_uint32):
pass
p_str16 = pypackable("p_str16", bytes, "16s")
vm_prot_t = p_int32
class segment_command(Structure):
_fields_ = (
("segname", p_str16),
("vmaddr", p_uint32),
("vmsize", p_uint32),
("fileoff", p_uint32),
("filesize", p_uint32),
("maxprot", vm_prot_t),
("initprot", vm_prot_t),
("nsects", p_uint32), # read the section structures ?
("flags", p_uint32),
)
def describe(self):
s = {}
s["segname"] = self.segname.rstrip("\x00")
s["vmaddr"] = int(self.vmaddr)
s["vmsize"] = int(self.vmsize)
s["fileoff"] = int(self.fileoff)
s["filesize"] = int(self.filesize)
s["initprot"] = self.get_initial_virtual_memory_protections()
s["initprot_raw"] = int(self.initprot)
s["maxprot"] = self.get_max_virtual_memory_protections()
s["maxprot_raw"] = int(self.maxprot)
s["nsects"] = int(self.nsects)
s["flags"] = self.flags
return s
def get_initial_virtual_memory_protections(self):
vm = []
if self.initprot == 0:
vm.append("VM_PROT_NONE")
if self.initprot & 1:
vm.append("VM_PROT_READ")
if self.initprot & 2:
vm.append("VM_PROT_WRITE")
if self.initprot & 4:
vm.append("VM_PROT_EXECUTE")
return vm
def get_max_virtual_memory_protections(self):
vm = []
if self.maxprot == 0:
vm.append("VM_PROT_NONE")
if self.maxprot & 1:
vm.append("VM_PROT_READ")
if self.maxprot & 2:
vm.append("VM_PROT_WRITE")
if self.maxprot & 4:
vm.append("VM_PROT_EXECUTE")
return vm
class segment_command_64(Structure):
_fields_ = (
("segname", p_str16),
("vmaddr", p_uint64),
("vmsize", p_uint64),
("fileoff", p_uint64),
("filesize", p_uint64),
("maxprot", vm_prot_t),
("initprot", vm_prot_t),
("nsects", p_uint32), # read the section structures ?
("flags", p_uint32),
)
def describe(self):
s = {}
s["segname"] = self.segname.rstrip("\x00")
s["vmaddr"] = int(self.vmaddr)
s["vmsize"] = int(self.vmsize)
s["fileoff"] = int(self.fileoff)
s["filesize"] = int(self.filesize)
s["initprot"] = self.get_initial_virtual_memory_protections()
s["initprot_raw"] = int(self.initprot)
s["maxprot"] = self.get_max_virtual_memory_protections()
s["maxprot_raw"] = int(self.maxprot)
s["nsects"] = int(self.nsects)
s["flags"] = self.flags
return s
def get_initial_virtual_memory_protections(self):
vm = []
if self.initprot == 0:
vm.append("VM_PROT_NONE")
if self.initprot & 1:
vm.append("VM_PROT_READ")
if self.initprot & 2:
vm.append("VM_PROT_WRITE")
if self.initprot & 4:
vm.append("VM_PROT_EXECUTE")
return vm
def get_max_virtual_memory_protections(self):
vm = []
if self.maxprot == 0:
vm.append("VM_PROT_NONE")
if self.maxprot & 1:
vm.append("VM_PROT_READ")
if self.maxprot & 2:
vm.append("VM_PROT_WRITE")
if self.maxprot & 4:
vm.append("VM_PROT_EXECUTE")
return vm
SG_HIGHVM = 0x1
SG_FVMLIB = 0x2
SG_NORELOC = 0x4
SG_PROTECTED_VERSION_1 = 0x8
class section(Structure):
_fields_ = (
("sectname", p_str16),
("segname", p_str16),
("addr", p_uint32),
("size", p_uint32),
("offset", p_uint32),
("align", p_uint32),
("reloff", p_uint32),
("nreloc", p_uint32),
("flags", p_uint32),
("reserved1", p_uint32),
("reserved2", p_uint32),
)
def describe(self):
s = {}
s["sectname"] = self.sectname.rstrip("\x00")
s["segname"] = self.segname.rstrip("\x00")
s["addr"] = int(self.addr)
s["size"] = int(self.size)
s["offset"] = int(self.offset)
s["align"] = int(self.align)
s["reloff"] = int(self.reloff)
s["nreloc"] = int(self.nreloc)
f = {}
f["type"] = FLAG_SECTION_TYPES[int(self.flags) & 0xFF]
f["attributes"] = []
for k in FLAG_SECTION_ATTRIBUTES:
if k & self.flags:
f["attributes"].append(FLAG_SECTION_ATTRIBUTES[k])
if not f["attributes"]:
del f["attributes"]
s["flags"] = f
s["reserved1"] = int(self.reserved1)
s["reserved2"] = int(self.reserved2)
return s
def add_section_data(self, data):
self.section_data = data
class section_64(Structure):
_fields_ = (
("sectname", p_str16),
("segname", p_str16),
("addr", p_uint64),
("size", p_uint64),
("offset", p_uint32),
("align", p_uint32),
("reloff", p_uint32),
("nreloc", p_uint32),
("flags", p_uint32),
("reserved1", p_uint32),
("reserved2", p_uint32),
("reserved3", p_uint32),
)
def describe(self):
s = {}
s["sectname"] = self.sectname.rstrip("\x00")
s["segname"] = self.segname.rstrip("\x00")
s["addr"] = int(self.addr)
s["size"] = int(self.size)
s["offset"] = int(self.offset)
s["align"] = int(self.align)
s["reloff"] = int(self.reloff)
s["nreloc"] = int(self.nreloc)
f = {}
f["type"] = FLAG_SECTION_TYPES[int(self.flags) & 0xFF]
f["attributes"] = []
for k in FLAG_SECTION_ATTRIBUTES:
if k & self.flags:
f["attributes"].append(FLAG_SECTION_ATTRIBUTES[k])
if not f["attributes"]:
del f["attributes"]
s["flags"] = f
s["reserved1"] = int(self.reserved1)
s["reserved2"] = int(self.reserved2)
s["reserved3"] = int(self.reserved3)
return s
def add_section_data(self, data):
self.section_data = data
SECTION_TYPE = 0xFF
SECTION_ATTRIBUTES = 0xFFFFFF00
S_REGULAR = 0x0
S_ZEROFILL = 0x1
S_CSTRING_LITERALS = 0x2
S_4BYTE_LITERALS = 0x3
S_8BYTE_LITERALS = 0x4
S_LITERAL_POINTERS = 0x5
S_NON_LAZY_SYMBOL_POINTERS = 0x6
S_LAZY_SYMBOL_POINTERS = 0x7
S_SYMBOL_STUBS = 0x8
S_MOD_INIT_FUNC_POINTERS = 0x9
S_MOD_TERM_FUNC_POINTERS = 0xA
S_COALESCED = 0xB
S_GB_ZEROFILL = 0xC
S_INTERPOSING = 0xD
S_16BYTE_LITERALS = 0xE
S_DTRACE_DOF = 0xF
S_LAZY_DYLIB_SYMBOL_POINTERS = 0x10
S_THREAD_LOCAL_REGULAR = 0x11
S_THREAD_LOCAL_ZEROFILL = 0x12
S_THREAD_LOCAL_VARIABLES = 0x13
S_THREAD_LOCAL_VARIABLE_POINTERS = 0x14
S_THREAD_LOCAL_INIT_FUNCTION_POINTERS = 0x15
FLAG_SECTION_TYPES = {
S_REGULAR: "S_REGULAR",
S_ZEROFILL: "S_ZEROFILL",
S_CSTRING_LITERALS: "S_CSTRING_LITERALS",
S_4BYTE_LITERALS: "S_4BYTE_LITERALS",
S_8BYTE_LITERALS: "S_8BYTE_LITERALS",
S_LITERAL_POINTERS: "S_LITERAL_POINTERS",
S_NON_LAZY_SYMBOL_POINTERS: "S_NON_LAZY_SYMBOL_POINTERS",
S_LAZY_SYMBOL_POINTERS: "S_LAZY_SYMBOL_POINTERS",
S_SYMBOL_STUBS: "S_SYMBOL_STUBS",
S_MOD_INIT_FUNC_POINTERS: "S_MOD_INIT_FUNC_POINTERS",
S_MOD_TERM_FUNC_POINTERS: "S_MOD_TERM_FUNC_POINTERS",
S_COALESCED: "S_COALESCED",
S_GB_ZEROFILL: "S_GB_ZEROFILL",
S_INTERPOSING: "S_INTERPOSING",
S_16BYTE_LITERALS: "S_16BYTE_LITERALS",
S_DTRACE_DOF: "S_DTRACE_DOF",
S_LAZY_DYLIB_SYMBOL_POINTERS: "S_LAZY_DYLIB_SYMBOL_POINTERS",
S_THREAD_LOCAL_REGULAR: "S_THREAD_LOCAL_REGULAR",
S_THREAD_LOCAL_ZEROFILL: "S_THREAD_LOCAL_ZEROFILL",
S_THREAD_LOCAL_VARIABLES: "S_THREAD_LOCAL_VARIABLES",
S_THREAD_LOCAL_VARIABLE_POINTERS: "S_THREAD_LOCAL_VARIABLE_POINTERS",
S_THREAD_LOCAL_INIT_FUNCTION_POINTERS: "S_THREAD_LOCAL_INIT_FUNCTION_POINTERS",
}
SECTION_ATTRIBUTES_USR = 0xFF000000
S_ATTR_PURE_INSTRUCTIONS = 0x80000000
S_ATTR_NO_TOC = 0x40000000
S_ATTR_STRIP_STATIC_SYMS = 0x20000000
S_ATTR_NO_DEAD_STRIP = 0x10000000
S_ATTR_LIVE_SUPPORT = 0x08000000
S_ATTR_SELF_MODIFYING_CODE = 0x04000000
S_ATTR_DEBUG = 0x02000000
SECTION_ATTRIBUTES_SYS = 0x00FFFF00
S_ATTR_SOME_INSTRUCTIONS = 0x00000400
S_ATTR_EXT_RELOC = 0x00000200
S_ATTR_LOC_RELOC = 0x00000100
FLAG_SECTION_ATTRIBUTES = {
S_ATTR_PURE_INSTRUCTIONS: "S_ATTR_PURE_INSTRUCTIONS",
S_ATTR_NO_TOC: "S_ATTR_NO_TOC",
S_ATTR_STRIP_STATIC_SYMS: "S_ATTR_STRIP_STATIC_SYMS",
S_ATTR_NO_DEAD_STRIP: "S_ATTR_NO_DEAD_STRIP",
S_ATTR_LIVE_SUPPORT: "S_ATTR_LIVE_SUPPORT",
S_ATTR_SELF_MODIFYING_CODE: "S_ATTR_SELF_MODIFYING_CODE",
S_ATTR_DEBUG: "S_ATTR_DEBUG",
S_ATTR_SOME_INSTRUCTIONS: "S_ATTR_SOME_INSTRUCTIONS",
S_ATTR_EXT_RELOC: "S_ATTR_EXT_RELOC",
S_ATTR_LOC_RELOC: "S_ATTR_LOC_RELOC",
}
SEG_PAGEZERO = "__PAGEZERO"
SEG_TEXT = "__TEXT"
SECT_TEXT = "__text"
SECT_FVMLIB_INIT0 = "__fvmlib_init0"
SECT_FVMLIB_INIT1 = "__fvmlib_init1"
SEG_DATA = "__DATA"
SECT_DATA = "__data"
SECT_BSS = "__bss"
SECT_COMMON = "__common"
SEG_OBJC = "__OBJC"
SECT_OBJC_SYMBOLS = "__symbol_table"
SECT_OBJC_MODULES = "__module_info"
SECT_OBJC_STRINGS = "__selector_strs"
SECT_OBJC_REFS = "__selector_refs"
SEG_ICON = "__ICON"
SECT_ICON_HEADER = "__header"
SECT_ICON_TIFF = "__tiff"
SEG_LINKEDIT = "__LINKEDIT"
SEG_UNIXSTACK = "__UNIXSTACK"
SEG_IMPORT = "__IMPORT"
#
# I really should remove all these _command classes because they
# are no different. I decided to keep the load commands separate,
# so classes like fvmlib and fvmlib_command are equivalent.
#
class fvmlib(Structure):
_fields_ = (
("name", lc_str),
("minor_version", mach_version_helper),
("header_addr", p_uint32),
)
class fvmlib_command(Structure):
_fields_ = fvmlib._fields_
def describe(self):
s = {}
s["header_addr"] = int(self.header_addr)
return s
class dylib(Structure):
_fields_ = (
("name", lc_str),
("timestamp", mach_timestamp_helper),
("current_version", mach_version_helper),
("compatibility_version", mach_version_helper),
)
# merged dylib structure
class dylib_command(Structure):
_fields_ = dylib._fields_
def describe(self):
s = {}
s["timestamp"] = str(self.timestamp)
s["current_version"] = str(self.current_version)
s["compatibility_version"] = str(self.compatibility_version)
return s
class sub_framework_command(Structure):
_fields_ = (("umbrella", lc_str),)
def describe(self):
return {}
class sub_client_command(Structure):
_fields_ = (("client", lc_str),)
def describe(self):
return {}
class sub_umbrella_command(Structure):
_fields_ = (("sub_umbrella", lc_str),)
def describe(self):
return {}
class sub_library_command(Structure):
_fields_ = (("sub_library", lc_str),)
def describe(self):
return {}
class prebound_dylib_command(Structure):
_fields_ = (("name", lc_str), ("nmodules", p_uint32), ("linked_modules", lc_str))
def describe(self):
return {"nmodules": int(self.nmodules)}
class dylinker_command(Structure):
_fields_ = (("name", lc_str),)
def describe(self):
return {}
class thread_command(Structure):
_fields_ = (("flavor", p_uint32), ("count", p_uint32))
def describe(self):
s = {}
s["flavor"] = int(self.flavor)
s["count"] = int(self.count)
return s
class entry_point_command(Structure):
_fields_ = (("entryoff", p_uint64), ("stacksize", p_uint64))
def describe(self):
s = {}
s["entryoff"] = int(self.entryoff)
s["stacksize"] = int(self.stacksize)
return s
class routines_command(Structure):
_fields_ = (
("init_address", p_uint32),
("init_module", p_uint32),
("reserved1", p_uint32),
("reserved2", p_uint32),
("reserved3", p_uint32),
("reserved4", p_uint32),
("reserved5", p_uint32),
("reserved6", p_uint32),
)
def describe(self):
s = {}
s["init_address"] = int(self.init_address)
s["init_module"] = int(self.init_module)
s["reserved1"] = int(self.reserved1)
s["reserved2"] = int(self.reserved2)
s["reserved3"] = int(self.reserved3)
s["reserved4"] = int(self.reserved4)
s["reserved5"] = int(self.reserved5)
s["reserved6"] = int(self.reserved6)
return s
class routines_command_64(Structure):
_fields_ = (
("init_address", p_uint64),
("init_module", p_uint64),
("reserved1", p_uint64),
("reserved2", p_uint64),
("reserved3", p_uint64),
("reserved4", p_uint64),
("reserved5", p_uint64),
("reserved6", p_uint64),
)
def describe(self):
s = {}
s["init_address"] = int(self.init_address)
s["init_module"] = int(self.init_module)
s["reserved1"] = int(self.reserved1)
s["reserved2"] = int(self.reserved2)
s["reserved3"] = int(self.reserved3)
s["reserved4"] = int(self.reserved4)
s["reserved5"] = int(self.reserved5)
s["reserved6"] = int(self.reserved6)
return s
class symtab_command(Structure):
_fields_ = (
("symoff", p_uint32),
("nsyms", p_uint32),
("stroff", p_uint32),
("strsize", p_uint32),
)
def describe(self):
s = {}
s["symoff"] = int(self.symoff)
s["nsyms"] = int(self.nsyms)
s["stroff"] = int(self.stroff)
s["strsize"] = int(self.strsize)
return s
class dysymtab_command(Structure):
_fields_ = (
("ilocalsym", p_uint32),
("nlocalsym", p_uint32),
("iextdefsym", p_uint32),
("nextdefsym", p_uint32),
("iundefsym", p_uint32),
("nundefsym", p_uint32),
("tocoff", p_uint32),
("ntoc", p_uint32),
("modtaboff", p_uint32),
("nmodtab", p_uint32),
("extrefsymoff", p_uint32),
("nextrefsyms", p_uint32),
("indirectsymoff", p_uint32),
("nindirectsyms", p_uint32),
("extreloff", p_uint32),
("nextrel", p_uint32),
("locreloff", p_uint32),
("nlocrel", p_uint32),
)
def describe(self):
dys = {}
dys["ilocalsym"] = int(self.ilocalsym)
dys["nlocalsym"] = int(self.nlocalsym)
dys["iextdefsym"] = int(self.iextdefsym)
dys["nextdefsym"] = int(self.nextdefsym)
dys["iundefsym"] = int(self.iundefsym)
dys["nundefsym"] = int(self.nundefsym)
dys["tocoff"] = int(self.tocoff)
dys["ntoc"] = int(self.ntoc)
dys["modtaboff"] = int(self.modtaboff)
dys["nmodtab"] = int(self.nmodtab)
dys["extrefsymoff"] = int(self.extrefsymoff)
dys["nextrefsyms"] = int(self.nextrefsyms)
dys["indirectsymoff"] = int(self.indirectsymoff)
dys["nindirectsyms"] = int(self.nindirectsyms)
dys["extreloff"] = int(self.extreloff)
dys["nextrel"] = int(self.nextrel)
dys["locreloff"] = int(self.locreloff)
dys["nlocrel"] = int(self.nlocrel)
return dys
INDIRECT_SYMBOL_LOCAL = 0x80000000
INDIRECT_SYMBOL_ABS = 0x40000000
class dylib_table_of_contents(Structure):
_fields_ = (("symbol_index", p_uint32), ("module_index", p_uint32))
class dylib_module(Structure):
_fields_ = (
("module_name", p_uint32),
("iextdefsym", p_uint32),
("nextdefsym", p_uint32),
("irefsym", p_uint32),
("nrefsym", p_uint32),
("ilocalsym", p_uint32),
("nlocalsym", p_uint32),
("iextrel", p_uint32),
("nextrel", p_uint32),
("iinit_iterm", p_uint32),
("ninit_nterm", p_uint32),
("objc_module_info_addr", p_uint32),
("objc_module_info_size", p_uint32),
)
class dylib_module_64(Structure):
_fields_ = (
("module_name", p_uint32),
("iextdefsym", p_uint32),
("nextdefsym", p_uint32),
("irefsym", p_uint32),
("nrefsym", p_uint32),
("ilocalsym", p_uint32),
("nlocalsym", p_uint32),
("iextrel", p_uint32),
("nextrel", p_uint32),
("iinit_iterm", p_uint32),
("ninit_nterm", p_uint32),
("objc_module_info_size", p_uint32),
("objc_module_info_addr", p_uint64),
)
class dylib_reference(Structure):
_fields_ = (
("isym_flags", p_uint32),
# ('isym', p_uint8 * 3),
# ('flags', p_uint8),
)
class twolevel_hints_command(Structure):
_fields_ = (("offset", p_uint32), ("nhints", p_uint32))
def describe(self):
s = {}
s["offset"] = int(self.offset)
s["nhints"] = int(self.nhints)
return s
class twolevel_hint(Structure):
_fields_ = (
("isub_image_itoc", p_uint32),
# ('isub_image', p_uint8),
# ('itoc', p_uint8 * 3),
)
class prebind_cksum_command(Structure):
_fields_ = (("cksum", p_uint32),)
def describe(self):
return {"cksum": int(self.cksum)}
class symseg_command(Structure):
_fields_ = (("offset", p_uint32), ("size", p_uint32))
def describe(self):
s = {}
s["offset"] = int(self.offset)
s["size"] = int(self.size)
class ident_command(Structure):
_fields_ = ()
def describe(self):
return {}
class fvmfile_command(Structure):
_fields_ = (("name", lc_str), ("header_addr", p_uint32))
def describe(self):
return {"header_addr": int(self.header_addr)}
class uuid_command(Structure):
_fields_ = (("uuid", p_str16),)
def describe(self):
return {"uuid": self.uuid.rstrip("\x00")}
class rpath_command(Structure):
_fields_ = (("path", lc_str),)
def describe(self):
return {}
class linkedit_data_command(Structure):
_fields_ = (("dataoff", p_uint32), ("datasize", p_uint32))
def describe(self):
s = {}
s["dataoff"] = int(self.dataoff)
s["datasize"] = int(self.datasize)
return s
class version_min_command(Structure):
_fields_ = (
("version", p_uint32), # X.Y.Z is encoded in nibbles xxxx.yy.zz
("sdk", p_uint32),
)
def describe(self):
v = int(self.version)
v3 = v & 0xFF
v = v >> 8
v2 = v & 0xFF
v = v >> 8
v1 = v & 0xFFFF
s = int(self.sdk)
s3 = s & 0xFF
s = s >> 8
s2 = s & 0xFF
s = s >> 8
s1 = s & 0xFFFF
return {
"version": str(int(v1)) + "." + str(int(v2)) + "." + str(int(v3)),
"sdk": str(int(s1)) + "." + str(int(s2)) + "." + str(int(s3)),
}
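# Worked example of the nibble-packed encoding above (hypothetical values):
# version == 0x000A0F06 decodes to "10.15.6" and sdk == 0x000B0000 to "11.0.0".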
class source_version_command(Structure):
_fields_ = (("version", p_uint64),)
def describe(self):
v = int(self.version)
a = v >> 40
b = (v >> 30) & 0x3FF
c = (v >> 20) & 0x3FF
d = (v >> 10) & 0x3FF
e = v & 0x3FF
r = str(a) + "." + str(b) + "." + str(c) + "." + str(d) + "." + str(e)
return {"version": r}
class note_command(Structure):
_fields_ = (("data_owner", p_str16), ("offset", p_uint64), ("size", p_uint64))
class build_version_command(Structure):
_fields_ = (
("platform", p_uint32),
("minos", p_uint32),
("sdk", p_uint32),
("ntools", p_uint32),
)
def describe(self):
return {}
class build_tool_version(Structure):
_fields_ = (("tool", p_uint32), ("version", p_uint32))
class data_in_code_entry(Structure):
_fields_ = (("offset", p_uint32), ("length", p_uint32), ("kind", p_uint32))
def describe(self):
return {"offset": self.offset, "length": self.length, "kind": self.kind}
DICE_KIND_DATA = 0x0001
DICE_KIND_JUMP_TABLE8 = 0x0002
DICE_KIND_JUMP_TABLE16 = 0x0003
DICE_KIND_JUMP_TABLE32 = 0x0004
DICE_KIND_ABS_JUMP_TABLE32 = 0x0005
DATA_IN_CODE_KINDS = {
DICE_KIND_DATA: "DICE_KIND_DATA",
DICE_KIND_JUMP_TABLE8: "DICE_KIND_JUMP_TABLE8",
DICE_KIND_JUMP_TABLE16: "DICE_KIND_JUMP_TABLE16",
DICE_KIND_JUMP_TABLE32: "DICE_KIND_JUMP_TABLE32",
DICE_KIND_ABS_JUMP_TABLE32: "DICE_KIND_ABS_JUMP_TABLE32",
}
class tlv_descriptor(Structure):
_fields_ = (
("thunk", p_long), # Actually a pointer to a function
("key", p_ulong),
("offset", p_ulong),
)
def describe(self):
return {"thunk": self.thunk, "key": self.key, "offset": self.offset}
class encryption_info_command(Structure):
_fields_ = (("cryptoff", p_uint32), ("cryptsize", p_uint32), ("cryptid", p_uint32))
def describe(self):
s = {}
s["cryptoff"] = int(self.cryptoff)
s["cryptsize"] = int(self.cryptsize)
s["cryptid"] = int(self.cryptid)
return s
class encryption_info_command_64(Structure):
_fields_ = (
("cryptoff", p_uint32),
("cryptsize", p_uint32),
("cryptid", p_uint32),
("pad", p_uint32),
)
def describe(self):
s = {}
s["cryptoff"] = int(self.cryptoff)
s["cryptsize"] = int(self.cryptsize)
s["cryptid"] = int(self.cryptid)
s["pad"] = int(self.pad)
return s
class dyld_info_command(Structure):
_fields_ = (
("rebase_off", p_uint32),
("rebase_size", p_uint32),
("bind_off", p_uint32),
("bind_size", p_uint32),
("weak_bind_off", p_uint32),
("weak_bind_size", p_uint32),
("lazy_bind_off", p_uint32),
("lazy_bind_size", p_uint32),
("export_off", p_uint32),
("export_size", p_uint32),
)
def describe(self):
dyld = {}
dyld["rebase_off"] = int(self.rebase_off)
dyld["rebase_size"] = int(self.rebase_size)
dyld["bind_off"] = int(self.bind_off)
dyld["bind_size"] = int(self.bind_size)
dyld["weak_bind_off"] = int(self.weak_bind_off)
dyld["weak_bind_size"] = int(self.weak_bind_size)
dyld["lazy_bind_off"] = int(self.lazy_bind_off)
dyld["lazy_bind_size"] = int(self.lazy_bind_size)
dyld["export_off"] = int(self.export_off)
dyld["export_size"] = int(self.export_size)
return dyld
class linker_option_command(Structure):
_fields_ = (("count", p_uint32),)
def describe(self):
return {"count": int(self.count)}
class fileset_entry_command(Structure):
_fields_ = (
("vmaddr", p_uint64),
("fileoff", p_uint64),
("entry_id", lc_str),
("reserved", p_uint32),
)
LC_REGISTRY = {
LC_SEGMENT: segment_command,
LC_IDFVMLIB: fvmlib_command,
LC_LOADFVMLIB: fvmlib_command,
LC_ID_DYLIB: dylib_command,
LC_LOAD_DYLIB: dylib_command,
LC_LOAD_WEAK_DYLIB: dylib_command,
LC_SUB_FRAMEWORK: sub_framework_command,
LC_SUB_CLIENT: sub_client_command,
LC_SUB_UMBRELLA: sub_umbrella_command,
LC_SUB_LIBRARY: sub_library_command,
LC_PREBOUND_DYLIB: prebound_dylib_command,
LC_ID_DYLINKER: dylinker_command,
LC_LOAD_DYLINKER: dylinker_command,
LC_THREAD: thread_command,
LC_UNIXTHREAD: thread_command,
LC_ROUTINES: routines_command,
LC_SYMTAB: symtab_command,
LC_DYSYMTAB: dysymtab_command,
LC_TWOLEVEL_HINTS: twolevel_hints_command,
LC_PREBIND_CKSUM: prebind_cksum_command,
LC_SYMSEG: symseg_command,
LC_IDENT: ident_command,
LC_FVMFILE: fvmfile_command,
LC_SEGMENT_64: segment_command_64,
LC_ROUTINES_64: routines_command_64,
LC_UUID: uuid_command,
LC_RPATH: rpath_command,
LC_CODE_SIGNATURE: linkedit_data_command,
LC_CODE_SEGMENT_SPLIT_INFO: linkedit_data_command,
LC_REEXPORT_DYLIB: dylib_command,
LC_LAZY_LOAD_DYLIB: dylib_command,
LC_ENCRYPTION_INFO: encryption_info_command,
LC_DYLD_INFO: dyld_info_command,
LC_DYLD_INFO_ONLY: dyld_info_command,
LC_LOAD_UPWARD_DYLIB: dylib_command,
LC_VERSION_MIN_MACOSX: version_min_command,
LC_VERSION_MIN_IPHONEOS: version_min_command,
LC_FUNCTION_STARTS: linkedit_data_command,
LC_DYLD_ENVIRONMENT: dylinker_command,
LC_MAIN: entry_point_command,
LC_DATA_IN_CODE: linkedit_data_command,
LC_SOURCE_VERSION: source_version_command,
LC_DYLIB_CODE_SIGN_DRS: linkedit_data_command,
LC_ENCRYPTION_INFO_64: encryption_info_command_64,
LC_LINKER_OPTION: linker_option_command,
LC_LINKER_OPTIMIZATION_HINT: linkedit_data_command,
LC_VERSION_MIN_TVOS: version_min_command,
LC_VERSION_MIN_WATCHOS: version_min_command,
LC_NOTE: note_command,
LC_BUILD_VERSION: build_version_command,
LC_DYLD_EXPORTS_TRIE: linkedit_data_command,
LC_DYLD_CHAINED_FIXUPS: linkedit_data_command,
LC_FILESET_ENTRY: fileset_entry_command,
}
LC_NAMES = {
LC_SEGMENT: "LC_SEGMENT",
LC_IDFVMLIB: "LC_IDFVMLIB",
LC_LOADFVMLIB: "LC_LOADFVMLIB",
LC_ID_DYLIB: "LC_ID_DYLIB",
LC_LOAD_DYLIB: "LC_LOAD_DYLIB",
LC_LOAD_WEAK_DYLIB: "LC_LOAD_WEAK_DYLIB",
LC_SUB_FRAMEWORK: "LC_SUB_FRAMEWORK",
LC_SUB_CLIENT: "LC_SUB_CLIENT",
LC_SUB_UMBRELLA: "LC_SUB_UMBRELLA",
LC_SUB_LIBRARY: "LC_SUB_LIBRARY",
LC_PREBOUND_DYLIB: "LC_PREBOUND_DYLIB",
LC_ID_DYLINKER: "LC_ID_DYLINKER",
LC_LOAD_DYLINKER: "LC_LOAD_DYLINKER",
LC_THREAD: "LC_THREAD",
LC_UNIXTHREAD: "LC_UNIXTHREAD",
LC_ROUTINES: "LC_ROUTINES",
LC_SYMTAB: "LC_SYMTAB",
LC_DYSYMTAB: "LC_DYSYMTAB",
LC_TWOLEVEL_HINTS: "LC_TWOLEVEL_HINTS",
LC_PREBIND_CKSUM: "LC_PREBIND_CKSUM",
LC_SYMSEG: "LC_SYMSEG",
LC_IDENT: "LC_IDENT",
LC_FVMFILE: "LC_FVMFILE",
LC_SEGMENT_64: "LC_SEGMENT_64",
LC_ROUTINES_64: "LC_ROUTINES_64",
LC_UUID: "LC_UUID",
LC_RPATH: "LC_RPATH",
LC_CODE_SIGNATURE: "LC_CODE_SIGNATURE",
LC_CODE_SEGMENT_SPLIT_INFO: "LC_CODE_SEGMENT_SPLIT_INFO",
LC_REEXPORT_DYLIB: "LC_REEXPORT_DYLIB",
LC_LAZY_LOAD_DYLIB: "LC_LAZY_LOAD_DYLIB",
LC_ENCRYPTION_INFO: "LC_ENCRYPTION_INFO",
LC_DYLD_INFO: "LC_DYLD_INFO",
LC_DYLD_INFO_ONLY: "LC_DYLD_INFO_ONLY",
LC_LOAD_UPWARD_DYLIB: "LC_LOAD_UPWARD_DYLIB",
LC_VERSION_MIN_MACOSX: "LC_VERSION_MIN_MACOSX",
LC_VERSION_MIN_IPHONEOS: "LC_VERSION_MIN_IPHONEOS",
LC_FUNCTION_STARTS: "LC_FUNCTION_STARTS",
LC_DYLD_ENVIRONMENT: "LC_DYLD_ENVIRONMENT",
LC_MAIN: "LC_MAIN",
LC_DATA_IN_CODE: "LC_DATA_IN_CODE",
LC_SOURCE_VERSION: "LC_SOURCE_VERSION",
LC_DYLIB_CODE_SIGN_DRS: "LC_DYLIB_CODE_SIGN_DRS",
LC_LINKER_OPTIMIZATION_HINT: "LC_LINKER_OPTIMIZATION_HINT",
LC_VERSION_MIN_TVOS: "LC_VERSION_MIN_TVOS",
LC_VERSION_MIN_WATCHOS: "LC_VERSION_MIN_WATCHOS",
LC_NOTE: "LC_NOTE",
LC_BUILD_VERSION: "LC_BUILD_VERSION",
LC_DYLD_EXPORTS_TRIE: "LC_DYLD_EXPORTS_TRIE",
LC_DYLD_CHAINED_FIXUPS: "LC_DYLD_CHAINED_FIXUPS",
LC_ENCRYPTION_INFO_64: "LC_ENCRYPTION_INFO_64",
LC_LINKER_OPTION: "LC_LINKER_OPTION",
LC_PREPAGE: "LC_PREPAGE",
LC_FILESET_ENTRY: "LC_FILESET_ENTRY",
}
# this is another union.
class n_un(p_int32):
pass
class nlist(Structure):
_fields_ = (
("n_un", n_un),
("n_type", p_uint8),
("n_sect", p_uint8),
("n_desc", p_short),
("n_value", p_uint32),
)
class nlist_64(Structure):
_fields_ = [
("n_un", n_un),
("n_type", p_uint8),
("n_sect", p_uint8),
("n_desc", p_short),
("n_value", p_int64),
]
N_STAB = 0xE0
N_PEXT = 0x10
N_TYPE = 0x0E
N_EXT = 0x01
N_UNDF = 0x0
N_ABS = 0x2
N_SECT = 0xE
N_PBUD = 0xC
N_INDR = 0xA
NO_SECT = 0
MAX_SECT = 255
class relocation_info(Structure):
_fields_ = (("r_address", p_uint32), ("_r_bitfield", p_uint32))
def _describe(self):
return (("r_address", self.r_address), ("_r_bitfield", self._r_bitfield))
def GET_COMM_ALIGN(n_desc):
return (n_desc >> 8) & 0x0F
def SET_COMM_ALIGN(n_desc, align):
return (n_desc & 0xF0FF) | ((align & 0x0F) << 8)
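# Illustrative round trip for the n_desc alignment helpers (values hypothetical):
#   SET_COMM_ALIGN(0x0000, 4) == 0x0400
#   GET_COMM_ALIGN(0x0400)    == 4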
REFERENCE_TYPE = 0xF
REFERENCE_FLAG_UNDEFINED_NON_LAZY = 0
REFERENCE_FLAG_UNDEFINED_LAZY = 1
REFERENCE_FLAG_DEFINED = 2
REFERENCE_FLAG_PRIVATE_DEFINED = 3
REFERENCE_FLAG_PRIVATE_UNDEFINED_NON_LAZY = 4
REFERENCE_FLAG_PRIVATE_UNDEFINED_LAZY = 5
REFERENCED_DYNAMICALLY = 0x0010
def GET_LIBRARY_ORDINAL(n_desc):
return ((n_desc) >> 8) & 0xFF
def SET_LIBRARY_ORDINAL(n_desc, ordinal):
return ((n_desc) & 0x00FF) | (((ordinal & 0xFF) << 8))
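# Illustrative round trip for the two-level namespace ordinal helpers; the
# starting n_desc of 0x0040 (N_WEAK_REF) is just an example value:
#   SET_LIBRARY_ORDINAL(0x0040, 3) == 0x0340
#   GET_LIBRARY_ORDINAL(0x0340)    == 3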
SELF_LIBRARY_ORDINAL = 0x0
MAX_LIBRARY_ORDINAL = 0xFD
DYNAMIC_LOOKUP_ORDINAL = 0xFE
EXECUTABLE_ORDINAL = 0xFF
N_NO_DEAD_STRIP = 0x0020
N_DESC_DISCARDED = 0x0020
N_WEAK_REF = 0x0040
N_WEAK_DEF = 0x0080
N_REF_TO_WEAK = 0x0080
N_ARM_THUMB_DEF = 0x0008
N_SYMBOL_RESOLVER = 0x0100
N_ALT_ENTRY = 0x0200
# /usr/include/mach-o/fat.h
FAT_MAGIC = 0xCAFEBABE
FAT_CIGAM = 0xBEBAFECA
FAT_MAGIC_64 = 0xCAFEBABF
FAT_CIGAM_64 = 0xBFBAFECA
class fat_header(Structure):
_fields_ = (("magic", p_uint32), ("nfat_arch", p_uint32))
class fat_arch(Structure):
_fields_ = (
("cputype", cpu_type_t),
("cpusubtype", cpu_subtype_t),
("offset", p_uint32),
("size", p_uint32),
("align", p_uint32),
)
class fat_arch64(Structure):
_fields_ = (
("cputype", cpu_type_t),
("cpusubtype", cpu_subtype_t),
("offset", p_uint64),
("size", p_uint64),
("align", p_uint32),
("reserved", p_uint32),
)
REBASE_TYPE_POINTER = 1 # noqa: E221
REBASE_TYPE_TEXT_ABSOLUTE32 = 2 # noqa: E221
REBASE_TYPE_TEXT_PCREL32 = 3 # noqa: E221
REBASE_OPCODE_MASK = 0xF0 # noqa: E221
REBASE_IMMEDIATE_MASK = 0x0F # noqa: E221
REBASE_OPCODE_DONE = 0x00 # noqa: E221
REBASE_OPCODE_SET_TYPE_IMM = 0x10 # noqa: E221
REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB = 0x20 # noqa: E221
REBASE_OPCODE_ADD_ADDR_ULEB = 0x30 # noqa: E221
REBASE_OPCODE_ADD_ADDR_IMM_SCALED = 0x40 # noqa: E221
REBASE_OPCODE_DO_REBASE_IMM_TIMES = 0x50 # noqa: E221
REBASE_OPCODE_DO_REBASE_ULEB_TIMES = 0x60 # noqa: E221
REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB = 0x70 # noqa: E221
REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB = 0x80 # noqa: E221
BIND_TYPE_POINTER = 1 # noqa: E221
BIND_TYPE_TEXT_ABSOLUTE32 = 2 # noqa: E221
BIND_TYPE_TEXT_PCREL32 = 3 # noqa: E221
BIND_SPECIAL_DYLIB_SELF = 0 # noqa: E221
BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE = -1 # noqa: E221
BIND_SPECIAL_DYLIB_FLAT_LOOKUP = -2 # noqa: E221
BIND_SYMBOL_FLAGS_WEAK_IMPORT = 0x1 # noqa: E221
BIND_SYMBOL_FLAGS_NON_WEAK_DEFINITION = 0x8 # noqa: E221
BIND_OPCODE_MASK = 0xF0 # noqa: E221
BIND_IMMEDIATE_MASK = 0x0F # noqa: E221
BIND_OPCODE_DONE = 0x00 # noqa: E221
BIND_OPCODE_SET_DYLIB_ORDINAL_IMM = 0x10 # noqa: E221
BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB = 0x20 # noqa: E221
BIND_OPCODE_SET_DYLIB_SPECIAL_IMM = 0x30 # noqa: E221
BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM = 0x40 # noqa: E221
BIND_OPCODE_SET_TYPE_IMM = 0x50 # noqa: E221
BIND_OPCODE_SET_ADDEND_SLEB = 0x60 # noqa: E221
BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB = 0x70 # noqa: E221
BIND_OPCODE_ADD_ADDR_ULEB = 0x80 # noqa: E221
BIND_OPCODE_DO_BIND = 0x90 # noqa: E221
BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB = 0xA0 # noqa: E221
BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED = 0xB0 # noqa: E221
BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB = 0xC0 # noqa: E221
EXPORT_SYMBOL_FLAGS_KIND_MASK = 0x03 # noqa: E221
EXPORT_SYMBOL_FLAGS_KIND_REGULAR = 0x00 # noqa: E221
EXPORT_SYMBOL_FLAGS_KIND_THREAD_LOCAL = 0x01 # noqa: E221
EXPORT_SYMBOL_FLAGS_WEAK_DEFINITION = 0x04 # noqa: E221
EXPORT_SYMBOL_FLAGS_REEXPORT = 0x08 # noqa: E221
EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER = 0x10 # noqa: E221
PLATFORM_MACOS = 1
PLATFORM_IOS = 2
PLATFORM_TVOS = 3
PLATFORM_WATCHOS = 4
PLATFORM_BRIDGEOS = 5
PLATFORM_IOSMAC = 6
PLATFORM_MACCATALYST = 6
PLATFORM_IOSSIMULATOR = 7
PLATFORM_TVOSSIMULATOR = 8
PLATFORM_WATCHOSSIMULATOR = 9
PLATFORM_NAMES = {
PLATFORM_MACOS: "macOS",
PLATFORM_IOS: "iOS",
PLATFORM_TVOS: "tvOS",
PLATFORM_WATCHOS: "watchOS",
PLATFORM_BRIDGEOS: "bridgeOS",
PLATFORM_MACCATALYST: "catalyst",
PLATFORM_IOSSIMULATOR: "iOS simulator",
PLATFORM_TVOSSIMULATOR: "tvOS simulator",
PLATFORM_WATCHOSSIMULATOR: "watchOS simulator",
}
TOOL_CLANG = 1
TOOL_SWIFT = 2
TOOL_LD = 3
TOOL_NAMES = {TOOL_CLANG: "clang", TOOL_SWIFT: "swift", TOOL_LD: "ld"}
|
mit
| -2,584,373,802,201,344,000 | 27.56846 | 87 | 0.60178 | false |
adam-p/danforth-east
|
tasks.py
|
1
|
12184
|
# -*- coding: utf-8 -*-
#
# Copyright Adam Pritchard 2020
# MIT License : https://adampritchard.mit-license.org/
#
"""
Flask routes used by tasks queues and cron jobs
"""
import logging
import flask
from google.cloud import ndb
import config
import gapps
import emailer
import main
tasks = flask.Blueprint('tasks', __name__)
# These aren't routes submitted to by users, and there are checks to ensure that.
main.csrf.exempt(tasks)
@tasks.route('/tasks/new-member-mail', methods=['POST'])
def new_member_mail():
"""Queue task invoked when a member has been newly registered.
Sends appropriate welcome emails.
"""
logging.info('tasks.new_member_mail hit')
member_dict = gapps.validate_queue_task(flask.request)
logging.info(member_dict)
#
# Send welcome email
#
member_name = '%s %s' % (member_dict[config.SHEETS.member.fields.first_name.name],
member_dict[config.SHEETS.member.fields.last_name.name])
member_email = member_dict[config.SHEETS.member.fields.email.name]
with open('templates/tasks/email-new-member-subject.txt', 'r') as subject_file:
subject = subject_file.read().strip()
body_html = flask.render_template(
'tasks/email-new-member.jinja',
app_config=config)
if not emailer.send((member_email, member_name), subject, body_html, None):
# Log and carry on
logging.error(f'failed to send new-member email to {member_email}')
else:
logging.info(f'sent new-member email to {member_email}')
#
# Send email to volunteer-interest-area reps
#
interest_reps = gapps.get_volunteer_interest_reps_for_member(member_dict)
if interest_reps:
subject = flask.render_template(
'tasks/email-volunteer-interest-rep-subject.jinja',
app_config=config,
join_type='member').strip()
for interest, reps in interest_reps.items():
body_html = flask.render_template(
'tasks/email-volunteer-interest-rep.jinja',
app_config=config,
join_type='member',
interest=interest,
member_name=member_name,
member_email=member_email)
for rep in reps:
rep_email = rep.get(config.SHEETS.volunteer_interest.fields.email.name)
rep_name = rep.get(config.SHEETS.volunteer_interest.fields.name.name)
ok = emailer.send(
(rep_email, rep_name),
subject,
body_html, None)
if not ok:
logging.error(f'failed to send new-member-volunteer-interest email to {rep_email}')
else:
logging.info(f'sent new-member-volunteer-interest email to {rep_email}')
return flask.make_response('', 200)
@tasks.route('/tasks/renew-member-mail', methods=['POST'])
def renew_member_mail():
"""Queue task invoked when a member has been renewed.
Sends appropriate welcome emails.
"""
logging.info('tasks.renew_member_mail hit')
member_dict = gapps.validate_queue_task(flask.request)
logging.info(member_dict)
#
# Send welcome email
#
member_name = '%s %s' % (member_dict[config.SHEETS.member.fields.first_name.name],
member_dict[config.SHEETS.member.fields.last_name.name])
member_email = member_dict[config.SHEETS.member.fields.email.name]
with open('templates/tasks/email-renew-member-subject.txt', 'r') as subject_file:
subject = subject_file.read().strip()
body_html = flask.render_template(
'tasks/email-renew-member.jinja',
app_config=config)
if not emailer.send((member_email, member_name), subject, body_html, None):
# Log and carry on
# TODO: Should we instead return non-200 and let the task retry?
logging.error(f'failed to send renew-member email to {member_email}')
else:
logging.info(f'sent renew-member email to {member_email}')
return flask.make_response('', 200)
@tasks.route('/tasks/new-volunteer-mail', methods=['POST'])
def new_volunteer_mail():
"""Queue task invoked when a new volunteer has been added.
Sends appropriate welcome emails.
"""
logging.info('tasks.new_volunteer_mail hit')
volunteer_dict = gapps.validate_queue_task(flask.request)
logging.info(volunteer_dict)
#
# Send welcome email
#
volunteer_name = '%s %s' % (volunteer_dict[config.SHEETS.volunteer.fields.first_name.name],
volunteer_dict[config.SHEETS.volunteer.fields.last_name.name])
volunteer_email = volunteer_dict[config.SHEETS.volunteer.fields.email.name]
with open('templates/tasks/email-new-volunteer-subject.txt', 'r') as subject_file:
subject = subject_file.read().strip()
body_html = flask.render_template(
'tasks/email-new-volunteer.jinja',
app_config=config)
if not emailer.send((volunteer_email, volunteer_name), subject, body_html, None):
# Log and carry on
# TODO: Should we instead return non-200 and let the task retry?
logging.error(f'failed to send new-volunteer email to {volunteer_email}')
else:
logging.info(f'sent new-volunteer email to {volunteer_email}')
#
# Send email to volunteer-interest-area reps
#
interest_reps = gapps.get_volunteer_interest_reps_for_member(volunteer_dict)
if interest_reps:
subject = flask.render_template(
'tasks/email-volunteer-interest-rep-subject.jinja',
app_config=config,
join_type='volunteer').strip()
for interest, reps in interest_reps.items():
body_html = flask.render_template(
'tasks/email-volunteer-interest-rep.jinja',
app_config=config,
join_type='volunteer',
interest=interest,
member_name=volunteer_name,
member_email=volunteer_email)
for rep in reps:
rep_email = rep.get(config.SHEETS.volunteer_interest.fields.email.name)
rep_name = rep.get(config.SHEETS.volunteer_interest.fields.name.name)
ok = emailer.send(
(rep_email, rep_name),
subject, body_html, None)
if not ok:
logging.error(f'failed to send new-volunteer-volunteer-interest email to {rep_email}')
else:
logging.info(f'sent new-volunteer-volunteer-interest email to {rep_email}')
return flask.make_response('', 200)
@tasks.route('/tasks/member-sheet-cull', methods=['GET', 'POST'])
def member_sheet_cull():
"""Remove members from the members sheet who have not renewed in a long time.
This gets called both as a cron job and a task queue job.
"""
if flask.request.method == 'GET':
# cron job
logging.debug('tasks.member_sheet_cull hit from cron')
gapps.validate_cron_task(flask.request)
else:
# task queue job
logging.debug('tasks.member_sheet_cull hit from task queue')
gapps.validate_queue_task(flask.request)
gapps.cull_members_sheet()
return flask.make_response('', 200)
class Settings(ndb.Model):
"""Used to store app state and settings.
"""
SINGLETON_DATASTORE_KEY = 'SINGLETON'
_ndb_client = ndb.Client()
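    # Stored as a single Datastore entity: singleton() lazily creates the record on
    # first access, and update() persists changes inside an ndb client context.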
member_sheet_year = ndb.IntegerProperty(
default=2019,
verbose_name='The current year of operation. When the calendar year changes, work needs to be done and this gets updated.',
indexed=False)
@classmethod
def singleton(cls):
with cls._ndb_client.context():
return cls.get_or_insert(cls.SINGLETON_DATASTORE_KEY)
def update(self):
with self._ndb_client.context():
self.put()
@tasks.route('/tasks/member-sheet-archive', methods=['GET'])
def member_sheet_archive():
"""Cron task that creates an archive of the members sheet once per year.
"""
logging.warning('tasks.member_sheet_archive: hit')
gapps.validate_cron_task(flask.request)
settings = Settings.singleton()
logging.debug('tasks.member_sheet_archive: settings.member_sheet_year: %d', settings.member_sheet_year)
new_year = gapps.archive_members_sheet(settings.member_sheet_year)
if new_year:
logging.debug('tasks.member_sheet_archive: archived; setting new year: %d', new_year)
settings.member_sheet_year = new_year
settings.update()
return flask.make_response('', 200)
@tasks.route('/tasks/renewal-reminder-emails', methods=['GET'])
def renewal_reminder_emails():
"""Sends renewal reminder emails to members who are nearing their renewal
date.
"""
logging.debug('tasks.renewal_reminder_emails: hit')
gapps.validate_cron_task(flask.request)
expiring_rows = gapps.get_members_expiring_soon()
if not expiring_rows:
logging.debug('tasks.renewal_reminder_emails: no expiring members')
return flask.make_response('', 200)
logging.debug('tasks.renewal_reminder_emails: found %d expiring members', len(expiring_rows))
with open('templates/tasks/email-renewal-reminder-subject.txt', 'r') as subject_file:
subject_noauto = subject_file.read().strip()
with open('templates/tasks/email-renewal-reminder-auto-subject.txt', 'r') as subject_file:
subject_auto = subject_file.read().strip()
for row in expiring_rows:
member_first_name = row.dict.get(config.SHEETS.member.fields.first_name.name)
member_name = '%s %s' % (member_first_name,
row.dict.get(config.SHEETS.member.fields.last_name.name))
member_email = row.dict.get(config.SHEETS.member.fields.email.name)
# Right now we use a Paypal button that does one-time purchases;
# that is, members pay for a year and then need to manually pay
# again the next year. But previously we used a "subscription"
# Paypal button, so there are still some members who automatically
# pay each year. These two groups will get different reminder
# emails.
auto_renewing = str(row.dict.get(config.SHEETS.member.fields.paypal_auto_renewing.name))
if auto_renewing.lower().startswith('y'):
# Member is auto-renewing (i.e., is a Paypal "subscriber")
subject = subject_auto
body_html = flask.render_template(
'tasks/email-renewal-reminder-auto.jinja',
app_config=config,
member_first_name=row.dict.get(config.SHEETS.member.fields.first_name.name))
logging.info('tasks.renewal_reminder_emails: sending auto-renewing reminder to %s', member_email)
else:
# Member is year-to-year
subject = subject_noauto
body_html = flask.render_template(
'tasks/email-renewal-reminder.jinja',
app_config=config,
member_first_name=row.dict.get(config.SHEETS.member.fields.first_name.name))
logging.info('tasks.renewal_reminder_emails: sending non-auto-renewing reminder to %s', member_email)
emailer.send((member_email, member_name),
subject, body_html, None)
return flask.make_response('', 200)
@tasks.route('/tasks/process-mailchimp-updates', methods=['GET', 'POST'])
def process_mailchimp_updates():
"""Updates MailChimp with changed members and volunteers.
This gets called both as a cron job and a task queue job.
"""
if flask.request.method == 'GET':
# cron job
logging.debug('tasks.process_mailchimp_updates: hit from cron')
gapps.validate_cron_task(flask.request)
else:
# task queue job
logging.debug('tasks.process_mailchimp_updates: hit from task queue')
gapps.validate_queue_task(flask.request)
if not config.MAILCHIMP_ENABLED:
return flask.make_response('', 200)
gapps.process_mailchimp_updates()
return flask.make_response('', 200)
|
mit
| 3,668,100,720,192,087,000 | 35.370149 | 131 | 0.636655 | false |
kasbah/slim_looper
|
src/gui/rdfdict/namespaces.py
|
1
|
3494
|
#!/usr/bin/env python
# Copyright 2014 Kaspar Emanuel <kaspar.emanuel@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import rdflib
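# Commonly used RDF namespace prefixes, grouped by publisher (LV2, KXStudio, W3C, usefulinc).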
lv2 = {
"lv2" : rdflib.Namespace("http://lv2plug.in/ns/lv2core#"),
"ui" : rdflib.Namespace("http://lv2plug.in/ns/extensions/ui#"),
"units" : rdflib.Namespace("http://lv2plug.in/ns/extensions/units#"),
"buf-size" : rdflib.Namespace("http://lv2plug.in/ns/ext/buf-size#"),
"midi" : rdflib.Namespace("http://lv2plug.in/ns/ext/midi#"),
"morph" : rdflib.Namespace("http://lv2plug.in/ns/ext/morph#"),
"options" : rdflib.Namespace("http://lv2plug.in/ns/ext/options#"),
"resizeports" : rdflib.Namespace("http://lv2plug.in/ns/ext/resize-port#"),
"urid" : rdflib.Namespace("http://lv2plug.in/ns/ext/urid#"),
"time" : rdflib.Namespace("http://lv2plug.in/ns/ext/time#"),
"log" : rdflib.Namespace("http://lv2plug.in/ns/ext/log#"),
"state" : rdflib.Namespace("http://lv2plug.in/ns/ext/state#"),
"port-props" : rdflib.Namespace("http://lv2plug.in/ns/ext/port-props#"),
"paramters" : rdflib.Namespace("http://lv2plug.in/ns/ext/parameters#"),
"data-access" : rdflib.Namespace("http://lv2plug.in/ns/ext/data-access#"),
"patch" : rdflib.Namespace("http://lv2plug.in/ns/ext/patch#"),
"dynamifest" : rdflib.Namespace("http://lv2plug.in/ns/ext/dynmanifest#"),
"atom" : rdflib.Namespace("http://lv2plug.in/ns/ext/atom#"),
"presets" : rdflib.Namespace("http://lv2plug.in/ns/ext/presets#"),
"instance-access" : rdflib.Namespace("http://lv2plug.in/ns/ext/instance-access#"),
"uri-map" : rdflib.Namespace("http://lv2plug.in/ns/ext/uri-map#"),
"worker" : rdflib.Namespace("http://lv2plug.in/ns/ext/worker#"),
"port-groups" : rdflib.Namespace("http://lv2plug.in/ns/ext/port-groups#"),
"event" : rdflib.Namespace("http://lv2plug.in/ns/ext/event#"),
"meta" : rdflib.Namespace("http://lv2plug.in/ns/meta#"),
}
kxstudio = {
"external-ui" : rdflib.Namespace("http://kxstudio.sourceforge.net/ns/lv2ext/external-ui#"),
"programs" : rdflib.Namespace("http://kxstudio.sourceforge.net/ns/lv2ext/programs#"),
"rtmempool" : rdflib.Namespace("http://kxstudio.sourceforge.net/ns/lv2ext/rtmempool"),
}
w3 = {
"xsd" : rdflib.Namespace("http://www.w3.org/2001/XMLSchema#"),
"rdfs" : rdflib.Namespace("http://www.w3.org/2000/01/rdf-schema#"),
"rdf" : rdflib.Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
"owl" : rdflib.Namespace("http://www.w3.org/2002/07/owl#"),
}
usefulinc = {
"doap" : rdflib.Namespace("http://usefulinc.com/ns/doap#"),
}
|
gpl-3.0
| -4,170,733,738,288,910,000 | 57.233333 | 95 | 0.641099 | false |
vlegoff/tsunami
|
src/primaires/perso/commandes/raedit/__init__.py
|
1
|
2488
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'raedit'."""
from primaires.interpreteur.commande.commande import Commande
class CmdRaedit(Commande):
"""Commande 'raedit'"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "raedit", "raedit")
self.groupe = "administrateur"
self.schema = ""
self.nom_categorie = "batisseur"
self.aide_courte = "ouvre l'éditeur des races"
self.aide_longue = \
"Cette commande ouvre l'éditeur des races. Elle vous permet de " \
"créer et éditer les races existantes."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
editeur = type(self).importeur.interpreteur.construire_editeur(
"raedit", personnage, None)
personnage.contextes.ajouter(editeur)
editeur.actualiser()
|
bsd-3-clause
| 6,132,993,815,882,985,000 | 44.127273 | 79 | 0.721998 | false |
stackmachine/bearweb
|
bearweb/urls/site.py
|
1
|
1692
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
# Enable the admin interface:
from django.contrib import admin
admin.autodiscover()
from games.views import download, IdenticonDetail
from core.views import ContactView, PricingView
from blog.views import ArticleView
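# Shortcut for routing a URL straight to a template without writing a dedicated view.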
def template(path):
return TemplateView.as_view(template_name=path)
urlpatterns = patterns(
'',
url((r'^games/(?P<uuid>[0-9a-f]{24})/download/'
r'(?P<platform>windows|osx|love)$'),
download, name='download'),
url(r'^identicons/(?P<uuid>[0-9a-f]{24}).png', IdenticonDetail.as_view(),
name='identicon'),
url(r'api/', include('games.urls.legacy', namespace='api')),
url(r'^$', template("core/index.html"), name='home'),
url(r'^robots.txt$', TemplateView.as_view(template_name='core/robots.txt',
content_type='text/plain')),
url(r'^pricing$', PricingView.as_view(), name='pricing'),
url(r'^privacy$', template('core/privacy.html'), name='privacy'),
url(r'^tos$', template('core/tos.html'), name='tos'),
url(r'^contact$', ContactView.as_view(), name='contact'),
url(r'^blog$', template('blog/index.html'), name='blog'),
url(r'^blog/feed.xml$', template('blog/feed.xml'), name='feed'),
url(r'^blog/(?P<article_name>[a-z0-9\-]+)$', ArticleView.as_view()),
url(r'^houston/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
(r'^500/$', 'django.views.defaults.server_error'),
(r'^404/$', 'django.views.defaults.page_not_found'),
)
|
mit
| 8,222,612,598,508,858,000 | 35.782609 | 78 | 0.636525 | false |
sjthespian/ZenPacks.community.FlexLM
|
ZenPacks/community/FlexLM/modeler/plugins/community/cmd/FlexLM.py
|
1
|
4273
|
# Module-level documentation will automatically be shown as additional
# information for the modeler plugin in the web interface.
"""
FlexLM
A FlexLM plugin that collects licenses under FlexLM control
"""
# When configuring modeler plugins for a device or device class, this plugin's
# name would be community.cmd.FlexLM because its filesystem path within
# the ZenPack is modeler/plugins/community/cmd/FlexLM.py. The name of the
# class within this file must match the filename.
import re
# CommandPlugin is the base class that provides lots of help in modeling data
# that's available by connecting to a remote machine, running command line
# tools, and parsing their results.
from Products.DataCollector.plugins.CollectorPlugin import CommandPlugin
# Classes we'll need for returning proper results from our modeler plugin's
# process method.
from Products.DataCollector.plugins.DataMaps import ObjectMap, RelationshipMap
class FlexLM(CommandPlugin):
# relname is a relationship name, which is the same as the relationship
# name in the model class:
relname = 'licenseFlexLM'
compname = ''
# this is the class we will instantiate. and it needs to match the container
modname = 'ZenPacks.community.FlexLM.FlexLMLicense'
# The command to run.
command = "/bin/ps -efww| /bin/grep lmgrd | /bin/egrep -v grep | /bin/sed 's/^.*-c //;s/ .*$//' | /bin/sort | /usr/bin/uniq | /usr/bin/xargs -i /usr/bin/lmstat -a -c {}"
# Query the system to find all of the lmgrd daemons running. From that
# get the config files, and from those use lmstat to get the vendor
# daemons and create a component for each feature.
def process(self, device, results, log):
log.info("Modeler %s processing data for device %s",
self.name(), device.id)
# call self.relMap() helper method that initializes relname, compname
rm = self.relMap()
# For CommandPlugin, the results parameter to the process method will
# be a string containing all output from the command defined above.
#matcher = re.compile(r'^\d+\s+\d+\s+(?P<blocks>\d+)\s+(?P<name>\S+)')
usageMatcher = re.compile(r'^Users of (?P<license>[^:]+):\s+\(Total of (?P<total>\d+) licenses issued.*Total of (?P<inuse>\d+) licenses in use')
portMatcher = re.compile(r'^License server status: (?P<port>\d+)@')
vendorMatcher = re.compile(r'^\s*(?P<vendor>[^:]+):\s+(?P<status>(UP|DOWN))\s+v(?P<version>[0-9\.]+)')
vendor = '' # FlexLM Vendor for the current features
vendor_status = ''
vendor_version = ''
vendor_count = 0
feature_count = 0
lmport = 0
for line in results.split('\n'):
line = line.strip()
# Parse lmstat output, for port, vendor, feature,
# total and inuse licenses
match = portMatcher.search(line) # Get server port
if match:
lmport = int(match.group('port'))
vendor = '' # Clear vendor info
vendor_status = ''
vendor_version = ''
vendor_count = vendor_count + 1
match = vendorMatcher.search(line) # Get license vendor
if match:
vendor = match.group('vendor')
vendor_status = match.group('status')
vendor_version = match.group('version')
match = usageMatcher.search(line) # Get feature usage
if match:
om = self.objectMap()
log.info("Found license %s/%s(%s) %d (%d of %d)",
vendor, match.group('license'), vendor_version, lmport, int(match.group('inuse')), int(match.group('total')))
om.id = self.prepId(match.group('license'))
om.feature = match.group('license')
om.vendor = vendor
om.version = vendor_version
om.port = lmport
om.total = match.group('total')
om.inuse = match.group('inuse')
feature_count = feature_count + 1
rm.append(om)
# Return a RelationshipMap that describes the component, relationship
# on that component, and the module name for the created objects. Pass
# in the previously built list of ObjectMaps that will be used to
# populate the relationship.
return rm
# return RelationshipMap(
# compname="lic", relname=relname,
# modname=modname
# objmaps=objectmaps)
|
gpl-2.0
| -3,921,368,739,322,689,500 | 40.485437 | 173 | 0.659958 | false |
nstockton/mapperproxy-mume
|
tests/mapper/test_mudevents.py
|
1
|
1842
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Future Modules:
from __future__ import annotations
# Built-in Modules:
import socket
from unittest import TestCase
from unittest.mock import Mock, patch
# Mapper Modules:
from mapper.mapper import Mapper
from mapper.mudevents import Handler
class DummyHandler(Handler):
event: str = "testEvent"
handleText: Mock = Mock()
def handle(self, text: str) -> None:
self.handleText(f"I received {text}")
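# Deliberately omits the "event" attribute so tests can exercise the ValueError path.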
class HandlerWithoutType(Handler):
def handle(self, text: str) -> None:
pass
class TestHandler(TestCase):
@patch.object(Mapper, "loadRooms", Mock()) # Speedup test execution.
def setUp(self) -> None:
self.mapper: Mapper = Mapper(
playerSocket=Mock(spec=socket.socket),
gameSocket=Mock(spec=socket.socket),
outputFormat="normal",
interface="text",
promptTerminator=None,
gagPrompts=False,
findFormat="",
isEmulatingOffline=False,
)
self.mapper.daemon = True # Allow unittest to quit if mapper thread does not close properly.
def testMapper_handle(self) -> None:
dummy: DummyHandler = DummyHandler(self.mapper)
self.mapper.handleMudEvent(dummy.event, b"Hello world")
dummy.handleText.assert_called_once_with("I received Hello world")
dummy.handleText.reset_mock()
self.mapper.handleMudEvent(dummy.event, b"I am here.")
dummy.handleText.assert_called_once_with("I received I am here.")
dummy.handleText.reset_mock()
dummy.__del__()
self.mapper.handleMudEvent(dummy.event, b"Goodbye world")
dummy.handleText.assert_not_called()
def test_init_raisesValueErrorWhenNoEventTypeIsProvided(self) -> None:
with self.assertRaises(ValueError):
HandlerWithoutType(self.mapper)
|
mpl-2.0
| 5,960,192,641,944,425,000 | 29.196721 | 95 | 0.741042 | false |
Svolcano/python_exercise
|
dianhua/worker/crawler/china_mobile/wap/zhejiang/main.py
|
1
|
27693
|
# -*- coding:utf-8 -*-
import sys
import re
import time
import random
import base64
from lxml import etree
from dateutil.relativedelta import relativedelta
reload(sys)
sys.setdefaultencoding('utf8')
import datetime
import traceback
if __name__ == '__main__':
sys.path.append('..')
sys.path.append('../..')
sys.path.append('../../..')
sys.path.append('../../../..')
sys.path.append('../../../../..')
from crawler.base_crawler import BaseCrawler
from enstr import enstring
else:
from worker.crawler.base_crawler import BaseCrawler
from worker.crawler.china_mobile.wap.zhejiang.enstr import enstring
class Crawler(BaseCrawler):
"""
    Decorators are used here to wrap, respectively:
    1> low-level logging, to make debugging a single crawler easier
    2> default exception raising, for code reuse
"""
def __init__(self, **kwargs):
"""
        kwargs contains
        'tel': str,
        'pin_pwd': str,
        'user_id': str,
        'user_name': unicode,
        'sms_code': str,
        'captcha_code': str
        Error levels
        0: success
        1: wrong account or password
        2: wrong verification code
        9: other error
"""
super(Crawler, self).__init__(**kwargs)
def need_parameters(self, **kwargs):
"""
        Declare, as a list, which fields this crawler needs
:param kwargs:
:return:
list of full_name, id_card, pin_pwd, sms_verify, captcha_verify
"""
# print('用list告訴我你需要哪些欄位')
return ['pin_pwd', 'sms_verify']
def login(self, **kwargs):
# set cookies
login_url = "https://zj.ac.10086.cn/login?jumpurl=http://wap.zj.10086.cn/szhy/my/index.html"
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9",
}
code, key, resp = self.get(login_url, headers=headers)
if code != 0:
return code, key
try:
et = etree.HTML(resp.text)
spid = et.xpath("//form/input[@name='spid']/@value")[0]
except:
error = traceback.format_exc()
self.log("crawler", "获取spid失败{}".format(error), resp)
return 9, "html_error"
codetype = 3004
for i in range(self.max_retry):
            # Fetch the captcha image
url = "https://zj.ac.10086.cn//common/image.jsp?r={}".format(random.randint(1000000000000, 9999999999999))
headers = {
"Accept": "image/webp,image/apng,image/*,*/*;q=0.8",
"Referer": login_url,
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9"""
}
code, key, resp = self.get(url, headers=headers)
if code != 0:
return code, key
            # Solve the captcha via the cloud captcha-solving service
try:
key, result, cid = self._dama(resp.content, codetype)
except:
msg = traceback.format_exc()
self.log('crawler', u'打码失败:{}'.format(msg), '')
continue
if key == "success" and result != "":
captcha_code = str(result).lower()
else:
continue
            # Validate the captcha code with the server
url = "https://zj.ac.10086.cn/validImageCode?r_{}&imageCode={}".format(random.random(), captcha_code)
headers = {
"Referer": login_url,
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9",
"Accept": "*/*",
"X-Requested-With": "XMLHttpRequest"
}
code, key, resp = self.get(url, headers=headers)
if code != 0:
return code, key
if "1" != str(resp.text.strip()):
self.log("crawler", "图片打码失败", resp)
self._dama_report(cid)
continue
else:
break
else:
self.log("crawler", "自动打码失败", "")
return 9, "auto_captcha_code_error"
try:
pin_pwd = enstring(kwargs['pin_pwd'])
tel = enstring(kwargs['tel'])
except:
error = traceback.format_exc()
self.log("crawler", "信息加密失败{}".format(error), "")
return 9, "website_busy_error"
data = {
"type": "B",
"backurl": "https://zj.ac.10086.cn/waplogin/backPage.jsp",
"warnurl": "https://zj.ac.10086.cn/waplogin/warnPage.jsp",
"errorurl": "https://zj.ac.10086.cn/waplogin/errorPage.jsp",
"spid": spid,
"RelayState": "type=B;backurl=http://wap.zj.10086.cn/servlet/assertion;nl=7;loginFromUrl=http://wap.zj.10086.cn/szhy/my/index.html;callbackurl=/servlet/assertion;islogin=true",
"mobileNum": tel,
"validCode": captcha_code,
"smsValidCode": "",
"service": "my",
"failurl": "",
"continue": "http%3A%2F%2Fwap.zj.10086.cn%2Fszhy%2Fmy%2Findex.html",
"loginUserType": "1",
"pwdType": "2",
"billId": kwargs['tel'],
"servicePassword": pin_pwd,
}
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Referer": login_url,
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9"
}
url = "https://zj.ac.10086.cn/Login"
code, key, resp = self.post(url, headers=headers, data=data)
if code != 0:
return code, key
if resp.history:
try:
result_code = re.findall("code=(.*?)&", resp.url)[0]
if result_code == "5003":
self.log("user", "服务密码错误", resp)
return 9, "pin_pwd_error"
if result_code == "7005":
self.log("user", "服务密码错误", resp)
return 9, "pin_pwd_error"
self.log("crawler", "其他情况登陆出现错误", resp)
return 9, "login_param_error"
except:
error = traceback.format_exc()
self.log("crawler", "解析登录结果失败 {}".format(error), resp)
return 9, "html_error"
try:
et = etree.HTML(resp.text)
url = et.xpath("//form/@action")[0]
data = {}
form = et.xpath("//form/input")
for i in form:
name = i.xpath("./@name")[0]
value = i.xpath("./@value")[0]
if name == "SAMLart":
self.samlart = value
data[name] = value
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Referer": "https://zj.ac.10086.cn/Login",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9"
}
code, key, resp = self.post(url, headers=headers, data=data)
if code != 0:
return code, key
url = "http://wap.zj.10086.cn/servlet/assertion"
data = {
"SAMLart": self.samlart,
"RelayState": "type=B;backurl=http://wap.zj.10086.cn/servlet/assertion;nl=7;loginFromUrl=http://wap.zj.10086.cn/szhy/my/index.html;callbackurl=/servlet/assertion;islogin=true"
}
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
code, key, resp = self.post(url, headers=headers, data=data)
if code != 0:
return code, key
# print(resp.text)
except:
error = traceback.format_exc()
self.log("crawler", "获取登录跳转信息失败{}".format(error), resp)
return 9, "html_error"
try:
self.ul_scid = re.findall('ul_scid=(.*?)"', resp.text)[0]
except:
error = traceback.format_exc()
self.log("crawler", "获取官网随机数失败 {} ".format(error), resp)
return 9, "html_error"
return 0, "success"
def get_verify_type(self, **kwargs):
return "SMS"
def send_verify_request(self, **kwargs):
url = "http://wap.zj.10086.cn/new/mobileApp/client/queryUserInfoBySso.do?ul_scid={}".format(self.ul_scid)
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Referer": "http://wap.zj.10086.cn/szhy/my/index.html",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
code, key, resp = self.get(url, headers=headers)
if code != 0:
return code, key, ""
url = "http://wap.zj.10086.cn/mobileStore/details/query/queryDetails.do?r={}".format(random.random())
headers = {
"X-Requested-With": "XMLHttpRequest",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Referer": "http://wap.zj.10086.cn/mobileStore/details/query/list.do?v=6&id=1063&nwId=wap&ul_nwid=wap&ul_scid={}".format(self.ul_scid),
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
data = {
"listType": "1",
"secondPass": "",
"type": "json",
"id": "1063"
}
code, key, resp = self.post(url, headers=headers, data=data)
if code != 0:
return code, key, ""
if '验证码下发成功' in resp.text:
return 0, "success", ""
elif '用户查询详单验证码仍在有效期内,请继续使用' in resp.text:
return 0, "reusable_sms", "reusable_sms"
else:
self.log("crawler", "下发短信验证码失败", resp)
return 9, "send_sms_error", ""
def verify(self, **kwargs):
url = "http://wap.zj.10086.cn/mobileStore/details/query/queryDetails.do?r={}".format(random.random())
headers = {
"X-Requested-With": "XMLHttpRequest",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Referer": "http://wap.zj.10086.cn/mobileStore/details/query/list.do?v=6&id=1063&nwId=wap&ul_nwid=wap&ul_scid={}".format(self.ul_scid),
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
data = {
"listType": "1",
"secondPass": kwargs['sms_code'],
"type": "json",
"id": "1063"
}
code, key, resp = self.post(url, headers=headers, data=data)
if code != 0:
return code, key
try:
result = resp.json()
result_code = result.get("code")
if result_code == "0":
return 0, "success"
else:
self.log("crawler", "验证码错误", resp)
return 9, "verify_error"
except:
error = traceback.format_exc()
self.log("crawler", "解析短信验证异常{}".format(error), resp)
return 9, "json_error"
def time_stamp(self, time_str, str_format='%Y-%m-%d %H:%M:%S'):
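        # Convert a "YYYY-mm-dd HH:MM:SS" time string into a Unix-timestamp string.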
time_type = time.strptime(time_str, str_format)
return str(int(time.mktime(time_type)))
def time_format(self, time_str, **kwargs):
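        # Parse a call-duration string (either "X时Y分Z秒" or colon-separated digits)
        # and return the total number of seconds as a string.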
exec_type = 1
time_str = time_str.encode('utf-8')
if 'exec_type' in kwargs:
exec_type = kwargs['exec_type']
if (exec_type == 1):
xx = re.match(r'(.*时)?(.*分)?(.*秒)?', time_str)
h, m, s = 0, 0, 0
if xx.group(1):
hh = re.findall('\d+', xx.group(1))[0]
h = int(hh)
if xx.group(2):
mm = re.findall('\d+', xx.group(2))[0]
m = int(mm)
if xx.group(3):
ss = re.findall('\d+', xx.group(3))[0]
s = int(ss)
real_time = h * 60 * 60 + m * 60 + s
if (exec_type == 2):
xx = re.findall(r'\d*', time_str)
h, m, s = map(int, xx[::2])
real_time = h * 60 * 60 + m * 60 + s
return str(real_time)
def get_page_data(self, resp, month):
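        # Parse the first HTML page of call details for a month into one dict per call.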
et = etree.HTML(resp.text)
xx = et.xpath("//dl")
data_list = []
for i in xx:
data = {}
call_tel = i.xpath("./dt/div[1]/span[1]/text()")
call_cost = i.xpath("./dt/div[1]/span[2]/text()")
call_time = i.xpath("./dt/div[2]/span[1]/text()")
call_from = i.xpath("./dt/div[2]/span[2]/text()")
call_method = i.xpath("./dt/div[2]/span[3]/text()")
call_duration = i.xpath("./dt/div[2]/span[4]/text()")
call_type = i.xpath("./dd/ul/li[2]/span[2]/text()")
data['call_tel'] = "".join(call_tel and call_tel[0])
data['call_cost'] = "".join(call_cost and call_cost[0])
data['call_time'] = "".join(call_time and self.time_stamp(call_time[0]))
raw_call_from = "".join(call_from and call_from[0])
call_from, error = self.formatarea(raw_call_from)
if call_from:
data['call_from'] = call_from
else:
data['call_from'] = raw_call_from
data['call_method'] = "".join(call_method and call_method[0])
data['call_type'] = "".join(call_type and call_type[0])
data['call_duration'] = "".join(call_duration and self.time_format(call_duration[0]))
data['month'] = month
data['call_to'] = ''
data_list.append(data)
if "点击查看更多" in resp.text:
return 0, "has_next", data_list
return 0, "success", data_list
def get_next_page_data(self, resp, month):
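        # Parse a JSON page of call details (page 2 onwards) into one dict per call.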
data_list = []
try:
js = resp.json()
raw_list = js.get("list")
for i in raw_list:
data = {}
info = i.get("cs")
# print info
if not info:
self.log("crawler", "异常的数据{}".format(i), resp)
continue
call_tel = info[5]
call_cost = info[0]
call_time = info[1]
call_from = info[7]
call_method = info[4]
call_duration = info[2]
call_type = info[3]
data['call_tel'] = call_tel
data['call_cost'] = call_cost
data['call_time'] = self.time_stamp(call_time)
raw_call_from = call_from
call_from, error = self.formatarea(raw_call_from)
if call_from:
data['call_from'] = call_from
else:
data['call_from'] = raw_call_from
data['call_method'] = call_method
data['call_type'] = call_type
data['call_duration'] = self.time_format(call_duration)
data['call_to'] = ''
data['month'] = month
data_list.append(data)
except:
error = traceback.format_exc()
self.log("crawler", "获取详单未知异常{}".format(error), resp)
return 9, "json_error", data_list
return 0, "success", data_list
def get_next_page(self, month, ref_url):
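        # Fetch and parse every remaining page of call details for the month, retrying
        # each page up to max_retry times; returns the records plus any partially missing months.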
data_list = []
part_miss_list = []
for times in range(self.max_retry):
message = ""
url = "http://wap.zj.10086.cn/mobileStore/details/query/queryDetails.do"
headers = {
"X-Requested-With": "XMLHttpRequest",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Referer": ref_url,
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
parmas = {"r": "{}".format(random.random())}
data = {
"listType": "1",
"yearMonth": month,
"pageNum": "2",
"type": "more",
"id": "1063"
}
code, key, resp = self.post(url, headers=headers, data=data, params=parmas)
if code != 0:
return code, key, [], []
message = resp
code, key, result = self.get_next_page_data(resp, month)
if code == 0 and result:
data_list.extend(result)
break
else:
self.log("crawler", "重试第二页失败", message)
part_miss_list.append(month)
return 9, "json_error", [], part_miss_list
try:
result = resp.json()
totalPage = result.get("totalPage")
except:
error = traceback.format_exc()
part_miss_list.append(month)
self.log("crawler", "解析分页失败{}".format(error), resp)
return 9, "json_error", [], part_miss_list
        # Once execution reaches this point, the partial-miss entries added earlier must be cleared
part_miss_list = []
for page_num in range(3, totalPage+1):
data['pageNum'] = page_num
for times in range(self.max_retry):
code, key, resp = self.post(url, headers=headers, data=data, params=parmas)
if code != 0:
return code, key, [], []
message = resp
code, key, result = self.get_next_page_data(resp, month)
if code == 0 and result:
data_list.extend(result)
break
else:
self.log("crawler", "重试第{}页失败".format(page_num), message)
part_miss_list.append(month)
return 0, "success", data_list, part_miss_list
def crawl_call_log(self, **kwargs):
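        # Crawl the last six months of call logs, retrying failed months within a time budget.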
miss_list = []
pos_miss_list = []
part_miss_list = []
data_list = []
month_retrys = [(x, self.max_retry) for x in self.monthly_period()]
time_fee = 0
full_time = 40.0
rand_time = random.randint(20, 40) / 10.0
st_time = time.time()
crawler_error = 0
result_list = []
log_for_retrys = []
retrys_limit = -4
while month_retrys:
month, retrys = month_retrys.pop(0)
retrys -= 1
if retrys < retrys_limit:
self.log("crawler", "重试完成", "")
miss_list.append(month)
continue
log_for_retrys.append((month, retrys, time_fee))
url = "http://wap.zj.10086.cn/mobileStore/details/query/queryDetails.do?id=1063&listType=1&yearMonth={}&r={}".format(month, random.random())
headers = {
"Accept": "application/json",
"X-Requested-With": "XMLHttpRequest",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Referer": "http://wap.zj.10086.cn/mobileStore/details/query/queryDetails.do?id=1063&listType=1",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
data = {"id": "1063"}
code, key, resp = self.post(url, headers=headers, data=data)
if code == 0:
code, key, result = self.get_page_data(resp, month)
if code == 0 and result:
result_list.extend(result)
if key == "has_next":
code, key, next_result, next_part_miss = self.get_next_page(month, url)
part_miss_list.extend(next_part_miss)
result_list.extend(next_result)
continue
if retrys >= 0:
time_fee = time.time() - st_time
month_retrys.append((month, retrys))
elif time_fee < full_time:
time.sleep(rand_time)
time_fee = time.time() - st_time
month_retrys.append((month, retrys))
else:
self.log("crawler", "多次重试失败", "")
miss_list.append(month)
miss_list = list(set(miss_list))
part_miss_list = list(set(part_miss_list))
miss_list.sort(reverse=True)
part_miss_list.sort(reverse=True)
if len(miss_list) == 6:
return 9, "website_busy_error", data_list, miss_list, pos_miss_list, part_miss_list
return 0, "success", result_list, miss_list, pos_miss_list, part_miss_list
def crawl_info(self, **kwargs):
url = ""
return 9, "unknown_error", {}
def crawl_phone_bill(self, **kwargs):
url = "http://wap.zj.10086.cn/"
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
code, key, resp = self.get(url, headers=headers)
if code != 0:
return code, key, [], []
url = "http://wap.zj.10086.cn/new/authentication?uid=59&chId=1&nwId=wap&ul_nwid=wap&ul_scid={}".format(self.ul_scid)
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Referer": "http://wap.zj.10086.cn/szhy/",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9"
}
code, key, resp = self.get(url, headers=headers)
if code != 0:
return code, key, [], []
if resp.history:
for his in resp.history:
self.log("crawler", "记录跳转", his)
try:
ref_url = resp.url
session_str = re.findall("session=(.*?)&", ref_url)[0]
nonce = re.findall("nonce=(.*?)&", ref_url)[0]
encpn = re.findall("encpn=(.*?)&", ref_url)[0]
except:
error = traceback.format_exc()
self.log("crawler", "获取账单信息session_str失败{}".format(error), resp)
return 9, "html_error", [], []
        # User authentication
url = "http://app.m.zj.chinamobile.com/zjweb/Authentic.do"
params = {
"cf": "10058",
"nonce": nonce,
"encpn": encpn
}
headers = {
"X-Requested-With": "XMLHttpRequest",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9",
"Accept": "application/json, text/javascript, */*; q=0.01"
}
code, key, resp = self.post(url, headers=headers, params=params)
if code != 0:
return code, key, [], []
result_list = []
miss_list = []
crawler_error = 0
url = "http://app.m.zj.chinamobile.com/zjweb/Bill.do"
params = {
"ym": "0",
"session": session_str,
"num": kwargs['tel']
}
headers = {
"X-Requested-With": "XMLHttpRequest",
"Referer": ref_url,
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9c"
}
for month_str in self.monthly_period():
if month_str == datetime.datetime.now().strftime("%Y%m"):
ym = "0"
else:
ym = month_str
params.update({"ym": ym})
for i in range(self.max_retry):
code, key, resp = self.post(url, headers=headers, params=params)
if code == 0:
code, key, result = self.get_bill_data(resp, month_str)
if code == 0:
result_list.append(result)
break
else:
miss_list.append(month_str)
if len(miss_list) == 6:
if crawler_error > 0:
self.log("crawler", "获取账单中有异常", "")
return 9, "crawl_error", [], []
return 9, "website_busy_error", result_list, miss_list
return 0, "success", result_list, miss_list
def get_bill_data(self, resp, month):
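        # Map one month's bill JSON onto the standard bill fields (package, calls, SMS, data, ...).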
try:
js = resp.json()
info_list = js.get('qry').get('list')
bill_amount = js.get('qry').get('total')
info_dict = {
"bill_amount": bill_amount,
"bill_month": month,
"bill_package": "",
"bill_ext_calls": "",
"bill_ext_sms": "",
"bill_ext_data": "",
"bill_zengzhifei": "",
"bill_daishoufei": "",
"bill_qita": "",
}
for info in info_list:
name = info['name']
fee = info['amount']
if name == u"套餐及固定费":
info_dict["bill_package"] = fee
if name == u"语音通信费":
info_dict['bill_ext_calls'] = fee
if name == u"短彩信费":
info_dict['bill_ext_sms'] = fee
if name == u"上网费":
info_dict['bill_ext_data'] = fee
if name == u"增值业务费":
info_dict['bill_zengzhifei'] = fee
if name == u"代收费":
info_dict['bill_daishoufei'] = fee
return 0, "success", info_dict
except:
error = traceback.format_exc()
self.log("crawler", "解析账单数据失败 {}".format(error), resp)
return 9, "json_error", {}
def monthly_period(self, length=6, strf='%Y%m'):
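        # Return the current month and the previous (length - 1) months as "YYYYMM" strings.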
current_time = datetime.datetime.now()
monthly_period_list = []
for month_offset in range(0, length):
monthly_period_list.append((current_time - relativedelta(months=month_offset)).strftime(strf))
return monthly_period_list
if __name__ == "__main__":
c = Crawler()
# USER_ID = "13575702249"
USER_ID = "13575702249"
USER_PASSWORD = "125274"
c.self_test(tel=USER_ID, pin_pwd=USER_PASSWORD)
|
mit
| -4,666,179,527,746,855,000 | 37.556369 | 191 | 0.473035 | false |
ianyfan/alevel
|
build.py
|
1
|
5117
|
#!venv/bin/python
import os
import markdown
md = markdown.Markdown(output_format='html5',
extensions=['markdown.extensions.tables', 'markdown.extensions.nl2br'])
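# Build the nested <ol> navigation markup from the parsed section tree.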
def make_nav(node):
if 'href' in node:
indent = ' '*(2*len(node['href']) + 6)
if 'children' in node:
children = '\n'.join(map(lambda item: make_nav(item[1]),
sorted(node['children'].items())))
return ('{0}<li>\n{0} <a href="#{1}">{1} {2}</a>\n{0} <ol>\n{3}\n'
'{0} </ol>\n{0}</li>').format(
indent, node['href'], node['heading'], children
) if 'href' in node else children
else:
return '{0}<li><a href="#{1}">{1} {2}</a></li>'.format(
indent, node['href'], node['heading'])
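# Recursively render a section tree: leaf nodes become specification/exposition tables.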
def make_html(node):
if 'children' in node:
html = '\n'.join(map(lambda item: make_html(item[1]),
sorted(node['children'].items())))
else:
try:
html = ''' <table>
<thead>
<tr>
<th>Specification</th>
<th>Exposition</th>
</tr>
</thead>
<tbody>
{}
</tbody>
</table>'''.format('\n'.join(
''' <tr>
<td>
{}
</td>
<td>{}</td>'''.format(md.convert(spec), md.convert(node['exposition'][n]) if 'exposition' in node and n in node['exposition'] else '\n ')
for n, spec in node['specification'].items()
))
except:
html = ''
try:
return ' <h{0} id="{1}">{1} {2}</h{0}>\n{3}'.format(
(len(node['href']) + 1)//2, node['href'], node['heading'], html)
except:
return html
subjects = ('electronics', 'physics')
data = {}
navs = {}
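# Parse each subject's headings.md into a nested section tree, then read the numbered
# specification.md and exposition.md entries into the matching nodes.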
for subject in subjects:
sections = data[subject] = {}
with open(subject + '/headings.md') as f:
for line in f:
section, heading = line.strip().partition(' ')[0::2]
*parents, current = map(int, section.split('.'))
node = sections
for parent in parents:
node = node['children'][parent]
if 'children' not in node:
node['children'] = {}
node['children'][current] = {'heading': heading.lstrip(),
'href': section}
navs[subject] = make_nav(sections)
for document in ('specification', 'exposition'):
file_name = subject + '/' + document + '.md'
with open(file_name) as f:
in_section = False
for line_no, line in enumerate(f, 1):
if not in_section:
try:
parents = map(int, line.strip().split('.'))
node = sections
for parent in parents:
node = node['children'][parent]
except KeyError:
pass
except:
raise Exception('Error in ' + file_name + ', line ' + str(line_no))
else:
markdown = node[document] = {}
in_section = True
elif line == '\n':
in_section = False
elif line == '\\\n':
markdown[last] += '\n'
else:
number, _, line = line.partition(' ')
if number == '\\':
markdown[last] += line
else:
last = int(number)
markdown[last] = line
for subject in subjects:
os.makedirs('alevel/' + subject, exist_ok=True)
with open('alevel/' + subject + '/index.html', 'w+') as f:
f.write('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<title>{}</title>
<link href="/alevel/style.css" rel="stylesheet">
<script src="/alevel/script.js" async></script>
<script type="text/x-mathjax-config">
MathJax.Ajax.config.path["Contrib"] = "//cdn.mathjax.org/mathjax/contrib";
MathJax.Hub.Config({{
jax: ["input/TeX", "output/CommonHTML", "output/NativeMML"],
TeX: {{ extensions: ["AMSmath.js", "AMSsymbols.js", "noErrors.js", "noUndefined.js"] }},
extensions: ["tex2jax.js", "mml3.js", "[Contrib]/siunitx/siunitx.js"]
}});
</script>
<script src="//cdn.mathjax.org/mathjax/latest/MathJax.js" async></script>
</head>
<body>
<nav class="{}">
<ul>
{}
</ul>
{}
</nav>
<section>
{}
</section>
<footer>
</footer>
<div id="shadow"></div>
<button></button>
</body>
</html>'''.format(
subject.capitalize(), subject,
'\n'.join([' <li><a href="/alevel/{}">{}</a></li>'.format(s, s.title()) for s in subjects]),
'\n'.join([' <ol id="{}">\n{}\n </ol>'.format(s, navs[s]) for s in subjects]),
make_html(data[subject])
)
)
|
mit
| -7,544,063,696,612,175,000 | 33.574324 | 160 | 0.465507 | false |
saketkc/moca-scripts
|
encode_images.py
|
1
|
10888
|
from __future__ import division
import os
from pymongo import MongoClient
import numpy as np
from moca.helpers import read_memefile, get_total_sequences
from moca.helpers.db import create_binary_pickle, encode_image#, unpickle_numpy_array
from moca.helpers import get_max_occuring_bases, search_files
#from moca.helpers import read_centrimo_txt
#from moca.helpers import read_centrimo_stats
from moca.helpers.seqstats import get_center_enrichment, get_motif_evalue, perform_t_test, get_pearson_corr
from moca.helpers.seqstats import get_flanking_scores, remove_flanking_scores, perform_OLS
__root_dir__ = '/media/data1/encode_analysis'
flank_length = 5
COUNT_TYPE = 'counts'
client = MongoClient()
db = client.moca_encode_tf
db.encode_tf_stats.remove()
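# Walk every ENCODE experiment directory, read its MEME/CentriMo output and the
# GERP/PhyloP conservation scores, and insert one stats document per motif into MongoDB.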
for d in os.listdir(__root_dir__):
results = db.tf_metadata.find({'@id': '/experiments/{}/'.format(d)})
meme_file = os.path.join(__root_dir__, d, 'moca_output', 'meme_out', 'meme.txt')
centrimo_dir = os.path.join(__root_dir__, d, 'moca_output', 'centrimo_out')
if not os.path.isfile(meme_file):
print 'Skipping {}'.format(d)
continue
meme_info = read_memefile(meme_file)
total_sequences = get_total_sequences(meme_file)
for i in range(0, meme_info['total_motifs']):
record = meme_info['motif_records'][i]
max_occur = get_max_occuring_bases(record, max_count=1, count_type=COUNT_TYPE)
motif_freq = []
for position in max_occur:
motif_freq.append(position[0][1])
motif_freq = np.asarray(motif_freq)
fimo_sample = os.path.join(os.path.dirname(meme_file), 'fimo_out_{}'.format(i+1))
fimo_random = os.path.join(os.path.dirname(meme_file), 'fimo_random_{}'.format(i+1))
if not os.path.isfile(os.path.join(fimo_sample, 'gerp.mean.txt')):
db.encode_tf_stats.insert_one({ 'encode_id': d,
'motif_number': i+1,
'motif_missing_error': True})
continue
motif_enrichment = meme_info['motif_occurrences']['motif{}'.format(i+1)]/total_sequences
centrimo_dir = os.path.abspath(centrimo_dir)
centrimo_txt = os.path.join(centrimo_dir, 'centrimo.txt')
enrichment_info = get_center_enrichment(centrimo_txt, i+1)
center_enrichment = enrichment_info['enrichment']
center_enrichment_pval = enrichment_info['enrichment_pval']
motif_evalue = get_motif_evalue(record)
motif_logo = os.path.join(os.path.dirname(meme_file), 'logo{}.png'.format(i+1))
motif_logorc = os.path.join(os.path.dirname(meme_file), 'logo_rc{}.png'.format(i+1))
motif_logo_encoded = encode_image(motif_logo)
motif_logorc_encoded = encode_image(motif_logorc)
moca_plot = os.path.join(os.path.dirname(meme_file), 'moca_plots', 'moca{}.png'.format(i+1))
moca_plotrc = os.path.join(os.path.dirname(meme_file), 'moca_plots', 'moca{}_rc.png'.format(i+1))
moca_plot_encoded = encode_image(moca_plot)
moca_plotrc_encoded = encode_image(moca_plot)
if os.stat(os.path.join(fimo_sample, 'gerp.mean.txt')).st_size == 0:
db.encode_tf_stats.insert_one({ 'encode_id': d,
'motif_number': i+1,
'center_enrichment': center_enrichment,
'center_enrichment_pval': center_enrichment_pval,
'motif_evalue': motif_evalue,
'motif_enrichment': motif_enrichment,
'motif_logo': motif_logo_encoded,
'motif_logorc': motif_logorc_encoded,
'moca_plot': moca_plot_encoded,
'moca_plotrc': moca_plotrc_encoded,
'no_fimo_hit_sample': True})
continue
gerp_mean_sample = np.loadtxt(os.path.join(fimo_sample, 'gerp.mean.txt')).tolist()
phylop_mean_sample = np.loadtxt(os.path.join(fimo_sample, 'phylop.mean.txt')).tolist()
delta_phylop_ttest = perform_t_test(remove_flanking_scores(phylop_mean_sample, flank_length),
get_flanking_scores(phylop_mean_sample, flank_length))
p_delta_phylop = delta_phylop_ttest['one_sided_pval']
delta_phylop = delta_phylop_ttest['delta']
delta_gerp_ttest = perform_t_test(remove_flanking_scores(gerp_mean_sample, flank_length),
get_flanking_scores(gerp_mean_sample, flank_length))
p_delta_gerp = delta_gerp_ttest['one_sided_pval']
delta_gerp = delta_gerp_ttest['delta']
phylop_sample_ols = perform_OLS(remove_flanking_scores(phylop_mean_sample, flank_length), motif_freq)
gerp_sample_ols = perform_OLS(remove_flanking_scores(gerp_mean_sample, flank_length), motif_freq)
phylop_sample_fit = phylop_sample_ols['regression_fit']
gerp_sample_fit = gerp_sample_ols['regression_fit']
corr_phylop_sample = get_pearson_corr(motif_freq, remove_flanking_scores(phylop_mean_sample, flank_length))
corr_gerp_sample = get_pearson_corr(motif_freq, remove_flanking_scores(gerp_mean_sample, flank_length))
r_phylop_sample, r_phylop_sample_pval = corr_phylop_sample
r_gerp_sample, r_gerp_sample_pval = corr_gerp_sample
r2_phylop_sample = phylop_sample_fit.rsquared
r2_gerp_sample = gerp_sample_fit.rsquared
#gerp_raw= np.loadtxt(os.path.join(fimo_sample, 'gerp.raw.txt'))
#phylop_raw = np.loadtxt(os.path.join(fimo_sample, 'phylop.raw.txt'))
if os.stat(os.path.join(fimo_random, 'gerp.mean.txt')).st_size == 0:
db.encode_tf_stats.insert_one({ 'encode_id': d,
'motif_number': i+1,
'center_enrichment': center_enrichment,
'center_enrichment_pval': center_enrichment_pval,
'motif_evalue': motif_evalue,
'motif_enrichment': motif_enrichment,
'phylop_mean_sample': phylop_mean_sample,
'gerp_mean_sample': gerp_mean_sample,
'r2_phylop_sample': r2_phylop_sample,
'r_phylop_sample': r_phylop_sample,
'r_phylop_sample_pval': r_phylop_sample_pval,
'r2_gerp_sample': r2_gerp_sample,
'r_gerp_sample': r_gerp_sample,
'r_gerp_sample_pval': r_gerp_sample_pval,
'motif_logo': motif_logo_encoded,
'motif_logorc': motif_logorc_encoded,
'moca_plot': moca_plot_encoded,
'moca_plotrc': moca_plotrc_encoded,
'no_fimo_hit_control': True})
continue
gerp_mean_control = np.loadtxt(os.path.join(fimo_random, 'gerp.mean.txt')).tolist()
phylop_mean_control = np.loadtxt(os.path.join(fimo_random, 'phylop.mean.txt')).tolist()
phylop_control_ols = perform_OLS(remove_flanking_scores(phylop_mean_control, flank_length), motif_freq)
gerp_control_ols = perform_OLS(remove_flanking_scores(gerp_mean_control, flank_length), motif_freq)
phylop_control_fit = phylop_control_ols['regression_fit']
gerp_control_fit = gerp_control_ols['regression_fit']
corr_phylop_control = get_pearson_corr(motif_freq, remove_flanking_scores(phylop_mean_control, flank_length))
corr_gerp_control = get_pearson_corr(motif_freq, remove_flanking_scores(gerp_mean_control, flank_length))
r_phylop_control, r_phylop_control_pval = corr_phylop_control
r_gerp_control, r_gerp_control_pval = corr_gerp_control
r2_phylop_control = phylop_control_fit.rsquared
r2_gerp_control = gerp_control_fit.rsquared
db.encode_tf_stats.insert_one({ 'encode_id': d,
'motif_number': i+1,
'center_enrichment': center_enrichment,
'center_enrichment_pval': center_enrichment_pval,
'motif_evalue': motif_evalue,
'motif_enrichment': motif_enrichment,
'phylop_mean_sample': phylop_mean_sample,
'gerp_mean_sample': gerp_mean_sample,
'r2_phylop_sample': r2_phylop_sample,
'r_phylop_sample': r_phylop_sample,
'r_phylop_sample_pval': r_phylop_sample_pval,
'r2_gerp_sample': r2_gerp_sample,
'r_gerp_sample': r_gerp_sample,
'r_gerp_sample_pval': r_gerp_sample_pval,
#'gerp_raw': create_binary_pickle(gerp_raw),
#'phylop_raw': create_binary_pickle(phylop_raw),
'phylop_mean_control': phylop_mean_control,
'gerp_mean_control': gerp_mean_control,
'delta_phylop_over_control': delta_phylop,
'delta_phylop_pval': p_delta_phylop,
'delta_gerp_over_control': delta_gerp,
'delta_gerp_pval': p_delta_gerp,
'r2_phylop_control': r2_phylop_control,
'r_phylop_control': r_phylop_control,
'r_phylop_control_pval': r_phylop_control_pval,
'r2_gerp_control': r2_gerp_control,
'r_gerp_control_pval': r_gerp_control_pval,
'motif_logo': motif_logo_encoded,
'motif_logorc': motif_logorc_encoded,
'moca_plot': moca_plot_encoded,
'moca_plotrc': moca_plotrc_encoded,
})
|
isc
| 1,487,184,823,585,802,200 | 57.854054 | 117 | 0.52342 | false |
tonygalmiche/is_plastigray
|
is_reach.py
|
1
|
16005
|
# -*- coding: utf-8 -*-
from openerp import models,fields,api,SUPERUSER_ID
from openerp.tools.translate import _
from openerp.exceptions import Warning
import datetime
import logging
_logger = logging.getLogger(__name__)
class is_reach(models.Model):
_name='is.reach'
_order='name desc'
name = fields.Date("Date du calcul", required=True)
date_debut = fields.Date("Date de début" , required=True, help="Date de début des livraisons")
date_fin = fields.Date("Date de fin" , required=True, help="Date de fin des livraisons")
clients = fields.Char("Clients", help="Codes des clients à 6 chiffres séparés par un espace")
partner_id = fields.Many2one("res.partner", 'Client pour rapport', domain=[('customer','=',True),('is_company','=',True)])
product_ids = fields.One2many('is.reach.product', 'reach_id', u"Produits livrés")
_defaults = {
'name' : lambda *a: fields.datetime.now(),
}
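    # Rebuild the analysis: list the products delivered over the selected period/customers,
    # explode their BOMs and accumulate material and substance (CAS) weights.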
@api.multi
def calcul_action(self):
cr, uid, context = self.env.args
for obj in self:
            #** List of the requested customers ********************************
clients=[]
if obj.clients:
res=obj.clients.split(' ')
for r in res:
if r not in clients and r:
clients.append("'"+r+"'")
clients=','.join(clients)
#*******************************************************************
            #** Deliveries over the requested period and customers *************
SQL="""
select
sp.partner_id as partner_id,
pp.id as product_id,
pt.is_mold_dossierf as is_mold_dossierf,
pt.is_ref_client as ref_client,
pt.is_category_id as is_category_id,
pt.is_gestionnaire_id as is_gestionnaire_id,
pt.weight_net as weight_net,
sum(sm.product_uom_qty)
from stock_picking sp inner join stock_move sm on sm.picking_id=sp.id
inner join product_product pp on sm.product_id=pp.id
inner join product_template pt on pp.product_tmpl_id=pt.id
inner join res_partner rp on sp.partner_id=rp.id
where
sp.picking_type_id=2 and
sm.state='done' and
sp.is_date_expedition>='"""+str(obj.date_debut)+"""' and
sp.is_date_expedition<='"""+str(obj.date_fin)+"""'
"""
if clients:
SQL=SQL+" and rp.is_code in ("+clients+") "
SQL=SQL+"""
group by
sp.partner_id,
pp.id,
pt.id,
pt.is_code,
pt.is_category_id,
pt.is_gestionnaire_id,
pt.is_mold_dossierf,
pt.is_ref_client,
pt.weight_net
order by pt.is_code
"""
cr.execute(SQL)
result = cr.fetchall()
obj.product_ids.unlink()
ct=0
nb=len(result)
for row in result:
ct=ct+1
qt_livree=row[7]
vals={
'reach_id' : obj.id,
'partner_id' : row[0],
'name' : row[1],
'moule' : row[2],
'ref_client' : row[3],
'category_id' : row[4],
'gestionnaire_id': row[5],
'qt_livree' : qt_livree,
'interdit' : 'Non',
}
line=self.env['is.reach.product'].create(vals)
product_id=row[1]
global ordre
ordre=0
product = self.env['product.product'].browse(product_id)
_logger.info(str(ct)+'/'+str(nb)+' : '+str(product.is_code))
self.cbb_multi_niveaux(line,product)
                #** Compute the weight of the materials ************************
poids_produit=0
for matiere in line.matiere_ids:
poids_produit=poids_produit+matiere.qt_nomenclature
line.poids_produit_unitaire = poids_produit
line.poids_produit = poids_produit*qt_livree
#***************************************************************
                #** Compute the weight of the substances and the CAS codes *****
poids_substances=0
codes_cas=[]
interdits=[]
for cas in line.cas_ids:
interdit=cas.name.interdit
if interdit=='Oui':
if cas.name.code_cas not in interdits:
interdits.append(cas.name.code_cas)
cas.poids_produit_unitaire = poids_produit
cas.poids_produit = poids_produit*qt_livree
#pourcentage_substance=0
#if line.poids_produit!=0:
# pourcentage_substance=100*cas.poids_substance/line.poids_produit
#cas.pourcentage_substance=pourcentage_substance
poids_substances=poids_substances+cas.poids_substance
code_cas=cas.name.code_cas
if code_cas not in codes_cas:
codes_cas.append(code_cas)
line.codes_cas=', '.join(codes_cas)
line.interdit=', '.join(interdits)
pourcentage_substances=0
if line.poids_produit!=0:
pourcentage_substances=100*poids_substances/line.poids_produit
line.poids_substances=poids_substances
line.pourcentage_substances=pourcentage_substances
#***************************************************************
#*******************************************************************
def get_poids_substances(self):
ret = {}
for product in self.product_ids:
for cas in product.get_cas_unique():
name = cas['name']
ret.setdefault(name, {'name': name,
'interdit': cas['interdit'],
'poids': 0})
ret[name]['poids'] += cas['poids']
return ret.values()
@api.multi
def cbb_multi_niveaux(self, reach_product,product, quantite=1, niveau=1):
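        # Recursively explode the product's BOM, recording delivered materials and
        # their CAS substances at every level.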
global ordre
        #** Record the delivered material **************************************
if len(product.is_code_cas_ids)>0:
vals={
'reach_product_id' : reach_product.id,
'reach_id' : reach_product.reach_id.id,
'qt_livree' : reach_product.qt_livree,
'product_id' : product.id,
'qt_nomenclature' : quantite,
'qt_matiere_livree' : reach_product.qt_livree*quantite,
}
res=self.env['is.reach.product.matiere'].create(vals)
#***********************************************************************
        #** Record the CAS codes of this item **********************************
for cas in product.is_code_cas_ids:
poids_substance = reach_product.qt_livree * quantite * cas.poids/100
interdit=cas.code_cas_id.interdit
vals={
'reach_product_id' : reach_product.id,
'reach_id' : reach_product.reach_id.id,
'partner_id' : reach_product.partner_id.id,
'product_id' : reach_product.name.id,
'moule' : reach_product.moule,
'ref_client' : reach_product.ref_client,
'category_id' : reach_product.category_id.id,
'gestionnaire_id' : reach_product.gestionnaire_id.id,
'qt_livree' : reach_product.qt_livree,
'matiere_id' : product.id,
'name' : cas.code_cas_id.id,
'interdit' : interdit,
'poids_substance' : poids_substance,
'pourcentage_substance': cas.poids,
'poids_autorise' : cas.code_cas_id.poids_autorise,
}
res=self.env['is.reach.product.cas'].create(vals)
#***********************************************************************
bom_obj = self.env['mrp.bom']
bom_id = bom_obj._bom_find(product.product_tmpl_id.id, properties=None)
bom = bom_obj.browse(bom_id)
res= bom_obj._bom_explode(bom, product, 1)
for line in res[0]:
ordre=ordre+1
line_product = self.env['product.product'].browse(line['product_id'])
line_quantite = quantite*line['product_qty']
self.cbb_multi_niveaux(reach_product,line_product, line_quantite, niveau+1)
@api.multi
def produits_livres_action(self):
for obj in self:
return {
'name': u'Analyse REACH par produit',
'view_mode': 'tree,form',
'view_type': 'form',
'res_model': 'is.reach.product',
'domain': [
('reach_id' ,'=', obj.id),
],
'context': {
'default_reach_id' : obj.id,
},
'type': 'ir.actions.act_window',
'limit': 1000,
}
@api.multi
def matieres_livrees_action(self):
for obj in self:
return {
'name': u'Analyse REACH par matière',
'view_mode': 'tree,form',
'view_type': 'form',
'res_model': 'is.reach.product.matiere',
'domain': [
('reach_id' ,'=', obj.id),
],
'context': {
'default_reach_id' : obj.id,
},
'type': 'ir.actions.act_window',
'limit': 1000,
}
@api.multi
def substances_livrees_action(self):
for obj in self:
return {
'name': u'Analyse REACH par substance',
'view_mode': 'tree,form',
'view_type': 'form',
'res_model': 'is.reach.product.cas',
'domain': [
('reach_id','=', obj.id),
],
'context': {
'default_reach_id': obj.id,
},
'type': 'ir.actions.act_window',
'limit': 1000,
}
class is_reach_product(models.Model):
_name='is.reach.product'
_order='partner_id, name'
reach_id = fields.Many2one('is.reach', "Analyse REACH", required=True, ondelete='cascade')
partner_id = fields.Many2one('res.partner', 'Client livré')
name = fields.Many2one('product.product', 'Produit livré')
moule = fields.Char("Moule")
ref_client = fields.Char("Réf client")
category_id = fields.Many2one('is.category', 'Catégorie')
gestionnaire_id = fields.Many2one('is.gestionnaire', 'Gestionnaire')
qt_livree = fields.Integer("Quantité livrée", required=True)
interdit = fields.Char("Substance réglementée")
poids_substances = fields.Float("Poids total des substances à risque")
poids_produit_unitaire = fields.Float("Poids unitaire des matières", digits=(14,4))
poids_produit = fields.Float("Poids des matières livrées")
pourcentage_substances = fields.Float("% du poids des substances à risque", digits=(14,4))
codes_cas = fields.Char("Codes CAS livrés")
cas_ids = fields.One2many('is.reach.product.cas' , 'reach_product_id', u"Substances livrées")
matiere_ids = fields.One2many('is.reach.product.matiere', 'reach_product_id', u"Matières livrées")
@api.multi
def substances_livrees_action(self):
for obj in self:
return {
'name': u'Analyse REACH par substance',
'view_mode': 'tree,form',
'view_type': 'form',
'res_model': 'is.reach.product.cas',
'domain': [
('reach_id' ,'=', obj.reach_id.id),
('reach_product_id' ,'=', obj.id),
],
'context': {
'default_reach_id' : obj.reach_id.id,
'default_reach_product_id': obj.id,
},
'type': 'ir.actions.act_window',
'limit': 1000,
}
def get_matiere_unique(self):
matiere_ids = []
for matiere_id in self.matiere_ids:
if matiere_id.product_id.id not in matiere_ids:
matiere_ids.append(matiere_id.product_id.id)
yield matiere_id
def get_cas_unique(self):
ret = {}
for cas in self.cas_ids:
name = cas.name.code_cas
ret.setdefault(name, {'name': name,
'interdit': cas.interdit,
'pourcentage_substance': cas.pourcentage_substance,
'poids': 0})
ret[name]['poids'] += cas.poids_substance
return ret.values()
class is_reach_product_matiere(models.Model):
_name='is.reach.product.matiere'
_order='product_id'
reach_id = fields.Many2one('is.reach', 'Analyse REACH')
reach_product_id = fields.Many2one('is.reach.product', 'Ligne produit REACH', required=True, ondelete='cascade')
qt_livree = fields.Integer("Quantité produit fini livrée")
product_id = fields.Many2one('product.product', 'Matière livrée')
qt_nomenclature = fields.Float("Qt nomenclature", digits=(14,6))
qt_matiere_livree = fields.Float("Quantité matière livrée", digits=(14,2))
class is_reach_product_cas(models.Model):
_name='is.reach.product.cas'
_order='name'
reach_product_id = fields.Many2one('is.reach.product', 'Ligne produit REACH', required=True, ondelete='cascade')
name = fields.Many2one('is.code.cas', 'Substance livrée')
reach_id = fields.Many2one('is.reach', 'Analyse REACH')
partner_id = fields.Many2one('res.partner', 'Client livré')
product_id = fields.Many2one('product.product', 'Produit livré')
moule = fields.Char("Moule")
ref_client = fields.Char("Réf client")
category_id = fields.Many2one('is.category', 'Catégorie')
gestionnaire_id = fields.Many2one('is.gestionnaire', 'Gestionnaire')
qt_livree = fields.Integer("Quantité livrée", required=True)
matiere_id = fields.Many2one('product.product', 'Matière livrée')
interdit = fields.Selection([
('Oui','Oui'),
('Non','Non'),
], "Substance réglementée")
poids_substance = fields.Float("Poids total substance à risque")
poids_produit_unitaire = fields.Float("Poids produit unitaire", digits=(14,4))
poids_produit = fields.Float("Poids produit livré")
pourcentage_substance = fields.Float("% du poids de cette substance à risque", digits=(14,4))
poids_autorise = fields.Float('% de poids autorisé')
|
mit
| 7,814,359,526,069,614,000 | 42.947658 | 128 | 0.476964 | false |
napalm-automation/napalm-yang
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/extended_ipv4_reachability/prefixes/prefix/subTLVs/subTLVs_/prefix_sid/__init__.py
|
1
|
15078
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import sid
class prefix_sid(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/extended-ipv4-reachability/prefixes/prefix/subTLVs/subTLVs/prefix-sid. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container defines segment routing extensions for prefixes.
"""
__slots__ = ("_path_helper", "_extmethods", "__sid")
_yang_name = "prefix-sid"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__sid = YANGDynClass(
base=YANGListType(
False,
sid.sid,
yang_name="sid",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sid",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"extended-ipv4-reachability",
"prefixes",
"prefix",
"subTLVs",
"subTLVs",
"prefix-sid",
]
def _get_sid(self):
"""
Getter method for sid, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/extended_ipv4_reachability/prefixes/prefix/subTLVs/subTLVs/prefix_sid/sid (list)
YANG Description: Prefix Segment-ID list. IGP-Prefix Segment is an IGP segment attached
to an IGP prefix. An IGP-Prefix Segment is global (unless explicitly
advertised otherwise) within the SR/IGP domain.
"""
return self.__sid
def _set_sid(self, v, load=False):
"""
Setter method for sid, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/extended_ipv4_reachability/prefixes/prefix/subTLVs/subTLVs/prefix_sid/sid (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_sid is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sid() directly.
YANG Description: Prefix Segment-ID list. IGP-Prefix Segment is an IGP segment attached
to an IGP prefix. An IGP-Prefix Segment is global (unless explicitly
advertised otherwise) within the SR/IGP domain.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
sid.sid,
yang_name="sid",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sid",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """sid must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,sid.sid, yang_name="sid", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="sid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__sid = t
if hasattr(self, "_set"):
self._set()
def _unset_sid(self):
self.__sid = YANGDynClass(
base=YANGListType(
False,
sid.sid,
yang_name="sid",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sid",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
sid = __builtin__.property(_get_sid)
_pyangbind_elements = OrderedDict([("sid", sid)])
from . import sid
class prefix_sid(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/extended-ipv4-reachability/prefixes/prefix/subTLVs/subTLVs/prefix-sid. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container defines segment routing extensions for prefixes.
"""
__slots__ = ("_path_helper", "_extmethods", "__sid")
_yang_name = "prefix-sid"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__sid = YANGDynClass(
base=YANGListType(
False,
sid.sid,
yang_name="sid",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sid",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"extended-ipv4-reachability",
"prefixes",
"prefix",
"subTLVs",
"subTLVs",
"prefix-sid",
]
def _get_sid(self):
"""
Getter method for sid, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/extended_ipv4_reachability/prefixes/prefix/subTLVs/subTLVs/prefix_sid/sid (list)
YANG Description: Prefix Segment-ID list. IGP-Prefix Segment is an IGP segment attached
to an IGP prefix. An IGP-Prefix Segment is global (unless explicitly
advertised otherwise) within the SR/IGP domain.
"""
return self.__sid
def _set_sid(self, v, load=False):
"""
Setter method for sid, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/extended_ipv4_reachability/prefixes/prefix/subTLVs/subTLVs/prefix_sid/sid (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_sid is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sid() directly.
YANG Description: Prefix Segment-ID list. IGP-Prefix Segment is an IGP segment attached
to an IGP prefix. An IGP-Prefix Segment is global (unless explicitly
advertised otherwise) within the SR/IGP domain.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
False,
sid.sid,
yang_name="sid",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sid",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """sid must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType(False,sid.sid, yang_name="sid", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="sid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
}
)
self.__sid = t
if hasattr(self, "_set"):
self._set()
def _unset_sid(self):
self.__sid = YANGDynClass(
base=YANGListType(
False,
sid.sid,
yang_name="sid",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="False",
extensions=None,
),
is_container="list",
yang_name="sid",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=False,
)
sid = __builtin__.property(_get_sid)
_pyangbind_elements = OrderedDict([("sid", sid)])
|
apache-2.0
| -7,184,822,684,662,738,000 | 37.172152 | 517 | 0.552461 | false |
noplay/gns3-gui
|
setup.py
|
1
|
2642
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import tox
errcode = tox.cmdline(self.test_args)
sys.exit(errcode)
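# Note (usage sketch, not part of the packaging metadata): because of the
# cmdclass mapping below, "python setup.py test" delegates to tox, which
# builds the test environments and runs the suite.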
setup(
name="gns3-gui",
version=__import__("gns3").__version__,
url="http://github.com/GNS3/gns3-gui",
license="GNU General Public License v3 (GPLv3)",
tests_require=["tox"],
cmdclass={"test": Tox},
author="Jeremy Grossmann",
author_email="package-maintainer@gns3.net",
description="GNS3 graphical interface for the GNS3 server.",
long_description=open("README.rst", "r").read(),
install_requires=[
"apache-libcloud>=0.14.1",
"requests>=2.4.3",
"paramiko>=1.15.1",
"gns3-converter>=1.2.3",
"raven>=5.2.0"
],
entry_points={
"gui_scripts": [
"gns3 = gns3.main:main",
]
},
packages=find_packages(".", exclude=["docs", "tests"]),
include_package_data=True,
package_data={"gns3": ["configs/*.txt"]},
platforms="any",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications :: Qt",
"Intended Audience :: Information Technology",
"Topic :: System :: Networking",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
        "Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: CPython",
],
)
|
gpl-3.0
| 5,308,925,384,489,474,000 | 33.311688 | 75 | 0.636639 | false |
srio/Orange-XOPPY
|
orangecontrib/xoppy/util/xoppy_undulators.py
|
1
|
20777
|
# --------------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------------
import numpy
import os
from collections import OrderedDict
from orangecontrib.xoppy.util import srundplug
from orangecontrib.xoppy.util.fit_gaussian2d import fit_gaussian2d, info_params, twoD_Gaussian
from srxraylib.util.h5_simple_writer import H5SimpleWriter
import scipy.constants as codata
codata_mee = codata.codata.physical_constants["electron mass energy equivalent in MeV"][0]
# --------------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------------
def xoppy_calc_undulator_spectrum(ELECTRONENERGY=6.04,ELECTRONENERGYSPREAD=0.001,ELECTRONCURRENT=0.2,\
ELECTRONBEAMSIZEH=0.000395,ELECTRONBEAMSIZEV=9.9e-06,\
ELECTRONBEAMDIVERGENCEH=1.05e-05,ELECTRONBEAMDIVERGENCEV=3.9e-06,\
PERIODID=0.018,NPERIODS=222,KV=1.68,KH=0.0,KPHASE=0.0,DISTANCE=30.0,
GAPH=0.001,GAPV=0.001,GAPH_CENTER=0.0,GAPV_CENTER=0.0,\
PHOTONENERGYMIN=3000.0,PHOTONENERGYMAX=55000.0,PHOTONENERGYPOINTS=500,METHOD=2,
USEEMITTANCES=1):
print("Inside xoppy_calc_undulator_spectrum. ")
bl = OrderedDict()
bl['ElectronBeamDivergenceH'] = ELECTRONBEAMDIVERGENCEH
bl['ElectronBeamDivergenceV'] = ELECTRONBEAMDIVERGENCEV
bl['ElectronBeamSizeH'] = ELECTRONBEAMSIZEH
bl['ElectronBeamSizeV'] = ELECTRONBEAMSIZEV
bl['ElectronCurrent'] = ELECTRONCURRENT
bl['ElectronEnergy'] = ELECTRONENERGY
bl['ElectronEnergySpread'] = ELECTRONENERGYSPREAD
bl['Kv'] = KV
bl['Kh'] = KH
bl['Kphase'] = KPHASE
bl['NPeriods'] = NPERIODS
bl['PeriodID'] = PERIODID
bl['distance'] = DISTANCE
bl['gapH'] = GAPH
bl['gapV'] = GAPV
bl['gapHcenter'] = GAPH_CENTER
bl['gapVcenter'] = GAPV_CENTER
if USEEMITTANCES:
zero_emittance = False
else:
zero_emittance = True
#TODO remove file and export e,f arrays
outFile = "undulator_spectrum.spec"
codata_mee = codata.m_e * codata.c**2 / codata.e # electron mass in eV
gamma = bl['ElectronEnergy'] * 1e9 / codata_mee
m2ev = codata.c * codata.h / codata.e # lambda(m) = m2eV / energy(eV)
resonance_wavelength = (1 + (bl['Kv']**2 + bl['Kh']**2) / 2.0) / 2 / gamma**2 * bl["PeriodID"]
resonance_energy = m2ev / resonance_wavelength
print ("Gamma: %f \n"%(gamma))
print ("Resonance wavelength [A]: %g \n"%(1e10*resonance_wavelength))
print ("Resonance energy [eV]: %g \n"%(resonance_energy))
ptot = (NPERIODS/6) * codata.value('characteristic impedance of vacuum') * \
ELECTRONCURRENT * codata.e * 2 * numpy.pi * codata.c * gamma**2 * (KV**2+KH**2) / PERIODID
print ("\nTotal power radiated by the undulator with fully opened slits [W]: %g \n"%(ptot))
if METHOD == 0:
print("Undulator flux calculation using US. Please wait...")
e, f = srundplug.calc1d_us(bl,photonEnergyMin=PHOTONENERGYMIN,photonEnergyMax=PHOTONENERGYMAX,
photonEnergyPoints=PHOTONENERGYPOINTS,fileName=outFile,fileAppend=False,zero_emittance=zero_emittance)
print("Done")
print("\nCheck calculation output at: %s"%(os.path.join(os.getcwd(),"us.out")))
if METHOD == 1:
print("Undulator flux calculation using URGENT. Please wait...")
e, f = srundplug.calc1d_urgent(bl,photonEnergyMin=PHOTONENERGYMIN,photonEnergyMax=PHOTONENERGYMAX,
photonEnergyPoints=PHOTONENERGYPOINTS,fileName=outFile,fileAppend=False,zero_emittance=zero_emittance)
print("Done")
print("\nCheck calculation output at: %s"%(os.path.join(os.getcwd(),"urgent.out")))
if METHOD == 2:
# get the maximum harmonic number
h_max = int(2.5*PHOTONENERGYMAX/resonance_energy)
print ("Number of harmonics considered: %d \n"%(h_max))
print("Undulator flux calculation using SRW. Please wait...")
e, f = srundplug.calc1d_srw(bl,photonEnergyMin=PHOTONENERGYMIN,photonEnergyMax=PHOTONENERGYMAX,
photonEnergyPoints=PHOTONENERGYPOINTS,fileName=outFile,fileAppend=False,zero_emittance=zero_emittance,
srw_max_harmonic_number=h_max)
print("Done")
if zero_emittance:
print("\nNo emittance calculation")
if METHOD == 1 and len(e) == 0: raise Exception("Invalid Input Parameters")
power_in_spectrum = f.sum()*1e3*codata.e*(e[1]-e[0])
print("\nPower from integral of spectrum: %8.3f W"%(power_in_spectrum))
print("\nRatio Power from integral of spectrum over Total emitted power: %5.4f"%(power_in_spectrum / ptot))
spectral_power = f * codata.e * 1e3
try:
cumulated_power = spectral_power.cumsum() * numpy.abs(e[0] - e[1])
except:
cumulated_power = 0.0
return e, f, spectral_power, cumulated_power
def xoppy_calc_undulator_power_density(ELECTRONENERGY=6.04,ELECTRONENERGYSPREAD=0.001,ELECTRONCURRENT=0.2,\
ELECTRONBEAMSIZEH=0.000395,ELECTRONBEAMSIZEV=9.9e-06,\
ELECTRONBEAMDIVERGENCEH=1.05e-05,ELECTRONBEAMDIVERGENCEV=3.9e-06,\
PERIODID=0.018,NPERIODS=222,KV=1.68,KH=0.0,KPHASE=0.0,DISTANCE=30.0,GAPH=0.001,GAPV=0.001,\
HSLITPOINTS=101,VSLITPOINTS=51,METHOD=2,USEEMITTANCES=1,
MASK_FLAG=0,
MASK_ROT_H_DEG=0.0,MASK_ROT_V_DEG=0.0,
MASK_H_MIN=None,MASK_H_MAX=None,
MASK_V_MIN=None,MASK_V_MAX=None,
h5_file="",h5_entry_name="XOPPY_POWERDENSITY",h5_initialize=True,h5_parameters={},
):
print("Inside xoppy_calc_undulator_power_density. ")
bl = OrderedDict()
bl['ElectronBeamDivergenceH'] = ELECTRONBEAMDIVERGENCEH
bl['ElectronBeamDivergenceV'] = ELECTRONBEAMDIVERGENCEV
bl['ElectronBeamSizeH'] = ELECTRONBEAMSIZEH
bl['ElectronBeamSizeV'] = ELECTRONBEAMSIZEV
bl['ElectronCurrent'] = ELECTRONCURRENT
bl['ElectronEnergy'] = ELECTRONENERGY
bl['ElectronEnergySpread'] = ELECTRONENERGYSPREAD
bl['Kv'] = KV
bl['Kh'] = KH
bl['Kphase'] = KPHASE
bl['NPeriods'] = NPERIODS
bl['PeriodID'] = PERIODID
bl['distance'] = DISTANCE
bl['gapH'] = GAPH
bl['gapV'] = GAPV
if USEEMITTANCES:
zero_emittance = False
else:
zero_emittance = True
#TODO remove SPEC file
outFile = "undulator_power_density.spec"
if METHOD == 0:
code = "US"
print("Undulator power_density calculation using US. Please wait...")
h,v,p = srundplug.calc2d_us(bl,fileName=outFile,fileAppend=False,hSlitPoints=HSLITPOINTS,vSlitPoints=VSLITPOINTS,
zero_emittance=zero_emittance)
print("Done")
if METHOD == 1:
code = "URGENT"
print("Undulator power_density calculation using URGENT. Please wait...")
h,v,p = srundplug.calc2d_urgent(bl,fileName=outFile,fileAppend=False,hSlitPoints=HSLITPOINTS,vSlitPoints=VSLITPOINTS,
zero_emittance=zero_emittance)
print("Done")
if METHOD == 2:
code = "SRW"
print("Undulator power_density calculation using SRW. Please wait...")
h,v,p = srundplug.calc2d_srw(bl,fileName=outFile,fileAppend=False,hSlitPoints=HSLITPOINTS,vSlitPoints=VSLITPOINTS,
zero_emittance=zero_emittance)
print("Done")
if zero_emittance:
print("No emittance calculation")
codata_mee = codata.m_e * codata.c**2 / codata.e # electron mass in eV
gamma = ELECTRONENERGY * 1e9 / codata_mee
ptot = (NPERIODS/6) * codata.value('characteristic impedance of vacuum') * \
ELECTRONCURRENT * codata.e * 2 * numpy.pi * codata.c * gamma**2 * (KV**2 + KH**2)/ PERIODID
print ("\nTotal power radiated by the undulator with fully opened slits [W]: %g \n"%(ptot))
if MASK_FLAG:
#
# rotation
#
v /= numpy.cos(MASK_ROT_H_DEG * numpy.pi / 180)
h /= numpy.cos(MASK_ROT_V_DEG * numpy.pi / 180)
# also reduce the power density!!
p *= numpy.cos(MASK_ROT_H_DEG * numpy.pi / 180)
p *= numpy.cos(MASK_ROT_V_DEG * numpy.pi / 180)
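        # A rotation of the mask about the horizontal axis stretches the
        # vertical coordinate by 1/cos (and vice versa for the other axis),
        # while the power density is scaled by the same cosines, so the power
        # integrated over the inclined plane is conserved.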
#
# mask
#
if MASK_H_MIN is not None:
lower_window_h = numpy.where(h < MASK_H_MIN)
if len(lower_window_h) > 0: p[lower_window_h,:] = 0
if MASK_H_MAX is not None:
upper_window_h = numpy.where(h > MASK_H_MAX)
if len(upper_window_h) > 0: p[upper_window_h,:] = 0
if MASK_V_MIN is not None:
lower_window_v = numpy.where(v < MASK_V_MIN)
if len(lower_window_v) > 0: p[:,lower_window_v] = 0
        if MASK_V_MAX is not None:
upper_window_v = numpy.where(v > MASK_V_MAX)
if len(upper_window_v) > 0: p[:,upper_window_v] = 0
txt0 = "============= power density in the modified (masked) screen ==========\n"
else:
txt0 = "=================== power density ======================\n"
text_info = txt0
text_info += " Power density peak: %f W/mm2\n"%p.max()
text_info += " Total power: %f W\n"%(p.sum()*(h[1]-h[0])*(v[1]-v[0]))
text_info += "====================================================\n"
print(text_info)
# fit
fit_ok = False
try:
print("============= Fitting power density to a 2D Gaussian. ==============\n")
print("Please use these results with care: check if the original data looks like a Gaussian.")
fit_parameters = fit_gaussian2d(p,h,v)
print(info_params(fit_parameters))
H,V = numpy.meshgrid(h,v)
data_fitted = twoD_Gaussian( (H,V), *fit_parameters)
print(" Total power in the fitted data [W]: ",data_fitted.sum()*(h[1]-h[0])*(v[1]-v[0]))
# plot_image(data_fitted.reshape((h.size,v.size)),h, v,title="FIT")
print("====================================================\n")
fit_ok = True
except:
pass
if h5_file != "":
try:
if h5_initialize:
h5w = H5SimpleWriter.initialize_file(h5_file,creator="xoppy_undulators.py")
else:
h5w = H5SimpleWriter(h5_file,None)
h5w.create_entry(h5_entry_name,nx_default="PowerDensity")
h5w.add_image(p,h,v,image_name="PowerDensity",entry_name=h5_entry_name,title_x="X [mm]",title_y="Y [mm]")
h5w.add_key("info",text_info, entry_name=h5_entry_name)
h5w.create_entry("parameters",root_entry=h5_entry_name,nx_default=None)
for key in h5_parameters.keys():
h5w.add_key(key,h5_parameters[key], entry_name=h5_entry_name+"/parameters")
if fit_ok:
h5w.add_image(data_fitted.reshape(h.size,v.size),h,v,image_name="PowerDensityFit",entry_name=h5_entry_name,title_x="X [mm]",title_y="Y [mm]")
h5w.add_key("fit_info",info_params(fit_parameters), entry_name=h5_entry_name+"/PowerDensityFit")
print("File written to disk: %s"%h5_file)
except:
print("ERROR initializing h5 file")
return h, v, p, code
def xoppy_calc_undulator_radiation(ELECTRONENERGY=6.04,ELECTRONENERGYSPREAD=0.001,ELECTRONCURRENT=0.2,\
ELECTRONBEAMSIZEH=0.000395,ELECTRONBEAMSIZEV=9.9e-06,\
ELECTRONBEAMDIVERGENCEH=1.05e-05,ELECTRONBEAMDIVERGENCEV=3.9e-06,\
PERIODID=0.018,NPERIODS=222,KV=1.68,KH=0.0,KPHASE=0.0,DISTANCE=30.0,
SETRESONANCE=0,HARMONICNUMBER=1,
GAPH=0.003,GAPV=0.003,GAPH_CENTER=0.0,GAPV_CENTER=0.0,
HSLITPOINTS=41,VSLITPOINTS=41,METHOD=2,
PHOTONENERGYMIN=7982.2,PHOTONENERGYMAX=7983.2,PHOTONENERGYPOINTS=2,
USEEMITTANCES=1,
h5_file="",h5_entry_name="XOPPY_RADIATION",h5_initialize=True,h5_parameters={}):
print("Inside xoppy_calc_undulator_radiation. ")
bl = OrderedDict()
bl['ElectronBeamDivergenceH'] = ELECTRONBEAMDIVERGENCEH
bl['ElectronBeamDivergenceV'] = ELECTRONBEAMDIVERGENCEV
bl['ElectronBeamSizeH'] = ELECTRONBEAMSIZEH
bl['ElectronBeamSizeV'] = ELECTRONBEAMSIZEV
bl['ElectronCurrent'] = ELECTRONCURRENT
bl['ElectronEnergy'] = ELECTRONENERGY
bl['ElectronEnergySpread'] = ELECTRONENERGYSPREAD
bl['Kv'] = KV
bl['Kh'] = KH
bl['Kphase'] = KPHASE
bl['NPeriods'] = NPERIODS
bl['PeriodID'] = PERIODID
bl['distance'] = DISTANCE
bl['gapH'] = GAPH
bl['gapV'] = GAPV
bl['gapHcenter'] = GAPH_CENTER
bl['gapVcenter'] = GAPV_CENTER
if USEEMITTANCES:
zero_emittance = False
else:
zero_emittance = True
gamma = ELECTRONENERGY / (codata_mee * 1e-3)
resonance_wavelength = (1 + (bl['Kv']**2 + bl['Kh']**2)/ 2.0) / 2 / gamma**2 * bl["PeriodID"]
m2ev = codata.c * codata.h / codata.e # lambda(m) = m2eV / energy(eV)
resonance_energy = m2ev / resonance_wavelength
resonance_central_cone = 1.0/gamma*numpy.sqrt( (1+0.5*(KV**2+KH**2))/(2*NPERIODS*HARMONICNUMBER) )
ring_order = 1
resonance_ring = 1.0/gamma*numpy.sqrt( ring_order / HARMONICNUMBER * (1+0.5*(KV**2+KH**2)) )
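    # Rough order of magnitude (approximate, default arguments, first
    # harmonic): with gamma ~ 1.18e4, K = 1.68 and 222 periods, the
    # central-cone half-angle is about (1/gamma)*sqrt(2.41/444) ~ 6e-6 rad,
    # which is the scale used below when SETRESONANCE selects the slit size.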
# autoset energy
if SETRESONANCE == 0:
photonEnergyMin = PHOTONENERGYMIN
photonEnergyMax = PHOTONENERGYMAX
photonEnergyPoints = PHOTONENERGYPOINTS
else:
# referred to resonance
photonEnergyMin = resonance_energy
photonEnergyMax = resonance_energy + 1
photonEnergyPoints = 2
# autoset slit
if SETRESONANCE == 0:
pass
elif SETRESONANCE == 1:
MAXANGLE = 3 * 0.69 * resonance_central_cone
bl['gapH'] = 2 * MAXANGLE * DISTANCE
bl['gapV'] = 2 * MAXANGLE * DISTANCE
elif SETRESONANCE == 2:
MAXANGLE = 2.1 * resonance_ring
bl['gapH'] = 2 * MAXANGLE * DISTANCE
bl['gapV'] = 2 * MAXANGLE * DISTANCE
#TODO SPEC file can be removed
outFile = "undulator_radiation.spec"
# Memorandum:
# e = array with energy in eV
# h = array with horizontal positions in mm
# v = array with vertical positions in mm
    # p = array with photon flux in photons/s/0.1%bw/mm^2 with shape (Ne,Nh,Nv)
if METHOD == 0:
code = "US"
print("Undulator radiation calculation using US. Please wait...")
e,h,v,p = srundplug.calc3d_us(bl,fileName=outFile,fileAppend=False,hSlitPoints=HSLITPOINTS,vSlitPoints=VSLITPOINTS,
photonEnergyMin=photonEnergyMin,photonEnergyMax=photonEnergyMax,
photonEnergyPoints=photonEnergyPoints,zero_emittance=zero_emittance)
if METHOD == 1:
code = "URGENT"
print("Undulator radiation calculation using URGENT. Please wait...")
e,h,v,p = srundplug.calc3d_urgent(bl,fileName=outFile,fileAppend=False,hSlitPoints=HSLITPOINTS,vSlitPoints=VSLITPOINTS,
photonEnergyMin=photonEnergyMin,photonEnergyMax=photonEnergyMax,
photonEnergyPoints=photonEnergyPoints,zero_emittance=zero_emittance)
if METHOD == 2:
code = "SRW"
print("Undulator radiation calculation using SRW. Please wait...")
e,h,v,p = srundplug.calc3d_srw(bl,fileName=outFile,fileAppend=False,hSlitPoints=HSLITPOINTS,vSlitPoints=VSLITPOINTS,
photonEnergyMin=photonEnergyMin,photonEnergyMax=photonEnergyMax,
photonEnergyPoints=photonEnergyPoints,zero_emittance=zero_emittance)
if METHOD == 22:
code = "SRW"
print("Undulator radiation calculation using SRW. Please wait...")
e, h, v, p = srundplug.calc3d_srw_step_by_step(bl, fileName=outFile, fileAppend=False, hSlitPoints=HSLITPOINTS,
vSlitPoints=VSLITPOINTS,
photonEnergyMin=photonEnergyMin, photonEnergyMax=photonEnergyMax,
photonEnergyPoints=photonEnergyPoints, zero_emittance=zero_emittance)
if METHOD == 3:
        # TODO: this backend is currently slow
        code = "pySRU"
        print("Undulator radiation calculation using pySRU. Please wait...")
e,h,v,p = srundplug.calc3d_pysru(bl,fileName=outFile,fileAppend=False,hSlitPoints=HSLITPOINTS,vSlitPoints=VSLITPOINTS,
photonEnergyMin=photonEnergyMin,photonEnergyMax=photonEnergyMax,
photonEnergyPoints=photonEnergyPoints,zero_emittance=zero_emittance)
print ("Gamma: %f \n"%(gamma))
print ("Resonance wavelength (1st harmonic): %g A\n"%(1e10*resonance_wavelength))
print ("Resonance energy (1st harmonic): %g eV\n"%(resonance_energy))
if HARMONICNUMBER != 1:
print ("Resonance wavelength (%d harmonic): %g A\n"%(HARMONICNUMBER,1e10*resonance_wavelength/HARMONICNUMBER))
print ("Resonance energy (%d harmonic): %g eV\n"%(HARMONICNUMBER,HARMONICNUMBER*resonance_energy))
print ("Resonance central cone (%d harmonic): %g urad\n"%(HARMONICNUMBER,1e6*resonance_central_cone))
print ("Resonance first ring (%d harmonic): %g urad\n"%(HARMONICNUMBER,1e6*resonance_ring))
print("Calculated %d photon energy points from %f to %f."%(photonEnergyPoints,photonEnergyMin,photonEnergyMax))
if zero_emittance:
print("No emittance.")
print("Done")
ptot = (NPERIODS/6) * codata.value('characteristic impedance of vacuum') * \
ELECTRONCURRENT * codata.e * 2 * numpy.pi * codata.c * gamma**2 * (KV**2 + KH**2)/ PERIODID
print ("\nTotal power radiated by the undulator with fully opened slits [W]: %f \n"%(ptot))
if SETRESONANCE == 0:
pcalc = p.sum() * codata.e * 1e3 * (h[1]-h[0]) * (v[1]-v[0]) * (e[1]-e[0])
print ("\nTotal power from calculated spectrum (h,v,energy) grid [W]: %f \n"%pcalc)
# fit
try:
print("============= Fitting power density to a 2D Gaussian. ==============\n")
print("Please use these results with care: check if the original data looks like a Gaussian.\n")
print("Length units are mm")
data_to_fit = p.sum(axis=0)*(e[1]-e[0])*codata.e*1e3
fit_parameters = fit_gaussian2d(data_to_fit,h,v)
print(info_params(fit_parameters))
H,V = numpy.meshgrid(h,v)
data_fitted = twoD_Gaussian( (H,V), *fit_parameters)
print(" Total power in the fitted data [W]: ",data_fitted.sum()*(h[1]-h[0])*(v[1]-v[0]))
# plot_image(data_fitted.reshape((h.size,v.size)),h, v,title="FIT")
print("====================================================\n")
except:
pass
if h5_file != "":
try:
if h5_initialize:
h5w = H5SimpleWriter.initialize_file(h5_file,creator="xoppy_undulators.py")
else:
h5w = H5SimpleWriter(h5_file,None)
h5w.create_entry(h5_entry_name,nx_default=None)
h5w.add_stack(e,h,v,p,stack_name="Radiation",entry_name=h5_entry_name,
title_0="Photon energy [eV]",
title_1="X gap [mm]",
title_2="Y gap [mm]")
h5w.create_entry("parameters",root_entry=h5_entry_name,nx_default=None)
for key in h5_parameters.keys():
h5w.add_key(key,h5_parameters[key], entry_name=h5_entry_name+"/parameters")
print("File written to disk: %s"%h5_file)
except:
print("ERROR initializing h5 file")
return e, h, v, p, code
if __name__ == "__main__":
from srxraylib.plot.gol import plot,plot_image
e, f, spectral_power, cumulated_power = xoppy_calc_undulator_spectrum()
plot(e,f)
h, v, p, code = xoppy_calc_undulator_power_density(h5_file="test.h5",h5_initialize=True)
plot_image(p,h,v)
e, h, v, p, code = xoppy_calc_undulator_radiation(ELECTRONENERGY=6.0, h5_file="test.h5",h5_entry_name="first_entry",h5_initialize=True)
e, h, v, p, code = xoppy_calc_undulator_radiation(ELECTRONENERGY=7.0, h5_file="test.h5",h5_entry_name="second_entry",h5_initialize=False)
|
bsd-2-clause
| 8,465,278,814,384,543,000 | 44.966814 | 157 | 0.58892 | false |
anhaidgroup/py_stringsimjoin
|
py_stringsimjoin/join/overlap_coefficient_join_py.py
|
1
|
17453
|
# overlap coefficient join
from joblib import delayed, Parallel
from six import iteritems
import pandas as pd
import pyprind
from py_stringsimjoin.filter.overlap_filter import OverlapFilter
from py_stringsimjoin.index.inverted_index import InvertedIndex
from py_stringsimjoin.utils.generic_helper import convert_dataframe_to_array, \
find_output_attribute_indices, get_attrs_to_project, \
get_num_processes_to_launch, get_output_header_from_tables, \
get_output_row_from_tables, remove_redundant_attrs, split_table, COMP_OP_MAP
from py_stringsimjoin.utils.missing_value_handler import \
get_pairs_with_missing_value
from py_stringsimjoin.utils.validation import validate_attr, \
validate_attr_type, validate_comp_op_for_sim_measure, validate_key_attr, \
validate_input_table, validate_threshold, validate_tokenizer, \
validate_output_attrs
def overlap_coefficient_join_py(ltable, rtable,
l_key_attr, r_key_attr,
l_join_attr, r_join_attr,
tokenizer, threshold, comp_op='>=',
allow_empty=True, allow_missing=False,
l_out_attrs=None, r_out_attrs=None,
l_out_prefix='l_', r_out_prefix='r_',
out_sim_score=True, n_jobs=1, show_progress=True):
"""Join two tables using overlap coefficient.
For two sets X and Y, the overlap coefficient between them is given by:
:math:`overlap\\_coefficient(X, Y) = \\frac{|X \\cap Y|}{\\min(|X|, |Y|)}`
In the case where one of X and Y is an empty set and the other is a
non-empty set, we define their overlap coefficient to be 0. In the case
where both X and Y are empty sets, we define their overlap coefficient to
be 1.
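    For example (purely illustrative token sets), if X = {'aa', 'bb', 'cc', 'dd'}
    and Y = {'cc', 'ee'}, the intersection contains a single token and
    min(|X|, |Y|) = 2, so the overlap coefficient of X and Y is 0.5.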
Finds tuple pairs from left table and right table such that the overlap
coefficient between the join attributes satisfies the condition on input
threshold. For example, if the comparison operator is '>=', finds tuple
pairs whose overlap coefficient between the strings that are the values of
the join attributes is greater than or equal to the input threshold, as
specified in "threshold".
Args:
ltable (DataFrame): left input table.
rtable (DataFrame): right input table.
l_key_attr (string): key attribute in left table.
r_key_attr (string): key attribute in right table.
l_join_attr (string): join attribute in left table.
r_join_attr (string): join attribute in right table.
tokenizer (Tokenizer): tokenizer to be used to tokenize join
attributes.
threshold (float): overlap coefficient threshold to be satisfied.
comp_op (string): comparison operator. Supported values are '>=', '>'
and '=' (defaults to '>=').
allow_empty (boolean): flag to indicate whether tuple pairs with empty
set of tokens in both the join attributes should be included in the
output (defaults to True).
allow_missing (boolean): flag to indicate whether tuple pairs with
missing value in at least one of the join attributes should be
included in the output (defaults to False). If this flag is set to
True, a tuple in ltable with missing value in the join attribute
will be matched with every tuple in rtable and vice versa.
l_out_attrs (list): list of attribute names from the left table to be
included in the output table (defaults to None).
r_out_attrs (list): list of attribute names from the right table to be
included in the output table (defaults to None).
l_out_prefix (string): prefix to be used for the attribute names coming
from the left table, in the output table (defaults to 'l\_').
r_out_prefix (string): prefix to be used for the attribute names coming
from the right table, in the output table (defaults to 'r\_').
out_sim_score (boolean): flag to indicate whether similarity score
should be included in the output table (defaults to True). Setting
this flag to True will add a column named '_sim_score' in the
output table. This column will contain the similarity scores for the
tuple pairs in the output.
n_jobs (int): number of parallel jobs to use for the computation
(defaults to 1). If -1 is given, all CPUs are used. If 1 is given,
no parallel computing code is used at all, which is useful for
debugging. For n_jobs below -1, (n_cpus + 1 + n_jobs) are used
(where n_cpus is the total number of CPUs in the machine). Thus for
n_jobs = -2, all CPUs but one are used. If (n_cpus + 1 + n_jobs)
becomes less than 1, then no parallel computing code will be used
(i.e., equivalent to the default).
show_progress (boolean): flag to indicate whether task progress should
be displayed to the user (defaults to True).
Returns:
An output table containing tuple pairs that satisfy the join
condition (DataFrame).
"""
# check if the input tables are dataframes
validate_input_table(ltable, 'left table')
validate_input_table(rtable, 'right table')
# check if the key attributes and join attributes exist
validate_attr(l_key_attr, ltable.columns,
'key attribute', 'left table')
validate_attr(r_key_attr, rtable.columns,
'key attribute', 'right table')
validate_attr(l_join_attr, ltable.columns,
'join attribute', 'left table')
validate_attr(r_join_attr, rtable.columns,
'join attribute', 'right table')
# check if the join attributes are not of numeric type
validate_attr_type(l_join_attr, ltable[l_join_attr].dtype,
'join attribute', 'left table')
validate_attr_type(r_join_attr, rtable[r_join_attr].dtype,
'join attribute', 'right table')
# check if the input tokenizer is valid
validate_tokenizer(tokenizer)
# check if the input threshold is valid
validate_threshold(threshold, 'OVERLAP_COEFFICIENT')
# check if the comparison operator is valid
validate_comp_op_for_sim_measure(comp_op, 'OVERLAP_COEFFICIENT')
# check if the output attributes exist
validate_output_attrs(l_out_attrs, ltable.columns,
r_out_attrs, rtable.columns)
# check if the key attributes are unique and do not contain missing values
validate_key_attr(l_key_attr, ltable, 'left table')
validate_key_attr(r_key_attr, rtable, 'right table')
# set return_set flag of tokenizer to be True, in case it is set to False
revert_tokenizer_return_set_flag = False
if not tokenizer.get_return_set():
tokenizer.set_return_set(True)
revert_tokenizer_return_set_flag = True
# remove redundant attrs from output attrs.
l_out_attrs = remove_redundant_attrs(l_out_attrs, l_key_attr)
r_out_attrs = remove_redundant_attrs(r_out_attrs, r_key_attr)
# get attributes to project.
l_proj_attrs = get_attrs_to_project(l_out_attrs, l_key_attr, l_join_attr)
r_proj_attrs = get_attrs_to_project(r_out_attrs, r_key_attr, r_join_attr)
# Do a projection on the input dataframes to keep only the required
# attributes. Then, remove rows with missing value in join attribute from
# the input dataframes. Then, convert the resulting dataframes into ndarray.
ltable_array = convert_dataframe_to_array(ltable, l_proj_attrs, l_join_attr)
rtable_array = convert_dataframe_to_array(rtable, r_proj_attrs, r_join_attr)
# computes the actual number of jobs to launch.
n_jobs = min(get_num_processes_to_launch(n_jobs), len(rtable_array))
if n_jobs <= 1:
# if n_jobs is 1, do not use any parallel code.
output_table = _overlap_coefficient_join_split(
ltable_array, rtable_array,
l_proj_attrs, r_proj_attrs,
l_key_attr, r_key_attr,
l_join_attr, r_join_attr,
tokenizer, threshold, comp_op,
allow_empty,
l_out_attrs, r_out_attrs,
l_out_prefix, r_out_prefix,
out_sim_score, show_progress)
else:
# if n_jobs is above 1, split the right table into n_jobs splits and
# join each right table split with the whole of left table in a separate
# process.
r_splits = split_table(rtable_array, n_jobs)
results = Parallel(n_jobs=n_jobs)(
delayed(_overlap_coefficient_join_split)(
ltable_array, r_splits[job_index],
l_proj_attrs, r_proj_attrs,
l_key_attr, r_key_attr,
l_join_attr, r_join_attr,
tokenizer, threshold, comp_op,
allow_empty,
l_out_attrs, r_out_attrs,
l_out_prefix, r_out_prefix,
out_sim_score,
(show_progress and (job_index==n_jobs-1)))
for job_index in range(n_jobs))
output_table = pd.concat(results)
# If allow_missing flag is set, then compute all pairs with missing value in
# at least one of the join attributes and then add it to the output
# obtained from the join.
if allow_missing:
missing_pairs = get_pairs_with_missing_value(
ltable, rtable,
l_key_attr, r_key_attr,
l_join_attr, r_join_attr,
l_out_attrs, r_out_attrs,
l_out_prefix, r_out_prefix,
out_sim_score, show_progress)
output_table = pd.concat([output_table, missing_pairs])
# add an id column named '_id' to the output table.
output_table.insert(0, '_id', range(0, len(output_table)))
# revert the return_set flag of tokenizer, in case it was modified.
if revert_tokenizer_return_set_flag:
tokenizer.set_return_set(False)
return output_table
def _overlap_coefficient_join_split(ltable_list, rtable_list,
l_columns, r_columns,
l_key_attr, r_key_attr,
l_join_attr, r_join_attr,
tokenizer, threshold, comp_op,
allow_empty,
l_out_attrs, r_out_attrs,
l_out_prefix, r_out_prefix,
out_sim_score, show_progress):
"""Perform overlap coefficient join for a split of ltable and rtable"""
# find column indices of key attr, join attr and output attrs in ltable
l_key_attr_index = l_columns.index(l_key_attr)
l_join_attr_index = l_columns.index(l_join_attr)
l_out_attrs_indices = find_output_attribute_indices(l_columns, l_out_attrs)
# find column indices of key attr, join attr and output attrs in rtable
r_key_attr_index = r_columns.index(r_key_attr)
r_join_attr_index = r_columns.index(r_join_attr)
r_out_attrs_indices = find_output_attribute_indices(r_columns, r_out_attrs)
# Build inverted index over ltable
inverted_index = InvertedIndex(ltable_list, l_join_attr_index,
tokenizer, cache_size_flag=True)
# While building the index, we cache the record ids with empty set of
# tokens. This is needed to handle the allow_empty flag.
cached_data = inverted_index.build(allow_empty)
l_empty_records = cached_data['empty_records']
overlap_filter = OverlapFilter(tokenizer, 1)
comp_fn = COMP_OP_MAP[comp_op]
output_rows = []
has_output_attributes = (l_out_attrs is not None or
r_out_attrs is not None)
if show_progress:
prog_bar = pyprind.ProgBar(len(rtable_list))
for r_row in rtable_list:
r_string = r_row[r_join_attr_index]
r_join_attr_tokens = tokenizer.tokenize(r_string)
r_num_tokens = len(r_join_attr_tokens)
# If allow_empty flag is set and the current rtable record has empty set
# of tokens in the join attribute, then generate output pairs joining
# the current rtable record with those records in ltable with empty set
# of tokens in the join attribute. These ltable record ids are cached in
# l_empty_records list which was constructed when building the inverted
# index.
if allow_empty and r_num_tokens == 0:
for l_id in l_empty_records:
if has_output_attributes:
output_row = get_output_row_from_tables(
ltable_list[l_id], r_row,
l_key_attr_index, r_key_attr_index,
l_out_attrs_indices,
r_out_attrs_indices)
else:
output_row = [ltable_list[l_id][l_key_attr_index],
r_row[r_key_attr_index]]
if out_sim_score:
output_row.append(1.0)
output_rows.append(output_row)
continue
# probe inverted index and find overlap of candidates
candidate_overlap = overlap_filter.find_candidates(
r_join_attr_tokens, inverted_index)
for cand, overlap in iteritems(candidate_overlap):
# compute the actual similarity score
sim_score = (float(overlap) /
float(min(r_num_tokens,
inverted_index.size_cache[cand])))
if comp_fn(sim_score, threshold):
if has_output_attributes:
output_row = get_output_row_from_tables(
ltable_list[cand], r_row,
l_key_attr_index, r_key_attr_index,
l_out_attrs_indices, r_out_attrs_indices)
else:
output_row = [ltable_list[cand][l_key_attr_index],
r_row[r_key_attr_index]]
# if out_sim_score flag is set, append the overlap coefficient
# score to the output record.
if out_sim_score:
output_row.append(sim_score)
output_rows.append(output_row)
if show_progress:
prog_bar.update()
output_header = get_output_header_from_tables(l_key_attr, r_key_attr,
l_out_attrs, r_out_attrs,
l_out_prefix, r_out_prefix)
if out_sim_score:
output_header.append("_sim_score")
output_table = pd.DataFrame(output_rows, columns=output_header)
return output_table
|
bsd-3-clause
| 102,006,608,460,075,310 | 51.569277 | 127 | 0.518134 | false |
hemberger/MechanicalSoup
|
tests/utils.py
|
1
|
3376
|
import mechanicalsoup
import requests_mock
from distutils.version import StrictVersion
import bs4
try:
from urllib.parse import parse_qsl
except ImportError:
from urlparse import parse_qsl
"""
Utilities for testing MechanicalSoup.
"""
choose_submit_form = '''
<html>
<body>
<!-- vaguely based on Trac edit-page form -->
<form id="choose-submit-form" method="post" action="mock://form.com/post">
<textarea id="text" class="wikitext trac-resizable" name="text"
cols="80" rows="40">
</textarea>
<div class="field">
<label>Comment about this change (optional):<br />
<input id="comment" type="text" name="comment" size="80" value="" />
</label>
</div>
<div class="buttons">
<input type="submit" name="preview" value="Preview Page" />
<input type="submit" name="diff" value="Review Changes" />
<input type="submit" id="save" name="save" value="Submit changes" />
<button type="submit" name="cancel" value="Cancel" />
</div>
</form>
</body>
</html>
'''
def setup_mock_browser(expected_post=None, text=choose_submit_form):
url = 'mock://form.com'
browser, mock = prepare_mock_browser()
mock_get(mock, url, text)
if expected_post is not None:
mock_post(mock, url + '/post', expected_post)
return browser, url
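# Rough usage sketch (hypothetical test code, not part of this module):
#   browser, url = setup_mock_browser(expected_post=[...])
#   browser.open(url)
#   browser.select_form('#choose-submit-form')
#   browser.submit_selected()
# where expected_post lists every (name, value) pair the submitted form is
# expected to post to url + '/post'.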
def prepare_mock_browser(scheme='mock'):
mock = requests_mock.Adapter()
browser = mechanicalsoup.StatefulBrowser(requests_adapters={scheme: mock})
return browser, mock
def mock_get(mocked_adapter, url, reply, content_type='text/html', **kwargs):
headers = {'Content-Type': content_type}
mocked_adapter.register_uri('GET', url, headers=headers, text=reply,
**kwargs)
def mock_post(mocked_adapter, url, expected, reply='Success!'):
def text_callback(request, context):
# Python 2's parse_qsl doesn't like None argument
query = parse_qsl(request.text) if request.text else []
# In bs4 4.7.0+, CSS selectors return elements in page order,
# but did not in earlier versions.
if StrictVersion(bs4.__version__) >= StrictVersion('4.7.0'):
assert query == expected
else:
assert sorted(query) == sorted(expected)
return reply
mocked_adapter.register_uri('POST', url, text=text_callback)
class HttpbinRemote:
"""Drop-in replacement for pytest-httpbin's httpbin fixture
that uses the remote httpbin server instead of a local one."""
def __init__(self):
self.url = "http://httpbin.org"
def __add__(self, x):
return self.url + x
def open_legacy_httpbin(browser, httpbin):
"""Opens the start page of httpbin (given as a fixture). Tries the
legacy page (available only on recent versions of httpbin), and if
it fails fall back to the main page (which is JavaScript-only in
recent versions of httpbin hence usable for us only on old
versions).
"""
try:
response = browser.open(httpbin + "/legacy")
if response.status_code == 404:
# The line above may or may not have raised the exception
# depending on raise_on_404. Raise it unconditionally now.
raise mechanicalsoup.LinkNotFoundError()
except mechanicalsoup.LinkNotFoundError:
browser.open(httpbin.url)
|
mit
| -6,715,178,474,680,914,000 | 32.098039 | 78 | 0.641884 | false |
yifeng-li/DECRES
|
cA.py
|
1
|
13116
|
"""
A contractive autoencoder module, modified
from the Deep Learning Tutorials (www.deeplearning.net/tutorial/).
Copyright (c) 2008-2013, Theano Development Team All rights reserved.
Modified by Yifeng Li
CMMT, UBC, Vancouver
Sep 23, 2014
Contact: yifeng.li.cn@gmail.com
"""
from __future__ import division
import time
import numpy
import theano
import theano.tensor as T
import classification as cl
class cA(object):
""" Contractive Auto-Encoder class (cA)
The contractive autoencoder tries to reconstruct the input with an
additional constraint on the latent space. With the objective of
obtaining a robust representation of the input space, we
    regularize the L2 (Frobenius) norm of the Jacobian of the hidden
    representation with respect to the input. Please refer to Rifai et
    al., 2011 for more details.
If x is the input then equation (1) computes the projection of the
input into the latent space h. Equation (2) computes the jacobian
of h with respect to x. Equation (3) computes the reconstruction
of the input, while equation (4) computes the reconstruction
error and the added regularization term from Eq.(2).
.. math::
h_i = s(W_i x + b_i) (1)
J_i = h_i (1 - h_i) * W_i (2)
x' = s(W' h + b') (3)
L = -sum_{k=1}^d [x_k \log x'_k + (1-x_k) \log( 1-x'_k)]
+ lambda * sum_{i=1}^d sum_{j=1}^n J_{ij}^2 (4)
"""
def __init__(self, numpy_rng, input=None, n_visible=784, n_hidden=100,
n_batchsize=1, W=None, bhid=None, bvis=None):
"""Initialize the cA class by specifying the number of visible units (the
dimension d of the input ), the number of hidden units ( the dimension
d' of the latent or hidden space ) and the contraction level. The
constructor also receives symbolic variables for the input, weights and
bias.
:type numpy_rng: numpy.random.RandomState
:param numpy_rng: number random generator used to generate weights
:type theano_rng: theano.tensor.shared_randomstreams.RandomStreams
:param theano_rng: Theano random generator; if None is given
one is generated based on a seed drawn from `rng`
:type input: theano.tensor.TensorType
:param input: a symbolic description of the input or None for
standalone cA
:type n_visible: int
:param n_visible: number of visible units
:type n_hidden: int
:param n_hidden: number of hidden units
        :type n_batchsize: int
:param n_batchsize: number of examples per batch
:type W: theano.tensor.TensorType
:param W: Theano variable pointing to a set of weights that should be
shared belong the dA and another architecture; if dA should
be standalone set this to None
:type bhid: theano.tensor.TensorType
:param bhid: Theano variable pointing to a set of biases values (for
hidden units) that should be shared belong dA and another
architecture; if dA should be standalone set this to None
:type bvis: theano.tensor.TensorType
:param bvis: Theano variable pointing to a set of biases values (for
visible units) that should be shared belong dA and another
architecture; if dA should be standalone set this to None
"""
self.n_visible = n_visible
self.n_hidden = n_hidden
self.n_batchsize = n_batchsize
# note : W' was written as `W_prime` and b' as `b_prime`
if not W:
            # W is initialized with `initial_W`, which is uniformly sampled
            # from -4*sqrt(6./(n_visible+n_hidden)) to
            # 4*sqrt(6./(n_hidden+n_visible)); the output of uniform is
            # converted using asarray to dtype theano.config.floatX so that
            # the code is runnable on GPU
initial_W = numpy.asarray(numpy_rng.uniform(
low=-4 * numpy.sqrt(6. / (n_hidden + n_visible)),
high=4 * numpy.sqrt(6. / (n_hidden + n_visible)),
size=(n_visible, n_hidden)),
dtype=theano.config.floatX)
W = theano.shared(value=initial_W, name='W', borrow=True)
if not bvis:
bvis = theano.shared(value=numpy.zeros(n_visible,
dtype=theano.config.floatX),
borrow=True)
if not bhid:
bhid = theano.shared(value=numpy.zeros(n_hidden,
dtype=theano.config.floatX),
name='b',
borrow=True)
self.W = W
# b corresponds to the bias of the hidden
self.b = bhid
# b_prime corresponds to the bias of the visible
self.b_prime = bvis
# tied weights, therefore W_prime is W transpose
self.W_prime = self.W.T
# if no input is given, generate a variable representing the input
        if input is None:
# we use a matrix because we expect a minibatch of several
# examples, each example being a row
self.x = T.dmatrix(name='input')
else:
self.x = input
self.params = [self.W, self.b, self.b_prime]
def get_hidden_values(self, input):
""" Computes the values of the hidden layer """
return T.nnet.sigmoid(T.dot(input, self.W) + self.b)
def get_jacobian(self, hidden, W):
"""Computes the jacobian of the hidden layer with respect to
the input, reshapes are necessary for broadcasting the
element-wise product on the right axis
"""
return T.reshape(hidden * (1 - hidden),
(self.n_batchsize, 1, self.n_hidden)) * T.reshape(
W, (1, self.n_visible, self.n_hidden))
def get_reconstructed_input(self, hidden):
"""Computes the reconstructed input given the values of the
hidden layer
"""
return T.nnet.sigmoid(T.dot(hidden, self.W_prime) + self.b_prime)
def get_cost_updates(self, contraction_level, learning_rate, cost_measure="cross_entropy"):
""" This function computes the cost and the updates for one trainng
step of the cA """
y = self.get_hidden_values(self.x)
z = self.get_reconstructed_input(y)
J = self.get_jacobian(y, self.W)
if cost_measure=="cross_entropy":
#self.L_rec = - T.sum(self.x * T.log(z) + (1 - self.x) * T.log(1 - z), axis=1)
self.L_rec = T.mean(- T.sum(self.x * T.log(z) + (1 - self.x) * T.log(1 - z),axis=1))
elif cost_measure=="euclidean":
self.L_rec = T.mean(T.sum((self.x-z)**2,axis=1))
# Compute the jacobian and average over the number of samples/minibatch
self.L_jacob = T.mean(T.sum(J ** 2) / self.n_batchsize)
cost = self.L_rec + contraction_level * self.L_jacob
# compute the gradients of the cost of the `cA` with respect
# to its parameters
gparams = T.grad(cost, self.params)
# generate the list of updates
updates = []
for param, gparam in zip(self.params, gparams):
updates.append((param, param - learning_rate * gparam))
return (cost, updates)
def train_model(train_set_x_org=None, training_epochs=1000, batch_size=100,
n_hidden=10,learning_rate=0.1,contraction_level=0.1,
cost_measure="cross_entropy", rng=numpy.random.RandomState(100)):
"""
Train a contractive autoencoder.
INPUTS:
train_set_x_org: numpy 2d array, each row is a training sample.
training_epochs: int scalar, the maximal number of epochs.
batch_size: int scalar, minibatch size.
n_hidden: int scalar, number of hidden units
learning_rate: float scalar, the initial learning rate.
    contraction_level: float scalar, weight of the contraction (Jacobian) penalty.
    cost_measure: string from {"cross_entropy", "euclidean"}, measure used to compute the reconstruction cost.
rng: numpy random number state.
OUTPUTS:
ca: object of cA, the model learned, returned for testing.
train_set_x_extracted: reduced training set.
training_time: float, training time in seconds.
"""
train_set_x = theano.shared(numpy.asarray(train_set_x_org,dtype=theano.config.floatX),borrow=True)
#train_set_y = T.cast(theano.shared(numpy.asarray(train_set_y_org,dtype=theano.config.floatX),borrow=True),'int32')
n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size
#n_train_batches = int(math.ceil(train_set_x.get_value(borrow=True).shape[0] / batch_size))
# shared variable to reduce the learning rate
learning_rate_shared=theano.shared(learning_rate,name='learn_rate_shared')
# learning_rate_init=T.scalar(name='learning_rate_init',dtype=theano.config.floatX)
# epoch_variable=T.iscalar(name='epoch_variable')
decay_rate=T.scalar(name='decay_rate',dtype=theano.config.floatX)
# compute_learn_rate=theano.function([learning_rate_init,epoch_variable,decay_rate],learning_rate_shared, \
# updates=[(learning_rate_shared,learning_rate_init*decay_rate**(epoch_variable//100))]) # thenao does not support math.pow, instead use T.pow() or a**b
reduce_learning_rate=theano.function([decay_rate],learning_rate_shared,updates=[(learning_rate_shared,learning_rate_shared*decay_rate)])
n_visible=train_set_x_org.shape[1] # number of input features
# define the model
x=T.matrix(name='x',dtype=theano.config.floatX) # define a symbol for the input data (training, validation, or test data)
ca=cA(numpy_rng=rng, input=x, n_visible=n_visible, n_hidden=n_hidden,n_batchsize=batch_size)
# get the formula of the cost and updates
cost,updates=ca.get_cost_updates(contraction_level=contraction_level, learning_rate=learning_rate,
cost_measure=cost_measure)
index=T.lscalar() # symbol for the index
# define a function to update the cost and model parameters using the formula above
train_ca_one_iteration=theano.function([index], [ca.L_rec, ca.L_jacob], updates=updates,
givens={x:train_set_x[index*batch_size:(index+1)*batch_size]})
max_num_epoch_change_learning_rate=100
max_num_epoch_not_improve=3*max_num_epoch_change_learning_rate
max_num_epoch_change_rate=0.8
learning_rate_decay_rate=0.8
epoch_change_count=0
best_cost=numpy.inf
# train the model using training set
start_time=time.clock()
for epoch in xrange(training_epochs):
c=[] # costs of all minibatches of this epoch
epoch_change_count=epoch_change_count+1
if epoch_change_count % max_num_epoch_change_learning_rate ==0:
reduce_learning_rate(learning_rate_decay_rate)
max_num_epoch_change_learning_rate= \
cl.change_max_num_epoch_change_learning_rate(max_num_epoch_change_learning_rate,max_num_epoch_change_rate)
max_num_epoch_not_improve=3*max_num_epoch_change_learning_rate
epoch_change_count=0
for batch_index in xrange(n_train_batches):
c_batch,j_batch=train_ca_one_iteration(batch_index)
c.append(c_batch)
this_cost=numpy.mean(c)
        print 'Training epoch: %d, cost: %f' % (epoch,this_cost)
if this_cost<best_cost:
best_cost=this_cost
num_epoch_not_improve=0
        else:
num_epoch_not_improve=num_epoch_not_improve+1
if num_epoch_not_improve>=max_num_epoch_not_improve:
break
end_time=time.clock()
training_time=end_time-start_time
    print 'Training time: %f minutes' % (training_time/60)
# return the trained model and the reduced training set
extracted=ca.get_hidden_values(train_set_x)
get_extracted=theano.function([],extracted)
train_set_x_extracted=get_extracted()
return ca, train_set_x_extracted, training_time
def test_model(model_trained,test_set_x_org=None):
"""
Reduce the dimensionality of given data.
INPUTS:
model_trained: object of dA, model learned by "train_model".
test_set_x_org: numpy 2d array, each row is an input sample.
OUTPUTS:
test_set_x_extracted, numpy 2d array, each row is a reduced sample in the feature space.
"""
test_set_x=theano.shared(numpy.asarray(test_set_x_org,dtype=theano.config.floatX),borrow=True)
extracted=model_trained.get_hidden_values(test_set_x)
get_extracted=theano.function([],extracted)
test_set_x_extracted=get_extracted()
return test_set_x_extracted
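# Hypothetical usage sketch (an addition for illustration, not part of the
# original module): train_model/test_model take plain NumPy arrays, so a
# typical workflow looks like the guarded example below. The random data and
# the hyperparameter values are assumptions chosen only to show the call
# shapes; a real run needs Theano configured as the module expects.
if __name__ == '__main__':
    example_rng = numpy.random.RandomState(100)
    train_x = example_rng.uniform(size=(500, 30))   # 500 samples, 30 features
    test_x = example_rng.uniform(size=(100, 30))
    # fit the contractive autoencoder and obtain the reduced training set
    model, train_x_reduced, train_time = train_model(
        train_set_x_org=train_x, training_epochs=200, batch_size=50,
        n_hidden=10, learning_rate=0.1, contraction_level=0.1,
        cost_measure="cross_entropy", rng=example_rng)
    # project held-out data into the 10-dimensional hidden space
    test_x_reduced = test_model(model, test_x)
    print 'Reduced test set shape:', test_x_reduced.shape   # (100, 10)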
|
bsd-3-clause
| 3,418,660,605,187,341,300 | 42.72 | 155 | 0.618786 | false |
google/nsscache
|
nss_cache/caches/files_test.py
|
1
|
10874
|
# Copyright 2007 Google Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Unit tests for nss_cache/caches/files.py."""
__author__ = ('jaq@google.com (Jamie Wilkinson)',
'vasilios@google.com (Vasilios Hoffman)')
import os
import shutil
import tempfile
import unittest
import sys
from mox3 import mox
from nss_cache import config
from nss_cache.maps import automount
from nss_cache.maps import group
from nss_cache.maps import netgroup
from nss_cache.maps import passwd
from nss_cache.maps import shadow
from nss_cache.caches import files
class TestFilesCache(mox.MoxTestBase):
def setUp(self):
super(TestFilesCache, self).setUp()
self.workdir = tempfile.mkdtemp()
self.config = {'dir': self.workdir}
def tearDown(self):
super(TestFilesCache, self).tearDown()
shutil.rmtree(self.workdir)
def testInstantiation(self):
cache = files.FilesCache(self.config, config.MAP_PASSWORD)
self.assertNotEqual(None, cache)
def testWrite(self):
cache = files.FilesPasswdMapHandler(self.config)
entry = passwd.PasswdMapEntry({'name': 'foo', 'uid': 10, 'gid': 10})
pmap = passwd.PasswdMap([entry])
written = cache.Write(pmap)
self.assertTrue('foo' in written)
self.assertFalse(entry in pmap) # we emptied pmap to avoid mem leaks
self.assertFalse(cache.temp_cache_file.closed)
def testCacheFilenameSuffixOption(self):
new_config = {'cache_filename_suffix': 'blarg'}
new_config.update(self.config)
cache = files.FilesCache(new_config, config.MAP_PASSWORD)
cache.CACHE_FILENAME = 'test'
self.assertEqual(os.path.join(self.workdir, 'test.blarg'),
cache.GetCacheFilename())
cache.temp_cache_file = open(os.path.join(self.workdir, 'pre-commit'),
'w')
cache.temp_cache_file.write('\n')
cache.temp_cache_filename = os.path.join(self.workdir, 'pre-commit')
cache._Commit()
expected_cache_filename = os.path.join(self.workdir, 'test.blarg')
self.assertTrue(os.path.exists(expected_cache_filename))
def testWritePasswdEntry(self):
"""We correctly write a typical entry in /etc/passwd format."""
cache = files.FilesPasswdMapHandler(self.config)
file_mock = self.mox.CreateMock(sys.stdout)
file_mock.write(b'root:x:0:0:Rootsy:/root:/bin/bash\n')
map_entry = passwd.PasswdMapEntry()
map_entry.name = 'root'
map_entry.passwd = 'x'
map_entry.uid = 0
map_entry.gid = 0
map_entry.gecos = 'Rootsy'
map_entry.dir = '/root'
map_entry.shell = '/bin/bash'
self.mox.ReplayAll()
cache._WriteData(file_mock, map_entry)
def testWriteGroupEntry(self):
"""We correctly write a typical entry in /etc/group format."""
cache = files.FilesGroupMapHandler(self.config)
file_mock = self.mox.CreateMock(sys.stdout)
file_mock.write(b'root:x:0:zero_cool,acid_burn\n')
map_entry = group.GroupMapEntry()
map_entry.name = 'root'
map_entry.passwd = 'x'
map_entry.gid = 0
map_entry.members = ['zero_cool', 'acid_burn']
self.mox.ReplayAll()
cache._WriteData(file_mock, map_entry)
def testWriteShadowEntry(self):
"""We correctly write a typical entry in /etc/shadow format."""
cache = files.FilesShadowMapHandler(self.config)
file_mock = self.mox.CreateMock(sys.stdout)
file_mock.write(b'root:$1$zomgmd5support:::::::\n')
map_entry = shadow.ShadowMapEntry()
map_entry.name = 'root'
map_entry.passwd = '$1$zomgmd5support'
self.mox.ReplayAll()
cache._WriteData(file_mock, map_entry)
def testWriteNetgroupEntry(self):
"""We correctly write a typical entry in /etc/netgroup format."""
cache = files.FilesNetgroupMapHandler(self.config)
file_mock = self.mox.CreateMock(sys.stdout)
file_mock.write(
b'administrators unix_admins noc_monkeys (-,zero_cool,)\n')
map_entry = netgroup.NetgroupMapEntry()
map_entry.name = 'administrators'
map_entry.entries = 'unix_admins noc_monkeys (-,zero_cool,)'
self.mox.ReplayAll()
cache._WriteData(file_mock, map_entry)
def testWriteAutomountEntry(self):
"""We correctly write a typical entry in /etc/auto.* format."""
cache = files.FilesAutomountMapHandler(self.config)
file_mock = self.mox.CreateMock(sys.stdout)
file_mock.write(b'scratch -tcp,rw,intr,bg fileserver:/scratch\n')
map_entry = automount.AutomountMapEntry()
map_entry.key = 'scratch'
map_entry.options = '-tcp,rw,intr,bg'
map_entry.location = 'fileserver:/scratch'
self.mox.ReplayAll()
cache._WriteData(file_mock, map_entry)
self.mox.VerifyAll()
file_mock = self.mox.CreateMock(sys.stdout)
file_mock.write(b'scratch fileserver:/scratch\n')
map_entry = automount.AutomountMapEntry()
map_entry.key = 'scratch'
map_entry.options = None
map_entry.location = 'fileserver:/scratch'
self.mox.ReplayAll()
cache._WriteData(file_mock, map_entry)
def testAutomountSetsFilename(self):
"""We set the correct filename based on mountpoint information."""
# also tests GetMapLocation() because it uses it :)
conf = {'dir': self.workdir, 'cache_filename_suffix': ''}
cache = files.FilesAutomountMapHandler(conf)
self.assertEqual(cache.GetMapLocation(),
'%s/auto.master' % self.workdir)
cache = files.FilesAutomountMapHandler(conf,
automount_mountpoint='/home')
self.assertEqual(cache.GetMapLocation(), '%s/auto.home' % self.workdir)
cache = files.FilesAutomountMapHandler(conf,
automount_mountpoint='/usr/meh')
self.assertEqual(cache.GetMapLocation(),
'%s/auto.usr_meh' % self.workdir)
def testCacheFileDoesNotExist(self):
"""Make sure we just get an empty map rather than exception."""
conf = {'dir': self.workdir, 'cache_filename_suffix': ''}
cache = files.FilesAutomountMapHandler(conf)
self.assertFalse(
os.path.exists(os.path.join(self.workdir, 'auto.master')))
data = cache.GetMap()
self.assertFalse(data)
def testIndexCreation(self):
cache = files.FilesPasswdMapHandler(self.config)
entries = [
passwd.PasswdMapEntry(dict(name='foo', uid=10, gid=10)),
passwd.PasswdMapEntry(dict(name='bar', uid=11, gid=11)),
passwd.PasswdMapEntry(dict(name='quux', uid=12, gid=11)),
]
pmap = passwd.PasswdMap(entries)
cache.Write(pmap)
cache.WriteIndex()
index_filename = cache.GetCacheFilename() + '.ixname'
self.assertTrue(os.path.exists(index_filename),
'Index not created %s' % index_filename)
with open(index_filename) as f:
self.assertEqual('bar\x0015\x00\x00\n', f.readline())
self.assertEqual('foo\x000\x00\x00\x00\n', f.readline())
self.assertEqual('quux\x0030\x00\n', f.readline())
index_filename = cache.GetCacheFilename() + '.ixuid'
self.assertTrue(os.path.exists(index_filename),
'Index not created %s' % index_filename)
with open(index_filename) as f:
self.assertEqual('10\x000\x00\x00\n', f.readline())
self.assertEqual('11\x0015\x00\n', f.readline())
self.assertEqual('12\x0030\x00\n', f.readline())
def testWriteCacheAndIndex(self):
cache = files.FilesPasswdMapHandler(self.config)
entries = [
passwd.PasswdMapEntry(dict(name='foo', uid=10, gid=10)),
passwd.PasswdMapEntry(dict(name='bar', uid=11, gid=11)),
]
pmap = passwd.PasswdMap(entries)
written = cache.Write(pmap)
cache.WriteIndex()
self.assertTrue('foo' in written)
self.assertTrue('bar' in written)
index_filename = cache.GetCacheFilename() + '.ixname'
self.assertTrue(os.path.exists(index_filename),
'Index not created %s' % index_filename)
index_filename = cache.GetCacheFilename() + '.ixuid'
self.assertTrue(os.path.exists(index_filename),
'Index not created %s' % index_filename)
entries = [
passwd.PasswdMapEntry(dict(name='foo', uid=10, gid=10)),
passwd.PasswdMapEntry(dict(name='bar', uid=11, gid=11)),
passwd.PasswdMapEntry(dict(name='quux', uid=12, gid=11)),
]
pmap = passwd.PasswdMap(entries)
written = cache.Write(pmap)
self.assertTrue('foo' in written)
self.assertTrue('bar' in written)
self.assertTrue('quux' in written)
index_filename = cache.GetCacheFilename() + '.ixname'
with open(index_filename) as f:
self.assertEqual('bar\x0015\x00\n', f.readline())
self.assertEqual('foo\x000\x00\x00\n', f.readline())
index_filename = cache.GetCacheFilename() + '.ixuid'
with open(index_filename) as f:
self.assertEqual('10\x000\x00\x00\n', f.readline())
self.assertEqual('11\x0015\x00\n', f.readline())
cache.WriteIndex()
index_filename = cache.GetCacheFilename() + '.ixname'
with open(index_filename) as f:
self.assertEqual('bar\x0015\x00\x00\n', f.readline())
self.assertEqual('foo\x000\x00\x00\x00\n', f.readline())
self.assertEqual('quux\x0030\x00\n', f.readline())
index_filename = cache.GetCacheFilename() + '.ixuid'
with open(index_filename) as f:
self.assertEqual('10\x000\x00\x00\n', f.readline())
self.assertEqual('11\x0015\x00\n', f.readline())
self.assertEqual('12\x0030\x00\n', f.readline())
if __name__ == '__main__':
unittest.main()
|
gpl-2.0
| -6,607,135,317,482,214,000 | 38.256318 | 79 | 0.62369 | false |
ChileanVirtualObservatory/flask_endpoint
|
endpoint/app/__init__.py
|
1
|
2299
|
#This file is part of ChiVO, the Chilean Virtual Observatory
#A project sponsored by FONDEF (D11I1060)
#Copyright (C) 2015 Universidad Tecnica Federico Santa Maria Mauricio Solar
# Marcelo Mendoza
# Universidad de Chile Diego Mardones
# Pontificia Universidad Catolica Karim Pichara
# Universidad de Concepcion Ricardo Contreras
# Universidad de Santiago Victor Parada
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import urllib2
import requests
import json
import threading
import copy
# Import flask and template operators
from flask import Flask, render_template, request, redirect
# Define the WSGI application object
app = Flask(__name__)
# Configurations
app.config.from_object('config')
## Sample HTTP error handling
#@app.errorhandler(404)
#def not_found(error):
# return render_template('404.html'), 404
#Remove trailing slash in POST requests
#@app.before_request
#def remove_trailing_slash():
# if request.path != '/' and request.path.endswith('/'):
# return redirect(request.path[:-1], code=307)
# Import a module / component using its blueprint handler variable (mod_auth)
from app.services.controllers import services as services_module
from app.external.controllers import external as external_module
from app.registry.controllers import registry as registry_module
# Register blueprint(s)
app.register_blueprint(services_module)
app.register_blueprint(external_module)
app.register_blueprint(registry_module)
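# Illustrative sketch (not part of the original file): with the blueprints
# registered above, the WSGI app can be served directly for local testing.
# The host and port values below are assumptions, not project configuration.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)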
|
gpl-3.0
| -656,813,622,121,283,500 | 34.921875 | 81 | 0.715094 | false |
PaddlePaddle/Paddle
|
python/paddle/jit/dy2static/convert_operators.py
|
1
|
2230
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from ...fluid.dygraph.dygraph_to_static.convert_operators import cast_bool_if_necessary # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_assert # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_ifelse # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_len # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_logical_and # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_logical_not # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_logical_or # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_pop # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_print # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_shape_compare # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_var_dtype # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_var_shape # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_var_shape_simple # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import eval_if_exist_else_none # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import choose_shape_attr_or_api # noqa: F401
from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_while_loop # noqa: F401
__all__ = []
|
apache-2.0
| -7,350,643,029,132,838,000 | 66.575758 | 103 | 0.773991 | false |
Tomsod/gemrb
|
gemrb/GUIScripts/GUISAVE.py
|
1
|
10200
|
# -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# GUISAVE.py - Save game screen from GUISAVE winpack
###################################################
import GemRB
import GameCheck
import GUICommon
import LoadScreen
from GameCheck import MAX_PARTY_SIZE
from GUIDefines import *
SaveWindow = None
ConfirmWindow = None
NameField = 0
SaveButton = 0
Games = ()
ScrollBar = 0
# this lookup table is used only by bg2
str_chapter = (48007, 48006, 16205, 16206, 16207, 16208, 16209, 71020, 71021, 71022)
num_rows = 4
ctrl_offset = (26, 30, 40, 0x10000008, 0x10000010, 25, 34, 3, 0x10000004, 40, 7, 8, 2)
sav_version = 0
strs = { 'cancel' : 13727, 'save' : 15588, 'delete' : 13957, 'empty' : 15304, 'overwrite' : 15306, 'yousure' : 15305 }
def OpenSaveWindow ():
global SaveWindow, Games, ScrollBar
global num_rows, ctrl_offset, sav_version, strs
if GUICommon.CloseOtherWindow (OpenSaveWindow):
CloseSaveWindow ()
return
GemRB.HideGUI ()
GUICommon.GameWindow.SetVisible(WINDOW_INVISIBLE)
if GameCheck.IsIWD2():
GemRB.LoadWindowPack ("GUISAVE", 800, 600)
num_rows = 5
ctrl_offset = (55, 60, 25, 0x10000005, 0x1000000a, 23, 22, 3, 0x10000004, 40, 7, 8, 2)
sav_version = 22
else:
GemRB.LoadWindowPack ("GUISAVE", 640, 480)
if GameCheck.IsPST():
ctrl_offset = (14, 18, 22, 0x10000004, 0x10000008, 13, 46, 1, 0x10000002, 6, 4, 5, 3)
strs = { 'cancel' : 4196, 'save' : 28645, 'delete' : 28640, 'empty' : 28647, 'overwrite' : 28644, 'yousure' : 28639 }
SaveWindow = Window = GemRB.LoadWindow (0)
Window.SetFrame ()
GemRB.SetVar ("OtherWindow", SaveWindow.ID)
# Cancel button
CancelButton = Window.GetControl (ctrl_offset[6])
CancelButton.SetText (strs['cancel'])
CancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenSaveWindow)
CancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
GemRB.SetVar ("SaveIdx", 0)
for i in range(num_rows):
Button = Window.GetControl (ctrl_offset[0]+i)
Button.SetText (strs['save'])
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenConfirmWindow)
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.SetVarAssoc ("SaveIdx", i)
Button = Window.GetControl (ctrl_offset[1]+i)
Button.SetText (strs['delete'])
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, DeleteGamePress)
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.SetVarAssoc ("SaveIdx", i)
# area previews
Button = Window.GetControl (1+i)
Button.SetState (IE_GUI_BUTTON_LOCKED)
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE|IE_GUI_BUTTON_PICTURE, OP_SET)
# PC portraits
for j in range(min(6, MAX_PARTY_SIZE)):
Button = Window.GetControl (ctrl_offset[2] + i*min(6, MAX_PARTY_SIZE) + j)
Button.SetState (IE_GUI_BUTTON_LOCKED)
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE|IE_GUI_BUTTON_PICTURE, OP_SET)
ScrollBar = Window.GetControl (ctrl_offset[5])
ScrollBar.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, ScrollBarPress)
Games = GemRB.GetSaveGames ()
TopIndex = max (0, len(Games) - num_rows + 1) #one more for the 'new game'
GemRB.SetVar ("TopIndex",TopIndex)
ScrollBar.SetVarAssoc ("TopIndex", TopIndex)
ScrollBar.SetDefaultScrollBar ()
ScrollBarPress ()
Window.SetVisible (WINDOW_VISIBLE)
return
def ScrollBarPress():
Window = SaveWindow
# draw load game portraits
Pos = GemRB.GetVar ("TopIndex")
for i in range(num_rows):
ActPos = Pos + i
Button1 = Window.GetControl (ctrl_offset[0]+i)
Button2 = Window.GetControl (ctrl_offset[1]+i)
if ActPos <= len(Games):
Button1.SetState (IE_GUI_BUTTON_ENABLED)
else:
Button1.SetState (IE_GUI_BUTTON_DISABLED)
if ActPos < len(Games):
Slotname = Games[ActPos].GetName()
Slottime = Games[ActPos].GetDate ()
Button2.SetState (IE_GUI_BUTTON_ENABLED)
elif ActPos == len(Games):
Slotname = strs['empty']
Slottime = ""
Button2.SetState (IE_GUI_BUTTON_DISABLED)
else:
Slotname = ""
Slottime = ""
Button2.SetState (IE_GUI_BUTTON_DISABLED)
Label = Window.GetControl (ctrl_offset[3]+i)
Label.SetText (Slotname)
Label = Window.GetControl (ctrl_offset[4]+i)
Label.SetText (Slottime)
Button = Window.GetControl (1+i)
if ActPos < len(Games):
Button.SetSprite2D(Games[ActPos].GetPreview())
else:
Button.SetPicture ("")
for j in range(min(6, MAX_PARTY_SIZE)):
Button = Window.GetControl (ctrl_offset[2] + i*min(6, MAX_PARTY_SIZE) + j)
if ActPos < len(Games):
Button.SetSprite2D(Games[ActPos].GetPortrait(j))
else:
Button.SetPicture ("")
return
def QuickSavePressed():
Slot = 1
if GameCheck.IsTOB():
Slot = 4
GemRB.SaveGame(Slot)
return
def CloseConfirmWindow():
global ConfirmWindow
if ConfirmWindow:
ConfirmWindow.Unload ()
ConfirmWindow = None
GemRB.SetVar ("FloatWindow", -1)
SaveWindow.SetVisible (WINDOW_VISIBLE)
return
def AbortedSaveGame():
CloseConfirmWindow ()
return
# User entered save name and pressed save/overwrite.
# Display progress bar screen and save the game, close the save windows
def ConfirmedSaveGame():
global ConfirmWindow
Pos = GemRB.GetVar ("TopIndex") + GemRB.GetVar ("SaveIdx")
Label = ConfirmWindow.GetControl (ctrl_offset[7])
Slotname = Label.QueryText ()
Slotname = Slotname.replace ("/", "|") # can't have path separators in the name
# Empty save name. We can get here if user presses Enter key
if Slotname == "":
return
# We have to close floating window first
OpenConfirmWindow ()
#FIXME: make this work
#LoadScreen.StartLoadScreen (LoadScreen.LS_TYPE_SAVING)
OpenSaveWindow ()
GemRB.HideGUI ()
if Pos < len(Games):
GemRB.SaveGame (Games[Pos], Slotname, sav_version)
else:
GemRB.SaveGame (None, Slotname, sav_version)
GemRB.UnhideGUI ()
return
def OpenConfirmWindow ():
global ConfirmWindow, NameField, SaveButton
if ConfirmWindow != None:
ConfirmWindow.Unload ()
ConfirmWindow = None
GemRB.SetVar ("FloatWindow", -1)
return
Pos = GemRB.GetVar ("TopIndex") + GemRB.GetVar ("SaveIdx")
ConfirmWindow = GemRB.LoadWindow (1)
GemRB.SetVar ("FloatWindow", ConfirmWindow.ID)
# Slot name
if Pos < len(Games):
Slotname = Games[Pos].GetName()
save_strref = strs['overwrite']
else:
Slotname = ""
save_strref = strs['save']
NameField = ConfirmWindow.GetControl (ctrl_offset[7])
NameField.SetText (Slotname)
NameField.SetEvent (IE_GUI_EDIT_ON_CHANGE, EditChange)
#game hours (should be generated from game)
if Pos < len(Games):
if GameCheck.IsBG2():
Chapter = GemRB.GetGameVar ("CHAPTER") & 0x7fffffff
Slotname = GemRB.GetString(str_chapter[Chapter-1]) + " " + Games[Pos].GetGameDate()
else:
Slotname = Games[Pos].GetGameDate()
else:
Slotname = ""
Label = ConfirmWindow.GetControl (ctrl_offset[8])
Label.SetText (Slotname)
# Areapreview
if not GameCheck.IsIWD2():
Button = ConfirmWindow.GetControl (0)
if Pos<len(Games):
Button.SetSprite2D(Games[Pos].GetPreview())
else:
Button.SetPicture("")
# PC portraits
for j in range(min(6, MAX_PARTY_SIZE)):
Button = ConfirmWindow.GetControl (ctrl_offset[9]+j)
if Pos<len(Games):
Button.SetSprite2D(Games[Pos].GetPortrait(j))
else:
Button.SetPicture("")
# Save/Overwrite
SaveButton = ConfirmWindow.GetControl (ctrl_offset[10])
SaveButton.SetText (save_strref)
SaveButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ConfirmedSaveGame)
SaveButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
if Slotname == "":
SaveButton.SetState (IE_GUI_BUTTON_DISABLED)
# Cancel
CancelButton = ConfirmWindow.GetControl (ctrl_offset[11])
CancelButton.SetText (strs['cancel'])
CancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, AbortedSaveGame)
CancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
ConfirmWindow.SetVisible (WINDOW_VISIBLE)
ConfirmWindow.ShowModal (MODAL_SHADOW_NONE)
NameField.SetStatus (IE_GUI_CONTROL_FOCUSED) # ShowModal will happily reset this..
return
# Disable Save/Overwrite button if the save slotname is empty,
# else enable it
def EditChange():
Name = NameField.QueryText ()
if len(Name) == 0:
SaveButton.SetState (IE_GUI_BUTTON_DISABLED)
else:
SaveButton.SetState (IE_GUI_BUTTON_ENABLED)
return
def DeleteGameConfirm():
global Games
TopIndex = GemRB.GetVar ("TopIndex")
Pos = TopIndex + GemRB.GetVar ("SaveIdx")
GemRB.DeleteSaveGame (Games[Pos])
del Games[Pos]
if TopIndex>0:
GemRB.SetVar ("TopIndex",TopIndex-1)
ScrollBar.SetVarAssoc ("TopIndex", len(Games))
ScrollBarPress()
CloseConfirmWindow ()
return
def DeleteGameCancel():
CloseConfirmWindow ()
return
def DeleteGamePress():
global ConfirmWindow
SaveWindow.SetVisible (WINDOW_INVISIBLE)
ConfirmWindow=GemRB.LoadWindow (ctrl_offset[12])
Text=ConfirmWindow.GetControl (0)
Text.SetText (strs['yousure'])
DeleteButton=ConfirmWindow.GetControl (1)
DeleteButton.SetText (strs['delete'])
DeleteButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, DeleteGameConfirm)
DeleteButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
CancelButton=ConfirmWindow.GetControl (2)
CancelButton.SetText (strs['cancel'])
CancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, DeleteGameCancel)
CancelButton.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
ConfirmWindow.SetVisible (WINDOW_VISIBLE)
return
# Exit either back to game or to the Start window
def CloseSaveWindow ():
global SaveWindow
if SaveWindow:
SaveWindow.Unload ()
SaveWindow = None
GemRB.SetVar ("OtherWindow", -1)
if GemRB.GetVar ("QuitAfterSave"):
GemRB.QuitGame ()
GemRB.SetNextScript ("Start")
return
GUICommon.GameWindow.SetVisible(WINDOW_VISIBLE) #enabling the game control screen
GemRB.UnhideGUI () #enabling the other windows
return
|
gpl-2.0
| 8,875,290,173,057,791,000 | 28.142857 | 120 | 0.72098 | false |
duyuan11/glumpy
|
glumpy/geometry/arc.py
|
4
|
6332
|
# ----------------------------------------------------------------------------
# Anti-Grain Geometry (AGG) - Version 2.5
# A high quality rendering engine for C++
# Copyright (C) 2002-2006 Maxim Shemanarev
# Contact: mcseem@antigrain.com
# mcseemagg@yahoo.com
# http://antigrain.com
#
# AGG is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# AGG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with AGG; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
# ----------------------------------------------------------------------------
#
# Python translation by Nicolas P. Rougier
# Copyright (C) 2013 Nicolas P. Rougier. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY NICOLAS P. ROUGIER ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL NICOLAS P. ROUGIER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Nicolas P. Rougier.
#
# ----------------------------------------------------------------------------
import math
import numpy as np
def elliptical_arc(x0, y0, rx, ry, angle, large_arc_flag, sweep_flag, x2, y2):
"""
"""
radii_ok = True
cos_a = math.cos(angle)
sin_a = math.sin(angle)
if rx < 0.0: rx = -rx
    if ry < 0.0: ry = -ry
# Calculate the middle point between
# the current and the final points
# ------------------------
dx2 = (x0 - x2) / 2.0
dy2 = (y0 - y2) / 2.0
# Calculate (x1, y1)
# ------------------------
x1 = cos_a * dx2 + sin_a * dy2
y1 = -sin_a * dx2 + cos_a * dy2
# Check that radii are large enough
# ------------------------
prx,pry = rx * rx, ry * ry
px1,py1 = x1 * x1, y1 * y1
radii_check = px1/prx + py1/pry
if radii_check > 1.0:
rx = math.sqrt(radii_check) * rx
ry = math.sqrt(radii_check) * ry
prx = rx * rx
pry = ry * ry
if radii_check > 10.0:
radii_ok = False
# Calculate (cx1, cy1)
# ------------------------
if large_arc_flag == sweep_flag:
sign = -1
else:
sign = +1
sq = (prx*pry - prx*py1 - pry*px1) / (prx*py1 + pry*px1)
coef = sign*math.sqrt( max(sq,0) )
cx1 = coef * ((rx * y1) / ry)
cy1 = coef * -((ry * x1) / rx)
# Calculate (cx, cy) from (cx1, cy1)
# ------------------------
sx2 = (x0 + x2) / 2.0
sy2 = (y0 + y2) / 2.0
cx = sx2 + (cos_a * cx1 - sin_a * cy1)
cy = sy2 + (sin_a * cx1 + cos_a * cy1)
# Calculate the start_angle (angle1) and the sweep_angle (dangle)
# ------------------------
ux = (x1 - cx1) / rx
uy = (y1 - cy1) / ry
vx = (-x1 - cx1) / rx
vy = (-y1 - cy1) / ry
# Calculate the angle start
# ------------------------
n = math.sqrt(ux*ux + uy*uy)
    p = ux
if uy < 0:
sign = -1.0
else:
sign = +1.0
v = p / n
if v < -1.0:
v = -1.0
if v > 1.0:
v = 1.0
start_angle = sign * math.acos(v)
# Calculate the sweep angle
# ------------------------
n = math.sqrt((ux*ux + uy*uy) * (vx*vx + vy*vy))
p = ux * vx + uy * vy
if ux * vy - uy * vx < 0:
sign = -1.0
else:
sign = +1.0
v = p / n
v = min(max(v,-1.0),+1.0)
sweep_angle = sign * math.acos(v)
if not sweep_flag and sweep_angle > 0:
sweep_angle -= math.pi * 2.0
elif sweep_flag and sweep_angle < 0:
sweep_angle += math.pi * 2.0
start_angle = math.fmod(start_angle, 2.0 * math.pi)
if sweep_angle >= 2.0 * math.pi:
sweep_angle = 2.0 * math.pi
if sweep_angle <= -2.0 * math.pi:
sweep_angle = -2.0 * math.pi
V = arc( cx, cy, rx, ry, start_angle, start_angle+sweep_angle, sweep_flag )
c = math.cos(angle)
s = math.sin(angle)
X,Y = V[:,0]-cx, V[:,1]-cy
V[:,0] = c*X - s*Y + cx
V[:,1] = s*X + c*Y + cy
return V
def arc(cx, cy, rx, ry, a1, a2, ccw=False):
"""
"""
scale = 1.0
ra = (abs(rx) + abs(ry)) / 2.0
da = math.acos(ra / (ra + 0.125 / scale)) * 2.0
if ccw:
while a2 < a1:
a2 += math.pi * 2.0
else:
while a1 < a2:
a1 += math.pi * 2.0
da = -da
a_start = a1
a_end = a2
vertices =[]
angle = a_start
while (angle < a_end - da/4) == ccw:
x = cx + math.cos(angle) * rx
y = cy + math.sin(angle) * ry
vertices.append( (x,y) )
angle += da
x = cx + math.cos(a_end) * rx
y = cy + math.sin(a_end) * ry
vertices.append( (x,y) )
return np.array(vertices).reshape(len(vertices),2)
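# Illustrative sketch (not part of the original module): both helpers return
# an (N, 2) array of polyline vertices, so they can be fed straight into a
# plotting or tessellation routine. The matplotlib dependency and the
# parameter values below are assumptions chosen only for this demo.
if __name__ == '__main__':
    import matplotlib.pyplot as plt

    quarter = arc(0.0, 0.0, 1.0, 1.0, 0.0, math.pi / 2.0, ccw=True)
    ellip = elliptical_arc(0.0, 0.0, 2.0, 1.0, 0.0, 0, 1, 2.0, 1.0)

    plt.plot(quarter[:, 0], quarter[:, 1], label="circular arc")
    plt.plot(ellip[:, 0], ellip[:, 1], label="elliptical arc (SVG-style)")
    plt.axis("equal")
    plt.legend()
    plt.show()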
|
bsd-3-clause
| 1,184,211,180,139,694,800 | 32.326316 | 79 | 0.554643 | false |
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_01_01/aio/operations/_load_balancer_frontend_ip_configurations_operations.py
|
1
|
8943
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerFrontendIPConfigurationsOperations:
"""LoadBalancerFrontendIPConfigurationsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_01_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs
) -> AsyncIterable["_models.LoadBalancerFrontendIPConfigurationListResult"]:
"""Gets all the load balancer frontend IP configurations.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LoadBalancerFrontendIPConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_01_01.models.LoadBalancerFrontendIPConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerFrontendIPConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('LoadBalancerFrontendIPConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations'} # type: ignore
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
frontend_ip_configuration_name: str,
**kwargs
) -> "_models.FrontendIPConfiguration":
"""Gets load balancer frontend IP configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param frontend_ip_configuration_name: The name of the frontend IP configuration.
:type frontend_ip_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FrontendIPConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_01_01.models.FrontendIPConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FrontendIPConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'frontendIPConfigurationName': self._serialize.url("frontend_ip_configuration_name", frontend_ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('FrontendIPConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations/{frontendIPConfigurationName}'} # type: ignore
|
mit
| 4,455,762,540,214,366,000 | 49.241573 | 228 | 0.659958 | false |
numairmansur/RoBO
|
examples/example_bagged_nets.py
|
1
|
1284
|
import sys
import logging
import numpy as np
import matplotlib.pyplot as plt
import robo.models.neural_network as robo_net
import robo.models.bagged_networks as bn
from robo.initial_design.init_random_uniform import init_random_uniform
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
def f(x):
return np.sinc(x * 10 - 5).sum(axis=1)[:, None]
rng = np.random.RandomState(42)
X = init_random_uniform(np.zeros(1), np.ones(1), 20, rng).astype(np.float32)
Y = f(X)
x = np.linspace(0, 1, 512, dtype=np.float32)[:, None]
vals = f(x).astype(np.float32)
plt.grid()
plt.plot(x[:, 0], f(x)[:, 0], label="true", color="green")
plt.plot(X[:, 0], Y[:, 0], "ro")
model = bn.BaggedNets(robo_net.SGDNet, num_models=16, bootstrap_with_replacement=True,
n_epochs=16384, error_threshold=1e-3,
n_units=[32, 32, 32], dropout=0,
batch_size=10, learning_rate=1e-3,
shuffle_batches=True)
m = model.train(X, Y)
mean_pred, var_pred = model.predict(x)
std_pred = np.sqrt(var_pred)
plt.plot(x[:, 0], mean_pred[:, 0], label="bagged nets", color="blue")
plt.fill_between(x[:, 0], mean_pred[:, 0] + std_pred[:, 0], mean_pred[:, 0] - std_pred[:, 0], alpha=0.2, color="blue")
plt.legend()
plt.show()
|
bsd-3-clause
| -7,428,407,319,746,540,000 | 27.533333 | 118 | 0.632399 | false |
ebukoz/thrive
|
erpnext/accounts/doctype/budget/budget.py
|
1
|
13426
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, getdate, add_months, get_last_day, fmt_money, nowdate
from frappe.model.naming import make_autoname
from erpnext.accounts.utils import get_fiscal_year
from frappe.model.document import Document
class BudgetError(frappe.ValidationError): pass
class DuplicateBudgetError(frappe.ValidationError): pass
class Budget(Document):
def autoname(self):
self.name = make_autoname(self.get(frappe.scrub(self.budget_against))
+ "/" + self.fiscal_year + "/.###")
def validate(self):
if not self.get(frappe.scrub(self.budget_against)):
frappe.throw(_("{0} is mandatory").format(self.budget_against))
self.validate_duplicate()
self.validate_accounts()
self.set_null_value()
self.validate_applicable_for()
def validate_duplicate(self):
budget_against_field = frappe.scrub(self.budget_against)
budget_against = self.get(budget_against_field)
accounts = [d.account for d in self.accounts] or []
existing_budget = frappe.db.sql("""
select
b.name, ba.account from `tabBudget` b, `tabBudget Account` ba
where
ba.parent = b.name and b.docstatus < 2 and b.company = %s and %s=%s and
b.fiscal_year=%s and b.name != %s and ba.account in (%s) """
% ('%s', budget_against_field, '%s', '%s', '%s', ','.join(['%s'] * len(accounts))),
(self.company, budget_against, self.fiscal_year, self.name) + tuple(accounts), as_dict=1)
for d in existing_budget:
frappe.throw(_("Another Budget record '{0}' already exists against {1} '{2}' and account '{3}' for fiscal year {4}")
.format(d.name, self.budget_against, budget_against, d.account, self.fiscal_year), DuplicateBudgetError)
def validate_accounts(self):
account_list = []
for d in self.get('accounts'):
if d.account:
account_details = frappe.db.get_value("Account", d.account,
["is_group", "company", "report_type"], as_dict=1)
if account_details.is_group:
frappe.throw(_("Budget cannot be assigned against Group Account {0}").format(d.account))
elif account_details.company != self.company:
					frappe.throw(_("Account {0} does not belong to company {1}")
.format(d.account, self.company))
elif account_details.report_type != "Profit and Loss":
frappe.throw(_("Budget cannot be assigned against {0}, as it's not an Income or Expense account")
.format(d.account))
if d.account in account_list:
frappe.throw(_("Account {0} has been entered multiple times").format(d.account))
else:
account_list.append(d.account)
def set_null_value(self):
if self.budget_against == 'Cost Center':
self.project = None
else:
self.cost_center = None
def validate_applicable_for(self):
if (self.applicable_on_material_request
and not (self.applicable_on_purchase_order and self.applicable_on_booking_actual_expenses)):
frappe.throw(_("Please enable Applicable on Purchase Order and Applicable on Booking Actual Expenses"))
elif (self.applicable_on_purchase_order
and not (self.applicable_on_booking_actual_expenses)):
frappe.throw(_("Please enable Applicable on Booking Actual Expenses"))
elif not(self.applicable_on_material_request
or self.applicable_on_purchase_order or self.applicable_on_booking_actual_expenses):
self.applicable_on_booking_actual_expenses = 1
def validate_expense_against_budget(args):
args = frappe._dict(args)
if args.get('company') and not args.fiscal_year:
args.fiscal_year = get_fiscal_year(args.get('posting_date'), company=args.get('company'))[0]
frappe.flags.exception_approver_role = frappe.get_cached_value('Company',
args.get('company'), 'exception_budget_approver_role')
if not args.account:
args.account = args.get("expense_account")
if not (args.get('account') and args.get('cost_center')) and args.item_code:
args.cost_center, args.account = get_item_details(args)
if not (args.cost_center or args.project) and not args.account:
return
for budget_against in ['project', 'cost_center']:
if (args.get(budget_against) and args.account
and frappe.db.get_value("Account", {"name": args.account, "root_type": "Expense"})):
if args.project and budget_against == 'project':
condition = "and b.project=%s" % frappe.db.escape(args.project)
args.budget_against_field = "Project"
elif args.cost_center and budget_against == 'cost_center':
cc_lft, cc_rgt = frappe.db.get_value("Cost Center", args.cost_center, ["lft", "rgt"])
condition = """and exists(select name from `tabCost Center`
where lft<=%s and rgt>=%s and name=b.cost_center)""" % (cc_lft, cc_rgt)
args.budget_against_field = "Cost Center"
args.budget_against = args.get(budget_against)
budget_records = frappe.db.sql("""
select
b.{budget_against_field} as budget_against, ba.budget_amount, b.monthly_distribution,
ifnull(b.applicable_on_material_request, 0) as for_material_request,
ifnull(applicable_on_purchase_order,0) as for_purchase_order,
ifnull(applicable_on_booking_actual_expenses,0) as for_actual_expenses,
b.action_if_annual_budget_exceeded, b.action_if_accumulated_monthly_budget_exceeded,
b.action_if_annual_budget_exceeded_on_mr, b.action_if_accumulated_monthly_budget_exceeded_on_mr,
b.action_if_annual_budget_exceeded_on_po, b.action_if_accumulated_monthly_budget_exceeded_on_po
from
`tabBudget` b, `tabBudget Account` ba
where
b.name=ba.parent and b.fiscal_year=%s
and ba.account=%s and b.docstatus=1
{condition}
""".format(condition=condition,
budget_against_field=frappe.scrub(args.get("budget_against_field"))),
(args.fiscal_year, args.account), as_dict=True)
if budget_records:
validate_budget_records(args, budget_records)
def validate_budget_records(args, budget_records):
for budget in budget_records:
if flt(budget.budget_amount):
amount = get_amount(args, budget)
yearly_action, monthly_action = get_actions(args, budget)
if monthly_action in ["Stop", "Warn"]:
budget_amount = get_accumulated_monthly_budget(budget.monthly_distribution,
args.posting_date, args.fiscal_year, budget.budget_amount)
args["month_end_date"] = get_last_day(args.posting_date)
compare_expense_with_budget(args, budget_amount,
_("Accumulated Monthly"), monthly_action, budget.budget_against, amount)
if yearly_action in ("Stop", "Warn") and monthly_action != "Stop" \
and yearly_action != monthly_action:
compare_expense_with_budget(args, flt(budget.budget_amount),
_("Annual"), yearly_action, budget.budget_against, amount)
def compare_expense_with_budget(args, budget_amount, action_for, action, budget_against, amount=0):
actual_expense = amount or get_actual_expense(args)
if actual_expense > budget_amount:
diff = actual_expense - budget_amount
currency = frappe.get_cached_value('Company', args.company, 'default_currency')
msg = _("{0} Budget for Account {1} against {2} {3} is {4}. It will exceed by {5}").format(
_(action_for), frappe.bold(args.account), args.budget_against_field,
frappe.bold(budget_against),
frappe.bold(fmt_money(budget_amount, currency=currency)),
frappe.bold(fmt_money(diff, currency=currency)))
if (frappe.flags.exception_approver_role
and frappe.flags.exception_approver_role in frappe.get_roles(frappe.session.user)):
action = "Warn"
if action=="Stop":
frappe.throw(msg, BudgetError)
else:
frappe.msgprint(msg, indicator='orange')
def get_actions(args, budget):
yearly_action = budget.action_if_annual_budget_exceeded
monthly_action = budget.action_if_accumulated_monthly_budget_exceeded
if args.get('doctype') == 'Material Request' and budget.for_material_request:
yearly_action = budget.action_if_annual_budget_exceeded_on_mr
monthly_action = budget.action_if_accumulated_monthly_budget_exceeded_on_mr
elif args.get('doctype') == 'Purchase Order' and budget.for_purchase_order:
yearly_action = budget.action_if_annual_budget_exceeded_on_po
monthly_action = budget.action_if_accumulated_monthly_budget_exceeded_on_po
return yearly_action, monthly_action
def get_amount(args, budget):
amount = 0
if args.get('doctype') == 'Material Request' and budget.for_material_request:
amount = (get_requested_amount(args, budget)
+ get_ordered_amount(args, budget) + get_actual_expense(args))
elif args.get('doctype') == 'Purchase Order' and budget.for_purchase_order:
amount = get_ordered_amount(args, budget) + get_actual_expense(args)
return amount
def get_requested_amount(args, budget):
item_code = args.get('item_code')
condition = get_other_condition(args, budget, 'Material Request')
data = frappe.db.sql(""" select ifnull((sum(child.stock_qty - child.ordered_qty) * rate), 0) as amount
from `tabMaterial Request Item` child, `tabMaterial Request` parent where parent.name = child.parent and
child.item_code = %s and parent.docstatus = 1 and child.stock_qty > child.ordered_qty and {0} and
parent.material_request_type = 'Purchase' and parent.status != 'Stopped'""".format(condition), item_code, as_list=1)
return data[0][0] if data else 0
def get_ordered_amount(args, budget):
item_code = args.get('item_code')
condition = get_other_condition(args, budget, 'Purchase Order')
data = frappe.db.sql(""" select ifnull(sum(child.amount - child.billed_amt), 0) as amount
from `tabPurchase Order Item` child, `tabPurchase Order` parent where
parent.name = child.parent and child.item_code = %s and parent.docstatus = 1 and child.amount > child.billed_amt
and parent.status != 'Closed' and {0}""".format(condition), item_code, as_list=1)
return data[0][0] if data else 0
def get_other_condition(args, budget, for_doc):
condition = "expense_account = '%s'" % (args.expense_account)
budget_against_field = frappe.scrub(args.get("budget_against_field"))
if budget_against_field and args.get(budget_against_field):
condition += " and child.%s = '%s'" %(budget_against_field, args.get(budget_against_field))
if args.get('fiscal_year'):
date_field = 'schedule_date' if for_doc == 'Material Request' else 'transaction_date'
start_date, end_date = frappe.db.get_value('Fiscal Year', args.get('fiscal_year'),
['year_start_date', 'year_end_date'])
condition += """ and parent.%s
between '%s' and '%s' """ %(date_field, start_date, end_date)
return condition
def get_actual_expense(args):
condition1 = " and gle.posting_date <= %(month_end_date)s" \
if args.get("month_end_date") else ""
if args.budget_against_field == "Cost Center":
lft_rgt = frappe.db.get_value(args.budget_against_field,
args.budget_against, ["lft", "rgt"], as_dict=1)
args.update(lft_rgt)
condition2 = """and exists(select name from `tabCost Center`
where lft>=%(lft)s and rgt<=%(rgt)s and name=gle.cost_center)"""
elif args.budget_against_field == "Project":
condition2 = "and exists(select name from `tabProject` where name=gle.project and gle.project = %(budget_against)s)"
return flt(frappe.db.sql("""
select sum(gle.debit) - sum(gle.credit)
from `tabGL Entry` gle
where gle.account=%(account)s
{condition1}
and gle.fiscal_year=%(fiscal_year)s
and gle.company=%(company)s
and gle.docstatus=1
{condition2}
""".format(condition1=condition1, condition2=condition2), (args))[0][0])
def get_accumulated_monthly_budget(monthly_distribution, posting_date, fiscal_year, annual_budget):
distribution = {}
if monthly_distribution:
for d in frappe.db.sql("""select mdp.month, mdp.percentage_allocation
from `tabMonthly Distribution Percentage` mdp, `tabMonthly Distribution` md
where mdp.parent=md.name and md.fiscal_year=%s""", fiscal_year, as_dict=1):
distribution.setdefault(d.month, d.percentage_allocation)
dt = frappe.db.get_value("Fiscal Year", fiscal_year, "year_start_date")
accumulated_percentage = 0.0
while(dt <= getdate(posting_date)):
if monthly_distribution:
accumulated_percentage += distribution.get(getdate(dt).strftime("%B"), 0)
else:
accumulated_percentage += 100.0/12
dt = add_months(dt, 1)
return annual_budget * accumulated_percentage / 100
def get_item_details(args):
cost_center, expense_account = None, None
if not args.get('company'):
return cost_center, expense_account
if args.item_code:
item_defaults = frappe.db.get_value('Item Default',
{'parent': args.item_code, 'company': args.get('company')},
['buying_cost_center', 'expense_account'])
if item_defaults:
cost_center, expense_account = item_defaults
if not (cost_center and expense_account):
for doctype in ['Item Group', 'Company']:
data = get_expense_cost_center(doctype, args)
if not cost_center and data:
cost_center = data[0]
if not expense_account and data:
expense_account = data[1]
if cost_center and expense_account:
return cost_center, expense_account
return cost_center, expense_account
def get_expense_cost_center(doctype, args):
if doctype == 'Item Group':
return frappe.db.get_value('Item Default',
{'parent': args.get(frappe.scrub(doctype)), 'company': args.get('company')},
['buying_cost_center', 'expense_account'])
else:
return frappe.db.get_value(doctype, args.get(frappe.scrub(doctype)),\
['cost_center', 'default_expense_account'])
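# Illustrative sketch (not part of the original module): the accumulation in
# get_accumulated_monthly_budget walks month by month from the fiscal year
# start to the posting date, adding either the configured percentage for that
# month or a flat 100/12. The helper below reproduces just that arithmetic
# without touching the database; the dates, the 120,000 annual budget and the
# distribution percentages are assumptions for illustration only.
def _example_accumulated_budget(annual_budget, year_start_date, posting_date, distribution=None):
	accumulated_percentage = 0.0
	dt = getdate(year_start_date)
	while dt <= getdate(posting_date):
		if distribution:
			accumulated_percentage += distribution.get(dt.strftime("%B"), 0)
		else:
			accumulated_percentage += 100.0 / 12
		dt = add_months(dt, 1)
	return annual_budget * accumulated_percentage / 100

# flat distribution, three months into the fiscal year: 120000 * 25% = 30000
# _example_accumulated_budget(120000, "2019-04-01", "2019-06-15")
# front-loaded distribution: 120000 * (15 + 15 + 10)% = 48000
# _example_accumulated_budget(120000, "2019-04-01", "2019-06-15",
# 	{"April": 15, "May": 15, "June": 10})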
|
gpl-3.0
| 287,245,431,636,735,200 | 39.932927 | 119 | 0.710338 | false |
TheWeiTheTruthAndTheLight/senior-design
|
src/get_tweets.py
|
1
|
5213
|
"""
get_tweets.py
usage: get_tweets.py [-h] [--sarcastic_path SARCASTIC_PATH]
[--non_sarcastic_path NON_SARCASTIC_PATH]
[--log_path LOG_PATH]
Query twitter API for tweets over last 7 days
optional arguments:
-h, --help show this help message and exit
--sarcastic_path SARCASTIC_PATH
path to directory where results w/ #sarcasm should be
saved. Needs trailing "/"
--non_sarcastic_path NON_SARCASTIC_PATH
path to directory where results w/o #sarcasm should be
saved. Needs trailing "/"
--log_path LOG_PATH path to save log. Needs trailing "/"
"""
import os
import sys
import json
import argparse
import logging
from datetime import datetime, timedelta
from TwitterSearch import *
from login import *
from json_io import list_to_json
def create_sarcastic_search_order():
tso = TwitterSearchOrder()
tso.set_keywords(['#sarcasm']) # query only tweets containing #sarcasm
tso.set_language('en')
tso.set_include_entities(True)
tso.arguments.update({"tweet_mode": "extended"})
return tso
def create_non_sarcastic_search_order():
tso = TwitterSearchOrder()
tso.set_keywords(["-#sarcasm"]) # must have keyword, so query tweets containing common words but NOT '#sarcasm'
tso.set_language('en')
tso.set_include_entities(True)
tso.arguments.update({"tweet_mode": "extended"})
return tso
if __name__ == "__main__":
# Setup CLA parser
parser = argparse.ArgumentParser(description='Query twitter API for tweets over last 7 days')
parser.add_argument('--sarcastic_path', help='path to directory where results w/ #sarcasm should be saved. Needs trailing "/"')
parser.add_argument('--non_sarcastic_path', help='path to directory where results w/o #sarcasm should be saved. Needs trailing "/"')
parser.add_argument('--log_path', help='path to save log. Needs trailing "/"')
# Parse CLAs
args = parser.parse_args()
# start and end date (for file naming/logging)
end_date = datetime.strftime(datetime.now(), "%Y-%m-%d")
start_date = datetime.strftime( (datetime.now() - timedelta(days=7)), "%Y-%m-%d")
filename = "{}_{}".format(start_date, end_date)
# setup logger
if args.log_path:
if not os.path.exists(args.log_path):
os.makedirs(args.log_path)
logger = logging.getLogger('root')
FORMAT = "[%(asctime)s - %(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
logging.basicConfig(filename=args.log_path + filename + ".log", filemode='a', level=logging.INFO, format=FORMAT)
# lists to store tweets
sarcastic_tweets_list = []
non_sarcastic_tweets_list = []
# create search orders
if args.sarcastic_path:
sarcastic_tso = create_sarcastic_search_order()
if args.non_sarcastic_path:
non_sarcastic_tso = create_non_sarcastic_search_order()
try:
# query twitter API and populate tweet lists
ts = TwitterSearch(
consumer_key = CONSUMER_KEY,
consumer_secret = CONSUMER_SECRET,
access_token = ACCESS_TOKEN,
access_token_secret = ACCESS_SECRET
)
if args.sarcastic_path:
for sarcastic_tweet in ts.search_tweets_iterable(sarcastic_tso):
if not sarcastic_tweet['full_text'].lower().startswith('rt'):
sarcastic_tweets_list.append({
'id': sarcastic_tweet['id'],
'urls': not not sarcastic_tweet['entities']['urls'],
'media': "media" in sarcastic_tweet["entities"],
'text': sarcastic_tweet['full_text']})
if args.non_sarcastic_path:
for non_sarcastic_tweet in ts.search_tweets_iterable(non_sarcastic_tso):
if not non_sarcastic_tweet['full_text'].lower().startswith('rt'):
non_sarcastic_tweets_list.append({
'id': non_sarcastic_tweet['id'],
'urls': not not non_sarcastic_tweet['entities']['urls'],
'media': "media" in non_sarcastic_tweet["entities"],
'text': non_sarcastic_tweet['full_text']})
except TwitterSearchException as e:
logging.error(str(e))
# save results to json
if args.sarcastic_path:
if not os.path.exists(args.sarcastic_path):
os.makedirs(args.sarcastic_path)
list_to_json(sarcastic_tweets_list, args.sarcastic_path + filename + ".json", old_format=False)
if args.log_path:
logging.info("Saved {} sarcastic tweets at {}".format(len(sarcastic_tweets_list), args.sarcastic_path + filename + ".json"))
if args.non_sarcastic_path:
if not os.path.exists(args.non_sarcastic_path):
os.makedirs(args.non_sarcastic_path)
list_to_json(non_sarcastic_tweets_list, args.non_sarcastic_path + filename + ".json", old_format=False)
if args.log_path:
logging.info("Saved {} non sarcastic tweets at {}".format(len(non_sarcastic_tweets_list), args.non_sarcastic_path + filename + ".json"))
|
mit
| 7,683,003,271,177,713,000 | 42.082645 | 148 | 0.61807 | false |
gfairchild/pyHarmonySearch
|
pyharmonysearch/harmony_search.py
|
1
|
13648
|
"""
Copyright (c) 2013, Triad National Security, LLC
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Triad National Security, LLC nor the names of its contributors may be used to endorse or
promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import random
from multiprocessing import Pool, Event
from datetime import datetime
from collections import namedtuple
import copy
# Note: We use a global multiprocessing.Event to deal with a KeyboardInterrupt. This idea comes from
# http://stackoverflow.com/questions/14579474/multiprocessing-pool-spawning-new-childern-after-terminate-on-linux-python2-7.
# This is not necessary when running under Python 3, but to keep 2.7 compatibility, I'm leaving it in.
terminating = Event()
# HarmonySearchResults is a struct-like object that we'll use to attach the results of the search.
# namedtuples are lightweight and trivial to extend should more results be desired in the future. Right now, we're just
# keeping track of the total elapsed clock time, the best harmony found, the fitness for that harmony, and the harmony memory,
# which allows you to see the top harmonies.
HarmonySearchResults = namedtuple('HarmonySearchResults', ['elapsed_time', 'best_harmony', 'best_fitness', 'harmony_memories', 'harmony_histories'])
def harmony_search(objective_function, num_processes, num_iterations, initial_harmonies=None):
"""
Here, we use multiprocessing.Pool to do multiple harmony searches simultaneously. Since HS is stochastic (unless random_seed is set),
multiple runs can find different results. We run the specified number of iterations on the specified number of processes and return
an instance of HarmonySearchResults.
"""
pool = Pool(num_processes)
try:
start = datetime.now()
pool_results = [pool.apply_async(worker, args=(objective_function, initial_harmonies,)) for i in range(num_iterations)]
pool.close() # no more tasks will be submitted to the pool
pool.join() # wait for all tasks to finish before moving on
end = datetime.now()
elapsed_time = end - start
# find best harmony from all iterations
best_harmony = None
best_fitness = float('-inf') if objective_function.maximize() else float('+inf')
harmony_memories = list()
harmony_histories = list()
for result in pool_results:
harmony, fitness, harmony_memory, harmony_history = result.get() # multiprocessing.pool.AsyncResult is returned for each process, so we need to call get() to pull out the value
if (objective_function.maximize() and fitness > best_fitness) or (not objective_function.maximize() and fitness < best_fitness):
best_harmony = harmony
best_fitness = fitness
harmony_memories.append(harmony_memory)
harmony_histories.append(harmony_history)
return HarmonySearchResults(elapsed_time=elapsed_time, best_harmony=best_harmony, best_fitness=best_fitness,\
harmony_memories=harmony_memories, harmony_histories=harmony_histories)
except KeyboardInterrupt:
pool.terminate()
def worker(objective_function, initial_harmonies=None):
"""
This is just a dummy function to make multiprocessing work with a class. It also checks/sets the global multiprocessing.Event to prevent
new processes from starting work on a KeyboardInterrupt.
"""
try:
if not terminating.is_set():
hs = HarmonySearch(objective_function)
return hs.run(initial_harmonies=initial_harmonies)
except KeyboardInterrupt:
terminating.set() # set the Event to true to prevent the other processes from doing any work
class HarmonySearch(object):
"""
This class implements the harmony search (HS) global optimization algorithm. In general, what you'll do is this:
1. Implement an objective function that inherits from ObjectiveFunctionInterface.
2. Initialize HarmonySearch with this objective function (e.g., hs = HarmonySearch(objective_function)).
3. Run HarmonySearch (e.g., results = hs.run()).
"""
def __init__(self, objective_function):
"""
Initialize HS with the specified objective function. Note that this objective function must implement ObjectiveFunctionInterface.
"""
self._obj_fun = objective_function
def run(self, initial_harmonies=None):
"""
This is the main HS loop. It initializes the harmony memory and then continually generates new harmonies
until the stopping criterion (max_imp iterations) is reached.
"""
# set optional random seed
if self._obj_fun.use_random_seed():
random.seed(self._obj_fun.get_random_seed())
# harmony_memory stores the best hms harmonies
self._harmony_memory = list()
# harmony_history stores all hms harmonies every nth improvisations (i.e., one 'generation')
self._harmony_history = list()
# fill harmony_memory using random parameter values by default, but with initial_harmonies if provided
self._initialize(initial_harmonies)
# create max_imp improvisations
generation = 0
num_imp = 0
while(num_imp < self._obj_fun.get_max_imp()):
# generate new harmony
harmony = list()
for i in range(0, self._obj_fun.get_num_parameters()):
if random.random() < self._obj_fun.get_hmcr():
self._memory_consideration(harmony, i)
if random.random() < self._obj_fun.get_par():
self._pitch_adjustment(harmony, i)
else:
self._random_selection(harmony, i)
fitness = self._obj_fun.get_fitness(harmony)
self._update_harmony_memory(harmony, fitness)
num_imp += 1
# save harmonies every nth improvisations (i.e., one 'generation')
if num_imp % self._obj_fun.get_hms() == 0:
generation += 1
harmony_list = {'gen': generation, 'harmonies': copy.deepcopy(self._harmony_memory)}
self._harmony_history.append(harmony_list)
# return best harmony
best_harmony = None
best_fitness = float('-inf') if self._obj_fun.maximize() else float('+inf')
for harmony, fitness in self._harmony_memory:
if (self._obj_fun.maximize() and fitness > best_fitness) or (not self._obj_fun.maximize() and fitness < best_fitness):
best_harmony = harmony
best_fitness = fitness
return best_harmony, best_fitness, self._harmony_memory, self._harmony_history
def _initialize(self, initial_harmonies=None):
"""
Initialize harmony_memory, the matrix (list of lists) containing the various harmonies (solution vectors). Note
that we aren't actually doing any matrix operations, so a library like NumPy isn't necessary here. The matrix
merely stores previous harmonies.
If harmonies are provided, then use them instead of randomly initializing them.
Populate harmony_history with initial harmony memory.
"""
if initial_harmonies is not None:
# verify that the initial harmonies are provided correctly
if len(initial_harmonies) != self._obj_fun.get_hms():
                raise ValueError('Number of initial harmonies does not equal the harmony memory size.')
num_parameters = self._obj_fun.get_num_parameters()
for i in range(len(initial_harmonies)):
num_parameters_initial_harmonies = len(initial_harmonies[i])
if num_parameters_initial_harmonies != num_parameters:
raise ValueError('Number of parameters in initial harmonies does not match that defined.')
else:
initial_harmonies = list()
for i in range(0, self._obj_fun.get_hms()):
harmony = list()
for j in range(0, self._obj_fun.get_num_parameters()):
self._random_selection(harmony, j)
initial_harmonies.append(harmony)
for i in range(0, self._obj_fun.get_hms()):
fitness = self._obj_fun.get_fitness(initial_harmonies[i])
self._harmony_memory.append((initial_harmonies[i], fitness))
harmony_list = {'gen': 0, 'harmonies': self._harmony_memory}
self._harmony_history.append(harmony_list)
def _random_selection(self, harmony, i):
"""
Choose a note according to get_value(). Remember that even if a note is not variable, get_value() must still
return a valid value.
"""
harmony.append(self._obj_fun.get_value(i))
def _memory_consideration(self, harmony, i):
"""
Randomly choose a note previously played.
"""
memory_index = random.randint(0, self._obj_fun.get_hms() - 1)
harmony.append(self._harmony_memory[memory_index][0][i])
def _pitch_adjustment(self, harmony, i):
"""
If variable, randomly adjust the pitch up or down by some amount. This is the only place in the algorithm where there
is an explicit difference between continuous and discrete variables.
The probability of adjusting the pitch either up or down is fixed at 0.5. The maximum pitch adjustment proportion (mpap)
and maximum pitch adjustment index (mpai) determine the maximum amount the pitch may change for continuous and discrete
variables, respectively.
        For example, suppose that a coin flip decides the pitch will be adjusted down, and that mpap is set to 0.25.
        This means the pitch can be lowered by at most 25% of the difference between the current pitch and the lower
        bound. mpai functions similarly, only it relies on the indices of the possible values instead.
"""
if(self._obj_fun.is_variable(i)):
if self._obj_fun.is_discrete(i):
current_index = self._obj_fun.get_index(i, harmony[i])
# discrete variable
if random.random() < 0.5:
# adjust pitch down
harmony[i] = self._obj_fun.get_value(i, current_index - random.randint(0, min(self._obj_fun.get_mpai(), current_index)))
else:
# adjust pitch up
harmony[i] = self._obj_fun.get_value(i, current_index + random.randint(0, min(self._obj_fun.get_mpai(), self._obj_fun.get_num_discrete_values(i) - current_index - 1)))
else:
# continuous variable
if random.random() < 0.5:
# adjust pitch down
harmony[i] -= (harmony[i] - self._obj_fun.get_lower_bound(i)) * random.random() * self._obj_fun.get_mpap()
else:
# adjust pitch up
harmony[i] += (self._obj_fun.get_upper_bound(i) - harmony[i]) * random.random() * self._obj_fun.get_mpap()
def _update_harmony_memory(self, considered_harmony, considered_fitness):
"""
Update the harmony memory if necessary with the given harmony. If the given harmony is better than the worst
harmony in memory, replace it. This function doesn't allow duplicate harmonies in memory.
"""
if (considered_harmony, considered_fitness) not in self._harmony_memory:
worst_index = None
worst_fitness = float('+inf') if self._obj_fun.maximize() else float('-inf')
for i, (harmony, fitness) in enumerate(self._harmony_memory):
if (self._obj_fun.maximize() and fitness < worst_fitness) or (not self._obj_fun.maximize() and fitness > worst_fitness):
worst_index = i
worst_fitness = fitness
if (self._obj_fun.maximize() and considered_fitness > worst_fitness) or (not self._obj_fun.maximize() and considered_fitness < worst_fitness):
self._harmony_memory[worst_index] = (considered_harmony, considered_fitness)
|
bsd-3-clause
| -769,342,194,975,787,300 | 53.592 | 189 | 0.654015 | false |
vlegoff/tsunami
|
src/primaires/connex/contextes/connexion/confirmer_pass.py
|
1
|
1972
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 DAVY Guillaume
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from primaires.connex.contextes.commun.confirmer_pass import ConfirmerPass
class ConfirmerPassConnex(ConfirmerPass):
nom = "connex:connexion:confirmer_pass"
def __init__(self, pere):
"""Constructeur du contexte"""
ConfirmerPass.__init__(self, pere)
self.opts.rci_ctx_prec = "connex:connexion:choisir_pass"
self.suivant = "connex:connexion:choix_personnages"
|
bsd-3-clause
| -1,355,478,154,387,710,700 | 49.564103 | 79 | 0.763692 | false |
Labonneguigue/Machine-Learning
|
Machine Learning Data Repository /Facebook_metrics/hw1_regression.py
|
1
|
1970
|
import csv
import sys
import numpy
from numpy import genfromtxt
from numpy.linalg import inv
numpy.set_printoptions(threshold=sys.maxsize)  # print full arrays (numpy.nan is not a valid threshold)
Lambda = float(sys.argv[1])
Sigma2 = float(sys.argv[2])
print("Lambda = ")
print(Lambda)
print("Sigma2 = ")
print(Sigma2)
X_train = genfromtxt('X_train.csv', delimiter=',')
print("X_train = ")
print(X_train)
y_train = genfromtxt('y_train.csv', delimiter=',')
print("y_train = ")
print(y_train)
X_test = genfromtxt('X_test.csv', delimiter=',')
print("X_test = ")
print(X_test)
# Get the number of columns -> size of identity_matrix
# Shape returns a tuple (rows, columns)
columns = X_train.shape[1]
identity_matrix = numpy.identity(columns)
LambdaDotIdentityMatrix = numpy.multiply(identity_matrix,Lambda)
print("LambdaDotIdentityMatrix = ")
print(LambdaDotIdentityMatrix)
XTransposeX = numpy.transpose(X_train).dot(X_train)
print("XTransposeX")
print(XTransposeX)
Inverse = inv(LambdaDotIdentityMatrix+XTransposeX)
print("Inverse")
print(Inverse)
XtransposeY = numpy.transpose(X_train).dot(y_train)
print("XtransposeY")
print(XtransposeY)
wRR = Inverse.dot(XtransposeY)
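# Note (added for clarity): the line above is the closed-form ridge-regression solution
# wRR = (lambda*I + X^T X)^(-1) X^T y; sigma2 is read from the command line but, in the
# code shown here, only appears in the name of the second output file.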
print("wRR")
print(wRR)
nameOfThePart1File = "wRR_"+str(Lambda)+".csv"
print(nameOfThePart1File)
with open(nameOfThePart1File, 'wb') as csvfile:
spamwriter = csv.writer(csvfile, delimiter='\n', quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow(numpy.transpose(wRR))
nameOfThePart2File = "active_"+str(Lambda)+"_"+str(Sigma2)+".csv"
print(nameOfThePart2File)
with open(nameOfThePart2File, 'wb') as csvfile2:
spamwriter2 = csv.writer(csvfile2, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter2.writerow(wRR)
# print(numpy.transpose(X_train))
# f = open ( 'input.txt' , 'r')
# l = [ map(int,line.split(',')) for line in f ]
# print l
# with fileinput.input(files=('X_train.csv')) as f:
# for line in f:
# process(line)
#python hw1_regression.py lambda sigma2 y_train.csv X_test.csv
|
mit
| 3,158,200,066,098,003,000 | 23.02439 | 95 | 0.718782 | false |
nickabattista/IB2d
|
pyIB2d/Examples/Rayleigh_Taylor_Instability/please_Compute_External_Forcing.py
|
1
|
6723
|
#-------------------------------------------------------------------------------------------------------------------#
#
# IB2d is an Immersed Boundary Code (IB) for solving fully coupled non-linear
# fluid-structure interaction models. This version of the code is based off of
# Peskin's Immersed Boundary Method Paper in Acta Numerica, 2002.
#
# Author: Nicholas A. Battista
# Email: nickabattista@gmail.com
# Date Created: May 27th, 2015
# Institution: UNC-CH
#
# This code is capable of creating Lagrangian Structures using:
# 1. Springs
# 2. Beams (*torsional springs)
# 3. Target Points
# 4. Muscle-Model (combined Force-Length-Velocity model, "HIll+(Length-Tension)")
#
# One is able to update those Lagrangian Structure parameters, e.g., spring constants, resting lengths, etc.
#
# There are a number of built in Examples, mostly used for teaching purposes.
#
# If you would like us to add a specific muscle model, please let Nick (nickabattista@gmail.com) know.
#
#--------------------------------------------------------------------------------------------------------------------#
import numpy as np
################################################################################################################
#
# def: Computes the components of the force term in Navier-Stokes from
# arbitrary external forces, i.e., external force to get desired
# velocity profile on fluid grid
#
################################################################################################################
def please_Compute_External_Forcing(dt,current_time,x,y, grid_Info, uX, uY, first, inds):
#
# dt: time-step
# current_time: Current time of simulation (in seconds)
# x: x-Eulerian pts
# y: y-Eulerian pts
# grid_Info: holds lots of geometric pieces about grid / simulations
# uX: x-Velocity on Eulerian Grid
# uY: y-Velocity on Eulerian Grid
# Grid Info #
Nx = grid_Info[0] # # of Eulerian pts. in x-direction
Ny = grid_Info[1] # # of Eulerian pts. in y-direction
Lx = grid_Info[2] # Length of Eulerian grid in x-coordinate
Ly = grid_Info[3] # Length of Eulerian grid in y-coordinate
dx = grid_Info[4] # Spatial-size in x
dy = grid_Info[5] # Spatial-size in y
supp = grid_Info[6] # Delta-def support
Nb = grid_Info[7] # # of Lagrangian pts.
ds = grid_Info[8] # Lagrangian spacing
# Compute Where You Want to Apply Force
xMin = 0.2
xMax = 0.3
yMin = 0.48
yMax = 0.52
# Stiffness for Arbitrary External Force to Fluid Grid
kStiff = 1e4
# Width of Channel
w = 0.3
# Max Velocity Desired
uMax = 10.0
if first == 1:
inds = give_Me_Indices_To_Apply_Force(x,y,xMin,xMax,yMin,yMax)
first = 0
# Compute External Forces from Desired Target Velocity
fx, fy = give_Me_Velocity_Target_External_Force_Density(current_time,dx,dy,x,y,Nx,Ny,Lx,Ly,uX,uY,kStiff,w,uMax,inds)
# Compute Total External Forces
Fx = fx
Fy = fy
return (Fx, Fy, first, inds)
######################################################################################
#
# def: computes indices for exerting forces in specified places on fluid grid
#
######################################################################################
def give_Me_Indices_To_Apply_Force(x,y,xMin,xMax,yMin,yMax):
j=0
noMinYet = 1
while noMinYet:
if ( x[j] >= xMin ):
iX_min = j
noMinYet = 0
j=j+1
j=x.size - 1
noMaxYet = 1
while noMaxYet:
if ( x[j] <= xMax ):
iX_max = j
noMaxYet = 0
j=j-1
j=0
noMinYet = 1
while noMinYet:
if ( y[j] >= yMin ):
iY_min = j
noMinYet = 0
j=j+1
j=y.size - 1
noMaxYet = 1
while noMaxYet:
if ( y[j] <= yMax ):
iY_max = j
noMaxYet = 0
j=j-1
iX_Vec = np.arange(iX_min,iX_max+1,1)
iY_Vec = np.arange(iY_min,iY_max+1,1)
n = 0
inds = np.zeros((len(iX_Vec)*len(iY_Vec),2))
for i in range(0,iX_Vec.size):
for j in range(0,iY_Vec.size):
inds[n,0] = iX_Vec[i]
inds[n,1] = iY_Vec[j]
n = n+1
return inds
######################################################################################
#
# def: computes the External Force Densities!
#
######################################################################################
def give_Me_Velocity_Target_External_Force_Density(t,dx,dy,x,y,Nx,Ny,Lx,Ly,uX,uY,kStiff,w,Umax,inds):
# t: current time in simulation
# Nx: # of nodes in x-direction on Eulerian grid
# Ny: # of nodes in y-direction on Eulerian grid
# uX: x-Velocity on Eulerian grid
# uY: y-Velocity on Eulerian grid
# kStiff: stiffness parameter
# inds: indices on the fluid grid for where to apply the arbitrary external force
fx = np.zeros((Ny,Nx)) # Initialize storage for x-force density from EXTERNAL FORCES
fy = np.zeros((Ny,Nx)) # Initialize storage for y-force density from EXTERNAL FORCES
if (t<0.01):
for n in range(0,inds.shape[0]):
i = int(inds[n,0])
j = int(inds[n,1])
uX_Tar,uY_Tar = please_Give_Target_Velocity(t,dx,dy,x,y,Lx,Ly,i,j,w,Umax)
fx[j,i] = fx[j,i] - kStiff*( uX[j,i] - uX_Tar )
fy[j,i] = fy[j,i] - kStiff*( uY[j,i] - uY_Tar )
fx_exts = fx
fy_exts = fy
return (fx_exts, fy_exts)
# MIGHT NOT NEED THESE!
#fx_exts = fx/ds^2
#fy_exts = fy/ds^2
########################################################################################################
#
# def: computes the Target Velocity Profile (MODEL DEPENDENT)
#
########################################################################################################
def please_Give_Target_Velocity(t,dx,dy,xGrid,yGrid,Lx,Ly,i,j,w,Umax):
# t: current time in simulation
# dx: x-Grid spacing
# dy: y-Grid spacing
# xGrid: vector of xPts in Eulerian grid
# yGrid: vector of yPts in Eulerian grid
# Lx: x-Length of Eulerian Grid
# Ly: y-Length of Eulerian Grid
# i: ith component in x-Grid
# j: jth component in y-Grid
# w: width of Channel
# Umax: maximum velocity
    #y = yGrid[j] # y-Value considered
    uX_Tar = 0                       # no target velocity in the x-direction
    uY_Tar = -Umax * (np.tanh(20*t)) # downward (negative y) target velocity that ramps toward -Umax
return (uX_Tar, uY_Tar)
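# Note (added for clarity, not part of the original file): np.tanh(20*t) reaches ~0.96
# by t = 0.1 s, so uY_Tar ramps smoothly from 0 toward -Umax at the start of the run,
# while the external forcing itself is only applied for t < 0.01 s
# (see give_Me_Velocity_Target_External_Force_Density above).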
|
gpl-3.0
| 4,047,843,990,817,786,400 | 28.752212 | 120 | 0.501264 | false |
swprojects/Serial-Sequence-Creator
|
serialfunctions.py
|
1
|
1796
|
import logging
import serial
import sys
import time
import serial.tools.list_ports
#------------------------------------------------#
# serial connection functions
#------------------------------------------------#
def OpenSerial(port, baudrate, bytesize, stopbits, parity, flowcontrol, timeout):
# configure the serial connections (the parameters differs on the device you are connecting to)
if parity == "None":
parity = serial.PARITY_NONE
elif parity == "Odd":
parity = serial.PARITY_ODD
elif parity == "Even":
parity = serial.PARITY_EVEN
elif parity == "MARK":
parity = serial.PARITY_MARK
elif parity == "SPACE":
parity = serial.PARITY_SPACE
ser = serial.Serial()
ser.baudrate = int(baudrate)
ser.port = port
ser.bytesize = int(bytesize)
ser.parity = parity
ser.stopbits = int(stopbits)
ser.timeout = int(timeout)
if flowcontrol.lower() == "xon/xoff":
ser.xonxoff = 1
elif flowcontrol.lower() == "rts/cts":
ser.rtscts = 1
elif flowcontrol.lower() == "dsr/dtr":
ser.dsrdtr = 1
if ser.isOpen() is False:
ser.close()
try:
ser.open()
except:
# logging(" cannot open serial port ")
return False
return ser
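# Illustrative usage sketch (added for clarity, not part of the original module); the
# port name, settings and the "*IDN?" query below are assumptions for the example only:
#
#   ser = OpenSerial("COM3", baudrate=9600, bytesize=8, stopbits=1,
#                    parity="None", flowcontrol="None", timeout=1)
#   if ser:
#       print(SendToSerial(ser, "*IDN?"))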
def SendToSerial(ser, input):
end = "\r\n"
# ser.write(bytes(end, "utf8"))
# ser.write(bytes("*CLS" +end, "utf8"))
# time.sleep(0.5)
# ser = self.psu_connection
ser.write(bytes(input + end, "utf8"))
time.sleep(0.5)
out = ""
while ser.inWaiting() > 0:
# print(ser.read(1))
try:
out += str(ser.read(1), "utf8")
except UnicodeDecodeError:
print(UnicodeDecodeError)
return out
|
mit
| 518,038,412,185,088,100 | 24.671429 | 99 | 0.550668 | false |
runt18/mojo
|
mojo/devtools/common/android_stack_parser/symbol.py
|
1
|
17737
|
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for looking up symbolic debugging information.
The information can include symbol names, offsets, and source locations.
"""
import glob
import itertools
import os
import re
import subprocess
import zipfile
NDK_DIR = ""
BUILD_DIRS = []
SYMBOLS_DIR = ""
ARCH = "arm"
TOOLCHAIN_INFO = None
def Uname():
"""'uname' for constructing prebuilt/<...> and out/host/<...> paths."""
uname = os.uname()[0]
proc = os.uname()[-1]
if uname == "Darwin":
if proc == "i386":
return "darwin-x86"
elif proc == "x86_64":
return "darwin-x86_64"
return "darwin-ppc"
if uname == "Linux":
if proc == "i386":
return "linux-x86"
else:
return "linux-x86_64"
return uname
def ToolPath(tool, toolchain_info=None):
"""Return a full qualified path to the specified tool"""
# ToolPath looks for the tools in the completely incorrect directory.
# This looks in the checked in android_tools.
if ARCH == "arm":
toolchain_source = "arm-linux-androideabi-4.9"
toolchain_prefix = "arm-linux-androideabi"
elif ARCH == "arm64":
toolchain_source = "aarch64-linux-android-4.9"
toolchain_prefix = "aarch64-linux-android"
elif ARCH == "x86":
toolchain_source = "x86-4.9"
toolchain_prefix = "i686-linux-android"
elif ARCH == "x86_64" or ARCH == "x64":
toolchain_source = "x86_64-4.9"
toolchain_prefix = "x86_64-linux-android"
elif ARCH == "mips":
toolchain_source = "mipsel-linux-android-4.9"
toolchain_prefix = "mipsel-linux-android"
else:
raise Exception("Could not find tool chain")
toolchain_subdir = ("toolchains/%s/prebuilt/%s/bin" % (
toolchain_source, Uname()))
return os.path.join(NDK_DIR,
toolchain_subdir,
toolchain_prefix + "-" + tool)
def FindToolchain():
"""Look for the latest available toolchain
Args:
None
Returns:
A pair of strings containing toolchain label and target prefix.
"""
global TOOLCHAIN_INFO
if TOOLCHAIN_INFO is not None:
return TOOLCHAIN_INFO
## Known toolchains, newer ones in the front.
gcc_version = "4.9"
if ARCH == "arm64":
known_toolchains = [
("aarch64-linux-android-" + gcc_version, "aarch64", "aarch64-linux-android")
]
elif ARCH == "arm":
known_toolchains = [
("arm-linux-androideabi-" + gcc_version, "arm", "arm-linux-androideabi")
]
elif ARCH =="x86":
known_toolchains = [
("x86-" + gcc_version, "x86", "i686-linux-android")
]
elif ARCH =="x86_64" or ARCH =="x64":
known_toolchains = [
("x86_64-" + gcc_version, "x86_64", "x86_64-linux-android")
]
elif ARCH == "mips":
known_toolchains = [
("mipsel-linux-android-" + gcc_version, "mips", "mipsel-linux-android")
]
else:
known_toolchains = []
# Look for addr2line to check for valid toolchain path.
for (label, platform, target) in known_toolchains:
toolchain_info = (label, platform, target);
if os.path.exists(ToolPath("addr2line", toolchain_info)):
TOOLCHAIN_INFO = toolchain_info
print "Using toolchain from :" + ToolPath("", TOOLCHAIN_INFO)
return toolchain_info
raise Exception("Could not find tool chain")
def GetAapt():
"""Returns the path to aapt.
Args:
None
Returns:
the pathname of the 'aapt' executable.
"""
sdk_home = os.path.join('third_party', 'android_tools', 'sdk')
sdk_home = os.environ.get('SDK_HOME', sdk_home)
aapt_exe = glob.glob(os.path.join(sdk_home, 'build-tools', '*', 'aapt'))
if not aapt_exe:
return None
return sorted(aapt_exe, key=os.path.getmtime, reverse=True)[0]
def ApkMatchPackageName(aapt, apk_path, package_name):
"""Returns true the APK's package name matches package_name.
Args:
aapt: pathname for the 'aapt' executable.
apk_path: pathname of the APK file.
package_name: package name to match.
Returns:
True if the package name matches or aapt is None, False otherwise.
"""
if not aapt:
# Allow false positives
return True
aapt_output = subprocess.check_output(
[aapt, 'dump', 'badging', apk_path]).split('\n')
package_name_re = re.compile(r'package: .*name=\'(\S*)\'')
for line in aapt_output:
match = package_name_re.match(line)
if match:
return package_name == match.group(1)
return False
def PathListJoin(prefix_list, suffix_list):
"""Returns each prefix in prefix_list joined with each suffix in suffix list.
Args:
prefix_list: list of path prefixes.
suffix_list: list of path suffixes.
Returns:
List of paths each of which joins a prefix with a suffix.
"""
return [
os.path.join(prefix, suffix)
for prefix in prefix_list for suffix in suffix_list ]
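# Illustrative example (added for clarity, not part of the original file), on a POSIX host:
#   PathListJoin(['out/Debug', 'out/Release'], ['lib'])
#   -> ['out/Debug/lib', 'out/Release/lib']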
def GetCandidates(filepart, candidate_fun, relative_dirs=None):
"""Returns a list of candidate filenames.
Args:
filepart: the file part of the pathname.
candidate_fun: a function to apply to each candidate, returns a list.
relative_dirs: a list of relative directory names to search from.
Returns:
A list of candidate files ordered by modification time, newest first.
"""
candidates = list(BUILD_DIRS)
if relative_dirs:
candidates = PathListJoin(candidates, relative_dirs)
candidates = PathListJoin(candidates, [filepart])
candidates = list(
itertools.chain.from_iterable(map(candidate_fun, candidates)))
candidates = sorted(candidates, key=os.path.getmtime, reverse=True)
return candidates
def GetCandidateApks():
"""Returns a list of APKs which could contain the library.
Args:
None
Returns:
list of APK filename which could contain the library.
"""
return GetCandidates('*.apk', glob.glob, relative_dirs=['apks'])
def GetCrazyLib(apk_filename):
"""Returns the name of the first crazy library from this APK.
Args:
apk_filename: name of an APK file.
Returns:
Name of the first library which would be crazy loaded from this APK.
"""
zip_file = zipfile.ZipFile(apk_filename, 'r')
for filename in zip_file.namelist():
match = re.match('lib/[^/]*/crazy.(lib.*[.]so)', filename)
if match:
return match.group(1)
def GetMatchingApks(device_apk_name):
"""Find any APKs which match the package indicated by the device_apk_name.
Args:
device_apk_name: name of the APK on the device.
Returns:
A list of APK filenames which could contain the desired library.
"""
match = re.match('(.*)-[0-9]+[.]apk$', device_apk_name)
if not match:
return None
package_name = match.group(1)
return filter(
lambda candidate_apk:
ApkMatchPackageName(GetAapt(), candidate_apk, package_name),
GetCandidateApks())
def MapDeviceApkToLibrary(device_apk_name):
"""Provide a library name which corresponds with device_apk_name.
Args:
device_apk_name: name of the APK on the device.
Returns:
Name of the library which corresponds to that APK.
"""
matching_apks = GetMatchingApks(device_apk_name)
for matching_apk in matching_apks:
crazy_lib = GetCrazyLib(matching_apk)
if crazy_lib:
return crazy_lib
def GetCandidateLibraries(library_name):
"""Returns a list of candidate library filenames.
Args:
library_name: basename of the library to match.
Returns:
A list of matching library filenames for library_name.
"""
return GetCandidates(
library_name,
lambda filename: filter(os.path.exists, [filename]))
def TranslatePathFromDeviceToLocal(lib):
"""Maps a path as seen on the device to a path on the local file system
containing symbols.
Args:
lib: library (or executable) pathname from device.
"""
# SymbolInformation(lib, addr) receives lib that is either a basename or
# the path from symbols root to the symbols file. This needs to be translated
# to point to the correct .so path. If the user doesn't explicitly specify
# which directory to use, then use the most recently updated one in one of
# the known directories.
library_name = os.path.basename(lib)
# The filename in the stack trace maybe an APK name rather than a library
# name. This happens when the library was loaded directly from inside the
# APK. If this is the case we try to figure out the library name by looking
# for a matching APK file and finding the name of the library in contains.
# The name of the APK file on the device is of the form
# <package_name>-<number>.apk. The APK file on the host may have any name
# so we look at the APK badging to see if the package name matches.
if re.search('-[0-9]+[.]apk$', library_name):
mapping = MapDeviceApkToLibrary(library_name)
if mapping:
library_name = mapping
candidate_libraries = GetCandidateLibraries(library_name)
return (candidate_libraries[0] if candidate_libraries else
os.path.join(SYMBOLS_DIR, lib))
def SymbolInformation(lib, addr, get_detailed_info):
"""Look up symbol information about an address.
Args:
lib: library (or executable) pathname containing symbols
addr: string hexidecimal address
Returns:
A list of the form [(source_symbol, source_location,
object_symbol_with_offset)].
If the function has been inlined then the list may contain
more than one element with the symbols for the most deeply
nested inlined location appearing first. The list is
always non-empty, even if no information is available.
Usually you want to display the source_location and
object_symbol_with_offset from the last element in the list.
"""
lib = TranslatePathFromDeviceToLocal(lib)
info = SymbolInformationForSet(lib, set([addr]), get_detailed_info)
return (info and info.get(addr)) or [(None, None, None)]
def SymbolInformationForSet(lib, unique_addrs, get_detailed_info):
"""Look up symbol information for a set of addresses from the given library.
Args:
lib: library (or executable) pathname containing symbols
unique_addrs: set of hexidecimal addresses
Returns:
A dictionary of the form {addr: [(source_symbol, source_location,
object_symbol_with_offset)]} where each address has a list of
associated symbols and locations. The list is always non-empty.
If the function has been inlined then the list may contain
more than one element with the symbols for the most deeply
nested inlined location appearing first. The list is
always non-empty, even if no information is available.
Usually you want to display the source_location and
object_symbol_with_offset from the last element in the list.
"""
if not lib:
return None
addr_to_line = CallAddr2LineForSet(lib, unique_addrs)
if not addr_to_line:
return None
if get_detailed_info:
addr_to_objdump = CallObjdumpForSet(lib, unique_addrs)
if not addr_to_objdump:
return None
else:
addr_to_objdump = dict((addr, ("", 0)) for addr in unique_addrs)
result = {}
for addr in unique_addrs:
source_info = addr_to_line.get(addr)
if not source_info:
source_info = [(None, None)]
if addr in addr_to_objdump:
(object_symbol, object_offset) = addr_to_objdump.get(addr)
object_symbol_with_offset = FormatSymbolWithOffset(object_symbol,
object_offset)
else:
object_symbol_with_offset = None
result[addr] = [(source_symbol, source_location, object_symbol_with_offset)
for (source_symbol, source_location) in source_info]
return result
class MemoizedForSet(object):
def __init__(self, fn):
self.fn = fn
self.cache = {}
def __call__(self, lib, unique_addrs):
lib_cache = self.cache.setdefault(lib, {})
no_cache = filter(lambda x: x not in lib_cache, unique_addrs)
if no_cache:
lib_cache.update((k, None) for k in no_cache)
result = self.fn(lib, no_cache)
if result:
lib_cache.update(result)
return dict((k, lib_cache[k]) for k in unique_addrs if lib_cache[k])
@MemoizedForSet
def CallAddr2LineForSet(lib, unique_addrs):
"""Look up line and symbol information for a set of addresses.
Args:
lib: library (or executable) pathname containing symbols
unique_addrs: set of string hexidecimal addresses look up.
Returns:
A dictionary of the form {addr: [(symbol, file:line)]} where
each address has a list of associated symbols and locations
or an empty list if no symbol information was found.
If the function has been inlined then the list may contain
more than one element with the symbols for the most deeply
nested inlined location appearing first.
"""
if not lib or not os.path.isfile(lib):
return None
(label, platform, target) = FindToolchain()
cmd = [ToolPath("addr2line"), "--functions", "--inlines",
"--demangle", "--exe=" + lib]
child = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
result = {}
addrs = sorted(unique_addrs)
for addr in addrs:
child.stdin.write("0x%s\n" % addr)
child.stdin.flush()
records = []
first = True
while True:
symbol = child.stdout.readline().strip()
if symbol == "??":
symbol = None
location = child.stdout.readline().strip()
if location == "??:0":
location = None
if symbol is None and location is None:
break
records.append((symbol, location))
if first:
# Write a blank line as a sentinel so we know when to stop
# reading inlines from the output.
# The blank line will cause addr2line to emit "??\n??:0\n".
child.stdin.write("\n")
first = False
result[addr] = records
child.stdin.close()
child.stdout.close()
return result
def StripPC(addr):
"""Strips the Thumb bit a program counter address when appropriate.
Args:
addr: the program counter address
Returns:
The stripped program counter address.
"""
global ARCH
if ARCH == "arm":
return addr & ~1
return addr
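# Illustrative example (added for clarity, not part of the original file): with
# ARCH == "arm", StripPC(0x46d3) returns 0x46d2 (the Thumb bit is cleared); for
# all other architectures the address is returned unchanged.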
@MemoizedForSet
def CallObjdumpForSet(lib, unique_addrs):
"""Use objdump to find out the names of the containing functions.
Args:
lib: library (or executable) pathname containing symbols
unique_addrs: set of string hexidecimal addresses to find the functions for.
Returns:
A dictionary of the form {addr: (string symbol, offset)}.
"""
if not lib:
return None
  symbols = SYMBOLS_DIR + lib
  if not os.path.exists(symbols):
    return None
result = {}
# Function lines look like:
# 000177b0 <android::IBinder::~IBinder()+0x2c>:
# We pull out the address and function first. Then we check for an optional
# offset. This is tricky due to functions that look like "operator+(..)+0x2c"
func_regexp = re.compile("(^[a-f0-9]*) \<(.*)\>:$")
offset_regexp = re.compile("(.*)\+0x([a-f0-9]*)")
# A disassembly line looks like:
# 177b2: b510 push {r4, lr}
  asm_regexp = re.compile("(^[ a-f0-9]*):[ a-f0-9]*.*$")
for target_addr in unique_addrs:
start_addr_dec = str(StripPC(int(target_addr, 16)))
stop_addr_dec = str(StripPC(int(target_addr, 16)) + 8)
cmd = [ToolPath("objdump"),
"--section=.text",
"--demangle",
"--disassemble",
"--start-address=" + start_addr_dec,
"--stop-address=" + stop_addr_dec,
symbols]
current_symbol = None # The current function symbol in the disassembly.
current_symbol_addr = 0 # The address of the current function.
stream = subprocess.Popen(cmd, stdout=subprocess.PIPE).stdout
for line in stream:
# Is it a function line like:
# 000177b0 <android::IBinder::~IBinder()>:
components = func_regexp.match(line)
if components:
# This is a new function, so record the current function and its address.
current_symbol_addr = int(components.group(1), 16)
current_symbol = components.group(2)
# Does it have an optional offset like: "foo(..)+0x2c"?
components = offset_regexp.match(current_symbol)
if components:
current_symbol = components.group(1)
offset = components.group(2)
if offset:
current_symbol_addr -= int(offset, 16)
# Is it an disassembly line like:
# 177b2: b510 push {r4, lr}
components = asm_regexp.match(line)
if components:
addr = components.group(1)
i_addr = int(addr, 16)
i_target = StripPC(int(target_addr, 16))
if i_addr == i_target:
result[target_addr] = (current_symbol, i_target - current_symbol_addr)
stream.close()
return result
def CallCppFilt(mangled_symbol):
cmd = [ToolPath("c++filt")]
process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
process.stdin.write(mangled_symbol)
process.stdin.write("\n")
process.stdin.close()
demangled_symbol = process.stdout.readline().strip()
process.stdout.close()
return demangled_symbol
def FormatSymbolWithOffset(symbol, offset):
if offset == 0:
return symbol
return "%s+%d" % (symbol, offset)
|
bsd-3-clause
| 6,496,259,129,740,246,000 | 30.50444 | 82 | 0.66973 | false |
supermurat/hamsi-manager
|
SpecialTools/CharacterEncoding.py
|
1
|
4773
|
# This file is part of HamsiManager.
#
# Copyright (c) 2010 - 2015 Murat Demir <mopened@gmail.com>
#
# Hamsi Manager is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Hamsi Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HamsiManager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from Core import Universals as uni
from Core.MyObjects import *
class CharacterEncoding(MWidget):
def __init__(self, _parent):
MWidget.__init__(self, _parent)
self.specialTools = _parent
self.cckbCorrectText = MCheckBox(translate("SpecialTools", "Character Encoding"))
lblColumns = MLabel(translate("SpecialTools", "Column: "))
lblSourceValues = MLabel(translate("SpecialTools", "Source Values : "))
lblSourceEncoding = MLabel(translate("SpecialTools", "Source Encoding : "))
lblDestinationEncoding = MLabel(translate("SpecialTools", "Destination Encoding : "))
self.columns = MyComboBox()
self.cbSourceEncoding = MComboBox()
self.cbSourceEncoding.addItems(uni.getCharSets())
self.cbDestinationEncoding = MComboBox()
self.cbDestinationEncoding.addItems(uni.getCharSets())
self.cbSourceEncoding.setCurrentIndex(self.cbSourceEncoding.findText(uni.MySettings["fileSystemEncoding"]))
self.cbDestinationEncoding.setCurrentIndex(
self.cbDestinationEncoding.findText(uni.MySettings["fileSystemEncoding"]))
self.cbSourceValues = MComboBox()
self.cbSourceValues.addItems([translate("Options", "Real Values"),
translate("Options", "Table Contents")])
HBoxs = [MHBoxLayout(), MHBoxLayout()]
HBoxs[0].addWidget(lblColumns)
HBoxs[0].addWidget(self.columns)
HBoxs[0].addWidget(lblSourceValues)
HBoxs[0].addWidget(self.cbSourceValues)
HBoxs[1].addWidget(lblSourceEncoding)
HBoxs[1].addWidget(self.cbSourceEncoding)
HBoxs[1].addWidget(lblDestinationEncoding)
HBoxs[1].addWidget(self.cbDestinationEncoding)
vblCharacterEncoding = MVBoxLayout()
vblCharacterEncoding.addLayout(HBoxs[0])
vblCharacterEncoding.addLayout(HBoxs[1])
self.setLayout(vblCharacterEncoding)
lblColumns.setFixedWidth(60)
def showAdvancedSelections(self):
pass
def hideAdvancedSelections(self):
pass
def checkCompleters(self):
pass
def reFillCompleters(self):
pass
def apply(self):
self.checkCompleters()
self.reFillCompleters()
getMainTable().createHistoryPoint()
getMainTable().isAskShowHiddenColumn = True
sourceEncoding = str(self.cbSourceEncoding.currentText())
destinationEncoding = str(self.cbDestinationEncoding.currentText())
sourceValues = str(self.cbSourceValues.currentText())
isUseRealValues = (sourceValues == translate("Options", "Real Values"))
selectedColumnKey = self.columns.currentData()
if selectedColumnKey == "all":
columnKeys = getMainTable().getWritableColumnKeys()
else:
columnKeys = [selectedColumnKey]
for columnKey in columnKeys:
columnNo = getMainTable().getColumnNoFromKey(columnKey)
if getMainTable().checkReadOnlyColumn(columnKey) is False:
continue
if getMainTable().checkHiddenColumn(columnKey, False) is False:
continue
for rowNo in range(getMainTable().rowCount()):
if getMainTable().isChangeableItem(rowNo, columnKey):
if isUseRealValues:
newString = str(getMainTable().values[rowNo][columnKey])
else:
newString = str(getMainTable().item(rowNo, columnNo).text())
myString = ""
try:
myString = uni.trDecode(newString, sourceEncoding, "ignore")
except:
pass
try:
myString = str(uni.trEncode(myString, destinationEncoding, "ignore"))
except:
pass
getMainTable().item(rowNo, columnNo).setText(str(myString))
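    # Note (added for clarity, not part of the original file): the loop above performs a
    # decode/re-encode round trip, conceptually
    #     uni.trEncode(uni.trDecode(text, source_encoding, "ignore"), destination_encoding, "ignore")
    # so characters that cannot be represented in the destination encoding are silently
    # dropped ("ignore") rather than raising an error.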
|
gpl-3.0
| -860,980,650,611,901,700 | 44.028302 | 115 | 0.649696 | false |
apaloczy/ap_tools
|
utils.py
|
1
|
54151
|
# Description: General-purpose functions for personal use.
# Author: André Palóczy
# E-mail: paloczy@gmail.com
__all__ = ['seasonal_avg',
'seasonal_std',
'deseason',
'blkavg',
'blkavgdir',
'blkavgt',
'blkapply',
'stripmsk',
'pydatetime2m_arr',
'm2pydatetime_arr',
'npdt2dt',
'dt2sfloat',
'doy2date',
'flowfun',
'cumsimp',
'rot_vec',
'avgdir',
'lon180to360',
'lon360to180',
'bbox2ij',
'xy2dist',
'get_xtrackline',
'get_arrdepth',
'fpointsbox',
'near',
'near2',
'mnear',
'refine',
'denan',
'standardize',
'linear_trend',
'thomas',
'point_in_poly',
'get_mask_from_poly',
'sphericalpolygon_area',
'greatCircleBearing',
'weim',
'smoo2',
'topo_slope',
'curvature_geometric',
'get_isobath',
'angle_isobath',
'isopyc_depth',
'whiten_zero',
'wind2stress',
'gen_dates',
'fmt_isobath',
'float2latex',
'mat2npz',
'bb_map',
'dots_dualcolor']
from os import system
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import path
from mpl_toolkits.basemap import Basemap
from datetime import datetime, timedelta
from dateutil import rrule, parser
from scipy.io import loadmat, savemat
from scipy import signal
from scipy.signal import savgol_filter
from glob import glob
from netCDF4 import Dataset, num2date, date2num
# from pandas import rolling_window # FIXME, new pandas way of doing this is, e.g., arr = Series(...).rolling(...).mean()
from pandas import Timestamp
from gsw import distance
from pygeodesy import Datums, VincentyError
from pygeodesy.ellipsoidalVincenty import LatLon as LatLon
from pygeodesy.sphericalNvector import LatLon as LatLon_sphere
def seasonal_avg(t, F):
"""
USAGE
-----
F_seasonal = seasonal_avg(t, F)
Calculates the seasonal average of variable F(t).
Assumes 't' is a 'datetime.datetime' object.
"""
tmo = np.array([ti.month for ti in t])
ftmo = [tmo==mo for mo in range(1, 13)]
return np.array([F[ft].mean() for ft in ftmo])
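# Illustrative example (added for clarity, not part of the original docstring): for two
# full years of monthly timestamps t and F = np.array([ti.month for ti in t]),
# seasonal_avg(t, F) returns array([1., 2., ..., 12.]).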
def seasonal_std(t, F):
"""
USAGE
-----
F_seasonal = seasonal_std(t, F)
Calculates the seasonal standard deviation of variable F(t).
Assumes 't' is a 'datetime.datetime' object.
"""
tmo = np.array([ti.month for ti in t])
ftmo = [tmo==mo for mo in range(1, 13)]
return np.array([F[ft].std() for ft in ftmo])
def deseason(t, F):
"""
USAGE
-----
F_nonssn = deseason(t, F)
Removes the seasonal signal of variable F(t).
Assumes 't' is a 'datetime.datetime' object.
Also assumes that F is sampled monthly and only for
complete years (i.e., t.size is a multiple of 12).
"""
Fssn = seasonal_avg(t, F)
nyears = int(t.size/12)
aux = np.array([])
for n in range(nyears):
aux = np.concatenate((aux, Fssn))
return F - aux
def blkavg(x, y, every=2):
"""
Block-averages a variable y(x). Returns its block average
and standard deviation and new x axis.
"""
nx = x.size
xblk, yblk, yblkstd = np.array([]), np.array([]), np.array([])
for i in range(every, nx+every, every):
yi = y[i-every:i]
xblk = np.append(xblk, np.nanmean(x[i-every:i]))
yblk = np.append(yblk, np.nanmean(yi))
yblkstd = np.append(yblkstd, np.nanstd(yi))
return xblk, yblk, yblkstd
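# Illustrative example (added for clarity, not part of the original module); the helper
# name below is hypothetical and exists only to show the expected output of blkavg().
def _blkavg_example():
    x = np.arange(6.)                      # [0, 1, 2, 3, 4, 5]
    y = np.array([1., 2., 3., 4., 5., 6.])
    xblk, yblk, yblkstd = blkavg(x, y, every=2)
    # xblk -> [0.5, 2.5, 4.5], yblk -> [1.5, 3.5, 5.5], yblkstd -> [0.5, 0.5, 0.5]
    return xblk, yblk, yblkstd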
def blkavgdir(x, ydir, every=2, degrees=False, axis=None):
"""
Block-averages a PERIODIC variable ydir(x). Returns its
block average and new x axis.
"""
nx = x.size
xblk, yblk, yblkstd = np.array([]), np.array([]), np.array([])
for i in range(every, nx+every, every):
xblk = np.append(xblk, np.nanmean(x[i-every:i]))
yblk = np.append(yblk, avgdir(ydir[i-every:i], degrees=degrees, axis=axis))
return xblk, yblk
def blkavgt(t, x, every=2):
"""
Block-averages a variable x(t). Returns its block average
and the new t axis.
"""
nt = t.size
units = 'days since 01-01-01'
calendar = 'proleptic_gregorian'
t = date2num(t, units=units, calendar=calendar)
tblk, xblk = np.array([]), np.array([])
for i in range(every, nt+every, every):
xi = x[i-every:i]
tblk = np.append(tblk, np.nanmean(t[i-every:i]))
xblk = np.append(xblk, np.nanmean(xi))
tblk = num2date(tblk, units=units, calendar=calendar)
return tblk, xblk
def blkapply(x, f, nblks, overlap=0, demean=False, detrend=False, verbose=True):
"""
Divides array 'x' in 'nblks' blocks and applies function 'f' = f(x) on
each block.
"""
x = np.array(x)
assert callable(f), "f must be a function"
nx = x.size
ni = int(nx/nblks) # Number of data points in each chunk.
y = np.zeros(ni) # Array that will receive each block.
dn = int(round(ni - overlap*ni)) # How many indices to move forward with
# each chunk (depends on the % overlap).
# Demean/detrend the full record first (removes the lowest frequencies).
# Then, also demean/detrend each block beffore applying f().
if demean: x = x - x.mean()
if detrend: x = signal.detrend(x, type='linear')
n=0
il, ir = 0, ni
while ir<=nx:
xn = x[il:ir]
if demean: xn = xn - xn.mean()
if detrend: xn = signal.detrend(xn, type='linear')
y = y + f(xn) # Apply function and accumulate the current bock.
il+=dn; ir+=dn
n+=1
y /= n # Divide by number of blocks actually used.
ncap = nx - il # Number of points left out at the end of array.
if verbose:
print("")
print("Left last %d data points out (%.1f %% of all points)."%(ncap,100*ncap/nx))
if overlap>0:
print("")
print("Intended %d blocks, but could fit %d blocks, with"%(nblks,n))
print('overlap of %.1f %%, %d points per block.'%(100*overlap,dn))
print("")
return y
def stripmsk(arr, mask_invalid=False):
if mask_invalid:
arr = np.ma.masked_invalid(arr)
if np.ma.isMA(arr):
msk = arr.mask
arr = arr.data
arr[msk] = np.nan
return arr
def pydatetime2m_arr(pydt_arr):
    """
    Convert an array of datetime.datetime timestamps to MATLAB datenums.
    MATLAB datenums count days from year 0 and Python ordinals from year 1,
    hence the 366-day offset (the inverse of m2pydatetime_arr below).
    """
    pydt_arr = np.array(pydt_arr)
    secperday = 86400.0
    matdt = []
    for pydt in pydt_arr.tolist():
        dfrac = (pydt - datetime(pydt.year, pydt.month, pydt.day, 0, 0, 0)).total_seconds()/secperday
        matdt.append(pydt.toordinal() + 366 + dfrac)
    return np.array(matdt)
def m2pydatetime_arr(mdatenum_arr):
mdatenum_arr = np.array(mdatenum_arr)
timedt = timedelta(days=366)
pydt = []
for mdt in mdatenum_arr.tolist():
d = datetime.fromordinal(int(mdt))
dfrac = timedelta(days=mdt%1) - timedt
pydt.append(d + dfrac)
return np.array(pydt)
def npdt2dt(tnp):
"""
USAGE
-----
t_datetime = npdt2dt(t_numpydatetime64)
Convert an array of numpy.datetime64 timestamps to datetime.datetime.
"""
return np.array([Timestamp(ti).to_pydatetime() for ti in tnp])
def dt2sfloat(t):
"""
USAGE
-----
t_float = dt2sfloat(t_datetime)
Convert an array of datetime.datetime timestamps to an array of floats
representing elapsed seconds since the first timestamp.
"""
t = np.array(t)
t0 = t[0]
return np.array([(tn - t0).total_seconds() for tn in t])
def doy2date(doy, year=2017):
"""
USAGE
-----
t = doy2date(doy, year=2017)
Convert an array `doy` of decimal yeardays to
an array of datetime.datetime timestamps.
"""
doy = np.array(doy)*86400 # [seconds/day].
tunit = 'seconds since %d-01-01 00:00:00'%year
return np.array([num2date(dn, tunit) for dn in doy])
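# Illustrative example (added for clarity, not part of the original docstring):
# doy2date([0.5]) maps yearday 0.5 to 2017-01-01 12:00:00 (noon of January 1st
# of the default year).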
def flowfun(x, y, u, v, variable='psi', geographic=True):
"""
FLOWFUN Computes the potential PHI and the streamfunction PSI
of a 2-dimensional flow defined by the matrices of velocity
components U and V, so that
	u = d(PHI)/dx - d(PSI)/dy ,   v = d(PHI)/dy + d(PSI)/dx
P = FLOWFUN(x,y,u,v) returns an array P of the same size as u and v,
which can be the velocity potential (PHI) or the streamfunction (PSI)
Because these scalar fields are defined up to the integration constant,
their absolute values are such that PHI[0,0] = PSI[0,0] = 0.
For a potential (irrotational) flow PSI = 0, and the Laplacian
	of PHI is equal to the divergence of the velocity field.
A solenoidal (non-divergent) flow can be described by the
streamfunction alone, and the Laplacian of the streamfunction
is equal to the vorticity (curl) of the velocity field.
The units of the grid coordinates are assumed to be consistent
with the units of the velocity components, e.g., [m] and [m/s].
If variable=='psi', the streamfunction (PSI) is returned.
If variable=='phi', the velocity potential (PHI) is returned.
If geographic==True (default), (x,y) are assumed to be
(longitude,latitude) and are converted to meters before
computing (dx,dy).
If geographic==False, (x,y) are assumed to be in meters.
Uses function 'cumsimp()' (Simpson rule summation).
Author: Kirill K. Pankratov, March 7, 1994.
Source: http://www-pord.ucsd.edu/~matlab/stream.htm
Translated to Python by André Palóczy, January 15, 2015.
Modified by André Palóczy on January 15, 2015.
"""
x,y,u,v = map(np.asanyarray, (x,y,u,v))
if not x.shape==y.shape==u.shape==v.shape:
print("Error: Arrays (x, y, u, v) must be of equal shape.")
return
## Calculating grid spacings.
if geographic:
dlat, _ = np.gradient(y)
_, dlon = np.gradient(x)
deg2m = 111120.0 # [m/deg]
dx = dlon*deg2m*np.cos(y*np.pi/180.) # [m]
dy = dlat*deg2m # [m]
else:
dy, _ = np.gradient(y)
_, dx = np.gradient(x)
ly, lx = x.shape # Shape of the (x,y,u,v) arrays.
## Now the main computations.
## Integrate velocity fields to get potential and streamfunction.
## Use Simpson rule summation (function CUMSIMP).
## Compute velocity potential PHI (non-rotating part).
if variable=='phi':
cx = cumsimp(u[0,:]*dx[0,:]) # Compute x-integration constant
cy = cumsimp(v[:,0]*dy[:,0]) # Compute y-integration constant
cx = np.expand_dims(cx, 0)
cy = np.expand_dims(cy, 1)
phiy = cumsimp(v*dy) + np.tile(cx, (ly,1))
phix = cumsimp(u.T*dx.T).T + np.tile(cy, (1,lx))
phi = (phix + phiy)/2.
return phi
## Compute streamfunction PSI (non-divergent part).
if variable=='psi':
cx = cumsimp(v[0,:]*dx[0,:]) # Compute x-integration constant
cy = cumsimp(u[:,0]*dy[:,0]) # Compute y-integration constant
cx = np.expand_dims(cx, 0)
cy = np.expand_dims(cy, 1)
psix = -cumsimp(u*dy) + np.tile(cx, (ly,1))
psiy = cumsimp(v.T*dx.T).T - np.tile(cy, (1,lx))
psi = (psix + psiy)/2.
return psi
def cumsimp(y):
"""
F = CUMSIMP(Y) Simpson-rule column-wise cumulative summation.
Numerical approximation of a function F(x) such that
Y(X) = dF/dX. Each column of the input matrix Y represents
the value of the integrand Y(X) at equally spaced points
X = 0,1,...size(Y,1).
The output is a matrix F of the same size as Y.
The first row of F is equal to zero and each following row
is the approximation of the integral of each column of matrix
Y up to the givem row.
CUMSIMP assumes continuity of each column of the function Y(X)
and uses Simpson rule summation.
Similar to the command F = CUMSUM(Y), exept for zero first
row and more accurate summation (under the assumption of
continuous integrand Y(X)).
Author: Kirill K. Pankratov, March 7, 1994.
Source: http://www-pord.ucsd.edu/~matlab/stream.htm
Translated to Python by André Palóczy, January 15, 2015.
"""
y = np.asanyarray(y)
## 3-point interpolation coefficients to midpoints.
## Second-order polynomial (parabolic) interpolation coefficients
## from Xbasis = [0 1 2] to Xint = [.5 1.5]
c1 = 3/8.
c2 = 6/8.
c3 = -1/8.
if y.ndim==1:
y = np.expand_dims(y,1)
f = np.zeros((y.size,1)) # Initialize summation array.
squeeze_after = True
elif y.ndim==2:
f = np.zeros(y.shape) # Initialize summation array.
squeeze_after = False
else:
print("Error: Input array has more than 2 dimensions.")
return
if y.size==2: # If only 2 elements in columns - simple average.
f[1,:] = (y[0,:] + y[1,:])/2.
return f
else: # If more than two elements in columns - Simpson summation.
## Interpolate values of y to all midpoints.
f[1:-1,:] = c1*y[:-2,:] + c2*y[1:-1,:] + c3*y[2:,:]
f[2:,:] = f[2:,:] + c3*y[:-2,:] + c2*y[1:-1,:] + c1*y[2:,:]
f[1,:] = f[1,:]*2
f[-1,:] = f[-1,:]*2
## Simpson (1,4,1) rule.
f[1:,:] = 2*f[1:,:] + y[:-1,:] + y[1:,:]
f = np.cumsum(f, axis=0)/6. # Cumulative sum, 6 - denominator from the Simpson rule.
if squeeze_after:
f = f.squeeze()
return f
def rot_vec(u, v, angle=-45, degrees=True):
"""
USAGE
-----
u_rot,v_rot = rot_vec(u,v,angle=-45.,degrees=True)
Returns the rotated vector components (`u_rot`,`v_rot`)
from the zonal-meridional input vector components (`u`,`v`).
The rotation is done using the angle `angle` positive counterclockwise
(trigonometric convention). If `degrees` is set to `True` (default),
then `angle` is converted from degrees to radians.
Example
-------
>>> from matplotlib.pyplot import quiver
>>> from ap_tools.utils import rot_vec
>>> u = -1.
>>> v = -1.
>>> u2,v2 = rot_vec(u,v, angle=-30.)
"""
u,v = map(np.asanyarray, (u,v))
if degrees:
angle = angle*np.pi/180. # Degrees to radians.
u_rot = +u*np.cos(angle) + v*np.sin(angle) # Usually the across-shore component.
v_rot = -u*np.sin(angle) + v*np.cos(angle) # Usually the along-shore component.
return u_rot,v_rot
def avgdir(dirs, degrees=False, axis=None):
"""
USAGE
-----
dirm = avgdir(dirs, degrees=False, axis=None)
Calculate the mean direction of an array of directions 'dirs'.
If 'degrees' is 'False' (default), the input directions must be
in radians. If 'degrees' is 'True', the input directions must be
in degrees.
The direction angle is measured from the ZONAL axis, i.e.,
(0, 90, -90) deg are (Eastward, Northward, Southward).
180 and -180 deg are both Westward.
If 'axis' is 'None' (default) the mean is calculated on the
flattened array. Otherwise, 'axis' is the index of the axis
to calculate the mean over.
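Example
-------
A minimal sketch (illustrative values, not from the original docstring);
directions straddling 0 deg average to about 0 deg, not 180 deg:
>>> dirs = np.array([350., 10.])
>>> dirm = avgdir(dirs, degrees=True)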
"""
dirs = np.array(dirs)
if degrees:
dirs = dirs*np.pi/180 # Degrees to radians.
uxs = np.cos(dirs)
vys = np.sin(dirs)
dirm = np.arctan2(vys.sum(axis=axis), uxs.sum(axis=axis))
if degrees:
dirm = dirm*180/np.pi # From radians to degrees.
return dirm
def lon180to360(lon):
"""
Converts longitude values in the range [-180,+180]
to longitude values in the range [0,360].
"""
lon = np.asanyarray(lon)
return (lon + 360.0) % 360.0
def lon360to180(lon):
"""
Converts longitude values in the range [0,360]
to longitude values in the range [-180,+180].
"""
lon = np.asanyarray(lon)
return ((lon + 180.) % 360.) - 180.
def bbox2ij(lon, lat, bbox=[-135., -85., -76., -64.], FIX_IDL=True):
"""
USAGE
-----
ilon_start, ilon_end, jlat_start, jlat_end = bbox2ij(lon, lat, bbox=[-135., -85., -76., -64.], FIX_IDL=True)
OR
(ilon_start_left, ilon_end_left, jlat_start, jlat_end), (ilon_start_right, ilon_end_right, jlat_start, jlat_end) = ...
... bbox2ij(lon, lat, bbox=[-135., -85., -76., -64.], FIX_IDL=True)
Return indices for i,j that will completely cover the specified bounding box. 'lon' and 'lat' are 2D coordinate arrays
(generated by meshgrid), and 'bbox' is a list like [lon_start, lon_end, lat_start, lat_end] describing the desired
longitude-latitude box.
If the specified bbox is such that it crosses the edges of the longitude array, two tuples of indices are returned.
The first (second) tuple traces out the left (right) part of the bbox.
If FIX_IDL is set to 'True' (default), the indices returned correspond to the "short route" around the globe, which
amounts to assuming that the specified bbox crosses the International Date Line. If FIX_IDL is set to 'False', the
"long route" is used instead.
Example
-------
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> lon = np.arange(-180., 180.25, 0.25)
>>> lat = np.arange(-90., 90.25, 0.25)
>>> lon, lat = np.meshgrid(lon, lat)
>>> h = np.sin(lon) + np.cos(lat)
>>> i0, i1, j0, j1 = bbox2ij(lon, lat, bbox=[-71, -63., 39., 46])
>>> h_subset = h[j0:j1,i0:i1]
>>> lon_subset = lon[j0:j1,i0:i1]
>>> lat_subset = lat[j0:j1,i0:i1]
>>> fig, ax = plt.subplots()
>>> ax.pcolor(lon_subset,lat_subset,h_subset)
>>> plt.axis('tight')
Original function downloaded from http://gis.stackexchange.com/questions/71630/subsetting-a-curvilinear-netcdf-file-roms-model-output-using-a-lon-lat-boundin
Modified by André Palóczy on August 20, 2016 to handle bboxes that
cross the International Date Line or the edges of the longitude array.
"""
lon, lat, bbox = map(np.asanyarray, (lon, lat, bbox))
# Test whether the wanted bbox crosses the International Date Line (branch cut of the longitude array).
dlon = bbox[:2].ptp()
IDL_BBOX=dlon>180.
IDL_BBOX=np.logical_and(IDL_BBOX, FIX_IDL)
mypath = np.array([bbox[[0,1,1,0]], bbox[[2,2,3,3]]]).T
p = path.Path(mypath)
points = np.vstack((lon.flatten(), lat.flatten())).T
n, m = lon.shape
inside = p.contains_points(points).reshape((n, m))
# Fix mask if bbox goes through the International Date Line.
if IDL_BBOX:
fcol=np.all(~inside, axis=0)
flin=np.any(inside, axis=1)
fcol, flin = map(np.expand_dims, (fcol, flin), (0, 1))
fcol = np.tile(fcol, (n, 1))
flin = np.tile(flin, (1, m))
inside=np.logical_and(flin, fcol)
print("Bbox crosses the International Date Line.")
ii, jj = np.meshgrid(range(m), range(n))
iiin, jjin = ii[inside], jj[inside]
i0, i1, j0, j1 = min(iiin), max(iiin), min(jjin), max(jjin)
SPLIT_BBOX=(i1-i0)==(m-1) # Test whether the wanted bbox crosses edges of the longitude array.
# If wanted bbox crosses edges of the longitude array, return indices for the two boxes separately.
if SPLIT_BBOX:
Iiin = np.unique(iiin)
ib0 = np.diff(Iiin).argmax() # Find edge of the inner side of the left bbox.
ib1 = ib0 + 1 # Find edge of the inner side of the right bbox.
Il, Ir = Iiin[ib0], Iiin[ib1] # Indices of the columns that bound the inner side of the two bboxes.
print("Bbox crosses edges of the longitude array. Returning two sets of indices.")
return (i0, Il, j0, j1), (Ir, i1, j0, j1)
else:
return i0, i1, j0, j1
def xy2dist(x, y, cyclic=False, datum='WGS84'):
"""
USAGE
-----
d = xy2dist(x, y, cyclic=False, datum='WGS84')
Calculates a distance axis from a line defined by longitudes and latitudes
'x' and 'y', using either the Vicenty formulae on an ellipsoidal earth
(ellipsoid defaults to WGS84) or on a sphere (if datum=='Sphere').
Example
-------
>>> yi, yf = -23.550520, 32.71573800
>>> xi, xf = -46.633309, -117.161084
>>> x, y = np.linspace(xi, xf), np.linspace(yi, yf)
>>> d_ellipse = xy2dist(x, y, datum='WGS84')[-1]*1e-3 # [km].
>>> d_sphere = xy2dist(x, y, datum='Sphere')[-1]*1e-3 # [km].
>>> dd = np.abs(d_ellipse - d_sphere)
>>> dperc = 100*dd/d_ellipse
>>> msg = 'Difference of %.1f km over a %.0f km-long line (%.3f %% difference)'%(dd, d_ellipse, dperc)
>>> print(msg)
"""
if datum!="Sphere":
xy = [LatLon(y0, x0, datum=Datums[datum]) for x0, y0 in zip(x, y)]
else:
xy = [LatLon_sphere(y0, x0) for x0, y0 in zip(x, y)]
d = np.array([xy[n].distanceTo(xy[n+1]) for n in range(len(xy)-1)])
return np.append(0, np.cumsum(d))
def get_xtrackline(lon1, lon2, lat1, lat2, L=200, dL=10):
"""
USAGE
-----
lonp, latp = get_xtrackline(lon1, lon2, lat1, lat2, L=200, dL=10)
Generates a great-circle line with length 2L (with L in km) that is perpendicular to the great-circle line
defined by the input points (lon1, lat1) and (lon2, lat2). The spacing between the points along the output
line is dL km. Assumes a spherical Earth.
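Example
-------
A minimal sketch (illustrative coordinates; assumes the module's
LatLon_sphere dependency is available, as used by the function itself):
>>> lonp, latp = get_xtrackline(-45., -44., -30., -30., L=50, dL=5)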
"""
km2m = 1e3
L, dL = L*km2m, dL*km2m
nh = int(L/dL)
p1, p2 = LatLon_sphere(lat1, lon1), LatLon_sphere(lat2, lon2)
angperp = p1.initialBearingTo(p2) + 90
angperpb = angperp + 180
pm = p1.midpointTo(p2)
# Create perpendicular line starting from the midpoint.
N = range(1, nh + 1)
pperp = []
_ = [pperp.append(pm.destination(dL*n, angperpb)) for n in N]
pperp.reverse()
pperp.append(pm)
_ = [pperp.append(pm.destination(dL*n, angperp)) for n in N]
lonperp = np.array([p.lon for p in pperp])
latperp = np.array([p.lat for p in pperp])
return lonperp, latperp
def get_arrdepth(arr):
"""
USAGE
-----
arr_depths = get_arrdepth(arr)
Determine number of nested levels in each
element of an array of arrays of arrays...
(or other array-like objects).
"""
arr = np.array(arr) # Make sure first level is an array.
all_nlevs = []
for i in range(arr.size):
nlev=0
wrk_arr = arr[i]
while np.size(wrk_arr)>0:
try:
wrk_arr = np.array(wrk_arr[i])
except Exception:
all_nlevs.append(nlev)
nlev=0
break
nlev+=1
return np.array(all_nlevs)
def fpointsbox(x, y, fig, ax, nboxes=1, plot=True, pause_secs=5, return_index=True):
"""
USAGE
-----
fpts = fpointsbox(x, y, fig, ax, nboxes=1, plot=True, pause_secs=5, return_index=True)
Find points in a rectangle made with 2 ginput points.
"""
fpts = np.array([])
for n in range(nboxes):
box = np.array(fig.ginput(n=2, timeout=0))
try:
xb, yb = box[:,0], box[:,1]
except IndexError:
print("No points selected. Skipping box \# %d."%(n+1))
continue
xl, xr, yd, yu = xb.min(), xb.max(), yb.min(), yb.max()
xbox = np.array([xl, xr, xr, xl, xl])
ybox = np.array([yd, yd, yu, yu, yd])
fxbox, fybox = np.logical_and(x>xl, x<xr), np.logical_and(y>yd, y<yu)
fptsi = np.logical_and(fxbox, fybox)
if return_index:
fptsi = np.where(fptsi)[0]
fpts = np.append(fpts, fptsi)
if plot:
ax.plot(xbox, ybox, 'r', linestyle='solid', marker='o', ms=4)
ax.plot(x[fptsi], y[fptsi], 'r', linestyle='none', marker='+', ms=5)
plt.draw()
fig.show()
else:
plt.close(fig)
if plot:
plt.draw()
fig.show()
system("sleep %d"%pause_secs)
return fpts
def near(x, x0, npts=1, return_index=False):
"""
USAGE
-----
xnear = near(x, x0, npts=1, return_index=False)
Finds 'npts' points (defaults to 1) in array 'x'
that are closest to a specified 'x0' point.
If 'return_index' is True (defaults to False),
then the indices of the closest points are
returned, ordered from closest to farthest.
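Example
-------
A minimal sketch with illustrative values (not from the original docstring):
>>> x = np.arange(0., 10., 0.5)
>>> xn = near(x, 4.2)
>>> idx = near(x, 4.2, return_index=True)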
"""
x = list(x)
xnear = []
xidxs = []
for n in range(npts):
idx = np.nanargmin(np.abs(np.array(x)-x0))
xnear.append(x.pop(idx))
if return_index:
xidxs.append(idx)
if return_index: # Sort indices according to the proximity of wanted points.
xidxs = [xidxs[i] for i in np.argsort(xnear).tolist()]
xnear.sort()
if npts==1:
xnear = xnear[0]
if return_index:
xidxs = xidxs[0]
else:
xnear = np.array(xnear)
if return_index:
return xidxs
else:
return xnear
def near2(x, y, x0, y0, npts=1, return_index=False):
"""
USAGE
-----
xnear, ynear = near2(x, y, x0, y0, npts=1, return_index=False)
Finds 'npts' points (defaults to 1) in arrays 'x' and 'y'
that are closest to a specified '(x0, y0)' point. If
'return_index' is True (defaults to False), then the
indices of the closest point(s) are returned.
Example
-------
>>> x = np.arange(0., 100., 0.25)
>>> y = np.arange(0., 100., 0.25)
>>> x, y = np.meshgrid(x, y)
>>> x0, y0 = 44.1, 30.9
>>> xn, yn = near2(x, y, x0, y0, npts=1)
>>> print("(x0, y0) = (%f, %f)"%(x0, y0))
>>> print("(xn, yn) = (%f, %f)"%(xn, yn))
"""
x, y = map(np.array, (x, y))
shp = x.shape
xynear = []
xyidxs = []
dx = x - x0
dy = y - y0
dr = dx**2 + dy**2
for n in range(npts):
xyidx = np.unravel_index(np.nanargmin(dr), shp)
if return_index:
xyidxs.append(xyidx)
xyn = (x[xyidx], y[xyidx])
xynear.append(xyn)
dr[xyidx] = np.nan
if npts==1:
xynear = xynear[0]
if return_index:
xyidxs = xyidxs[0]
if return_index:
return xyidxs
else:
return xynear
def mnear(x, y, x0, y0):
"""
USAGE
-----
xmin,ymin = mnear(x, y, x0, y0)
Finds the point in a (lons,lats) line
that is closest to a specified (lon0,lat0) point.
"""
x,y,x0,y0 = map(np.asanyarray, (x,y,x0,y0))
point = (x0,y0)
d = np.array([])
for n in range(x.size):
xn,yn = x[n],y[n]
dn = distance((xn,x0),(yn,y0)) # Calculate distance point-wise.
d = np.append(d,dn)
idx = d.argmin()
return x[idx],y[idx]
def refine(line, nref=100, close=True):
"""
USAGE
-----
ref_line = refine(line, nref=100, close=True)
Given a 1-D sequence of points 'line', returns a
new sequence 'ref_line', which is built by linearly
interpolating 'nref' points between each pair of
subsequent points in the original line.
If 'close' is True (default), the first value of
the original line is repeated at the end of the
refined line, as in a closed polygon.
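Example
-------
A minimal sketch (illustrative corner values, not from the original docstring):
>>> corners = [0., 1., 1.5, 0.5]
>>> fine_line = refine(corners, nref=10, close=True)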
"""
line = np.squeeze(np.asanyarray(line))
if close:
line = np.append(line,line[0])
ref_line = np.array([])
for n in range(line.shape[0]-1):
xi, xf = line[n], line[n+1]
xref = np.linspace(xi,xf,nref)
ref_line = np.append(ref_line, xref)
return ref_line
def point_in_poly(x,y,poly):
"""
USAGE
-----
isinside = point_in_poly(x,y,poly)
Determine if a point is inside a given polygon or not
Polygon is a list of (x,y) pairs. This function
returns True or False. The algorithm is called
'Ray Casting Method'.
Source: http://pseentertainmentcorp.com/smf/index.php?topic=545.0
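Example
-------
A minimal sketch (illustrative unit square; not part of the original docstring):
>>> poly = [(0., 0.), (1., 0.), (1., 1.), (0., 1.)]
>>> isin = point_in_poly(0.5, 0.5, poly)   # True
>>> isout = point_in_poly(1.5, 0.5, poly)  # False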
"""
n = len(poly)
inside = False
p1x,p1y = poly[0]
for i in range(n+1):
p2x,p2y = poly[i % n]
if y > min(p1y,p2y):
if y <= max(p1y,p2y):
if x <= max(p1x,p2x):
if p1y != p2y:
xinters = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xinters:
inside = not inside
p1x,p1y = p2x,p2y
return inside
def get_mask_from_poly(xp, yp, poly, verbose=False):
"""
USAGE
-----
mask = get_mask_from_poly(xp, yp, poly, verbose=False)
Given two arrays 'xp' and 'yp' of (x,y) coordinates (generated by meshgrid)
and a polygon defined by an array of (x,y) coordinates 'poly', with
shape = (n,2), return a boolean array 'mask', where points that lie inside
'poly' are set to 'True'.
"""
print('Building the polygon mask...')
jmax, imax = xp.shape
mask = np.zeros((jmax,imax))
for j in range(jmax):
if verbose:
print("Row %s of %s"%(j+1,jmax))
for i in range(imax):
px, py = xp[j,i], yp[j,i]
# Test if this point is within the polygon.
mask[j,i] = point_in_poly(px, py, poly)
return mask
def sphericalpolygon_area(lons, lats, R=6371000.):
"""
USAGE
-----
area = sphericalpolygon_area(lons, lats, R=6371000.)
Calculates the area of a polygon on the surface of a sphere of
radius R using Girard's Theorem, which states that the area of
a polygon of great circles is R**2 times the sum of its interior
angles minus (N-2)*pi, where N is the number of corners.
R = 6371000 m (6371 km, default) is a typical value for the mean
radius of the Earth.
Source: http://stackoverflow.com/questions/4681737/how-to-calculate-the-area-of-a-polygon-on-the-earths-surface-using-python
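Example
-------
A minimal sketch (illustrative octant "triangle" with vertices on the equator
and at the pole, whose exact area is one eighth of the sphere's surface;
not part of the original docstring):
>>> lons = np.array([0., 90., 0.])
>>> lats = np.array([0., 0., 90.])
>>> A = sphericalpolygon_area(lons, lats)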
"""
lons, lats = map(np.asanyarray, (lons, lats))
N = lons.size
angles = np.empty(N)
for i in range(N):
phiB1, phiA, phiB2 = np.roll(lats, i)[:3]
LB1, LA, LB2 = np.roll(lons, i)[:3]
# calculate angle with north (eastward)
beta1 = greatCircleBearing(LA, phiA, LB1, phiB1)
beta2 = greatCircleBearing(LA, phiA, LB2, phiB2)
# calculate angle between the polygons and add to angle array
angles[i] = np.arccos(np.cos(-beta1)*np.cos(-beta2) + np.sin(-beta1)*np.sin(-beta2))
return (np.sum(angles) - (N-2)*np.pi)*R**2
def greatCircleBearing(lon1, lat1, lon2, lat2):
"""
USAGE
-----
angle = greatCircleBearing(lon1, lat1, lon2, lat2)
Calculates the angle (positive eastward) a
great circle passing through points (lon1,lat1)
and (lon2,lat2) makes with true north.
Source: http://stackoverflow.com/questions/4681737/how-to-calculate-the-area-of-a-polygon-on-the-earths-surface-using-python
"""
lon1, lat1, lon2, lat2 = map(np.asanyarray, (lon1, lat1, lon2, lat2))
dLong = lon1 - lon2
d2r = np.pi/180.
s = np.cos(d2r*lat2)*np.sin(d2r*dLong)
c = np.cos(d2r*lat1)*np.sin(d2r*lat2) - np.sin(lat1*d2r)*np.cos(d2r*lat2)*np.cos(d2r*dLong)
return np.arctan2(s, c)
def weim(x, N, kind='hann', badflag=-9999, beta=14):
"""
Usage
-----
xs = weim(x, N, kind='hann', badflag=-9999, beta=14)
Description
-----------
Calculates the smoothed array 'xs' from the original array 'x' using the specified
window of type 'kind' and size 'N'. 'N' must be an odd number.
Parameters
----------
x : 1D array
Array to be smoothed.
N : integer
Window size. Must be odd.
kind : string, optional
One of the window types available in the numpy module:
hann (default) : Gaussian-like. The weight decreases toward the ends. Its end-points are zeroed.
hamming : Similar to the hann window. Its end-points are not zeroed, therefore it is
discontinuous at the edges, and may produce undesired artifacts.
blackman : Similar to the hann and hamming windows, with sharper ends.
bartlett : Triangular-like. Its end-points are zeroed.
kaiser : Flexible shape. Takes the optional parameter "beta" as a shape parameter.
For beta=0, the window is rectangular. As beta increases, the window gets narrower.
Refer to the numpy functions for details about each window type.
badflag : float, optional
The bad data flag. Elements of the input array 'A' holding this value are ignored.
beta : float, optional
Shape parameter for the kaiser window. For windows other than the kaiser window,
this parameter does nothing.
Returns
-------
xs : 1D array
The smoothed array.
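Example
-------
A minimal sketch (illustrative noisy series; not from the original docstring):
>>> t = np.linspace(0., 2*np.pi, 200)
>>> x = np.sin(t) + 0.1*np.random.randn(200)
>>> xs = weim(x, 11, kind='hann')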
---------------------------------------
André Palóczy Filho (paloczy@gmail.com)
June 2012
==============================================================================================================
"""
###########################################
### Checking window type and dimensions ###
###########################################
kinds = ['hann', 'hamming', 'blackman', 'bartlett', 'kaiser']
if ( kind not in kinds ):
raise ValueError('Invalid window type requested: %s'%kind)
if np.mod(N,2) == 0:
raise ValueError('Window size must be odd')
###########################
### Creating the window ###
###########################
if ( kind == 'kaiser' ): # If the window kind is kaiser (beta is required).
wstr = 'np.kaiser(N, beta)'
else: # If the window kind is hann, hamming, blackman or bartlett (beta is not required).
if kind == 'hann':
kind = 'hanning'
wstr = 'np.' + kind + '(N)'
w = eval(wstr)
x = np.asarray(x, dtype=float).flatten()
Fnan = np.isnan(x).flatten()
ln = (N-1)//2 # Half-window size (integer, used for slicing).
lx = x.size
lf = lx - ln
xs = np.nan*np.ones(lx)
# Eliminating bad data from mean computation.
fbad=x==badflag
x[fbad] = np.nan
for i in range(lx):
if i <= ln:
xx = x[:ln+i+1]
ww = w[ln-i:]
elif i >= lf:
xx = x[i-ln:]
ww = w[:lf-i-1]
else:
xx = x[i-ln:i+ln+1]
ww = w.copy()
f = ~np.isnan(xx) # Counting only NON-NaNs, both in the input array and in the window points.
xx = xx[f]
ww = ww[f]
if f.sum() == 0: # Thou shalt not divide by zero.
xs[i] = x[i]
else:
xs[i] = np.sum(xx*ww)/np.sum(ww)
xs[Fnan] = np.nan # Assigning NaN to the positions holding NaNs in the input array.
return xs
def smoo2(A, hei, wid, kind='hann', badflag=-9999, beta=14):
"""
Usage
-----
As = smoo2(A, hei, wid, kind='hann', badflag=-9999, beta=14)
Description
-----------
Calculates the smoothed array 'As' from the original array 'A' using the specified
window of type 'kind' and shape ('hei','wid').
Parameters
----------
A : 2D array
Array to be smoothed.
hei : integer
Window height. Must be odd and greater than or equal to 3.
wid : integer
Window width. Must be odd and greater than or equal to 3.
kind : string, optional
One of the window types available in the numpy module:
hann (default) : Gaussian-like. The weight decreases toward the ends. Its end-points are zeroed.
hamming : Similar to the hann window. Its end-points are not zeroed, therefore it is
discontinuous at the edges, and may produce undesired artifacts.
blackman : Similar to the hann and hamming windows, with sharper ends.
bartlett : Triangular-like. Its end-points are zeroed.
kaiser : Flexible shape. Takes the optional parameter "beta" as a shape parameter.
For beta=0, the window is rectangular. As beta increases, the window gets narrower.
Refer to the numpy functions for details about each window type.
badflag : float, optional
The bad data flag. Elements of the input array 'A' holding this value are ignored.
beta : float, optional
Shape parameter for the kaiser window. For windows other than the kaiser window,
this parameter does nothing.
Returns
-------
As : 2D array
The smoothed array.
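Example
-------
A minimal sketch (illustrative random field; not from the original docstring):
>>> A = np.random.randn(50, 60)
>>> As = smoo2(A, 5, 7, kind='hann')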
---------------------------------------
André Palóczy Filho (paloczy@gmail.com)
April 2012
==============================================================================================================
"""
###########################################
### Checking window type and dimensions ###
###########################################
kinds = ['hann', 'hamming', 'blackman', 'bartlett', 'kaiser']
if ( kind not in kinds ):
raise ValueError('Invalid window type requested: %s'%kind)
if ( np.mod(hei,2) == 0 ) or ( np.mod(wid,2) == 0 ):
raise ValueError('Window dimensions must be odd')
if (hei <= 1) or (wid <= 1):
raise ValueError('Window shape must be (3,3) or greater')
##############################
### Creating the 2D window ###
##############################
if ( kind == 'kaiser' ): # If the window kind is kaiser (beta is required).
wstr = 'np.outer(np.kaiser(hei, beta), np.kaiser(wid, beta))'
else: # If the window kind is hann, hamming, blackman or bartlett (beta is not required).
if kind == 'hann':
kind = 'hanning'
# computing outer product to make a 2D window out of the original 1d windows.
wstr = 'np.outer(np.' + kind + '(hei), np.' + kind + '(wid))'
wdw = eval(wstr)
A = np.asanyarray(A)
Fnan = np.isnan(A)
imax, jmax = A.shape
As = np.nan*np.ones( (imax, jmax) )
for i in range(imax):
for j in range(jmax):
### Default window parameters.
wupp = 0
wlow = hei
wlef = 0
wrig = wid
lh = hei//2 # Half-window height (integer, used for slicing).
lw = wid//2 # Half-window width (integer, used for slicing).
### Default array ranges (functions of the i,j indices).
upp = i-lh
low = i+lh+1
lef = j-lw
rig = j+lw+1
##################################################
### Tiling window and input array at the edges ###
##################################################
# Upper edge.
if upp < 0:
wupp = wupp-upp
upp = 0
# Left edge.
if lef < 0:
wlef = wlef-lef
lef = 0
# Bottom edge.
if low > imax:
ex = low-imax
wlow = wlow-ex
low = imax
# Right edge.
if rig > jmax:
ex = rig-jmax
wrig = wrig-ex
rig = jmax
###############################################
### Computing smoothed value at point (i,j) ###
###############################################
Ac = A[upp:low, lef:rig].copy() # Copy to avoid modifying the input array in place.
wdwc = wdw[wupp:wlow, wlef:wrig].copy() # Copy to avoid corrupting the window for later iterations.
fnan = np.isnan(Ac)
Ac[fnan] = 0; wdwc[fnan] = 0 # Eliminating NaNs from mean computation.
fbad = Ac==badflag
wdwc[fbad] = 0 # Eliminating bad data from mean computation.
a = Ac * wdwc
As[i,j] = a.sum() / wdwc.sum()
As[Fnan] = np.nan # Assigning NaN to the positions holding NaNs in the input array.
return As
def denan(arr):
"""
USAGE
-----
denaned_arr = denan(arr)
Remove the NaNs from an array.
"""
f = np.isnan(arr)
return arr[~f]
def standardize(series):
"""
USAGE
-----
series2 = standardize(series)
Standardizes a series by subtracting its mean value
and dividing by its standard deviation. The result is
a dimensionless series. Inputs can be of type
"np.array", or "Pandas.Series"/"Pandas.TimeSeries".
"""
Mean, Std = series.mean(), series.std()
return (series - Mean)/Std
def linear_trend(series, return_line=True):
"""
USAGE
-----
line = linear_trend(series, return_line=True)
OR
b, a, x = linear_trend(series, return_line=False)
Returns the linear fit (line = b*x + a) associated
with the 'series' array.
Adapted from pylab.detrend_linear.
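Example
-------
A minimal sketch (illustrative series with a linear trend plus noise;
not part of the original docstring):
>>> series = 0.5*np.arange(100.) + np.random.randn(100)
>>> detrended = series - linear_trend(series)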
"""
series = np.asanyarray(series)
x = np.arange(series.size, dtype=np.float_)
C = np.cov(x, series, bias=1) # Covariance matrix.
b = C[0, 1]/C[0, 0] # Angular coefficient.
a = series.mean() - b*x.mean() # Linear coefficient.
line = b*x + a
if return_line:
return line
else:
return b, a, x
def thomas(A, b):
"""
USAGE
-----
x = thomas(A,b)
Solve Ax = b (where A is a tridiagonal matrix)
using the Thomas Algorithm.
References
----------
For a step-by-step derivation of the algorithm, see
e.g., http://www3.ul.ie/wlee/ms6021_thomas.pdf
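Example
-------
A minimal sketch (illustrative 3x3 tridiagonal system whose exact solution
is [1, 1, 1]; not part of the original docstring):
>>> A = np.array([[2., 1., 0.], [1., 2., 1.], [0., 1., 2.]])
>>> b = np.array([3., 4., 3.])
>>> x = thomas(A, b)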
"""
# Step 1: Sweep rows from top to bottom,
# calculating gammas and rhos along the way.
N = b.size
gam = [float(A[0,1]/A[0,0])]
rho = [float(b[0]/A[0,0])]
for i in range(1, N): # Start at row 1; row 0 was handled above.
rho.append(float((b[i] - A[i,i-1]*rho[-1])/(A[i,i] - A[i,i-1]*gam[-1])))
if i<N-1: # No gamma in the last row.
gam.append(float(A[i,i+1]/(A[i,i] - A[i,i-1]*gam[-1])))
# Step 2: Substitute solutions for unknowns
# starting from the bottom row all the way up.
x = [] # Vector of unknowns.
x.append(rho.pop()) # Last row is already solved.
for i in range(N-2, -1, -1):
x.append(float(rho.pop() - gam.pop()*x[-1]))
x.reverse()
return np.array(x)
def topo_slope(lon, lat, h):
"""
USAGE
-----
lons, lats, slope = topo_slope(lon, lat, h)
Calculates the bottom slope for a topography field 'h' at
coordinates ('lon', 'lat') using first-order finite differences.
The output arrays have shape (M-1,L-1), where (M,L) = h.shape.
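Example
-------
A minimal sketch (illustrative Gaussian seamount on an idealized grid;
not part of the original docstring):
>>> lon, lat = np.meshgrid(np.arange(-40., -30., 0.1), np.arange(-30., -20., 0.1))
>>> h = -4000. + 3000.*np.exp(-((lon + 35.)**2 + (lat + 25.)**2))
>>> lons, lats, slope = topo_slope(lon, lat, h)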
"""
lon,lat,h = map(np.asanyarray, (lon,lat,h))
deg2m = 1852.*60. # m/deg.
deg2rad = np.pi/180. # rad/deg.
x = lon*deg2m*np.cos(lat*deg2rad)
y = lat*deg2m
# First-order differences, accurate to O(dx) and O(dy),
# respectively.
sx = (h[:,1:] - h[:,:-1]) / (x[:,1:] - x[:,:-1])
sy = (h[1:,:] - h[:-1,:]) / (y[1:,:] - y[:-1,:])
# Finding the values of the derivatives sx and sy
# at the same location in physical space.
sx = 0.5*(sx[1:,:]+sx[:-1,:])
sy = 0.5*(sy[:,1:]+sy[:,:-1])
# Calculating the bottom slope.
slope = np.sqrt(sx**2 + sy**2)
# Finding the lon,lat coordinates of the
# values of the derivatives sx and sy.
lons = 0.5*(lon[1:,:]+lon[:-1,:])
lats = 0.5*(lat[1:,:]+lat[:-1,:])
lons = 0.5*(lons[:,1:]+lons[:,:-1])
lats = 0.5*(lats[:,1:]+lats[:,:-1])
return lons, lats, slope
def curvature_geometric(x, y):
"""
USAGE
-----
k = curvature_geometric(x, y)
Estimates the curvature k of a 2D curve (x,y) using a geometric method.
If your curve is given by two arrays, x and y, you can
approximate its curvature at each point by the reciprocal of the
radius of a circumscribing triangle with that point, the preceding
point, and the succeeding point as vertices. The radius of such a
triangle is one fourth the product of the three sides divided by its
area.
The curvature will be positive for curvature to the left and
negative for curvature to the right as you advance along the curve.
Note that if your data are too closely spaced together or subject
to substantial noise errors, this formula will not be very accurate.
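Example
-------
A minimal sketch (illustrative circle of radius 10, whose curvature is
1/10 everywhere; not part of the original docstring):
>>> t = np.linspace(0., 2*np.pi, 100)
>>> k = curvature_geometric(10.*np.cos(t), 10.*np.sin(t))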
Author: Roger Stafford
Source: http://www.mathworks.com/matlabcentral/newsreader/view_thread/125637
Translated to Python by André Palóczy, January 19, 2015.
"""
x,y = map(np.asanyarray, (x,y))
x1 = x[:-2]; x2 = x[1:-1]; x3 = x[2:]
y1 = y[:-2]; y2 = y[1:-1]; y3 = y[2:]
## a, b, and c are the three sides of the triangle.
a = np.sqrt((x3-x2)**2 + (y3-y2)**2)
b = np.sqrt((x1-x3)**2 + (y1-y3)**2)
c = np.sqrt((x2-x1)**2 + (y2-y1)**2)
## A is the area of the triangle.
A = 0.5*(x1*y2 + x2*y3 + x3*y1 - x1*y3 - x2*y1 - x3*y2)
## The reciprocal of the circumscribed radius, i.e., the curvature.
k = 4.0*A/(a*b*c)
return np.squeeze(k)
def get_isobath(lon, lat, topo, iso, cyclic=False, smooth_isobath=False, window_length=21, win_type='barthann', **kw):
"""
USAGE
-----
lon_isob, lat_isob = get_isobath(lon, lat, topo, iso, cyclic=False, smooth_isobath=False, window_length=21, win_type='barthann', **kw)
Retrieves the 'lon_isob','lat_isob' coordinates of a wanted 'iso'
isobath from a topography array 'topo', with 'lon','lat'
coordinates.
"""
lon, lat, topo = map(np.array, (lon, lat, topo))
fig, ax = plt.subplots()
cs = ax.contour(lon, lat, topo, [iso])
coll = cs.collections[0]
## Test all lines to find the longest one.
## This is assumed to be the wanted isobath.
ncoll = len(coll.get_paths())
siz = np.array([])
for n in range(ncoll):
path = coll.get_paths()[n]
siz = np.append(siz, path.vertices.shape[0])
f = siz.argmax()
xiso = coll.get_paths()[f].vertices[:, 0]
yiso = coll.get_paths()[f].vertices[:, 1]
plt.close()
# Smooth the isobath with a moving window.
# Periodize according to window length to avoid losing edges.
if smooth_isobath:
fleft = window_length//2
fright = -window_length//2 + 1
if cyclic:
xl = xiso[:fleft] + 360
xr = xiso[fright:] - 360
yl = yiso[:fleft]
yr = yiso[fright:]
xiso = np.concatenate((xr, xiso, xl))
yiso = np.concatenate((yr, yiso, yl))
# xiso = rolling_window(xiso, window=window_length, win_type=win_type, center=True, **kw)[fleft:fright] # FIXME
# yiso = rolling_window(yiso, window=window_length, win_type=win_type, center=True, **kw)[fleft:fright] # FIXME
# else:
# xiso = rolling_window(xiso, window=window_length, win_type=win_type, center=True, **kw) # FIXME
# yiso = rolling_window(yiso, window=window_length, win_type=win_type, center=True, **kw) # FIXME
return xiso, yiso
def angle_isobath(lon, lat, h, isobath=100, cyclic=False, smooth_isobath=True, window_length=21, win_type='barthann', plot_map=False, **kw):
"""
USAGE
-----
lon_isob, lat_isob, angle = angle_isobath(lon, lat, h, isobath=100, cyclic=False, smooth_isobath=True, window_length=21, win_type='barthann', plot_map=False, **kw)
Returns the coordinates ('lon_isob', 'lat_isob') and the angle an isobath
makes with the zonal direction for a topography array 'h' at coordinates
('lon', 'lat'). Defaults to the 100 m isobath.
If 'smooth_isobath'==True, smooths the isobath with a rolling window of type
'win_type' and 'window_length' points wide.
All keyword arguments are passed to 'pandas.rolling_window()'.
If 'plot_map'==True, plots a map showing
the isobath (and its smoothed version if smooth_isobath==True).
"""
lon, lat, h = map(np.array, (lon, lat, h))
R = 6371000.0 # Mean radius of the earth in meters (6371 km), from gsw.constants.earth_radius.
deg2rad = np.pi/180. # [rad/deg]
# Extract isobath coordinates
xiso, yiso = get_isobath(lon, lat, h, isobath)
if cyclic: # Add cyclic point.
xiso = np.append(xiso, xiso[0])
yiso = np.append(yiso, yiso[0])
# Smooth the isobath with a moving window.
if smooth_isobath:
xiso = rolling_window(xiso, window=window_length, win_type=win_type, **kw)
yiso = rolling_window(yiso, window=window_length, win_type=win_type, **kw)
# From the coordinates of the isobath, find the angle it forms with the
# zonal axis, using points k+1 and k.
shth = yiso.size-1
theta = np.zeros(shth)
for k in range(shth):
dyk = R*(yiso[k+1]-yiso[k])
dxk = R*(xiso[k+1]-xiso[k])*np.cos(yiso[k]*deg2rad)
theta[k] = np.arctan2(dyk,dxk)
xisom = 0.5*(xiso[1:] + xiso[:-1])
yisom = 0.5*(yiso[1:] + yiso[:-1])
# Plots map showing the extracted isobath.
if plot_map:
fig, ax = plt.subplots()
m = bb_map([lon.min(), lon.max()], [lat.min(), lat.max()], projection='cyl', resolution='h', ax=ax)
m.plot(xisom, yisom, color='b', linestyle='-', zorder=3, latlon=True)
input("Press any key to continue.")
plt.close()
return xisom, yisom, theta
def isopyc_depth(z, dens0, isopyc=1027.75, dzref=1.):
"""
USAGE
-----
hisopyc = isopyc_depth(z, dens0, isopyc=1027.75)
Calculates the spatial distribution of the depth of a specified isopycnal 'isopyc'
(defaults to 1027.75 kg/m3) from a 3D density array 'dens0' (in kg/m3) with shape
(nz,ny,nx) and a 1D depth array 'z' (in m) with shape (nz).
'dzref' is the desired resolution for the refined depth array (defaults to 1 m) which
is generated for calculating the depth of the isopycnal. The smaller 'dzref', the finer
the vertical resolution used to locate the returned isopycnal depth array 'hisopyc'.
"""
z, dens0 = map(np.asanyarray, (z, dens0))
ny, nx = dens0.shape[1:]
zref = np.arange(z.min(), z.max(), dzref)
if np.ma.isMaskedArray(dens0):
dens0 = np.ma.filled(dens0, np.nan)
hisopyc = np.nan*np.ones((ny,nx))
for j in range(ny):
for i in range(nx):
dens0ij = dens0[:,j,i]
if np.logical_or(np.logical_or(isopyc<np.nanmin(dens0ij), np.nanmax(dens0ij)<isopyc), np.isnan(dens0ij).all()):
continue
else:
dens0ref = np.interp(zref, z, dens0ij) # Refined density profile.
dens0refn = near(dens0ref, isopyc)
fz=dens0ref==dens0refn
try:
hisopyc[j,i] = zref[fz]
except ValueError:
print("Warning: More than 1 (%d) nearest depths found. Using the median of the depths for point (j=%d,i=%d)."%(fz.sum(), j, i))
hisopyc[j,i] = np.nanmedian(zref[fz])
return hisopyc
def whiten_zero(x, y, z, ax, cs, n=1, cmap=plt.cm.RdBu_r, zorder=9):
"""
USAGE
-----
whiten_zero(x, y, z, ax, cs, n=1, cmap=plt.cm.RdBu_r, zorder=9)
Changes to white the color of the 'n' (defaults to 1)
neighboring patches about the zero contour created
by a command like 'cs = ax.contourf(x, y, z)'.
"""
x, y, z = map(np.asanyarray, (x,y,z))
white = (1.,1.,1.)
cslevs = cs.levels
assert 0. in cslevs
f0=np.where(cslevs==0.)[0][0]
f0m, f0p = f0-n, f0+n
c0m, c0p = cslevs[f0m], cslevs[f0p]
ax.contourf(x, y, z, levels=[c0m, c0p], linestyles='none', colors=[white, white], cmap=None, zorder=zorder)
def wind2stress(u, v, formula='large_pond1981-modified'):
"""
USAGE
-----
taux,tauy = wind2stress(u, v, formula='large_pond1981-modified')
Converts u,v wind vector components to taux,tauy
wind stress vector components.
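Example
-------
A minimal sketch (illustrative wind speeds spanning the drag-coefficient
regimes of the default formula; not part of the original docstring):
>>> u = np.array([0.5, 2., 5., 12.]) # [m/s]
>>> v = np.zeros(4)
>>> taux, tauy = wind2stress(u, v)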
"""
rho_air = 1.226 # kg/m3
mag = np.sqrt(u**2+v**2) # m/s
Cd = np.zeros( mag.shape ) # Drag coefficient.
if formula=='large_pond1981-modified':
# Large and Pond (1981) formula
# modified for light winds, as
# in Trenberth et al. (1990).
f=mag<=1.
Cd[f] = 2.18e-3
f=np.logical_and(mag>1.,mag<3.)
Cd[f] = (0.62+1.56/mag[f])*1e-3
f=np.logical_and(mag>=3.,mag<10.)
Cd[f] = 1.14e-3
f=mag>=10.
Cd[f] = (0.49 + 0.065*mag[f])*1e-3
elif formula=='mellor2004':
Cd = 7.5e-4 + 6.7e-5*mag
else:
np.disp('Unknown formula for Cd.')
pass
# Computing wind stress [N/m2]
taux = rho_air*Cd*mag*u
tauy = rho_air*Cd*mag*v
return taux,tauy
def gen_dates(start, end, dt='day', input_datetime=False):
"""
Returns a list of datetimes within the date range
from `start` to `end`, at a `dt` time interval.
`dt` can be 'second', 'minute', 'hour', 'day', 'week',
'month' or 'year'.
If `input_datetime` is False (default), `start` and `end`
must be a date in string form. If `input_datetime` is True,
`start` and `end` must be datetime objects.
Note
----
Modified from original function
by Filipe Fernandes (ocefpaf@gmail.com).
Example
-------
>>> from ap_tools.utils import gen_dates
>>> from datetime import datetime
>>> start = '1989-08-19'
>>> end = datetime.utcnow().strftime("%Y-%m-%d")
>>> gen_dates(start, end, dt='day')
"""
DT = dict(second=rrule.SECONDLY,
minute=rrule.MINUTELY,
hour=rrule.HOURLY,
day=rrule.DAILY,
week=rrule.WEEKLY,
month=rrule.MONTHLY,
year=rrule.YEARLY)
dt = DT[dt]
if input_datetime: # Input are datetime objects. No parsing needed.
dates = rrule.rrule(dt, dtstart=start, until=end)
else: # Input in string form, parse into datetime objects.
dates = rrule.rrule(dt, dtstart=parser.parse(start), until=parser.parse(end))
return list(dates)
def fmt_isobath(cs, fontsize=8, fmt='%g', inline=True, inline_spacing=7, manual=True, **kw):
"""
Formats the labels of isobath contours. `manual` is set to `True` by default,
but can be `False`, or a tuple/list of tuples with the coordinates of the labels.
All options are passed to plt.clabel().
"""
isobstrH = plt.clabel(cs, fontsize=fontsize, fmt=fmt, inline=inline, \
inline_spacing=inline_spacing, manual=manual, **kw)
for ih in range(0, len(isobstrH)): # Appends 'm' for meters at the end of the label.
isobstrh = isobstrH[ih]
isobstr = isobstrh.get_text()
isobstr = isobstr.replace('-','') + ' m'
isobstrh.set_text(isobstr)
def float2latex(f, ndigits=1):
"""
USAGE
-----
texstr = float2latex(f, ndigits=1)
Converts a float input into a LaTeX-formatted string in scientific
notation, keeping 'ndigits' digits after the decimal point (defaults to 1).
Adapted from:
http://stackoverflow.com/questions/13490292/format-number-using-latex-notation-in-python
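Example
-------
A minimal sketch (illustrative value; gives the LaTeX string for
3.14 x 10^-4; not part of the original docstring):
>>> texstr = float2latex(0.000314, ndigits=2)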
"""
float_str = "{0:.%se}"%ndigits
float_str = float_str.format(f)
base, exponent = float_str.split("e")
return "${0} \times 10^{{{1}}}$".format(base, int(exponent))
def mat2npz(matname):
"""
USAGE
-----
mat2npz(matname)
Extract variables stored in a .mat file,
and saves them in a .npz file.
"""
d = loadmat(matname)
_ = d.pop('__header__')
_ = d.pop('__globals__')
_ = d.pop('__version__')
npzname = matname[:-4] + '.npz'
np.savez(npzname,**d)
return None
def bb_map(lons, lats, ax, projection='merc', resolution='i', drawparallels=True, drawmeridians=True):
"""
USAGE
-----
m = bb_map(lons, lats, **kwargs)
Returns a Basemap instance with lon,lat bounding limits
inferred from the input arrays `lons`,`lats`.
Coastlines, countries, states, parallels and meridians
are drawn, and continents are filled.
"""
lons,lats = map(np.asanyarray, (lons,lats))
lonmin,lonmax = lons.min(),lons.max()
latmin,latmax = lats.min(),lats.max()
m = Basemap(llcrnrlon=lonmin,
urcrnrlon=lonmax,
llcrnrlat=latmin,
urcrnrlat=latmax,
projection=projection,
resolution=resolution,
ax=ax)
plt.ioff() # Avoid showing the figure.
m.fillcontinents(color='0.9', zorder=9)
m.drawcoastlines(zorder=10)
m.drawstates(zorder=10)
m.drawcountries(linewidth=2.0, zorder=10)
m.drawmapboundary(zorder=9999)
if drawmeridians:
m.drawmeridians(np.arange(np.floor(lonmin), np.ceil(lonmax), 1), linewidth=0.15, labels=[1, 0, 1, 0], zorder=12)
if drawparallels:
m.drawparallels(np.arange(np.floor(latmin), np.ceil(latmax), 1), linewidth=0.15, labels=[1, 0, 0, 0], zorder=12)
plt.ion()
return m
def dots_dualcolor(x, y, z, thresh=20., color_low='b', color_high='r', marker='o', markersize=5):
"""
USAGE
-----
dots_dualcolor(x, y, z, thresh=20., color_low='b', color_high='r')
Plots dots colored with a dual-color criterion,
separated by a threshold value.
"""
ax = plt.gca()
# Below-threshold dots.
f=z<=thresh
ax.plot(x[f], y[f], lw=0, marker=marker, ms=markersize, mfc=color_low, mec=color_low)
# Above-threshold dots.
f=z>thresh
ax.plot(x[f], y[f], lw=0, marker=marker, ms=markersize, mfc=color_high, mec=color_high)
if __name__=='__main__':
import doctest
doctest.testmod()
|
mit
| 7,885,736,604,063,761,000 | 30.255774 | 167 | 0.604821 | false |
ralstonJ/Angelhack-Rumble
|
scripts/TwitterStreamTutorial.py
|
1
|
1032
|
#!/usr/bin/env python
import tweepy
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import time
ckey = 'CiQGfooRBKJRSrtaGl7GkQ'
csecret = 'G2o2pVeFaYlZdZAQL6NxkE3x7iW0U7kfoUKBTaOg'
atoken = '741285805-38CJ8saoV8UJx46L38bFiZMOSocC2XhtubOsot9x'
asecret = 'sNJUHNg4GzZDnIR82BeCkQK0ptgrOWsimO265BhB1YCtq'
class listener(StreamListener):
def on_data(self, data):
try:
#tweet = data.split(',"text":"')[4].split('","source')[0]
print data
#saveThis = str(time.time())+'::'+tweet
saveFile = open('all_tweets.csv','a')
saveFile.write(data)
saveFile.write('\n')
saveFile.close()
return True
except BaseException, e:
print 'failed ondata,',str(e)
time.sleep(5)
def on_error(self, status):
print status
auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
twitterStream = Stream(auth, listener())
twitterStream.filter(track=["rob ford"])
|
apache-2.0
| -2,584,913,198,241,240,600 | 26.157895 | 66 | 0.676357 | false |
nevermoreluo/sakura
|
sakura/buffer_reader.py
|
1
|
1837
|
import struct
class BufferReader():
def __init__(self, buf, endian='<'):
self._buffer = buf
self._length = len(buf)
self._endian = endian
self._offset = 0
self._error = False
@property
def error(self):
return self._error
def set_endian(self, endian):
self._endian = endian
def get_int8(self):
return self._get_int('b', 1)
def get_uint8(self):
return self._get_int('B', 1)
def get_int16(self):
return self._get_int('h', 2)
def get_uint16(self):
return self._get_int('H', 2)
def get_int32(self):
return self._get_int('i', 4)
def get_uint32(self):
return self._get_int('I', 4)
def get_int64(self):
return self._get_int('q', 8)
def get_uint64(self):
return self._get_int('Q', 8)
def get_float(self):
return self._get_int('f', 4)
def get_double(self):
return self._get_int('d', 8)
def get_buffer(self, size):
end_offset = self._offset + size
if end_offset > self._length:
self._error = True
v = self._buffer[self._offset:end_offset]
self._offset = end_offset
return v
def get_remain(self):
v = self._buffer[self._offset:]
self._offset += len(v)
return v
def skip(self, size):
self._offset += size
if self._offset > self._length:
self._error = True
def _get_int(self, fmt, size):
if self._offset + size > self._length:
self._offset += size
self._error = True
return 0
v = struct.unpack_from(self._endian + fmt, self._buffer, self._offset)[0]
self._offset += size
return v
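# A minimal usage sketch (illustrative values; not part of the original module):
# reader = BufferReader(struct.pack('<hI', -3, 42))
# assert reader.get_int16() == -3
# assert reader.get_uint32() == 42
# assert not reader.error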
|
gpl-3.0
| 896,517,828,458,708,500 | 22.851351 | 81 | 0.507349 | false |
townbull/keystone-dtrust
|
keystone/tests/test_auth.py
|
1
|
37040
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import uuid
from keystone import auth
from keystone import config
from keystone import exception
from keystone import identity
from keystone.openstack.common import timeutils
from keystone import tests
from keystone.tests import default_fixtures
from keystone import token
from keystone import trust
CONF = config.CONF
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
def _build_user_auth(token=None, user_id=None, username=None,
password=None, tenant_id=None, tenant_name=None,
trust_id=None):
"""Build auth dictionary.
It will create an auth dictionary based on all the arguments
that it receives.
"""
auth_json = {}
if token is not None:
auth_json['token'] = token
if username or password:
auth_json['passwordCredentials'] = {}
if username is not None:
auth_json['passwordCredentials']['username'] = username
if user_id is not None:
auth_json['passwordCredentials']['userId'] = user_id
if password is not None:
auth_json['passwordCredentials']['password'] = password
if tenant_name is not None:
auth_json['tenantName'] = tenant_name
if tenant_id is not None:
auth_json['tenantId'] = tenant_id
if trust_id is not None:
auth_json['trust_id'] = trust_id
return auth_json
class AuthTest(tests.TestCase):
def setUp(self):
super(AuthTest, self).setUp()
CONF.identity.driver = 'keystone.identity.backends.kvs.Identity'
self.load_backends()
self.load_fixtures(default_fixtures)
# need to register the token provider first because auth controller
# depends on it
token.provider.Manager()
self.controller = token.controllers.Auth()
def assertEqualTokens(self, a, b):
"""Assert that two tokens are equal.
Compare two tokens except for their ids. This also truncates
the time in the comparison.
"""
def normalize(token):
token['access']['token']['id'] = 'dummy'
del token['access']['token']['expires']
del token['access']['token']['issued_at']
return token
self.assertCloseEnoughForGovernmentWork(
timeutils.parse_isotime(a['access']['token']['expires']),
timeutils.parse_isotime(b['access']['token']['expires']))
self.assertCloseEnoughForGovernmentWork(
timeutils.parse_isotime(a['access']['token']['issued_at']),
timeutils.parse_isotime(b['access']['token']['issued_at']))
return self.assertDictEqual(normalize(a), normalize(b))
class AuthBadRequests(AuthTest):
def setUp(self):
super(AuthBadRequests, self).setUp()
def test_no_external_auth(self):
"""Verify that _authenticate_external() raises exception if N/A."""
self.assertRaises(
token.controllers.ExternalAuthNotApplicable,
self.controller._authenticate_external,
{}, {})
def test_no_token_in_auth(self):
"""Verify that _authenticate_token() raises exception if no token."""
self.assertRaises(
exception.ValidationError,
self.controller._authenticate_token,
None, {})
def test_no_credentials_in_auth(self):
"""Verify that _authenticate_local() raises exception if no creds."""
self.assertRaises(
exception.ValidationError,
self.controller._authenticate_local,
None, {})
def test_authenticate_blank_request_body(self):
"""Verify sending empty json dict raises the right exception."""
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, {})
def test_authenticate_blank_auth(self):
"""Verify sending blank 'auth' raises the right exception."""
body_dict = _build_user_auth()
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_invalid_auth_content(self):
"""Verify sending invalid 'auth' raises the right exception."""
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, {'auth': 'abcd'})
def test_authenticate_user_id_too_large(self):
"""Verify sending large 'userId' raises the right exception."""
body_dict = _build_user_auth(user_id='0' * 65, username='FOO',
password='foo2')
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_username_too_large(self):
"""Verify sending large 'username' raises the right exception."""
body_dict = _build_user_auth(username='0' * 65, password='foo2')
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_tenant_id_too_large(self):
"""Verify sending large 'tenantId' raises the right exception."""
body_dict = _build_user_auth(username='FOO', password='foo2',
tenant_id='0' * 65)
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_tenant_name_too_large(self):
"""Verify sending large 'tenantName' raises the right exception."""
body_dict = _build_user_auth(username='FOO', password='foo2',
tenant_name='0' * 65)
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_token_too_large(self):
"""Verify sending large 'token' raises the right exception."""
body_dict = _build_user_auth(token={'id': '0' * 8193})
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_password_too_large(self):
"""Verify sending large 'password' raises the right exception."""
length = CONF.identity.max_password_length + 1
body_dict = _build_user_auth(username='FOO', password='0' * length)
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
class AuthWithToken(AuthTest):
def setUp(self):
super(AuthWithToken, self).setUp()
def test_unscoped_token(self):
"""Verify getting an unscoped token with password creds."""
body_dict = _build_user_auth(username='FOO',
password='foo2')
unscoped_token = self.controller.authenticate({}, body_dict)
tenant = unscoped_token["access"]["token"].get("tenant", None)
self.assertEqual(tenant, None)
def test_auth_invalid_token(self):
"""Verify exception is raised if invalid token."""
body_dict = _build_user_auth(token={"id": uuid.uuid4().hex})
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_bad_formatted_token(self):
"""Verify exception is raised if invalid token."""
body_dict = _build_user_auth(token={})
self.assertRaises(
exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_auth_unscoped_token_no_project(self):
"""Verify getting an unscoped token with an unscoped token."""
body_dict = _build_user_auth(
username='FOO',
password='foo2')
unscoped_token = self.controller.authenticate({}, body_dict)
body_dict = _build_user_auth(
token=unscoped_token["access"]["token"])
unscoped_token_2 = self.controller.authenticate({}, body_dict)
self.assertEqualTokens(unscoped_token, unscoped_token_2)
def test_auth_unscoped_token_project(self):
"""Verify getting a token in a tenant with an unscoped token."""
# Add a role in so we can check we get this back
self.identity_api.add_role_to_user_and_project(
self.user_foo['id'],
self.tenant_bar['id'],
self.role_member['id'])
# Get an unscoped tenant
body_dict = _build_user_auth(
username='FOO',
password='foo2')
unscoped_token = self.controller.authenticate({}, body_dict)
# Get a token on BAR tenant using the unscoped tenant
body_dict = _build_user_auth(
token=unscoped_token["access"]["token"],
tenant_name="BAR")
scoped_token = self.controller.authenticate({}, body_dict)
tenant = scoped_token["access"]["token"]["tenant"]
roles = scoped_token["access"]["metadata"]["roles"]
self.assertEquals(tenant["id"], self.tenant_bar['id'])
self.assertEquals(roles[0], self.role_member['id'])
def test_auth_token_project_group_role(self):
"""Verify getting a token in a tenant with group roles."""
# Add a v2 style role in so we can check we get this back
self.identity_api.add_role_to_user_and_project(
self.user_foo['id'],
self.tenant_bar['id'],
self.role_member['id'])
# Now create a group role for this user as well
domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.identity_api.create_domain(domain1['id'], domain1)
new_group = {'id': uuid.uuid4().hex, 'domain_id': domain1['id'],
'name': uuid.uuid4().hex}
self.identity_api.create_group(new_group['id'], new_group)
self.identity_api.add_user_to_group(self.user_foo['id'],
new_group['id'])
self.identity_api.create_grant(
group_id=new_group['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_admin['id'])
# Get a scoped token for the tenant
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name="BAR")
scoped_token = self.controller.authenticate({}, body_dict)
tenant = scoped_token["access"]["token"]["tenant"]
roles = scoped_token["access"]["metadata"]["roles"]
self.assertEquals(tenant["id"], self.tenant_bar['id'])
self.assertIn(self.role_member['id'], roles)
self.assertIn(self.role_admin['id'], roles)
def test_auth_token_cross_domain_group_and_project(self):
"""Verify getting a token in cross domain group/project roles."""
# create domain, project and group and grant roles to user
domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.identity_api.create_domain(domain1['id'], domain1)
project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': domain1['id']}
self.assignment_api.create_project(project1['id'], project1)
role_foo_domain1 = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex}
self.identity_api.create_role(role_foo_domain1['id'],
role_foo_domain1)
role_group_domain1 = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex}
self.identity_api.create_role(role_group_domain1['id'],
role_group_domain1)
self.identity_api.add_user_to_project(project1['id'],
self.user_foo['id'])
new_group = {'id': uuid.uuid4().hex, 'domain_id': domain1['id'],
'name': uuid.uuid4().hex}
self.identity_api.create_group(new_group['id'], new_group)
self.identity_api.add_user_to_group(self.user_foo['id'],
new_group['id'])
self.identity_api.create_grant(
user_id=self.user_foo['id'],
project_id=project1['id'],
role_id=self.role_member['id'])
self.identity_api.create_grant(
group_id=new_group['id'],
project_id=project1['id'],
role_id=self.role_admin['id'])
self.identity_api.create_grant(
user_id=self.user_foo['id'],
domain_id=domain1['id'],
role_id=role_foo_domain1['id'])
self.identity_api.create_grant(
group_id=new_group['id'],
domain_id=domain1['id'],
role_id=role_group_domain1['id'])
# Get a scoped token for the tenant
body_dict = _build_user_auth(
username=self.user_foo['name'],
password=self.user_foo['password'],
tenant_name=project1['name'])
scoped_token = self.controller.authenticate({}, body_dict)
tenant = scoped_token["access"]["token"]["tenant"]
roles = scoped_token["access"]["metadata"]["roles"]
self.assertEquals(tenant["id"], project1['id'])
self.assertIn(self.role_member['id'], roles)
self.assertIn(self.role_admin['id'], roles)
self.assertNotIn(role_foo_domain1['id'], roles)
self.assertNotIn(role_group_domain1['id'], roles)
def test_belongs_to_no_tenant(self):
r = self.controller.authenticate(
{},
auth={
'passwordCredentials': {
'username': self.user_foo['name'],
'password': self.user_foo['password']
}
})
unscoped_token_id = r['access']['token']['id']
self.assertRaises(
exception.Unauthorized,
self.controller.validate_token,
dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
token_id=unscoped_token_id)
def test_belongs_to(self):
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name="BAR")
scoped_token = self.controller.authenticate({}, body_dict)
scoped_token_id = scoped_token['access']['token']['id']
self.assertRaises(
exception.Unauthorized,
self.controller.validate_token,
dict(is_admin=True, query_string={'belongsTo': 'me'}),
token_id=scoped_token_id)
self.assertRaises(
exception.Unauthorized,
self.controller.validate_token,
dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
token_id=scoped_token_id)
def test_token_auth_with_binding(self):
CONF.token.bind = ['kerberos']
body_dict = _build_user_auth()
context = {'REMOTE_USER': 'FOO', 'AUTH_TYPE': 'Negotiate'}
unscoped_token = self.controller.authenticate(context, body_dict)
# the token should have bind information in it
bind = unscoped_token['access']['token']['bind']
self.assertEqual(bind['kerberos'], 'FOO')
body_dict = _build_user_auth(
token=unscoped_token['access']['token'],
tenant_name='BAR')
# using unscoped token without remote user context fails
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
# using token with remote user context succeeds
scoped_token = self.controller.authenticate(context, body_dict)
# the bind information should be carried over from the original token
bind = scoped_token['access']['token']['bind']
self.assertEqual(bind['kerberos'], 'FOO')
def test_deleting_role_revokes_token(self):
role_controller = identity.controllers.Role()
project1 = {'id': 'Project1', 'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID}
self.assignment_api.create_project(project1['id'], project1)
role_one = {'id': 'role_one', 'name': uuid.uuid4().hex}
self.assignment_api.create_role(role_one['id'], role_one)
self.identity_api.add_role_to_user_and_project(
self.user_foo['id'], project1['id'], role_one['id'])
no_context = {}
# Get a scoped token for the tenant
body_dict = _build_user_auth(
username=self.user_foo['name'],
password=self.user_foo['password'],
tenant_name=project1['name'])
token = self.controller.authenticate(no_context, body_dict)
# Ensure it is valid
token_id = token['access']['token']['id']
self.controller.validate_token(
dict(is_admin=True, query_string={}),
token_id=token_id)
# Delete the role, which should invalidate the token
role_controller.delete_role(
dict(is_admin=True, query_string={}), role_one['id'])
# Check the token is now invalid
self.assertRaises(
exception.TokenNotFound,
self.controller.validate_token,
dict(is_admin=True, query_string={}),
token_id=token_id)
class AuthWithPasswordCredentials(AuthTest):
def setUp(self):
super(AuthWithPasswordCredentials, self).setUp()
def test_auth_invalid_user(self):
"""Verify exception is raised if invalid user."""
body_dict = _build_user_auth(
username=uuid.uuid4().hex,
password=uuid.uuid4().hex)
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_valid_user_invalid_password(self):
"""Verify exception is raised if invalid password."""
body_dict = _build_user_auth(
username="FOO",
password=uuid.uuid4().hex)
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_empty_password(self):
"""Verify exception is raised if empty password."""
body_dict = _build_user_auth(
username="FOO",
password="")
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_no_password(self):
"""Verify exception is raised if empty password."""
body_dict = _build_user_auth(username="FOO")
self.assertRaises(
exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_blank_password_credentials(self):
"""Sending empty dict as passwordCredentials raises a 400 error."""
body_dict = {'passwordCredentials': {}, 'tenantName': 'demo'}
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_no_username(self):
"""Verify skipping username raises the right exception."""
body_dict = _build_user_auth(password="pass",
tenant_name="demo")
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_bind_without_remote_user(self):
CONF.token.bind = ['kerberos']
body_dict = _build_user_auth(username='FOO', password='foo2',
tenant_name='BAR')
token = self.controller.authenticate({}, body_dict)
self.assertNotIn('bind', token['access']['token'])
class AuthWithRemoteUser(AuthTest):
def setUp(self):
super(AuthWithRemoteUser, self).setUp()
def test_unscoped_remote_authn(self):
"""Verify getting an unscoped token with external authn."""
body_dict = _build_user_auth(
username='FOO',
password='foo2')
local_token = self.controller.authenticate(
{}, body_dict)
body_dict = _build_user_auth()
remote_token = self.controller.authenticate(
{'REMOTE_USER': 'FOO'}, body_dict)
self.assertEqualTokens(local_token, remote_token)
def test_unscoped_remote_authn_jsonless(self):
"""Verify that external auth with invalid request fails."""
self.assertRaises(
exception.ValidationError,
self.controller.authenticate,
{'REMOTE_USER': 'FOO'},
None)
def test_scoped_remote_authn(self):
"""Verify getting a token with external authn."""
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name='BAR')
local_token = self.controller.authenticate(
{}, body_dict)
body_dict = _build_user_auth(
tenant_name='BAR')
remote_token = self.controller.authenticate(
{'REMOTE_USER': 'FOO'}, body_dict)
self.assertEqualTokens(local_token, remote_token)
def test_scoped_nometa_remote_authn(self):
"""Verify getting a token with external authn and no metadata."""
body_dict = _build_user_auth(
username='TWO',
password='two2',
tenant_name='BAZ')
local_token = self.controller.authenticate(
{}, body_dict)
body_dict = _build_user_auth(tenant_name='BAZ')
remote_token = self.controller.authenticate(
{'REMOTE_USER': 'TWO'}, body_dict)
self.assertEqualTokens(local_token, remote_token)
def test_scoped_remote_authn_invalid_user(self):
"""Verify that external auth with invalid user fails."""
body_dict = _build_user_auth(tenant_name="BAR")
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{'REMOTE_USER': uuid.uuid4().hex},
body_dict)
def test_bind_with_kerberos(self):
CONF.token.bind = ['kerberos']
kerb = {'REMOTE_USER': 'FOO', 'AUTH_TYPE': 'Negotiate'}
body_dict = _build_user_auth(tenant_name="BAR")
token = self.controller.authenticate(kerb, body_dict)
self.assertEqual(token['access']['token']['bind']['kerberos'], 'FOO')
def test_bind_without_config_opt(self):
CONF.token.bind = ['x509']
kerb = {'REMOTE_USER': 'FOO', 'AUTH_TYPE': 'Negotiate'}
body_dict = _build_user_auth(tenant_name='BAR')
token = self.controller.authenticate(kerb, body_dict)
self.assertNotIn('bind', token['access']['token'])
class AuthWithTrust(AuthTest):
def setUp(self):
super(AuthWithTrust, self).setUp()
self.opt_in_group('trust', enabled=True)
trust.Manager()
self.trust_controller = trust.controllers.TrustV3()
self.auth_v3_controller = auth.controllers.Auth()
self.trustor = self.user_foo
self.trustee = self.user_two
self.assigned_roles = [self.role_member['id'],
self.role_browser['id']]
for assigned_role in self.assigned_roles:
self.identity_api.add_role_to_user_and_project(
self.trustor['id'], self.tenant_bar['id'], assigned_role)
self.sample_data = {'trustor_user_id': self.trustor['id'],
'trustee_user_id': self.trustee['id'],
'project_id': self.tenant_bar['id'],
'impersonation': True,
'roles': [{'id': self.role_browser['id']},
{'name': self.role_member['name']}]}
expires_at = timeutils.strtime(timeutils.utcnow() +
datetime.timedelta(minutes=10),
fmt=TIME_FORMAT)
self.create_trust(expires_at=expires_at)
def create_trust(self, expires_at=None, impersonation=True):
        username = self.trustor['name']
password = 'foo2'
body_dict = _build_user_auth(username=username, password=password)
self.unscoped_token = self.controller.authenticate({}, body_dict)
context = {'token_id': self.unscoped_token['access']['token']['id']}
trust_data = copy.deepcopy(self.sample_data)
trust_data['expires_at'] = expires_at
trust_data['impersonation'] = impersonation
self.new_trust = self.trust_controller.create_trust(
context, trust=trust_data)['trust']
def build_v2_token_request(self, username, password):
body_dict = _build_user_auth(username=username, password=password)
self.unscoped_token = self.controller.authenticate({}, body_dict)
unscoped_token_id = self.unscoped_token['access']['token']['id']
request_body = _build_user_auth(token={'id': unscoped_token_id},
trust_id=self.new_trust['id'],
tenant_id=self.tenant_bar['id'])
return request_body
def test_create_trust_bad_data_fails(self):
context = {'token_id': self.unscoped_token['access']['token']['id']}
bad_sample_data = {'trustor_user_id': self.trustor['id']}
self.assertRaises(exception.ValidationError,
self.trust_controller.create_trust,
context, trust=bad_sample_data)
def test_create_trust_no_roles(self):
self.new_trust = None
self.sample_data['roles'] = []
self.create_trust()
self.assertEquals(self.new_trust['roles'], [])
def test_create_trust(self):
self.assertEquals(self.new_trust['trustor_user_id'],
self.trustor['id'])
self.assertEquals(self.new_trust['trustee_user_id'],
self.trustee['id'])
role_ids = [self.role_browser['id'], self.role_member['id']]
self.assertTrue(timeutils.parse_strtime(self.new_trust['expires_at'],
fmt=TIME_FORMAT))
self.assertIn('http://localhost:5000/v3/OS-TRUST/',
self.new_trust['links']['self'])
self.assertIn('http://localhost:5000/v3/OS-TRUST/',
self.new_trust['roles_links']['self'])
for role in self.new_trust['roles']:
self.assertIn(role['id'], role_ids)
def test_get_trust(self):
context = {'token_id': self.unscoped_token['access']['token']['id']}
trust = self.trust_controller.get_trust(context,
self.new_trust['id'])['trust']
self.assertEquals(trust['trustor_user_id'],
self.trustor['id'])
self.assertEquals(trust['trustee_user_id'],
self.trustee['id'])
role_ids = [self.role_browser['id'], self.role_member['id']]
for role in self.new_trust['roles']:
self.assertIn(role['id'], role_ids)
def test_create_trust_no_impersonation(self):
self.create_trust(expires_at=None, impersonation=False)
self.assertEquals(self.new_trust['trustor_user_id'],
self.trustor['id'])
self.assertEquals(self.new_trust['trustee_user_id'],
self.trustee['id'])
self.assertIs(self.new_trust['impersonation'], False)
auth_response = self.fetch_v2_token_from_trust()
token_user = auth_response['access']['user']
self.assertEquals(token_user['id'],
self.new_trust['trustee_user_id'])
# TODO(ayoung): Endpoints
def test_create_trust_impersonation(self):
self.create_trust(expires_at=None)
self.assertEqual(self.new_trust['trustor_user_id'], self.trustor['id'])
self.assertEqual(self.new_trust['trustee_user_id'], self.trustee['id'])
self.assertIs(self.new_trust['impersonation'], True)
auth_response = self.fetch_v2_token_from_trust()
token_user = auth_response['access']['user']
self.assertEqual(token_user['id'], self.new_trust['trustor_user_id'])
def test_token_from_trust_wrong_user_fails(self):
request_body = self.build_v2_token_request('FOO', 'foo2')
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def fetch_v2_token_from_trust(self):
request_body = self.build_v2_token_request('TWO', 'two2')
auth_response = self.controller.authenticate({}, request_body)
return auth_response
def fetch_v3_token_from_trust(self):
v3_password_data = {
'identity': {
"methods": ["password"],
"password": {
"user": {
"id": self.trustee["id"],
"password": self.trustee["password"]}}
},
'scope': {
'project': {
'id': self.tenant_baz['id']}}}
auth_response = (self.auth_v3_controller.authenticate_for_token
({'query_string': {}}, v3_password_data))
token = auth_response.headers['X-Subject-Token']
v3_req_with_trust = {
"identity": {
"methods": ["token"],
"token": {"id": token}},
"scope": {
"OS-TRUST:trust": {"id": self.new_trust['id']}}}
token_auth_response = (self.auth_v3_controller.authenticate_for_token
({'query_string': {}}, v3_req_with_trust))
return token_auth_response
def test_create_v3_token_from_trust(self):
auth_response = self.fetch_v3_token_from_trust()
trust_token_user = auth_response.json['token']['user']
self.assertEquals(trust_token_user['id'], self.trustor['id'])
trust_token_trust = auth_response.json['token']['OS-TRUST:trust']
self.assertEquals(trust_token_trust['id'], self.new_trust['id'])
self.assertEquals(trust_token_trust['trustor_user']['id'],
self.trustor['id'])
self.assertEquals(trust_token_trust['trustee_user']['id'],
self.trustee['id'])
trust_token_roles = auth_response.json['token']['roles']
self.assertEquals(len(trust_token_roles), 2)
def test_v3_trust_token_get_token_fails(self):
auth_response = self.fetch_v3_token_from_trust()
trust_token = auth_response.headers['X-Subject-Token']
v3_token_data = {'identity': {
'methods': ['token'],
'token': {'id': trust_token}
}}
self.assertRaises(
exception.Forbidden,
self.auth_v3_controller.authenticate_for_token,
{'query_string': {}}, v3_token_data)
def test_token_from_trust(self):
auth_response = self.fetch_v2_token_from_trust()
self.assertIsNotNone(auth_response)
self.assertEquals(len(auth_response['access']['metadata']['roles']),
2,
"user_foo has three roles, but the token should"
" only get the two roles specified in the trust.")
def assert_token_count_for_trust(self, expected_value):
tokens = self.trust_controller.token_api.list_tokens(
self.trustee['id'], trust_id=self.new_trust['id'])
token_count = len(tokens)
self.assertEquals(token_count, expected_value)
def test_delete_tokens_for_user_invalidates_tokens_from_trust(self):
self.assert_token_count_for_trust(0)
self.fetch_v2_token_from_trust()
self.assert_token_count_for_trust(1)
self.trust_controller._delete_tokens_for_user(self.trustee['id'])
self.assert_token_count_for_trust(0)
def test_token_from_trust_cant_get_another_token(self):
auth_response = self.fetch_v2_token_from_trust()
trust_token_id = auth_response['access']['token']['id']
request_body = _build_user_auth(token={'id': trust_token_id},
tenant_id=self.tenant_bar['id'])
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_delete_trust_revokes_token(self):
context = {'token_id': self.unscoped_token['access']['token']['id']}
self.fetch_v2_token_from_trust()
trust_id = self.new_trust['id']
tokens = self.token_api.list_tokens(self.trustor['id'],
trust_id=trust_id)
self.assertEquals(len(tokens), 1)
self.trust_controller.delete_trust(context, trust_id=trust_id)
tokens = self.token_api.list_tokens(self.trustor['id'],
trust_id=trust_id)
self.assertEquals(len(tokens), 0)
def test_token_from_trust_with_no_role_fails(self):
for assigned_role in self.assigned_roles:
self.identity_api.remove_role_from_user_and_project(
self.trustor['id'], self.tenant_bar['id'], assigned_role)
request_body = self.build_v2_token_request('TWO', 'two2')
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_expired_trust_get_token_fails(self):
expiry = "1999-02-18T10:10:00Z"
self.create_trust(expiry)
request_body = self.build_v2_token_request('TWO', 'two2')
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_token_from_trust_with_wrong_role_fails(self):
self.identity_api.add_role_to_user_and_project(
self.trustor['id'],
self.tenant_bar['id'],
self.role_other['id'])
for assigned_role in self.assigned_roles:
self.identity_api.remove_role_from_user_and_project(
self.trustor['id'], self.tenant_bar['id'], assigned_role)
request_body = self.build_v2_token_request('TWO', 'two2')
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
class TokenExpirationTest(AuthTest):
def _maintain_token_expiration(self):
"""Token expiration should be maintained after re-auth & validation."""
timeutils.set_time_override()
r = self.controller.authenticate(
{},
auth={
'passwordCredentials': {
'username': self.user_foo['name'],
'password': self.user_foo['password']
}
})
unscoped_token_id = r['access']['token']['id']
original_expiration = r['access']['token']['expires']
timeutils.advance_time_seconds(1)
r = self.controller.validate_token(
dict(is_admin=True, query_string={}),
token_id=unscoped_token_id)
self.assertEqual(original_expiration, r['access']['token']['expires'])
timeutils.advance_time_seconds(1)
r = self.controller.authenticate(
{},
auth={
'token': {
'id': unscoped_token_id,
},
'tenantId': self.tenant_bar['id'],
})
scoped_token_id = r['access']['token']['id']
self.assertEqual(original_expiration, r['access']['token']['expires'])
timeutils.advance_time_seconds(1)
r = self.controller.validate_token(
dict(is_admin=True, query_string={}),
token_id=scoped_token_id)
self.assertEqual(original_expiration, r['access']['token']['expires'])
def test_maintain_uuid_token_expiration(self):
self.opt_in_group('signing', token_format='UUID')
self._maintain_token_expiration()
class NonDefaultAuthTest(tests.TestCase):
def test_add_non_default_auth_method(self):
self.opt_in_group('auth', methods=['password', 'token', 'custom'])
config.setup_authentication()
self.assertTrue(hasattr(CONF.auth, 'custom'))
|
apache-2.0
| -2,652,902,829,037,487,600 | 40.385475 | 79 | 0.578024 | false |
machtfit/django-database-email-backend
|
database_email_backend/admin.py
|
1
|
5607
|
#-*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib import admin
from django import forms
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.core.exceptions import PermissionDenied
from django.core.mail import message
from django.db.models import Count
from functools import update_wrapper
from database_email_backend.models import Email, Attachment
from django.utils.translation import ugettext as _
WIDE_INPUT_SIZE = '80'
###################
# view sent email #
###################
class AttachmentInlineAdmin(admin.TabularInline):
model = Attachment
extra = 0
can_delete = False
max_num = 0
readonly_fields = ('filename', 'mimetype', 'content', 'file_link',)
fields = ('file_link', 'mimetype',)
def file_link(self, obj):
if obj.pk is None:
return u''
url_name = '%s:%s_email_attachment' % (self.admin_site.name, self.model._meta.app_label,)
kwargs={
'email_id': str(obj.email_id),
'attachment_id': str(obj.id),
'filename': str(obj.filename)}
url = reverse(url_name, kwargs=kwargs)
return u'<a href="%(url)s">%(filename)s</a>' % {'filename': obj.filename, 'url': url}
file_link.allow_tags = True
class EmailAdmin(admin.ModelAdmin):
list_display = ('from_email', 'to_emails', 'subject', 'body_stripped', 'sent_at', 'attachment_count')
date_hierarchy = 'sent_at'
search_fields = ('from_email', 'to_emails', 'subject', 'body',)
exclude = ('raw',)
readonly_fields = list_display + ('cc_emails', 'bcc_emails', 'all_recipients', 'headers', 'body',)
inlines = (AttachmentInlineAdmin,)
def queryset(self, request):
queryset = super(EmailAdmin, self).queryset(request)
return queryset.annotate(attachment_count_cache=Count('attachments'))
def attachment_count(self, obj):
        return obj.attachment_count_cache
attachment_count.admin_order_field = 'attachment_count_cache'
def body_stripped(self, obj):
if obj.body and len(obj.body)>100:
return obj.body[:100] + ' [...]'
return obj.body
body_stripped.short_description = 'body'
body_stripped.admin_order_field = 'body'
def get_urls(self):
urlpatterns = super(EmailAdmin, self).get_urls()
from django.conf.urls import patterns, url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
return update_wrapper(wrapper, view)
appname = self.model._meta.app_label
urlpatterns = patterns('',
url(r'^(?P<email_id>\d+)/attachments/(?P<attachment_id>\d+)/(?P<filename>[\w.]+)$',
wrap(self.serve_attachment),
name='%s_email_attachment' % appname)
) + urlpatterns
return urlpatterns
def serve_attachment(self, request, email_id, attachment_id, filename, extra_context=None):
if not self.has_change_permission(request, None):
raise PermissionDenied
attachment = Attachment.objects.get(email__id=email_id, id=attachment_id, filename=filename)
response = HttpResponse(attachment.content, mimetype=attachment.mimetype or 'application/octet-stream')
response["Content-Length"] = len(attachment.content)
return response
admin.site.register(Email, EmailAdmin)
##############
# send email #
##############
class SendEmail(Email):
class Meta:
proxy = True
class SendEmailForm(forms.ModelForm):
class Meta:
model = SendEmail
widgets = {
'from_email': forms.TextInput(attrs={'size': '30'}),
'to_emails': forms.TextInput(attrs={'size': WIDE_INPUT_SIZE}),
'cc_emails': forms.TextInput(attrs={'size': WIDE_INPUT_SIZE}),
'bcc_emails': forms.TextInput(attrs={'size': WIDE_INPUT_SIZE}),
'subject': forms.TextInput(attrs={'size': WIDE_INPUT_SIZE}),
}
fields = ['from_email', 'to_emails', 'cc_emails', 'bcc_emails',
'all_recipients', 'headers', 'subject', 'body', 'raw']
class SendEmailAdmin(admin.ModelAdmin):
form = SendEmailForm
fieldsets = (
(None, {'fields':('from_email', 'to_emails')}),
(_('cc and bcc'), {
'fields': ('cc_emails', 'bcc_emails'),
'classes': ('collapse',)}),
(None, {'fields': ('subject', 'body')}),
)
def save_model(self, request, obj, form, change):
"""
sends the email and does not save it
"""
email = message.EmailMessage(
subject=obj.subject,
body=obj.body,
from_email=obj.from_email,
to=[t.strip() for t in obj.to_emails.split(',')],
bcc=[t.strip() for t in obj.bcc_emails.split(',')],
cc=[t.strip() for t in obj.cc_emails.split(',')]
)
email.send()
def response_add(self, request, obj, post_url_continue=None):
msg = _('The Email was sent successfully.')
self.message_user(request, msg)
if "_addanother" in request.POST:
return HttpResponseRedirect(request.path)
return HttpResponseRedirect('../../')
def has_delete_permission(self, request, obj=None):
return False
def has_change_permission(self, request, obj=None):
return False
def get_model_perms(self, request):
return {
'add': self.has_add_permission(request),
'change': False,
'delete': False
}
admin.site.register(SendEmail, SendEmailAdmin)
|
mit
| 2,205,017,277,987,033,600 | 33.826087 | 111 | 0.601034 | false |
Azure/azure-sdk-for-python
|
sdk/identity/azure-identity/azure/identity/_credentials/cloud_shell.py
|
1
|
2005
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import functools
import os
from typing import TYPE_CHECKING
from azure.core.pipeline.transport import HttpRequest
from .. import CredentialUnavailableError
from .._constants import EnvironmentVariables
from .._internal.managed_identity_client import ManagedIdentityClient
from .._internal.get_token_mixin import GetTokenMixin
if TYPE_CHECKING:
from typing import Any, Optional
from azure.core.credentials import AccessToken
class CloudShellCredential(GetTokenMixin):
def __init__(self, **kwargs):
# type: (**Any) -> None
super(CloudShellCredential, self).__init__()
url = os.environ.get(EnvironmentVariables.MSI_ENDPOINT)
if url:
self._available = True
self._client = ManagedIdentityClient(
request_factory=functools.partial(_get_request, url),
base_headers={"Metadata": "true"},
**kwargs
)
else:
self._available = False
def get_token(self, *scopes, **kwargs):
# type: (*str, **Any) -> AccessToken
if not self._available:
raise CredentialUnavailableError(
message="Cloud Shell managed identity configuration not found in environment"
)
return super(CloudShellCredential, self).get_token(*scopes, **kwargs)
def _acquire_token_silently(self, *scopes, **kwargs):
# type: (*str, **Any) -> Optional[AccessToken]
return self._client.get_cached_token(*scopes)
def _request_token(self, *scopes, **kwargs):
# type: (*str, **Any) -> AccessToken
return self._client.request_token(*scopes, **kwargs)
def _get_request(url, scope, identity_config):
# type: (str, str, dict) -> HttpRequest
request = HttpRequest("POST", url, data=dict({"resource": scope}, **identity_config))
return request
|
mit
| 8,458,720,174,471,907,000 | 34.803571 | 93 | 0.625436 | false |
ceph/autotest
|
client/tests/kvm/tests/virtio_console.py
|
1
|
49379
|
"""
virtio_console test
@copyright: 2010 Red Hat, Inc.
"""
import array, logging, os, random, re, select, shutil, socket, sys, tempfile
import threading, time, traceback
from collections import deque
from threading import Thread
from autotest_lib.client.common_lib import error
from autotest_lib.client.bin import utils
import kvm_subprocess, kvm_test_utils, kvm_preprocessing
def run_virtio_console(test, params, env):
"""
KVM virtio_console test
1) Starts VMs with the specified number of virtio console devices
2) Start smoke test
3) Start loopback test
4) Start performance test
    This test uses an auxiliary script, virtio_console_guest.py, that is copied
    to guests. This script has functions to send and receive data on virtio console
ports. Details of each test can be found on the docstrings for the test_*
functions.
@param test: kvm test object
@param params: Dictionary with the test parameters
@param env: Dictionary with test environment
"""
class SubTest(object):
"""
        Collect the results of the subtests of the main test.
"""
def __init__(self):
"""
Initialize object
"""
self.result = []
self.passed = 0
self.failed = 0
self.cleanup_func = None
self.cleanup_args = None
def set_cleanup_func(self, func, args):
"""
Set cleanup function which is called when subtest fails.
@param func: Function which should be called when test fails.
@param args: Arguments of cleanup function.
"""
self.cleanup_func = func
self.cleanup_args = args
def do_test(self, function, args=None, fatal=False, cleanup=True):
"""
Execute subtest function.
@param function: Object of function.
@param args: List of arguments of function.
            @param fatal: If True, the exception is forwarded to the main test.
            @param cleanup: If True, call the cleanup function after a subtest crash.
            @return: Whatever the executed subtest returned.
            @raise TestError: If the subtest failure is fatal, the exception
                              from the subtest is re-raised.
"""
if args == None:
args = []
res = [None, function.func_name, args]
try:
logging.debug("Start test %s.", function.func_name)
ret = function(*args)
res[0] = True
logging.info(self.result_to_string(res))
self.result.append(res)
self.passed += 1
return ret
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
logging.error("In function (" + function.func_name + "):")
logging.error("Call from:\n" +
traceback.format_stack()[-2][:-1])
logging.error("Exception from:\n" +
"".join(traceback.format_exception(
exc_type, exc_value,
exc_traceback.tb_next)))
# Clean up environment after subTest crash
res[0] = False
logging.info(self.result_to_string(res))
self.result.append(res)
self.failed += 1
if cleanup:
try:
self.cleanup_func(*self.cleanup_args)
except:
error.TestFail("Cleanup function crash too.")
if fatal:
raise
def is_failed(self):
"""
            @return: True if any subtest failed, otherwise False.
"""
if self.failed > 0:
return True
else:
return False
def get_result(self):
"""
@return: Result of subtests.
Format:
tuple(pass/fail,function_name,call_arguments)
"""
return self.result
def result_to_string_debug(self, result):
"""
@param result: Result of test.
"""
sargs = ""
for arg in result[2]:
sargs += str(arg) + ","
sargs = sargs[:-1]
if result[0]:
status = "PASS"
else:
status = "FAIL"
return ("Subtest (%s(%s)): --> %s") % (result[1], sargs, status)
def result_to_string(self, result):
"""
@param result: Result of test.
"""
if result[0]:
status = "PASS"
else:
status = "FAIL"
return ("Subtest (%s): --> %s") % (result[1], status)
def headline(self, msg):
"""
Add headline to result output.
            @param msg: Text of the headline
"""
self.result.append([msg])
def _gen_res(self, format_func):
"""
            Format the result with a formatting function.
            @param format_func: Function used to format the result.
"""
result = ""
for res in self.result:
if (len(res) == 3):
result += format_func(res) + "\n"
else:
result += res[0] + "\n"
return result
def get_full_text_result(self):
"""
@return string with text form of result
"""
return self._gen_res(lambda str: self.result_to_string_debug(str))
def get_text_result(self):
"""
@return string with text form of result
"""
return self._gen_res(lambda str: self.result_to_string(str))
class Port(object):
"""
Define structure to keep information about used port.
"""
def __init__(self, sock, name, port_type, path):
"""
@param vm: virtual machine object that port owned
@param sock: Socket of port if port is open.
@param name: Name of port for guest side.
@param port_type: Type of port yes = console, no= serialport.
@param path: Path to port on host side.
"""
self.sock = sock
self.name = name
self.port_type = port_type
self.path = path
self.is_open = False
def for_guest(self):
"""
Format data for communication with guest side.
"""
return [self.name, self.port_type]
def open(self):
"""
Open port on host side.
"""
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.connect(self.path)
self.is_open = True
def clean_port(self):
"""
Clean all data from opened port on host side.
"""
if self.is_open:
self.close()
self.open()
ret = select.select([self.sock], [], [], 1.0)
if ret[0]:
buf = self.sock.recv(1024)
logging.debug("Rest in socket: " + buf)
def close(self):
"""
Close port.
"""
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
self.is_open = False
def __str__(self):
"""
Convert to text.
"""
return ("%s,%s,%s,%s,%d" % ("Socket", self.name, self.port_type,
self.path, self.is_open))
class ThSend(Thread):
"""
Random data sender thread.
"""
def __init__(self, port, data, event):
"""
@param port: Destination port.
@param data: The data intend to be send in a loop.
@param event: Exit event.
"""
Thread.__init__(self)
self.port = port
# FIXME: socket.send(data>>127998) without read blocks thread
if len(data) > 102400:
data = data[0:102400]
logging.error("Data is too long, using only first %d bytes",
len(data))
self.data = data
self.exitevent = event
self.idx = 0
def run(self):
logging.debug("ThSend %s: run", self.getName())
while not self.exitevent.isSet():
self.idx += self.port.send(self.data)
logging.debug("ThSend %s: exit(%d)", self.getName(),
self.idx)
class ThSendCheck(Thread):
"""
Random data sender thread.
"""
def __init__(self, port, event, queues, blocklen=1024):
"""
@param port: Destination port
@param event: Exit event
@param queues: Queues for the control data (FIFOs)
@param blocklen: Block length
"""
Thread.__init__(self)
self.port = port
self.queues = queues
# FIXME: socket.send(data>>127998) without read blocks thread
if blocklen > 102400:
blocklen = 102400
logging.error("Data is too long, using blocklen = %d",
blocklen)
self.blocklen = blocklen
self.exitevent = event
self.idx = 0
def run(self):
logging.debug("ThSendCheck %s: run", self.getName())
too_much_data = False
while not self.exitevent.isSet():
# FIXME: workaround the problem with qemu-kvm stall when too
# much data is sent without receiving
for queue in self.queues:
while not self.exitevent.isSet() and len(queue) > 1048576:
too_much_data = True
time.sleep(0.1)
ret = select.select([], [self.port], [], 1.0)
if ret[1]:
# Generate blocklen of random data add them to the FIFO
# and send them over virtio_console
buf = ""
for i in range(self.blocklen):
ch = "%c" % random.randrange(255)
buf += ch
for queue in self.queues:
queue.append(ch)
target = self.idx + self.blocklen
while not self.exitevent.isSet() and self.idx < target:
idx = self.port.send(buf)
buf = buf[idx:]
self.idx += idx
logging.debug("ThSendCheck %s: exit(%d)", self.getName(),
self.idx)
if too_much_data:
logging.error("ThSendCheck: workaround the 'too_much_data'"
"bug")
class ThRecv(Thread):
"""
        Receives data and throws it away.
"""
def __init__(self, port, event, blocklen=1024):
"""
@param port: Data source port.
@param event: Exit event.
@param blocklen: Block length.
"""
Thread.__init__(self)
self.port = port
self._port_timeout = self.port.gettimeout()
self.port.settimeout(0.1)
self.exitevent = event
self.blocklen = blocklen
self.idx = 0
def run(self):
logging.debug("ThRecv %s: run", self.getName())
while not self.exitevent.isSet():
# TODO: Workaround, it didn't work with select :-/
try:
self.idx += len(self.port.recv(self.blocklen))
except socket.timeout:
pass
self.port.settimeout(self._port_timeout)
logging.debug("ThRecv %s: exit(%d)", self.getName(), self.idx)
class ThRecvCheck(Thread):
"""
Random data receiver/checker thread.
"""
def __init__(self, port, buf, event, blocklen=1024):
"""
@param port: Source port.
@param buf: Control data buffer (FIFO).
            @param event: Exit event.
@param blocklen: Block length.
"""
Thread.__init__(self)
self.port = port
self.buffer = buf
self.exitevent = event
self.blocklen = blocklen
self.idx = 0
def run(self):
logging.debug("ThRecvCheck %s: run", self.getName())
while not self.exitevent.isSet():
ret = select.select([self.port], [], [], 1.0)
if ret[0] and (not self.exitevent.isSet()):
buf = self.port.recv(self.blocklen)
if buf:
# Compare the recvd data with the control data
for ch in buf:
ch_ = self.buffer.popleft()
if not ch == ch_:
self.exitevent.set()
logging.error("Failed to recv %dth character",
self.idx)
logging.error("%s != %s", repr(ch), repr(ch_))
logging.error("Recv = %s", repr(buf))
# sender might change the buffer :-(
time.sleep(1)
ch_ = ""
for buf in self.buffer:
ch_ += buf
logging.error("Queue = %s", repr(ch_))
raise error.TestFail("ThRecvCheck: incorrect "
"data")
self.idx += len(buf)
logging.debug("ThRecvCheck %s: exit(%d)", self.getName(),
self.idx)
def process_stats(stats, scale=1.0):
"""
        Process the measured statistics into sorted per-slice values.
        @param stats: List of measured data.
        @param scale: Divisor applied to each value (e.g. bytes -> MB per slice).
"""
if not stats:
return None
for i in range((len(stats) - 1), 0, -1):
stats[i] = stats[i] - stats[i - 1]
stats[i] /= scale
stats[0] /= scale
stats = sorted(stats)
return stats
def _init_guest(vm, timeout=2):
"""
Execute virtio_console_guest.py on guest, wait until it is initialized.
        @param vm: Information about the guest.
@param timeout: Timeout that will be used to verify if the script
started properly.
"""
logging.debug("compile virtio_console_guest.py on guest %s", vm[0].name)
(match, data) = _on_guest("python -OO /tmp/virtio_console_guest.py -c &&"
"echo -n 'PASS: Compile virtio_guest finished' ||"
"echo -n 'FAIL: Compile virtio_guest failed'",
vm, timeout)
if match != 0:
raise error.TestFail("Command console_switch.py on guest %s failed."
"\nreturn code: %s\n output:\n%s" %
(vm[0].name, match, data))
logging.debug("Starting virtio_console_guest.py on guest %s", vm[0].name)
vm[1].sendline()
(match, data) = _on_guest("python /tmp/virtio_console_guest.pyo &&"
"echo -n 'PASS: virtio_guest finished' ||"
"echo -n 'FAIL: virtio_guest failed'",
vm, timeout)
if match != 0:
raise error.TestFail("Command console_switch.py on guest %s failed."
"\nreturn code: %s\n output:\n%s" %
(vm[0].name, match, data))
# Let the system rest
time.sleep(2)
def init_guest(vm, consoles):
"""
Prepares guest, executes virtio_console_guest.py and initializes test.
        @param vm: Information about the guest.
        @param consoles: Information about the consoles.
"""
conss = []
for mode in consoles:
for cons in mode:
conss.append(cons.for_guest())
_init_guest(vm, 10)
on_guest("virt.init(%s)" % (conss), vm, 10)
def _search_kernel_crashlog(vm, timeout = 2):
"""
Find kernel crash message.
        @param vm: Information about the guest.
@param timeout: Timeout used to verify expected output.
@return: Kernel crash log or None.
"""
data = vm[3].read_nonblocking()
match = re.search("^BUG:", data, re.MULTILINE)
if match == None:
return None
match = re.search(r"^BUG:.*^---\[ end trace .* \]---",
data, re.DOTALL |re.MULTILINE)
if match == None:
data += vm[3].read_until_last_line_matches(
["---\[ end trace .* \]---"],timeout)
match = re.search(r"(^BUG:.*^---\[ end trace .* \]---)",
data, re.DOTALL |re.MULTILINE)
return match.group(0)
def _on_guest(command, vm, timeout=2):
"""
Execute given command inside the script's main loop, indicating the vm
the command was executed on.
@param command: Command that will be executed.
        @param vm: Information about the guest.
@param timeout: Timeout used to verify expected output.
@return: Tuple (match index, data)
"""
logging.debug("Executing '%s' on virtio_console_guest.py loop, vm: %s," +
"timeout: %s", command, vm[0].name, timeout)
vm[1].sendline(command)
try:
(match, data) = vm[1].read_until_last_line_matches(["PASS:",
"FAIL:"],
timeout)
except (kvm_subprocess.ExpectError):
match = None
data = "Timeout."
kcrash_data = _search_kernel_crashlog(vm)
if (kcrash_data != None):
logging.error(kcrash_data)
vm[4] = True
return (match, data)
def on_guest(command, vm, timeout=2):
"""
        Wrapper around the _on_guest command which executes the command on
        the guest. Unlike _on_guest, it raises a test error when the command
        fails.
        @param command: Command that will be executed.
        @param vm: Information about the guest.
@param timeout: Timeout used to verify expected output.
@return: Tuple (match index, data)
"""
match, data = _on_guest(command, vm, timeout)
if match == 1 or match is None:
raise error.TestFail("Failed to execute '%s' on virtio_console_guest.py, "
"vm: %s, output:\n%s" %
(command, vm[0].name, data))
return (match, data)
def _guest_exit_threads(vm, send_pts, recv_pts):
"""
Safely executes on_guest("virt.exit_threads()") using workaround of
the stuck thread in loopback in mode=virt.LOOP_NONE .
@param vm: Informations about the guest.
@param send_pts: list of possible send sockets we need to work around.
@param recv_pts: list of possible recv sockets we need to read-out.
"""
# in LOOP_NONE mode it might stuck in read/write
match, tmp = _on_guest("virt.exit_threads()", vm, 10)
if match == None:
logging.debug("Workaround the stuck thread on guest")
# Thread is stucked in read/write
for send_pt in send_pts:
send_pt.sock.sendall(".")
elif match != 0:
# Something else
raise error.TestFail("Unexpected fail\nMatch: %s\nData:\n%s"
% (match, tmp))
# Read-out all remaining data
for recv_pt in recv_pts:
while select.select([recv_pt.sock], [], [], 0.1)[0]:
recv_pt.sock.recv(1024)
# This will cause fail in case anything went wrong.
on_guest("print 'PASS: nothing'", vm, 10)
def _vm_create(no_console=3, no_serialport=3):
"""
Creates the VM and connects the specified number of consoles and serial
ports.
Ports are allocated by 2 per 1 virtio-serial-pci device starting with
console. (3+2 => CC|CS|S; 0+2 => SS; 3+4 => CC|CS|SS|S, ...) This way
it's easy to test communication on the same or different
virtio-serial-pci device.
        Later in the tests the consoles are always picked starting from the
        first available one (3+2: 2xC => CC|cs|s <communication on the same
        PCI>; 2xC,1xS => CC|cS|s <communication between 2 PCI devs>)
@param no_console: Number of desired virtconsoles.
@param no_serialport: Number of desired virtserialports.
@return: Tuple with (guest information, consoles information)
guest informations = [vm, session, tmp_dir, kcrash]
consoles informations = [consoles[], serialports[]]
"""
consoles = []
serialports = []
tmp_dir = tempfile.mkdtemp(prefix="virtio-console-", dir="/tmp/")
if not params.get('extra_params'):
params['extra_params'] = ''
for i in range(0, no_console):
            # Spread consoles between multiple PCI devices (2 per dev)
if not i % 2:
pci = "virtio-serial-pci%d" % (i / 2)
params['extra_params'] += (" -device virtio-serial-pci,id="
+ pci)
pci += ".0"
params['extra_params'] += (" -chardev socket,path=%s/%d,id=vc%d,"
"server,nowait" % (tmp_dir, i, i))
params['extra_params'] += (" -device virtconsole,chardev=vc%d,"
"name=console-%d,id=c%d,bus=%s"
% (i, i, i, pci))
for i in range(no_console, no_console + no_serialport):
            # Spread serial ports between multiple PCI devices (2 per dev)
if not i % 2:
pci = "virtio-serial-pci%d" % (i / 2)
params['extra_params'] += (" -device virtio-serial-pci,id="
+ pci)
pci += ".0"
params['extra_params'] += (" -chardev socket,path=%s/%d,id=vs%d,"
"server,nowait" % (tmp_dir, i, i))
params['extra_params'] += (" -device virtserialport,chardev=vs%d,"
"name=serialport-%d,id=p%d,bus=%s"
% (i, i, i, pci))
logging.debug("Booting first guest %s", params.get("main_vm"))
kvm_preprocessing.preprocess_vm(test, params, env,
params.get("main_vm"))
vm = env.get_vm(params.get("main_vm"))
session = vm.wait_for_login(timeout=float(params.get("boot_timeout", 240)))
sserial = kvm_test_utils.wait_for_login(vm, 0,
float(params.get("boot_timeout", 240)),
0, 2, serial=True)
# connect the sockets
for i in range(0, no_console):
consoles.append(Port(None ,"console-%d" % i,
"yes", "%s/%d" % (tmp_dir, i)))
for i in range(no_console, no_console + no_serialport):
serialports.append(Port(None ,"serialport-%d" % i,
"no", "%s/%d" % (tmp_dir, i)))
kcrash = False
return [vm, session, tmp_dir, sserial, kcrash], [consoles, serialports]
def topen(vm, port):
"""
Open virtioconsole port.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port identifier.
"""
on_guest("virt.open('%s')" % (port.name), vm, 10)
port.open()
def tmulti_open(vm, port):
"""
Multiopen virtioconsole port.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port identifier.
"""
on_guest("virt.close('%s')" % (port.name), vm, 10)
on_guest("virt.open('%s')" % (port.name), vm, 10)
(match, data) = _on_guest("virt.open('%s')" % (port.name), vm, 10)
# Console is permitted to open the device multiple times
if port.port_type == "yes": #is console?
if match != 0: #Multiopen not pass
raise error.TestFail("Unexpected fail of opening the console"
" device for the 2nd time.\n%s" % data)
else:
if match != 1: #Multiopen not fail:
raise error.TestFail("Unexpetded pass of opening the"
" serialport device for the 2nd time.")
elif not "[Errno 24]" in data:
raise error.TestFail("Multiple opening fail but with another"
" exception %s" % data)
port.open()
def tclose(vm, port):
"""
Close socket.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port to open.
"""
on_guest("virt.close('%s')" % (port.name), vm, 10)
port.close()
def tpooling(vm, port):
"""
        Test the polling functionality of the port.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port used in test.
"""
# Poll (OUT)
on_guest("virt.poll('%s', %s)" % (port.name, select.POLLOUT), vm,
2)
# Poll (IN, OUT)
port.sock.sendall("test")
for test in [select.POLLIN, select.POLLOUT]:
on_guest("virt.poll('%s', %s)" % (port.name, test), vm, 10)
# Poll (IN HUP)
        # Store the socket information and close the socket
port.close()
for test in [select.POLLIN, select.POLLHUP]:
on_guest("virt.poll('%s', %s)" % (port.name, test), vm, 10)
# Poll (HUP)
on_guest("virt.recv('%s', 4, 1024, False)" % (port.name), vm, 10)
on_guest("virt.poll('%s', %s)" % (port.name, select.POLLHUP), vm,
2)
# Reconnect the socket
port.open()
# Redefine socket in consoles
on_guest("virt.poll('%s', %s)" % (port.name, select.POLLOUT), vm,
2)
def tsigio(vm, port):
"""
        Test the SIGIO (asynchronous I/O notification) functionality.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port used in test.
"""
if port.is_open:
port.close()
# Enable sigio on specific port
on_guest("virt.async('%s', True, 0)" %
(port.name) , vm, 10)
on_guest("virt.get_sigio_poll_return('%s')" % (port.name) , vm, 10)
#Test sigio when port open
on_guest("virt.set_pool_want_return('%s', select.POLLOUT)" %
(port.name), vm, 10)
port.open()
match = _on_guest("virt.get_sigio_poll_return('%s')" %
(port.name) , vm, 10)[0]
if match == 1:
raise error.TestFail("Problem with HUP on console port.")
#Test sigio when port receive data
on_guest("virt.set_pool_want_return('%s', select.POLLOUT |"
" select.POLLIN)" % (port.name), vm, 10)
port.sock.sendall("0123456789")
on_guest("virt.get_sigio_poll_return('%s')" % (port.name) , vm, 10)
#Test sigio port close event
on_guest("virt.set_pool_want_return('%s', select.POLLHUP |"
" select.POLLIN)" % (port.name), vm, 10)
port.close()
on_guest("virt.get_sigio_poll_return('%s')" % (port.name) , vm, 10)
#Test sigio port open event and persistence of written data on port.
on_guest("virt.set_pool_want_return('%s', select.POLLOUT |"
" select.POLLIN)" % (port.name), vm, 10)
port.open()
on_guest("virt.get_sigio_poll_return('%s')" % (port.name) , vm, 10)
#Test event when erase data.
on_guest("virt.clean_port('%s')" % (port.name), vm, 10)
port.close()
on_guest("virt.set_pool_want_return('%s', select.POLLOUT)"
% (port.name), vm, 10)
port.open()
on_guest("virt.get_sigio_poll_return('%s')" % (port.name) , vm, 10)
# Disable sigio on specific port
on_guest("virt.async('%s', False, 0)" %
(port.name) , vm, 10)
def tlseek(vm, port):
"""
Tests the correct handling of lseek (expected fail)
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port used in test.
"""
# The virt.lseek returns PASS when the seek fails
on_guest("virt.lseek('%s', 0, 0)" % (port.name), vm, 10)
def trw_host_offline(vm, port):
"""
Guest read/write from host when host is disconnected.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port used in test.
"""
if port.is_open:
port.close()
on_guest("virt.recv('%s', 0, 1024, False)" % port.name, vm, 10)
match, tmp = _on_guest("virt.send('%s', 10, False)"
% port.name, vm, 10)
if match != None:
raise error.TestFail("Write on guest while host disconnected "
"didn't timed out.\nOutput:\n%s"
% tmp)
port.open()
        if len(port.sock.recv(1024)) < 10:
            raise error.TestFail("Didn't receive data from guest")
# Now the _on_guest("virt.send('%s'... command should be finished
on_guest("print 'PASS: nothing'", vm, 10)
def trw_blocking_mode(vm, port):
"""
        Guest reads/writes data in blocking mode.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port used in test.
"""
# Blocking mode
if not port.is_open:
port.open()
on_guest("virt.blocking('%s', True)" % port.name, vm, 10)
# Recv should timed out
match, tmp = _on_guest("virt.recv('%s', 10, 1024, False)" %
port.name, vm, 10)
if match == 0:
raise error.TestFail("Received data even when non were sent\n"
"Data:\n%s" % tmp)
elif match != None:
raise error.TestFail("Unexpected fail\nMatch: %s\nData:\n%s" %
(match, tmp))
port.sock.sendall("1234567890")
        # Now the guest received the data and escaped from the recv()
on_guest("print 'PASS: nothing'", vm, 10)
def trw_nonblocking_mode(vm, port):
"""
        Guest reads/writes data in non-blocking mode.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param port: Port used in test.
"""
# Non-blocking mode
if not port.is_open:
port.open()
on_guest("virt.blocking('%s', False)" % port.name, vm, 10)
# Recv should return FAIL with 0 received data
match, tmp = _on_guest("virt.recv('%s', 10, 1024, False)" %
port.name, vm, 10)
if match == 0:
raise error.TestFail("Received data even when non were sent\n"
"Data:\n%s" % tmp)
elif match == None:
raise error.TestFail("Timed out, probably in blocking mode\n"
"Data:\n%s" % tmp)
elif match != 1:
raise error.TestFail("Unexpected fail\nMatch: %s\nData:\n%s" %
(match, tmp))
port.sock.sendall("1234567890")
on_guest("virt.recv('%s', 10, 1024, False)" % port.name, vm, 10)
def tbasic_loopback(vm, send_port, recv_port, data="Smoke test data"):
"""
        Simple loopback test over only two ports.
        @param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
        @param send_port: Port used for sending data.
        @param recv_port: Port used for receiving data.
        @param data: Data to send through the loopback.
"""
if not send_port.is_open:
send_port.open()
if not recv_port.is_open:
recv_port.open()
on_guest("virt.loopback(['%s'], ['%s'], 1024, virt.LOOP_NONE)" %
(send_port.name, recv_port.name), vm, 10)
send_port.sock.sendall(data)
tmp = ""
i = 0
while i <= 10:
i += 1
ret = select.select([recv_port.sock], [], [], 1.0)
if ret:
tmp += recv_port.sock.recv(1024)
if len(tmp) >= len(data):
break
if tmp != data:
raise error.TestFail("Incorrect data: '%s' != '%s'",
data, tmp)
_guest_exit_threads(vm, [send_port], [recv_port])
def tloopback(vm, consoles, params):
"""
Virtio console loopback subtest.
Creates loopback on the vm machine between send_pt and recv_pts
ports and sends length amount of data through this connection.
It validates the correctness of the data sent.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param consoles: Field of virtio ports with the minimum of 2 items.
        @param params: test parameters, multiple receivers allowed.
'$source_console_type@buffer_length:
$destination_console_type1@$buffer_length:...:
$loopback_buffer_length;...'
"""
# PREPARE
for param in params.split(';'):
if not param:
continue
logging.info("test_loopback: params: %s", param)
param = param.split(':')
idx_serialport = 0
idx_console = 0
buf_len = []
if (param[0].startswith('console')):
send_pt = consoles[0][idx_console]
idx_console += 1
else:
send_pt = consoles[1][idx_serialport]
idx_serialport += 1
if (len(param[0].split('@')) == 2):
buf_len.append(int(param[0].split('@')[1]))
else:
buf_len.append(1024)
recv_pts = []
for parm in param[1:]:
if (parm.isdigit()):
buf_len.append(int(parm))
break # buf_len is the last portion of param
if (parm.startswith('console')):
recv_pts.append(consoles[0][idx_console])
idx_console += 1
else:
recv_pts.append(consoles[1][idx_serialport])
idx_serialport += 1
                if (len(parm.split('@')) == 2):
                    buf_len.append(int(parm.split('@')[1]))
else:
buf_len.append(1024)
# There must be sum(idx_*) consoles + last item as loopback buf_len
if len(buf_len) == (idx_console + idx_serialport):
buf_len.append(1024)
for p in recv_pts:
if not p.is_open:
p.open()
if not send_pt.is_open:
send_pt.open()
if len(recv_pts) == 0:
raise error.TestFail("test_loopback: incorrect recv consoles"
"definition")
threads = []
queues = []
for i in range(0, len(recv_pts)):
queues.append(deque())
tmp = "'%s'" % recv_pts[0].name
for recv_pt in recv_pts[1:]:
tmp += ", '%s'" % (recv_pt.name)
on_guest("virt.loopback(['%s'], [%s], %d, virt.LOOP_POLL)"
% (send_pt.name, tmp, buf_len[-1]), vm, 10)
exit_event = threading.Event()
# TEST
thread = ThSendCheck(send_pt.sock, exit_event, queues,
buf_len[0])
thread.start()
threads.append(thread)
for i in range(len(recv_pts)):
thread = ThRecvCheck(recv_pts[i].sock, queues[i], exit_event,
buf_len[i + 1])
thread.start()
threads.append(thread)
time.sleep(60)
exit_event.set()
threads[0].join()
tmp = "%d data sent; " % threads[0].idx
for thread in threads[1:]:
thread.join()
tmp += "%d, " % thread.idx
logging.info("test_loopback: %s data received and verified",
tmp[:-2])
# Read-out all remaining data
for recv_pt in recv_pts:
while select.select([recv_pt.sock], [], [], 0.1)[0]:
recv_pt.sock.recv(1024)
_guest_exit_threads(vm, [send_pt], recv_pts)
del exit_event
del threads[:]
def tperf(vm, consoles, params):
"""
        Tests performance of the virtio_console tunnel. First it sends the data
        from host to guest and then back. It provides information about
        CPU utilisation and statistical information about the throughput.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param consoles: Field of virtio ports with the minimum of 2 items.
@param params: test parameters:
'$console_type@$buffer_length:$test_duration;...'
"""
for param in params.split(';'):
if not param:
continue
logging.info("test_perf: params: %s", param)
param = param.split(':')
duration = 60.0
if len(param) > 1:
try:
duration = float(param[1])
except:
pass
param = param[0].split('@')
if len(param) > 1 and param[1].isdigit():
buf_len = int(param[1])
else:
buf_len = 1024
param = (param[0] == 'serialport')
port = consoles[param][0]
if not port.is_open:
port.open()
data = ""
for i in range(buf_len):
data += "%c" % random.randrange(255)
exit_event = threading.Event()
time_slice = float(duration) / 100
# HOST -> GUEST
on_guest('virt.loopback(["%s"], [], %d, virt.LOOP_NONE)' %
(port.name, buf_len), vm, 10)
thread = ThSend(port.sock, data, exit_event)
stats = array.array('f', [])
loads = utils.SystemLoad([(os.getpid(), 'autotest'),
(vm[0].get_pid(), 'VM'), 0])
loads.start()
_time = time.time()
thread.start()
for i in range(100):
stats.append(thread.idx)
time.sleep(time_slice)
_time = time.time() - _time - duration
logging.info("\n" + loads.get_cpu_status_string()[:-1])
logging.info("\n" + loads.get_mem_status_string()[:-1])
exit_event.set()
thread.join()
# Let the guest read-out all the remaining data
while not _on_guest("virt.poll('%s', %s)" %
(port.name, select.POLLIN), vm, 10)[0]:
time.sleep(1)
_guest_exit_threads(vm, [port], [])
if (_time > time_slice):
logging.error(
"Test ran %fs longer which is more than one time slice", _time)
else:
logging.debug("Test ran %fs longer", _time)
stats = process_stats(stats[1:], time_slice * 1048576)
logging.debug("Stats = %s", stats)
logging.info("Host -> Guest [MB/s] (min/med/max) = %.3f/%.3f/%.3f",
stats[0], stats[len(stats) / 2], stats[-1])
del thread
# GUEST -> HOST
exit_event.clear()
stats = array.array('f', [])
on_guest("virt.send_loop_init('%s', %d)" % (port.name, buf_len),
vm, 30)
thread = ThRecv(port.sock, exit_event, buf_len)
thread.start()
loads.start()
on_guest("virt.send_loop()", vm, 10)
_time = time.time()
for i in range(100):
stats.append(thread.idx)
time.sleep(time_slice)
_time = time.time() - _time - duration
logging.info("\n" + loads.get_cpu_status_string()[:-1])
logging.info("\n" + loads.get_mem_status_string()[:-1])
on_guest("virt.exit_threads()", vm, 10)
exit_event.set()
thread.join()
if (_time > time_slice): # Deviation is higher than 1 time_slice
logging.error(
"Test ran %fs longer which is more than one time slice", _time)
else:
logging.debug("Test ran %fs longer", _time)
stats = process_stats(stats[1:], time_slice * 1048576)
logging.debug("Stats = %s", stats)
logging.info("Guest -> Host [MB/s] (min/med/max) = %.3f/%.3f/%.3f",
stats[0], stats[len(stats) / 2], stats[-1])
del thread
del exit_event
def _clean_ports(vm, consoles):
"""
        Read out all data from both sides of every port.
        @param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
        @param consoles: Consoles which should be cleaned.
"""
for ctype in consoles:
for port in ctype:
openned = port.is_open
port.clean_port()
#on_guest("virt.blocking('%s', True)" % port.name, vm, 10)
on_guest("virt.clean_port('%s'),1024" % port.name, vm, 10)
if not openned:
port.close()
on_guest("virt.close('%s'),1024" % port.name, vm, 10)
def clean_ports(vm, consoles):
"""
Clean state of all ports and set port to default state.
Default state:
No data on port or in port buffer.
Read mode = blocking.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
        @param consoles: Consoles which should be cleaned.
"""
# Check if python is still alive
print "CLEANING"
match, tmp = _on_guest("is_alive()", vm, 10)
if (match == None) or (match != 0):
logging.error("Python died/is stuck/has remaining threads")
logging.debug(tmp)
try:
if vm[4] == True:
raise error.TestFail("Kernel crash.")
match, tmp = _on_guest("guest_exit()", vm, 10)
if (match == None) or (match == 0):
vm[1].close()
vm[1] = vm[0].wait_for_login(timeout=float(params.get("boot_timeout", 240)))
on_guest("killall -9 python "
"&& echo -n PASS: python killed"
"|| echo -n PASS: python died",
vm, 10)
init_guest(vm, consoles)
_clean_ports(vm, consoles)
except (error.TestFail, kvm_subprocess.ExpectError,
Exception), inst:
logging.error(inst)
logging.error("Virtio-console driver is irreparably"
" blocked. Every comd end with sig KILL."
"Try reboot vm for continue in testing.")
vm[1] = vm[0].reboot(vm[1], "system_reset")
init_guest(vm, consoles)
match = _on_guest("virt.clean_port('%s'),1024" %
consoles[0][0].name, vm, 10)[0]
if (match == None) or (match != 0):
raise error.TestFail("Virtio-console driver is irrepar"
"ably blocked. Every comd end"
" with sig KILL. Neither the "
"restart did not help.")
_clean_ports(vm, consoles)
def test_smoke(test, vm, consoles, params):
"""
Virtio console smoke test.
Tests the basic functionalities (poll, read/write with and without
        connected host, etc.).
@param test: Main test object.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param consoles: Field of virtio ports with the minimum of 2 items.
@param params: Test parameters '$console_type:$data;...'
"""
# PREPARE
for param in params.split(';'):
if not param:
continue
headline = "test_smoke: params: %s" % (param)
logging.info(headline)
param = param.split(':')
if len(param) > 1:
data = param[1]
else:
data = "Smoke test data"
param = (param[0] == 'serialport')
send_pt = consoles[param][0]
recv_pt = consoles[param][1]
test.headline(headline)
test.do_test(topen, [vm, send_pt], True)
test.do_test(tclose, [vm, send_pt], True)
test.do_test(tmulti_open, [vm, send_pt])
test.do_test(tpooling, [vm, send_pt])
test.do_test(tsigio, [vm, send_pt])
test.do_test(tlseek, [vm, send_pt])
test.do_test(trw_host_offline, [vm, send_pt])
test.do_test(trw_nonblocking_mode, [vm, send_pt])
test.do_test(trw_blocking_mode, [vm, send_pt])
test.do_test(tbasic_loopback, [vm, send_pt, recv_pt, data], True)
def test_multiport(test, vm, consoles, params):
"""
This is group of test which test virtio_console in maximal load and
with multiple ports.
@param test: Main test object.
@param vm: Target virtual machine [vm, session, tmp_dir, ser_session].
@param consoles: Field of virtio ports with the minimum of 2 items.
@param params: Test parameters '$console_type:$data;...'
"""
test.headline("test_multiport:")
#Test Loopback
test.do_test(tloopback, [vm, consoles, params[0]])
#Test Performance
test.do_test(tperf, [vm, consoles, params[1]])
# INITIALIZE
tsmoke_params = params.get('virtio_console_smoke', '')
tloopback_params = params.get('virtio_console_loopback', '')
tperf_params = params.get('virtio_console_perf', '')
no_serialports = 0
no_consoles = 0
# consoles required for Smoke test
if (tsmoke_params.count('serialport')):
no_serialports = max(2, no_serialports)
if (tsmoke_params.count('console')):
no_consoles = max(2, no_consoles)
# consoles required for Loopback test
for param in tloopback_params.split(';'):
no_serialports = max(no_serialports, param.count('serialport'))
no_consoles = max(no_consoles, param.count('console'))
# consoles required for Performance test
if (tperf_params.count('serialport')):
no_serialports = max(1, no_serialports)
if (tperf_params.count('console')):
no_consoles = max(1, no_consoles)
if (no_serialports + no_consoles) == 0:
raise error.TestFail("No tests defined, probably incorrect "
"configuration in tests_base.cfg")
vm, consoles = _vm_create(no_consoles, no_serialports)
# Copy virtio_console_guest.py into guests
pwd = os.path.join(os.environ['AUTODIR'], 'tests/kvm')
vksmd_src = os.path.join(pwd, "scripts/virtio_console_guest.py")
dst_dir = "/tmp"
vm[0].copy_files_to(vksmd_src, dst_dir)
# ACTUAL TESTING
# Defines all available consoles; tests udev and sysfs
test = SubTest()
try:
init_guest(vm, consoles)
test.set_cleanup_func(clean_ports, [vm, consoles])
#Test Smoke
test_smoke(test, vm, consoles, tsmoke_params)
#Test multiport functionality and performance.
test_multiport(test, vm, consoles, [tloopback_params, tperf_params])
finally:
logging.info(("Summary: %d tests passed %d test failed :\n" %
(test.passed, test.failed)) + test.get_text_result())
if test.is_failed():
raise error.TestFail("Virtio_console test FAILED.")
# CLEANUP
vm[1].close()
vm[0].destroy(gracefully=False)
shutil.rmtree(vm[2])
|
gpl-2.0
| 1,024,693,506,317,516,900 | 36.323507 | 92 | 0.494259 | false |
kespindler/puffin
|
tests/test_puffin.py
|
1
|
2287
|
from puf import cli
from tests import StreamCaptureTest
import re
from tempfile import NamedTemporaryFile
class TestMain(StreamCaptureTest):
def test_main(self):
cli.main(['range(3)'])
self.assertWasStreamed('0\n1\n2\n')
def test_main_initial(self):
self.assertRaises(NameError, cli.main, ['fake_object'])
cli.main(['-b', 'fake_object=5', 'fake_object*2'])
self.assertWasStreamed('10\n')
def test_main_raw(self):
cli.main(['-r', 'range(3)'])
self.assertWasStreamed('[0, 1, 2]\n')
def test_main_linemode(self):
self.sin.write('file1\nfile2\nfile3')
self.sin.seek(0)
cli.main(['-l', 'line+".txt"'])
self.assertWasStreamed('file1.txt\nfile2.txt\nfile3.txt\n')
def test_main_skipheader(self):
self.sin.write('pid\n5\n3')
self.sin.seek(0)
cli.main(['-hl', 'row[0]*2'])
self.assertWasStreamed('10\n6\n')
def test_version(self):
cli.main(['--version'])
self.sout.seek(0)
streamed = self.sout.read()
self.assertTrue(re.match('\d+\.\d+\.\d+$', streamed))
def test_passed_file(self):
t = NamedTemporaryFile()
t.write('bye\n')
t.flush()
cli.main(['-l', 'line.replace("bye", "hi")', t.name])
self.assertWasStreamed('hi\n')
def test_passed_file_twice(self):
t = NamedTemporaryFile()
t.write('bye\n')
t.flush()
cli.main(['-l', 'line.replace("bye", "hi")', t.name, t.name])
self.assertWasStreamed('hi\nhi\n')
def test_in_place_modification(self):
t = NamedTemporaryFile()
t.write('bye\n')
t.flush()
extension = '.bak'
backup = t.name + extension
cli.main(['-l', '-i', extension, 'line.replace("bye", "hi")', t.name])
with open(backup) as f:
self.assertEqual(f.read(), 'bye\n')
with open(t.name) as f:
self.assertEqual(f.read(), 'hi\n')
def test_in_place_no_extension(self):
t = NamedTemporaryFile()
t.write('bye\n')
t.flush()
extension = ''
cli.main(['-l', '-i', extension, 'line.replace("bye", "hi")', t.name])
with open(t.name) as f:
self.assertEqual(f.read(), 'hi\n')
|
mit
| 4,869,518,767,320,799,000 | 30.763889 | 78 | 0.555313 | false |
waltaskew/tweet-cereal
|
tweet_cereal/app.py
|
1
|
1799
|
"""Define an app for processing requests to LaTeX up tweets."""
import base64
import os
import flask
import flask_socketio
import eventlet
import redis
import tweet_cereal.tasks as tasks
import tweet_cereal.tweets as tweets
JOB_RESULT_POLL_INTERVAL = 1
app = flask.Flask(__name__)
socketio = flask_socketio.SocketIO(app, binary=True, async_mode='eventlet')
redis_conn = redis.from_url(os.environ['REDIS_URL'])
@socketio.on('render')
def render_pdf(message):
"""Render PDFs for the requested twitter collection.
Parameters
----------
message : dict
Description of the timeline to render.
Returns
-------
bytes
The PDF representing the collection.
"""
session = tweets.get_oauth_session(
os.environ['TWITTER_CONSUMER_KEY'],
os.environ['TWITTER_CONSUMER_SECRET'],
os.environ['TWITTER_ACCESS_TOKEN'],
os.environ['TWITTER_ACCESS_TOKEN_SECRET'],
)
job = tasks.write_pdf_from_timeline.delay(
session, message['collection_id'])
while not job.ready():
# While we poll the job to completion, sleep to yield the event loop.
eventlet.sleep(JOB_RESULT_POLL_INTERVAL)
result = job.result
# Drop the result from Redis now that we've read it.
# This is hacky -- the proper way to do this is to set
# the celery result_expires timeout to a reasonable value
# so the job results don't hang around too long.
# However, we're doing things on the cheap and free-tier
# Redis in Heroku is just 25 MB, so we need to clear the result now.
#
# The really proper way to do this is to use S3 rather than Redis
# to pass PDFs back from the workers but again, we're on the cheap.
redis_conn.delete('celery-task-meta-%s' % job.task_id)
return result
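# A minimal client-side sketch of calling the 'render' event handled above.
# This is an illustrative assumption, not part of this module: it presumes a
# Socket.IO client library (e.g. python-socketio) and a locally running
# server; the URL and collection id are placeholders.
#
#   import socketio
#   client = socketio.Client()
#   client.connect('http://localhost:5000')
#   pdf_bytes = client.call('render', {'collection_id': 'example-collection'})
#   client.disconnect()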
|
bsd-3-clause
| -4,704,310,292,591,530,000 | 29.491525 | 77 | 0.676487 | false |
mrcrilly/vs-vlan-db
|
vsvlandb/forms/vlan.py
|
1
|
1069
|
from vsvlandb.models import Site, Subnet, Impact
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, BooleanField, IntegerField
from wtforms.validators import DataRequired, NumberRange, Optional, Length
from wtforms.ext.sqlalchemy.fields import QuerySelectMultipleField, QuerySelectField
def active_sites():
return Site.query.filter_by(isactive=True)
def active_subnets():
return Subnet.query.filter_by(isactive=True)
def active_impacts():
return Impact.query.filter_by(isactive=True)
class VlanForm(Form):
# Required:
vlan = IntegerField(u'VLAN', validators=[NumberRange(min=1,max=4096), DataRequired()])
# Optional:
subnets = QuerySelectMultipleField(query_factory=active_subnets)
sites = QuerySelectMultipleField(query_factory=active_sites)
impact = QuerySelectField(query_factory=active_impacts, validators=[Optional()])
description = TextField(u'Description', validators=[Length(min=0, max=50)])
isactive = BooleanField(u'Active', default=True)
enhanced = BooleanField(u'Enhanced')
|
mit
| 3,781,316,050,793,028,000 | 32.40625 | 90 | 0.76333 | false |
spurin/xmldataset
|
examples/example6.py
|
1
|
2665
|
import xmldataset
import pprint
# Setup Pretty Printing
ppsetup = pprint.PrettyPrinter(indent=4)
pp = ppsetup.pprint
xml = """<?xml version="1.0"?>
<catalog>
<shop number="1">
<book id="bk101">
<author>Gambardella, Matthew</author>
<title>XML Developer's Guide</title>
<genre>Computer</genre>
<price>44.95</price>
<publish_date>2000-10-01</publish_date>
<description>An in-depth look at creating applications
with XML.</description>
</book>
<book id="bk102">
<author>Ralls, Kim</author>
<title>Midnight Rain</title>
<genre>Fantasy</genre>
<price>5.95</price>
<publish_date>2000-12-16</publish_date>
<description>A former architect battles corporate zombies,
an evil sorceress, and her own childhood to become queen
of the world.</description>
</book>
</shop>
<shop number="2">
<book id="bk103">
<author>Corets, Eva</author>
<title>Maeve Ascendant</title>
<genre>Fantasy</genre>
<price>5.95</price>
<publish_date>2000-11-17</publish_date>
<description>After the collapse of a nanotechnology
society in England, the young survivors lay the
foundation for a new society.</description>
</book>
<book id="bk104">
<author>Corets, Eva</author>
<title>Oberon's Legacy</title>
<genre>Fantasy</genre>
<price>5.95</price>
<publish_date>2001-03-10</publish_date>
<description>In post-apocalypse England, the mysterious
agent known only as Oberon helps to create a new life
for the inhabitants of London. Sequel to Maeve
Ascendant.</description>
</book>
</shop>
</catalog>"""
profile="""
catalog
shop
number = external_dataset:shop_information
book
id = dataset:title_and_author,prefix:shop_information_ dataset:title_and_genre,prefix:shop_information_
author = dataset:title_and_author,prefix:shop_information_
            title = dataset:title_and_author,prefix:shop_information_ dataset:title_and_genre,prefix:shop_information_
genre = dataset:title_and_genre,prefix:shop_information_
__EXTERNAL_VALUE__ = shop_information:number:title_and_author:shop_information_number shop_information:number:title_and_genre:shop_information_number"""
# Pretty Print the output
output = xmldataset.parse_using_profile(xml,profile)
pp(output)
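# Rough sketch of the expected structure (an assumption inferred from the
# profile above, not verified output): each dataset name maps to a list of
# row dicts whose keys carry the configured 'shop_information_' prefix, e.g.
#   output['title_and_author'][0] -> {'shop_information_number': '1',
#                                     'shop_information_id': 'bk101',
#                                     'shop_information_author': 'Gambardella, Matthew',
#                                     'shop_information_title': "XML Developer's Guide"}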
|
bsd-3-clause
| 4,255,772,175,311,198,000 | 37.623188 | 164 | 0.612758 | false |
linuxyan/opsmanage
|
app/auth/views.py
|
1
|
1754
|
#coding=utf-8
from flask import render_template, redirect, request, url_for, flash,abort
from . import auth
from ..models import users
from .. import db
from .forms import LoginForm,UserEditForm
from flask.ext.login import login_user, logout_user,current_user
from flask.ext.login import login_required
@auth.route('/login',methods=['GET','POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = users.query.filter_by(username=form.username.data).first()
if user is not None and user.verify_password(form.password.data) and user.status == 0:
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
return render_template('login.html',form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
return redirect(url_for('main.index'))
@auth.route('/upuser/<int:userid>', methods=['GET','POST'])
@login_required
def upuser(userid):
if current_user.id == userid or current_user.role == '0':
form = UserEditForm()
if form.validate_on_submit():
user = users.query.filter_by(id=userid).first()
if user is not None and user.verify_password(form.oldpassword.data):
user.password = form.password.data
db.session.add(user)
db.session.commit()
                flash(u'Password changed successfully!')
return render_template('useredit.html',form=form)
user = users.query.filter_by(id=userid).first()
form.username.data = user.username
form.name.data = user.name
return render_template('useredit.html',form=form)
else:
abort(403)
|
apache-2.0
| -5,257,831,739,415,127,000 | 36.755556 | 94 | 0.632606 | false |
pjdelport/HTTPretty
|
tests/functional/test_urllib2.py
|
1
|
7926
|
# #!/usr/bin/env python
# -*- coding: utf-8 -*-
# <HTTPretty - HTTP client mock for Python>
# Copyright (C) <2011> Gabriel Falcão <gabriel@nacaolivre.org>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import urllib2
from sure import *
from httpretty import HTTPretty, httprettified
@httprettified
@within(two=microseconds)
def test_httpretty_should_mock_a_simple_get_with_urllib2_read():
u"HTTPretty should mock a simple GET with urllib2.read()"
HTTPretty.register_uri(HTTPretty.GET, "http://globo.com/",
body="The biggest portal in Brazil")
fd = urllib2.urlopen('http://globo.com')
got = fd.read()
fd.close()
assert that(got).equals('The biggest portal in Brazil')
@httprettified
@within(two=microseconds)
def test_httpretty_should_mock_headers_urllib2(now):
u"HTTPretty should mock basic headers with urllib2"
HTTPretty.register_uri(HTTPretty.GET, "http://github.com/",
body="this is supposed to be the response",
status=201)
request = urllib2.urlopen('http://github.com')
headers = dict(request.headers)
request.close()
assert that(request.code).equals(201)
assert that(headers).equals({
'content-type': 'text/plain',
'connection': 'close',
'content-length': '35',
'status': '201 Created',
'server': 'Python/HTTPretty',
'date': now.strftime('%a, %d %b %Y %H:%M:%S GMT'),
})
@httprettified
@within(two=microseconds)
def test_httpretty_should_allow_adding_and_overwriting_urllib2(now):
    u"HTTPretty should allow adding and overwriting headers with urllib2"
HTTPretty.register_uri(HTTPretty.GET, "http://github.com/",
body="this is supposed to be the response",
adding_headers={
'Server': 'Apache',
'Content-Length': '27',
'Content-Type': 'application/json',
})
request = urllib2.urlopen('http://github.com')
headers = dict(request.headers)
request.close()
assert that(request.code).equals(200)
assert that(headers).equals({
'content-type': 'application/json',
'connection': 'close',
'content-length': '27',
'status': '200 OK',
'server': 'Apache',
'date': now.strftime('%a, %d %b %Y %H:%M:%S GMT'),
})
@httprettified
@within(two=microseconds)
def test_httpretty_should_allow_forcing_headers_urllib2():
u"HTTPretty should allow forcing headers with urllib2"
HTTPretty.register_uri(HTTPretty.GET, "http://github.com/",
body="this is supposed to be the response",
forcing_headers={
'Content-Type': 'application/xml',
})
request = urllib2.urlopen('http://github.com')
headers = dict(request.headers)
request.close()
assert that(headers).equals({
'content-type': 'application/xml',
})
@httprettified
@within(two=microseconds)
def test_httpretty_should_allow_adding_and_overwriting_by_kwargs_u2(now):
    u"HTTPretty should allow adding and overwriting headers by " \
"keyword args with urllib2"
HTTPretty.register_uri(HTTPretty.GET, "http://github.com/",
body="this is supposed to be the response",
server='Apache',
content_length='23456789',
content_type='application/json')
request = urllib2.urlopen('http://github.com')
headers = dict(request.headers)
request.close()
assert that(request.code).equals(200)
assert that(headers).equals({
'content-type': 'application/json',
'connection': 'close',
'content-length': '23456789',
'status': '200 OK',
'server': 'Apache',
'date': now.strftime('%a, %d %b %Y %H:%M:%S GMT'),
})
@httprettified
@within(two=microseconds)
def test_httpretty_should_support_a_list_of_successive_responses_urllib2(now):
u"HTTPretty should support adding a list of successive " \
"responses with urllib2"
HTTPretty.register_uri(
HTTPretty.GET, "https://api.yahoo.com/test",
responses=[
HTTPretty.Response(body="first response", status=201),
HTTPretty.Response(body='second and last response', status=202),
])
request1 = urllib2.urlopen('https://api.yahoo.com/test')
body1 = request1.read()
request1.close()
assert that(request1.code).equals(201)
assert that(body1).equals('first response')
request2 = urllib2.urlopen('https://api.yahoo.com/test')
body2 = request2.read()
request2.close()
assert that(request2.code).equals(202)
assert that(body2).equals('second and last response')
request3 = urllib2.urlopen('https://api.yahoo.com/test')
body3 = request3.read()
request3.close()
assert that(request3.code).equals(202)
assert that(body3).equals('second and last response')
@httprettified
@within(two=microseconds)
def test_can_inspect_last_request(now):
u"HTTPretty.last_request is a mimetools.Message request from last match"
HTTPretty.register_uri(HTTPretty.POST, "http://api.github.com/",
body='{"repositories": ["HTTPretty", "lettuce"]}')
request = urllib2.Request(
'http://api.github.com',
'{"username": "gabrielfalcao"}',
{
'content-type': 'text/json',
},
)
fd = urllib2.urlopen(request)
got = fd.read()
fd.close()
assert that(HTTPretty.last_request.method).equals('POST')
assert that(HTTPretty.last_request.body).equals(
'{"username": "gabrielfalcao"}',
)
assert that(HTTPretty.last_request.headers['content-type']).equals(
'text/json',
)
assert that(got).equals('{"repositories": ["HTTPretty", "lettuce"]}')
@httprettified
@within(two=microseconds)
def test_can_inspect_last_request_with_ssl(now):
u"HTTPretty.last_request is recorded even when mocking 'https' (SSL)"
HTTPretty.register_uri(HTTPretty.POST, "https://secure.github.com/",
body='{"repositories": ["HTTPretty", "lettuce"]}')
request = urllib2.Request(
'https://secure.github.com',
'{"username": "gabrielfalcao"}',
{
'content-type': 'text/json',
},
)
fd = urllib2.urlopen(request)
got = fd.read()
fd.close()
assert that(HTTPretty.last_request.method).equals('POST')
assert that(HTTPretty.last_request.body).equals(
'{"username": "gabrielfalcao"}',
)
assert that(HTTPretty.last_request.headers['content-type']).equals(
'text/json',
)
assert that(got).equals('{"repositories": ["HTTPretty", "lettuce"]}')
|
mit
| 1,222,219,063,076,765,400 | 32.867521 | 78 | 0.629527 | false |
ywang-bom/wismon
|
wismon/wismon.py
|
1
|
20145
|
"""
WMO WIS monitoring tool for OpenWIS.
All datetime values must conform to ISO 8601.
date - YYYY-MM-DD
datetime - YYYY-MM-DDThh:mm:ssZ
"""
import os
import sys
import time
import re
from datetime import datetime
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
import urllib2
import json
import logging
import logging.handlers
from sqlite3 import OperationalError
from .db import *
from .db import WisMonDB
from .templates import MonitorJSON, CacheJSON, CentresJSON, EventsJSON
LOGGER = logging.getLogger('wismon')
BASE_DIR = os.path.dirname(__file__)
DATE_PATTERN = re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}$')
DATETIME_PATTERN = re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$')
MONITOR_JSON_NAME = 'monitor'
CACHE_JSON_NAME = 'cache'
CENTRES_JSON_NAME = 'centres'
EVENTS_JSON_NAME = 'events'
NON_DRAFT = 0
DRAFT = 1
class WmError(Exception):
pass
def assert_valid_date_string(date_string):
if DATE_PATTERN.match(date_string) is None:
raise WmError('Invalid ISO-8601 date string: {0}'.format(date_string))
return True
def assert_valid_datetime_string(datetime_string):
if DATETIME_PATTERN.match(datetime_string) is None:
raise WmError('Invalid ISO-8601 datetime string: {0}'.format(datetime_string))
return True
def get_uniform_datetime_string(s):
    """
    Ensure a date string is converted to a datetime string. Full datetime
    strings and None are passed through unchanged.
    """
    if s is None:  # Do nothing for None value
        return s
    elif DATETIME_PATTERN.match(s) is not None:
        # Already a full datetime string
        return s
    elif DATE_PATTERN.match(s) is not None:
        # Date-only string: extend it to midnight UTC.
        # Note: the assert_valid_* helpers raise on mismatch, so they cannot
        # be used to branch between the two formats here.
        return '{0}T00:00:00Z'.format(s)
    else:
        raise WmError('Invalid string for specifying time: {0}'.format(s))
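# Illustrative behaviour of the helper above (values are examples only):
#   get_uniform_datetime_string('2014-07-17')           -> '2014-07-17T00:00:00Z'
#   get_uniform_datetime_string('2014-07-17T12:30:00Z') -> returned unchanged
#   get_uniform_datetime_string(None)                   -> None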
def ping_url(url, timeout=20, n_retries=1):
"""
Return the response time of the given URL or -1 in case of failure
"""
for _ in xrange(n_retries):
try:
start_time = time.time()
req = urllib2.urlopen(url, timeout=timeout)
if 200 <= req.getcode() < 300:
return time.time() - start_time
except Exception:
pass
else:
return -1
def ping_oaipmh(url, timeout=20, n_retries=1):
return ping_url('{0}?verb=Identify'.format(url), timeout, n_retries)
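# Usage note (hypothetical URL): ping_url('http://example.org/portal') returns
# the elapsed seconds for an HTTP 2xx response, or -1 once all retries fail;
# ping_oaipmh simply appends '?verb=Identify' before delegating to ping_url.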
class WisMon(object):
def __init__(self, working_dir):
self.working_dir = working_dir
self.config_file = os.path.join(self.working_dir, 'config', 'wismon.cfg')
if not (os.path.exists(self.config_file) and os.path.isfile(self.config_file)):
raise WmError('Config file not exists: %s' % self.config_file)
self.config = ConfigParser()
self.config.optionxform = str # preserve case
self.config.read(os.path.join(self.working_dir, 'config', 'wismon.cfg'))
self.gisc_name = self.config.get('monitor', 'centre')
self.time_now = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
self.data_dir = os.path.join(self.working_dir, 'data')
self.log_dir = os.path.join(self.working_dir, 'logs')
self.json_dir = os.path.join(self.data_dir, 'JSON')
self.n_messages_retain = self.config.getint('system', 'n_messages_retain')
# Set up the logging file
log_handler = logging.handlers.RotatingFileHandler(
os.path.join(self.log_dir, 'wismon.log'),
maxBytes=1048576,
backupCount=5)
log_handler.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s [%(funcName)s] - %(message)s'))
LOGGER.addHandler(log_handler)
level = self.config.get('system', 'logging_level')
try:
level = logging._levelNames[level.upper()]
LOGGER.setLevel(level)
        except KeyError:
LOGGER.warning('invalid logging level: %s' % level)
# Initialize the database after logging is configured so logging
# messages are properly directed.
self.wismon_db = WisMonDB(os.path.join(self.data_dir, 'wismon.sqlite3'))
def config_get_with_default(self, section_name, option_name, default=None):
if self.config.has_section(section_name) and self.config.has_option(section_name, option_name):
return self.config.get(section_name, option_name)
else:
return default
def monitor_cache_json_gen(self, force_regen=False):
"""
Generate json messages for Monitor and Cache JSON
"""
date_now = '{0}T00:00:00Z'.format(self.time_now[:10])
if self.wismon_db.json_exists('monitor', date_now):
if force_regen:
LOGGER.info('Re-generate JSON files for date: {0}'.format(date_now))
self.wismon_db.restore_metadata(date_now)
self.json_del(MONITOR_JSON_NAME, date_now)
self.json_del(CACHE_JSON_NAME, date_now)
else:
raise WmError('JSON messages already exist for date: {0}'.format(date_now))
else:
LOGGER.info('Creating JSON messages for date: {0}'.format(date_now))
# Create JSON file objects
monitor_json = MonitorJSON(self.gisc_name, date_now)
cache_json = CacheJSON(self.gisc_name, date_now)
# Read the category name for WIMMS set
wimms_name = self.config_get_with_default('monitor', 'WIMMS_name', '')
LOGGER.info('Sending query to OpenWIS DB ...')
rows = query_openwis(
host=self.config.get('system', 'openwis_db_host'),
port=self.config.getint('system', 'openwis_db_port'),
database=self.config.get('system', 'openwis_db_name'),
user=self.config.get('system', 'openwis_db_user'),
password=self.config.get('system', 'openwis_db_pass')
)
# Save data from the previous day
# TODO: Somehow the alter and create table statements have to be executed as a single
# script. Otherwise, the table will not be created after the alter statement.
LOGGER.info("Saving snapshot of the metadata catalogue from previous day ...")
self.wismon_db.archive_metadata()
LOGGER.info('Saving new snapshot of the metadata catalogue ...')
self.wismon_db.save_metadata(rows)
LOGGER.info('Querying for overall metadata stats ...')
stats = self.wismon_db.group_by_metadata_status(
"category LIKE 'WIS-GISC-%' OR category IN ('{0}', '{1}')".format('draft', wimms_name)
)
monitor_json.set('metrics.metadata_catalogue.number_of_metadata', stats[NON_DRAFT].n_metadata)
monitor_json.set(
'metrics.cache_24h.number_of_product_instances',
stats[NON_DRAFT].n_mapped_files
)
monitor_json.set(
'metrics.cache_24h.number_of_product_instances_missing_metadata',
stats[DRAFT].n_mapped_files
)
monitor_json.set(
'metrics.cache_24h.size_of_cache',
stats[NON_DRAFT].size
)
monitor_json.set(
'metrics.cache_24h.size_of_product_instances_missing_metadata',
stats[DRAFT].size
)
monitor_json.set(
'metrics.cache_24h.number_of_unique_products_missing_metadata',
stats[DRAFT].n_metadata
)
# Get the urn patterns
urn_patterns = {}
try:
for centre_name in self.config.options('cache'):
urn_patterns[centre_name] = self.config.get('cache', centre_name, raw=True).split()
except NoSectionError:
pass
LOGGER.info('Querying for AMDCN metadata stats ...')
number_of_unique_products_missing_metadata_AoR = 0
for centre_name, patterns in urn_patterns.items():
centre_idx = cache_json.new_member()
cache_json.set('centres[{0}].centre'.format(centre_idx), centre_name)
where_expr = "(category LIKE 'WIS-GISC-%' OR category IN ('{0}', '{1}')) AND ({2})".format(
'draft', wimms_name,
' OR '.join("uuid REGEXP '{0}'".format(p) for p in patterns)
)
stats = self.wismon_db.group_by_metadata_status(where_expr)
cache_json.set(
'centres[{0}].metrics.number_of_product_instances'.format(centre_idx),
stats[NON_DRAFT].n_mapped_files
)
cache_json.set(
'centres[{0}].metrics.size_of_product_instances'.format(centre_idx),
stats[NON_DRAFT].size
)
cache_json.set(
'centres[{0}].metrics.number_of_product_instances_missing_metadata'.format(centre_idx),
stats[DRAFT].n_mapped_files
)
cache_json.set(
'centres[{0}].metrics.size_of_product_instances_missing_metadata'.format(centre_idx),
stats[DRAFT].size
)
cache_json.set(
'centres[{0}].metrics.number_of_unique_products_missing_metadata'.format(centre_idx),
stats[DRAFT].n_metadata
)
number_of_unique_products_missing_metadata_AoR += stats[DRAFT].n_metadata
cache_json.set(
'centres[{0}].metrics.number_of_metadata'.format(centre_idx),
stats[NON_DRAFT].n_mapped_files
)
monitor_json.set(
'metrics.cache_24h.number_of_unique_products_missing_metadata_AoR',
number_of_unique_products_missing_metadata_AoR
)
LOGGER.info('Checking self service status ...')
portal_url = self.config.get('monitor', 'portal_url')
monitor_json.set('metrics.services.portal.status', ping_url(portal_url) >= 0)
# Check whether OAI-PMH server is up
oaipmh_url = self.config.get('monitor', 'oaipmh_url')
monitor_json.set('metrics.services.oaipmh.status', ping_oaipmh(oaipmh_url) >= 0)
sru_url = self.config.get('monitor', 'sru_url')
monitor_json.set('metrics.services.sru.status', ping_url(sru_url) >= 0)
distribution_url = self.config.get('monitor', 'distribution_url')
monitor_json.set('metrics.services.distribution_system.status', ping_url(distribution_url) >= 0)
monitor_json.set('gisc_properties.portal_url', portal_url)
monitor_json.set('gisc_properties.oaipmh_url', oaipmh_url)
monitor_json.set('gisc_properties.sru_url', sru_url)
monitor_json.set('gisc_properties.monitor_url', self.config.get('monitor', 'monitor_url') or None)
monitor_json.set('gisc_properties.cache_url', self.config.get('monitor', 'cache_url'))
monitor_json.set('gisc_properties.centres_url', self.config.get('monitor', 'centres_url'))
monitor_json.set('gisc_properties.events_url', self.config.get('monitor', 'events_url'))
monitor_json.set('gisc_properties.backup_giscs',
[x.strip() for x in self.config.get('monitor', 'backup_giscs').split(',')])
monitor_json.set('gisc_properties.rmdcn.main', self.config.get('monitor', 'rmdcn.main'))
monitor_json.set('gisc_properties.rmdcn.sub', self.config.get('monitor', 'rmdcn.sub'))
monitor_json.set('gisc_properties.rmdcn.DR_main', self.config.get('monitor', 'rmdcn.DR_main'))
monitor_json.set('gisc_properties.contact_info.voice',
self.config.get('monitor', 'contact_info.voice'))
monitor_json.set('gisc_properties.contact_info.email',
self.config.get('monitor', 'contact_info.email'))
LOGGER.info('Querying stats for new and modified metadata ...')
monitor_json.set('metrics.metadata_catalogue.number_of_changes_insert_modify',
self.wismon_db.stats_inserted_modified(wimms_name))
LOGGER.info('Querying stats for deleted metadata ...')
monitor_json.set('metrics.metadata_catalogue.number_of_changes_delete',
self.wismon_db.stats_deleted(wimms_name))
monitor_json.set('remarks', self.wismon_db.remarks_get())
# Metadata breakdown stats
try:
if self.config.getboolean('analysis', 'metadata_source_breakdown'):
LOGGER.info('Calculating metadata source breakdown stats')
self.wismon_db.calc_metadata_breakdown(date_now)
except (NoSectionError, NoOptionError):
pass
LOGGER.info('Saving JSON messages to files')
monitor_json.to_file(os.path.join(self.json_dir, '{0}.json'.format(MONITOR_JSON_NAME)))
cache_json.to_file(os.path.join(self.json_dir, '{0}.json'.format(CACHE_JSON_NAME)))
LOGGER.info('Saving JSON messages to local database')
self.wismon_db.json_save(MONITOR_JSON_NAME, date_now, monitor_json)
self.wismon_db.json_save(CACHE_JSON_NAME, date_now, cache_json)
if self.n_messages_retain >= 0:
self.wismon_db.json_throttle(MONITOR_JSON_NAME, self.n_messages_retain)
self.wismon_db.json_throttle(CACHE_JSON_NAME, self.n_messages_retain)
return monitor_json, cache_json
def centres_json_gen(self, force_regen=False):
import threading
time0_now = '{0}00Z'.format(self.time_now[:17])
if self.wismon_db.json_exists(CENTRES_JSON_NAME, time0_now):
if force_regen:
LOGGER.info('Re-generate Centres JSON for datetime: {0}'.format(time0_now))
self.wismon_db.json_del(CENTRES_JSON_NAME, time0_now)
else:
raise WmError('Centres JSON already exists for datetime: {0}'.format(time0_now))
else:
LOGGER.info('Creating Centres JSON for datetime: {0}'.format(time0_now))
centres_json = CentresJSON(self.gisc_name, time0_now)
n_threads = self.config.getint('system', 'n_threads')
LOGGER.info('About to run {0} threads to ping service URLs of WIS Centres ...'.format(n_threads))
centres_sections = [name for name in self.config.sections() if name.startswith('centres-')]
def f(s, url, path_to_json_element):
if url is None or url.strip() == '':
res = None
else:
LOGGER.info('Ping {0}'.format(url))
with s:
res = ping_url(url, timeout=20, n_retries=3)
centres_json.set(path_to_json_element, res)
semaphore = threading.Semaphore(n_threads)
all_threads = []
for section_name in centres_sections:
idx_centre = centres_json.new_member()
centres_json.set('centres[{0}].centre'.format(idx_centre), self.config.get(section_name, 'name'))
for option_name_stub in ('portal', 'oaipmh', 'sru'):
t = threading.Thread(
target=f,
args=(semaphore,
self.config_get_with_default(section_name, '{0}_url'.format(option_name_stub)),
'centres[{0}].metrics.{1}_response_time'.format(idx_centre, option_name_stub))
)
t.start()
all_threads.append(t)
for idx, t in enumerate(all_threads):
t.join()
centres_json.to_file(os.path.join(self.json_dir, '{0}.json'.format(CENTRES_JSON_NAME)))
try:
self.wismon_db.json_save(CENTRES_JSON_NAME, time0_now, centres_json)
except OperationalError as e:
            LOGGER.warning('Database error: {}. Retry in 60 seconds'.format(e))
time.sleep(60)
self.wismon_db.json_save(CENTRES_JSON_NAME, time0_now, centres_json)
if self.n_messages_retain >= 0:
self.wismon_db.json_throttle(CENTRES_JSON_NAME, self.n_messages_retain)
return centres_json
def events_json_gen(self, force_regen=False):
if self.wismon_db.json_exists(EVENTS_JSON_NAME, self.time_now):
if force_regen:
LOGGER.info('Re-generate Events JSON for datetime: {0}'.format(self.time_now))
self.wismon_db.json_del(EVENTS_JSON_NAME, self.time_now)
else:
raise WmError('Events JSON already exists for datetime: {0}'.format(self.time_now))
else:
LOGGER.info('Creating Events JSON for datetime: {0}'.format(self.time_now))
events_json = EventsJSON(self.gisc_name, self.time_now)
# Events JSON
LOGGER.info('Gathering events ...')
for _, title, text, start_datetime_string, end_datetime_string in self.wismon_db.events_get(
self.time_now):
idx_event = events_json.new_member()
events_json.set('events[{0}].id'.format(idx_event), idx_event + 1)
events_json.set('events[{0}].title'.format(idx_event), title)
events_json.set('events[{0}].text'.format(idx_event), text)
events_json.set('events[{0}].start'.format(idx_event), start_datetime_string)
events_json.set('events[{0}].end'.format(idx_event), end_datetime_string)
events_json.to_file(os.path.join(self.json_dir, '{0}.json'.format(EVENTS_JSON_NAME)))
self.wismon_db.json_save(EVENTS_JSON_NAME, self.time_now, events_json)
if self.n_messages_retain >= 0:
self.wismon_db.json_throttle(EVENTS_JSON_NAME, self.n_messages_retain)
return events_json
def json_get(self, name, datetime_string):
datetime_string = get_uniform_datetime_string(datetime_string)
row = self.wismon_db.json_get(name, datetime_string)
if row:
return json.loads(row[3])
else:
raise WmError('No {0} JSON message for datetime: {1}'.format(
name, datetime_string or 'Most Recent'))
def json_del(self, name, datetime_string):
datetime_string = get_uniform_datetime_string(datetime_string)
count = self.wismon_db.json_del(name, datetime_string)
if count == 0:
raise WmError('No {0} JSON messages for datetime: {1}'.format(
name, datetime_string or 'Most Recent'))
def event_add(self, start_datetime_string, end_datetime_string, title, text=''):
start_datetime_string = get_uniform_datetime_string(start_datetime_string)
end_datetime_string = get_uniform_datetime_string(end_datetime_string)
LOGGER.info('Adding event: {0}'.format(title))
self.wismon_db.event_add(start_datetime_string, end_datetime_string, title, text)
def event_get(self, datetime_string):
datetime_string = get_uniform_datetime_string(datetime_string)
rows = self.wismon_db.events_get(datetime_string)
if rows:
return [
{
'id': eid, 'title': title, 'text': text or '',
'start_datetime_string': sd,
'end_datetime_string': ed
}
for eid, title, text, sd, ed in rows
]
else:
raise WmError('No event for datetime: {0}'.format(datetime_string))
def event_del(self, eid):
count = self.wismon_db.event_del(eid)
if count == 0:
raise WmError('No event of id: {0}'.format(eid))
def remarks_set(self, text):
self.wismon_db.remarks_set(text)
def remarks_get(self):
row = self.wismon_db.remarks_get()
if row is not None:
return row[0]
else:
raise WmError('No remarks is found')
@staticmethod
def init_working_directory(working_directory):
config_dir = os.path.join(working_directory, 'config')
if not os.path.exists(config_dir):
os.makedirs(config_dir)
with open(os.path.join(BASE_DIR, 'config_template.cfg')) as ins:
config_template = ins.read()
with open(os.path.join(config_dir, 'wismon.cfg'), 'w') as outs:
outs.write(config_template)
json_dir = os.path.join(working_directory, 'data', 'JSON')
if not os.path.exists(json_dir):
os.makedirs(json_dir)
log_dir = os.path.join(working_directory, 'logs')
if not os.path.exists(log_dir):
os.mkdir(log_dir)
|
gpl-3.0
| 629,759,576,338,308,900 | 40.96875 | 109 | 0.605808 | false |
mozilla/ichnaea
|
ichnaea/models/tests/test_observation.py
|
1
|
22477
|
import json
from ichnaea.conftest import GB_LAT, GB_LON, GB_MCC
from ichnaea.models import (
BlueObservation,
BlueReport,
CellObservation,
CellReport,
constants,
Radio,
Report,
ReportSource,
WifiObservation,
WifiReport,
)
from ichnaea.tests.factories import (
BlueObservationFactory,
CellObservationFactory,
WifiObservationFactory,
)
class BaseTest(object):
def compare(self, name, value, expect):
assert self.sample(**{name: value})[name] == expect
class TestReport(BaseTest):
def sample(self, **kwargs):
report = {"lat": GB_LAT, "lon": GB_LON}
for (k, v) in kwargs.items():
report[k] = v
return Report.validate(report)
def test_latlon(self):
assert self.sample(lat=GB_LAT, lon=GB_LON) is not None
assert self.sample(lat=0.0, lon=0.0) is None
assert self.sample(lat=GB_LAT, lon=None) is None
def test_accuracy(self):
field = "accuracy"
self.compare(field, constants.MIN_ACCURACY - 0.1, None)
self.compare(field, 0.0, 0.0)
self.compare(field, 10.2, 10.2)
self.compare(field, constants.MAX_ACCURACY + 0.1, None)
def test_altitude(self):
field = "altitude"
self.compare(field, constants.MIN_ALTITUDE - 0.1, None)
self.compare(field, -100.0, -100.0)
self.compare(field, 0.0, 0.0)
self.compare(field, 10.1, 10.1)
self.compare(field, constants.MAX_ALTITUDE + 0.1, None)
def test_altitude_accuracy(self):
field = "altitude_accuracy"
self.compare(field, constants.MIN_ALTITUDE_ACCURACY - 0.1, None)
self.compare(field, 0.0, 0.0)
self.compare(field, 10.2, 10.2)
self.compare(field, constants.MAX_ALTITUDE_ACCURACY + 0.1, None)
def test_heading(self):
field = "heading"
self.compare(field, constants.MIN_HEADING - 0.1, None)
self.compare(field, 0.0, 0.0)
self.compare(field, 357.2, 357.2)
self.compare(field, constants.MAX_HEADING + 0.1, None)
def test_pressure(self):
field = "pressure"
self.compare(field, constants.MIN_PRESSURE - 0.1, None)
self.compare(field, 870.1, 870.1)
self.compare(field, 1080.2, 1080.2)
self.compare(field, constants.MAX_PRESSURE + 0.1, None)
def test_source(self):
field = "source"
for source in (
ReportSource.fixed,
ReportSource.gnss,
ReportSource.fused,
ReportSource.query,
):
self.compare(field, source, source)
self.compare(field, "gnss", ReportSource.gnss)
def test_speed(self):
field = "speed"
self.compare(field, constants.MIN_SPEED - 0.1, None)
self.compare(field, 0.0, 0.0)
self.compare(field, 100.1, 100.1)
self.compare(field, constants.MAX_SPEED + 0.1, None)
def test_timestamp(self):
field = "timestamp"
self.compare(field, constants.MIN_TIMESTAMP - 1, None)
self.compare(field, 1405602028568, 1405602028568)
self.compare(field, constants.MAX_TIMESTAMP + 1, None)
class TestBlueObservation(BaseTest):
def test_fields(self):
mac = "3680873e9b83"
obs = BlueObservation.create(
mac=mac,
lat=GB_LAT,
lon=GB_LON,
pressure=1010.2,
source="fixed",
timestamp=1405602028568,
signal=-45,
)
assert obs.lat == GB_LAT
assert obs.lon == GB_LON
assert obs.mac == mac
assert obs.pressure == 1010.2
assert obs.signal == -45
assert obs.source is ReportSource.fixed
assert obs.timestamp == 1405602028568
assert obs.shard_id == "8"
def test_json(self):
obs = BlueObservationFactory.build(accuracy=None, source=ReportSource.gnss)
result = BlueObservation.from_json(json.loads(json.dumps(obs.to_json())))
assert type(result) is BlueObservation
assert result.accuracy is None
assert result.mac == obs.mac
assert result.lat == obs.lat
assert result.lon == obs.lon
assert result.source is ReportSource.gnss
assert type(result.source) is ReportSource
def test_weight(self):
obs_factory = BlueObservationFactory.build
assert round(obs_factory(accuracy=None).weight, 2) == 1.0
assert round(obs_factory(accuracy=0.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=10.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=40.0).weight, 2) == 0.5
assert round(obs_factory(accuracy=100.0).weight, 2) == 0.32
assert round(obs_factory(accuracy=100.1).weight, 2) == 0.0
assert round(obs_factory(accuracy=None, age=1000).weight, 2) == 1.0
assert round(obs_factory(accuracy=None, age=8000).weight, 2) == 0.5
assert round(obs_factory(accuracy=None, age=20001).weight, 2) == 0.0
assert round(obs_factory(accuracy=None, speed=None).weight, 2) == 1.0
assert round(obs_factory(accuracy=None, speed=0.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=None, speed=1.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=None, speed=20.0).weight, 2) == 0.5
assert round(obs_factory(accuracy=None, speed=51.0).weight, 2) == 0.0
class TestBlueReport(BaseTest):
def sample(self, **kwargs):
report = {"mac": "3680873e9b83"}
for (k, v) in kwargs.items():
report[k] = v
return BlueReport.validate(report)
def test_mac(self):
assert self.sample(mac="3680873e9b83") is not None
assert self.sample(mac="") is None
assert self.sample(mac="1234567890123") is None
assert self.sample(mac="aaaaaaZZZZZZ") is None
def test_age(self):
field = "age"
self.compare(field, constants.MIN_AGE - 1, None)
self.compare(field, -40000, -40000)
self.compare(field, 60000, 60000)
self.compare(field, constants.MAX_AGE + 1, None)
def test_signal(self):
field = "signal"
self.compare(field, constants.MIN_BLUE_SIGNAL - 1, None)
self.compare(field, -90, -90)
self.compare(field, -10, -10)
self.compare(field, constants.MAX_BLUE_SIGNAL + 1, None)
class TestCellObservation(BaseTest):
def test_fields(self):
obs = CellObservation.create(
radio=Radio.gsm,
mcc=GB_MCC,
mnc=5,
lac=12345,
cid=23456,
lat=GB_LAT,
lon=GB_LON,
pressure=1010.2,
source="gnss",
timestamp=1405602028568,
asu=26,
signal=-61,
ta=10,
)
assert obs.lat == GB_LAT
assert obs.lon == GB_LON
assert obs.pressure == 1010.2
assert obs.source == ReportSource.gnss
assert obs.timestamp == 1405602028568
assert obs.radio == Radio.gsm
assert obs.mcc == GB_MCC
assert obs.mnc == 5
assert obs.lac == 12345
assert obs.cid == 23456
assert obs.asu == 26
assert obs.signal == -61
assert obs.ta == 10
assert obs.shard_id == "gsm"
def test_mcc_latlon(self):
sample = dict(radio=Radio.gsm, mnc=6, lac=1, cid=2, lat=GB_LAT, lon=GB_LON)
assert CellObservation.create(mcc=GB_MCC, **sample) is not None
assert CellObservation.create(mcc=262, **sample) is None
def test_json(self):
obs = CellObservationFactory.build(accuracy=None, source="fixed")
result = CellObservation.from_json(json.loads(json.dumps(obs.to_json())))
assert type(result) is CellObservation
assert result.accuracy is None
        assert type(result.radio) is Radio
assert result.radio == obs.radio
assert result.mcc == obs.mcc
assert result.mnc == obs.mnc
assert result.lac == obs.lac
assert result.cid == obs.cid
assert result.lat == obs.lat
assert result.lon == obs.lon
assert result.source is ReportSource.fixed
assert type(result.source) is ReportSource
def test_weight(self):
obs_factory = CellObservationFactory.build
assert (
round(obs_factory(radio=Radio.gsm, accuracy=None, signal=-95).weight, 2)
== 1.0
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=0.0, signal=-95).weight, 2)
== 1.0
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=10.0, signal=-95).weight, 2)
== 1.0
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=160, signal=-95).weight, 2)
== 0.25
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=200, signal=-95).weight, 2)
== 0.22
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=1000, signal=-95).weight, 2)
== 0.1
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=1000.1, signal=-95).weight, 2)
== 0.0
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=10.0, signal=-51).weight, 2)
== 10.17
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=160.0, signal=-51).weight, 2)
== 2.54
)
assert (
round(obs_factory(radio=Radio.gsm, accuracy=10.0, signal=-113).weight, 2)
== 0.52
)
assert (
round(obs_factory(radio=Radio.wcdma, accuracy=10.0, signal=-25).weight, 2)
== 256.0
)
assert (
round(obs_factory(radio=Radio.wcdma, accuracy=160.0, signal=-25).weight, 2)
== 64.0
)
assert (
round(obs_factory(radio=Radio.wcdma, accuracy=10.0, signal=-121).weight, 2)
== 0.47
)
assert (
round(obs_factory(radio=Radio.lte, accuracy=10.0, signal=-43).weight, 2)
== 47.96
)
assert (
round(obs_factory(radio=Radio.lte, accuracy=160.0, signal=-43).weight, 2)
== 11.99
)
assert (
round(obs_factory(radio=Radio.lte, accuracy=10.0, signal=-140).weight, 2)
== 0.3
)
assert round(obs_factory(accuracy=0, age=1000).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, age=8000).weight, 2) == 0.5
assert round(obs_factory(accuracy=0, age=20001).weight, 2) == 0.0
assert round(obs_factory(accuracy=0, speed=None).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, speed=0.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, speed=1.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, speed=20.0).weight, 2) == 0.5
assert round(obs_factory(accuracy=0, speed=50.1).weight, 2) == 0.0
class TestCellReport(BaseTest):
def sample(self, **kwargs):
report = {"radio": Radio.gsm, "mcc": GB_MCC, "mnc": 1, "lac": 2, "cid": 3}
for (k, v) in kwargs.items():
report[k] = v
return CellReport.validate(report)
def test_cellid(self):
assert self.sample() is not None
assert self.sample(radio=None) is None
assert self.sample(mcc=None) is None
assert self.sample(mnc=None) is None
assert self.sample(lac=None) is None
assert self.sample(cid=None) is None
def test_radio(self):
field = "radio"
self.compare(field, "gsm", Radio.gsm)
self.compare(field, "wcdma", Radio.wcdma)
self.compare(field, "lte", Radio.lte)
assert self.sample(radio="cdma") is None
assert self.sample(radio="hspa") is None
assert self.sample(radio="wimax") is None
def test_mcc(self):
self.compare("mcc", 262, 262)
assert self.sample(mcc=constants.MIN_MCC - 1) is None
assert self.sample(mcc=constants.MAX_MCC + 1) is None
def test_mnc(self):
self.compare("mnc", 5, 5)
assert self.sample(mnc=constants.MIN_MNC - 1) is None
assert self.sample(mnc=constants.MAX_MNC + 1) is None
def test_lac(self):
self.compare("lac", 5, 5)
assert self.sample(lac=constants.MIN_LAC - 1) is None
assert self.sample(lac=constants.MAX_LAC + 1) is None
def test_lac_cid(self):
assert (
self.sample(radio=Radio.gsm, lac=None, cid=constants.MAX_CID_GSM, psc=None)
is None
)
assert (
self.sample(radio=Radio.gsm, lac=None, cid=constants.MAX_CID_GSM, psc=1)
is None
)
def test_cid(self):
for radio in (Radio.gsm, Radio.wcdma, Radio.lte):
assert self.sample(radio=radio, cid=constants.MIN_CID - 1) is None
assert self.sample(radio=radio, cid=12345)["cid"] == 12345
assert self.sample(radio=radio, cid=constants.MAX_CID + 1) is None
# correct radio type for large GSM cid
cid = constants.MAX_CID_GSM + 1
assert self.sample(radio=Radio.gsm, cid=cid)["radio"] is Radio.wcdma
# accept large WCDMA/LTE cid
assert self.sample(radio=Radio.wcdma, cid=cid)["cid"] == cid
assert self.sample(radio=Radio.lte, cid=cid)["cid"] == cid
def test_psc(self):
for radio in (Radio.gsm, Radio.wcdma, Radio.lte):
assert self.sample(radio=radio, psc=constants.MIN_PSC - 1)["psc"] is None
assert self.sample(radio=radio, psc=15)["psc"] == 15
            assert self.sample(radio=radio, psc=constants.MAX_PSC + 1)["psc"] is None
assert (
self.sample(radio=Radio.lte, psc=constants.MAX_PSC_LTE + 1)["psc"] is None
)
def test_age(self):
field = "age"
self.compare(field, constants.MIN_AGE - 1, None)
self.compare(field, -40000, -40000)
self.compare(field, 60000, 60000)
self.compare(field, constants.MAX_AGE + 1, None)
def test_asu(self):
for radio in (Radio.gsm, Radio.wcdma, Radio.lte):
assert (
self.sample(radio=radio, asu=constants.MIN_CELL_ASU[radio] - 1)["asu"]
is None
)
assert self.sample(radio=radio, asu=15)["asu"] == 15
assert (
self.sample(radio=radio, asu=constants.MAX_CELL_ASU[radio] + 1)["asu"]
is None
)
def test_asu_signal(self):
for radio in (Radio.gsm, Radio.wcdma, Radio.lte):
# if both are specified, leave them untouched
assert self.sample(radio=radio, asu=15, signal=-75)["signal"] == -75
for radio, signal in ((Radio.gsm, -83), (Radio.wcdma, -101), (Radio.lte, -125)):
# calculate signal from asu
assert self.sample(radio=radio, asu=15, signal=None)["signal"] == signal
# switch asu/signal fields
assert self.sample(radio=radio, asu=signal, signal=None)["signal"] == signal
assert self.sample(radio=radio, asu=signal, signal=10)["signal"] == signal
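        # The expected values above are consistent with the usual ASU-to-dBm
        # conversions (stated here as an observation, not as a reference to
        # ichnaea internals):
        #   GSM:   signal = 2 * asu - 113  -> asu=15 gives -83
        #   WCDMA: signal = asu - 116      -> asu=15 gives -101
        #   LTE:   signal = asu - 140      -> asu=15 gives -125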
def test_signal(self):
for radio in (Radio.gsm, Radio.wcdma, Radio.lte):
assert (
self.sample(radio=radio, signal=constants.MIN_CELL_SIGNAL[radio] - 1)[
"signal"
]
is None
)
assert self.sample(radio=radio, signal=-75)["signal"] == -75
assert (
self.sample(radio=radio, signal=constants.MAX_CELL_SIGNAL[radio] + 1)[
"signal"
]
is None
)
def test_ta(self):
field = "ta"
self.compare(field, constants.MIN_CELL_TA - 1, None)
self.compare(field, 0, 0)
self.compare(field, 31, 31)
self.compare(field, constants.MAX_CELL_TA + 1, None)
assert self.sample(radio=Radio.gsm, ta=1)["ta"] == 1
assert self.sample(radio=Radio.wcdma, ta=1)["ta"] is None
assert self.sample(radio=Radio.lte, ta=1)["ta"] == 1
class TestWifiObservation(BaseTest):
def test_invalid(self):
assert WifiObservation.create(mac="3680873e9b83", lat=0.0, lon=0.0) is None
assert WifiObservation.create(mac="", lat=0.0, lon=0.0) is None
def test_fields(self):
mac = "3680873e9b83"
obs = WifiObservation.create(
mac=mac,
lat=GB_LAT,
lon=GB_LON,
pressure=1010.2,
source=ReportSource.query,
timestamp=1405602028568,
channel=5,
signal=-45,
)
assert obs.lat == GB_LAT
assert obs.lon == GB_LON
assert obs.mac == mac
assert obs.pressure == 1010.2
assert obs.source == ReportSource.query
assert obs.timestamp == 1405602028568
assert obs.channel == 5
assert obs.signal == -45
assert obs.shard_id == "8"
def test_json(self):
obs = WifiObservationFactory.build(accuracy=None, source=ReportSource.query)
result = WifiObservation.from_json(json.loads(json.dumps(obs.to_json())))
assert type(result) is WifiObservation
assert result.accuracy is None
assert result.mac == obs.mac
assert result.lat == obs.lat
assert result.lon == obs.lon
assert result.source == ReportSource.query
assert type(result.source) is ReportSource
def test_weight(self):
obs_factory = WifiObservationFactory.build
assert round(obs_factory(accuracy=None, signal=-80).weight, 2) == 1.0
assert round(obs_factory(accuracy=0.0, signal=-80).weight, 2) == 1.0
assert round(obs_factory(accuracy=10.0, signal=-80).weight, 2) == 1.0
assert round(obs_factory(accuracy=40.0, signal=-80).weight, 2) == 0.5
assert round(obs_factory(accuracy=100, signal=-80).weight, 2) == 0.32
assert round(obs_factory(accuracy=200, signal=-80).weight, 2) == 0.22
assert round(obs_factory(accuracy=200.1, signal=-80).weight, 2) == 0.0
assert round(obs_factory(accuracy=10, signal=-100).weight, 2) == 0.48
assert round(obs_factory(accuracy=10, signal=-30).weight, 2) == 16.0
assert round(obs_factory(accuracy=10, signal=-10).weight, 2) == 123.46
assert round(obs_factory(accuracy=40, signal=-30).weight, 2) == 8.0
assert round(obs_factory(accuracy=100, signal=-30).weight, 2) == 5.06
assert round(obs_factory(accuracy=100, signal=-10).weight, 2) == 39.04
assert round(obs_factory(accuracy=0, age=0).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, age=1000).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, age=-1000).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, age=5000).weight, 2) == 0.63
assert round(obs_factory(accuracy=0, age=8000).weight, 2) == 0.5
assert round(obs_factory(accuracy=0, age=20001).weight, 2) == 0.0
assert round(obs_factory(accuracy=0, speed=None).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, speed=0.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, speed=1.0).weight, 2) == 1.0
assert round(obs_factory(accuracy=0, speed=20.0).weight, 2) == 0.5
assert round(obs_factory(accuracy=0, speed=50.1).weight, 2) == 0.0
class TestWifiReport(BaseTest):
def sample(self, **kwargs):
report = {"mac": "3680873e9b83"}
for (k, v) in kwargs.items():
report[k] = v
return WifiReport.validate(report)
def test_mac(self):
assert self.sample(mac="3680873e9b83") is not None
assert self.sample(mac="3680873E9B83") is not None
assert self.sample(mac="36:80:87:3e:9b:83") is not None
assert self.sample(mac="36-80-87-3e-9b-83") is not None
assert self.sample(mac="36.80.87.3e.9b.83") is not None
# We considered but do not ban locally administered WiFi
# mac addresses based on the U/L bit
# https://en.wikipedia.org/wiki/MAC_address
assert self.sample(mac="0a0000000000") is not None
assert self.sample(mac="") is None
assert self.sample(mac="1234567890123") is None
assert self.sample(mac="aaaaaaZZZZZZ") is None
assert self.sample(mac="000000000000") is None
assert self.sample(mac="ffffffffffff") is None
assert self.sample(mac=constants.WIFI_TEST_MAC) is None
def test_age(self):
field = "age"
self.compare(field, constants.MIN_AGE - 1, None)
self.compare(field, -40000, -40000)
self.compare(field, 60000, 60000)
self.compare(field, constants.MAX_AGE + 1, None)
def test_channel(self):
field = "channel"
self.compare(field, constants.MIN_WIFI_CHANNEL - 1, None)
self.compare(field, 1, 1)
self.compare(field, 36, 36)
self.compare(field, constants.MAX_WIFI_CHANNEL + 1, None)
def test_channel_frequency(self):
sample = self.sample(channel=0, frequency=10)
assert sample["channel"] is None
assert sample["frequency"] is None
sample = self.sample(channel=0, frequency=2412)
assert sample["channel"] == 1
assert sample["frequency"] == 2412
sample = self.sample(channel=4, frequency=10)
assert sample["channel"] == 4
assert sample["frequency"] == 2427
sample = self.sample(channel=1, frequency=2427)
assert sample["channel"] == 1
assert sample["frequency"] == 2427
def test_frequency(self):
field = "frequency"
self.compare(field, constants.MIN_WIFI_FREQUENCY - 1, None)
self.compare(field, 2412, 2412)
self.compare(field, 2484, 2484)
self.compare(field, 4915, 4915)
self.compare(field, 5170, 5170)
self.compare(field, 5925, 5925)
self.compare(field, constants.MAX_WIFI_FREQUENCY + 1, None)
def test_signal(self):
field = "signal"
self.compare(field, constants.MIN_WIFI_SIGNAL - 1, None)
self.compare(field, -90, -90)
self.compare(field, -10, -10)
self.compare(field, constants.MAX_WIFI_SIGNAL + 1, None)
def test_snr(self):
field = "snr"
self.compare(field, constants.MIN_WIFI_SNR - 1, None)
self.compare(field, 1, 1)
self.compare(field, 40, 40)
self.compare(field, constants.MAX_WIFI_SNR + 1, None)
|
apache-2.0
| -9,173,250,571,382,194,000 | 36.461667 | 88 | 0.588958 | false |
kif/freesas
|
e2etest/e2etest_bift.py
|
1
|
10320
|
"""End to end tests for auto_gpa.py """
__authors__ = ["Martha Brennich"]
__license__ = "MIT"
__date__ = "27/12/2020"
import unittest
import pathlib
import logging
from platform import system
from subprocess import run, PIPE, STDOUT
from os import linesep
from os.path import normpath
import codecs
import parse
from numpy import loadtxt
logger = logging.getLogger(__name__)
if system() == "Windows":
free_bift = "free_bift.exe"
else:
free_bift = "free_bift"
class TestBIFT(unittest.TestCase):
"""End to end tests for free_bift"""
cwd = pathlib.Path.cwd()
test_location = pathlib.Path(__file__)
test_data_location = pathlib.Path(test_location.parent, "e2etest_data")
bsa_filename = pathlib.Path(test_data_location, "bsa_005_sub.dat")
sas_curve2_filename = pathlib.Path(test_data_location, "SASDF52.dat")
SASDFX7 = pathlib.Path(test_data_location, "SASDFX7.dat")
expected_outfile_name_bsa = pathlib.Path(
cwd, bsa_filename.name
).with_suffix(".out")
def __init__(self, testName, **extra_kwargs):
super().__init__(testName)
self.extra_arg = extra_kwargs
def remove_output_files(self):
try:
self.expected_outfile_name_bsa.unlink()
except FileNotFoundError:
pass
def setUp(self):
self.remove_output_files()
return super().setUp()
def tearDown(self):
self.remove_output_files()
return super().tearDown()
def test_bm29_bsa_without_arguments_creates_out_file(self):
"""
Test whether bift app on BSA data from BM29 creates an out file.
"""
run_app = run(
[free_bift, normpath(str(self.bsa_filename))],
stdout=PIPE,
stderr=STDOUT,
check=True,
)
self.assertEqual(
run_app.returncode, 0, msg="bift on BM29 BSA completed well"
)
self.assertTrue(
self.expected_outfile_name_bsa.exists(),
f"bift on BM29 BSA created out file with correct name: {str(self.expected_outfile_name_bsa)}",
)
def test_bm29_bsa_out_file_has_the_expected_format(self):
"""
Test whether bift app on BSA data from BM29 creates an out file.
"""
_ = run(
[free_bift, normpath(str(self.bsa_filename))],
stdout=PIPE,
stderr=STDOUT,
check=True,
)
with open(
self.expected_outfile_name_bsa, "r", encoding="utf-8"
) as out_file:
out_file_content = out_file.readlines()
with codecs.open(
self.expected_outfile_name_bsa, encoding="utf-8"
) as filecp:
data = loadtxt(
filecp,
dtype=float,
delimiter="\t",
skiprows=9,
)
self.assertEqual(out_file_content[0].strip(), f"# {self.bsa_filename}")
self.assertTrue(
out_file_content[1].startswith("# Dmax= ")
and "±" in out_file_content[1],
msg=f"exptexted line to resemble # Dmax= 9.76±0.05 got {out_file_content[1]}",
)
self.assertTrue(
out_file_content[2].startswith("# 𝛂= ")
and "±" in out_file_content[2],
msg=f"exptexted line to resemble # 𝛂= 8764.5±1384.2 got {out_file_content[2]}",
)
self.assertTrue(
out_file_content[3].startswith("# S₀= ")
and "±" in out_file_content[3],
msg=f"exptexted line to resemble # S₀= 0.0002±0.0000 got {out_file_content[3]}",
)
self.assertTrue(
out_file_content[4].startswith("# χ²= ")
and "±" in out_file_content[4],
msg=f"exptexted line to resemble # χ²= 1.89±0.00 got {out_file_content[4]}",
)
self.assertTrue(
out_file_content[5].startswith("# logP= ")
and "±" in out_file_content[5],
msg=f"exptexted line to resemble # logP= -914.15±0.47 got {out_file_content[5]}",
)
self.assertTrue(
out_file_content[6].startswith("# Rg= ")
and "±" in out_file_content[6],
msg=f"exptexted line to resemble # Rg= 2.98±0.00 got {out_file_content[6]}",
)
self.assertTrue(
out_file_content[7].startswith("# I₀= ")
and "±" in out_file_content[7],
msg=f"exptexted line to resemble 60.86±0.00 got {out_file_content[7]}",
)
self.assertEqual(out_file_content[8].strip(), "")
self.assertEqual(
out_file_content[9].strip(),
"# r\tp(r)\tsigma_p(r)",
)
self.assertEqual(
data.shape[1],
3,
)
def test_bm29_bsa_result_numerically_matches_expectations(self):
"""
Test whether the results of the bift app on BM29 BSA give roughly the
expected Dmax, I₀ anr Rg and that the first is and the last point is close to 0.
"""
        _ = run(
[free_bift, normpath(str(self.bsa_filename))],
stdout=PIPE,
stderr=STDOUT,
check=True,
)
with open(
self.expected_outfile_name_bsa, "r", encoding="utf-8"
) as out_file:
out_file_content = out_file.readlines()
self.assertAlmostEqual(
float(out_file_content[1][8:12]),
9.75,
places=1,
msg=f"expected Dmax to be close to 0.75 got {out_file_content[1]}",
)
self.assertAlmostEqual(
float(out_file_content[6][6:10]),
3.0,
places=1,
msg=f"expected Rg to be close to 3.0 got {out_file_content[6]}",
)
self.assertAlmostEqual(
0.1 * float(out_file_content[7][6:10]),
6.1,
places=1,
msg=f"expected I0 to be close to 60 got {out_file_content[7]}",
)
self.assertEqual(
out_file_content[10].strip(),
"0.0\t0.0\t0.0",
msg=f"Expected first p(r) line to be '0.0 0.0 0.0' got {out_file_content[10]}",
)
last_line_content = out_file_content[-1].split("\t")
self.assertAlmostEqual(
float(last_line_content[0]),
9.75,
places=1,
msg=f"expected last r point to be close to 9.75 got {last_line_content[0]}",
)
self.assertAlmostEqual(
float(last_line_content[1]),
0,
places=2,
msg=f"expected last r point to be close to 0 got {last_line_content[1]}",
)
def test_free_bift_outputs_one_line_summary(self):
"""
Test whether free_bift app on BM29 BSA puts a one line summary in stdout.
"""
run_app = run(
[free_bift, normpath(str(self.bsa_filename))],
stdout=PIPE,
stderr=STDOUT,
check=True,
)
if system() == "Windows":
run_app_output = str(run_app.stdout, encoding="utf-16")[:-1].replace("\\\\", "\\")
else:
run_app_output = str(run_app.stdout, encoding="utf-8")[:-1]
run_app_output_parsed = parse.parse(
"bsa_005_sub.out: Dmax= {Dmax}±{Dmax_err}; 𝛂= {alpha}±{alpha_err}; S₀= {S0}±{S0_err}; χ²= {chi_squared}±{chi_squared_err}; logP= {logP}±{logP_err}; Rg= {Rg}±{Rg_err}; I₀= {I0}±{I0_err}",
run_app_output,
)
self.assertListEqual(
list(run_app_output_parsed.named),
[
"Dmax",
"Dmax_err",
"alpha",
"alpha_err",
"S0",
"S0_err",
"chi_squared",
"chi_squared_err",
"logP",
"logP_err",
"Rg",
"Rg_err",
"I0",
"I0_err",
],
msg="Could not parse free_bift std output",
)
def test_free_bift_values_of_one_line_summary_match_expectations(self):
"""
Test whether the one line summary of free_bift app on BM29 BSA gives the expected values.
"""
run_app = run(
[free_bift, normpath(str(self.bsa_filename))],
stdout=PIPE,
stderr=STDOUT,
check=True,
)
if system() == "Windows":
run_app_output = str(run_app.stdout, encoding="utf-16")[:-1].replace("\\\\", "\\")
else:
run_app_output = str(run_app.stdout, encoding="utf-8")[:-1]
run_app_output_parsed = parse.parse(
"bsa_005_sub.out: Dmax= {Dmax}±{Dmax_err}; 𝛂= {alpha}±{alpha_err}; S₀= {S0}±{S0_err}; χ²= {chi_squared}±{chi_squared_err}; logP= {logP}±{logP_err}; Rg= {Rg}±{Rg_err}; I₀= {I0}±{I0_err}",
run_app_output,
)
self.assertAlmostEqual(
float(run_app_output_parsed["Dmax"]),
9.75,
places=1,
msg=f"expected Dmax to be close to 0.75 got {run_app_output_parsed['Dmax']}",
)
self.assertAlmostEqual(
float(run_app_output_parsed["Rg"]),
3.0,
places=1,
msg=f"expected Rg to be close to 3.0 got {run_app_output_parsed['Rg']}",
)
self.assertAlmostEqual(
0.1 * float(run_app_output_parsed["I0"]),
6.1,
places=1,
msg=f"expected I0 to be close to 60 got {run_app_output_parsed['I0']}",
)
def suite():
"""Build test suite for free_bift"""
test_suite = unittest.TestSuite()
test_suite.addTest(
TestBIFT("test_bm29_bsa_without_arguments_creates_out_file")
)
test_suite.addTest(
TestBIFT("test_bm29_bsa_out_file_has_the_expected_format")
)
test_suite.addTest(
TestBIFT("test_bm29_bsa_result_numerically_matches_expectations")
)
test_suite.addTest(TestBIFT("test_free_bift_outputs_one_line_summary"))
test_suite.addTest(
TestBIFT(
"test_free_bift_values_of_one_line_summary_match_expectations"
)
)
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
runner.run(suite())
|
mit
| 4,057,080,431,987,493,000 | 31.455696 | 198 | 0.530031 | false |
shakamunyi/sahara
|
sahara/tests/unit/plugins/spark/test_plugin.py
|
1
|
8592
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import testtools
from sahara import conductor as cond
from sahara import context
from sahara import exceptions as ex
from sahara.plugins import base as pb
from sahara.plugins import exceptions as pe
from sahara.plugins.spark import plugin as pl
from sahara.service.edp.spark import engine
from sahara.tests.unit import base
from sahara.tests.unit import testutils as tu
from sahara.utils import edp
conductor = cond.API
class SparkPluginTest(base.SaharaWithDbTestCase):
def setUp(self):
super(SparkPluginTest, self).setUp()
self.override_config("plugins", ["spark"])
pb.setup_plugins()
def _init_cluster_dict(self, version):
cluster_dict = {
'name': 'cluster',
'plugin_name': 'spark',
'hadoop_version': version,
'default_image_id': 'image'}
return cluster_dict
def test_plugin09_edp_engine_validation(self):
cluster_dict = self._init_cluster_dict('0.9.1')
job = mock.Mock()
job.type = edp.JOB_TYPE_SPARK
cluster = conductor.cluster_create(context.ctx(), cluster_dict)
plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
edp_engine = plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK)
with testtools.ExpectedException(
ex.InvalidDataException,
value_re="Spark 1.3.1 or higher required to run "
"Spark jobs\nError ID: .*"):
edp_engine.validate_job_execution(cluster, job, mock.Mock())
def test_plugin10_edp_engine(self):
self._test_engine('1.3.1', edp.JOB_TYPE_SPARK,
engine.SparkJobEngine)
def test_plugin10_shell_engine(self):
self._test_engine('1.3.1', edp.JOB_TYPE_SHELL,
engine.SparkShellJobEngine)
def test_plugin11_edp_engine(self):
self._test_engine('1.6.0', edp.JOB_TYPE_SPARK,
engine.SparkJobEngine)
def test_plugin12_shell_engine(self):
self._test_engine('1.6.0', edp.JOB_TYPE_SHELL,
engine.SparkShellJobEngine)
def test_plugin21_edp_engine(self):
self._test_engine('2.1.0', edp.JOB_TYPE_SPARK,
engine.SparkJobEngine)
def test_plugin22_shell_engine(self):
self._test_engine('2.1.0', edp.JOB_TYPE_SHELL,
engine.SparkShellJobEngine)
def _test_engine(self, version, job_type, eng):
cluster_dict = self._init_cluster_dict(version)
cluster = conductor.cluster_create(context.ctx(), cluster_dict)
plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
self.assertIsInstance(plugin.get_edp_engine(cluster, job_type), eng)
def test_plugin13_edp_engine(self):
cluster_dict = {
'name': 'cluster',
'plugin_name': 'spark',
'hadoop_version': '1.3.1',
'default_image_id': 'image'}
cluster = conductor.cluster_create(context.ctx(), cluster_dict)
plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
self.assertIsInstance(
plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK),
engine.SparkJobEngine)
def test_cleanup_configs(self):
remote = mock.Mock()
instance = mock.Mock()
extra_conf = {'job_cleanup': {
'valid': True,
'script': 'script_text',
'cron': 'cron_text'}}
instance.node_group.node_processes = ["master"]
instance.node_group.id = id
cluster_dict = self._init_cluster_dict('1.3.1')
cluster = conductor.cluster_create(context.ctx(), cluster_dict)
plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
remote.write_file_to.assert_called_with(
'/etc/hadoop/tmp-cleanup.sh',
'script_text')
remote.execute_command.assert_called_with(
'sudo sh -c \'echo "cron_text" > /etc/cron.d/spark-cleanup\'')
remote.reset_mock()
instance.node_group.node_processes = ["worker"]
plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
self.assertFalse(remote.called)
remote.reset_mock()
instance.node_group.node_processes = ["master"]
extra_conf['job_cleanup']['valid'] = False
plugin._push_cleanup_job(remote, cluster, extra_conf, instance)
remote.execute_command.assert_called_with(
'sudo rm -f /etc/crond.d/spark-cleanup')
class SparkValidationTest(base.SaharaTestCase):
def setUp(self):
super(SparkValidationTest, self).setUp()
pb.setup_plugins()
self.plugin = pl.SparkProvider()
def test_validate(self):
self.ng = []
self.ng.append(tu.make_ng_dict("nn", "f1", ["namenode"], 0))
self.ng.append(tu.make_ng_dict("ma", "f1", ["master"], 0))
self.ng.append(tu.make_ng_dict("sl", "f1", ["slave"], 0))
self.ng.append(tu.make_ng_dict("dn", "f1", ["datanode"], 0))
self._validate_case(1, 1, 3, 3)
self._validate_case(1, 1, 3, 4)
self._validate_case(1, 1, 4, 3)
with testtools.ExpectedException(pe.InvalidComponentCountException):
self._validate_case(2, 1, 3, 3)
with testtools.ExpectedException(pe.InvalidComponentCountException):
self._validate_case(1, 2, 3, 3)
with testtools.ExpectedException(pe.InvalidComponentCountException):
self._validate_case(0, 1, 3, 3)
with testtools.ExpectedException(pe.RequiredServiceMissingException):
self._validate_case(1, 0, 3, 3)
cl = self._create_cluster(
1, 1, 3, 3, cluster_configs={'HDFS': {'dfs.replication': 4}})
with testtools.ExpectedException(pe.InvalidComponentCountException):
self.plugin.validate(cl)
def _create_cluster(self, *args, **kwargs):
lst = []
for i in range(0, len(args)):
self.ng[i]['count'] = args[i]
lst.append(self.ng[i])
return tu.create_cluster("cluster1", "tenant1", "spark",
"1.60", lst, **kwargs)
def _validate_case(self, *args):
cl = self._create_cluster(*args)
self.plugin.validate(cl)
class SparkProviderTest(base.SaharaTestCase):
def setUp(self):
super(SparkProviderTest, self).setUp()
def test_supported_job_types(self):
provider = pl.SparkProvider()
res = provider.get_edp_job_types()
self.assertEqual([edp.JOB_TYPE_SHELL, edp.JOB_TYPE_SPARK],
res['1.3.1'])
self.assertEqual([edp.JOB_TYPE_SHELL, edp.JOB_TYPE_SPARK],
res['1.6.0'])
self.assertEqual([edp.JOB_TYPE_SHELL, edp.JOB_TYPE_SPARK],
res['2.1.0'])
def test_edp_config_hints(self):
provider = pl.SparkProvider()
res = provider.get_edp_config_hints(edp.JOB_TYPE_SHELL, "1.3.1")
self.assertEqual({'configs': {}, 'args': [], 'params': {}},
res['job_config'])
res = provider.get_edp_config_hints(edp.JOB_TYPE_SHELL, "1.6.0")
self.assertEqual({'configs': {}, 'args': [], 'params': {}},
res['job_config'])
res = provider.get_edp_config_hints(edp.JOB_TYPE_SPARK, "1.3.1")
self.assertEqual({'args': [], 'configs': []},
res['job_config'])
res = provider.get_edp_config_hints(edp.JOB_TYPE_SPARK, "1.6.0")
self.assertEqual({'args': [], 'configs': []},
res['job_config'])
res = provider.get_edp_config_hints(edp.JOB_TYPE_SPARK, "2.1.0")
self.assertEqual({'args': [], 'configs': []},
res['job_config'])
res = provider.get_edp_config_hints(edp.JOB_TYPE_SPARK, "2.1.0")
self.assertEqual({'args': [], 'configs': []},
res['job_config'])
|
apache-2.0
| 3,557,837,620,523,994,000 | 36.356522 | 77 | 0.598231 | false |
quinox/weblate
|
weblate/trans/models/subproject.py
|
1
|
51530
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models, transaction
from django.utils.translation import ugettext as _, ugettext_lazy
from django.core.mail import mail_admins
from django.core.exceptions import ValidationError
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.utils import timezone
from glob import glob
import os
import traceback
import sys
import time
import fnmatch
import re
from weblate.trans.formats import FILE_FORMAT_CHOICES, FILE_FORMATS, ParseError
from weblate.trans.mixins import PercentMixin, URLMixin, PathMixin
from weblate.trans.filelock import FileLock
from weblate.trans.fields import RegexField
from weblate.trans.site import get_site_url
from weblate.trans.util import (
is_repo_link, cleanup_repo_url, cleanup_path, report_error,
)
from weblate.trans.signals import (
vcs_post_push, vcs_post_update, translation_post_add
)
from weblate.trans.vcs import RepositoryException, VCS_REGISTRY, VCS_CHOICES
from weblate.trans.models.translation import Translation
from weblate.trans.validators import (
validate_repoweb, validate_filemask,
validate_extra_file, validate_autoaccept,
validate_check_flags, validate_commit_message,
)
from weblate.lang.models import Language
from weblate.appsettings import (
PRE_COMMIT_SCRIPT_CHOICES, POST_UPDATE_SCRIPT_CHOICES,
POST_COMMIT_SCRIPT_CHOICES, POST_PUSH_SCRIPT_CHOICES,
POST_ADD_SCRIPT_CHOICES,
HIDE_REPO_CREDENTIALS,
DEFAULT_COMMITER_EMAIL, DEFAULT_COMMITER_NAME,
)
from weblate.accounts.models import notify_merge_failure, get_author_name
from weblate.trans.models.changes import Change
DEFAULT_COMMIT_MESSAGE = (
'Translated using Weblate (%(language_name)s)\n\n'
'Currently translated at %(translated_percent)s%% '
'(%(translated)s of %(total)s strings)'
)
NEW_LANG_CHOICES = (
('contact', ugettext_lazy('Use contact form')),
('url', ugettext_lazy('Point to translation instructions URL')),
('add', ugettext_lazy('Automatically add language file')),
('none', ugettext_lazy('No adding of language')),
)
MERGE_CHOICES = (
('merge', ugettext_lazy('Merge')),
('rebase', ugettext_lazy('Rebase')),
)
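# Illustrative sketch (comments only, sample values assumed): DEFAULT_COMMIT_MESSAGE
# is an old-style %-format template, so it is expanded with a mapping like
#
#     DEFAULT_COMMIT_MESSAGE % {
#         'language_name': 'Czech',
#         'translated_percent': '75.0',
#         'translated': 150,
#         'total': 200,
#     }
#
# which would produce:
#
#     Translated using Weblate (Czech)
#
#     Currently translated at 75.0% (150 of 200 strings)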
class SubProjectManager(models.Manager):
# pylint: disable=W0232
def get_linked(self, val):
'''
Returns subproject for linked repo.
'''
if not is_repo_link(val):
return None
project, subproject = val[10:].split('/', 1)
return self.get(slug=subproject, project__slug=project)
class SubProject(models.Model, PercentMixin, URLMixin, PathMixin):
name = models.CharField(
verbose_name=ugettext_lazy('Component name'),
max_length=100,
help_text=ugettext_lazy('Name to display')
)
slug = models.SlugField(
verbose_name=ugettext_lazy('URL slug'),
db_index=True,
max_length=100,
help_text=ugettext_lazy('Name used in URLs and file names.')
)
project = models.ForeignKey(
'Project',
verbose_name=ugettext_lazy('Project'),
)
vcs = models.CharField(
verbose_name=ugettext_lazy('Version control system'),
max_length=20,
help_text=ugettext_lazy(
'Version control system to use to access your '
'repository with translations.'
),
choices=VCS_CHOICES,
default='git',
)
repo = models.CharField(
verbose_name=ugettext_lazy('Source code repository'),
max_length=200,
help_text=ugettext_lazy(
'URL of a repository, use weblate://project/component '
'for sharing with other component.'
),
)
push = models.CharField(
verbose_name=ugettext_lazy('Repository push URL'),
max_length=200,
help_text=ugettext_lazy(
'URL of a push repository, pushing is disabled if empty.'
),
blank=True
)
repoweb = models.URLField(
verbose_name=ugettext_lazy('Repository browser'),
help_text=ugettext_lazy(
'Link to repository browser, use %(branch)s for branch, '
'%(file)s and %(line)s as filename and line placeholders.'
),
validators=[validate_repoweb],
blank=True,
)
git_export = models.CharField(
verbose_name=ugettext_lazy('Exported repository URL'),
max_length=200,
help_text=ugettext_lazy(
'URL of a repository where users can fetch changes from Weblate'
),
blank=True
)
report_source_bugs = models.EmailField(
verbose_name=ugettext_lazy('Source string bug report address'),
help_text=ugettext_lazy(
'Email address where errors in source string will be reported, '
'keep empty for no emails.'
),
max_length=254,
blank=True,
)
branch = models.CharField(
verbose_name=ugettext_lazy('Repository branch'),
max_length=50,
help_text=ugettext_lazy('Repository branch to translate'),
default='',
blank=True
)
filemask = models.CharField(
verbose_name=ugettext_lazy('File mask'),
max_length=200,
validators=[validate_filemask],
help_text=ugettext_lazy(
'Path of files to translate, use * instead of language code, '
'for example: po/*.po or locale/*/LC_MESSAGES/django.po.'
)
)
template = models.CharField(
verbose_name=ugettext_lazy('Monolingual base language file'),
max_length=200,
blank=True,
help_text=ugettext_lazy(
'Filename of translations base file, which contains all strings '
'and their source; this is recommended to use '
'for monolingual translation formats.'
)
)
edit_template = models.BooleanField(
verbose_name=ugettext_lazy('Edit base file'),
default=True,
help_text=ugettext_lazy(
'Whether users will be able to edit base file '
'for monolingual translations.'
)
)
new_base = models.CharField(
verbose_name=ugettext_lazy('Base file for new translations'),
max_length=200,
blank=True,
help_text=ugettext_lazy(
'Filename of file which is used for creating new translations. '
'For Gettext choose .pot file.'
)
)
file_format = models.CharField(
verbose_name=ugettext_lazy('File format'),
max_length=50,
default='auto',
choices=FILE_FORMAT_CHOICES,
help_text=ugettext_lazy(
'Automatic detection might fail for some formats '
'and is slightly slower.'
),
)
extra_commit_file = models.TextField(
verbose_name=ugettext_lazy('Additional commit files'),
default='',
blank=True,
validators=[validate_extra_file],
help_text=ugettext_lazy(
'Additional files to include in commits, one per line; '
'please check documentation for more details.',
)
)
post_update_script = models.CharField(
verbose_name=ugettext_lazy('Post-update script'),
max_length=200,
default='',
blank=True,
choices=POST_UPDATE_SCRIPT_CHOICES,
help_text=ugettext_lazy(
'Script to be executed after receiving a repository update, '
'please check documentation for more details.'
),
)
pre_commit_script = models.CharField(
verbose_name=ugettext_lazy('Pre-commit script'),
max_length=200,
default='',
blank=True,
choices=PRE_COMMIT_SCRIPT_CHOICES,
help_text=ugettext_lazy(
'Script to be executed before committing translation, '
'please check documentation for more details.'
),
)
post_commit_script = models.CharField(
verbose_name=ugettext_lazy('Post-commit script'),
max_length=200,
default='',
blank=True,
choices=POST_COMMIT_SCRIPT_CHOICES,
help_text=ugettext_lazy(
'Script to be executed after committing translation, '
'please check documentation for more details.'
),
)
post_push_script = models.CharField(
verbose_name=ugettext_lazy('Post-push script'),
max_length=200,
default='',
blank=True,
choices=POST_PUSH_SCRIPT_CHOICES,
help_text=ugettext_lazy(
'Script to be executed after pushing translation to remote, '
'please check documentation for more details.'
),
)
post_add_script = models.CharField(
verbose_name=ugettext_lazy('Post-add script'),
max_length=200,
default='',
blank=True,
choices=POST_ADD_SCRIPT_CHOICES,
help_text=ugettext_lazy(
'Script to be executed after adding new translation, '
'please check documentation for more details.'
),
)
locked = models.BooleanField(
verbose_name=ugettext_lazy('Locked'),
default=False,
help_text=ugettext_lazy(
'Whether component is locked for translation updates.'
)
)
allow_translation_propagation = models.BooleanField(
verbose_name=ugettext_lazy('Allow translation propagation'),
default=True,
db_index=True,
help_text=ugettext_lazy(
'Whether translation updates in other components '
'will cause automatic translation in this one'
)
)
save_history = models.BooleanField(
verbose_name=ugettext_lazy('Save translation history'),
default=True,
help_text=ugettext_lazy(
'Whether Weblate should keep history of translations'
)
)
enable_suggestions = models.BooleanField(
verbose_name=ugettext_lazy('Enable suggestions'),
default=True,
help_text=ugettext_lazy(
'Whether to allow translation suggestions at all.'
)
)
suggestion_voting = models.BooleanField(
verbose_name=ugettext_lazy('Suggestion voting'),
default=False,
help_text=ugettext_lazy(
'Whether users can vote for suggestions.'
)
)
suggestion_autoaccept = models.PositiveSmallIntegerField(
verbose_name=ugettext_lazy('Autoaccept suggestions'),
default=0,
help_text=ugettext_lazy(
'Automatically accept suggestions with this number of votes,'
' use 0 to disable.'
),
validators=[validate_autoaccept],
)
check_flags = models.TextField(
verbose_name=ugettext_lazy('Quality checks flags'),
default='',
help_text=ugettext_lazy(
'Additional comma-separated flags to influence quality checks, '
'check documentation for possible values.'
),
validators=[validate_check_flags],
blank=True,
)
# Licensing
license = models.CharField(
verbose_name=ugettext_lazy('Translation license'),
max_length=150,
blank=True,
default='',
help_text=ugettext_lazy(
'Optional short summary of license used for translations.'
),
)
license_url = models.URLField(
verbose_name=ugettext_lazy('License URL'),
blank=True,
default='',
help_text=ugettext_lazy('Optional URL with license details.'),
)
agreement = models.TextField(
verbose_name=ugettext_lazy('Contributor agreement'),
blank=True,
default='',
help_text=ugettext_lazy(
'Agreement which needs to be approved before user can '
'translate this component.'
)
)
# Adding new language
new_lang = models.CharField(
verbose_name=ugettext_lazy('New translation'),
max_length=10,
choices=NEW_LANG_CHOICES,
default='contact',
help_text=ugettext_lazy(
'How to handle requests for creating new translations. '
'Please note that availability of choices depends on '
'the file format.'
),
)
# VCS config
merge_style = models.CharField(
verbose_name=ugettext_lazy('Merge style'),
max_length=10,
choices=MERGE_CHOICES,
default='merge',
help_text=ugettext_lazy(
'Define whether Weblate should merge upstream repository '
'or rebase changes onto it.'
),
)
commit_message = models.TextField(
verbose_name=ugettext_lazy('Commit message'),
help_text=ugettext_lazy(
'You can use format strings for various information, '
'please check documentation for more details.'
),
validators=[validate_commit_message],
default=DEFAULT_COMMIT_MESSAGE,
)
committer_name = models.CharField(
verbose_name=ugettext_lazy('Committer name'),
max_length=200,
default=DEFAULT_COMMITER_NAME,
)
committer_email = models.EmailField(
verbose_name=ugettext_lazy('Committer email'),
max_length=254,
default=DEFAULT_COMMITER_EMAIL,
)
language_regex = RegexField(
verbose_name=ugettext_lazy('Language filter'),
max_length=200,
default='^[^.]+$',
help_text=ugettext_lazy(
'Regular expression which is used to filter '
'translation when scanning for file mask.'
),
)
objects = SubProjectManager()
is_lockable = True
class Meta(object):
ordering = ['project__name', 'name']
unique_together = (
('project', 'name'),
('project', 'slug'),
)
permissions = (
('lock_subproject', "Can lock translation for translating"),
('can_see_git_repository', "Can see VCS repository URL"),
('view_reports', "Can display reports"),
)
app_label = 'trans'
verbose_name = ugettext_lazy('Component')
verbose_name_plural = ugettext_lazy('Components')
def __init__(self, *args, **kwargs):
'''
Constructor to initialize some cache properties.
'''
super(SubProject, self).__init__(*args, **kwargs)
self._repository_lock = None
self._file_format = None
self._template_store = None
self._all_flags = None
self._linked_subproject = None
self._repository = None
@property
def filemask_re(self):
return re.compile(
fnmatch.translate(self.filemask).replace('.*', '(.*)')
)
@property
def log_prefix(self):
return '{0}/{1}: '.format(self.project.slug, self.slug)
def has_acl(self, user):
'''
Checks whether current user is allowed to access this
subproject.
'''
return self.project.has_acl(user)
def check_acl(self, request):
'''
Raises an error if user is not allowed to access this project.
'''
self.project.check_acl(request)
def _reverse_url_name(self):
'''
Returns base name for URL reversing.
'''
return 'subproject'
def _reverse_url_kwargs(self):
'''
Returns kwargs for URL reversing.
'''
return {
'project': self.project.slug,
'subproject': self.slug
}
def get_widgets_url(self):
'''
Returns absolute URL for widgets.
'''
return get_site_url(
reverse('widgets', kwargs={'project': self.project.slug})
)
def get_share_url(self):
'''
Returns absolute URL usable for sharing.
'''
return get_site_url(
reverse('engage', kwargs={'project': self.project.slug})
)
def __unicode__(self):
return '%s/%s' % (self.project.__unicode__(), self.name)
def get_full_slug(self):
return '%s__%s' % (self.project.slug, self.slug)
def _get_path(self):
'''
Returns full path to subproject VCS repository.
'''
if self.is_repo_link:
return self.linked_subproject.get_path()
else:
return os.path.join(self.project.get_path(), self.slug)
@property
def repository_lock(self):
'''
Returns lock object for current translation instance.
'''
if self.is_repo_link:
return self.linked_subproject.repository_lock
if self._repository_lock is None:
lock_path = os.path.join(
self.project.get_path(),
self.slug + '.lock'
)
self._repository_lock = FileLock(
lock_path,
timeout=30
)
return self._repository_lock
def can_push(self):
'''
Returns true if push is possible for this subproject.
'''
if self.is_repo_link:
return self.linked_subproject.can_push()
return self.push != '' and self.push is not None
@property
def is_repo_link(self):
'''
        Checks whether the repository is just a link to another one.
'''
return is_repo_link(self.repo)
def can_add_language(self):
'''
Returns true if new languages can be added.
'''
return self.new_lang != 'none'
@property
def linked_subproject(self):
'''
Returns subproject for linked repo.
'''
if self._linked_subproject is None:
self._linked_subproject = SubProject.objects.get_linked(self.repo)
return self._linked_subproject
@property
def repository(self):
"""
VCS repository object.
"""
if self.is_repo_link:
return self.linked_subproject.repository
if self._repository is None:
self._repository = VCS_REGISTRY[self.vcs](self.get_path())
cache_key = '{0}-config-check'.format(self.get_full_slug())
if cache.get(cache_key) is None:
self._repository.check_config()
cache.set(cache_key, True)
return self._repository
def get_last_remote_commit(self):
'''
Returns latest remote commit we know.
'''
cache_key = '{0}-last-commit'.format(self.get_full_slug())
result = cache.get(cache_key)
if result is None:
result = self.repository.get_revision_info(
self.repository.last_remote_revision
)
cache.set(cache_key, result)
return result
def get_repo_url(self):
'''
Returns link to repository.
'''
if self.is_repo_link:
return self.linked_subproject.get_repo_url()
if not HIDE_REPO_CREDENTIALS:
return self.repo
return cleanup_repo_url(self.repo)
def get_repo_branch(self):
'''
Returns branch in repository.
'''
if self.is_repo_link:
return self.linked_subproject.branch
return self.branch
def get_export_url(self):
'''
Returns URL of exported VCS repository.
'''
if self.is_repo_link:
return self.linked_subproject.git_export
return self.git_export
def get_repoweb_link(self, filename, line):
'''
Generates link to source code browser for given file and line.
For linked repositories, it is possible to override linked
repository path here.
'''
if len(self.repoweb) == 0:
if self.is_repo_link:
return self.linked_subproject.get_repoweb_link(filename, line)
return None
return self.repoweb % {
'file': filename,
'line': line,
'branch': self.branch
}
def update_remote_branch(self, validate=False):
'''
Pulls from remote repository.
'''
if self.is_repo_link:
return self.linked_subproject.update_remote_branch(validate)
# Update
self.log_info('updating repository')
try:
with self.repository_lock:
start = time.time()
self.repository.update_remote()
timediff = time.time() - start
self.log_info('update took %.2f seconds:', timediff)
for line in self.repository.last_output.splitlines():
self.log_debug('update: %s', line)
return True
except RepositoryException as error:
error_text = str(error)
self.log_error('failed to update repository: %s', error_text)
if validate:
if 'Host key verification failed' in error_text:
raise ValidationError(_(
'Failed to verify SSH host key, please add '
'them in SSH page in the admin interface.'
))
raise ValidationError(
_('Failed to fetch repository: %s') % error_text
)
return False
def configure_repo(self, validate=False):
'''
Ensures repository is correctly configured and points to current
remote.
'''
if self.is_repo_link:
return
with self.repository_lock:
self.repository.configure_remote(self.repo, self.push, self.branch)
self.repository.set_committer(
self.committer_name,
self.committer_email
)
self.update_remote_branch(validate)
def configure_branch(self):
'''
        Ensures the local tracking branch exists and is checked out.
'''
if self.is_repo_link:
return
with self.repository_lock:
self.repository.configure_branch(self.branch)
def do_update(self, request=None, method=None):
'''
Wrapper for doing repository update and pushing them to translations.
'''
if self.is_repo_link:
return self.linked_subproject.do_update(request, method=method)
# pull remote
if not self.update_remote_branch():
return False
# do we have something to merge?
if not self.repo_needs_merge() and method != 'rebase':
return True
# commit possible pending changes
self.commit_pending(request, skip_push=True)
# update local branch
ret = self.update_branch(request, method=method)
# create translation objects for all files
try:
self.create_translations(request=request)
except ParseError:
ret = False
# Push after possible merge
if ret:
self.push_if_needed(request, do_update=False)
return ret
def push_if_needed(self, request, do_update=True, on_commit=True):
"""Wrapper to push if needed
Checks for:
* Enabled push on commit
* Configured push
* There is something to push
"""
if on_commit and not self.project.push_on_commit:
return False
if not self.can_push():
return False
if not self.repo_needs_push():
return False
return self.do_push(
request, force_commit=False, do_update=do_update
)
def do_push(self, request, force_commit=True, do_update=True):
'''
Wrapper for pushing changes to remote repo.
'''
if self.is_repo_link:
return self.linked_subproject.do_push(
request, force_commit=force_commit, do_update=do_update
)
# Do we have push configured
if not self.can_push():
if request is not None:
messages.error(
request,
_('Push is disabled for %s.') % self.__unicode__()
)
return False
# Commit any pending changes
if force_commit:
self.commit_pending(request, skip_push=True)
# Do we have anything to push?
if not self.repo_needs_push():
return False
if do_update:
# Update the repo
self.do_update(request)
# Were all changes merged?
if self.repo_needs_merge():
return False
# Do actual push
try:
self.log_info('pushing to remote repo')
with self.repository_lock:
self.repository.push(self.branch)
Change.objects.create(
action=Change.ACTION_PUSH,
user=request.user if request else None,
subproject=self,
)
vcs_post_push.send(sender=self.__class__, component=self)
return True
except RepositoryException as error:
self.log_error('failed to push on repo: %s', error)
msg = 'Error:\n%s' % str(error)
mail_admins(
'failed push on repo %s' % self.__unicode__(),
msg
)
if request is not None:
messages.error(
request,
_('Failed to push to remote branch on %s.') %
self.__unicode__()
)
return False
def do_reset(self, request=None):
'''
        Wrapper for resetting the repo to the same sources as the remote.
'''
if self.is_repo_link:
return self.linked_subproject.do_reset(request)
# First check we're up to date
self.update_remote_branch()
# Do actual reset
try:
            self.log_info('resetting to remote repo')
with self.repository_lock:
self.repository.reset(self.branch)
Change.objects.create(
action=Change.ACTION_RESET,
user=request.user if request else None,
subproject=self,
)
except RepositoryException as error:
self.log_error('failed to reset on repo')
msg = 'Error:\n%s' % str(error)
mail_admins(
'failed reset on repo %s' % self.__unicode__(),
msg
)
if request is not None:
messages.error(
request,
_('Failed to reset to remote branch on %s.') %
self.__unicode__()
)
return False
# create translation objects for all files
self.create_translations(request=request)
return True
def get_repo_link_url(self):
return 'weblate://%s/%s' % (self.project.slug, self.slug)
def get_linked_childs(self):
'''
Returns list of subprojects which link repository to us.
'''
return SubProject.objects.filter(
repo=self.get_repo_link_url()
)
def commit_pending(self, request, from_link=False, skip_push=False):
'''
Checks whether there is any translation which needs commit.
'''
if not from_link and self.is_repo_link:
return self.linked_subproject.commit_pending(
request, True, skip_push=skip_push
)
for translation in self.translation_set.all():
translation.commit_pending(request, skip_push=True)
# Process linked projects
for subproject in self.get_linked_childs():
subproject.commit_pending(request, True, skip_push=True)
if not from_link and not skip_push:
self.push_if_needed(request)
def notify_merge_failure(self, error, status):
'''
Sends out notifications on merge failure.
'''
# Notify subscribed users about failure
notify_merge_failure(self, error, status)
def handle_parse_error(self, error):
"""Handler for parse error."""
report_error(error, sys.exc_info())
self.notify_merge_failure(
str(error),
u''.join(traceback.format_stack()),
)
raise ParseError(str(error))
def update_branch(self, request=None, method=None):
'''
Updates current branch to match remote (if possible).
'''
if self.is_repo_link:
return self.linked_subproject.update_branch(request, method=method)
if method is None:
method = self.merge_style
# Merge/rebase
if method == 'rebase':
method = self.repository.rebase
error_msg = _('Failed to rebase our branch onto remote branch %s.')
action = Change.ACTION_REBASE
action_failed = Change.ACTION_FAILED_REBASE
else:
method = self.repository.merge
error_msg = _('Failed to merge remote branch into %s.')
action = Change.ACTION_MERGE
action_failed = Change.ACTION_FAILED_MERGE
with self.repository_lock:
try:
# Try to merge it
method(self.branch)
self.log_info(
'%s remote into repo',
self.merge_style,
)
if self.id:
Change.objects.create(
subproject=self,
user=request.user if request else None,
action=action,
)
# run post update hook
vcs_post_update.send(sender=self.__class__, component=self)
return True
except RepositoryException as error:
                # In case the merge has failed, recover
error = str(error)
status = self.repository.status()
# Log error
self.log_error(
'failed %s on repo: %s',
self.merge_style,
error
)
# Reset repo back
method(abort=True)
if self.id:
Change.objects.create(
subproject=self,
user=request.user if request else None,
action=action_failed,
target=str(error),
)
# Notify subscribers and admins
self.notify_merge_failure(error, status)
# Tell user (if there is any)
if request is not None:
messages.error(
request,
error_msg % self.__unicode__()
)
return False
def get_mask_matches(self):
'''
Returns files matching current mask.
'''
prefix = os.path.join(self.get_path(), '')
matches = glob(os.path.join(self.get_path(), self.filemask))
matches = set([f.replace(prefix, '') for f in matches])
# We want to list template among translations as well
if self.has_template():
if self.edit_template:
matches.add(self.template)
else:
matches.discard(self.template)
if self.new_base and self.new_base != self.template:
matches.discard(self.new_base)
# Remove symlinked translations
for filename in list(matches):
resolved = self.repository.resolve_symlinks(filename)
if resolved != filename and resolved in matches:
matches.discard(filename)
return sorted(matches)
def create_translations(self, force=False, langs=None, request=None):
'''
Loads translations from VCS.
'''
translations = set()
languages = set()
matches = self.get_mask_matches()
language_re = re.compile(self.language_regex)
for pos, path in enumerate(matches):
with transaction.atomic():
code = self.get_lang_code(path)
if langs is not None and code not in langs:
self.log_info('skipping %s', path)
continue
if not language_re.match(code):
self.log_info('skipping language %s', code)
continue
self.log_info(
'checking %s (%s) [%d/%d]',
path,
code,
pos + 1,
len(matches)
)
lang = Language.objects.auto_get_or_create(code=code)
if lang.code in languages:
self.log_error('duplicate language found: %s', lang.code)
continue
translation = Translation.objects.check_sync(
self, lang, code, path, force, request=request
)
translations.add(translation.id)
languages.add(lang.code)
# Delete possibly no longer existing translations
if langs is None:
todelete = self.translation_set.exclude(id__in=translations)
if todelete.exists():
with transaction.atomic():
self.log_info(
'removing stale translations: %s',
','.join([trans.language.code for trans in todelete])
)
todelete.delete()
# Process linked repos
for subproject in self.get_linked_childs():
self.log_info(
'updating linked project %s',
subproject
)
subproject.create_translations(force, langs, request=request)
self.log_info('updating completed')
def get_lang_code(self, path):
'''
Parses language code from path.
'''
# Parse filename
matches = self.filemask_re.match(path)
if not matches or not matches.lastindex:
# Assume English language for template
if path == self.template:
return 'en'
return ''
code = matches.group(1)
# Remove possible encoding part
if '.' in code and ('.utf' in code.lower() or '.iso' in code.lower()):
return code.split('.')[0]
return code
def sync_git_repo(self, validate=False):
'''
Brings VCS repo in sync with current model.
'''
if self.is_repo_link:
return
self.configure_repo(validate)
self.commit_pending(None)
self.configure_branch()
self.update_branch()
def set_default_branch(self):
'''
Set default VCS branch if empty
'''
if self.branch == '':
self.branch = VCS_REGISTRY[self.vcs].default_branch
def clean_repo_link(self):
'''
Validates repository link.
'''
try:
repo = SubProject.objects.get_linked(self.repo)
if repo is not None and repo.is_repo_link:
raise ValidationError(
_(
'Invalid link to a Weblate project, '
'can not link to linked repository!'
)
)
if repo.pk == self.pk:
raise ValidationError(
_(
'Invalid link to a Weblate project, '
'can not link to self!'
)
)
except (SubProject.DoesNotExist, ValueError):
raise ValidationError(
_(
'Invalid link to a Weblate project, '
'use weblate://project/component.'
)
)
if self.push != '':
raise ValidationError(
_('Push URL is not used when repository is linked!')
)
if self.git_export != '':
raise ValidationError(
_('Export URL is not used when repository is linked!')
)
def clean_lang_codes(self, matches):
'''
        Validates that there are no duplicate language codes found in the files.
'''
if len(matches) == 0 and not self.can_add_new_language():
raise ValidationError(_('The mask did not match any files!'))
langs = set()
translated_langs = set()
for match in matches:
code = self.get_lang_code(match)
if not code:
raise ValidationError(_(
'Got empty language code for %s, please check filemask!'
) % match)
lang = Language.objects.auto_get_or_create(code=code)
if code in langs:
raise ValidationError(_(
'There are more files for single language, please '
'adjust the mask and use components for translating '
'different resources.'
))
if lang.code in translated_langs:
raise ValidationError(_(
'Multiple translations were mapped to a single language '
'code (%s). You should disable SIMPLIFY_LANGUAGES '
'to prevent Weblate mapping similar languages to one.'
) % lang.code)
langs.add(code)
translated_langs.add(lang.code)
def clean_files(self, matches):
'''
Validates whether we can parse translation files.
'''
notrecognized = []
errors = []
dir_path = self.get_path()
for match in matches:
try:
parsed = self.file_format_cls.load(
os.path.join(dir_path, match),
)
if not self.file_format_cls.is_valid(parsed):
errors.append('%s: %s' % (
match, _('File does not seem to be valid!')
))
except ValueError:
notrecognized.append(match)
except Exception as error:
errors.append('%s: %s' % (match, str(error)))
if len(notrecognized) > 0:
msg = (
_('Format of %d matched files could not be recognized.') %
len(notrecognized)
)
raise ValidationError('%s\n%s' % (
msg,
'\n'.join(notrecognized)
))
if len(errors) > 0:
raise ValidationError('%s\n%s' % (
(_('Failed to parse %d matched files!') % len(errors)),
'\n'.join(errors)
))
def clean_new_lang(self):
'''
Validates new language choices.
'''
if self.new_lang == 'add':
if not self.file_format_cls.supports_new_language():
raise ValidationError(_(
'Chosen file format does not support adding '
'new translations as chosen in project settings.'
))
filename = self.get_new_base_filename()
if not self.file_format_cls.is_valid_base_for_new(filename):
raise ValidationError(_(
'Format of base file for new translations '
'was not recognized!'
))
elif self.new_lang != 'add' and self.new_base:
raise ValidationError(_(
'Base file for new translations is not used because of '
'component settings. '
'You probably want to enable automatic adding of new '
'translations.'
))
def clean_template(self):
"""
Validates template value.
"""
# Test for unexpected template usage
if self.template != '' and self.file_format_cls.monolingual is False:
raise ValidationError(
_('You can not use base file with bilingual translation!')
)
# Special case for Gettext
if self.template.endswith('.pot') and self.filemask.endswith('.po'):
raise ValidationError(
_('Using .pot file as base file is not supported!')
)
# Validate template loading
if self.has_template():
full_path = os.path.join(self.get_path(), self.template)
if not os.path.exists(full_path):
raise ValidationError(_('Template file not found!'))
try:
self.template_store
except ParseError as exc:
raise ValidationError(
_('Failed to parse translation base file: %s') % str(exc)
)
elif self.file_format_cls.monolingual:
raise ValidationError(
_('You can not use monolingual translation without base file!')
)
def clean(self):
'''
Validator fetches repository and tries to find translation files.
Then it checks them for validity.
'''
if self.new_lang == 'url' and self.project.instructions == '':
raise ValidationError(_(
'Please either fill in instructions URL '
'or use different option for adding new language.'
))
if self.license == '' and self.license_url != '':
raise ValidationError(_(
'License URL can not be used without license summary.'
))
# Skip validation if we don't have valid project
if self.project_id is None:
return
self.set_default_branch()
# Check if we should rename
if self.id:
old = SubProject.objects.get(pk=self.id)
self.check_rename(old)
if old.vcs != self.vcs:
# This could work, but the problem is that before changed
# object is saved the linked repos still see old vcs leading
# to horrible mess. Changing vcs from the manage.py shell
# works fine though.
raise ValidationError(
_('Changing version control system is not supported!')
)
# Check file format
if self.file_format not in FILE_FORMATS:
raise ValidationError(
_('Unsupported file format: {0}').format(self.file_format)
)
# Validate VCS repo
try:
self.sync_git_repo(True)
except RepositoryException as exc:
raise ValidationError(_('Failed to update repository: %s') % exc)
# Push repo is not used with link
if self.is_repo_link:
self.clean_repo_link()
# Template validation
self.clean_template()
matches = self.get_mask_matches()
# Verify language codes
self.clean_lang_codes(matches)
# Try parsing files
self.clean_files(matches)
# New language options
self.clean_new_lang()
# Suggestions
if self.suggestion_autoaccept and not self.suggestion_voting:
raise ValidationError(_(
'Automatically accepting suggestions can work only with '
'voting enabled!'
))
def get_template_filename(self):
'''
Creates absolute filename for template.
'''
return os.path.join(self.get_path(), self.template)
def get_new_base_filename(self):
'''
Creates absolute filename for base file for new translations.
'''
if not self.new_base:
return None
return os.path.join(self.get_path(), self.new_base)
def save(self, *args, **kwargs):
'''
Save wrapper which updates backend repository and regenerates
translation data.
'''
self.set_default_branch()
# Detect if VCS config has changed (so that we have to pull the repo)
changed_git = True
changed_setup = False
if self.id:
old = SubProject.objects.get(pk=self.id)
changed_git = (
(old.repo != self.repo) or
(old.branch != self.branch) or
(old.filemask != self.filemask)
)
changed_setup = (
(old.file_format != self.file_format) or
(old.edit_template != self.edit_template) or
(old.template != self.template)
)
# Detect slug changes and rename git repo
self.check_rename(old)
# Remove leading ./ from paths
self.filemask = cleanup_path(self.filemask)
self.template = cleanup_path(self.template)
extra_files = [
cleanup_path(x.strip()) for x in self.extra_commit_file.split('\n')
]
self.extra_commit_file = '\n'.join([x for x in extra_files if x])
# Save/Create object
super(SubProject, self).save(*args, **kwargs)
# Configure git repo if there were changes
if changed_git:
self.sync_git_repo()
        # Rescan for possibly new translations if there were changes; this
        # needs to be done after actually creating the object above
if changed_setup:
self.create_translations(force=True)
elif changed_git:
self.create_translations()
def _get_percents(self):
'''
Returns percentages of translation status.
'''
return self.translation_set.get_percents()
def repo_needs_commit(self):
'''
Checks whether there are some not committed changes.
'''
if self.is_repo_link:
return self.linked_subproject.repo_needs_commit()
return self.repository.needs_commit()
def repo_needs_merge(self):
'''
Checks whether there is something to merge from remote repository.
'''
if self.is_repo_link:
return self.linked_subproject.repo_needs_merge()
return self.repository.needs_merge(self.branch)
def repo_needs_push(self):
'''
Checks whether there is something to push to remote repository.
'''
if self.is_repo_link:
return self.linked_subproject.repo_needs_push()
return self.repository.needs_push(self.branch)
@property
def file_format_cls(self):
'''
Returns file format object.
'''
if (self._file_format is None or
self._file_format.name != self.file_format):
self._file_format = FILE_FORMATS[self.file_format]
return self._file_format
def has_template(self):
'''
Returns true if subproject is using template for translation
'''
monolingual = self.file_format_cls.monolingual
return (
(monolingual or monolingual is None) and
len(self.template) > 0 and
not self.template.endswith('.pot')
)
def load_template_store(self):
'''
Loads translate-toolkit store for template.
'''
return self.file_format_cls.load(
self.get_template_filename(),
)
@property
def template_store(self):
'''
Gets translate-toolkit store for template.
'''
# Do we need template?
if not self.has_template():
return None
if self._template_store is None:
try:
self._template_store = self.load_template_store()
except Exception as exc:
self.handle_parse_error(exc)
return self._template_store
@property
def last_change(self):
'''
Returns date of last change done in Weblate.
'''
try:
change = Change.objects.content().filter(
translation__subproject=self
)
return change[0].timestamp
except IndexError:
return None
@property
def all_flags(self):
'''
Returns parsed list of flags.
'''
if self._all_flags is None:
self._all_flags = (
self.check_flags.split(',') +
list(self.file_format_cls.check_flags)
)
return self._all_flags
def can_add_new_language(self):
"""Wrapper to check if we can add new language."""
if self.new_lang != 'add':
return False
if not self.file_format_cls.supports_new_language():
return False
base_filename = self.get_new_base_filename()
if not self.file_format_cls.is_valid_base_for_new(base_filename):
return False
return True
def add_new_language(self, language, request):
'''
Creates new language file.
'''
if not self.can_add_new_language():
raise ValueError('Not supported operation!')
base_filename = self.get_new_base_filename()
filename = self.file_format_cls.get_language_filename(
self.filemask,
language.code
)
fullname = os.path.join(self.get_path(), filename)
self.file_format_cls.add_language(
fullname,
language.code,
base_filename
)
translation = Translation.objects.create(
subproject=self,
language=language,
filename=filename,
language_code=language.code,
commit_message='Created new translation.'
)
translation_post_add.send(
sender=self.__class__,
translation=translation
)
translation.git_commit(
request,
get_author_name(request.user),
timezone.now(),
force_commit=True,
force_new=True,
)
translation.check_sync(
force=True,
request=request
)
def do_lock(self, user):
"""Locks component."""
self.locked = True
self.save()
if self.translation_set.exists():
Change.objects.create(
subproject=self,
user=user,
action=Change.ACTION_LOCK,
)
def do_unlock(self, user):
"""Locks component."""
self.locked = False
self.save()
if self.translation_set.exists():
Change.objects.create(
subproject=self,
user=user,
action=Change.ACTION_UNLOCK,
)
def get_editable_template(self):
if not self.edit_template or not self.has_template():
return None
return self.translation_set.get(filename=self.template)
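# Illustrative sketch (comments only, filemask value assumed): with
# filemask = 'po/*.po', filemask_re compiles to a pattern with one capture
# group, so get_lang_code('po/cs.po') returns 'cs',
# get_lang_code('po/cs.UTF-8.po') strips the encoding suffix and also returns
# 'cs', and the template file itself (for monolingual setups) falls back to 'en'.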
|
gpl-3.0
| -6,221,192,354,696,558,000 | 31.694797 | 79 | 0.555612 | false |
zseder/hunmisc
|
hunmisc/utils/plotting/matplotlib_simple_xy.py
|
1
|
1535
|
"""
Copyright 2011-13 Attila Zseder
Email: zseder@gmail.com
This file is part of hunmisc project
url: https://github.com/zseder/hunmisc
hunmisc is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""
import sys
import matplotlib.pyplot as plt
from matplotlib import rc
def read_data(istream):
r = [[],[],[],[],[]]
for l in istream:
le = l.strip().split()
[r[i].append(le[i]) for i in xrange(len(le))]
return r
def main():
d = read_data(open(sys.argv[1]))
rc('font', size=14)
ax = plt.subplot(111)
ax.plot(d[0], d[1], label="$M$", linewidth=2)
ax.plot(d[0], d[2], label="$l KL$", linewidth=2)
ax.plot(d[0], d[3], label="$l (H_q+KL)$", linewidth=2)
ax.plot(d[0], d[4], label="$M + l (H_q+KL)$", linewidth=2)
plt.xlabel("Bits")
ax.legend(loc=7)
plt.show()
#plt.savefig("fig.png")
if __name__ == "__main__":
main()
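# Illustrative sketch (assumed input layout, not shipped with the script):
# read_data() expects a whitespace-separated file with five columns per line,
# e.g.
#
#     8   1.20  0.35  0.80  2.10
#     16  1.10  0.30  0.75  1.95
#
# where column 0 becomes the x axis ("Bits") and columns 1-4 the four
# labelled curves.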
|
gpl-3.0
| 1,005,301,798,700,517,100 | 29.098039 | 78 | 0.673616 | false |
yugangw-msft/azure-cli
|
src/azure-cli/azure/cli/command_modules/backup/custom_common.py
|
2
|
10039
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import azure.cli.command_modules.backup.custom_help as custom_help
from azure.cli.command_modules.backup._client_factory import backup_protected_items_cf, \
protection_containers_cf, protected_items_cf, backup_protected_items_crr_cf, recovery_points_crr_cf
from azure.cli.core.util import CLIError
from azure.cli.core.azclierror import InvalidArgumentValueError
# pylint: disable=import-error
fabric_name = "Azure"
# pylint: disable=unused-argument
# Mapping of workload type
workload_type_map = {'MSSQL': 'SQLDataBase',
'SAPHANA': 'SAPHanaDatabase',
'SQLDataBase': 'SQLDataBase',
'SAPHanaDatabase': 'SAPHanaDatabase',
'VM': 'VM',
'AzureFileShare': 'AzureFileShare'}
def show_container(cmd, client, name, resource_group_name, vault_name, backup_management_type=None,
status="Registered", use_secondary_region=None):
container_type = custom_help.validate_and_extract_container_type(name, backup_management_type)
if use_secondary_region:
if container_type and container_type.lower() == "azurestorage":
raise InvalidArgumentValueError(
"""
--use-secondary-region flag is not supported for container of type AzureStorage.
Please either remove the flag or query for any other container type.
""")
if custom_help.is_native_name(name):
return protection_containers_cf(cmd.cli_ctx).get(vault_name, resource_group_name, fabric_name, name)
containers = _get_containers(client, container_type, status, resource_group_name, vault_name, name,
use_secondary_region)
return custom_help.get_none_one_or_many(containers)
def list_containers(client, resource_group_name, vault_name, backup_management_type, status="Registered",
use_secondary_region=None):
return _get_containers(client, backup_management_type, status, resource_group_name, vault_name,
use_secondary_region=use_secondary_region)
def show_policy(client, resource_group_name, vault_name, name):
return client.get(vault_name, resource_group_name, name)
def list_policies(client, resource_group_name, vault_name, workload_type=None, backup_management_type=None):
workload_type = _check_map(workload_type, workload_type_map)
filter_string = custom_help.get_filter_string({
'backupManagementType': backup_management_type,
'workloadType': workload_type})
policies = client.list(vault_name, resource_group_name, filter_string)
return custom_help.get_list_from_paged_response(policies)
def show_item(cmd, client, resource_group_name, vault_name, container_name, name, backup_management_type=None,
workload_type=None, use_secondary_region=None):
container_type = custom_help.validate_and_extract_container_type(container_name, backup_management_type)
if use_secondary_region:
if container_type and container_type.lower() == "azurestorage":
raise InvalidArgumentValueError(
"""
--use-secondary-region flag is not supported for container of type AzureStorage.
Please either remove the flag or query for any other container type.
""")
else:
if custom_help.is_native_name(name) and custom_help.is_native_name(container_name):
client = protected_items_cf(cmd.cli_ctx)
return client.get(vault_name, resource_group_name, fabric_name, container_name, name)
items = list_items(cmd, client, resource_group_name, vault_name, workload_type, container_name,
container_type, use_secondary_region)
if custom_help.is_native_name(name):
filtered_items = [item for item in items if item.name.lower() == name.lower()]
else:
filtered_items = [item for item in items if item.properties.friendly_name.lower() == name.lower()]
return custom_help.get_none_one_or_many(filtered_items)
def list_items(cmd, client, resource_group_name, vault_name, workload_type=None, container_name=None,
container_type=None, use_secondary_region=None):
workload_type = _check_map(workload_type, workload_type_map)
filter_string = custom_help.get_filter_string({
'backupManagementType': container_type,
'itemType': workload_type})
if use_secondary_region:
if container_type and container_type.lower() == "azurestorage":
raise InvalidArgumentValueError(
"""
--use-secondary-region flag is not supported for --backup-management-type AzureStorage.
Please either remove the flag or query for any other backup-management-type.
""")
client = backup_protected_items_crr_cf(cmd.cli_ctx)
items = client.list(vault_name, resource_group_name, filter_string)
paged_items = custom_help.get_list_from_paged_response(items)
if container_name:
if custom_help.is_native_name(container_name):
return [item for item in paged_items if
_is_container_name_match(item, container_name)]
return [item for item in paged_items if
item.properties.container_name.lower().split(';')[-1] == container_name.lower()]
return paged_items
def show_recovery_point(cmd, client, resource_group_name, vault_name, container_name, item_name, name,
workload_type=None, backup_management_type=None, use_secondary_region=None):
items_client = backup_protected_items_cf(cmd.cli_ctx)
item = show_item(cmd, items_client, resource_group_name, vault_name, container_name, item_name,
backup_management_type, workload_type, use_secondary_region)
custom_help.validate_item(item)
if isinstance(item, list):
raise CLIError("Multiple items found. Please give native names instead.")
# Get container and item URIs
container_uri = custom_help.get_protection_container_uri_from_id(item.id)
item_uri = custom_help.get_protected_item_uri_from_id(item.id)
container_type = custom_help.validate_and_extract_container_type(container_name, backup_management_type)
if use_secondary_region:
if container_type and container_type.lower() == "azurestorage":
raise InvalidArgumentValueError(
"""
--use-secondary-region flag is not supported for --backup-management-type AzureStorage.
Please either remove the flag or query for any other backup-management-type.
""")
client = recovery_points_crr_cf(cmd.cli_ctx)
recovery_points = client.list(vault_name, resource_group_name, fabric_name, container_uri, item_uri, None)
paged_rps = custom_help.get_list_from_paged_response(recovery_points)
filtered_rps = [rp for rp in paged_rps if rp.name.lower() == name.lower()]
return custom_help.get_none_one_or_many(filtered_rps)
return client.get(vault_name, resource_group_name, fabric_name, container_uri, item_uri, name)
def delete_policy(client, resource_group_name, vault_name, name):
client.delete(vault_name, resource_group_name, name)
def new_policy(client, resource_group_name, vault_name, policy, policy_name, container_type, workload_type):
workload_type = _check_map(workload_type, workload_type_map)
policy_object = custom_help.get_policy_from_json(client, policy)
policy_object.properties.backup_management_type = container_type
policy_object.properties.workload_type = workload_type
return client.create_or_update(vault_name, resource_group_name, policy_name, policy_object)
def _get_containers(client, backup_management_type, status, resource_group_name, vault_name, container_name=None,
use_secondary_region=None):
filter_dict = {
'backupManagementType': backup_management_type,
'status': status
}
if container_name and not custom_help.is_native_name(container_name):
filter_dict['friendlyName'] = container_name
filter_string = custom_help.get_filter_string(filter_dict)
if use_secondary_region:
if backup_management_type.lower() == "azurestorage":
raise InvalidArgumentValueError(
"""
--use-secondary-region flag is not supported for --backup-management-type AzureStorage.
Please either remove the flag or query for any other backup-management-type.
""")
paged_containers = client.list(vault_name, resource_group_name, filter_string)
containers = custom_help.get_list_from_paged_response(paged_containers)
if container_name and custom_help.is_native_name(container_name):
return [container for container in containers if container.name == container_name]
return containers
def _is_container_name_match(item, container_name):
if item.properties.container_name.lower() == container_name.lower():
return True
name = ';'.join(container_name.split(';')[1:])
if item.properties.container_name.lower() == name.lower():
return True
return False
def _check_map(item_type, item_type_map):
if item_type is None:
return None
if item_type_map.get(item_type) is not None:
return item_type_map[item_type]
error_text = "{} is an invalid argument.".format(item_type)
recommendation_text = "{} are the allowed values.".format(str(list(item_type_map.keys())))
az_error = InvalidArgumentValueError(error_text)
az_error.set_recommendation(recommendation_text)
raise az_error
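# Illustrative sketch (comments only): _check_map() normalises the user-supplied
# workload type through workload_type_map, so
#     _check_map('MSSQL', workload_type_map) -> 'SQLDataBase'
#     _check_map(None, workload_type_map) -> None
# and an unknown value such as 'Oracle' raises InvalidArgumentValueError with a
# recommendation listing the allowed keys.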
|
mit
| -6,428,039,126,008,457,000 | 46.804762 | 114 | 0.665604 | false |
revarbat/snappy-reprap
|
stl_normalize.py
|
1
|
15920
|
#!/usr/bin/env python
import os
import os.path
import sys
import math
import time
import struct
import argparse
import platform
import itertools
import subprocess
guiscad_template = """\
module showlines(clr, lines) {{
for (line = lines) {{
delta = line[1]-line[0];
dist = norm(delta);
theta = atan2(delta[1],delta[0]);
phi = atan2(delta[2],norm([delta[0],delta[1]]));
translate(line[0]) {{
rotate([0, 90-phi, theta]) {{
color(clr) cylinder(d=0.5, h=dist);
}}
}}
}}
}}
module showfaces(clr, faces) {{
color(clr) {{
for (face = faces) {{
polyhedron(points=face, faces=[[0, 1, 2], [0, 2, 1]], convexity=2);
}}
}}
}}
showlines([1.0, 0.0, 1.0], [
{dupe_edges}
]);
showlines([1.0, 0.0, 0.0], [
{hole_edges}
]);
showfaces([1.0, 0.0, 1.0], [
{dupe_faces}
]);
color([0.0, 1.0, 0.0, 0.2]) import("{filename}", convexity=100);
"""
def dot(a, b):
return sum(p*q for p, q in zip(a, b))
def cross(a, b):
return [
a[1]*b[2] - a[2]*b[1],
a[2]*b[0] - a[0]*b[2],
a[0]*b[1] - a[1]*b[0]
]
def vsub(a, b):
return [i - j for i, j in zip(a, b)]
def vsdiv(v, s):
return [x / s for x in v]
def dist(v):
return math.sqrt(sum([x*x for x in v]))
def normalize(v):
return vsdiv(v, dist(v))
def is_clockwise(a, b, c, n):
return dot(n, cross(vsub(b, a), vsub(c, a))) < 0
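# Illustrative sketch (comments only, sample triangle assumed): for
# a = (0, 0, 0), b = (1, 0, 0), c = (0, 1, 0) and normal n = (0, 0, 1),
# cross(vsub(b, a), vsub(c, a)) is (0, 0, 1), its dot product with n is
# positive, so is_clockwise(a, b, c, n) returns False (counter-clockwise
# winding as seen from +Z).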
def point_cmp(p1, p2):
for i in [2, 1, 0]:
val = cmp(p1[i], p2[i])
if val != 0:
return val
return 0
def facet_cmp(f1, f2):
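    # Compare facets by their per-axis sorted coordinate lists, checking the
    # Z axis first, then Y, then X, so sorted output is reproducible.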
cl1 = [sorted([p[i] for p in f1]) for i in range(3)]
cl2 = [sorted([p[i] for p in f2]) for i in range(3)]
for i in [2, 1, 0]:
for c1, c2 in itertools.izip_longest(cl1[i], cl2[i]):
if c1 is None:
return -1
val = cmp(c1, c2)
if val != 0:
return val
return 0
def float_fmt(val):
s = "%.3f" % val
while len(s) > 1 and s[-1:] in '0.':
if s[-1:] == '.':
s = s[:-1]
break
s = s[:-1]
if (s == '-0'):
s = '0'
return s
def vertex_fmt(vals):
return " ".join([float_fmt(v) for v in vals])
def vertex_fmt2(vals):
return "[" + (", ".join([float_fmt(v) for v in vals])) + "]"
class PointCloud(object):
points = []
pointhash = {}
def __init__(self):
self.points = []
self.pointhash = {}
self.minx = 9e9
self.miny = 9e9
self.minz = 9e9
self.maxx = -9e9
self.maxy = -9e9
self.maxz = -9e9
def update_volume(self, x, y, z):
if x < self.minx:
self.minx = x
if x > self.maxx:
self.maxx = x
if y < self.miny:
self.miny = y
if y > self.maxy:
self.maxy = y
if z < self.minz:
self.minz = z
if z > self.maxz:
self.maxz = z
def add_or_get_point(self, x, y, z):
pt = (
round(x, 4),
round(y, 4),
round(z, 4),
)
key = "%.3f %.3f %.3f" % pt
if key in self.pointhash:
return self.pointhash[key]
idx = len(self.points)
self.pointhash[key] = idx
self.points.append(pt)
self.update_volume(x, y, z)
return idx
def point_coords(self, idx):
return self.points[idx]
def facet_coords(self, facet):
return (
self.point_coords(facet[0]),
self.point_coords(facet[1]),
self.point_coords(facet[2]),
)
class StlEndOfFileException(Exception):
pass
class StlMalformedLineException(Exception):
pass
class StlData(object):
def __init__(self):
self.points = PointCloud()
self.facets = []
self.edgehash = {}
self.facehash = {}
self.filename = ""
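    # edgehash counts how many facets share each undirected edge and
    # facehash counts how many facets share each unordered vertex triple;
    # the manifold checks flag edge counts other than 2 and face counts
    # other than 1.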
def _mark_edge(self, vertex1, vertex2):
edge = [vertex1, vertex2]
edge.sort()
edge = tuple(edge)
if edge not in self.edgehash:
self.edgehash[edge] = 0
self.edgehash[edge] += 1
return self.edgehash[edge]
def _mark_face(self, vertex1, vertex2, vertex3):
self._mark_edge(vertex1, vertex2)
self._mark_edge(vertex2, vertex3)
self._mark_edge(vertex3, vertex1)
face = [vertex1, vertex2, vertex3]
face.sort()
face = tuple(face)
if face not in self.facehash:
self.facehash[face] = 0
self.facehash[face] += 1
return self.facehash[face]
def _read_ascii_line(self, f, watchwords=None):
line = f.readline(1024)
if line == "":
raise StlEndOfFileException()
        words = line.strip(' \t\n\r').lower().split()
        if not words:
            # Blank or whitespace-only line; treat it as malformed so the
            # facet reader skips ahead instead of raising an IndexError.
            raise StlMalformedLineException()
        if words[0] == 'endsolid':
            raise StlEndOfFileException()
argstart = 0
if watchwords:
watchwords = watchwords.lower().split()
argstart = len(watchwords)
for i in xrange(argstart):
if words[i] != watchwords[i]:
raise StlMalformedLineException()
return [float(val) for val in words[argstart:]]
def _read_ascii_vertex(self, f):
point = self._read_ascii_line(f, watchwords='vertex')
return self.points.add_or_get_point(*point)
def _read_ascii_facet(self, f):
while True:
try:
normal = self._read_ascii_line(f, watchwords='facet normal')
self._read_ascii_line(f, watchwords='outer loop')
vertex1 = self._read_ascii_vertex(f)
vertex2 = self._read_ascii_vertex(f)
vertex3 = self._read_ascii_vertex(f)
self._read_ascii_line(f, watchwords='endloop')
self._read_ascii_line(f, watchwords='endfacet')
if vertex1 == vertex2:
continue # zero area facet. Skip to next facet.
if vertex2 == vertex3:
continue # zero area facet. Skip to next facet.
if vertex3 == vertex1:
continue # zero area facet. Skip to next facet.
except StlEndOfFileException:
return None
except StlMalformedLineException:
continue # Skip to next facet.
return (vertex1, vertex2, vertex3, normal)
def _read_binary_facet(self, f):
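        # Each binary STL facet record is 50 bytes: a 3-float normal,
        # three 3-float vertices, and a 2-byte attribute count (ignored).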
data = struct.unpack('<3f 3f 3f 3f H', f.read(4*4*3+2))
normal = data[0:3]
vertex1 = data[3:6]
vertex2 = data[6:9]
vertex3 = data[9:12]
v1 = self.points.add_or_get_point(*vertex1)
v2 = self.points.add_or_get_point(*vertex2)
v3 = self.points.add_or_get_point(*vertex3)
return (v1, v2, v3, normal)
def sort_facet(self, facet):
v1, v2, v3, norm = facet
p1 = self.points.point_coords(v1)
p2 = self.points.point_coords(v2)
p3 = self.points.point_coords(v3)
if dist(norm) > 0:
# Make sure vertex ordering is counter-clockwise,
# relative to the outward facing normal.
if is_clockwise(p1, p2, p3, norm):
v1, v3, v2 = (v1, v2, v3)
p1, p3, p2 = (p1, p2, p3)
else:
# If no normal was specified, we should calculate it, relative
# to the counter-clockwise vertices (as seen from outside).
norm = cross(vsub(p3, p1), vsub(p2, p1))
if dist(norm) > 1e-6:
norm = normalize(norm)
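        # Rotate the vertex order (keeping the winding direction) so the
        # lexicographically smallest vertex comes first, giving each facet a
        # canonical form for sorting and comparison.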
cmp23 = point_cmp(p2, p3)
if point_cmp(p1, p2) > 0 and cmp23 < 0:
return (v2, v3, v1, norm)
if point_cmp(p1, p3) > 0 and cmp23 > 0:
return (v3, v1, v2, norm)
return (v1, v2, v3, norm)
def read_file(self, filename):
self.filename = filename
with open(filename, 'rb') as f:
line = f.readline(80)
if line == "":
return # End of file.
if line[0:6].lower() == "solid ":
while True:
facet = self._read_ascii_facet(f)
if facet is None:
break
facet = self.sort_facet(facet)
vertex1, vertex2, vertex3, normal = facet
self.facets.append(facet)
self._mark_face(vertex1, vertex2, vertex3)
else:
chunk = f.read(4)
facets = struct.unpack('<I', chunk)[0]
while facets > 0:
facets -= 1
facet = self._read_binary_facet(f)
if facet is None:
break
facet = self.sort_facet(facet)
vertex1, vertex2, vertex3, normal = facet
self.facets.append(facet)
self._mark_face(vertex1, vertex2, vertex3)
def write_file(self, filename, binary=False):
if binary:
self._write_binary_file(filename)
else:
self._write_ascii_file(filename)
def _write_ascii_file(self, filename):
with open(filename, 'wb') as f:
f.write("solid Model\n")
for facet in self.facets:
v1, v2, v3, norm = facet
v1 = self.points.point_coords(v1)
v2 = self.points.point_coords(v2)
v3 = self.points.point_coords(v3)
f.write(" facet normal %s\n" % vertex_fmt(norm))
f.write(" outer loop\n")
f.write(" vertex %s\n" % vertex_fmt(v1))
f.write(" vertex %s\n" % vertex_fmt(v2))
f.write(" vertex %s\n" % vertex_fmt(v3))
f.write(" endloop\n")
f.write(" endfacet\n")
f.write("endsolid Model\n")
def _write_binary_file(self, filename):
with open(filename, 'wb') as f:
f.write('%-80s' % 'Binary STL Model')
f.write(struct.pack('<I', len(self.facets)))
for facet in self.facets:
v1, v2, v3, norm = facet
v1 = self.points.point_coords(v1)
v2 = self.points.point_coords(v2)
v3 = self.points.point_coords(v3)
f.write(struct.pack('<3f', *norm))
f.write(struct.pack('<3f', *v1))
f.write(struct.pack('<3f', *v2))
f.write(struct.pack('<3f', *v3))
f.write(struct.pack('<H', 0))
def _gui_display_manifold(self, hole_edges, dupe_edges, dupe_faces):
global guiscad_template
modulename = os.path.basename(self.filename)
if modulename.endswith('.stl'):
modulename = modulename[:-4]
tmpfile = "mani-{0}.scad".format(modulename)
with open(tmpfile, 'w') as f:
f.write(guiscad_template.format(
hole_edges=hole_edges,
dupe_edges=dupe_edges,
dupe_faces=dupe_faces,
modulename=modulename,
filename=self.filename,
))
if platform.system() == 'Darwin':
subprocess.call(['open', tmpfile])
time.sleep(5)
else:
subprocess.call(['openscad', tmpfile])
time.sleep(5)
os.remove(tmpfile)
def _check_manifold_duplicate_faces(self):
found = []
for face, count in self.facehash.iteritems():
if count != 1:
v1 = vertex_fmt2(self.points.point_coords(face[0]))
v2 = vertex_fmt2(self.points.point_coords(face[1]))
v3 = vertex_fmt2(self.points.point_coords(face[2]))
found.append((v1, v2, v3))
return found
def _check_manifold_hole_edges(self):
found = []
for edge, count in self.edgehash.iteritems():
if count == 1:
v1 = vertex_fmt2(self.points.point_coords(edge[0]))
v2 = vertex_fmt2(self.points.point_coords(edge[1]))
found.append((v1, v2))
return found
def _check_manifold_excess_edges(self):
found = []
for edge, count in self.edgehash.iteritems():
if count > 2:
v1 = vertex_fmt2(self.points.point_coords(edge[0]))
v2 = vertex_fmt2(self.points.point_coords(edge[1]))
found.append((v1, v2))
return found
def check_manifold(self, verbose=False, gui=False):
is_manifold = True
faces = self._check_manifold_duplicate_faces()
for v1, v2, v3 in faces:
is_manifold = False
print("NON-MANIFOLD DUPLICATE FACE! {3}: {0} - {1} - {2}"
.format(v1, v2, v3, self.filename))
if gui:
dupe_faces = ",\n".join(
[" [{0}, {1}, {2}]".format(*coords) for coords in faces]
)
edges = self._check_manifold_hole_edges()
for v1, v2 in edges:
is_manifold = False
print("NON-MANIFOLD HOLE EDGE! {2}: {0} - {1}"
.format(v1, v2, self.filename))
if gui:
hole_edges = ",\n".join(
[" [{0}, {1}]".format(*coords) for coords in edges]
)
edges = self._check_manifold_excess_edges()
for v1, v2 in edges:
is_manifold = False
print("NON-MANIFOLD DUPLICATE EDGE! {2}: {0} - {1}"
.format(v1, v2, self.filename))
if gui:
dupe_edges = ",\n".join(
[" [{0}, {1}]".format(*coords) for coords in edges]
)
if is_manifold:
if gui or verbose:
print("%s is manifold." % self.filename)
elif gui:
self._gui_display_manifold(hole_edges, dupe_edges, dupe_faces)
return is_manifold
def sort_facets(self):
self.facets = sorted(
self.facets,
cmp=lambda x, y: facet_cmp(
self.points.facet_coords(x),
self.points.facet_coords(y)
)
)
def main():
parser = argparse.ArgumentParser(prog='stl_normalize')
parser.add_argument('-v', '--verbose',
help='Show verbose output.',
action="store_true")
parser.add_argument('-c', '--check-manifold',
help='Perform manifold validation of model.',
action="store_true")
parser.add_argument('-g', '--gui-display',
help='Show non-manifold edges in GUI.',
action="store_true")
parser.add_argument('-b', '--write-binary',
help='Use binary STL format for output.',
action="store_true")
parser.add_argument('-o', '--outfile',
help='Write normalized STL to file.')
parser.add_argument('infile', help='Input STL filename.')
args = parser.parse_args()
stl = StlData()
stl.read_file(args.infile)
if args.verbose:
print("Read {0} ({1:.1f} x {2:.1f} x {3:.1f})".format(
args.infile,
(stl.points.maxx-stl.points.minx),
(stl.points.maxy-stl.points.miny),
(stl.points.maxz-stl.points.minz),
))
if args.check_manifold or args.gui_display:
if not stl.check_manifold(verbose=args.verbose, gui=args.gui_display):
sys.exit(-1)
if args.outfile:
stl.sort_facets()
stl.write_file(args.outfile, binary=args.write_binary)
if args.verbose:
print("Wrote {0} ({1})".format(
args.outfile,
("binary" if args.write_binary else "ASCII"),
))
sys.exit(0)
if __name__ == "__main__":
main()
# vim: expandtab tabstop=4 shiftwidth=4 softtabstop=4 nowrap
|
gpl-2.0
| 5,155,096,900,616,418,000 | 30.277014 | 79 | 0.501382 | false |
olivierdalang/stdm
|
ui/ui_survey.py
|
1
|
4260
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_survey.ui'
#
# Created: Wed Apr 29 17:42:49 2015
# by: PyQt4 UI code generator 4.11.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_frmSurvey(object):
def setupUi(self, frmSurvey):
frmSurvey.setObjectName(_fromUtf8("frmSurvey"))
frmSurvey.resize(424, 364)
self.gridLayout = QtGui.QGridLayout(frmSurvey)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label_2 = QtGui.QLabel(frmSurvey)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.txtSurveyCode = QtGui.QLineEdit(frmSurvey)
self.txtSurveyCode.setMinimumSize(QtCore.QSize(0, 30))
self.txtSurveyCode.setReadOnly(True)
self.txtSurveyCode.setObjectName(_fromUtf8("txtSurveyCode"))
self.gridLayout.addWidget(self.txtSurveyCode, 1, 1, 1, 1)
self.label = QtGui.QLabel(frmSurvey)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 2, 0, 1, 1)
self.dtEnumDate = QtGui.QDateEdit(frmSurvey)
self.dtEnumDate.setMinimumSize(QtCore.QSize(0, 30))
self.dtEnumDate.setCalendarPopup(True)
self.dtEnumDate.setObjectName(_fromUtf8("dtEnumDate"))
self.gridLayout.addWidget(self.dtEnumDate, 2, 1, 1, 1)
self.tabWidget = QtGui.QTabWidget(frmSurvey)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.tab = ForeignKeyMapper()
self.tab.setObjectName(_fromUtf8("tab"))
self.tabWidget.addTab(self.tab, _fromUtf8(""))
self.tab_2 = ForeignKeyMapper()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.tab_3 = ForeignKeyMapper()
self.tab_3.setObjectName(_fromUtf8("tab_3"))
self.tabWidget.addTab(self.tab_3, _fromUtf8(""))
self.tab_4 = ForeignKeyMapper()
self.tab_4.setObjectName(_fromUtf8("tab_4"))
self.tabWidget.addTab(self.tab_4, _fromUtf8(""))
self.gridLayout.addWidget(self.tabWidget, 3, 0, 1, 2)
self.buttonBox = QtGui.QDialogButtonBox(frmSurvey)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Save)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.gridLayout.addWidget(self.buttonBox, 4, 0, 1, 2)
self.vlNotification = QtGui.QVBoxLayout()
self.vlNotification.setObjectName(_fromUtf8("vlNotification"))
self.gridLayout.addLayout(self.vlNotification, 0, 0, 1, 2)
self.retranslateUi(frmSurvey)
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(frmSurvey)
def retranslateUi(self, frmSurvey):
frmSurvey.setWindowTitle(_translate("frmSurvey", "Survey Details", None))
self.label_2.setText(_translate("frmSurvey", "Survey Code", None))
self.label.setText(_translate("frmSurvey", "Enumeration Date", None))
self.dtEnumDate.setDisplayFormat(_translate("frmSurvey", "dd/MM/yyyy", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("frmSurvey", "Enumerator", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("frmSurvey", "Respondent", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _translate("frmSurvey", "Witnesses", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), _translate("frmSurvey", "Priority Services", None))
from .foreign_key_mapper import ForeignKeyMapper
|
gpl-2.0
| -4,788,625,040,332,973,000 | 48.117647 | 121 | 0.676995 | false |
qedsoftware/commcare-hq
|
corehq/apps/users/tests/test_get_owner_ids.py
|
1
|
1346
|
from django.test import TestCase
from corehq.apps.domain.models import Domain
from corehq.apps.groups.models import Group
from corehq.apps.users.models import CommCareUser
class OwnerIDTestCase(TestCase):
@staticmethod
def _mock_user(id):
class FakeUser(CommCareUser):
@property
def project(self):
return Domain()
user = FakeUser(_id=id, domain='test-domain')
return user
def test_get_owner_id_no_groups(self):
user = self._mock_user('test-user-1')
ids = user.get_owner_ids()
self.assertEqual(1, len(ids))
self.assertEqual(user._id, ids[0])
def test_case_sharing_groups_included(self):
user = self._mock_user('test-user-2')
group = Group(domain='test-domain', users=['test-user-2'], case_sharing=True)
group.save()
ids = user.get_owner_ids()
self.assertEqual(2, len(ids))
self.assertEqual(user._id, ids[0])
self.assertEqual(group._id, ids[1])
def test_non_case_sharing_groups_not_included(self):
user = self._mock_user('test-user-3')
group = Group(domain='test-domain', users=['test-user-3'], case_sharing=False)
group.save()
ids = user.get_owner_ids()
self.assertEqual(1, len(ids))
self.assertEqual(user._id, ids[0])
|
bsd-3-clause
| 808,569,801,345,940,600 | 31.829268 | 86 | 0.618128 | false |
sffjunkie/home-assistant
|
homeassistant/components/media_player/__init__.py
|
1
|
19925
|
"""
Component to interface with various media players.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/media_player/
"""
import logging
import os
import voluptuous as vol
from homeassistant.components import discovery
from homeassistant.config import load_yaml_config_file
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
STATE_OFF, STATE_UNKNOWN, STATE_PLAYING, STATE_IDLE,
ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON,
SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_SET,
SERVICE_VOLUME_MUTE, SERVICE_TOGGLE, SERVICE_MEDIA_STOP,
SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK)
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'media_player'
SCAN_INTERVAL = 10
ENTITY_ID_FORMAT = DOMAIN + '.{}'
DISCOVERY_PLATFORMS = {
discovery.SERVICE_CAST: 'cast',
discovery.SERVICE_SONOS: 'sonos',
discovery.SERVICE_PLEX: 'plex',
discovery.SERVICE_SQUEEZEBOX: 'squeezebox',
discovery.SERVICE_PANASONIC_VIERA: 'panasonic_viera',
discovery.SERVICE_ROKU: 'roku',
}
SERVICE_PLAY_MEDIA = 'play_media'
SERVICE_SELECT_SOURCE = 'select_source'
ATTR_MEDIA_VOLUME_LEVEL = 'volume_level'
ATTR_MEDIA_VOLUME_MUTED = 'is_volume_muted'
ATTR_MEDIA_SEEK_POSITION = 'seek_position'
ATTR_MEDIA_CONTENT_ID = 'media_content_id'
ATTR_MEDIA_CONTENT_TYPE = 'media_content_type'
ATTR_MEDIA_DURATION = 'media_duration'
ATTR_MEDIA_TITLE = 'media_title'
ATTR_MEDIA_ARTIST = 'media_artist'
ATTR_MEDIA_ALBUM_NAME = 'media_album_name'
ATTR_MEDIA_ALBUM_ARTIST = 'media_album_artist'
ATTR_MEDIA_TRACK = 'media_track'
ATTR_MEDIA_SERIES_TITLE = 'media_series_title'
ATTR_MEDIA_SEASON = 'media_season'
ATTR_MEDIA_EPISODE = 'media_episode'
ATTR_MEDIA_CHANNEL = 'media_channel'
ATTR_MEDIA_PLAYLIST = 'media_playlist'
ATTR_APP_ID = 'app_id'
ATTR_APP_NAME = 'app_name'
ATTR_SUPPORTED_MEDIA_COMMANDS = 'supported_media_commands'
ATTR_INPUT_SOURCE = 'source'
ATTR_INPUT_SOURCE_LIST = 'source_list'
ATTR_MEDIA_ENQUEUE = 'enqueue'
MEDIA_TYPE_MUSIC = 'music'
MEDIA_TYPE_TVSHOW = 'tvshow'
MEDIA_TYPE_VIDEO = 'movie'
MEDIA_TYPE_EPISODE = 'episode'
MEDIA_TYPE_CHANNEL = 'channel'
MEDIA_TYPE_PLAYLIST = 'playlist'
SUPPORT_PAUSE = 1
SUPPORT_SEEK = 2
SUPPORT_VOLUME_SET = 4
SUPPORT_VOLUME_MUTE = 8
SUPPORT_PREVIOUS_TRACK = 16
SUPPORT_NEXT_TRACK = 32
SUPPORT_TURN_ON = 128
SUPPORT_TURN_OFF = 256
SUPPORT_PLAY_MEDIA = 512
SUPPORT_VOLUME_STEP = 1024
SUPPORT_SELECT_SOURCE = 2048
SUPPORT_STOP = 4096
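# The SUPPORT_* values are bitmask flags; a player's supported_media_commands
# property ORs them together and the support_* properties below test them.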
# simple services that only take entity_id(s) as optional argument
SERVICE_TO_METHOD = {
SERVICE_TURN_ON: 'turn_on',
SERVICE_TURN_OFF: 'turn_off',
SERVICE_TOGGLE: 'toggle',
SERVICE_VOLUME_UP: 'volume_up',
SERVICE_VOLUME_DOWN: 'volume_down',
SERVICE_MEDIA_PLAY_PAUSE: 'media_play_pause',
SERVICE_MEDIA_PLAY: 'media_play',
SERVICE_MEDIA_PAUSE: 'media_pause',
SERVICE_MEDIA_STOP: 'media_stop',
SERVICE_MEDIA_NEXT_TRACK: 'media_next_track',
SERVICE_MEDIA_PREVIOUS_TRACK: 'media_previous_track',
SERVICE_SELECT_SOURCE: 'select_source'
}
ATTR_TO_PROPERTY = [
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_DURATION,
ATTR_MEDIA_TITLE,
ATTR_MEDIA_ARTIST,
ATTR_MEDIA_ALBUM_NAME,
ATTR_MEDIA_ALBUM_ARTIST,
ATTR_MEDIA_TRACK,
ATTR_MEDIA_SERIES_TITLE,
ATTR_MEDIA_SEASON,
ATTR_MEDIA_EPISODE,
ATTR_MEDIA_CHANNEL,
ATTR_MEDIA_PLAYLIST,
ATTR_APP_ID,
ATTR_APP_NAME,
ATTR_SUPPORTED_MEDIA_COMMANDS,
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
]
# Service call validation schemas
MEDIA_PLAYER_SCHEMA = vol.Schema({
ATTR_ENTITY_ID: cv.entity_ids,
})
MEDIA_PLAYER_MUTE_VOLUME_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_MEDIA_VOLUME_MUTED): cv.boolean,
})
MEDIA_PLAYER_SET_VOLUME_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float,
})
MEDIA_PLAYER_MEDIA_SEEK_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_MEDIA_SEEK_POSITION):
vol.All(vol.Coerce(float), vol.Range(min=0)),
})
MEDIA_PLAYER_PLAY_MEDIA_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_MEDIA_CONTENT_TYPE): cv.string,
vol.Required(ATTR_MEDIA_CONTENT_ID): cv.string,
ATTR_MEDIA_ENQUEUE: cv.boolean,
})
MEDIA_PLAYER_SELECT_SOURCE_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_INPUT_SOURCE): cv.string,
})
def is_on(hass, entity_id=None):
"""
Return true if specified media player entity_id is on.
    Check all media players if no entity_id is specified.
"""
entity_ids = [entity_id] if entity_id else hass.states.entity_ids(DOMAIN)
return any(not hass.states.is_state(entity_id, STATE_OFF)
for entity_id in entity_ids)
def turn_on(hass, entity_id=None):
"""Turn on specified media player or all."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_TURN_ON, data)
def turn_off(hass, entity_id=None):
"""Turn off specified media player or all."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_TURN_OFF, data)
def toggle(hass, entity_id=None):
"""Toggle specified media player or all."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_TOGGLE, data)
def volume_up(hass, entity_id=None):
"""Send the media player the command for volume up."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_VOLUME_UP, data)
def volume_down(hass, entity_id=None):
"""Send the media player the command for volume down."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_VOLUME_DOWN, data)
def mute_volume(hass, mute, entity_id=None):
"""Send the media player the command for muting the volume."""
data = {ATTR_MEDIA_VOLUME_MUTED: mute}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_VOLUME_MUTE, data)
def set_volume_level(hass, volume, entity_id=None):
"""Send the media player the command for setting the volume."""
data = {ATTR_MEDIA_VOLUME_LEVEL: volume}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_VOLUME_SET, data)
def media_play_pause(hass, entity_id=None):
"""Send the media player the command for play/pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_MEDIA_PLAY_PAUSE, data)
def media_play(hass, entity_id=None):
"""Send the media player the command for play/pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_MEDIA_PLAY, data)
def media_pause(hass, entity_id=None):
"""Send the media player the command for pause."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_MEDIA_PAUSE, data)
def media_stop(hass, entity_id=None):
"""Send the media player the stop command."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_MEDIA_STOP, data)
def media_next_track(hass, entity_id=None):
"""Send the media player the command for next track."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data)
def media_previous_track(hass, entity_id=None):
"""Send the media player the command for prev track."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.services.call(DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data)
def media_seek(hass, position, entity_id=None):
"""Send the media player the command to seek in current playing media."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
data[ATTR_MEDIA_SEEK_POSITION] = position
hass.services.call(DOMAIN, SERVICE_MEDIA_SEEK, data)
def play_media(hass, media_type, media_id, entity_id=None, enqueue=None):
"""Send the media player the command for playing media."""
data = {ATTR_MEDIA_CONTENT_TYPE: media_type,
ATTR_MEDIA_CONTENT_ID: media_id}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
if enqueue:
data[ATTR_MEDIA_ENQUEUE] = enqueue
hass.services.call(DOMAIN, SERVICE_PLAY_MEDIA, data)
def select_source(hass, source, entity_id=None):
"""Send the media player the command to select input source."""
data = {ATTR_INPUT_SOURCE: source}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SELECT_SOURCE, data)
def setup(hass, config):
"""Track states and offer events for media_players."""
component = EntityComponent(
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL,
DISCOVERY_PLATFORMS)
component.setup(config)
descriptions = load_yaml_config_file(
os.path.join(os.path.dirname(__file__), 'services.yaml'))
def media_player_service_handler(service):
"""Map services to methods on MediaPlayerDevice."""
method = SERVICE_TO_METHOD[service.service]
for player in component.extract_from_service(service):
getattr(player, method)()
if player.should_poll:
player.update_ha_state(True)
for service in SERVICE_TO_METHOD:
hass.services.register(DOMAIN, service, media_player_service_handler,
descriptions.get(service),
schema=MEDIA_PLAYER_SCHEMA)
def volume_set_service(service):
"""Set specified volume on the media player."""
volume = service.data.get(ATTR_MEDIA_VOLUME_LEVEL)
for player in component.extract_from_service(service):
player.set_volume_level(volume)
if player.should_poll:
player.update_ha_state(True)
hass.services.register(DOMAIN, SERVICE_VOLUME_SET, volume_set_service,
descriptions.get(SERVICE_VOLUME_SET),
schema=MEDIA_PLAYER_SET_VOLUME_SCHEMA)
def volume_mute_service(service):
"""Mute (true) or unmute (false) the media player."""
mute = service.data.get(ATTR_MEDIA_VOLUME_MUTED)
for player in component.extract_from_service(service):
player.mute_volume(mute)
if player.should_poll:
player.update_ha_state(True)
hass.services.register(DOMAIN, SERVICE_VOLUME_MUTE, volume_mute_service,
descriptions.get(SERVICE_VOLUME_MUTE),
schema=MEDIA_PLAYER_MUTE_VOLUME_SCHEMA)
def media_seek_service(service):
"""Seek to a position."""
position = service.data.get(ATTR_MEDIA_SEEK_POSITION)
for player in component.extract_from_service(service):
player.media_seek(position)
if player.should_poll:
player.update_ha_state(True)
hass.services.register(DOMAIN, SERVICE_MEDIA_SEEK, media_seek_service,
descriptions.get(SERVICE_MEDIA_SEEK),
schema=MEDIA_PLAYER_MEDIA_SEEK_SCHEMA)
def select_source_service(service):
"""Change input to selected source."""
input_source = service.data.get(ATTR_INPUT_SOURCE)
for player in component.extract_from_service(service):
player.select_source(input_source)
if player.should_poll:
player.update_ha_state(True)
hass.services.register(DOMAIN, SERVICE_SELECT_SOURCE,
select_source_service,
descriptions.get(SERVICE_SELECT_SOURCE),
schema=MEDIA_PLAYER_SELECT_SOURCE_SCHEMA)
def play_media_service(service):
"""Play specified media_id on the media player."""
media_type = service.data.get(ATTR_MEDIA_CONTENT_TYPE)
media_id = service.data.get(ATTR_MEDIA_CONTENT_ID)
enqueue = service.data.get(ATTR_MEDIA_ENQUEUE)
kwargs = {
ATTR_MEDIA_ENQUEUE: enqueue,
}
for player in component.extract_from_service(service):
player.play_media(media_type, media_id, **kwargs)
if player.should_poll:
player.update_ha_state(True)
hass.services.register(DOMAIN, SERVICE_PLAY_MEDIA, play_media_service,
descriptions.get(SERVICE_PLAY_MEDIA),
schema=MEDIA_PLAYER_PLAY_MEDIA_SCHEMA)
return True
class MediaPlayerDevice(Entity):
"""ABC for media player devices."""
# pylint: disable=too-many-public-methods,no-self-use
# Implement these for your media player
@property
def state(self):
"""State of the player."""
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return None
@property
def media_content_id(self):
"""Content ID of current playing media."""
return None
@property
def media_content_type(self):
"""Content type of current playing media."""
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return None
@property
def media_image_url(self):
"""Image url of current playing media."""
return None
@property
def media_title(self):
"""Title of current playing media."""
return None
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
return None
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
return None
@property
def media_album_artist(self):
"""Album artist of current playing media, music track only."""
return None
@property
def media_track(self):
"""Track number of current playing media, music track only."""
return None
@property
def media_series_title(self):
"""Title of series of current playing media, TV show only."""
return None
@property
def media_season(self):
"""Season of current playing media, TV show only."""
return None
@property
def media_episode(self):
"""Episode of current playing media, TV show only."""
return None
@property
def media_channel(self):
"""Channel currently playing."""
return None
@property
def media_playlist(self):
"""Title of Playlist currently playing."""
return None
@property
def app_id(self):
"""ID of the current running app."""
return None
@property
def app_name(self):
"""Name of the current running app."""
return None
@property
def source(self):
"""Name of the current input source."""
return None
@property
def source_list(self):
"""List of available input sources."""
return None
@property
def supported_media_commands(self):
"""Flag media commands that are supported."""
return 0
def turn_on(self):
"""Turn the media player on."""
raise NotImplementedError()
def turn_off(self):
"""Turn the media player off."""
raise NotImplementedError()
def mute_volume(self, mute):
"""Mute the volume."""
raise NotImplementedError()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
raise NotImplementedError()
def media_play(self):
"""Send play commmand."""
raise NotImplementedError()
def media_pause(self):
"""Send pause command."""
raise NotImplementedError()
def media_stop(self):
"""Send stop command."""
raise NotImplementedError()
def media_previous_track(self):
"""Send previous track command."""
raise NotImplementedError()
def media_next_track(self):
"""Send next track command."""
raise NotImplementedError()
def media_seek(self, position):
"""Send seek command."""
raise NotImplementedError()
def play_media(self, media_type, media_id):
"""Play a piece of media."""
raise NotImplementedError()
def select_source(self, source):
"""Select input source."""
raise NotImplementedError()
# No need to overwrite these.
@property
def support_pause(self):
"""Boolean if pause is supported."""
return bool(self.supported_media_commands & SUPPORT_PAUSE)
@property
def support_stop(self):
"""Boolean if stop is supported."""
return bool(self.supported_media_commands & SUPPORT_STOP)
@property
def support_seek(self):
"""Boolean if seek is supported."""
return bool(self.supported_media_commands & SUPPORT_SEEK)
@property
def support_volume_set(self):
"""Boolean if setting volume is supported."""
return bool(self.supported_media_commands & SUPPORT_VOLUME_SET)
@property
def support_volume_mute(self):
"""Boolean if muting volume is supported."""
return bool(self.supported_media_commands & SUPPORT_VOLUME_MUTE)
@property
def support_previous_track(self):
"""Boolean if previous track command supported."""
return bool(self.supported_media_commands & SUPPORT_PREVIOUS_TRACK)
@property
def support_next_track(self):
"""Boolean if next track command supported."""
return bool(self.supported_media_commands & SUPPORT_NEXT_TRACK)
@property
def support_play_media(self):
"""Boolean if play media command supported."""
return bool(self.supported_media_commands & SUPPORT_PLAY_MEDIA)
@property
def support_select_source(self):
"""Boolean if select source command supported."""
return bool(self.supported_media_commands & SUPPORT_SELECT_SOURCE)
def toggle(self):
"""Toggle the power on the media player."""
if self.state in [STATE_OFF, STATE_IDLE]:
self.turn_on()
else:
self.turn_off()
def volume_up(self):
"""Turn volume up for media player."""
if self.volume_level < 1:
self.set_volume_level(min(1, self.volume_level + .1))
def volume_down(self):
"""Turn volume down for media player."""
if self.volume_level > 0:
self.set_volume_level(max(0, self.volume_level - .1))
def media_play_pause(self):
"""Play or pause the media player."""
if self.state == STATE_PLAYING:
self.media_pause()
else:
self.media_play()
@property
def entity_picture(self):
"""Return image of the media playing."""
return None if self.state == STATE_OFF else self.media_image_url
@property
def state_attributes(self):
"""Return the state attributes."""
if self.state == STATE_OFF:
state_attr = {
ATTR_SUPPORTED_MEDIA_COMMANDS: self.supported_media_commands,
}
else:
state_attr = {
attr: getattr(self, attr) for attr
in ATTR_TO_PROPERTY if getattr(self, attr) is not None
}
return state_attr
|
mit
| 7,915,092,304,396,361,000 | 29.939441 | 79 | 0.647428 | false |
devurandom/portage
|
pym/_emerge/PollScheduler.py
|
1
|
7723
|
# Copyright 1999-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import gzip
import errno
try:
import threading
except ImportError:
import dummy_threading as threading
from portage import _encodings
from portage import _unicode_encode
from portage.util import writemsg_level
from portage.util.SlotObject import SlotObject
from portage.util._eventloop.EventLoop import EventLoop
from portage.util._eventloop.global_event_loop import global_event_loop
from _emerge.getloadavg import getloadavg
class PollScheduler(object):
# max time between loadavg checks (milliseconds)
_loadavg_latency = 30000
class _sched_iface_class(SlotObject):
__slots__ = ("IO_ERR", "IO_HUP", "IO_IN", "IO_NVAL", "IO_OUT",
"IO_PRI", "child_watch_add",
"idle_add", "io_add_watch", "iteration",
"output", "register", "run",
"source_remove", "timeout_add", "unregister")
def __init__(self, main=False):
"""
@param main: If True then use global_event_loop(), otherwise use
a local EventLoop instance (default is False, for safe use in
a non-main thread)
@type main: bool
"""
self._terminated = threading.Event()
self._terminated_tasks = False
self._max_jobs = 1
self._max_load = None
self._jobs = 0
self._scheduling = False
self._background = False
if main:
self._event_loop = global_event_loop()
else:
self._event_loop = EventLoop(main=False)
self.sched_iface = self._sched_iface_class(
IO_ERR=self._event_loop.IO_ERR,
IO_HUP=self._event_loop.IO_HUP,
IO_IN=self._event_loop.IO_IN,
IO_NVAL=self._event_loop.IO_NVAL,
IO_OUT=self._event_loop.IO_OUT,
IO_PRI=self._event_loop.IO_PRI,
child_watch_add=self._event_loop.child_watch_add,
idle_add=self._event_loop.idle_add,
io_add_watch=self._event_loop.io_add_watch,
iteration=self._event_loop.iteration,
output=self._task_output,
register=self._event_loop.io_add_watch,
source_remove=self._event_loop.source_remove,
timeout_add=self._event_loop.timeout_add,
unregister=self._event_loop.source_remove)
def terminate(self):
"""
Schedules asynchronous, graceful termination of the scheduler
at the earliest opportunity.
This method is thread-safe (and safe for signal handlers).
"""
self._terminated.set()
def _termination_check(self):
"""
Calls _terminate_tasks() if appropriate. It's guaranteed not to
call it while _schedule_tasks() is being called. The check should
be executed for each iteration of the event loop, for response to
termination signals at the earliest opportunity. It always returns
True, for continuous scheduling via idle_add.
"""
if not self._scheduling and \
self._terminated.is_set() and \
not self._terminated_tasks:
self._scheduling = True
try:
self._terminated_tasks = True
self._terminate_tasks()
finally:
self._scheduling = False
return True
def _terminate_tasks(self):
"""
Send signals to terminate all tasks. This is called once
from _keep_scheduling() or _is_work_scheduled() in the event
dispatching thread. It will not be called while the _schedule_tasks()
implementation is running, in order to avoid potential
interference. All tasks should be cleaned up at the earliest
opportunity, but not necessarily before this method returns.
Typically, this method will send kill signals and return without
waiting for exit status. This allows basic cleanup to occur, such as
flushing of buffered output to logs.
"""
raise NotImplementedError()
def _keep_scheduling(self):
"""
@rtype: bool
@return: True if there may be remaining tasks to schedule,
False otherwise.
"""
return False
def _schedule_tasks(self):
"""
This is called from inside the _schedule() method, which
guarantees the following:
1) It will not be called recursively.
2) _terminate_tasks() will not be called while it is running.
3) The state of the boolean _terminated_tasks variable will
not change while it is running.
Unless this method is used to perform user interface updates,
or something like that, the first thing it should do is check
the state of _terminated_tasks and if that is True then it
should return immediately (since there's no need to
schedule anything after _terminate_tasks() has been called).
"""
pass
def _schedule(self):
"""
Calls _schedule_tasks() and automatically returns early from
any recursive calls to this method that the _schedule_tasks()
call might trigger. This makes _schedule() safe to call from
inside exit listeners.
"""
if self._scheduling:
return False
self._scheduling = True
try:
self._schedule_tasks()
finally:
self._scheduling = False
def _main_loop(self):
term_check_id = self.sched_iface.idle_add(self._termination_check)
loadavg_check_id = None
if self._max_load is not None:
# We have to schedule periodically, in case the load
# average has changed since the last call.
loadavg_check_id = self.sched_iface.timeout_add(
self._loadavg_latency, self._schedule)
try:
# Populate initial event sources. Unless we're scheduling
# based on load average, we only need to do this once
# here, since it can be called during the loop from within
# event handlers.
self._schedule()
# Loop while there are jobs to be scheduled.
while self._keep_scheduling():
self.sched_iface.iteration()
# Clean shutdown of previously scheduled jobs. In the
# case of termination, this allows for basic cleanup
# such as flushing of buffered output to logs.
while self._is_work_scheduled():
self.sched_iface.iteration()
finally:
self.sched_iface.source_remove(term_check_id)
if loadavg_check_id is not None:
self.sched_iface.source_remove(loadavg_check_id)
def _is_work_scheduled(self):
return bool(self._running_job_count())
def _running_job_count(self):
return self._jobs
def _can_add_job(self):
if self._terminated_tasks:
return False
max_jobs = self._max_jobs
max_load = self._max_load
if self._max_jobs is not True and \
self._running_job_count() >= self._max_jobs:
return False
if max_load is not None and \
(max_jobs is True or max_jobs > 1) and \
self._running_job_count() >= 1:
try:
avg1, avg5, avg15 = getloadavg()
except OSError:
return False
if avg1 >= max_load:
return False
return True
def _task_output(self, msg, log_path=None, background=None,
level=0, noiselevel=-1):
"""
Output msg to stdout if not self._background. If log_path
is not None then append msg to the log (appends with
compression if the filename extension of log_path
corresponds to a supported compression type).
"""
if background is None:
# If the task does not have a local background value
# (like for parallel-fetch), then use the global value.
background = self._background
msg_shown = False
if not background:
writemsg_level(msg, level=level, noiselevel=noiselevel)
msg_shown = True
if log_path is not None:
try:
f = open(_unicode_encode(log_path,
encoding=_encodings['fs'], errors='strict'),
mode='ab')
f_real = f
except IOError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
if not msg_shown:
writemsg_level(msg, level=level, noiselevel=noiselevel)
else:
if log_path.endswith('.gz'):
# NOTE: The empty filename argument prevents us from
# triggering a bug in python3 which causes GzipFile
# to raise AttributeError if fileobj.name is bytes
# instead of unicode.
f = gzip.GzipFile(filename='', mode='ab', fileobj=f)
f.write(_unicode_encode(msg))
f.close()
if f_real is not f:
f_real.close()
|
gpl-2.0
| -1,054,756,073,076,287,900 | 29.525692 | 71 | 0.704519 | false |
typesupply/defcon
|
documentation/source/conf.py
|
1
|
6011
|
# -*- coding: utf-8 -*-
#
# defcon documentation build configuration file, created by
# sphinx-quickstart on Sun Jan 11 12:32:49 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'defcon'
copyright = '2009, Type Supply LLC'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.0'
# The full version, including alpha/beta/rc tags.
release = '0.2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'defcondoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'defcon.tex', r'defcon Documentation',
r'Tal Leming', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
mit
| -2,972,578,160,911,436,000 | 30.636842 | 82 | 0.720845 | false |
churchlab/millstone
|
genome_designer/pipeline/read_alignment_util.py
|
1
|
5281
|
"""
Methods related to aligning reads.
This module was created because of a circular import issue with celery.
...
File "/home/glebk/Projects/churchlab/genome-designer-v2/genome_designer/pipeline/read_alignment.py", line 9, in <module>
from main.models import clean_filesystem_location
File "/home/glebk/Projects/churchlab/genome-designer-v2/genome_designer/main/__init__.py", line 1, in <module>
import signals
File "/home/glebk/Projects/churchlab/genome-designer-v2/genome_designer/main/signals.py", line 20, in <module>
from pipeline.read_alignment import ensure_bwa_index
ImportError: cannot import name ensure_bwa_index
"""
import os
import subprocess
from utils.bam_utils import filter_bam_file_by_row
from django.conf import settings
SAMTOOLS_BINARY = settings.SAMTOOLS_BINARY
TOOLS_DIR = settings.TOOLS_DIR
def has_bwa_index(ref_genome_fasta):
return os.path.exists(ref_genome_fasta + '.bwt')
def ensure_bwa_index(ref_genome_fasta, error_output=None):
"""Creates the reference genome index required by bwa, if it doesn't exist
already.
We rely on the convention that the index file location is the fasta
location with the extension '.bwt' appended to it.
"""
if not has_bwa_index(ref_genome_fasta):
build_bwa_index(ref_genome_fasta, error_output)
# Also build the fasta index.
if not os.path.exists(ref_genome_fasta + '.fai'):
subprocess.check_call([
SAMTOOLS_BINARY,
'faidx',
ref_genome_fasta
], stderr=error_output)
def build_bwa_index(ref_genome_fasta, error_output=None):
"""Calls the command that builds the bwa index required for alignment.
This creates a file in the same directory as ref_genome_fasta, appending
the extension '.bwt' to the name of the fasta.
"""
subprocess.check_call([
'%s/bwa/bwa' % TOOLS_DIR,
'index',
'-a',
'is',
ref_genome_fasta
], stderr=error_output)
def index_bam_file(bam_file, error_output=None):
subprocess.check_call([
SAMTOOLS_BINARY,
'index',
bam_file,
], stderr=error_output)
def extract_split_reads(bam_filename, bam_split_filename):
"""
Isolate split reads from a bam file.
This uses a python script supplied with Lumpy that is run as a
separate process.
This is an internal function that works directly with files, and
is called separately by both SV calling and read ref alignment.
NOTE THAT THIS SCRIPT ONLY WORKS WITH BWA MEM.
"""
assert os.path.exists(bam_filename), "BAM file '%s' is missing." % (
bam_filename)
# Use lumpy bwa-mem split read script to pull out split reads.
filter_split_reads = ' | '.join([
'{samtools} view -h {bam_filename}',
'python {lumpy_bwa_mem_sr_script} -i stdin',
'{samtools} view -Sb -']).format(
samtools=settings.SAMTOOLS_BINARY,
bam_filename=bam_filename,
lumpy_bwa_mem_sr_script=
settings.LUMPY_EXTRACT_SPLIT_READS_BWA_MEM)
with open(bam_split_filename, 'w') as fh:
subprocess.check_call(filter_split_reads,
stdout=fh,
shell=True,
executable=settings.BASH_PATH)
# sort the split reads, overwrite the old file
subprocess.check_call([settings.SAMTOOLS_BINARY, 'sort',
bam_split_filename,
os.path.splitext(bam_split_filename)[0]])
_filter_out_interchromosome_reads(bam_split_filename)
def extract_discordant_read_pairs(bam_filename, bam_discordant_filename):
"""Isolate discordant pairs of reads from a sample alignment.
"""
# Use bam read alignment flags to pull out discordant pairs only
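    # SAM flags filtered out (-F): 0x0002 proper pair, 0x0100 secondary
    # alignment, 0x0004 read unmapped, 0x0008 mate unmapped, 0x0400 duplicate.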
filter_discordant = ' | '.join([
'{samtools} view -u -F 0x0002 {bam_filename} ',
'{samtools} view -u -F 0x0100 - ',
'{samtools} view -u -F 0x0004 - ',
'{samtools} view -u -F 0x0008 - ',
'{samtools} view -b -F 0x0400 - ']).format(
samtools=settings.SAMTOOLS_BINARY,
bam_filename=bam_filename)
with open(bam_discordant_filename, 'w') as fh:
subprocess.check_call(filter_discordant,
stdout=fh, shell=True, executable=settings.BASH_PATH)
# sort the discordant reads, overwrite the old file
subprocess.check_call([settings.SAMTOOLS_BINARY, 'sort',
bam_discordant_filename,
os.path.splitext(bam_discordant_filename)[0]])
_filter_out_interchromosome_reads(bam_discordant_filename)
def _filter_out_interchromosome_reads(bam_filename, overwrite_input=True):
"""Filters out read pairs which lie on different chromosomes.
Args:
bam_filename: Path to bam file.
overwrite_input: If True, overwrite the input file.
"""
def is_rnext_same(line):
parts = line.split('\t')
rnext_col = parts[6]
return rnext_col == '='
if overwrite_input:
output_bam_path = bam_filename
else:
output_bam_path = os.path.splitext(bam_filename)[0] + '.nointerchrom.bam'
filter_bam_file_by_row(bam_filename, is_rnext_same, output_bam_path)
|
mit
| 946,334,179,424,611,500 | 33.97351 | 122 | 0.642492 | false |
klahnakoski/MySQL-to-S3
|
vendor/jx_python/__init__.py
|
1
|
3013
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import unicode_literals
from collections import Mapping
from jx_base import container
from mo_dots import Data
from mo_dots import wrap, set_default, split_field
from mo_future import text_type
from mo_logs import Log
config = Data() # config.default IS EXPECTED TO BE SET BEFORE CALLS ARE MADE
_ListContainer = None
_meta = None
def _delayed_imports():
global _ListContainer
global _meta
from jx_python import meta as _meta
from jx_python.containers.list_usingPythonList import ListContainer as _ListContainer
_ = _ListContainer
_ = _meta
try:
from pyLibrary.queries.jx_usingMySQL import MySQL
except Exception:
MySQL = None
try:
from jx_elasticsearch.meta import FromESMetadata
except Exception:
FromESMetadata = None
set_default(container.type2container, {
"mysql": MySQL,
"memory": None,
"meta": FromESMetadata
})
def wrap_from(frum, schema=None):
"""
    Convert a "from" clause into a queryable Container.
    :param frum: a table name, container settings, a query dict, or a list of records
    :param schema: optional schema used when frum is an inline query
    :return: a Container (or QueryOp) wrapping frum
"""
if not _meta:
_delayed_imports()
frum = wrap(frum)
if isinstance(frum, text_type):
if not container.config.default.settings:
Log.error("expecting jx_base.container.config.default.settings to contain default elasticsearch connection info")
type_ = None
index = frum
if frum.startswith("meta."):
if frum == "meta.columns":
return _meta.singlton.meta.columns.denormalized()
elif frum == "meta.tables":
return _meta.singlton.meta.tables
else:
Log.error("{{name}} not a recognized table", name=frum)
else:
type_ = container.config.default.type
index = split_field(frum)[0]
settings = set_default(
{
"index": index,
"name": frum,
"exists": True,
},
container.config.default.settings
)
settings.type = None
return container.type2container[type_](settings)
elif isinstance(frum, Mapping) and frum.type and container.type2container[frum.type]:
# TODO: Ensure the frum.name is set, so we capture the deep queries
if not frum.type:
Log.error("Expecting from clause to have a 'type' property")
return container.type2container[frum.type](frum.settings)
elif isinstance(frum, Mapping) and (frum["from"] or isinstance(frum["from"], (list, set))):
from jx_base.query import QueryOp
return QueryOp.wrap(frum, schema=schema)
elif isinstance(frum, (list, set)):
return _ListContainer("test_list", frum)
else:
return frum
|
mpl-2.0
| -4,624,035,901,889,225,000 | 27.971154 | 125 | 0.626286 | false |
fivejjs/crosscat
|
crosscat/cython_code/test_pred_prob_and_density.py
|
1
|
3579
|
#
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Dan Lovell and Jay Baxter
# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import random
import argparse
import sys
from collections import Counter
#
import numpy
import pylab
import crosscat.tests.enumerate_utils as eu
import crosscat.tests.plot_utils as pu
# import crosscat.utils.file_utils as fu
import crosscat.utils.sample_utils as su
import crosscat.utils.data_utils as du
import crosscat.cython_code.State as State
random.seed(None)
inf_seed = random.randrange(32767)
# THIS CODE ONLY TESTS CONTINUOUS DATA
# FIXME: getting weird error on conversion to int: too large from inside pyx
def get_next_seed(max_val=32767): # sys.maxint
return random_state.randint(max_val)
random_state = numpy.random.RandomState(inf_seed)
# generate a state with two, very distinct clusters
col = numpy.array([0,0])
row = numpy.array([[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]])
p_State, T, M_c, M_r, X_L, X_D = eu.GenerateStateFromPartitions(col,row,std_gen=10000.0, std_data=0.01)
X_L = p_State.get_X_L()
X_D = p_State.get_X_D()
# move stuff around a little bit
for i in range(100):
p_State.transition(which_transitions=['column_partition_assignments','row_partition_assignments'])
# quick test just to make sure things output what they're supposed to
x = 0.0;
query_row = len(row[0]) # tests unobserved
# query_row = 3; # tests observed
Q = [(query_row,0,x)]
Y = [] # no constraints
# Y = [(1,0,.1),(3,0,.1),(22,0,105),(30,0,100)] # generic constraints
p = su.simple_predictive_probability(M_c, X_L, X_D, Y, Q)
n = 1000;
samples = su.simple_predictive_sample(M_c, X_L, X_D, Y, Q, get_next_seed,n=n)
X = [sample[0] for sample in samples]
pylab.figure(facecolor='white')
pdf, bins, patches = pylab.hist(X,50,normed=True, histtype='bar',label='samples',edgecolor='none')
pylab.show()
pdf_max = max(pdf)
Qs = [];
for i in range(n):
Qtmp = (query_row,0,X[i])
Qs.append(Qtmp)
Ps = su.simple_predictive_probability(M_c, X_L, X_D, Y, Qs)
Ps2 = su.simple_predictive_probability_density(M_c, X_L, X_D, Y, Qs)
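# Exponentiate the returned values and rescale them to the histogram peak so
# they can be overlaid on the sampled frequencies in the plots below.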
Ps = (numpy.exp(Ps)/max(numpy.exp(Ps)))*pdf_max
Ps2 = (numpy.exp(Ps2)/max(numpy.exp(Ps2)))*pdf_max
# make a scatterplot
pylab.scatter(X,Ps, c='red',label="p from cdf")
pylab.legend(loc='upper left')
pylab.xlabel('value')
pylab.ylabel('frequency/probability')
pylab.title('TEST: probability and frequencies are not normalized')
pylab.show()
raw_input("Press Enter when finished with probabilty...")
pylab.clf()
pdf, bins, patches = pylab.hist(X,50,normed=True, histtype='bar',label='samples',edgecolor='none')
pylab.scatter(X,Ps2, c='green',label="pdf")
pylab.legend(loc='upper left')
pylab.xlabel('value')
pylab.ylabel('frequency/density')
pylab.title('TEST: probability and frequencies are not normalized')
pylab.show()
raw_input("Press Enter when finished with density...")
|
apache-2.0
| -3,748,562,983,256,348,000 | 30.394737 | 103 | 0.714445 | false |
acercado/jd-ph-cms
|
jd-ph-cms/notifications/migrations/0001_initial.py
|
1
|
2228
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-02-04 05:22
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Notification',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_viewed', models.BooleanField(default=False)),
('is_read', models.BooleanField(default=False)),
],
options={
'db_table': 'cms_notifications',
},
),
migrations.CreateModel(
name='NotificationMessage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('message', models.TextField(blank=True, null=True)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('category', models.CharField(max_length=10)),
('linkback', models.CharField(blank=True, max_length=20, null=True)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'cms_notification_messages',
'ordering': ['-timestamp'],
},
),
migrations.AddField(
model_name='notification',
name='message',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='messages', to='notifications.NotificationMessage'),
),
migrations.AddField(
model_name='notification',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
bsd-3-clause
| 2,311,233,370,199,739,400 | 38.785714 | 169 | 0.581688 | false |
cylc/cylc
|
cylc/flow/task_pool.py
|
1
|
59064
|
# THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Wrangle task proxies to manage the workflow.
"""
from fnmatch import fnmatchcase
from string import ascii_letters
import json
from time import time
from cylc.flow.parsec.OrderedDict import OrderedDict
from cylc.flow import LOG
from cylc.flow.cycling.loader import get_point, standardise_point_string
from cylc.flow.exceptions import SuiteConfigError, PointParsingError
from cylc.flow.suite_status import StopMode
from cylc.flow.task_action_timer import TaskActionTimer
from cylc.flow.task_events_mgr import (
CustomTaskEventHandlerContext, TaskEventMailContext,
TaskJobLogsRetrieveContext)
from cylc.flow.task_id import TaskID
from cylc.flow.task_job_logs import get_task_job_id
from cylc.flow.task_proxy import TaskProxy
from cylc.flow.task_state import (
TASK_OUTPUT_EXPIRED,
TASK_OUTPUT_FAILED,
TASK_OUTPUT_SUCCEEDED,
TASK_STATUSES_ACTIVE,
TASK_STATUSES_NOT_STALLED,
TASK_STATUS_EXPIRED,
TASK_STATUS_FAILED,
TASK_STATUS_QUEUED,
TASK_STATUS_READY,
TASK_STATUS_RETRYING,
TASK_STATUS_RUNNING,
TASK_STATUS_SUBMITTED,
TASK_STATUS_SUBMIT_RETRYING,
TASK_STATUS_SUCCEEDED,
TASK_STATUS_WAITING
)
from cylc.flow.wallclock import get_current_time_string
from cylc.flow.platforms import get_platform
class FlowLabelMgr:
"""
Manage flow labels consisting of a string of one or more letters [a-zA-Z].
Flow labels are task attributes representing the flow the task belongs to,
passed down to spawned children. If a new flow is started, a new single
character label is chosen randomly. This allows for 52 simultaneous flows
(which should be more than enough) with labels that are easy to work with.
Flows merge locally when a task can't be spawned because it already exists
in the pool with a different label. We merge the labels at such tasks so
that downstream events can be considered to belong to either of the
    original flows. Merged labels are simple strings that contain the
component labels, e.g. if flow "a" merges with flow "b" the merged result
is "ab" (or "ba", it doesn't matter which).
"""
def __init__(self):
"""Store available and used labels."""
self.avail = set(ascii_letters)
self.inuse = set()
def get_num_inuse(self):
"""Return the number of labels currently in use."""
return len(list(self.inuse))
def make_avail(self, labels):
"""Return labels (set) to the pool of available labels."""
LOG.info("returning flow label(s) %s", labels)
for label in labels:
try:
self.inuse.remove(label)
except KeyError:
pass
self.avail.add(label)
def get_new_label(self):
"""Return a new label, or None if we've run out."""
try:
label = self.avail.pop()
except KeyError:
return None
self.inuse.add(label)
return label
@staticmethod
def get_common_labels(labels):
"""Return list of common labels."""
set_labels = [set(lab) for lab in labels]
return set.intersection(*set_labels)
@staticmethod
def merge_labels(lab1, lab2):
"""Return the label representing both lab1 and lab2.
Note the incoming labels could already be merged.
"""
if lab1 == lab2:
return lab1
labs1 = set(list(lab1))
labs2 = set(list(lab2))
return ''.join(labs1.union(labs2))
@staticmethod
def unmerge_labels(prune, target):
"""Unmerge prune from target."""
for char in list(prune):
target = target.replace(char, '')
return target
@staticmethod
def match_labels(lab1, lab2):
"""Return True if lab1 and lab2 have any labels in common.
If they do, the owner tasks can be considered part of the same flow.
Note the incoming labels could already be merged.
"""
labs1 = set(list(lab1))
labs2 = set(list(lab2))
return bool(labs1.intersection(labs2))
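# Illustrative behaviour of FlowLabelMgr above (a sketch, not part of the
# original module): merge_labels("a", "b") returns "ab" or "ba",
# match_labels("ab", "b") is True, and unmerge_labels("a", "ab") gives "b".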
class TaskPool:
"""Task pool of a suite."""
ERR_PREFIX_TASKID_MATCH = "No matching tasks found: "
def __init__(self, config, suite_db_mgr, task_events_mgr, job_pool):
self.config = config
self.stop_point = config.final_point
self.suite_db_mgr = suite_db_mgr
self.task_events_mgr = task_events_mgr
# TODO this is ugly:
self.task_events_mgr.spawn_func = self.spawn_on_output
self.job_pool = job_pool
self.flow_label_mgr = FlowLabelMgr()
self.do_reload = False
self.custom_runahead_limit = self.config.get_custom_runahead_limit()
self.max_future_offset = None
self._prev_runahead_base_point = None
self.max_num_active_cycle_points = (
self.config.get_max_num_active_cycle_points())
self._prev_runahead_sequence_points = None
self.pool = {}
self.runahead_pool = {}
self.myq = {}
self.queues = {}
self.assign_queues()
self.pool_list = []
self.rhpool_list = []
self.pool_changed = False
self.rhpool_changed = False
self.pool_changes = []
self.is_held = False
self.hold_point = None
self.stuck_future_tasks = []
self.abs_outputs_done = set()
self.stop_task_id = None
self.stop_task_finished = False
self.abort_task_failed = False
self.expected_failed_tasks = self.config.get_expected_failed_tasks()
self.orphans = []
self.task_name_list = self.config.get_task_name_list()
def set_stop_task(self, task_id):
"""Set stop after a task."""
name = TaskID.split(task_id)[0]
if name in self.config.get_task_name_list():
task_id = TaskID.get_standardised_taskid(task_id)
LOG.info("Setting stop task: " + task_id)
self.stop_task_id = task_id
self.stop_task_finished = False
self.suite_db_mgr.put_suite_stop_task(task_id)
else:
LOG.warning("Requested stop task name does not exist: %s" % name)
def stop_task_done(self):
"""Return True if stop task has succeeded."""
if self.stop_task_id is not None and self.stop_task_finished:
LOG.info("Stop task %s finished" % self.stop_task_id)
self.stop_task_id = None
self.stop_task_finished = False
self.suite_db_mgr.delete_suite_stop_task()
return True
else:
return False
def assign_queues(self):
"""self.myq[taskname] = qfoo"""
self.myq.clear()
for queue, qconfig in self.config.cfg['scheduling']['queues'].items():
self.myq.update((name, queue) for name in qconfig['members'])
def add_to_runahead_pool(self, itask, is_new=True):
"""Add a new task to the runahead pool if possible.
Tasks whose recurrences allow them to spawn beyond the suite
stop point are added to the pool in the held state, ready to be
released if the suite stop point is changed.
"""
# add to the runahead pool
self.runahead_pool.setdefault(itask.point, OrderedDict())
self.runahead_pool[itask.point][itask.identity] = itask
self.rhpool_changed = True
# add row to "task_states" table
if is_new:
self.suite_db_mgr.put_insert_task_states(itask, {
"time_created": get_current_time_string(),
"time_updated": get_current_time_string(),
"status": itask.state.status,
"flow_label": itask.flow_label})
if itask.state.outputs.has_custom_triggers():
self.suite_db_mgr.put_insert_task_outputs(itask)
return itask
def release_runahead_tasks(self):
"""Restrict the number of active cycle points.
Compute max active cycle points or runahead limit, and release tasks to
the main pool if they are below that point (and <= the stop point, if
there is a stop point). Return True if any tasks released, else False.
"""
released = False
if not self.runahead_pool:
return released
# Any finished tasks can be released immediately (this can happen at
# restart when all tasks are initially loaded into the runahead pool).
for itask_id_maps in self.runahead_pool.copy().values():
for itask in itask_id_maps.copy().values():
if itask.state(
TASK_STATUS_FAILED,
TASK_STATUS_SUCCEEDED,
TASK_STATUS_EXPIRED
):
self.release_runahead_task(itask)
released = True
limit = self.max_num_active_cycle_points
points = []
for point, itasks in sorted(
self.get_tasks_by_point(incl_runahead=True).items()):
has_unfinished_itasks = False
for itask in itasks:
if not itask.state(
TASK_STATUS_FAILED,
TASK_STATUS_SUCCEEDED,
TASK_STATUS_EXPIRED
):
has_unfinished_itasks = True
break
if not points and not has_unfinished_itasks:
# We need to begin with an unfinished cycle point.
continue
points.append(point)
if not points:
return False
# Get the earliest point with unfinished tasks.
runahead_base_point = min(points)
# Get all cycling points possible after the runahead base point.
if (self._prev_runahead_base_point is not None and
runahead_base_point == self._prev_runahead_base_point):
# Cache for speed.
sequence_points = self._prev_runahead_sequence_points
else:
sequence_points = []
for sequence in self.config.sequences:
point = runahead_base_point
for _ in range(limit):
point = sequence.get_next_point(point)
if point is None:
break
sequence_points.append(point)
sequence_points = set(sequence_points)
self._prev_runahead_sequence_points = sequence_points
self._prev_runahead_base_point = runahead_base_point
points = set(points).union(sequence_points)
if self.custom_runahead_limit is None:
# Calculate which tasks to release based on a maximum number of
# active cycle points (active meaning non-finished tasks).
latest_allowed_point = sorted(points)[:limit][-1]
if self.max_future_offset is not None:
# For the first N points, release their future trigger tasks.
latest_allowed_point += self.max_future_offset
else:
# Calculate which tasks to release based on a maximum duration
# measured from the oldest non-finished task.
latest_allowed_point = (
runahead_base_point + self.custom_runahead_limit)
if (self._prev_runahead_base_point is None or
self._prev_runahead_base_point != runahead_base_point):
if self.custom_runahead_limit < self.max_future_offset:
LOG.warning(
('custom runahead limit of %s is less than ' +
'future triggering offset %s: suite may stall.') % (
self.custom_runahead_limit,
self.max_future_offset
)
)
self._prev_runahead_base_point = runahead_base_point
if self.stop_point and latest_allowed_point > self.stop_point:
latest_allowed_point = self.stop_point
for point, itask_id_map in self.runahead_pool.copy().items():
if point <= latest_allowed_point:
for itask in itask_id_map.copy().values():
self.release_runahead_task(itask)
released = True
return released
def load_abs_outputs_for_restart(self, row_idx, row):
cycle, name, output = row
self.abs_outputs_done.add((name, cycle, output))
def load_db_task_pool_for_restart(self, row_idx, row):
"""Load tasks from DB task pool/states/jobs tables, to runahead pool.
Output completion status is loaded from the DB, and tasks recorded
as submitted or running are polled to confirm their true status.
Tasks are added to queues again on release from runahead pool.
"""
if row_idx == 0:
LOG.info("LOADING task proxies")
# Create a task proxy corresponding to this DB entry.
(cycle, name, flow_label, is_late, status, satisfied,
is_held, submit_num, _, platform_name, time_submit, time_run, timeout,
outputs_str) = row
try:
itask = TaskProxy(
self.config.get_taskdef(name),
get_point(cycle),
flow_label,
is_held=is_held,
submit_num=submit_num,
is_late=bool(is_late))
except SuiteConfigError:
LOG.exception((
'ignoring task %s from the suite run database\n'
'(its task definition has probably been deleted).'
) % name)
except Exception:
LOG.exception('could not load task %s' % name)
else:
if status in (
TASK_STATUS_SUBMITTED,
TASK_STATUS_RUNNING
):
# update the task proxy with user@host
itask.platform = get_platform(platform_name)
if time_submit:
itask.set_summary_time('submitted', time_submit)
if time_run:
itask.set_summary_time('started', time_run)
if timeout is not None:
itask.timeout = timeout
elif status == TASK_STATUS_READY:
# put back to be readied again.
status = TASK_STATUS_WAITING
# Running or finished task can have completed custom outputs.
if itask.state(
TASK_STATUS_RUNNING,
TASK_STATUS_FAILED,
TASK_STATUS_SUCCEEDED
):
try:
for message in json.loads(outputs_str).values():
itask.state.outputs.set_completion(message, True)
except (AttributeError, TypeError, ValueError):
# Back compat for <=7.6.X
# Each output in separate line as "trigger=message"
try:
for output in outputs_str.splitlines():
itask.state.outputs.set_completion(
output.split("=", 1)[1], True)
except AttributeError:
pass
if platform_name:
itask.summary['platforms_used'][
int(submit_num)] = platform_name
LOG.info("+ %s.%s %s%s" % (
name, cycle, status, ' (held)' if is_held else ''))
# Update prerequisite satisfaction status from DB
sat = {}
for k, v in json.loads(satisfied).items():
sat[tuple(json.loads(k))] = v
# TODO (from Oliver's PR review):
# Wait, what, the keys to a JSON dictionary are themselves JSON
# :vomiting_face:!
# This should be converted to its own DB table pre-8.0.0.
for pre in itask.state.prerequisites:
for k, v in pre.satisfied.items():
pre.satisfied[k] = sat[k]
itask.state.reset(status)
self.add_to_runahead_pool(itask, is_new=False)
def load_db_task_action_timers(self, row_idx, row):
"""Load a task action timer, e.g. event handlers, retry states."""
if row_idx == 0:
LOG.info("LOADING task action timers")
(cycle, name, ctx_key_raw, ctx_raw, delays_raw, num, delay,
timeout) = row
id_ = TaskID.get(name, cycle)
try:
# Extract type namedtuple variables from JSON strings
ctx_key = json.loads(str(ctx_key_raw))
ctx_data = json.loads(str(ctx_raw))
for known_cls in [
CustomTaskEventHandlerContext,
TaskEventMailContext,
TaskJobLogsRetrieveContext]:
if ctx_data and ctx_data[0] == known_cls.__name__:
ctx = known_cls(*ctx_data[1])
break
else:
ctx = ctx_data
if ctx is not None:
ctx = tuple(ctx)
delays = json.loads(str(delays_raw))
except ValueError:
LOG.exception(
"%(id)s: skip action timer %(ctx_key)s" %
{"id": id_, "ctx_key": ctx_key_raw})
return
if ctx_key == "poll_timer" or ctx_key[0] == "poll_timers":
# "poll_timers" for back compat with <=7.6.X
itask = self.get_task_by_id(id_)
if itask is None:
LOG.warning("%(id)s: task not found, skip" % {"id": id_})
return
itask.poll_timer = TaskActionTimer(
ctx, delays, num, delay, timeout)
elif ctx_key[0] == "try_timers":
itask = self.get_task_by_id(id_)
if itask is None:
LOG.warning("%(id)s: task not found, skip" % {"id": id_})
return
itask.try_timers[ctx_key[1]] = TaskActionTimer(
ctx, delays, num, delay, timeout)
elif ctx:
key1, submit_num = ctx_key
# Convert key1 to type tuple - JSON restores as type list
# and this will not previously have been converted back
if isinstance(key1, list):
key1 = tuple(key1)
key = (key1, cycle, name, submit_num)
self.task_events_mgr.event_timers[key] = TaskActionTimer(
ctx, delays, num, delay, timeout)
else:
LOG.exception(
"%(id)s: skip action timer %(ctx_key)s" %
{"id": id_, "ctx_key": ctx_key_raw})
return
LOG.info("+ %s.%s %s" % (name, cycle, ctx_key))
def release_runahead_task(self, itask):
"""Release itask to the appropriate queue in the active pool.
Also auto-spawn next instance if:
- no parents to do it
- has absolute triggers (these are satisfied already by definition)
"""
try:
queue = self.myq[itask.tdef.name]
except KeyError:
queue = self.config.Q_DEFAULT
self.queues.setdefault(queue, OrderedDict())
self.queues[queue][itask.identity] = itask
self.pool.setdefault(itask.point, {})
self.pool[itask.point][itask.identity] = itask
self.pool_changed = True
self.pool_changes.append(itask)
LOG.debug("[%s] -released to the task pool", itask)
del self.runahead_pool[itask.point][itask.identity]
if not self.runahead_pool[itask.point]:
del self.runahead_pool[itask.point]
self.rhpool_changed = True
if itask.tdef.max_future_prereq_offset is not None:
self.set_max_future_offset()
if itask.tdef.sequential:
# implicit prev-instance parent
return
if not itask.reflow:
return
next_point = itask.next_point()
if next_point is not None:
parent_points = itask.tdef.get_parent_points(next_point)
if (not parent_points or
all(x < self.config.start_point for x in parent_points)):
# Auto-spawn next instance of tasks with no parents at the next
# point (or with all parents before the suite start point).
self.get_or_spawn_task(
itask.tdef.name, next_point, flow_label=itask.flow_label,
parent_id=itask.identity)
else:
# Auto-spawn (if needed) next absolute-triggered instances.
for trig in itask.tdef.get_abs_triggers(next_point):
self.get_or_spawn_task(
itask.tdef.name, next_point,
flow_label=itask.flow_label,
parent_id=itask.identity)
def remove(self, itask, reason=""):
"""Remove a task from the pool."""
msg = "task proxy removed"
if reason:
msg += " (%s)" % reason
try:
del self.runahead_pool[itask.point][itask.identity]
except KeyError:
# Not in runahead pool.
try:
del self.pool[itask.point][itask.identity]
except KeyError:
# Not in main pool (forced spawn uses temporary non-pool tasks)
return
else:
# In main pool: remove from pool and queues.
if not self.pool[itask.point]:
del self.pool[itask.point]
self.pool_changed = True
if itask.tdef.name in self.myq: # A reload can remove a task
del self.queues[self.myq[itask.tdef.name]][itask.identity]
if itask.tdef.max_future_prereq_offset is not None:
self.set_max_future_offset()
else:
# In runahead pool.
if not self.runahead_pool[itask.point]:
del self.runahead_pool[itask.point]
self.rhpool_changed = True
# Event-driven final update of task_states table.
# TODO: same for datastore (still updated by iterating the task pool)
self.suite_db_mgr.put_update_task_state(itask)
LOG.debug("[%s] -%s", itask, msg)
del itask
def get_all_tasks(self):
"""Return a list of all task proxies."""
return self.get_rh_tasks() + self.get_tasks()
def get_tasks(self):
"""Return a list of task proxies in the main task pool."""
if self.pool_changed:
self.pool_changed = False
self.pool_list = []
for itask_id_maps in self.queues.values():
self.pool_list.extend(list(itask_id_maps.values()))
return self.pool_list
def get_rh_tasks(self):
"""Return a list of task proxies in the runahead pool."""
if self.rhpool_changed:
self.rhpool_changed = False
self.rhpool_list = []
for itask_id_maps in self.runahead_pool.values():
self.rhpool_list.extend(list(itask_id_maps.values()))
return self.rhpool_list
def get_pool_change_tasks(self):
"""Return a list of task proxies that changed pool."""
results = self.pool_changes
self.pool_changes = []
return results
def get_tasks_by_point(self, incl_runahead):
"""Return a map of task proxies by cycle point."""
point_itasks = {}
for point, itask_id_map in self.pool.items():
point_itasks[point] = list(itask_id_map.values())
if not incl_runahead:
return point_itasks
for point, itask_id_map in self.runahead_pool.items():
point_itasks.setdefault(point, [])
point_itasks[point].extend(list(itask_id_map.values()))
return point_itasks
def get_task_by_id(self, id_):
"""Return task by ID if in the runahead pool or main pool.
Return None if task does not exist.
"""
for itask_ids in (
list(self.queues.values())
+ list(self.runahead_pool.values())):
try:
return itask_ids[id_]
except KeyError:
pass
def get_ready_tasks(self):
"""
1) queue tasks that are ready to run (prerequisites satisfied,
clock-trigger time up) or if their manual trigger flag is set.
2) then submit queued tasks if their queue limit has not been
reached or their manual trigger flag is set.
If TASK_STATUS_QUEUED the task will submit as soon as its internal
queue allows (or immediately if manually triggered first).
Use of "cylc trigger" sets a task's manual trigger flag. Then,
below, an unqueued task will be queued whether or not it is
ready to run; and a queued task will be submitted whether or not
its queue limit has been reached. The flag is immediately unset
after use so that two manual trigger ops are required to submit
an initially unqueued task that is queue-limited.
Return the tasks that are dequeued.
"""
ready_tasks = []
qconfig = self.config.cfg['scheduling']['queues']
for queue in self.queues:
# 1) queue unqueued tasks that are ready to run or manually forced
for itask in list(self.queues[queue].values()):
if not itask.state(TASK_STATUS_QUEUED):
# only need to check that unqueued tasks are ready
if itask.is_ready():
# queue the task
itask.state.reset(TASK_STATUS_QUEUED)
itask.reset_manual_trigger()
# move the task to the back of the queue
self.queues[queue][itask.identity] = \
self.queues[queue].pop(itask.identity)
# 2) submit queued tasks if manually forced or not queue-limited
n_active = 0
n_release = 0
n_limit = qconfig[queue]['limit']
tasks = list(self.queues[queue].values())
# 2.1) count active tasks and compare to queue limit
if n_limit:
for itask in tasks:
if itask.state(
TASK_STATUS_READY,
TASK_STATUS_SUBMITTED,
TASK_STATUS_RUNNING,
is_held=False
):
n_active += 1
n_release = n_limit - n_active
# 2.2) release queued tasks if not limited or if manually forced
for itask in tasks:
if not itask.state(TASK_STATUS_QUEUED):
# (This excludes tasks remaining TASK_STATUS_READY because
# job submission has been stopped with 'cylc shutdown').
continue
if itask.manual_trigger or not n_limit or n_release > 0:
# manual release, or no limit, or not currently limited
n_release -= 1
ready_tasks.append(itask)
itask.reset_manual_trigger()
# (Set to 'ready' is done just before job submission).
                # else leave it queued
LOG.debug('%d task(s) de-queued' % len(ready_tasks))
return ready_tasks
def task_has_future_trigger_overrun(self, itask):
"""Check for future triggers extending beyond the final cycle."""
if not self.stop_point:
return False
for pct in itask.state.prerequisites_get_target_points():
if pct > self.stop_point:
return True
return False
def get_min_point(self):
"""Return the minimum cycle point currently in the pool."""
cycles = list(self.pool)
minc = None
if cycles:
minc = min(cycles)
return minc
def get_max_point(self):
"""Return the maximum cycle point currently in the pool."""
cycles = list(self.pool)
maxc = None
if cycles:
maxc = max(cycles)
return maxc
def get_max_point_runahead(self):
"""Return the maximum cycle point currently in the runahead pool."""
cycles = list(self.runahead_pool)
maxc = None
if cycles:
maxc = max(cycles)
return maxc
def set_max_future_offset(self):
"""Calculate the latest required future trigger offset."""
max_offset = None
for itask in self.get_tasks():
if (itask.tdef.max_future_prereq_offset is not None and
(max_offset is None or
itask.tdef.max_future_prereq_offset > max_offset)):
max_offset = itask.tdef.max_future_prereq_offset
self.max_future_offset = max_offset
def set_do_reload(self, config):
"""Set the task pool to reload mode."""
self.config = config
if config.options.stopcp:
self.stop_point = get_point(config.options.stopcp)
else:
self.stop_point = config.final_point
self.do_reload = True
self.custom_runahead_limit = self.config.get_custom_runahead_limit()
self.max_num_active_cycle_points = (
self.config.get_max_num_active_cycle_points())
# find any old tasks that have been removed from the suite
old_task_name_list = self.task_name_list
self.task_name_list = self.config.get_task_name_list()
for name in old_task_name_list:
if name not in self.task_name_list:
self.orphans.append(name)
for name in self.task_name_list:
if name in self.orphans:
self.orphans.remove(name)
# adjust the new suite config to handle the orphans
self.config.adopt_orphans(self.orphans)
# reassign live tasks from the old queues to the new.
# self.queues[queue][id_] = task
self.assign_queues()
new_queues = {}
for queue in self.queues:
for id_, itask in self.queues[queue].items():
if itask.tdef.name not in self.myq:
continue
key = self.myq[itask.tdef.name]
new_queues.setdefault(key, OrderedDict())
new_queues[key][id_] = itask
self.queues = new_queues
def reload_taskdefs(self):
"""Reload the definitions of task proxies in the pool.
Orphaned tasks (proxies whose definitions were removed from the suite):
- remove if not active yet
- if active, leave them but prevent them from spawning children on
subsequent outputs
Otherwise: replace task definitions but copy over existing outputs etc.
TODO: document for users: beware of reloading graph changes that affect
current active tasks. Such tasks are active with their original defns -
including what children they spawn - and it is not possible in general
to be sure that new defns are compatible with already-active old tasks.
So active tasks attempt to spawn the children that their (pre-reload)
defns say they should.
"""
LOG.info("Reloading task definitions.")
tasks = self.get_all_tasks()
# Log tasks orphaned by a reload but not currently in the task pool.
for name in self.orphans:
if name not in (itask.tdef.name for itask in tasks):
LOG.warning("Removed task: '%s'", name)
for itask in tasks:
if itask.tdef.name in self.orphans:
if (
itask.state(
TASK_STATUS_WAITING,
TASK_STATUS_QUEUED,
TASK_STATUS_SUBMIT_RETRYING,
TASK_STATUS_RETRYING,
)
or itask.state.is_held
):
# Remove orphaned task if it hasn't started running yet.
self.remove(itask, 'task definition removed')
else:
# Keep active orphaned task, but stop it from spawning.
itask.graph_children = {}
LOG.warning("[%s] -will not spawn children"
" (task definition removed)", itask)
else:
self.remove(itask, 'suite definition reload')
new_task = self.add_to_runahead_pool(
TaskProxy(
self.config.get_taskdef(itask.tdef.name),
itask.point,
itask.flow_label, itask.state.status,
submit_num=itask.submit_num))
itask.copy_to_reload_successor(new_task)
LOG.info('[%s] -reloaded task definition', itask)
if itask.state(*TASK_STATUSES_ACTIVE):
LOG.warning(
"[%s] -job(%02d) active with pre-reload settings",
itask,
itask.submit_num)
LOG.info("Reload completed.")
self.do_reload = False
def set_stop_point(self, stop_point):
"""Set the global suite stop point."""
if self.stop_point == stop_point:
return
LOG.info("Setting stop cycle point: %s", stop_point)
self.stop_point = stop_point
for itask in self.get_tasks():
# check cycle stop or hold conditions
if (
self.stop_point
and itask.point > self.stop_point
and itask.state(
TASK_STATUS_WAITING,
TASK_STATUS_QUEUED,
is_held=False
)
):
LOG.warning(
"[%s] -not running (beyond suite stop cycle) %s",
itask,
self.stop_point)
itask.state.reset(is_held=True)
return self.stop_point
def can_stop(self, stop_mode):
"""Return True if suite can stop.
A task is considered active if:
* It is in the active state and not marked with a kill failure.
* It has pending event handlers.
"""
if stop_mode is None:
return False
if stop_mode == StopMode.REQUEST_NOW_NOW:
return True
if self.task_events_mgr.event_timers:
return False
for itask in self.get_tasks():
if (
stop_mode == StopMode.REQUEST_CLEAN
and itask.state(*TASK_STATUSES_ACTIVE)
and not itask.state.kill_failed
):
return False
return True
def warn_stop_orphans(self):
"""Log (warning) orphaned tasks on suite stop."""
for itask in self.get_tasks():
if (
itask.state(*TASK_STATUSES_ACTIVE)
and itask.state.kill_failed
):
LOG.warning("%s: orphaned task (%s, kill failed)" % (
itask.identity, itask.state.status))
elif itask.state(*TASK_STATUSES_ACTIVE):
LOG.warning("%s: orphaned task (%s)" % (
itask.identity, itask.state.status))
for key1, point, name, submit_num in self.task_events_mgr.event_timers:
LOG.warning("%s/%s/%s: incomplete task event handler %s" % (
point, name, submit_num, key1))
def is_stalled(self):
"""Return True if the suite is stalled.
A suite is stalled when:
* It is not held.
* It has no active tasks.
* It has waiting tasks with unmet prerequisites
(ignoring clock triggers).
"""
if self.is_held:
return False
can_be_stalled = False
for itask in self.get_tasks():
if (
self.stop_point
and itask.point > self.stop_point
or itask.state(
TASK_STATUS_SUCCEEDED,
TASK_STATUS_EXPIRED,
)
):
# Ignore: Task beyond stop point.
# Ignore: Succeeded and expired tasks.
continue
if itask.state(*TASK_STATUSES_NOT_STALLED):
# Pool contains active tasks (or held active tasks)
# Return "not stalled" immediately.
return False
if (
itask.state(TASK_STATUS_WAITING)
and itask.state.prerequisites_all_satisfied()
):
# Waiting tasks with all prerequisites satisfied,
# probably waiting for clock trigger only.
# This task can be considered active.
# Return "not stalled" immediately.
return False
# We should be left with (submission) failed tasks and
# waiting tasks with unsatisfied prerequisites.
can_be_stalled = True
return can_be_stalled
def report_stalled_task_deps(self):
"""Log unmet dependencies on stalled."""
prereqs_map = {}
for itask in self.get_tasks():
if (
itask.state(TASK_STATUS_WAITING)
and itask.state.prerequisites_are_not_all_satisfied()
):
prereqs_map[itask.identity] = []
for prereq_str, is_met in itask.state.prerequisites_dump():
if not is_met:
prereqs_map[itask.identity].append(prereq_str)
# prune tree to ignore items that are elsewhere in it
for id_, prereqs in list(prereqs_map.copy().items()):
for prereq in prereqs:
prereq_strs = prereq.split()
if prereq_strs[0] == "LABEL:":
unsatisfied_id = prereq_strs[3]
elif prereq_strs[0] == "CONDITION:":
continue
else:
unsatisfied_id = prereq_strs[0]
# Clear out tasks with dependencies on other waiting tasks
if unsatisfied_id in prereqs_map:
del prereqs_map[id_]
break
for id_, prereqs in prereqs_map.items():
LOG.warning("Unmet prerequisites for %s:" % id_)
for prereq in prereqs:
LOG.warning(" * %s" % prereq)
def set_hold_point(self, point):
"""Set the point after which tasks must be held."""
self.hold_point = point
if point is not None:
for itask in self.get_all_tasks():
if itask.point > point:
itask.state.reset(is_held=True)
def hold_tasks(self, items):
"""Hold tasks with IDs matching any item in "ids"."""
itasks, bad_items = self.filter_task_proxies(items)
for itask in itasks:
itask.state.reset(is_held=True)
return len(bad_items)
def release_tasks(self, items):
"""Release held tasks with IDs matching any item in "ids"."""
itasks, bad_items = self.filter_task_proxies(items)
for itask in itasks:
itask.state.reset(is_held=False)
return len(bad_items)
def hold_all_tasks(self):
"""Hold all tasks."""
LOG.info("Holding all waiting or queued tasks now")
self.is_held = True
for itask in self.get_all_tasks():
itask.state.reset(is_held=True)
def release_all_tasks(self):
"""Release all held tasks."""
self.is_held = False
self.release_tasks(None)
def check_abort_on_task_fails(self):
"""Check whether suite should abort on task failure.
Return True if a task failed and `abort if any task fails` is set.
"""
return self.abort_task_failed
def spawn_on_output(self, itask, output):
"""Spawn and update children, remove finished tasks.
        Also set the abort-on-task-failed flag if necessary.
If not itask.reflow update existing children but don't spawn them.
If an absolute output is completed update the store of completed abs
outputs, and update the prerequisites of every instance of the child
in the pool. (And in self.spawn() use the store of completed abs
outputs to satisfy any tasks with abs prerequisites).
"""
if output == TASK_OUTPUT_FAILED:
if (self.expected_failed_tasks is not None
and itask.identity not in self.expected_failed_tasks):
self.abort_task_failed = True
try:
children = itask.graph_children[output]
except KeyError:
# No children depend on this output
children = []
suicide = []
for c_name, c_point, is_abs in children:
if is_abs:
self.abs_outputs_done.add((itask.tdef.name,
str(itask.point), output))
self.suite_db_mgr.put_insert_abs_output(
str(itask.point), itask.tdef.name, output)
self.suite_db_mgr.process_queued_ops()
if itask.reflow:
c_task = self.get_or_spawn_task(
c_name, c_point, flow_label=itask.flow_label,
parent_id=itask.identity)
else:
# Don't spawn, but update existing children.
c_task = self.get_task(c_name, c_point)
if c_task is not None:
# Update downstream prerequisites directly.
if is_abs:
tasks, _ = self.filter_task_proxies([c_name])
else:
tasks = [c_task]
for t in tasks:
t.state.satisfy_me(
set([(itask.tdef.name, str(itask.point), output)]))
# Event-driven suicide.
if (c_task.state.suicide_prerequisites and
c_task.state.suicide_prerequisites_all_satisfied()):
suicide.append(c_task)
# TODO event-driven submit: check if prereqs are satisfied now.
for c_task in suicide:
if c_task.state(
TASK_STATUS_READY,
TASK_STATUS_SUBMITTED,
TASK_STATUS_RUNNING,
is_held=False):
LOG.warning(f'[{c_task}] -suiciding while active')
self.remove(c_task, 'SUICIDE')
# Remove the parent task if finished.
if (output in [TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_EXPIRED]
or output == TASK_OUTPUT_FAILED and itask.failure_handled):
if itask.identity == self.stop_task_id:
self.stop_task_finished = True
self.remove(itask, 'finished')
def get_or_spawn_task(self, name, point, flow_label=None, reflow=True,
parent_id=None):
"""Return existing or spawned task, or None."""
return (self.get_task(name, point, flow_label)
or self.spawn_task(name, point, flow_label, reflow, parent_id))
def merge_flow_labels(self, itask, flab2):
"""Merge flab2 into itask's flow label and update DB."""
# TODO can we do a more minimal (flow-label only) update of the
# existing row? (flow label is a primary key so need new insert).
# ? self.suite_db_mgr.put_update_task_state(itask)
if flab2 is None or flab2 == itask.flow_label:
return
itask.flow_label = self.flow_label_mgr.merge_labels(
itask.flow_label, flab2)
self.suite_db_mgr.put_insert_task_states(itask, {
"status": itask.state.status,
"flow_label": itask.flow_label})
self.suite_db_mgr.process_queued_ops() # TODO is this needed here?
LOG.info('%s merged flow(%s)', itask.identity, itask.flow_label)
def get_task(self, name, point, flow_label=None):
"""Return existing task proxy and merge flow label if found."""
itask = self.get_task_by_id(TaskID.get(name, point))
if itask is None:
LOG.debug('Task %s.%s not found in task pool.', name, point)
return None
self.merge_flow_labels(itask, flow_label)
return itask
def can_spawn(self, name, point):
"""Return True if name.point is within various suite limits."""
if name not in self.config.get_task_name_list():
LOG.debug('No task definition %s', name)
return False
# Don't spawn outside of graph limits.
# TODO: is it possible for initial_point to not be defined??
# (see also the similar check + log message in scheduler.py)
if self.config.initial_point and point < self.config.initial_point:
# Attempted manual trigger prior to FCP
# or future triggers like foo[+P1] => bar, with foo at ICP.
LOG.debug(
'Not spawning %s.%s: before initial cycle point', name, point)
return False
elif self.config.final_point and point > self.config.final_point:
# Only happens on manual trigger beyond FCP
LOG.debug(
'Not spawning %s.%s: beyond final cycle point', name, point)
return False
return True
def spawn_task(self, name, point, flow_label=None, reflow=True,
parent_id=None):
"""Spawn name.point and add to runahead pool. Return it, or None."""
if not self.can_spawn(name, point):
return None
# Get submit number by flow label {flow_label: submit_num, ...}
snums = self.suite_db_mgr.pri_dao.select_submit_nums(name, str(point))
try:
submit_num = max(snums.values())
except ValueError:
# Task never spawned in any flow.
submit_num = 0
for f_id in snums.keys():
# Flow labels of previous instances. E.g. f_id "u".
if self.flow_label_mgr.match_labels(flow_label, f_id):
# Already spawned in this flow. E.g. flow_label "uV".
# TODO update existing DB row to avoid cond reflow from V too?
LOG.warning('Not spawning %s.%s (spawned in flow %s)',
name, point, f_id)
return None
# Spawn if on-sequence and within recurrence bounds.
taskdef = self.config.get_taskdef(name)
if not taskdef.is_valid_point(point):
return None
itask = TaskProxy(
taskdef,
point, flow_label,
submit_num=submit_num, reflow=reflow)
if parent_id is not None:
msg = "(" + parent_id + ") spawned %s.%s flow(%s)"
else:
msg = "(no parent) spawned %s.%s %s"
if flow_label is None:
# Manual trigger: new flow
msg += " (new flow)"
if self.hold_point and itask.point > self.hold_point:
# Hold if beyond the suite hold point
LOG.info(
"[%s] -holding (beyond suite hold point) %s",
itask, self.hold_point)
itask.state.reset(is_held=True)
elif (self.stop_point and itask.point <= self.stop_point and
self.task_has_future_trigger_overrun(itask)):
# Record tasks waiting on a future trigger beyond the stop point.
# (We ignore these waiting tasks when considering shutdown).
LOG.info("[%s] -holding (future trigger beyond stop point)", itask)
self.stuck_future_tasks.append(itask.identity)
elif (self.is_held
and itask.state(TASK_STATUS_WAITING, is_held=False)):
# Hold newly-spawned tasks in a held suite (e.g. due to manual
# triggering of a held task).
itask.state.reset(is_held=True)
# Attempt to satisfy any absolute triggers now.
# TODO: consider doing this only for tasks with absolute prerequisites.
if itask.state.prerequisites_are_not_all_satisfied():
itask.state.satisfy_me(self.abs_outputs_done)
self.add_to_runahead_pool(itask)
LOG.info(msg, name, point, flow_label)
return itask
def match_taskdefs(self, items):
"""Return matching taskdefs valid for selected cycle points."""
n_warnings = 0
task_items = {}
for item in items:
point_str, name_str = self._parse_task_item(item)[:2]
if point_str is None:
LOG.warning(
"%s: task to spawn must have a cycle point" % (item))
n_warnings += 1
continue
try:
point_str = standardise_point_string(point_str)
except PointParsingError as exc:
LOG.warning(
self.ERR_PREFIX_TASKID_MATCH + ("%s (%s)" % (item, exc)))
n_warnings += 1
continue
taskdefs = self.config.find_taskdefs(name_str)
if not taskdefs:
LOG.warning(self.ERR_PREFIX_TASKID_MATCH + item)
n_warnings += 1
continue
point = get_point(point_str)
for taskdef in taskdefs:
if taskdef.is_valid_point(point):
task_items[(taskdef.name, point)] = taskdef
return n_warnings, task_items
def force_spawn_children(self, items, outputs):
"""Spawn downstream children of given task outputs on user command."""
if not outputs:
outputs = [TASK_OUTPUT_SUCCEEDED]
n_warnings, task_items = self.match_taskdefs(items)
for (_, point), taskdef in sorted(task_items.items()):
            # This is the upstream target task:
itask = TaskProxy(taskdef, point,
self.flow_label_mgr.get_new_label())
# Spawn downstream on selected outputs.
for trig, out, status in itask.state.outputs.get_all():
if trig in outputs:
LOG.info('Forced spawning on %s:%s', itask.identity, out)
self.spawn_on_output(itask, out)
def remove_tasks(self, items):
"""Remove tasks from the pool."""
itasks, bad_items = self.filter_task_proxies(items)
for itask in itasks:
self.remove(itask, 'request')
return len(bad_items)
def force_trigger_tasks(self, items, reflow=False):
"""Trigger matching tasks, with or without reflow."""
# TODO check reflow from existing tasks - unless unhandled fail?
n_warnings, task_items = self.match_taskdefs(items)
flow_label = self.flow_label_mgr.get_new_label()
for name, point in task_items.keys():
# Already in pool? Keep merge flow labels.
itask = self.get_task(name, point, flow_label)
if itask is None:
# Spawn with new flow label.
itask = self.spawn_task(name, point, flow_label, reflow=reflow)
if itask is not None:
# (If None, spawner reports cycle bounds errors).
itask.manual_trigger = True
itask.state.reset(TASK_STATUS_WAITING)
LOG.critical('setting %s ready to run', itask)
itask.state.set_prerequisites_all_satisfied()
return n_warnings
def sim_time_check(self, message_queue):
"""Simulation mode: simulate task run times and set states."""
sim_task_state_changed = False
now = time()
for itask in self.get_tasks():
if itask.state.status != TASK_STATUS_RUNNING:
continue
# Started time is not set on restart
if itask.summary['started_time'] is None:
itask.summary['started_time'] = now
timeout = (itask.summary['started_time'] +
itask.tdef.rtconfig['job']['simulated run length'])
if now > timeout:
conf = itask.tdef.rtconfig['simulation']
job_d = get_task_job_id(
itask.point, itask.tdef.name, itask.submit_num)
now_str = get_current_time_string()
if (itask.point in conf['fail cycle points'] and
(itask.get_try_num() == 1 or
not conf['fail try 1 only'])):
message_queue.put(
(job_d, now_str, 'CRITICAL', TASK_STATUS_FAILED))
else:
# Simulate message outputs.
for msg in itask.tdef.rtconfig['outputs'].values():
message_queue.put((job_d, now_str, 'INFO', msg))
message_queue.put(
(job_d, now_str, 'INFO', TASK_STATUS_SUCCEEDED))
sim_task_state_changed = True
return sim_task_state_changed
def set_expired_task(self, itask, now):
"""Check if task has expired. Set state and event handler if so.
Return True if task has expired.
"""
if (
not itask.state(
TASK_STATUS_WAITING,
is_held=False
)
or itask.tdef.expiration_offset is None
):
return False
if itask.expire_time is None:
itask.expire_time = (
itask.get_point_as_seconds() +
itask.get_offset_as_seconds(itask.tdef.expiration_offset))
if now > itask.expire_time:
msg = 'Task expired (skipping job).'
LOG.warning('[%s] -%s', itask, msg)
self.task_events_mgr.setup_event_handlers(itask, "expired", msg)
# TODO succeeded and expired states are useless due to immediate
# removal under all circumstances (unhandled failed is still used).
itask.state.reset(TASK_STATUS_EXPIRED, is_held=False)
self.remove(itask, 'expired')
return True
return False
def task_succeeded(self, id_):
"""Return True if task with id_ is in the succeeded state."""
for itask in self.get_tasks():
if (
itask.identity == id_
and itask.state(TASK_STATUS_SUCCEEDED)
):
return True
return False
def filter_task_proxies(self, items):
"""Return task proxies that match names, points, states in items.
Return (itasks, bad_items).
In the new form, the arguments should look like:
items -- a list of strings for matching task proxies, each with
the general form name[.point][:state] or [point/]name[:state]
where name is a glob-like pattern for matching a task name or
a family name.
"""
itasks = []
bad_items = []
if not items:
itasks += self.get_all_tasks()
else:
for item in items:
point_str, name_str, status = self._parse_task_item(item)
if point_str is None:
point_str = "*"
else:
try:
point_str = standardise_point_string(point_str)
except PointParsingError:
# point_str may be a glob
pass
tasks_found = False
for itask in self.get_all_tasks():
nss = itask.tdef.namespace_hierarchy
if (fnmatchcase(str(itask.point), point_str) and
(not status or itask.state.status == status) and
(fnmatchcase(itask.tdef.name, name_str) or
any(fnmatchcase(ns, name_str) for ns in nss))):
itasks.append(itask)
tasks_found = True
if not tasks_found:
LOG.warning(self.ERR_PREFIX_TASKID_MATCH + item)
bad_items.append(item)
return itasks, bad_items
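    # Illustrative item strings (assumed values, for example only):
    # "foo.20100101T0000Z:waiting" selects waiting instances of "foo" at that
    # cycle point, while "foo*" matches any task whose name (or family name)
    # glob-matches "foo*" at any point and in any state.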
def stop_flow(self, flow_label):
"""Stop a particular flow from spawning any further."""
        # Stop tasks belonging to flow_label from continuing.
for itask in self.get_all_tasks():
# Don't use match_label(); we don't want to stop merged flows.
if itask.flow_label == flow_label:
itask.reflow = False
def prune_flow_labels(self):
"""Remove redundant flow labels.
Note this iterates the task pool twice but it can be called
infrequently and doesn't do anything if there is only one flow.
"""
if self.flow_label_mgr.get_num_inuse() == 1:
# Nothing to do.
return
# Gather all current labels.
labels = []
for itask in self.get_all_tasks():
labels.append(itask.flow_label)
# Find any labels common to all tasks.
common = self.flow_label_mgr.get_common_labels(labels)
# And prune them back to just one.
num = len(list(common))
if num <= 1:
return
LOG.debug('Pruning redundant flow labels: %s', common)
to_prune = []
while num > 1:
to_prune.append(common.pop())
num -= 1
for itask in self.get_all_tasks():
itask.flow_label = self.flow_label_mgr.unmerge_labels(
to_prune, itask.flow_label)
self.flow_label_mgr.make_avail(to_prune)
@staticmethod
def _parse_task_item(item):
"""Parse point/name:state or name.point:state syntax."""
if ":" in item:
head, state_str = item.rsplit(":", 1)
else:
head, state_str = (item, None)
if "/" in head:
point_str, name_str = head.split("/", 1)
elif "." in head:
name_str, point_str = head.split(".", 1)
else:
name_str, point_str = (head, None)
return (point_str, name_str, state_str)
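    # Example parses handled by _parse_task_item above (a sketch):
    #   "2021/foo:succeeded" -> ("2021", "foo", "succeeded")
    #   "foo.2021"           -> ("2021", "foo", None)
    #   "foo"                -> (None, "foo", None)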
|
gpl-3.0
| -4,525,893,904,254,624,000 | 39.733793 | 79 | 0.552993 | false |
sadmansk/servo
|
tests/wpt/web-platform-tests/tools/wpt/install.py
|
1
|
3192
|
import argparse
import browser
import sys
latest_channels = {
'firefox': 'nightly',
'chrome': 'dev',
'safari': 'preview',
'safari_webdriver': 'preview',
'servo': 'nightly'
}
channel_by_name = {
'stable': 'stable',
'release': 'stable',
'beta': 'beta',
'nightly': latest_channels,
'dev': latest_channels,
'preview': latest_channels,
'experimental': latest_channels,
}
def get_parser():
parser = argparse.ArgumentParser(description="""Install a given browser or webdriver frontend.
For convenience the release channel of the browser accepts various spellings,
but we actually support at most three variants; whatever the latest development
release is (e.g. Firefox nightly or Chrome dev), the latest beta release, and
the most recent stable release.""")
parser.add_argument('browser', choices=['firefox', 'chrome', 'servo'],
help='name of web browser product')
parser.add_argument('component', choices=['browser', 'webdriver'],
help='name of component')
parser.add_argument('--channel', choices=channel_by_name.keys(),
default="nightly", help='Name of browser release channel. '
'"stable" and "release" are synonyms for the latest browser stable release,'
'"nightly", "dev", "experimental", and "preview" are all synonyms for '
'the latest available development release. For WebDriver installs, '
'we attempt to select an appropriate, compatible, version for the '
'latest browser release on the selected channel.')
parser.add_argument('-d', '--destination',
help='filesystem directory to place the component')
return parser
def get_channel(browser, channel):
channel = channel_by_name[channel]
if isinstance(channel, dict):
channel = channel.get(browser)
return channel
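# For example (following the tables above): get_channel("firefox",
# "experimental") resolves to "nightly", while get_channel("chrome", "stable")
# stays "stable".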
def run(venv, **kwargs):
browser = kwargs["browser"]
destination = kwargs["destination"]
channel = get_channel(browser, kwargs["channel"])
if channel != kwargs["channel"]:
print "Interpreting channel '%s' as '%s'" % (kwargs["channel"],
channel)
if destination is None:
if venv:
if kwargs["component"] == "browser":
destination = venv.path
else:
destination = venv.bin_path
else:
raise argparse.ArgumentError(None,
"No --destination argument, and no default for the environment")
install(browser, kwargs["component"], destination, channel)
def install(name, component, destination, channel="nightly"):
if component == 'webdriver':
method = 'install_webdriver'
else:
method = 'install'
subclass = getattr(browser, name.title())
sys.stdout.write('Now installing %s %s...\n' % (name, component))
path = getattr(subclass(), method)(dest=destination, channel=channel)
if path:
sys.stdout.write('Binary installed as %s\n' % (path,))
|
mpl-2.0
| 281,753,135,392,213,300 | 35.689655 | 105 | 0.602757 | false |
its-dirg/saml-metadata-upload
|
src/metadata_upload/service.py
|
1
|
1421
|
import os
import random
import string
from flask_transfer.transfer import Transfer
from flask_transfer.validators import AllowedExts
from flask_wtf.file import FileField
from flask_wtf.file import FileRequired, FileAllowed
from flask_wtf.form import Form
from werkzeug.utils import secure_filename
from wtforms.fields.simple import SubmitField
from metadata_upload.validation import SAMLMetadataValidator
ALLOWED_EXTENSIONS = ['xml']
class SAMLMetadataUploadForm(Form):
uploaded_file = FileField(label='Upload metadata',
validators=[
FileRequired(),
FileAllowed(ALLOWED_EXTENSIONS,
'Only SAML Metadata (.xml) allowed.')
])
submit = SubmitField(label='Upload')
def random_string(n=16):
return ''.join(
random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(n))
def save_with_sanitized_filename(filehandle, upload_directory):
fullpath = os.path.join(upload_directory, secure_filename(filehandle.filename))
filehandle.save(fullpath)
SAMLMetadataDocuments = AllowedExts(*ALLOWED_EXTENSIONS)
SAMLMetadataUpload = Transfer(validators=[SAMLMetadataDocuments],
destination=save_with_sanitized_filename)
SAMLMetadataUpload.validator(SAMLMetadataValidator())
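# Usage sketch (assumed wiring, not part of this module): a Flask view would
# validate SAMLMetadataUploadForm on POST, then hand form.uploaded_file.data
# and an upload directory to SAMLMetadataUpload, which applies the extension
# and SAML metadata validators before saving via save_with_sanitized_filename.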
|
apache-2.0
| -3,014,370,430,212,958,000 | 33.658537 | 95 | 0.681914 | false |
carltongibson/django-staticsite
|
staticsite/management/commands/buildstaticsite.py
|
1
|
3354
|
# -*- coding: utf-8 -*-
import os
import codecs
from itertools import chain
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.template import Template
from django.template.loader import get_template
from django.test import RequestFactory
from staticsite.views import CONTEXT_MAP
### Notes
# Expects `staticsite` folder to exist in `settings.TEMPLATE_DIRS`
# This COULD use Template Loaders.
#
# Uses `settings.STATICSITE_OUTPUT_DIR` for write location.
# MAYBE make this a command line argument.
#
# TODO: Set STATIC_URL context variable from settings.
# TODO: Render the view here. (Take advantage of get_context_data)
# — Test that output is correct.
# TODO: Cache templates so we only write changed files.
# - c.p. django.contrib.staticfiles collectstatic
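# Settings sketch assumed by this command (illustrative values only):
#   STATICSITE_OUTPUT_DIR = "/var/www/staticsite"
#   TEMPLATES = [{"BACKEND": "django.template.backends.django.DjangoTemplates",
#                 "DIRS": ["/path/to/templates"], ...}]
# with a "staticsite" subdirectory inside one of the TEMPLATES "DIRS" entries.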
class Command(BaseCommand):
help = 'Writes (HTML) files for templates in staticsite template dir.'
def handle(self, *args, **options):
staticsite_dir = self._staticsite_dir()
output_dir = settings.STATICSITE_OUTPUT_DIR
factory = RequestFactory()
for dirpath, subdirs, filenames in os.walk(staticsite_dir):
for name in filenames:
if name.startswith('.'):
continue # Put this in for OS X's .DS_Store files.
# TODO: Think about this.
# get template and render
template_path = os.path.join(dirpath, name)
request_path = "/" + os.path.relpath(template_path, staticsite_dir)
print("Loading template at: %s with request_path: %s" % (template_path, request_path))
t = get_template(template_path)
# Get context.
request = factory.get(request_path)
context = {
'STATIC_URL': '/static/',
'csrf_token': 'NOTPROVIDED',
'request': request,
}
context_map_key = template_path.replace(staticsite_dir, '').lstrip('/')
if context_map_key in CONTEXT_MAP:
context.update(CONTEXT_MAP[context_map_key])
html = t.render(context)
# and write
write_dir = dirpath.replace(staticsite_dir, output_dir, 1)
if not os.path.exists(write_dir):
os.makedirs(write_dir)
write_path = os.path.join(write_dir, name)
write_file = codecs.open(write_path, encoding='utf-8', mode='w')
write_file.write(html)
write_file.close()
print("Wrote: %s" % write_path)
def _staticsite_dir(self):
dirs = chain.from_iterable(i["DIRS"] for i in settings.TEMPLATES)
for template_dir in dirs:
for path in os.listdir(template_dir):
if path == 'staticsite':
staticsite_dir = os.path.join(template_dir, path)
self.stdout.write('Building staticsite from %s' % staticsite_dir)
return staticsite_dir
raise CommandError('staticsite dir not found in settings.TEMPLATE_DIRS')
|
bsd-2-clause
| 7,664,765,751,399,810,000 | 38.904762 | 106 | 0.568616 | false |
oriel-hub/api
|
django/idsapi/openapi/permissions.py
|
1
|
1176
|
from rest_framework.throttling import UserRateThrottle
from rest_framework.exceptions import PermissionDenied
from django.conf import settings
from django.contrib.sites.models import Site
class PerUserThrottlingRatePerGroup(UserRateThrottle):
def allow_request(self, request, view):
"""
Implement the check to see if the request should be throttled.
On success calls `throttle_success`.
On failure calls `throttle_failure`.
"""
profile = request.user.userprofile
try:
user_rate = settings.USER_LEVEL_INFO[profile.user_level]['max_call_rate']
except KeyError:
domain = Site.objects.all()[0].domain
# The user has not completed registration
raise PermissionDenied(detail=(
                'You must complete registration before using the API. '
'Please visit http://%s/profiles/edit/ and complete your registration.' % domain
)
)
self.rate = user_rate
self.num_requests, self.duration = self.parse_rate(self.rate)
return super(UserRateThrottle, self).allow_request(request, view)
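    # Illustrative shape of the setting consumed above (keys and values are
    # assumptions, not taken from the project's settings):
    #   USER_LEVEL_INFO = {"General User": {"max_call_rate": "150/hour"}}
    # DRF throttle rates use the "<requests>/<period>" form, e.g. "150/hour".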
|
gpl-2.0
| -4,485,740,001,450,202,600 | 38.2 | 100 | 0.651361 | false |
Hwesta/advent-of-code
|
aoc2016/day5.py
|
1
|
4067
|
#!/usr/bin/env python
"""
--- Day 5: How About a Nice Game of Chess? ---
You are faced with a security door designed by Easter Bunny engineers that seem to have acquired most of their security knowledge by watching hacking movies.
The eight-character password for the door is generated one character at a time by finding the MD5 hash of some Door ID (your puzzle input) and an increasing integer index (starting with 0).
A hash indicates the next character in the password if its hexadecimal representation starts with five zeroes. If it does, the sixth character in the hash is the next character of the password.
For example, if the Door ID is abc:
The first index which produces a hash that starts with five zeroes is 3231929, which we find by hashing abc3231929; the sixth character of the hash, and thus the first character of the password, is 1.
5017308 produces the next interesting hash, which starts with 000008f82..., so the second character of the password is 8.
The third time a hash starts with five zeroes is for abc5278568, discovering the character f.
In this example, after continuing this search a total of eight times, the password is 18f47a30.
Given the actual Door ID, what is the password?
--- Part Two ---
As the door slides open, you are presented with a second door that uses a slightly more inspired security mechanism. Clearly unimpressed by the last version (in what movie is the password decrypted in order?!), the Easter Bunny engineers have worked out a better solution.
Instead of simply filling in the password from left to right, the hash now also indicates the position within the password to fill. You still look for hashes that begin with five zeroes; however, now, the sixth character represents the position (0-7), and the seventh character is the character to put in that position.
A hash result of 000001f means that f is the second character in the password. Use only the first result for each position, and ignore invalid positions.
For example, if the Door ID is abc:
The first interesting hash is from abc3231929, which produces 0000015...; so, 5 goes in position 1: _5______.
In the previous method, 5017308 produced an interesting hash; however, it is ignored, because it specifies an invalid position (8).
The second interesting hash is at index 5357525, which produces 000004e...; so, e goes in position 4: _5__e___.
You almost choke on your popcorn as the final character falls into place, producing the password 05ace8e3.
Given the actual Door ID and this new method, what is the password? Be extra proud of your solution if it uses a cinematic "decrypting" animation.
"""
from __future__ import print_function
import hashlib
import os
def solve(data):
secret_key = data
starts_with = '00000'
start = 0
password1 = ''
password2 = [None] * 8
print('secret', secret_key)
digest = hashlib.md5()
digest.update(secret_key.encode('utf8'))
while True:
m = digest.copy()
m.update(str(start).encode('utf8'))
if m.hexdigest().startswith(starts_with):
print('found hex', m.hexdigest())
# Part 1
if len(password1) < 8:
password1 += m.hexdigest()[5]
print('password1', password1, len(password1))
# Part 2
index = int(m.hexdigest()[5], 16)
value = m.hexdigest()[6]
print('idx', index, 'val', value)
if index < 8 and password2[index] is None:
password2[index] = value
print('password2', password2)
if len(password1) == 8 and password2.count(None) == 0:
break
start += 1
print('total hashes', start)
return password1, ''.join(password2)
if __name__ == '__main__':
this_dir = os.path.dirname(__file__)
with open(os.path.join(this_dir, 'day5.input')) as f:
data = f.read().strip()
password1, password2 = solve(data)
print('The first password is', password1)
print('The second password is', password2)
|
mit
| -2,213,071,398,755,253,800 | 45.215909 | 319 | 0.695107 | false |
dims/neutron
|
neutron/tests/unit/scheduler/test_l3_agent_scheduler.py
|
1
|
76578
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import contextlib
import datetime
import uuid
import mock
from oslo_config import cfg
from oslo_utils import importutils
from oslo_utils import timeutils
import testscenarios
from neutron.common import constants
from neutron import context as n_context
from neutron.db import agents_db
from neutron.db import common_db_mixin
from neutron.db import db_base_plugin_v2 as db_v2
from neutron.db import l3_agentschedulers_db
from neutron.db import l3_db
from neutron.db import l3_dvr_ha_scheduler_db
from neutron.db import l3_dvrscheduler_db
from neutron.db import l3_hamode_db
from neutron.db import l3_hascheduler_db
from neutron.extensions import l3_ext_ha_mode as l3_ha
from neutron.extensions import l3agentscheduler as l3agent
from neutron.extensions import portbindings
from neutron import manager
from neutron.scheduler import l3_agent_scheduler
from neutron.tests import base
from neutron.tests.common import helpers
from neutron.tests.unit.db import test_db_base_plugin_v2
from neutron.tests.unit.extensions import test_l3
from neutron.tests.unit import testlib_api
# the below code is required for the following reason
# (as documented in testscenarios)
"""Multiply tests depending on their 'scenarios' attribute.
This can be assigned to 'load_tests' in any test module to make this
automatically work across tests in the module.
"""
load_tests = testscenarios.load_tests_apply_scenarios
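# A hypothetical sketch (not part of the original module) of what the
# load_tests hook above picks up: any test case in this module that defines a
# "scenarios" attribute is run once per scenario, with each scenario's dict
# applied as instance attributes. Shape only, names are illustrative:
#
#     class ScenarioSketch(base.BaseTestCase):
#         scenarios = [('case_a', dict(value=1)), ('case_b', dict(value=2))]
#
#         def test_value_is_known(self):
#             self.assertIn(self.value, (1, 2))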
HOST_DVR = 'my_l3_host_dvr'
HOST_DVR_SNAT = 'my_l3_host_dvr_snat'
DEVICE_OWNER_COMPUTE = constants.DEVICE_OWNER_COMPUTE_PREFIX + 'fake'
DEVICE_OWNER_COMPUTE_NOVA = constants.DEVICE_OWNER_COMPUTE_PREFIX + 'nova'
class FakeL3Scheduler(l3_agent_scheduler.L3Scheduler):
def schedule(self):
pass
def _choose_router_agent(self):
pass
def _choose_router_agents_for_ha(self):
pass
class FakePortDB(object):
def __init__(self, port_list):
self._port_list = port_list
def _get_query_answer(self, port_list, filters):
answers = []
for port in port_list:
matched = True
for key, search_values in filters.items():
port_value = port.get(key, None)
if not port_value:
matched = False
break
if isinstance(port_value, list):
sub_answers = self._get_query_answer(port_value,
search_values)
matched = len(sub_answers) > 0
else:
matched = port_value in search_values
if not matched:
break
if matched:
answers.append(port)
return answers
def get_port(self, context, port_id):
for port in self._port_list:
if port['id'] == port_id:
if port['tenant_id'] == context.tenant_id or context.is_admin:
return port
break
return None
def get_ports(self, context, filters=None):
query_filters = dict()
if filters:
query_filters.update(filters)
if not context.is_admin:
query_filters['tenant_id'] = [context.tenant_id]
result = self._get_query_answer(self._port_list, query_filters)
return result
class L3SchedulerBaseTestCase(base.BaseTestCase):
def setUp(self):
super(L3SchedulerBaseTestCase, self).setUp()
self.scheduler = FakeL3Scheduler()
self.plugin = mock.Mock()
def test_auto_schedule_routers(self):
self.plugin.get_enabled_agent_on_host.return_value = [mock.ANY]
with mock.patch.object(self.scheduler,
'_get_routers_to_schedule') as gs,\
mock.patch.object(self.scheduler,
'_get_routers_can_schedule') as gr:
result = self.scheduler.auto_schedule_routers(
self.plugin, mock.ANY, mock.ANY, mock.ANY)
self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
self.assertTrue(result)
self.assertTrue(gs.called)
self.assertTrue(gr.called)
def test_auto_schedule_routers_no_agents(self):
self.plugin.get_enabled_agent_on_host.return_value = None
result = self.scheduler.auto_schedule_routers(
self.plugin, mock.ANY, mock.ANY, mock.ANY)
self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
self.assertFalse(result)
def test_auto_schedule_routers_no_unscheduled_routers(self):
type(self.plugin).supported_extension_aliases = (
mock.PropertyMock(return_value=[]))
with mock.patch.object(self.scheduler,
'_get_routers_to_schedule') as mock_routers:
mock_routers.return_value = []
result = self.scheduler.auto_schedule_routers(
self.plugin, mock.ANY, mock.ANY, mock.ANY)
self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
self.assertFalse(result)
def test_auto_schedule_routers_no_target_routers(self):
self.plugin.get_enabled_agent_on_host.return_value = [mock.ANY]
with mock.patch.object(
self.scheduler,
'_get_routers_to_schedule') as mock_unscheduled_routers,\
mock.patch.object(
self.scheduler,
'_get_routers_can_schedule') as mock_target_routers:
mock_unscheduled_routers.return_value = mock.ANY
mock_target_routers.return_value = None
result = self.scheduler.auto_schedule_routers(
self.plugin, mock.ANY, mock.ANY, mock.ANY)
self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
self.assertFalse(result)
def test__get_routers_to_schedule_with_router_ids(self):
router_ids = ['foo_router_1', 'foo_router_2']
expected_routers = [
{'id': 'foo_router1'}, {'id': 'foo_router_2'}
]
self.plugin.get_routers.return_value = expected_routers
with mock.patch.object(self.scheduler,
'_filter_unscheduled_routers') as mock_filter:
mock_filter.return_value = expected_routers
unscheduled_routers = self.scheduler._get_routers_to_schedule(
mock.ANY, self.plugin, router_ids)
mock_filter.assert_called_once_with(
mock.ANY, self.plugin, expected_routers)
self.assertEqual(expected_routers, unscheduled_routers)
def test__get_routers_to_schedule_without_router_ids(self):
expected_routers = [
{'id': 'foo_router1'}, {'id': 'foo_router_2'}
]
with mock.patch.object(self.scheduler,
'_get_unscheduled_routers') as mock_get:
mock_get.return_value = expected_routers
unscheduled_routers = self.scheduler._get_routers_to_schedule(
mock.ANY, self.plugin)
mock_get.assert_called_once_with(mock.ANY, self.plugin)
self.assertEqual(expected_routers, unscheduled_routers)
def _test__get_routers_can_schedule(self, routers, agent, target_routers):
self.plugin.get_l3_agent_candidates.return_value = agent
result = self.scheduler._get_routers_can_schedule(
mock.ANY, self.plugin, routers, mock.ANY)
self.assertEqual(target_routers, result)
def _test__filter_unscheduled_routers(self, routers, agents, expected):
self.plugin.get_l3_agents_hosting_routers.return_value = agents
unscheduled_routers = self.scheduler._filter_unscheduled_routers(
mock.ANY, self.plugin, routers)
self.assertEqual(expected, unscheduled_routers)
def test__filter_unscheduled_routers_already_scheduled(self):
self._test__filter_unscheduled_routers(
[{'id': 'foo_router1'}, {'id': 'foo_router_2'}],
[{'id': 'foo_agent_id'}], [])
def test__filter_unscheduled_routers_non_scheduled(self):
self._test__filter_unscheduled_routers(
[{'id': 'foo_router1'}, {'id': 'foo_router_2'}],
None, [{'id': 'foo_router1'}, {'id': 'foo_router_2'}])
def test__get_routers_can_schedule_with_compat_agent(self):
routers = [{'id': 'foo_router'}]
self._test__get_routers_can_schedule(routers, mock.ANY, routers)
def test__get_routers_can_schedule_with_no_compat_agent(self):
routers = [{'id': 'foo_router'}]
self._test__get_routers_can_schedule(routers, None, [])
def test__bind_routers_centralized(self):
routers = [{'id': 'foo_router'}]
with mock.patch.object(self.scheduler, 'bind_router') as mock_bind:
self.scheduler._bind_routers(mock.ANY, mock.ANY, routers, mock.ANY)
mock_bind.assert_called_once_with(mock.ANY, 'foo_router', mock.ANY)
def _test__bind_routers_ha(self, has_binding):
routers = [{'id': 'foo_router', 'ha': True, 'tenant_id': '42'}]
agent = agents_db.Agent(id='foo_agent')
with mock.patch.object(self.scheduler,
'_router_has_binding',
return_value=has_binding) as mock_has_binding,\
mock.patch.object(self.scheduler,
'create_ha_port_and_bind') as mock_bind:
self.scheduler._bind_routers(mock.ANY, mock.ANY, routers, agent)
mock_has_binding.assert_called_once_with(mock.ANY, 'foo_router',
'foo_agent')
self.assertEqual(not has_binding, mock_bind.called)
def test__bind_routers_ha_has_binding(self):
self._test__bind_routers_ha(has_binding=True)
def test__bind_routers_ha_no_binding(self):
self._test__bind_routers_ha(has_binding=False)
def test__get_candidates_iterable_on_early_returns(self):
plugin = mock.MagicMock()
# non-distributed router already hosted
plugin.get_l3_agents_hosting_routers.return_value = [{'id': 'a1'}]
router = {'distributed': False, 'id': 'falafel'}
iter(self.scheduler._get_candidates(plugin, mock.MagicMock(), router))
# distributed router but no agents
router['distributed'] = True
plugin.get_l3_agents.return_value = []
iter(self.scheduler._get_candidates(plugin, mock.MagicMock(), router))
self.assertFalse(plugin.get_l3_agent_candidates.called)
def test__get_candidates_skips_get_l3_agent_candidates_if_dvr_scheduled(
self):
plugin = mock.MagicMock()
# distributed router already hosted
plugin.get_l3_agents_hosting_routers.return_value = [{'id': 'a1'}]
router = {'distributed': True, 'id': str(uuid.uuid4())}
plugin.get_l3_agents.return_value = ['a1']
self.scheduler._get_candidates(plugin, mock.MagicMock(), router)
self.assertFalse(plugin.get_l3_agent_candidates.called)
class L3SchedulerBaseMixin(object):
def _register_l3_agents(self, plugin=None):
self.agent1 = helpers.register_l3_agent(
'host_1', constants.L3_AGENT_MODE_LEGACY)
self.agent_id1 = self.agent1.id
self.agent2 = helpers.register_l3_agent(
'host_2', constants.L3_AGENT_MODE_LEGACY)
self.agent_id2 = self.agent2.id
def _register_l3_dvr_agents(self):
self.l3_dvr_agent = helpers.register_l3_agent(
HOST_DVR, constants.L3_AGENT_MODE_DVR)
self.l3_dvr_agent_id = self.l3_dvr_agent.id
self.l3_dvr_snat_agent = helpers.register_l3_agent(
HOST_DVR_SNAT, constants.L3_AGENT_MODE_DVR_SNAT)
self.l3_dvr_snat_id = self.l3_dvr_snat_agent.id
def _set_l3_agent_admin_state(self, context, agent_id, state=True):
update = {'agent': {'admin_state_up': state}}
self.plugin.update_agent(context, agent_id, update)
def _set_l3_agent_dead(self, agent_id):
update = {
'agent': {
'heartbeat_timestamp':
timeutils.utcnow() - datetime.timedelta(hours=1)}}
self.plugin.update_agent(self.adminContext, agent_id, update)
@contextlib.contextmanager
def router_with_ext_gw(self, name='router1', admin_state_up=True,
fmt=None, tenant_id=str(uuid.uuid4()),
external_gateway_info=None,
subnet=None, set_context=False,
**kwargs):
router = self._make_router(fmt or self.fmt, tenant_id, name,
admin_state_up, external_gateway_info,
set_context, **kwargs)
self._add_external_gateway_to_router(
router['router']['id'],
subnet['subnet']['network_id'])
yield router
self._remove_external_gateway_from_router(
router['router']['id'], subnet['subnet']['network_id'])
self._delete('routers', router['router']['id'])
class L3SchedulerTestBaseMixin(object):
def _test_add_router_to_l3_agent(self,
distributed=False,
already_scheduled=False,
external_gw=None):
agent_id = self.agent_id1
agent = self.agent1
if distributed:
self._register_l3_dvr_agents()
agent_id = self.l3_dvr_snat_id
agent = self.l3_dvr_snat_agent
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r1')
router['router']['distributed'] = distributed
router['router']['external_gateway_info'] = external_gw
if already_scheduled:
self._test_schedule_bind_router(agent, router)
with mock.patch.object(self, "validate_agent_router_combination"),\
mock.patch.object(self,
"create_router_to_agent_binding") as auto_s,\
mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
return_value=router['router']):
self.add_router_to_l3_agent(self.adminContext, agent_id,
router['router']['id'])
self.assertNotEqual(already_scheduled, auto_s.called)
def test__unbind_router_removes_binding(self):
agent_id = self.agent_id1
agent = self.agent1
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r1')
self._test_schedule_bind_router(agent, router)
self._unbind_router(self.adminContext,
router['router']['id'],
agent_id)
bindings = self._get_l3_bindings_hosting_routers(
self.adminContext, [router['router']['id']])
self.assertEqual(0, len(bindings))
def _create_router_for_l3_agent_dvr_test(self,
distributed=False,
external_gw=None):
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r1')
router['router']['distributed'] = distributed
router['router']['external_gateway_info'] = external_gw
return router
def _prepare_l3_agent_dvr_move_exceptions(self,
distributed=False,
external_gw=None,
agent_id=None,
expected_exception=None):
router = self._create_router_for_l3_agent_dvr_test(
distributed=distributed, external_gw=external_gw)
with mock.patch.object(self, "create_router_to_agent_binding"),\
mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
return_value=router['router']):
self.assertRaises(expected_exception,
self.add_router_to_l3_agent,
self.adminContext, agent_id,
router['router']['id'])
def test_add_router_to_l3_agent_mismatch_error_dvr_to_legacy(self):
self._register_l3_agents()
self._prepare_l3_agent_dvr_move_exceptions(
distributed=True,
agent_id=self.agent_id1,
expected_exception=l3agent.RouterL3AgentMismatch)
def test_add_router_to_l3_agent_mismatch_error_legacy_to_dvr(self):
self._register_l3_dvr_agents()
self._prepare_l3_agent_dvr_move_exceptions(
agent_id=self.l3_dvr_agent_id,
expected_exception=l3agent.DVRL3CannotAssignToDvrAgent)
def test_add_router_to_l3_agent_mismatch_error_dvr_to_dvr(self):
self._register_l3_dvr_agents()
self._prepare_l3_agent_dvr_move_exceptions(
distributed=True,
agent_id=self.l3_dvr_agent_id,
expected_exception=l3agent.DVRL3CannotAssignToDvrAgent)
def test_add_router_to_l3_agent_dvr_to_snat(self):
external_gw_info = {
"network_id": str(uuid.uuid4()),
"enable_snat": True
}
self._register_l3_dvr_agents()
agent_id = self.l3_dvr_snat_id
router = self._create_router_for_l3_agent_dvr_test(
distributed=True,
external_gw=external_gw_info)
with mock.patch.object(self, "validate_agent_router_combination"),\
mock.patch.object(
self,
"create_router_to_agent_binding") as rtr_agent_binding,\
mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
return_value=router['router']):
self.add_router_to_l3_agent(self.adminContext, agent_id,
router['router']['id'])
rtr_agent_binding.assert_called_once_with(
self.adminContext, mock.ANY, router['router'])
def test_add_router_to_l3_agent(self):
self._test_add_router_to_l3_agent()
def test_add_distributed_router_to_l3_agent(self):
external_gw_info = {
"network_id": str(uuid.uuid4()),
"enable_snat": True
}
self._test_add_router_to_l3_agent(distributed=True,
external_gw=external_gw_info)
def test_add_router_to_l3_agent_already_scheduled(self):
self._test_add_router_to_l3_agent(already_scheduled=True)
def test_add_distributed_router_to_l3_agent_already_scheduled(self):
external_gw_info = {
"network_id": str(uuid.uuid4()),
"enable_snat": True
}
self._test_add_router_to_l3_agent(distributed=True,
already_scheduled=True,
external_gw=external_gw_info)
def test_remove_router_from_l3_agent_in_dvr_mode(self):
self._register_l3_dvr_agents()
self.assertRaises(l3agent.DVRL3CannotRemoveFromDvrAgent,
self.remove_router_from_l3_agent,
self.adminContext,
self.l3_dvr_agent_id,
mock.ANY)
def test_remove_router_from_l3_agent_in_dvr_snat_mode(self):
self._register_l3_dvr_agents()
router = self._create_router_for_l3_agent_dvr_test(
distributed=True)
agent_id = self.l3_dvr_snat_id
l3_notifier = mock.Mock()
self.agent_notifiers = {constants.AGENT_TYPE_L3: l3_notifier}
self.remove_router_from_l3_agent(self.adminContext, agent_id,
router['router']['id'])
l3_notifier.router_removed_from_agent.assert_called_once_with(
self.adminContext, router['router']['id'],
self.l3_dvr_snat_agent.host)
def _prepare_schedule_dvr_tests(self):
scheduler = l3_agent_scheduler.ChanceScheduler()
agent = agents_db.Agent()
agent.admin_state_up = True
agent.heartbeat_timestamp = timeutils.utcnow()
plugin = mock.Mock()
plugin.get_l3_agents_hosting_routers.return_value = []
plugin.get_l3_agents.return_value = [agent]
plugin.get_l3_agent_candidates.return_value = [agent]
return scheduler, agent, plugin
def test_schedule_dvr_router_without_snatbinding_and_no_gw(self):
scheduler, agent, plugin = self._prepare_schedule_dvr_tests()
sync_router = {
'id': 'foo_router_id',
'distributed': True
}
plugin.get_router.return_value = sync_router
with mock.patch.object(scheduler, 'bind_router'),\
mock.patch.object(plugin,
'get_snat_bindings',
return_value=False):
scheduler._schedule_router(
plugin, self.adminContext, 'foo_router_id', None)
expected_calls = [
mock.call.get_router(mock.ANY, 'foo_router_id'),
mock.call.get_l3_agents_hosting_routers(
mock.ANY, ['foo_router_id'], admin_state_up=True),
mock.call.get_l3_agents(mock.ANY, active=True),
mock.call.get_l3_agent_candidates(mock.ANY, sync_router, [agent]),
]
plugin.assert_has_calls(expected_calls)
def test_schedule_router_distributed(self):
scheduler, agent, plugin = self._prepare_schedule_dvr_tests()
sync_router = {
'id': 'foo_router_id',
'distributed': True,
'external_gateway_info': {
'network_id': str(uuid.uuid4()),
'enable_snat': True
}
}
plugin.get_router.return_value = sync_router
with mock.patch.object(scheduler, 'bind_router'):
scheduler._schedule_router(
plugin, self.adminContext, 'foo_router_id', None)
expected_calls = [
mock.call.get_router(mock.ANY, 'foo_router_id'),
mock.call.get_l3_agents_hosting_routers(
mock.ANY, ['foo_router_id'], admin_state_up=True),
mock.call.get_l3_agents(mock.ANY, active=True),
mock.call.get_l3_agent_candidates(mock.ANY, sync_router,
[agent]),
]
plugin.assert_has_calls(expected_calls)
def _test_schedule_bind_router(self, agent, router):
ctx = self.adminContext
session = ctx.session
db = l3_agentschedulers_db.RouterL3AgentBinding
scheduler = l3_agent_scheduler.ChanceScheduler()
rid = router['router']['id']
scheduler.bind_router(ctx, rid, agent)
results = (session.query(db).filter_by(router_id=rid).all())
self.assertTrue(len(results) > 0)
self.assertIn(agent.id, [bind.l3_agent_id for bind in results])
def test_bind_new_router(self):
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r1')
with mock.patch.object(l3_agent_scheduler.LOG, 'debug') as flog:
self._test_schedule_bind_router(self.agent1, router)
self.assertEqual(1, flog.call_count)
args, kwargs = flog.call_args
self.assertIn('is scheduled', args[0])
def test_bind_absent_router(self):
scheduler = l3_agent_scheduler.ChanceScheduler()
        # check that bind_router() does not raise when supplied with the
        # router_id of a non-existent router
scheduler.bind_router(self.adminContext, "dummyID", self.agent1)
def test_bind_existing_router(self):
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
self._test_schedule_bind_router(self.agent1, router)
with mock.patch.object(l3_agent_scheduler.LOG, 'debug') as flog:
self._test_schedule_bind_router(self.agent1, router)
self.assertEqual(1, flog.call_count)
args, kwargs = flog.call_args
self.assertIn('has already been scheduled', args[0])
def _check_get_l3_agent_candidates(
self, router, agent_list, exp_host, count=1):
candidates = self.get_l3_agent_candidates(self.adminContext,
router, agent_list)
self.assertEqual(count, len(candidates))
if count:
self.assertEqual(exp_host, candidates[0]['host'])
def test_get_l3_agent_candidates_legacy(self):
self._register_l3_dvr_agents()
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
router['external_gateway_info'] = None
router['id'] = str(uuid.uuid4())
agent_list = [self.agent1, self.l3_dvr_agent]
        # test legacy agent_mode case: only the legacy agent should be a candidate
router['distributed'] = False
exp_host = 'host_1'
self._check_get_l3_agent_candidates(router, agent_list, exp_host)
def test_get_l3_agent_candidates_dvr(self):
self._register_l3_dvr_agents()
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
router['external_gateway_info'] = None
router['id'] = str(uuid.uuid4())
agent_list = [self.agent1, self.l3_dvr_agent]
        # test dvr agent_mode case: no candidates are expected
router['distributed'] = True
self.get_subnet_ids_on_router = mock.Mock()
self._check_dvr_serviceable_ports_on_host = mock.Mock(
return_value=True)
self._check_get_l3_agent_candidates(router, agent_list, None, count=0)
def test_get_l3_agent_candidates_dvr_no_vms(self):
self._register_l3_dvr_agents()
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
router['external_gateway_info'] = None
router['id'] = str(uuid.uuid4())
agent_list = [self.agent1, self.l3_dvr_agent]
router['distributed'] = True
# Test no VMs present case
self.get_subnet_ids_on_router = mock.Mock()
self._check_dvr_serviceable_ports_on_host = mock.Mock(
return_value=False)
self._check_get_l3_agent_candidates(
router, agent_list, HOST_DVR, count=0)
def test_get_l3_agent_candidates_dvr_snat(self):
self._register_l3_dvr_agents()
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
router['external_gateway_info'] = None
router['id'] = str(uuid.uuid4())
router['distributed'] = True
agent_list = [self.l3_dvr_snat_agent]
self.get_subnet_ids_on_router = mock.Mock()
self._check_dvr_serviceable_ports_on_host = mock.Mock(
return_value=True)
self._check_get_l3_agent_candidates(router, agent_list, HOST_DVR_SNAT)
def test_get_l3_agent_candidates_dvr_snat_no_vms(self):
self._register_l3_dvr_agents()
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
router['external_gateway_info'] = None
router['id'] = str(uuid.uuid4())
router['distributed'] = True
agent_list = [self.l3_dvr_snat_agent]
self._check_dvr_serviceable_ports_on_host = mock.Mock(
return_value=False)
# Test no VMs present case
self.get_subnet_ids_on_router = mock.Mock()
self._check_dvr_serviceable_ports_on_host.return_value = False
self._check_get_l3_agent_candidates(
router, agent_list, HOST_DVR_SNAT, count=1)
def test_get_l3_agent_candidates_dvr_ha_snat_no_vms(self):
self._register_l3_dvr_agents()
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
router['external_gateway_info'] = None
router['id'] = str(uuid.uuid4())
router['distributed'] = True
router['ha'] = True
agent_list = [self.l3_dvr_snat_agent]
self.check_ports_exist_on_l3agent = mock.Mock(return_value=False)
# Test no VMs present case
self.check_ports_exist_on_l3agent.return_value = False
self.get_subnet_ids_on_router = mock.Mock(return_value=set())
self._check_get_l3_agent_candidates(
router, agent_list, HOST_DVR_SNAT, count=1)
def test_get_l3_agent_candidates_centralized(self):
self._register_l3_dvr_agents()
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r2')
router['external_gateway_info'] = None
router['id'] = str(uuid.uuid4())
# check centralized test case
router['distributed'] = False
agent_list = [self.l3_dvr_snat_agent]
self._check_get_l3_agent_candidates(router, agent_list, HOST_DVR_SNAT)
def test_get_l3_agents_hosting_routers(self):
agent = helpers.register_l3_agent('host_6')
router = self._make_router(self.fmt,
tenant_id=str(uuid.uuid4()),
name='r1')
ctx = self.adminContext
router_id = router['router']['id']
self.plugin.router_scheduler.bind_router(ctx, router_id, agent)
agents = self.get_l3_agents_hosting_routers(ctx,
[router_id])
self.assertEqual([agent.id], [agt.id for agt in agents])
agents = self.get_l3_agents_hosting_routers(ctx,
[router_id],
admin_state_up=True)
self.assertEqual([agent.id], [agt.id for agt in agents])
self._set_l3_agent_admin_state(ctx, agent.id, False)
agents = self.get_l3_agents_hosting_routers(ctx,
[router_id])
self.assertEqual([agent.id], [agt.id for agt in agents])
agents = self.get_l3_agents_hosting_routers(ctx,
[router_id],
admin_state_up=True)
self.assertEqual([], agents)
class L3SchedulerTestCaseMixin(l3_agentschedulers_db.L3AgentSchedulerDbMixin,
l3_db.L3_NAT_db_mixin,
common_db_mixin.CommonDbMixin,
test_l3.L3NatTestCaseMixin,
L3SchedulerBaseMixin,
L3SchedulerTestBaseMixin):
def setUp(self):
self.mock_rescheduling = False
ext_mgr = test_l3.L3TestExtensionManager()
plugin_str = ('neutron.tests.unit.extensions.test_l3.'
'TestL3NatIntAgentSchedulingPlugin')
super(L3SchedulerTestCaseMixin, self).setUp(plugin=plugin_str,
ext_mgr=ext_mgr)
self.adminContext = n_context.get_admin_context()
self.plugin = manager.NeutronManager.get_plugin()
self.plugin.router_scheduler = importutils.import_object(
'neutron.scheduler.l3_agent_scheduler.ChanceScheduler'
)
self._register_l3_agents()
class L3AgentChanceSchedulerTestCase(L3SchedulerTestCaseMixin,
test_db_base_plugin_v2.
NeutronDbPluginV2TestCase):
def test_random_scheduling(self):
random_patch = mock.patch('random.choice')
random_mock = random_patch.start()
def side_effect(seq):
return seq[0]
random_mock.side_effect = side_effect
with self.subnet() as subnet:
self._set_net_external(subnet['subnet']['network_id'])
with self.router_with_ext_gw(name='r1', subnet=subnet) as r1:
agents = self.get_l3_agents_hosting_routers(
self.adminContext, [r1['router']['id']],
admin_state_up=True)
self.assertEqual(1, len(agents))
self.assertEqual(1, random_mock.call_count)
with self.router_with_ext_gw(name='r2', subnet=subnet) as r2:
agents = self.get_l3_agents_hosting_routers(
self.adminContext, [r2['router']['id']],
admin_state_up=True)
                self.assertEqual(1, len(agents))
self.assertEqual(2, random_mock.call_count)
random_patch.stop()
def test_scheduler_auto_schedule_when_agent_added(self):
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id1, False)
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id2, False)
with self.subnet() as subnet:
self._set_net_external(subnet['subnet']['network_id'])
with self.router_with_ext_gw(name='r1', subnet=subnet) as r1:
agents = self.get_l3_agents_hosting_routers(
self.adminContext, [r1['router']['id']],
admin_state_up=True)
self.assertEqual(0, len(agents))
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id1, True)
self.plugin.auto_schedule_routers(self.adminContext,
'host_1',
[r1['router']['id']])
agents = self.get_l3_agents_hosting_routers(
self.adminContext, [r1['router']['id']],
admin_state_up=True)
self.assertEqual('host_1', agents[0]['host'])
class L3AgentLeastRoutersSchedulerTestCase(L3SchedulerTestCaseMixin,
test_db_base_plugin_v2.
NeutronDbPluginV2TestCase):
def setUp(self):
super(L3AgentLeastRoutersSchedulerTestCase, self).setUp()
self.plugin.router_scheduler = importutils.import_object(
'neutron.scheduler.l3_agent_scheduler.LeastRoutersScheduler'
)
def test_scheduler(self):
        # disable one agent to force scheduling onto the only remaining one
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id2, False)
with self.subnet() as subnet:
self._set_net_external(subnet['subnet']['network_id'])
with self.router_with_ext_gw(name='r1', subnet=subnet) as r1:
agents = self.get_l3_agents_hosting_routers(
self.adminContext, [r1['router']['id']],
admin_state_up=True)
self.assertEqual(1, len(agents))
agent_id1 = agents[0]['id']
with self.router_with_ext_gw(name='r2', subnet=subnet) as r2:
agents = self.get_l3_agents_hosting_routers(
self.adminContext, [r2['router']['id']],
admin_state_up=True)
self.assertEqual(1, len(agents))
agent_id2 = agents[0]['id']
self.assertEqual(agent_id1, agent_id2)
# re-enable the second agent to see whether the next router
# spawned will be on this one.
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id2, True)
with self.router_with_ext_gw(name='r3',
subnet=subnet) as r3:
agents = self.get_l3_agents_hosting_routers(
self.adminContext, [r3['router']['id']],
admin_state_up=True)
self.assertEqual(1, len(agents))
agent_id3 = agents[0]['id']
self.assertNotEqual(agent_id1, agent_id3)
class L3DvrScheduler(l3_db.L3_NAT_db_mixin,
l3_dvrscheduler_db.L3_DVRsch_db_mixin):
pass
class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
def setUp(self):
plugin = 'neutron.plugins.ml2.plugin.Ml2Plugin'
self.setup_coreplugin(plugin)
super(L3DvrSchedulerTestCase, self).setUp()
self.adminContext = n_context.get_admin_context()
self.dut = L3DvrScheduler()
def test__notify_l3_agent_update_port_no_removing_routers(self):
port_id = 'fake-port'
kwargs = {
'context': self.adminContext,
'port': None,
'original_port': {
'id': port_id,
portbindings.HOST_ID: 'vm-host',
'device_id': 'vm-id',
'device_owner': DEVICE_OWNER_COMPUTE,
'mac_address': '02:04:05:17:18:19'
},
'mac_address_updated': True
}
plugin = manager.NeutronManager.get_plugin()
l3plugin = mock.Mock()
l3plugin.supported_extension_aliases = [
'router', constants.L3_AGENT_SCHEDULER_EXT_ALIAS,
constants.L3_DISTRIBUTED_EXT_ALIAS
]
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}):
l3_dvrscheduler_db._notify_l3_agent_port_update(
'port', 'after_update', plugin, **kwargs)
self.assertFalse(
l3plugin.update_arp_entry_for_dvr_service_port.called)
self.assertFalse(
l3plugin.dvr_handle_new_service_port.called)
self.assertFalse(l3plugin.remove_router_from_l3_agent.called)
self.assertFalse(l3plugin.get_dvr_routers_to_remove.called)
def test__notify_l3_agent_new_port_action(self):
kwargs = {
'context': self.adminContext,
'original_port': None,
'port': {
'device_owner': DEVICE_OWNER_COMPUTE,
},
}
l3plugin = mock.Mock()
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}):
l3_dvrscheduler_db._notify_l3_agent_new_port(
'port', 'after_create', mock.ANY, **kwargs)
l3plugin.update_arp_entry_for_dvr_service_port.\
assert_called_once_with(
self.adminContext, kwargs.get('port'))
l3plugin.dvr_handle_new_service_port.assert_called_once_with(
self.adminContext, kwargs.get('port'))
def test__notify_l3_agent_new_port_no_action(self):
kwargs = {
'context': self.adminContext,
'original_port': None,
'port': {
'device_owner': 'network:None',
}
}
l3plugin = mock.Mock()
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}):
l3_dvrscheduler_db._notify_l3_agent_new_port(
'port', 'after_create', mock.ANY, **kwargs)
self.assertFalse(
l3plugin.update_arp_entry_for_dvr_service_port.called)
self.assertFalse(
l3plugin.dvr_handle_new_service_port.called)
def test__notify_l3_agent_update_port_no_action(self):
kwargs = {
'context': self.adminContext,
'original_port': {
portbindings.HOST_ID: 'vm-host',
'device_owner': DEVICE_OWNER_COMPUTE,
},
'port': {
portbindings.HOST_ID: 'vm-host',
'device_owner': DEVICE_OWNER_COMPUTE,
},
}
l3plugin = mock.Mock()
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}):
l3_dvrscheduler_db._notify_l3_agent_port_update(
'port', 'after_update', mock.ANY, **kwargs)
self.assertFalse(
l3plugin.update_arp_entry_for_dvr_service_port.called)
self.assertFalse(
l3plugin.dvr_handle_new_service_port.called)
self.assertFalse(l3plugin.remove_router_from_l3_agent.called)
self.assertFalse(l3plugin.get_dvr_routers_to_remove.called)
def test__notify_l3_agent_update_port_with_mac_address_update(self):
kwargs = {
'context': self.adminContext,
'original_port': {
portbindings.HOST_ID: 'vm-host',
'mac_address': '02:04:05:17:18:19'
},
'port': {
portbindings.HOST_ID: 'vm-host',
'mac_address': '02:04:05:17:18:29'
},
'mac_address_updated': True
}
l3plugin = mock.Mock()
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}):
l3_dvrscheduler_db._notify_l3_agent_port_update(
'port', 'after_update', mock.ANY, **kwargs)
l3plugin.update_arp_entry_for_dvr_service_port.\
assert_called_once_with(
self.adminContext, kwargs.get('port'))
self.assertFalse(l3plugin.dvr_handle_new_service_port.called)
def test__notify_l3_agent_update_port_with_port_binding_change(self):
source_host = 'vm-host1'
kwargs = {
'context': self.adminContext,
'original_port': {
'id': str(uuid.uuid4()),
portbindings.HOST_ID: source_host,
'device_owner': DEVICE_OWNER_COMPUTE,
},
'port': {
portbindings.HOST_ID: 'vm-host2',
'device_owner': DEVICE_OWNER_COMPUTE,
},
}
l3plugin = mock.Mock()
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}),\
mock.patch.object(l3plugin, 'get_dvr_routers_to_remove',
return_value=[{'agent_id': 'foo_agent',
'router_id': 'foo_id',
'host': source_host}]):
l3_dvrscheduler_db._notify_l3_agent_port_update(
'port', 'after_update', mock.ANY, **kwargs)
(l3plugin.l3_rpc_notifier.router_removed_from_agent.
assert_called_once_with(mock.ANY, 'foo_id', source_host))
self.assertEqual(
1, l3plugin.update_arp_entry_for_dvr_service_port.call_count)
self.assertEqual(
1, l3plugin.delete_arp_entry_for_dvr_service_port.call_count)
l3plugin.dvr_handle_new_service_port.assert_called_once_with(
self.adminContext, kwargs.get('port'))
def test__notify_l3_agent_update_port_removing_routers(self):
port_id = 'fake-port'
source_host = 'vm-host'
kwargs = {
'context': self.adminContext,
'port': {
'id': port_id,
portbindings.HOST_ID: None,
'device_id': '',
'device_owner': ''
},
'mac_address_updated': False,
'original_port': {
'id': port_id,
portbindings.HOST_ID: source_host,
'device_id': 'vm-id',
'device_owner': DEVICE_OWNER_COMPUTE
}
}
plugin = manager.NeutronManager.get_plugin()
l3plugin = mock.Mock()
l3plugin.supported_extension_aliases = [
'router', constants.L3_AGENT_SCHEDULER_EXT_ALIAS,
constants.L3_DISTRIBUTED_EXT_ALIAS
]
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}),\
mock.patch.object(l3plugin, 'get_dvr_routers_to_remove',
return_value=[{'agent_id': 'foo_agent',
'router_id': 'foo_id',
'host': source_host}]):
l3_dvrscheduler_db._notify_l3_agent_port_update(
'port', 'after_update', plugin, **kwargs)
self.assertEqual(
1, l3plugin.delete_arp_entry_for_dvr_service_port.call_count)
l3plugin.delete_arp_entry_for_dvr_service_port.\
assert_called_once_with(
self.adminContext, mock.ANY)
self.assertFalse(
l3plugin.dvr_handle_new_service_port.called)
(l3plugin.l3_rpc_notifier.router_removed_from_agent.
assert_called_once_with(mock.ANY, 'foo_id', source_host))
def test__notify_port_delete(self):
plugin = manager.NeutronManager.get_plugin()
l3plugin = mock.Mock()
l3plugin.supported_extension_aliases = [
'router', constants.L3_AGENT_SCHEDULER_EXT_ALIAS,
constants.L3_DISTRIBUTED_EXT_ALIAS
]
with mock.patch.object(manager.NeutronManager,
'get_service_plugins',
return_value={'L3_ROUTER_NAT': l3plugin}):
kwargs = {
'context': self.adminContext,
'port': mock.ANY,
}
removed_routers = [{'agent_id': 'foo_agent',
'router_id': 'foo_id',
'host': 'foo_host'}]
l3plugin.get_dvr_routers_to_remove.return_value = removed_routers
l3_dvrscheduler_db._notify_port_delete(
'port', 'after_delete', plugin, **kwargs)
l3plugin.delete_arp_entry_for_dvr_service_port.\
assert_called_once_with(
self.adminContext, mock.ANY)
(l3plugin.l3_rpc_notifier.router_removed_from_agent.
assert_called_once_with(mock.ANY, 'foo_id', 'foo_host'))
def test_dvr_handle_new_service_port(self):
port = {
'id': 'port1',
'device_id': 'abcd',
'device_owner': DEVICE_OWNER_COMPUTE_NOVA,
portbindings.HOST_ID: 'host1',
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.3'
}
]
}
dvr_ports = [
{
'id': 'dvr_port1',
'device_id': 'r1',
'device_owner': constants.DEVICE_OWNER_DVR_INTERFACE,
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.1'
}
]
},
{
'id': 'dvr_port2',
'device_id': 'r2',
'device_owner': constants.DEVICE_OWNER_DVR_INTERFACE,
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.123'
}
]
}
]
agent_on_host = {'id': 'agent1'}
with mock.patch(
'neutron.db.db_base_plugin_v2.NeutronDbPluginV2' '.get_ports',
return_value=dvr_ports),\
mock.patch('neutron.api.rpc.agentnotifiers.l3_rpc_agent_api'
'.L3AgentNotifyAPI'),\
mock.patch.object(
self.dut, 'get_l3_agents',
return_value=[agent_on_host]) as get_l3_agents:
self.dut.dvr_handle_new_service_port(
self.adminContext, port)
get_l3_agents.assert_called_once_with(
self.adminContext,
filters={'host': [port[portbindings.HOST_ID]]})
(self.dut.l3_rpc_notifier.routers_updated_on_host.
assert_called_once_with(
self.adminContext, {'r1', 'r2'}, 'host1'))
self.assertFalse(self.dut.l3_rpc_notifier.routers_updated.called)
def test_get_dvr_routers_by_subnet_ids(self):
subnet_id = '80947d4a-fbc8-484b-9f92-623a6bfcf3e0'
dvr_port = {
'id': 'dvr_port1',
'device_id': 'r1',
'device_owner': constants.DEVICE_OWNER_DVR_INTERFACE,
'fixed_ips': [
{
'subnet_id': subnet_id,
'ip_address': '10.10.10.1'
}
]
}
r1 = {
'id': 'r1',
'distributed': True,
}
with mock.patch(
'neutron.db.db_base_plugin_v2.NeutronDbPluginV2' '.get_port',
return_value=dvr_port),\
mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
'.get_ports', return_value=[dvr_port]):
router_id = self.dut.get_dvr_routers_by_subnet_ids(
self.adminContext, [subnet_id])
self.assertEqual(r1['id'], router_id.pop())
def test_get_subnet_ids_on_router(self):
dvr_port = {
'id': 'dvr_port1',
'device_id': 'r1',
'device_owner': constants.DEVICE_OWNER_DVR_INTERFACE,
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.1'
}
]
}
r1 = {
'id': 'r1',
'distributed': True,
}
with mock.patch(
'neutron.db.db_base_plugin_v2.NeutronDbPluginV2' '.get_ports',
return_value=[dvr_port]):
sub_ids = self.dut.get_subnet_ids_on_router(self.adminContext,
r1['id'])
self.assertEqual(sub_ids.pop(),
dvr_port.get('fixed_ips').pop(0).get('subnet_id'))
def test_get_subnet_ids_on_router_no_subnet(self):
dvr_port = {
'id': 'dvr_port1',
'device_id': 'r1',
'device_owner': 'network:router_interface_distributed',
'fixed_ips': []
}
r1 = {
'id': 'r1',
'distributed': True,
}
with mock.patch.object(db_v2.NeutronDbPluginV2, 'get_ports',
return_value=[dvr_port]):
sub_ids = self.dut.get_subnet_ids_on_router(self.adminContext,
r1['id'])
self.assertEqual(0, len(sub_ids))
def _prepare_schedule_snat_tests(self):
agent = agents_db.Agent()
agent.admin_state_up = True
agent.heartbeat_timestamp = timeutils.utcnow()
router = {
'id': 'foo_router_id',
'distributed': True,
'external_gateway_info': {
'network_id': str(uuid.uuid4()),
'enable_snat': True
}
}
return agent, router
class L3HAPlugin(db_v2.NeutronDbPluginV2,
l3_hamode_db.L3_HA_NAT_db_mixin,
l3_hascheduler_db.L3_HA_scheduler_db_mixin):
supported_extension_aliases = ["l3-ha", "router_availability_zone"]
class L3HATestCaseMixin(testlib_api.SqlTestCase,
L3SchedulerBaseMixin):
def setUp(self):
super(L3HATestCaseMixin, self).setUp()
self.adminContext = n_context.get_admin_context()
mock.patch('neutron.common.rpc.get_client').start()
self.plugin = L3HAPlugin()
self.setup_coreplugin('neutron.plugins.ml2.plugin.Ml2Plugin')
cfg.CONF.set_override('service_plugins',
['neutron.services.l3_router.'
'l3_router_plugin.L3RouterPlugin'])
cfg.CONF.set_override('max_l3_agents_per_router', 0)
self.plugin.router_scheduler = importutils.import_object(
'neutron.scheduler.l3_agent_scheduler.ChanceScheduler'
)
self._register_l3_agents()
def _create_ha_router(self, ha=True, tenant_id='tenant1', az_hints=None):
self.adminContext.tenant_id = tenant_id
router = {'name': 'router1', 'admin_state_up': True,
'tenant_id': tenant_id}
if ha is not None:
router['ha'] = ha
if az_hints is None:
az_hints = []
router['availability_zone_hints'] = az_hints
return self.plugin.create_router(self.adminContext,
{'router': router})
class L3_HA_scheduler_db_mixinTestCase(L3HATestCaseMixin):
def _register_l3_agents(self, plugin=None):
super(L3_HA_scheduler_db_mixinTestCase,
self)._register_l3_agents(plugin=plugin)
self.agent3 = helpers.register_l3_agent(host='host_3')
self.agent_id3 = self.agent3.id
self.agent4 = helpers.register_l3_agent(host='host_4')
self.agent_id4 = self.agent4.id
def test_get_ha_routers_l3_agents_count(self):
router1 = self._create_ha_router()
router2 = self._create_ha_router()
router3 = self._create_ha_router(ha=False)
result = self.plugin.get_ha_routers_l3_agents_count(self.adminContext)
self.assertEqual(2, len(result))
check_result = [(router['id'], agents) for router, agents in result]
self.assertIn((router1['id'], 4), check_result)
self.assertIn((router2['id'], 4), check_result)
self.assertNotIn((router3['id'], mock.ANY), check_result)
def test_get_ordered_l3_agents_by_num_routers(self):
# Mock scheduling so that the test can control it explicitly
mock.patch.object(l3_hamode_db.L3_HA_NAT_db_mixin,
'_notify_ha_interfaces_updated').start()
router1 = self._create_ha_router()
router2 = self._create_ha_router()
router3 = self._create_ha_router(ha=False)
router4 = self._create_ha_router(ha=False)
# Agent 1 will host 0 routers, agent 2 will host 1, agent 3 will
# host 2, and agent 4 will host 3.
self.plugin.schedule_router(self.adminContext, router1['id'],
candidates=[self.agent2, self.agent4])
self.plugin.schedule_router(self.adminContext, router2['id'],
candidates=[self.agent3, self.agent4])
self.plugin.schedule_router(self.adminContext, router3['id'],
candidates=[self.agent3])
self.plugin.schedule_router(self.adminContext, router4['id'],
candidates=[self.agent4])
agent_ids = [self.agent_id1, self.agent_id2, self.agent_id3,
self.agent_id4]
result = self.plugin.get_l3_agents_ordered_by_num_routers(
self.adminContext, agent_ids)
self.assertEqual(agent_ids, [record['id'] for record in result])
class L3AgentSchedulerDbMixinTestCase(L3HATestCaseMixin):
def _setup_ha_router(self):
router = self._create_ha_router()
agents = self._get_agents_scheduled_for_router(router)
return router, agents
def test_reschedule_ha_routers_from_down_agents(self):
agents = self._setup_ha_router()[1]
self.assertEqual(2, len(agents))
self._set_l3_agent_dead(self.agent_id1)
with mock.patch.object(self.plugin, 'reschedule_router') as reschedule:
self.plugin.reschedule_routers_from_down_agents()
self.assertFalse(reschedule.called)
def test_list_l3_agents_hosting_ha_router(self):
router = self._create_ha_router()
agents = self.plugin.list_l3_agents_hosting_router(
self.adminContext, router['id'])['agents']
for agent in agents:
self.assertEqual('standby', agent['ha_state'])
self.plugin.update_routers_states(
self.adminContext, {router['id']: 'active'}, self.agent1.host)
agents = self.plugin.list_l3_agents_hosting_router(
self.adminContext, router['id'])['agents']
for agent in agents:
expected_state = ('active' if agent['host'] == self.agent1.host
else 'standby')
self.assertEqual(expected_state, agent['ha_state'])
def test_list_l3_agents_hosting_legacy_router(self):
router = self._create_ha_router(ha=False)
self.plugin.schedule_router(self.adminContext, router['id'])
agent = self.plugin.list_l3_agents_hosting_router(
self.adminContext, router['id'])['agents'][0]
self.assertIsNone(agent['ha_state'])
def test_get_agents_dict_for_router_unscheduled_returns_empty_list(self):
self.assertEqual({'agents': []},
self.plugin._get_agents_dict_for_router([]))
def test_manual_add_ha_router_to_agent(self):
cfg.CONF.set_override('max_l3_agents_per_router', 2)
router, agents = self._setup_ha_router()
self.assertEqual(2, len(agents))
agent = helpers.register_l3_agent(host='myhost_3')
        # We allow exceeding max_l3_agents_per_router via manual scheduling
self.plugin.add_router_to_l3_agent(
self.adminContext, agent.id, router['id'])
agents = self._get_agents_scheduled_for_router(router)
self.assertIn(agent.id, [_agent.id for _agent in agents])
self.assertEqual(3, len(agents))
def test_manual_remove_ha_router_from_agent(self):
router, agents = self._setup_ha_router()
self.assertEqual(2, len(agents))
agent = agents.pop()
# Remove router from agent and make sure it is removed
self.plugin.remove_router_from_l3_agent(
self.adminContext, agent.id, router['id'])
agents = self._get_agents_scheduled_for_router(router)
self.assertEqual(1, len(agents))
self.assertNotIn(agent.id, [_agent.id for _agent in agents])
def test_manual_remove_ha_router_from_all_agents(self):
router, agents = self._setup_ha_router()
self.assertEqual(2, len(agents))
agent = agents.pop()
self.plugin.remove_router_from_l3_agent(
self.adminContext, agent.id, router['id'])
agent = agents.pop()
self.plugin.remove_router_from_l3_agent(
self.adminContext, agent.id, router['id'])
agents = self._get_agents_scheduled_for_router(router)
self.assertEqual(0, len(agents))
def _get_agents_scheduled_for_router(self, router):
return self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [router['id']],
admin_state_up=True)
def test_delete_ha_interfaces_from_agent(self):
router, agents = self._setup_ha_router()
agent = agents.pop()
self.plugin.remove_router_from_l3_agent(
self.adminContext, agent.id, router['id'])
session = self.adminContext.session
db = l3_hamode_db.L3HARouterAgentPortBinding
results = session.query(db).filter_by(
router_id=router['id'])
results = [binding.l3_agent_id for binding in results.all()]
self.assertNotIn(agent.id, results)
def test_add_ha_interface_to_l3_agent(self):
agent = self.plugin.get_agents_db(self.adminContext)[0]
router = self._create_ha_router()
self.plugin.add_router_to_l3_agent(self.adminContext, agent.id,
router['id'])
# Verify agent has HA interface
ha_ports = self.plugin.get_ha_router_port_bindings(self.adminContext,
[router['id']])
self.assertIn(agent.id, [ha_port.l3_agent_id for ha_port in ha_ports])
class L3HAChanceSchedulerTestCase(L3HATestCaseMixin):
def test_scheduler_with_ha_enabled(self):
router = self._create_ha_router()
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [router['id']],
admin_state_up=True)
self.assertEqual(2, len(agents))
for agent in agents:
sync_data = self.plugin.get_ha_sync_data_for_host(
self.adminContext, router_ids=[router['id']],
host=agent.host, agent=agent)
self.assertEqual(1, len(sync_data))
interface = sync_data[0][constants.HA_INTERFACE_KEY]
self.assertIsNotNone(interface)
def test_auto_schedule(self):
# Mock scheduling so that the test can control it explicitly
mock.patch.object(l3_hamode_db.L3_HA_NAT_db_mixin,
'_notify_ha_interfaces_updated').start()
router = self._create_ha_router()
self.plugin.auto_schedule_routers(
self.adminContext, self.agent1.host, None)
self.plugin.auto_schedule_routers(
self.adminContext, self.agent2.host, None)
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [router['id']])
self.assertEqual(2, len(agents))
def test_auto_schedule_specific_router_when_agent_added(self):
self._auto_schedule_when_agent_added(True)
def test_auto_schedule_all_routers_when_agent_added(self):
self._auto_schedule_when_agent_added(False)
def _auto_schedule_when_agent_added(self, specific_router):
router = self._create_ha_router()
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [router['id']],
admin_state_up=True)
self.assertEqual(2, len(agents))
agent_ids = [agent['id'] for agent in agents]
self.assertIn(self.agent_id1, agent_ids)
self.assertIn(self.agent_id2, agent_ids)
agent = helpers.register_l3_agent(host='host_3')
self.agent_id3 = agent.id
routers_to_auto_schedule = [router['id']] if specific_router else []
self.plugin.auto_schedule_routers(self.adminContext,
'host_3',
routers_to_auto_schedule)
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [router['id']],
admin_state_up=True)
self.assertEqual(3, len(agents))
# Simulate agent restart to make sure we don't try to re-bind
self.plugin.auto_schedule_routers(self.adminContext,
'host_3',
routers_to_auto_schedule)
def test_scheduler_with_ha_enabled_not_enough_agent(self):
r1 = self._create_ha_router()
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id']],
admin_state_up=True)
self.assertEqual(2, len(agents))
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id2, False)
self.assertRaises(
l3_ha.HANotEnoughAvailableAgents, self._create_ha_router)
class L3HALeastRoutersSchedulerTestCase(L3HATestCaseMixin):
def _register_l3_agents(self, plugin=None):
super(L3HALeastRoutersSchedulerTestCase,
self)._register_l3_agents(plugin=plugin)
agent = helpers.register_l3_agent(host='host_3')
self.agent_id3 = agent.id
agent = helpers.register_l3_agent(host='host_4')
self.agent_id4 = agent.id
def setUp(self):
super(L3HALeastRoutersSchedulerTestCase, self).setUp()
self.plugin.router_scheduler = importutils.import_object(
'neutron.scheduler.l3_agent_scheduler.LeastRoutersScheduler'
)
def test_scheduler(self):
cfg.CONF.set_override('max_l3_agents_per_router', 2)
        # disable the last two agents to be sure that the router will
        # be scheduled to one of the first two
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id3, False)
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id4, False)
r1 = self._create_ha_router()
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id']],
admin_state_up=True)
self.assertEqual(2, len(agents))
agent_ids = [agent['id'] for agent in agents]
self.assertIn(self.agent_id1, agent_ids)
self.assertIn(self.agent_id2, agent_ids)
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id3, True)
self._set_l3_agent_admin_state(self.adminContext,
self.agent_id4, True)
r2 = self._create_ha_router()
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r2['id']],
admin_state_up=True)
self.assertEqual(2, len(agents))
agent_ids = [agent['id'] for agent in agents]
self.assertIn(self.agent_id3, agent_ids)
self.assertIn(self.agent_id4, agent_ids)
class TestGetL3AgentsWithAgentModeFilter(testlib_api.SqlTestCase,
L3SchedulerBaseMixin):
"""Test cases to test get_l3_agents.
This class tests the L3AgentSchedulerDbMixin.get_l3_agents()
for the 'agent_mode' filter with various values.
5 l3 agents are registered in the order - legacy, dvr_snat, dvr, fake_mode
and legacy
"""
scenarios = [
('no filter',
dict(agent_modes=[],
expected_agent_modes=['legacy', 'dvr_snat', 'dvr',
'fake_mode', 'legacy'])),
('legacy',
dict(agent_modes=['legacy'],
expected_agent_modes=['legacy', 'legacy'])),
('dvr_snat',
dict(agent_modes=['dvr_snat'],
expected_agent_modes=['dvr_snat'])),
('dvr ',
dict(agent_modes=['dvr'],
expected_agent_modes=['dvr'])),
('legacy and dvr snat',
dict(agent_modes=['legacy', 'dvr_snat', 'legacy'],
expected_agent_modes=['legacy', 'dvr_snat', 'legacy'])),
('legacy and dvr',
dict(agent_modes=['legacy', 'dvr'],
expected_agent_modes=['legacy', 'dvr', 'legacy'])),
('dvr_snat and dvr',
dict(agent_modes=['dvr_snat', 'dvr'],
expected_agent_modes=['dvr_snat', 'dvr'])),
('legacy, dvr_snat and dvr',
dict(agent_modes=['legacy', 'dvr_snat', 'dvr'],
expected_agent_modes=['legacy', 'dvr_snat', 'dvr',
'legacy'])),
('invalid',
dict(agent_modes=['invalid'],
expected_agent_modes=[])),
]
def setUp(self):
super(TestGetL3AgentsWithAgentModeFilter, self).setUp()
self.plugin = L3HAPlugin()
self.setup_coreplugin('neutron.plugins.ml2.plugin.Ml2Plugin')
self.adminContext = n_context.get_admin_context()
hosts = ['host_1', 'host_2', 'host_3', 'host_4', 'host_5']
agent_modes = ['legacy', 'dvr_snat', 'dvr', 'fake_mode', 'legacy']
for host, agent_mode in zip(hosts, agent_modes):
helpers.register_l3_agent(host, agent_mode)
def _get_agent_mode(self, agent):
agent_conf = self.plugin.get_configuration_dict(agent)
return agent_conf.get('agent_mode', 'None')
def test_get_l3_agents(self):
l3_agents = self.plugin.get_l3_agents(
self.adminContext, filters={'agent_modes': self.agent_modes})
self.assertEqual(len(self.expected_agent_modes), len(l3_agents))
returned_agent_modes = [self._get_agent_mode(agent)
for agent in l3_agents]
self.assertEqual(self.expected_agent_modes, returned_agent_modes)
class L3AgentAZLeastRoutersSchedulerTestCase(L3HATestCaseMixin):
def setUp(self):
super(L3AgentAZLeastRoutersSchedulerTestCase, self).setUp()
self.plugin.router_scheduler = importutils.import_object(
'neutron.scheduler.l3_agent_scheduler.AZLeastRoutersScheduler')
# Mock scheduling so that the test can control it explicitly
mock.patch.object(l3_hamode_db.L3_HA_NAT_db_mixin,
'_notify_ha_interfaces_updated').start()
def _register_l3_agents(self):
self.agent1 = helpers.register_l3_agent(host='az1-host1', az='az1')
self.agent2 = helpers.register_l3_agent(host='az1-host2', az='az1')
self.agent3 = helpers.register_l3_agent(host='az2-host1', az='az2')
self.agent4 = helpers.register_l3_agent(host='az2-host2', az='az2')
self.agent5 = helpers.register_l3_agent(host='az3-host1', az='az3')
self.agent6 = helpers.register_l3_agent(host='az3-host2', az='az3')
def test_az_scheduler_auto_schedule(self):
r1 = self._create_ha_router(ha=False, az_hints=['az1'])
self.plugin.auto_schedule_routers(self.adminContext,
'az1-host2', None)
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id']])
self.assertEqual(1, len(agents))
self.assertEqual('az1-host2', agents[0]['host'])
def test_az_scheduler_auto_schedule_no_match(self):
r1 = self._create_ha_router(ha=False, az_hints=['az1'])
self.plugin.auto_schedule_routers(self.adminContext,
'az2-host1', None)
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id']])
self.assertEqual(0, len(agents))
def test_az_scheduler_default_az(self):
cfg.CONF.set_override('default_availability_zones', ['az2'])
r1 = self._create_ha_router(ha=False)
r2 = self._create_ha_router(ha=False)
r3 = self._create_ha_router(ha=False)
self.plugin.schedule_router(self.adminContext, r1['id'])
self.plugin.schedule_router(self.adminContext, r2['id'])
self.plugin.schedule_router(self.adminContext, r3['id'])
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id'], r2['id'], r3['id']])
self.assertEqual(3, len(agents))
expected_hosts = set(['az2-host1', 'az2-host2'])
hosts = set([a['host'] for a in agents])
self.assertEqual(expected_hosts, hosts)
def test_az_scheduler_az_hints(self):
r1 = self._create_ha_router(ha=False, az_hints=['az3'])
r2 = self._create_ha_router(ha=False, az_hints=['az3'])
r3 = self._create_ha_router(ha=False, az_hints=['az3'])
self.plugin.schedule_router(self.adminContext, r1['id'])
self.plugin.schedule_router(self.adminContext, r2['id'])
self.plugin.schedule_router(self.adminContext, r3['id'])
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id'], r2['id'], r3['id']])
self.assertEqual(3, len(agents))
expected_hosts = set(['az3-host1', 'az3-host2'])
hosts = set([a['host'] for a in agents])
self.assertEqual(expected_hosts, hosts)
def test_az_scheduler_least_routers(self):
r1 = self._create_ha_router(ha=False, az_hints=['az1'])
r2 = self._create_ha_router(ha=False, az_hints=['az1'])
r3 = self._create_ha_router(ha=False, az_hints=['az1'])
r4 = self._create_ha_router(ha=False, az_hints=['az1'])
self.plugin.schedule_router(self.adminContext, r1['id'])
self.plugin.schedule_router(self.adminContext, r2['id'])
self.plugin.schedule_router(self.adminContext, r3['id'])
self.plugin.schedule_router(self.adminContext, r4['id'])
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id'], r2['id'], r3['id'], r4['id']])
host_num = collections.defaultdict(int)
for agent in agents:
host_num[agent['host']] += 1
self.assertEqual(2, host_num['az1-host1'])
self.assertEqual(2, host_num['az1-host2'])
def test_az_scheduler_ha_az_hints(self):
cfg.CONF.set_override('max_l3_agents_per_router', 2)
r1 = self._create_ha_router(az_hints=['az1', 'az3'])
self.plugin.schedule_router(self.adminContext, r1['id'])
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id']])
self.assertEqual(2, len(agents))
expected_azs = set(['az1', 'az3'])
azs = set([a['availability_zone'] for a in agents])
self.assertEqual(expected_azs, azs)
def test_az_scheduler_ha_auto_schedule(self):
cfg.CONF.set_override('max_l3_agents_per_router', 3)
r1 = self._create_ha_router(az_hints=['az1', 'az3'])
self._set_l3_agent_admin_state(self.adminContext, self.agent2['id'],
state=False)
self._set_l3_agent_admin_state(self.adminContext, self.agent6['id'],
state=False)
self.plugin.schedule_router(self.adminContext, r1['id'])
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id']])
self.assertEqual(2, len(agents))
hosts = set([a['host'] for a in agents])
self.assertEqual(set(['az1-host1', 'az3-host1']), hosts)
self._set_l3_agent_admin_state(self.adminContext, self.agent6['id'],
state=True)
self.plugin.auto_schedule_routers(self.adminContext,
'az3-host2', None)
agents = self.plugin.get_l3_agents_hosting_routers(
self.adminContext, [r1['id']])
self.assertEqual(3, len(agents))
expected_hosts = set(['az1-host1', 'az3-host1', 'az3-host2'])
hosts = set([a['host'] for a in agents])
self.assertEqual(expected_hosts, hosts)
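# Minimal plugin combining the core DB plugin with the HA and DVR+HA scheduler
# mixins, so the DVR/HA scheduling code paths can be exercised directly.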
class L3DVRHAPlugin(db_v2.NeutronDbPluginV2,
l3_hamode_db.L3_HA_NAT_db_mixin,
l3_dvr_ha_scheduler_db.L3_DVR_HA_scheduler_db_mixin):
pass
class L3DVRHATestCaseMixin(testlib_api.SqlTestCase,
L3SchedulerBaseMixin):
def setUp(self):
super(L3DVRHATestCaseMixin, self).setUp()
self.adminContext = n_context.get_admin_context()
self.plugin = L3DVRHAPlugin()
|
apache-2.0
| -2,183,191,022,081,990,100 | 42.118243 | 79 | 0.558202 | false |
pmeier82/django-spikeval
|
djspikeval/forms/datafile.py
|
1
|
2926
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.apps import apps
from .util import form_with_captcha
__all__ = ["DatafileForm"]
__author__ = "pmeier82"
Asset = apps.get_registered_model("base", "asset")
Datafile = apps.get_registered_model("djspikeval", "datafile")
Dataset = apps.get_registered_model("djspikeval", "dataset")
@form_with_captcha
class DatafileForm(forms.ModelForm):
"""`Datafile` model form"""
# meta
class Meta:
model = Datafile
exclude = ("created", "modified", "valid_rd_log", "valid_gt_log")
# extra fields
rd_upload = forms.FileField(label="Rawdata File", required=False)
gt_upload = forms.FileField(label="Groundtruth File", required=False)
# constructor
def __init__(self, *args, **kwargs):
self.user = kwargs.pop("user", None)
self.dataset = kwargs.pop("dataset", None)
super(DatafileForm, self).__init__(*args, **kwargs)
if self.instance.id is None:
self.fields.pop("dataset")
self.initial["dataset"] = self.dataset
if self.instance.rd_file:
self.initial["rd_upload"] = self.instance.rd_file.data
if self.instance.gt_file:
self.initial["gt_upload"] = self.instance.gt_file.data
if self.dataset is not None:
self.fields["parameter"].label = self.dataset.parameter
def save(self, *args, **kwargs):
# init and checks
if not self.changed_data:
return
if self.instance.id is None:
if "rd_upload" not in self.changed_data:
return
self.instance.dataset = self.dataset
tr = super(DatafileForm, self).save(*args, **kwargs)
# handling rd_file upload
if "rd_upload" in self.changed_data:
if tr.rd_file:
tr.rd_file.delete()
tr.valid_rd_log = None
tr.save()
if self.cleaned_data["rd_upload"]:
rd_file = Asset(
name=self.cleaned_data["rd_upload"].name,
data_orig_name=self.cleaned_data["rd_upload"].name,
data=self.cleaned_data["rd_upload"],
kind="rd_file",
content_object=tr)
rd_file.save()
        # handling gt_file upload (stored as an 'st_file' asset)
if "gt_upload" in self.changed_data:
if tr.gt_file:
tr.gt_file.delete()
tr.valid_gt_log = None
tr.save()
if self.cleaned_data["gt_upload"]:
st_file = Asset(
name=self.cleaned_data["gt_upload"].name,
data=self.cleaned_data["gt_upload"],
kind="st_file",
content_object=tr)
st_file.save()
# return
return tr
if __name__ == "__main__":
pass
|
bsd-3-clause
| -8,451,139,807,943,812,000 | 32.25 | 73 | 0.546822 | false |
abourget/formalchemy-abourget
|
formalchemy/tests/test_multiple_keys.py
|
1
|
3071
|
# -*- coding: utf-8 -*-
from formalchemy.tests import *
def test_renderer_names():
"""
Check that the input name take care of multiple primary keys::
>>> fs = FieldSet(primary1)
>>> print fs.field.render()
<input id="PrimaryKeys-1_22-field" maxlength="10" name="PrimaryKeys-1_22-field" type="text" value="value1" />
>>> fs = FieldSet(primary2)
>>> print fs.field.render()
<input id="PrimaryKeys-1_33-field" maxlength="10" name="PrimaryKeys-1_33-field" type="text" value="value2" />
Check form rendering with keys::
>>> fs = FieldSet(primary2)
>>> fs.configure(pk=True)
>>> print fs.render()
<div>
<label class="field_req" for="PrimaryKeys-1_33-id">
Id
</label>
<input id="PrimaryKeys-1_33-id" name="PrimaryKeys-1_33-id" type="text" value="1" />
</div>
<script type="text/javascript">
//<![CDATA[
document.getElementById("PrimaryKeys-1_33-id").focus();
//]]>
</script>
<div>
<label class="field_req" for="PrimaryKeys-1_33-id2">
Id2
</label>
<input id="PrimaryKeys-1_33-id2" maxlength="10" name="PrimaryKeys-1_33-id2" type="text" value="33" />
</div>
<div>
<label class="field_req" for="PrimaryKeys-1_33-field">
Field
</label>
<input id="PrimaryKeys-1_33-field" maxlength="10" name="PrimaryKeys-1_33-field" type="text" value="value2" />
</div>
"""
def test_foreign_keys():
"""
    Assume that we can have more than one ForeignKey in the primary key::
>>> fs = FieldSet(orderuser2)
>>> fs.configure(pk=True)
>>> print pretty_html(fs.user.render())
<select id="OrderUser-1_2-user_id" name="OrderUser-1_2-user_id">
<option value="1" selected="selected">
Bill
</option>
<option value="2">
John
</option>
</select>
>>> print pretty_html(fs.order.render())
<select id="OrderUser-1_2-order_id" name="OrderUser-1_2-order_id">
<option value="1">
Quantity: 10
</option>
<option value="2" selected="selected">
Quantity: 5
</option>
<option value="3">
Quantity: 6
</option>
</select>
"""
def test_deserialize():
"""
Assume that we can deserialize a value
"""
fs = FieldSet(primary1, data={'PrimaryKeys-1_22-field':'new_value'})
assert fs.validate() is True
assert fs.field.value == 'new_value'
fs.sync()
session.rollback()
def test_deserialize_new_record():
"""
Assume that we can deserialize a value
"""
fs = FieldSet(PrimaryKeys(), data={'PrimaryKeys-_-id':'8',
'PrimaryKeys-_-id2':'9'})
fs.configure(include=[fs.id, fs.id2])
assert fs.validate() is True
fs.sync()
assert fs.model.id == 8, fs.model.id
assert fs.model.id2 == '9', fs.model.id2
session.rollback()
|
mit
| 7,984,146,956,180,403,000 | 29.405941 | 118 | 0.547704 | false |
smartstudy/project_cron
|
main.py
|
1
|
2804
|
import json
import os
from AppKit import NSApplication, NSStatusBar, NSMenu, NSMenuItem, NSVariableStatusItemLength, NSImage
from PyObjCTools import AppHelper
from project_cron.models import Schedule
from threading import Timer
from project_cron.utils import logutil
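# Menu-bar app: reads schedule definitions from ~/Documents/schedules.json,
# shows one menu item per schedule and runs due schedules on a 60-second timer.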
class App(NSApplication):
def finishLaunching(self):
# Make statusbar item
statusbar = NSStatusBar.systemStatusBar()
self.statusitem = statusbar.statusItemWithLength_(NSVariableStatusItemLength)
self.icon = NSImage.alloc().initByReferencingFile_('icon.png')
self.icon.setScalesWhenResized_(True)
self.icon.setSize_((20, 20))
self.statusitem.setImage_(self.icon)
self._schedules = []
self._menu_items = []
self._initialize_schedules()
self._initialize_menu()
self._timer = Timer(60, self.timer_callback)
self._timer.start()
def _initialize_schedules(self):
USER_ROOT = os.path.expanduser('~')
DOCUMENTS = os.path.join(USER_ROOT, 'Documents')
SCHEDULES = os.path.join(DOCUMENTS, 'schedules.json')
schedules = json.load(open(SCHEDULES, encoding='utf8'))
for raw_info in schedules:
self._schedules.append(Schedule(raw_info))
def _initialize_menu(self):
self.menubarMenu = NSMenu.alloc().init()
for schedule in self._schedules:
menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(schedule.name, 'execute:', '')
self._menu_items.append(menu_item)
self.menubarMenu.addItem_(menu_item)
menu_item = NSMenuItem.separatorItem()
self.menubarMenu.addItem_(menu_item)
self.quit = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_('Quit', 'terminate:', '')
self.menubarMenu.addItem_(self.quit)
self.statusitem.setMenu_(self.menubarMenu)
self.statusitem.setToolTip_('Crow')
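    # Runs once a minute: executes every schedule, logs and resets any schedule
    # that raises, then re-arms the timer for the next pass.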
def timer_callback(self):
self._timer = None
for schedule in self._schedules:
try:
schedule.execute()
except:
import traceback
logutil.error(schedule.name, traceback.format_exc())
schedule.reset()
interval = 60
self._timer = Timer(interval, self.timer_callback)
self._timer.start()
def execute_(self, notification):
for schedule in self._schedules:
if schedule.name == notification.title():
try:
schedule.execute_actions()
except:
import traceback
logutil.error(schedule.name, traceback.format_exc())
schedule.reset()
if __name__ == "__main__":
app = App.sharedApplication()
AppHelper.runEventLoop()
|
mit
| -2,770,964,706,049,844,700 | 34.05 | 109 | 0.622325 | false |
bsmr-misc-forks/letsencrypt
|
certbot/tests/display/ops_test.py
|
1
|
12393
|
# coding=utf-8
"""Test certbot.display.ops."""
import os
import sys
import tempfile
import unittest
import mock
import zope.component
from acme import jose
from acme import messages
from certbot import account
from certbot import errors
from certbot import interfaces
from certbot.display import util as display_util
from certbot.tests import test_util
KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem"))
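# The rsa512 test vector above is reused as the account key in ChooseAccountTest.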
class GetEmailTest(unittest.TestCase):
"""Tests for certbot.display.ops.get_email."""
def setUp(self):
mock_display = mock.MagicMock()
self.input = mock_display.input
zope.component.provideUtility(mock_display, interfaces.IDisplay)
@classmethod
def _call(cls, **kwargs):
from certbot.display.ops import get_email
return get_email(**kwargs)
def test_cancel_none(self):
self.input.return_value = (display_util.CANCEL, "foo@bar.baz")
self.assertRaises(errors.Error, self._call)
self.assertRaises(errors.Error, self._call, optional=False)
def test_ok_safe(self):
self.input.return_value = (display_util.OK, "foo@bar.baz")
with mock.patch("certbot.display.ops.util.safe_email") as mock_safe_email:
mock_safe_email.return_value = True
            self.assertEqual(self._call(), "foo@bar.baz")
def test_ok_not_safe(self):
self.input.return_value = (display_util.OK, "foo@bar.baz")
with mock.patch("certbot.display.ops.util.safe_email") as mock_safe_email:
mock_safe_email.side_effect = [False, True]
            self.assertEqual(self._call(), "foo@bar.baz")
def test_invalid_flag(self):
invalid_txt = "There seem to be problems"
self.input.return_value = (display_util.OK, "foo@bar.baz")
with mock.patch("certbot.display.ops.util.safe_email") as mock_safe_email:
mock_safe_email.return_value = True
self._call()
self.assertTrue(invalid_txt not in self.input.call_args[0][0])
self._call(invalid=True)
self.assertTrue(invalid_txt in self.input.call_args[0][0])
def test_optional_flag(self):
self.input.return_value = (display_util.OK, "foo@bar.baz")
with mock.patch("certbot.display.ops.util.safe_email") as mock_safe_email:
mock_safe_email.side_effect = [False, True]
self._call(optional=False)
for call in self.input.call_args_list:
self.assertTrue(
"--register-unsafely-without-email" not in call[0][0])
class ChooseAccountTest(unittest.TestCase):
"""Tests for certbot.display.ops.choose_account."""
def setUp(self):
zope.component.provideUtility(display_util.FileDisplay(sys.stdout))
self.accounts_dir = tempfile.mkdtemp("accounts")
self.account_keys_dir = os.path.join(self.accounts_dir, "keys")
os.makedirs(self.account_keys_dir, 0o700)
self.config = mock.MagicMock(
accounts_dir=self.accounts_dir,
account_keys_dir=self.account_keys_dir,
server="certbot-demo.org")
self.key = KEY
self.acc1 = account.Account(messages.RegistrationResource(
uri=None, new_authzr_uri=None, body=messages.Registration.from_data(
email="email1@g.com")), self.key)
self.acc2 = account.Account(messages.RegistrationResource(
uri=None, new_authzr_uri=None, body=messages.Registration.from_data(
email="email2@g.com", phone="phone")), self.key)
@classmethod
def _call(cls, accounts):
from certbot.display import ops
return ops.choose_account(accounts)
@mock.patch("certbot.display.ops.z_util")
def test_one(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 0)
self.assertEqual(self._call([self.acc1]), self.acc1)
@mock.patch("certbot.display.ops.z_util")
def test_two(self, mock_util):
mock_util().menu.return_value = (display_util.OK, 1)
self.assertEqual(self._call([self.acc1, self.acc2]), self.acc2)
@mock.patch("certbot.display.ops.z_util")
def test_cancel(self, mock_util):
mock_util().menu.return_value = (display_util.CANCEL, 1)
self.assertTrue(self._call([self.acc1, self.acc2]) is None)
class GenSSLLabURLs(unittest.TestCase):
"""Loose test of _gen_ssl_lab_urls. URL can change easily in the future."""
def setUp(self):
zope.component.provideUtility(display_util.FileDisplay(sys.stdout))
@classmethod
def _call(cls, domains):
from certbot.display.ops import _gen_ssl_lab_urls
return _gen_ssl_lab_urls(domains)
def test_zero(self):
self.assertEqual(self._call([]), [])
def test_two(self):
urls = self._call(["eff.org", "umich.edu"])
self.assertTrue("eff.org" in urls[0])
self.assertTrue("umich.edu" in urls[1])
class GenHttpsNamesTest(unittest.TestCase):
"""Test _gen_https_names."""
def setUp(self):
zope.component.provideUtility(display_util.FileDisplay(sys.stdout))
@classmethod
def _call(cls, domains):
from certbot.display.ops import _gen_https_names
return _gen_https_names(domains)
def test_zero(self):
self.assertEqual(self._call([]), "")
def test_one(self):
doms = [
"example.com",
"asllkjsadfljasdf.c",
]
for dom in doms:
self.assertEqual(self._call([dom]), "https://%s" % dom)
def test_two(self):
domains_list = [
["foo.bar.org", "bar.org"],
["paypal.google.facebook.live.com", "*.zombo.example.com"],
]
for doms in domains_list:
self.assertEqual(
self._call(doms),
"https://{dom[0]} and https://{dom[1]}".format(dom=doms))
def test_three(self):
doms = ["a.org", "b.org", "c.org"]
        # We use an Oxford comma
self.assertEqual(
self._call(doms),
"https://{dom[0]}, https://{dom[1]}, and https://{dom[2]}".format(
dom=doms))
def test_four(self):
doms = ["a.org", "b.org", "c.org", "d.org"]
exp = ("https://{dom[0]}, https://{dom[1]}, https://{dom[2]}, "
"and https://{dom[3]}".format(dom=doms))
self.assertEqual(self._call(doms), exp)
class ChooseNamesTest(unittest.TestCase):
"""Test choose names."""
def setUp(self):
zope.component.provideUtility(display_util.FileDisplay(sys.stdout))
self.mock_install = mock.MagicMock()
@classmethod
def _call(cls, installer):
from certbot.display.ops import choose_names
return choose_names(installer)
@mock.patch("certbot.display.ops._choose_names_manually")
def test_no_installer(self, mock_manual):
self._call(None)
self.assertEqual(mock_manual.call_count, 1)
@mock.patch("certbot.display.ops.z_util")
def test_no_installer_cancel(self, mock_util):
mock_util().input.return_value = (display_util.CANCEL, [])
self.assertEqual(self._call(None), [])
@mock.patch("certbot.display.ops.z_util")
def test_no_names_choose(self, mock_util):
self.mock_install().get_all_names.return_value = set()
domain = "example.com"
mock_util().input.return_value = (display_util.OK, domain)
actual_doms = self._call(self.mock_install)
self.assertEqual(mock_util().input.call_count, 1)
self.assertEqual(actual_doms, [domain])
self.assertTrue(
"configuration files" in mock_util().input.call_args[0][0])
@mock.patch("certbot.display.ops.z_util")
def test_filter_names_valid_return(self, mock_util):
self.mock_install.get_all_names.return_value = set(["example.com"])
mock_util().checklist.return_value = (display_util.OK, ["example.com"])
names = self._call(self.mock_install)
self.assertEqual(names, ["example.com"])
self.assertEqual(mock_util().checklist.call_count, 1)
@mock.patch("certbot.display.ops.z_util")
def test_filter_names_nothing_selected(self, mock_util):
self.mock_install.get_all_names.return_value = set(["example.com"])
mock_util().checklist.return_value = (display_util.OK, [])
self.assertEqual(self._call(self.mock_install), [])
@mock.patch("certbot.display.ops.z_util")
def test_filter_names_cancel(self, mock_util):
self.mock_install.get_all_names.return_value = set(["example.com"])
mock_util().checklist.return_value = (
display_util.CANCEL, ["example.com"])
self.assertEqual(self._call(self.mock_install), [])
def test_get_valid_domains(self):
from certbot.display.ops import get_valid_domains
all_valid = ["example.com", "second.example.com",
"also.example.com", "under_score.example.com",
"justtld"]
all_invalid = ["öóòps.net", "*.wildcard.com", "uniçodé.com"]
two_valid = ["example.com", "úniçøde.com", "also.example.com"]
self.assertEqual(get_valid_domains(all_valid), all_valid)
self.assertEqual(get_valid_domains(all_invalid), [])
self.assertEqual(len(get_valid_domains(two_valid)), 2)
@mock.patch("certbot.display.ops.z_util")
def test_choose_manually(self, mock_util):
from certbot.display.ops import _choose_names_manually
# No retry
mock_util().yesno.return_value = False
# IDN and no retry
mock_util().input.return_value = (display_util.OK,
"uniçodé.com")
self.assertEqual(_choose_names_manually(), [])
# IDN exception with previous mocks
with mock.patch(
"certbot.display.ops.display_util.separate_list_input"
) as mock_sli:
unicode_error = UnicodeEncodeError('mock', u'', 0, 1, 'mock')
mock_sli.side_effect = unicode_error
self.assertEqual(_choose_names_manually(), [])
# Valid domains
mock_util().input.return_value = (display_util.OK,
("example.com,"
"under_score.example.com,"
"justtld,"
"valid.example.com"))
self.assertEqual(_choose_names_manually(),
["example.com", "under_score.example.com",
"justtld", "valid.example.com"])
# Three iterations
mock_util().input.return_value = (display_util.OK,
"uniçodé.com")
yn = mock.MagicMock()
yn.side_effect = [True, True, False]
mock_util().yesno = yn
_choose_names_manually()
self.assertEqual(mock_util().yesno.call_count, 3)
class SuccessInstallationTest(unittest.TestCase):
# pylint: disable=too-few-public-methods
"""Test the success installation message."""
@classmethod
def _call(cls, names):
from certbot.display.ops import success_installation
success_installation(names)
@mock.patch("certbot.display.ops.z_util")
def test_success_installation(self, mock_util):
mock_util().notification.return_value = None
names = ["example.com", "abc.com"]
self._call(names)
self.assertEqual(mock_util().notification.call_count, 1)
arg = mock_util().notification.call_args_list[0][0][0]
for name in names:
self.assertTrue(name in arg)
class SuccessRenewalTest(unittest.TestCase):
# pylint: disable=too-few-public-methods
"""Test the success renewal message."""
@classmethod
def _call(cls, names):
from certbot.display.ops import success_renewal
success_renewal(names, "renew")
@mock.patch("certbot.display.ops.z_util")
def test_success_renewal(self, mock_util):
mock_util().notification.return_value = None
names = ["example.com", "abc.com"]
self._call(names)
self.assertEqual(mock_util().notification.call_count, 1)
arg = mock_util().notification.call_args_list[0][0][0]
for name in names:
self.assertTrue(name in arg)
if __name__ == "__main__":
unittest.main() # pragma: no cover
|
apache-2.0
| -3,321,504,893,694,046,700 | 36.18018 | 82 | 0.609725 | false |
nttks/edx-platform
|
biz/djangoapps/ga_achievement/tests/test_views.py
|
1
|
201745
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from collections import OrderedDict
from datetime import datetime
from ddt import ddt
import json
from mock import patch
import pytz
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from biz.djangoapps.ga_achievement.achievement_store import PlaybackStore, ScoreStore
from biz.djangoapps.ga_achievement.tests.factories import PlaybackFactory, ScoreFactory, PlaybackBatchStatusFactory, \
ScoreBatchStatusFactory
from biz.djangoapps.ga_contract.tests.factories import ContractFactory
from biz.djangoapps.gx_member.tests.factories import MemberFactory
from biz.djangoapps.gx_org_group.models import Group
from biz.djangoapps.gx_org_group.tests.factories import RightFactory, GroupUtil
from biz.djangoapps.util import datetime_utils
from biz.djangoapps.util.tests.testcase import BizStoreTestBase, BizViewTestBase
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from util.file import course_filename_prefix_generator
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
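# Tests for the score achievement views: each test builds an org/contract/course
# fixture, seeds the ScoreStore, then drives the index, search-ajax and
# CSV-download endpoints.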
@ddt
class ScoreViewTest(BizStoreTestBase, BizViewTestBase, ModuleStoreTestCase):
def _index_view(self):
return reverse('biz:achievement:score')
def _ajax_view(self):
return reverse('biz:achievement:score_search_ajax')
def _download_csv_view(self):
return reverse('biz:achievement:score_download_csv')
def _setup(self, is_achievement_data_empty=False, record_count=1):
        self.maxDiff = None  # show full diffs from assertDictEqual
self.expect_record_list = []
self._set_date()
self.setup_user()
self._create_course_data()
self._create_org_data()
self._create_contract_data()
self._create_achievement_column()
if not is_achievement_data_empty:
for i in range(record_count):
self._create_achievement_data(suffix=str(i))
def _setup_and_user_create(self, is_achievement_data_empty=False, record_count=1):
        self.maxDiff = None  # show full diffs from assertDictEqual
self.expect_record_list = []
self._set_date()
self.setup_user()
self._create_course_data()
self._create_org_data()
self._create_contract_data()
self._create_achievement_column()
if not is_achievement_data_empty:
for i in range(record_count):
self._create_achievement_data_and_user(suffix=str(i))
def _setup_and_user_create_not_group(self, is_achievement_data_empty=False, record_count=1):
        self.maxDiff = None  # show full diffs from assertDictEqual
self.expect_record_list = []
self._set_date()
self.setup_user()
self._create_course_data()
self._create_org_data()
self._create_contract_data()
self._create_achievement_column()
if not is_achievement_data_empty:
for i in range(record_count):
self._create_achievement_data_and_user_not_group(suffix=str(i))
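    # The column document describes how each score field should be rendered in
    # the score grid (e.g. text, percent or date columns).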
def _create_achievement_column(self):
self.dict_column_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__COLUMN,
ScoreStore.FIELD_FULL_NAME: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_USERNAME: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_EMAIL: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_STUDENT_STATUS: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.COLUMN_TYPE__TIME,
ScoreStore.FIELD_TOTAL_SCORE: ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS': ScoreStore.COLUMN_TYPE__TEXT,
}
def _set_date(self):
self.utc_datetime = datetime(2016, 3, 1, 16, 58, 30, 0, tzinfo=pytz.utc)
self.utc_datetime_update = datetime(2016, 3, 10, 16, 58, 30, 0, tzinfo=pytz.utc)
def _create_course_data(self):
self.course = CourseFactory.create(org='gacco', number='course', run='run1')
self.overview = CourseOverview.get_from_id(self.course.id)
def _create_org_data(self):
self.org_a = self._create_organization(org_name='a', org_code='a', creator_org=self.gacco_organization)
def _create_contract_data(self):
self.contract = self._create_contract('contract_a', self.org_a, self.gacco_organization,
self.user, 'invitation_code_a')
def _create_contract(self, name, contractor, owner, created_by, invitation_code):
return ContractFactory.create(contract_name=name, contractor_organization=contractor, owner_organization=owner,
created_by=created_by, invitation_code=invitation_code)
def _create_achievement_data(self, suffix=''):
self.dict_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__RECORD,
ScoreStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
ScoreStore.FIELD_USERNAME: 'TEST{}'.format(suffix),
ScoreStore.FIELD_EMAIL: 'test{}@example.com'.format(suffix),
ScoreStore.FIELD_STUDENT_STATUS: ScoreStore.FIELD_CERTIFICATE_STATUS__UNPUBLISHED,
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.FIELD_CERTIFICATE_STATUS__DOWNLOADABLE,
ScoreStore.FIELD_TOTAL_SCORE: 0.9,
'SECTIONS': 'SECTION_{}'.format(suffix),
}
self.dict_data.update({ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE: self.utc_datetime})
self.expect_record_list.append(self.dict_data.copy())
ScoreFactory.create(**self.dict_data)
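    # Variant that additionally registers the score record's user as an org
    # member in group G02-01, so group- and member-based filtering can be tested.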
def _create_achievement_data_and_user(self, suffix=''):
active_user = UserFactory.create()
GroupUtil(org=self.org_a, user=self.user).import_data()
groups = Group.objects.filter(org=self.org_a, group_code='G02-01').first()
MemberFactory.create(
org=self.org_a,
group=groups,
user=active_user,
code='code{}'.format(suffix),
created_by=self.user,
creator_org=self.org_a,
updated_by=self.user,
updated_org=self.org_a,
is_active=True,
is_delete=False,
org1='org1',
)
self.dict_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__RECORD,
ScoreStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
ScoreStore.FIELD_USERNAME: active_user.username,
ScoreStore.FIELD_EMAIL: active_user.email,
ScoreStore.FIELD_STUDENT_STATUS: ScoreStore.FIELD_STUDENT_STATUS__ENROLLED,
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.FIELD_CERTIFICATE_STATUS__DOWNLOADABLE,
ScoreStore.FIELD_TOTAL_SCORE: 0.9,
'SECTIONS': 'SECTION_{}'.format(suffix),
}
self.dict_data.update({ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE: self.utc_datetime})
self.expect_record_list.append(self.dict_data.copy())
ScoreFactory.create(**self.dict_data)
def _create_achievement_data_and_user_not_group(self, suffix=''):
active_user = UserFactory.create()
GroupUtil(org=self.org_a, user=self.user).import_data()
MemberFactory.create(
org=self.org_a,
group=None,
user=active_user,
code='code{}'.format(suffix),
created_by=self.user,
creator_org=self.org_a,
updated_by=self.user,
updated_org=self.org_a,
is_active=True,
is_delete=False,
org1='org1',
)
self.dict_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__RECORD,
ScoreStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
ScoreStore.FIELD_USERNAME: active_user.username,
ScoreStore.FIELD_EMAIL: active_user.email,
ScoreStore.FIELD_STUDENT_STATUS: ScoreStore.FIELD_STUDENT_STATUS__NOT_ENROLLED,
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.FIELD_CERTIFICATE_STATUS__DOWNLOADABLE,
ScoreStore.FIELD_TOTAL_SCORE: 0.9,
'SECTIONS': 'SECTION_{}'.format(suffix),
}
self.dict_data.update({ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE: self.utc_datetime})
self.expect_record_list.append(self.dict_data.copy())
ScoreFactory.create(**self.dict_data)
def _create_batch_status(self, status):
self.batch_status = ScoreBatchStatusFactory.create(contract=self.contract,
course_id=unicode(self.course.id),
status=status,
student_count=4)
def _assert_student_status(self, student_status):
self.assertEqual(student_status, [
'Not Enrolled',
'Enrolled',
'Finish Enrolled',
])
def _get_csv_file_name(self, str_datetime):
return u'{course_prefix}_{csv_name}_{timestamp_str}.csv'.format(
course_prefix=course_filename_prefix_generator(self.course.id),
csv_name='score_status',
timestamp_str=str_datetime
)
def _assert_column_data(self, columns):
self.assertDictEqual(dict(json.loads(columns)), {
ScoreStore.FIELD_USERNAME: 'text',
ScoreStore.FIELD_CERTIFICATE_STATUS: 'text',
ScoreStore.FIELD_TOTAL_SCORE: 'percent',
'Register Status': 'text',
ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE: 'date',
ScoreStore.FIELD_DOCUMENT_TYPE: 'text',
'SECTIONS': 'text',
ScoreStore.FIELD_FULL_NAME: 'text',
ScoreStore.FIELD_EMAIL: 'text',
'Organization Groups': 'text',
'Organization1': 'text',
'Organization2': 'text',
'Organization3': 'text',
'Organization4': 'hidden',
'Organization5': 'hidden',
'Organization6': 'hidden',
'Organization7': 'hidden',
'Organization8': 'hidden',
'Organization9': 'hidden',
'Organization10': 'hidden',
'Item1': 'text',
'Item2': 'text',
'Item3': 'text',
'Item4': 'hidden',
'Item5': 'hidden',
'Item6': 'hidden',
'Item7': 'hidden',
'Item8': 'hidden',
'Item9': 'hidden',
'Item10': 'hidden',
})
def _assert_column_data_is_status_true(self, columns):
self.assertDictEqual(dict(json.loads(columns)), {
ScoreStore.FIELD_USERNAME: 'text',
ScoreStore.FIELD_CERTIFICATE_STATUS: 'text',
ScoreStore.FIELD_TOTAL_SCORE: 'percent',
ScoreStore.FIELD_STUDENT_STATUS: 'text',
'Register Status': 'text',
ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE: 'date',
ScoreStore.FIELD_DOCUMENT_TYPE: 'text',
'SECTIONS': 'text',
ScoreStore.FIELD_FULL_NAME: 'text',
ScoreStore.FIELD_EMAIL: 'text',
'Organization Groups': 'text',
'Organization1': 'text',
'Organization2': 'text',
'Organization3': 'text',
'Organization4': 'hidden',
'Organization5': 'hidden',
'Organization6': 'hidden',
'Organization7': 'hidden',
'Organization8': 'hidden',
'Organization9': 'hidden',
'Organization10': 'hidden',
'Item1': 'text',
'Item2': 'text',
'Item3': 'text',
'Item4': 'hidden',
'Item5': 'hidden',
'Item6': 'hidden',
'Item7': 'hidden',
'Item8': 'hidden',
'Item9': 'hidden',
'Item10': 'hidden',
})
def _assert_status_list(self, status_list):
self.assertEqual(status_list, {
'Unregister': 'Unregister Invitation',
'Input': 'Input Invitation',
'Register': 'Register Invitation'
})
def _assert_member_org_item_list(self, member_org_item_list):
self.assertEqual(member_org_item_list, OrderedDict([
('org1', 'Organization1'),
('org2', 'Organization2'),
('org3', 'Organization3'),
('org4', 'Organization4'),
('org5', 'Organization5'),
('org6', 'Organization6'),
('org7', 'Organization7'),
('org8', 'Organization8'),
('org9', 'Organization9'),
('org10', 'Organization10'),
('item1', 'Item1'),
('item2', 'Item2'),
('item3', 'Item3'),
('item4', 'Item4'),
('item5', 'Item5'),
('item6', 'Item6'),
('item7', 'Item7'),
('item8', 'Item8'),
('item9', 'Item9'),
('item10', 'Item10'),
]))
def _assert_record_data(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
expect = {
ScoreStore.FIELD_FULL_NAME: self.dict_data[ScoreStore.FIELD_FULL_NAME],
ScoreStore.FIELD_USERNAME: self.dict_data[ScoreStore.FIELD_USERNAME],
ScoreStore.FIELD_EMAIL: self.dict_data[ScoreStore.FIELD_EMAIL],
'Register Status': register_status,
ScoreStore.FIELD_CERTIFICATE_STATUS: self.dict_data[ScoreStore.FIELD_CERTIFICATE_STATUS],
ScoreStore.FIELD_TOTAL_SCORE: self.dict_data[ScoreStore.FIELD_TOTAL_SCORE],
ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE: datetime_utils.format_for_w2ui(
self.dict_data[ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE]),
'SECTIONS': self.dict_data['SECTIONS'],
ScoreStore.FIELD_DOCUMENT_TYPE: self.dict_data[ScoreStore.FIELD_DOCUMENT_TYPE],
u'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
def _assert_record_data_status_true(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
student_status = record[ScoreStore.FIELD_STUDENT_STATUS]
expect = {
ScoreStore.FIELD_FULL_NAME: self.dict_data[ScoreStore.FIELD_FULL_NAME],
ScoreStore.FIELD_USERNAME: self.dict_data[ScoreStore.FIELD_USERNAME],
ScoreStore.FIELD_EMAIL: self.dict_data[ScoreStore.FIELD_EMAIL],
ScoreStore.FIELD_STUDENT_STATUS: student_status,
'Register Status': register_status,
ScoreStore.FIELD_CERTIFICATE_STATUS: self.dict_data[ScoreStore.FIELD_CERTIFICATE_STATUS],
ScoreStore.FIELD_TOTAL_SCORE: self.dict_data[ScoreStore.FIELD_TOTAL_SCORE],
ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE: datetime_utils.format_for_w2ui(
self.dict_data[ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE]),
'SECTIONS': self.dict_data['SECTIONS'],
ScoreStore.FIELD_DOCUMENT_TYPE: self.dict_data[ScoreStore.FIELD_DOCUMENT_TYPE],
u'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
def _assert_record_data_member(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
expect = {
ScoreStore.FIELD_FULL_NAME: self.dict_data[ScoreStore.FIELD_FULL_NAME],
ScoreStore.FIELD_USERNAME: self.dict_data[ScoreStore.FIELD_USERNAME],
ScoreStore.FIELD_EMAIL: self.dict_data[ScoreStore.FIELD_EMAIL],
'Register Status': register_status,
ScoreStore.FIELD_CERTIFICATE_STATUS: self.dict_data[ScoreStore.FIELD_CERTIFICATE_STATUS],
ScoreStore.FIELD_TOTAL_SCORE: self.dict_data[ScoreStore.FIELD_TOTAL_SCORE],
ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE:
datetime_utils.format_for_w2ui(self.dict_data[ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE]),
'SECTIONS': self.dict_data['SECTIONS'],
ScoreStore.FIELD_DOCUMENT_TYPE: self.dict_data[ScoreStore.FIELD_DOCUMENT_TYPE],
"Organization Groups": "G2-1",
"Organization1": 'org1',
"Organization2": None,
"Organization3": None,
"Organization4": None,
"Organization5": None,
"Organization6": None,
"Organization7": None,
"Organization8": None,
"Organization9": None,
"Organization10": None,
"Item1": None,
"Item2": None,
"Item3": None,
"Item4": None,
"Item5": None,
"Item6": None,
"Item7": None,
"Item8": None,
"Item9": None,
"Item10": None,
'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
def _assert_record_data_member_status_true(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
student_status = record[ScoreStore.FIELD_STUDENT_STATUS]
expect = {
ScoreStore.FIELD_FULL_NAME: self.dict_data[ScoreStore.FIELD_FULL_NAME],
ScoreStore.FIELD_USERNAME: self.dict_data[ScoreStore.FIELD_USERNAME],
ScoreStore.FIELD_EMAIL: self.dict_data[ScoreStore.FIELD_EMAIL],
ScoreStore.FIELD_STUDENT_STATUS: student_status,
'Register Status': register_status,
ScoreStore.FIELD_CERTIFICATE_STATUS: self.dict_data[ScoreStore.FIELD_CERTIFICATE_STATUS],
ScoreStore.FIELD_TOTAL_SCORE: self.dict_data[ScoreStore.FIELD_TOTAL_SCORE],
ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE:
datetime_utils.format_for_w2ui(self.dict_data[ScoreStore.FIELD_CERTIFICATE_ISSUE_DATE]),
'SECTIONS': self.dict_data['SECTIONS'],
ScoreStore.FIELD_DOCUMENT_TYPE: self.dict_data[ScoreStore.FIELD_DOCUMENT_TYPE],
"Organization Groups": "G2-1",
"Organization1": 'org1',
"Organization2": None,
"Organization3": None,
"Organization4": None,
"Organization5": None,
"Organization6": None,
"Organization7": None,
"Organization8": None,
"Organization9": None,
"Organization10": None,
"Item1": None,
"Item2": None,
"Item3": None,
"Item4": None,
"Item5": None,
"Item6": None,
"Item7": None,
"Item8": None,
"Item9": None,
"Item10": None,
'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
def _assert_record_count(self, records_count, expect_records_count):
self.assertEqual(records_count, expect_records_count)
def _create_param_search_ajax(self):
param = {
'student_status': '',
'group_code': '',
'offset': 0,
'limit': 100,
'certificate_status': '',
'total_score_from': '',
'total_score_to': '',
'detail_condition_member_name_1': '',
'detail_condition_member_1': '',
}
for i in range(1, 6):
param['detail_condition_member_' + str(i)] = ''
param['detail_condition_member_name_' + str(i)] = ''
param['detail_condition_score_from_' + str(i)] = ''
param['detail_condition_score_name_' + str(i)] = ''
param['detail_condition_score_to_' + str(i)] = ''
return param
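    # Index view: renders ga_achievement/score.html with the batch status,
    # column metadata, group list and the serialized score records.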
def test_index_views(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self._assert_record_data(render_to_response_args[1]['score_records'])
def test_index_views_manager(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self.assertEqual('[]', render_to_response_args[1]['score_records'])
def test_index_views_status_none(self):
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'], '')
self.assertEqual(render_to_response_args[1]['update_status'], '')
self._assert_column_data(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self._assert_record_data(render_to_response_args[1]['score_records'])
def test_index_views_member(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [
(u'G01', 9, u'G1'), (u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2'),
(u'G01-02', 4, u'G1-2'), (u'G02', 10, u'G2'), (u'G02-01', 5, u'G2-1'), (u'G02-01-01', 1, u'G2-1-1'),
(u'G02-01-02', 2, u'G2-1-2'), (u'G02-02', 6, u'G2-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self._assert_record_data_member(render_to_response_args[1]['score_records'])
def test_index_views_member_manager(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager_manager = self._create_manager(
org=self.org_a, user=self.user, created=self.contract, permissions=[self.manager_permission])
groups = Group.objects.filter(org=self.org_a, group_code='G01-01').first()
RightFactory.create(org=self.org_a, group=groups, user=manager_manager.user, created_by=self.user,
creator_org=self.org_a)
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager_manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'],
[(u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self.assertEqual('[]', render_to_response_args[1]['score_records'])
def test_search_ajax(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_mismatch(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = '0'
post_value['total_score_to'] = '0'
post_value['detail_condition_score_name_1'] = 'SECTION_1'
post_value['detail_condition_score_from_1'] = '0'
post_value['detail_condition_score_to_1'] = '0'
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_score_no(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['total_score_no'] = 'True'
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_certificate_status(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ScoreStore.FIELD_CERTIFICATE_STATUS__DOWNLOADABLE
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_detail_condition(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = 'SECTION_1'
post_value['detail_condition_score_from_1'] = '0'
post_value['detail_condition_score_to_1'] = '0'
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_manager(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_intentional_exception(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = '0'
post_value['total_score_to'] = '0'
post_value['detail_condition_score_name_1'] = 'SECTION_1'
post_value['detail_condition_score_from_1'] = '0'
post_value['detail_condition_score_to_1'] = '0'
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0.1'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(400, response.status_code)
def test_search_ajax_not_list_detail_member(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['detail_condition_member_name_1'] = ''
post_value['detail_condition_member_name_2'] = 'org1'
post_value['detail_condition_member_1'] = ''
post_value['detail_condition_member_2'] = 'org1'
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_not_list_group_code(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_not_value(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['detail_condition_member_name_1'] = 'org1'
post_value['detail_condition_member_1'] = 'abc'
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_none(self):
status = 'Finished'
self._setup_and_user_create_not_group()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_mismatch(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_manager(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_member_name_1'] = ''
post_value['detail_condition_member_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_success(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
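# CSV download tests: the response's content-disposition filename is built from
# the batch status timestamp converted to JST, or 'no-timestamp' when no batch
# status record exists.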
def test_download_csv(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_batch_status_none(self):
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name('no-timestamp')
))
def test_download_csv_manager(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member_manager(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_field_types(self):
status = 'Finished'
self._setup(is_achievement_data_empty=True)
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
self.dict_column_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__COLUMN,
ScoreStore.FIELD_FULL_NAME: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_USERNAME: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_EMAIL: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_STUDENT_STATUS: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_TOTAL_SCORE: ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_1': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_2': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_3': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_4': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_5': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_6': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_7': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_8': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_9': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_10': 'UnknownType',  # presumably exercises handling of an unrecognized column type
'SECTIONS_11': ScoreStore.COLUMN_TYPE__TIME,
}
ScoreFactory.create(**self.dict_column_data)
suffix = 1
self.dict_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__RECORD,
ScoreStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
ScoreStore.FIELD_USERNAME: 'TEST{}'.format(suffix),
ScoreStore.FIELD_EMAIL: 'test{}@example.com'.format(suffix),
ScoreStore.FIELD_STUDENT_STATUS: "Registration cancellation",
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.FIELD_CERTIFICATE_STATUS__DOWNLOADABLE,
ScoreStore.FIELD_TOTAL_SCORE: 0.9,
'SECTIONS_1': 'SECTION_{}'.format(suffix),
'SECTIONS_2': suffix,
'SECTIONS_3': datetime(2016, 3, 10, 16, 58, 30, 0, tzinfo=pytz.utc),
'SECTIONS_4': '',
'SECTIONS_5': '1',
'SECTIONS_6': '',
'SECTIONS_7': ScoreStore.VALUE__NOT_ATTEMPTED,
'SECTIONS_8': 0.5,
'SECTIONS_9': '',
'SECTIONS_10': None,
'SECTIONS_11': 0.5,
}
self.expect_record_list.append(self.dict_data.copy())
ScoreFactory.create(**self.dict_data)
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_searched_csv(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
param = self._create_param_search_ajax()
param["search-download"] = "search-download"
response = self.client.post(self._download_csv_view(), param)
self.assertEqual(200, response.status_code)
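# The *_is_status_true variants below repeat the tests above with
# is_status_managed enabled on the course overview, which is expected to add
# the student status column to the rendered data.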
def test_index_views_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_is_status_true(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self._assert_record_data_status_true(render_to_response_args[1]['score_records'])
def test_index_views_manager_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_is_status_true(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self.assertEqual('[]', render_to_response_args[1]['score_records'])
def test_index_views_status_none_is_status_true(self):
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'], '')
self.assertEqual(render_to_response_args[1]['update_status'], '')
self._assert_column_data_is_status_true(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self._assert_record_data_status_true(render_to_response_args[1]['score_records'])
def test_index_views_member_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_is_status_true(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [
(u'G01', 9, u'G1'), (u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2'),
(u'G01-02', 4, u'G1-2'), (u'G02', 10, u'G2'), (u'G02-01', 5, u'G2-1'), (u'G02-01-01', 1, u'G2-1-1'),
(u'G02-01-02', 2, u'G2-1-2'), (u'G02-02', 6, u'G2-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self._assert_record_data_member_status_true(render_to_response_args[1]['score_records'])
def test_index_views_member_manager_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager_manager = self._create_manager(
org=self.org_a, user=self.user, created=self.contract, permissions=[self.manager_permission])
groups = Group.objects.filter(org=self.org_a, group_code='G01-01').first()
RightFactory.create(org=self.org_a, group=groups, user=manager_manager.user, created_by=self.user,
creator_org=self.org_a)
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager_manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/score.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_is_status_true(render_to_response_args[1]['score_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'],
[(u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['score_section_names'], ['SECTIONS'])
self.assertEqual('[]', render_to_response_args[1]['score_records'])
def test_search_ajax_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_mismatch_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = '0'
post_value['total_score_to'] = '0'
post_value['detail_condition_score_name_1'] = 'SECTION_1'
post_value['detail_condition_score_from_1'] = '0'
post_value['detail_condition_score_to_1'] = '0'
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_score_no_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['total_score_no'] = 'True'  # 'score_no' search checkbox; submitted as the string 'True'
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_certificate_status_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ScoreStore.FIELD_CERTIFICATE_STATUS__DOWNLOADABLE
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_detail_condition_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = 'SECTION_1'
post_value['detail_condition_score_from_1'] = '0'
post_value['detail_condition_score_to_1'] = '0'
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_manager_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_intentional_exception_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = '0'
post_value['total_score_to'] = '0'
post_value['detail_condition_score_name_1'] = 'SECTION_1'
post_value['detail_condition_score_from_1'] = '0'
post_value['detail_condition_score_to_1'] = '0'
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0.1'  # non-integer offset deliberately triggers the 400 response asserted below
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(400, response.status_code)
def test_search_ajax_not_list_detail_member_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['detail_condition_member_name_1'] = ''
post_value['detail_condition_member_name_2'] = 'org1'
post_value['detail_condition_member_1'] = ''
post_value['detail_condition_member_2'] = 'org1'
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_not_list_group_code_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_score_name_1'] = ''
post_value['detail_condition_score_from_1'] = ''
post_value['detail_condition_score_to_1'] = ''
post_value['detail_condition_score_no_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_not_value_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['detail_condition_member_name_1'] = 'org1'
post_value['detail_condition_member_1'] = 'abc'
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_none_is_status_true(self):
status = 'Finished'
self._setup_and_user_create_not_group()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_mismatch_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_manager_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['detail_condition_member_name_1'] = ''
post_value['detail_condition_member_1'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_success_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_score_from'] = ''
post_value['total_score_to'] = ''
post_value['certificate_status'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_download_csv_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_batch_status_none_is_status_true(self):
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name('no-timestamp')
))
def test_download_csv_manager_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member_manager_is_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_field_types_is_status_true(self):
status = 'Finished'
self._setup(is_achievement_data_empty=True)
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
self.dict_column_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__COLUMN,
ScoreStore.FIELD_FULL_NAME: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_USERNAME: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_EMAIL: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_STUDENT_STATUS: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.COLUMN_TYPE__TEXT,
ScoreStore.FIELD_TOTAL_SCORE: ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_1': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_2': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_3': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_4': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_5': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_6': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_7': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_8': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_9': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_10': 'UnknownType',  # presumably exercises handling of an unrecognized column type
'SECTIONS_11': ScoreStore.COLUMN_TYPE__TIME,
}
ScoreFactory.create(**self.dict_column_data)
suffix = 1
self.dict_data = {
ScoreStore.FIELD_CONTRACT_ID: self.contract.id,
ScoreStore.FIELD_COURSE_ID: unicode(self.course.id),
ScoreStore.FIELD_DOCUMENT_TYPE: ScoreStore.FIELD_DOCUMENT_TYPE__RECORD,
ScoreStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
ScoreStore.FIELD_USERNAME: 'TEST{}'.format(suffix),
ScoreStore.FIELD_EMAIL: 'test{}@example.com'.format(suffix),
ScoreStore.FIELD_STUDENT_STATUS: "Registration cancellation",
ScoreStore.FIELD_CERTIFICATE_STATUS: ScoreStore.FIELD_CERTIFICATE_STATUS__DOWNLOADABLE,
ScoreStore.FIELD_TOTAL_SCORE: 0.9,
'SECTIONS_1': 'SECTION_{}'.format(suffix),
'SECTIONS_2': suffix,
'SECTIONS_3': datetime(2016, 3, 10, 16, 58, 30, 0, tzinfo=pytz.utc),
'SECTIONS_4': '',
'SECTIONS_5': '1',
'SECTIONS_6': '',
'SECTIONS_7': ScoreStore.VALUE__NOT_ATTEMPTED,
'SECTIONS_8': 0.5,
'SECTIONS_9': '',
'SECTIONS_10': None,
'SECTIONS_11': 0.5,
}
self.expect_record_list.append(self.dict_data.copy())
ScoreFactory.create(**self.dict_data)
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_searched_csv_is_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
param = self._create_param_search_ajax()
param["search-download"] = "search-download"
response = self.client.post(self._download_csv_view(), param)
self.assertEqual(200, response.status_code)
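
# Tests for the playback achievement views under biz:achievement:playback
# (index view, search ajax and CSV download). The helpers below build course,
# organization and contract fixtures and insert column/record documents into
# PlaybackStore so the views have data to render.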
@ddt
class PlaybackViewTest(BizStoreTestBase, BizViewTestBase, ModuleStoreTestCase):
def _index_view(self):
return reverse('biz:achievement:playback')
def _ajax_view(self):
return reverse('biz:achievement:playback_search_ajax')
def _download_csv_view(self):
return reverse('biz:achievement:playback_download_csv')
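# Fixture helpers: _setup creates playback achievement data only;
# _setup_and_user_create additionally registers the record's user as a Member
# of group 'G02-01'; _setup_and_user_create_not_group registers the user as a
# Member without a group.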
def _setup(self, is_achievement_data_empty=False, record_count=1):
self.expect_record_list = []
self._set_date()
self.setup_user()
self._create_course_data()
self._create_org_data()
self._create_contract_data()
if not is_achievement_data_empty:
for i in range(record_count):
self._create_achievement_data(suffix=str(i))
def _setup_and_user_create(self, is_achievement_data_empty=False, record_count=1):
self.expect_record_list = []
self._set_date()
self.setup_user()
self._create_course_data()
self._create_org_data()
self._create_contract_data()
if not is_achievement_data_empty:
for i in range(record_count):
self._create_achievement_data_and_user(suffix=str(i))
def _setup_and_user_create_not_group(self, is_achievement_data_empty=False, record_count=1):
self.expect_record_list = []
self._set_date()
self.setup_user()
self._create_course_data()
self._create_org_data()
self._create_contract_data()
if not is_achievement_data_empty:
for i in range(record_count):
self._create_achievement_data_and_user_not_group(suffix=str(i))
def _set_date(self):
self.utc_datetime_update = datetime(2016, 3, 10, 16, 58, 30, 0, tzinfo=pytz.utc)
def _create_course_data(self):
self.course = CourseFactory.create(org='gacco', number='course', run='run1')
self.overview = CourseOverview.get_from_id(self.course.id)
def _create_org_data(self):
self.org_a = self._create_organization(org_name='a', org_code='a', creator_org=self.gacco_organization)
def _create_contract_data(self):
self.contract = self._create_contract('contract_a', self.org_a, self.gacco_organization,
self.user, 'invitation_code_a')
def _create_contract(self, name, contractor, owner, created_by, invitation_code):
return ContractFactory.create(contract_name=name, contractor_organization=contractor, owner_organization=owner,
created_by=created_by, invitation_code=invitation_code)
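# Each _create_achievement_data* helper stores two PlaybackStore documents per
# call: a column-definition document (FIELD_DOCUMENT_TYPE__COLUMN) and a record
# document (FIELD_DOCUMENT_TYPE__RECORD), keeping a copy of the record in
# self.expect_record_list for later assertions.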
def _create_achievement_data(self, suffix=''):
self.dict_column_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__COLUMN,
PlaybackStore.FIELD_FULL_NAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_USERNAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_EMAIL: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: PlaybackStore.COLUMN_TYPE__TIME,
'SECTIONS': ScoreStore.COLUMN_TYPE__TEXT,
}
PlaybackFactory.create(**self.dict_column_data)
self.dict_record_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__RECORD,
PlaybackStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
PlaybackStore.FIELD_USERNAME: 'TEST{}'.format(suffix),
PlaybackStore.FIELD_EMAIL: 'test{}@example.com'.format(suffix),
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.FIELD_STUDENT_STATUS__ENROLLED,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: 999,
'SECTIONS': 'SECTION_{}'.format(suffix),
}
self.expect_record_list.append(self.dict_record_data.copy())
PlaybackFactory.create(**self.dict_record_data)
def _create_achievement_data_and_user(self, suffix=''):
active_user = UserFactory.create()
GroupUtil(org=self.org_a, user=self.user).import_data()
groups = Group.objects.filter(org=self.org_a, group_code='G02-01').first()
MemberFactory.create(
org=self.org_a,
group=groups,
user=active_user,
code='code{}'.format(suffix),
created_by=self.user,
creator_org=self.org_a,
updated_by=self.user,
updated_org=self.org_a,
is_active=True,
is_delete=False,
org1='org1',
)
self.dict_column_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__COLUMN,
PlaybackStore.FIELD_FULL_NAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_USERNAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_EMAIL: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: PlaybackStore.COLUMN_TYPE__TIME,
'SECTIONS': ScoreStore.COLUMN_TYPE__TEXT,
}
PlaybackFactory.create(**self.dict_column_data)
self.dict_record_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__RECORD,
PlaybackStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
PlaybackStore.FIELD_USERNAME: active_user.username,
PlaybackStore.FIELD_EMAIL: active_user.email,
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.FIELD_STUDENT_STATUS__UNENROLLED,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: 999,
'SECTIONS': 'SECTION_{}'.format(suffix),
}
self.expect_record_list.append(self.dict_record_data.copy())
PlaybackFactory.create(**self.dict_record_data)
def _create_achievement_data_and_user_not_group(self, suffix=''):
active_user = UserFactory.create()
GroupUtil(org=self.org_a, user=self.user).import_data()
MemberFactory.create(
org=self.org_a,
group=None,
user=active_user,
code='code{}'.format(suffix),
created_by=self.user,
creator_org=self.org_a,
updated_by=self.user,
updated_org=self.org_a,
is_active=True,
is_delete=False,
org1='org1',
)
self.dict_column_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__COLUMN,
PlaybackStore.FIELD_FULL_NAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_USERNAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_EMAIL: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: PlaybackStore.COLUMN_TYPE__TIME,
'SECTIONS': ScoreStore.COLUMN_TYPE__TEXT,
}
PlaybackFactory.create(**self.dict_column_data)
self.dict_record_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__RECORD,
PlaybackStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
PlaybackStore.FIELD_USERNAME: active_user.username,
PlaybackStore.FIELD_EMAIL: active_user.email,
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.FIELD_STUDENT_STATUS__UNENROLLED,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: 999,
'SECTIONS': 'SECTION_{}'.format(suffix),
}
self.expect_record_list.append(self.dict_record_data.copy())
PlaybackFactory.create(**self.dict_record_data)
def _create_batch_status(self, status):
self.batch_status = PlaybackBatchStatusFactory.create(contract=self.contract,
course_id=unicode(self.course.id),
status=status,
student_count=4)
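# Builds the default search form POST payload: empty top-level conditions plus
# five empty member/playback detail-condition rows.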
def _create_param_search_ajax(self):
param = {
'student_status': '',
'group_code': '',
'offset': 0,
'limit': 100,
'total_playback_time_from': '',
'total_playback_time_to': '',
}
for i in range(1, 6):
param['detail_condition_member_' + str(i)] = ''
param['detail_condition_member_name_' + str(i)] = ''
param['detail_condition_playback_from_' + str(i)] = ''
param['detail_condition_playback_name_' + str(i)] = ''
param['detail_condition_playback_to_' + str(i)] = ''
return param
def _assert_student_status(self, student_status):
self.assertEqual(student_status, [
'Finish Enrolled',
'Enrolled',
'Not Enrolled'
])
def _get_csv_file_name(self, str_datetime):
return u'{course_prefix}_{csv_name}_{timestamp_str}.csv'.format(
course_prefix=course_filename_prefix_generator(self.course.id),
csv_name='playback_status',
timestamp_str=str_datetime
)
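# Assertion helpers for the context passed to render_to_response. Member
# organization/item columns 1-3 are expected to be visible ('text') and
# columns 4-10 hidden; the *_status_true variants additionally expect the
# student status column when is_status_managed is enabled.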
def _assert_column_data(self, columns):
self.assertDictEqual(dict(json.loads(columns)), {
PlaybackStore.FIELD_FULL_NAME: self.dict_column_data[PlaybackStore.FIELD_FULL_NAME],
PlaybackStore.FIELD_USERNAME: self.dict_column_data[PlaybackStore.FIELD_USERNAME],
PlaybackStore.FIELD_EMAIL: self.dict_column_data[PlaybackStore.FIELD_EMAIL],
'Register Status': self.dict_column_data[PlaybackStore.FIELD_STUDENT_STATUS],
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: self.dict_column_data[PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME],
'SECTIONS': 'text',
"Organization Groups": "text",
'Organization1': 'text',
'Organization2': 'text',
'Organization3': 'text',
'Organization4': 'hidden',
'Organization5': 'hidden',
'Organization6': 'hidden',
'Organization7': 'hidden',
'Organization8': 'hidden',
'Organization9': 'hidden',
'Organization10': 'hidden',
'Item1': 'text',
'Item2': 'text',
'Item3': 'text',
'Item4': 'hidden',
'Item5': 'hidden',
'Item6': 'hidden',
'Item7': 'hidden',
'Item8': 'hidden',
'Item9': 'hidden',
'Item10': 'hidden',
})
def _assert_column_data_status_true(self, columns):
self.assertDictEqual(dict(json.loads(columns)), {
PlaybackStore.FIELD_FULL_NAME: self.dict_column_data[PlaybackStore.FIELD_FULL_NAME],
PlaybackStore.FIELD_USERNAME: self.dict_column_data[PlaybackStore.FIELD_USERNAME],
PlaybackStore.FIELD_EMAIL: self.dict_column_data[PlaybackStore.FIELD_EMAIL],
PlaybackStore.FIELD_STUDENT_STATUS: 'text',
'Register Status': self.dict_column_data[PlaybackStore.FIELD_STUDENT_STATUS],
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: self.dict_column_data[PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME],
'SECTIONS': 'text',
"Organization Groups": "text",
'Organization1': 'text',
'Organization2': 'text',
'Organization3': 'text',
'Organization4': 'hidden',
'Organization5': 'hidden',
'Organization6': 'hidden',
'Organization7': 'hidden',
'Organization8': 'hidden',
'Organization9': 'hidden',
'Organization10': 'hidden',
'Item1': 'text',
'Item2': 'text',
'Item3': 'text',
'Item4': 'hidden',
'Item5': 'hidden',
'Item6': 'hidden',
'Item7': 'hidden',
'Item8': 'hidden',
'Item9': 'hidden',
'Item10': 'hidden',
})
def _assert_status_list(self, status_list):
self.assertEqual(status_list, {
'Unregister': 'Unregister Invitation',
'Input': 'Input Invitation',
'Register': 'Register Invitation'
})
def _assert_member_org_item_list(self, member_org_item_list):
self.assertEqual(member_org_item_list, OrderedDict([
('org1', 'Organization1'),
('org2', 'Organization2'),
('org3', 'Organization3'),
('org4', 'Organization4'),
('org5', 'Organization5'),
('org6', 'Organization6'),
('org7', 'Organization7'),
('org8', 'Organization8'),
('org9', 'Organization9'),
('org10', 'Organization10'),
('item1', 'Item1'),
('item2', 'Item2'),
('item3', 'Item3'),
('item4', 'Item4'),
('item5', 'Item5'),
('item6', 'Item6'),
('item7', 'Item7'),
('item8', 'Item8'),
('item9', 'Item9'),
('item10', 'Item10'),
]))
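# Record assertions: the records argument is a JSON list containing a single
# record, so the brackets are stripped before loading it in order to read the
# dynamically generated 'Register Status' (and, for the *_status_true variants,
# student status) values back out of the rendered context.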
def _assert_record_data(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
expect = {
PlaybackStore.FIELD_FULL_NAME: self.dict_record_data[PlaybackStore.FIELD_FULL_NAME],
PlaybackStore.FIELD_USERNAME: self.dict_record_data[PlaybackStore.FIELD_USERNAME],
PlaybackStore.FIELD_EMAIL: self.dict_record_data[PlaybackStore.FIELD_EMAIL],
'Register Status': register_status,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: self.dict_record_data[
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME],
PlaybackStore.FIELD_DOCUMENT_TYPE:
self.dict_record_data[PlaybackStore.FIELD_DOCUMENT_TYPE],
'SECTIONS': self.dict_record_data['SECTIONS'],
u'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
def _assert_record_data_status_true(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
student_status = record[PlaybackStore.FIELD_STUDENT_STATUS]
expect = {
PlaybackStore.FIELD_FULL_NAME: self.dict_record_data[PlaybackStore.FIELD_FULL_NAME],
PlaybackStore.FIELD_USERNAME: self.dict_record_data[PlaybackStore.FIELD_USERNAME],
PlaybackStore.FIELD_EMAIL: self.dict_record_data[PlaybackStore.FIELD_EMAIL],
PlaybackStore.FIELD_STUDENT_STATUS: student_status,
'Register Status': register_status,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: self.dict_record_data[
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME],
PlaybackStore.FIELD_DOCUMENT_TYPE:
self.dict_record_data[PlaybackStore.FIELD_DOCUMENT_TYPE],
'SECTIONS': self.dict_record_data['SECTIONS'],
u'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
def _assert_record_data_member(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
expect = {
PlaybackStore.FIELD_FULL_NAME: self.dict_record_data[PlaybackStore.FIELD_FULL_NAME],
PlaybackStore.FIELD_USERNAME: self.dict_record_data[PlaybackStore.FIELD_USERNAME],
PlaybackStore.FIELD_EMAIL: self.dict_record_data[PlaybackStore.FIELD_EMAIL],
'Register Status': register_status,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: self.dict_record_data[
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME],
PlaybackStore.FIELD_DOCUMENT_TYPE:
self.dict_record_data[PlaybackStore.FIELD_DOCUMENT_TYPE],
'SECTIONS': self.dict_record_data['SECTIONS'],
"Organization Groups": "G2-1",
"Organization1": 'org1',
"Organization2": None,
"Organization3": None,
"Organization4": None,
"Organization5": None,
"Organization6": None,
"Organization7": None,
"Organization8": None,
"Organization9": None,
"Organization10": None,
"Item1": None,
"Item2": None,
"Item3": None,
"Item4": None,
"Item5": None,
"Item6": None,
"Item7": None,
"Item8": None,
"Item9": None,
"Item10": None,
u'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
def _assert_record_data_member_status_true(self, records):
record = json.loads(records.replace('[', '').replace(']', ''))
register_status = record['Register Status']
student_status = record[PlaybackStore.FIELD_STUDENT_STATUS]
expect = {
PlaybackStore.FIELD_FULL_NAME: self.dict_record_data[PlaybackStore.FIELD_FULL_NAME],
PlaybackStore.FIELD_USERNAME: self.dict_record_data[PlaybackStore.FIELD_USERNAME],
PlaybackStore.FIELD_EMAIL: self.dict_record_data[PlaybackStore.FIELD_EMAIL],
PlaybackStore.FIELD_STUDENT_STATUS: student_status,
'Register Status': register_status,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: self.dict_record_data[
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME],
PlaybackStore.FIELD_DOCUMENT_TYPE:
self.dict_record_data[PlaybackStore.FIELD_DOCUMENT_TYPE],
'SECTIONS': self.dict_record_data['SECTIONS'],
"Organization Groups": "G2-1",
"Organization1": 'org1',
"Organization2": None,
"Organization3": None,
"Organization4": None,
"Organization5": None,
"Organization6": None,
"Organization7": None,
"Organization8": None,
"Organization9": None,
"Organization10": None,
"Item1": None,
"Item2": None,
"Item3": None,
"Item4": None,
"Item5": None,
"Item6": None,
"Item7": None,
"Item8": None,
"Item9": None,
"Item10": None,
u'recid': 1,
}
self.assertDictEqual(json.loads(records)[0], json.loads(unicode(json.dumps(expect))))
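
    # Thin assertEqual wrapper, also used to compare the formatted update datetime.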
def _assert_record_count(self, records_count, expect_records_count):
self.assertEqual(records_count, expect_records_count)
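
    # --- index view ---
    # A director opens the playback page; the rendered context must carry the batch
    # status, playback columns/records, status list and an empty group list.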
def test_index_views(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['playback_columns'])
self._assert_record_data(render_to_response_args[1]['playback_records'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
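
    # Manager variant: the group master is imported first and only the column and list
    # parts of the context are asserted.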
def test_index_views_manager(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['playback_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
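
    # Without a batch status record, update_datetime and update_status are empty.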
def test_index_views_status_none(self):
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'], '')
self.assertEqual(render_to_response_args[1]['update_status'], '')
self._assert_column_data(render_to_response_args[1]['playback_columns'])
self._assert_record_data(render_to_response_args[1]['playback_records'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
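
    # With member data created, the context carries the full group list and the
    # member-aware playback records.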
def test_index_views_member(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['playback_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_record_data_member(render_to_response_args[1]['playback_records'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [
(u'G01', 9, u'G1'), (u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2'),
(u'G01-02', 4, u'G1-2'), (u'G02', 10, u'G2'), (u'G02-01', 5, u'G2-1'), (u'G02-01-01', 1, u'G2-1-1'),
(u'G02-01-02', 2, u'G2-1-2'), (u'G02-02', 6, u'G2-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
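
    # A manager whose access right is limited to G01-01 only sees that group and its
    # children in group_list.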
def test_index_views_member_manager(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager_manager = self._create_manager(
org=self.org_a, user=self.user, created=self.contract, permissions=[self.manager_permission])
groups = Group.objects.filter(org=self.org_a, group_code='G01-01').first()
RightFactory.create(org=self.org_a, group=groups, user=manager_manager.user, created_by=self.user,
creator_org=self.org_a)
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager_manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data(render_to_response_args[1]['playback_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'],
[(u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
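
    # --- search ajax ---
    # These tests POST various search conditions to the ajax endpoint and expect an
    # HTTP 200 response with status 'success' (HTTP 400 for the invalid offset case).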
def test_search_ajax(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_mismatch(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = '0'
post_value['total_playback_time_to'] = '0'
post_value['detail_condition_playback_name_1'] = 'SECTION_1'
post_value['detail_condition_playback_from_1'] = '0'
post_value['detail_condition_playback_to_1'] = '0'
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_total_no(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['total_playback_time_no'] = 'True'
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_detail_condition(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = 'SECTION_1'
post_value['detail_condition_playback_from_1'] = '0'
post_value['detail_condition_playback_to_1'] = '0'
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_manager(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_intentional_exception(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = '0'
post_value['total_playback_time_to'] = '0'
post_value['detail_condition_playback_name_1'] = 'SECTION_1'
post_value['detail_condition_playback_from_1'] = '0'
post_value['detail_condition_playback_to_1'] = '0'
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0.1'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(400, response.status_code)
def test_search_ajax_not_list_detail_member(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['detail_condition_member_name_1'] = ''
post_value['detail_condition_member_name_2'] = 'org1'
post_value['detail_condition_member_1'] = ''
post_value['detail_condition_member_2'] = 'org1'
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_not_list_group_code(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_not_value(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['detail_condition_member_name_1'] = 'org1'
post_value['detail_condition_member_1'] = 'abc'
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_none(self):
status = 'Finished'
self._setup_and_user_create_not_group()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_mismatch(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_manager(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_member_name_1'] = 'org1'
post_value['detail_condition_member_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_success(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
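
    # --- CSV download ---
    # The download view must respond with 200 and a Content-Disposition filename built
    # from the JST-converted batch update time ('no-timestamp' when no batch exists).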
def test_download_csv(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_batch_status_none(self):
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name('no-timestamp')
))
def test_download_csv_manager(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member_manager(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
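
    # Builds column/record documents covering text, date, time, percent and an unknown
    # column type, then downloads the CSV.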
def test_download_csv_field_types(self):
status = 'Finished'
self._setup(is_achievement_data_empty=True)
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
self.dict_column_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__COLUMN,
PlaybackStore.FIELD_FULL_NAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_USERNAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_EMAIL: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: PlaybackStore.COLUMN_TYPE__TIME,
'SECTIONS_1': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_2': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_3': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_4': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_5': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_6': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_7': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_8': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_9': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_10': 'UnknownType',
}
PlaybackFactory.create(**self.dict_column_data)
suffix = 1
self.dict_record_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__RECORD,
PlaybackStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
PlaybackStore.FIELD_USERNAME: 'TEST{}'.format(suffix),
PlaybackStore.FIELD_EMAIL: 'test{}@example.com'.format(suffix),
PlaybackStore.FIELD_STUDENT_STATUS: "Registration cancellation",
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: 999,
'SECTIONS_1': 'SECTION_{}'.format(suffix),
'SECTIONS_2': suffix,
'SECTIONS_3': datetime(2016, 3, 10, 16, 58, 30, 0, tzinfo=pytz.utc),
'SECTIONS_4': '',
'SECTIONS_5': '1',
'SECTIONS_6': '',
'SECTIONS_7': PlaybackStore.VALUE__NOT_ATTEMPTED,
'SECTIONS_8': 0.5,
'SECTIONS_9': '',
'SECTIONS_10': None,
}
self.expect_record_list.append(self.dict_record_data.copy())
PlaybackFactory.create(**self.dict_record_data)
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
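
    # Download with search conditions applied via the 'search-download' parameter.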
def test_download_searched_csv(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = False
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
param = self._create_param_search_ajax()
param["search-download"] = "search-download"
response = self.client.post(self._download_csv_view(), param)
self.assertEqual(200, response.status_code)
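
    # --- is_status_managed = True variants ---
    # The remaining tests repeat the scenarios above with is_status_managed enabled, so
    # the student status column appears in the context and 'student_status' is added to
    # the search conditions.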
def test_index_views_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_status_true(render_to_response_args[1]['playback_columns'])
self._assert_record_data_status_true(render_to_response_args[1]['playback_records'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
def test_index_views_manager_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_status_true(render_to_response_args[1]['playback_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
def test_index_views_status_none_status_true(self):
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'], '')
self.assertEqual(render_to_response_args[1]['update_status'], '')
self._assert_column_data_status_true(render_to_response_args[1]['playback_columns'])
self._assert_record_data_status_true(render_to_response_args[1]['playback_records'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
def test_index_views_member_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_status_true(render_to_response_args[1]['playback_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_record_data_member_status_true(render_to_response_args[1]['playback_records'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'], [
(u'G01', 9, u'G1'), (u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2'),
(u'G01-02', 4, u'G1-2'), (u'G02', 10, u'G2'), (u'G02-01', 5, u'G2-1'), (u'G02-01-01', 1, u'G2-1-1'),
(u'G02-01-02', 2, u'G2-1-2'), (u'G02-02', 6, u'G2-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
def test_index_views_member_manager_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager_manager = self._create_manager(
org=self.org_a, user=self.user, created=self.contract, permissions=[self.manager_permission])
groups = Group.objects.filter(org=self.org_a, group_code='G01-01').first()
RightFactory.create(org=self.org_a, group=groups, user=manager_manager.user, created_by=self.user,
creator_org=self.org_a)
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager_manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
with patch('biz.djangoapps.ga_achievement.views.render_to_response',
return_value=HttpResponse()) as mock_render_to_response:
self.client.get(self._index_view())
render_to_response_args = mock_render_to_response.call_args[0]
self.assertEqual(render_to_response_args[0], 'ga_achievement/playback.html')
self._assert_record_count(render_to_response_args[1]['update_datetime'],
datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y/%m/%d %H:%M'))
self.assertEqual(render_to_response_args[1]['update_status'], status)
self._assert_column_data_status_true(render_to_response_args[1]['playback_columns'])
self._assert_status_list(render_to_response_args[1]['status_list'])
self._assert_member_org_item_list(render_to_response_args[1]['member_org_item_list'])
self.assertEqual(render_to_response_args[1]['group_list'],
[(u'G01-01', 3, u'G1-1'), (u'G01-01-01', 8, u'G1-1-1'), (u'G01-01-02', 7, u'G1-1-2')])
self._assert_student_status(render_to_response_args[1]['student_status'])
self.assertEqual(render_to_response_args[1]['playback_section_names'], ['SECTIONS'])
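
    # Search ajax variants carrying the extra 'student_status' condition.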
def test_search_ajax_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_mismatch_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = '0'
post_value['total_playback_time_to'] = '0'
post_value['detail_condition_playback_name_1'] = 'SECTION_1'
post_value['detail_condition_playback_from_1'] = '0'
post_value['detail_condition_playback_to_1'] = '0'
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_total_no_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['total_playback_time_no'] = 'True'
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_detail_condition_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = 'SECTION_1'
post_value['detail_condition_playback_from_1'] = '0'
post_value['detail_condition_playback_to_1'] = '0'
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_manager_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_intentional_exception_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = '0'
post_value['total_playback_time_to'] = '0'
post_value['detail_condition_playback_name_1'] = 'SECTION_1'
post_value['detail_condition_playback_from_1'] = '0'
post_value['detail_condition_playback_to_1'] = '0'
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0.1'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(400, response.status_code)
def test_search_ajax_not_list_detail_member_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['detail_condition_member_name_1'] = ''
post_value['detail_condition_member_name_2'] = 'org1'
post_value['detail_condition_member_1'] = ''
post_value['detail_condition_member_2'] = 'org1'
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_not_list_group_code_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_playback_name_1'] = ''
post_value['detail_condition_playback_from_1'] = ''
post_value['detail_condition_playback_to_1'] = ''
post_value['detail_condition_playback_no_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_not_value_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['detail_condition_member_name_1'] = 'org1'
post_value['detail_condition_member_1'] = 'abc'
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_none_status_true(self):
status = 'Finished'
self._setup_and_user_create_not_group()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_group_mismatch_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = '1234'
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_manager_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['detail_condition_member_name_1'] = 'org1'
post_value['detail_condition_member_1'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
def test_search_ajax_member_success_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
post_value = dict()
post_value['student_status'] = ''
post_value['total_playback_time_from'] = ''
post_value['total_playback_time_to'] = ''
post_value['offset'] = '0'
post_value['group_code'] = ''
response = self.client.post(self._ajax_view(), post_value)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
self.assertEqual('success', response_data['status'])
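
    # CSV download variants with is_status_managed enabled.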
def test_download_csv_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
    def test_download_csv_batch_status_none_status_true(self):
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name('no-timestamp')
))
def test_download_csv_manager_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
GroupUtil(org=self.org_a, user=self.user).import_data()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_member_manager_status_true(self):
status = 'Finished'
self._setup_and_user_create()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.manager_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_csv_field_types_status_true(self):
status = 'Finished'
self._setup(is_achievement_data_empty=True)
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
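        # Column-definition document covering every supported column type,
        # plus an unknown type, to exercise CSV field-type handling.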
self.dict_column_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__COLUMN,
PlaybackStore.FIELD_FULL_NAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_USERNAME: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_EMAIL: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_STUDENT_STATUS: PlaybackStore.COLUMN_TYPE__TEXT,
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: PlaybackStore.COLUMN_TYPE__TIME,
'SECTIONS_1': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_2': ScoreStore.COLUMN_TYPE__TEXT,
'SECTIONS_3': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_4': ScoreStore.COLUMN_TYPE__DATE,
'SECTIONS_5': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_6': ScoreStore.COLUMN_TYPE__TIME,
'SECTIONS_7': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_8': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_9': ScoreStore.COLUMN_TYPE__PERCENT,
'SECTIONS_10': 'UnknownType',
}
PlaybackFactory.create(**self.dict_column_data)
suffix = 1
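        # Matching record document with representative values for each column,
        # including empty strings, None, and a "not attempted" marker.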
self.dict_record_data = {
PlaybackStore.FIELD_CONTRACT_ID: self.contract.id,
PlaybackStore.FIELD_COURSE_ID: unicode(self.course.id),
PlaybackStore.FIELD_DOCUMENT_TYPE: PlaybackStore.FIELD_DOCUMENT_TYPE__RECORD,
PlaybackStore.FIELD_FULL_NAME: 'TEST TEST{}'.format(suffix),
PlaybackStore.FIELD_USERNAME: 'TEST{}'.format(suffix),
PlaybackStore.FIELD_EMAIL: 'test{}@example.com'.format(suffix),
PlaybackStore.FIELD_STUDENT_STATUS: "Registration cancellation",
PlaybackStore.FIELD_TOTAL_PLAYBACK_TIME: 999,
'SECTIONS_1': 'SECTION_{}'.format(suffix),
'SECTIONS_2': suffix,
'SECTIONS_3': datetime(2016, 3, 10, 16, 58, 30, 0, tzinfo=pytz.utc),
'SECTIONS_4': '',
'SECTIONS_5': '1',
'SECTIONS_6': '',
'SECTIONS_7': PlaybackStore.VALUE__NOT_ATTEMPTED,
'SECTIONS_8': 0.5,
'SECTIONS_9': '',
'SECTIONS_10': None,
}
self.expect_record_list.append(self.dict_record_data.copy())
PlaybackFactory.create(**self.dict_record_data)
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
response = self.client.post(self._download_csv_view())
self.assertEqual(200, response.status_code)
self.assertEqual(response['content-disposition'], 'attachment; filename*=UTF-8\'\'{}'.format(
self._get_csv_file_name(datetime_utils.to_jst(self.utc_datetime_update).strftime('%Y-%m-%d-%H%M'))
))
def test_download_searched_csv_status_true(self):
status = 'Finished'
self._setup()
self.overview.extra.is_status_managed = True
self.overview.extra.save()
manager = self._create_manager(org=self.org_a, user=self.user, created=self.gacco_organization,
permissions=[self.director_permission])
self._create_batch_status(status)
self.batch_status.created = self.utc_datetime_update
self.batch_status.save()
with self.skip_check_course_selection(current_manager=manager, current_organization=self.org_a,
current_contract=self.contract, current_course=self.course):
param = self._create_param_search_ajax()
param["search-download"] = "search-download"
response = self.client.post(self._download_csv_view(), param)
self.assertEqual(200, response.status_code)
|
agpl-3.0
| -2,311,038,217,161,401,000 | 48.562657 | 119 | 0.591504 | false |
ESOedX/edx-platform
|
common/test/acceptance/tests/lms/test_teams.py
|
1
|
84355
|
"""
Acceptance tests for the teams feature.
"""
from __future__ import absolute_import
import json
import random
import time
from uuid import uuid4
import ddt
from dateutil.parser import parse
from selenium.common.exceptions import TimeoutException
from six.moves import map, range
from common.test.acceptance.fixtures import LMS_BASE_URL
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.fixtures.discussion import ForumsConfigMixin, MultipleThreadFixture, Thread
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.common.utils import confirm_prompt
from common.test.acceptance.pages.lms.course_home import CourseHomePage
from common.test.acceptance.pages.lms.learner_profile import LearnerProfilePage
from common.test.acceptance.pages.lms.tab_nav import TabNavPage
from common.test.acceptance.pages.lms.teams import (
BrowseTeamsPage,
BrowseTopicsPage,
EditMembershipPage,
MyTeamsPage,
TeamManagementPage,
TeamPage,
TeamsPage
)
from common.test.acceptance.tests.helpers import EventsTestMixin, UniqueCourseTest, get_modal_alert
from openedx.core.lib.tests import attr
TOPICS_PER_PAGE = 12
class TeamsTabBase(EventsTestMixin, ForumsConfigMixin, UniqueCourseTest):
"""Base class for Teams Tab tests"""
def setUp(self):
super(TeamsTabBase, self).setUp()
self.tab_nav = TabNavPage(self.browser)
self.course_home_page = CourseHomePage(self.browser, self.course_id)
self.teams_page = TeamsPage(self.browser, self.course_id)
# TODO: Refactor so resetting events database is not necessary
self.reset_event_tracking()
self.enable_forums()
def create_topics(self, num_topics):
"""Create `num_topics` test topics."""
return [{u"description": i, u"name": i, u"id": i} for i in map(str, range(num_topics))]
def create_teams(self, topic, num_teams, time_between_creation=0):
"""Create `num_teams` teams belonging to `topic`."""
teams = []
for i in range(num_teams):
team = {
'course_id': self.course_id,
'topic_id': topic['id'],
'name': u'Team {}'.format(i),
'description': u'Description {}'.format(i),
'language': 'aa',
'country': 'AF'
}
teams.append(self.post_team_data(team))
# Sadly, this sleep is necessary in order to ensure that
# sorting by last_activity_at works correctly when running
# in Jenkins.
# THIS IS AN ANTI-PATTERN - DO NOT COPY.
time.sleep(time_between_creation)
return teams
def post_team_data(self, team_data):
"""Given a JSON representation of a team, post it to the server."""
response = self.course_fixture.session.post(
LMS_BASE_URL + '/api/team/v0/teams/',
data=json.dumps(team_data),
headers=self.course_fixture.headers
)
self.assertEqual(response.status_code, 200)
return json.loads(response.text)
def create_memberships(self, num_memberships, team_id):
"""Create `num_memberships` users and assign them to `team_id`. The
last user created becomes the current user."""
memberships = []
for __ in range(num_memberships):
user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
memberships.append(user_info)
self.create_membership(user_info['username'], team_id)
#pylint: disable=attribute-defined-outside-init
self.user_info = memberships[-1]
return memberships
def create_membership(self, username, team_id):
"""Assign `username` to `team_id`."""
response = self.course_fixture.session.post(
LMS_BASE_URL + '/api/team/v0/team_membership/',
data=json.dumps({'username': username, 'team_id': team_id}),
headers=self.course_fixture.headers
)
return json.loads(response.text)
def set_team_configuration(self, configuration, enroll_in_course=True, global_staff=False):
"""
Sets team configuration on the course and calls auto-auth on the user.
"""
#pylint: disable=attribute-defined-outside-init
self.course_fixture = CourseFixture(**self.course_info)
if configuration:
self.course_fixture.add_advanced_settings(
{u"teams_configuration": {u"value": configuration}}
)
self.course_fixture.install()
enroll_course_id = self.course_id if enroll_in_course else None
#pylint: disable=attribute-defined-outside-init
self.user_info = AutoAuthPage(self.browser, course_id=enroll_course_id, staff=global_staff).visit().user_info
self.course_home_page.visit()
def verify_teams_present(self, present):
"""
Verifies whether or not the teams tab is present. If it should be present, also
checks the text on the page (to ensure view is working).
"""
if present:
self.assertIn("Teams", self.tab_nav.tab_names)
self.teams_page.visit()
self.assertEqual(self.teams_page.active_tab(), 'browse')
else:
self.assertNotIn("Teams", self.tab_nav.tab_names)
def verify_teams(self, page, expected_teams):
"""Verify that the list of team cards on the current page match the expected teams in order."""
def assert_team_equal(expected_team, team_card_name, team_card_description):
"""
Helper to assert that a single team card has the expected name and
description.
"""
self.assertEqual(expected_team['name'], team_card_name)
self.assertEqual(expected_team['description'], team_card_description)
team_card_names = page.team_names
team_card_descriptions = page.team_descriptions
list(map(assert_team_equal, expected_teams, team_card_names, team_card_descriptions))
def verify_my_team_count(self, expected_number_of_teams):
""" Verify the number of teams shown on "My Team". """
# We are doing these operations on this top-level page object to avoid reloading the page.
self.teams_page.verify_my_team_count(expected_number_of_teams)
def only_team_events(self, event):
"""Filter out all non-team events."""
return event['event_type'].startswith('edx.team.')
@ddt.ddt
@attr(shard=5)
class TeamsTabTest(TeamsTabBase):
"""
Tests verifying when the Teams tab is present.
"""
def test_teams_not_enabled(self):
"""
Scenario: teams tab should not be present if no team configuration is set
Given I am enrolled in a course without team configuration
When I view the course info page
Then I should not see the Teams tab
"""
self.set_team_configuration(None)
self.verify_teams_present(False)
def test_teams_not_enabled_no_topics(self):
"""
Scenario: teams tab should not be present if team configuration does not specify topics
Given I am enrolled in a course with no topics in the team configuration
When I view the course info page
Then I should not see the Teams tab
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": []})
self.verify_teams_present(False)
def test_teams_enabled(self):
"""
Scenario: teams tab should be present if user is enrolled in the course and it has team configuration
Given I am enrolled in a course with team configuration and topics
When I view the course info page
Then I should see the Teams tab
And the correct content should be on the page
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(1)})
self.verify_teams_present(True)
def test_teams_enabled_global_staff(self):
"""
Scenario: teams tab should be present if user is not enrolled in the course, but is global staff
Given there is a course with team configuration
And I am not enrolled in that course, but am global staff
When I view the course info page
Then I should see the Teams tab
And the correct content should be on the page
"""
self.set_team_configuration(
{u"max_team_size": 10, u"topics": self.create_topics(1)},
enroll_in_course=False,
global_staff=True
)
self.verify_teams_present(True)
@ddt.data(
'topics/{topic_id}',
'topics/{topic_id}/search',
'teams/{topic_id}/{team_id}/edit-team',
'teams/{topic_id}/{team_id}'
)
def test_unauthorized_error_message(self, route):
"""Ensure that an error message is shown to the user if they attempt
to take an action which makes an AJAX request while not signed
in.
"""
topics = self.create_topics(1)
topic = topics[0]
self.set_team_configuration(
{u'max_team_size': 10, u'topics': topics},
global_staff=True
)
team = self.create_teams(topic, 1)[0]
self.teams_page.visit()
self.browser.delete_cookie('sessionid')
url = self.browser.current_url.split('#')[0]
self.browser.get(
'{url}#{route}'.format(
url=url,
route=route.format(
topic_id=topic['id'],
team_id=team['id']
)
)
)
self.teams_page.wait_for_ajax()
self.assertEqual(
self.teams_page.warning_message,
u"Your request could not be completed. Reload the page and try again."
)
@ddt.data(
('browse', '.topics-list'),
# TODO: find a reliable way to match the "My Teams" tab
# ('my-teams', 'div.teams-list'),
('teams/{topic_id}/{team_id}', 'div.discussion-module'),
('topics/{topic_id}/create-team', 'div.create-team-instructions'),
('topics/{topic_id}', '.teams-list'),
('not-a-real-route', 'div.warning')
)
@ddt.unpack
def test_url_routing(self, route, selector):
"""Ensure that navigating to a URL route correctly updates the page
content.
"""
topics = self.create_topics(1)
topic = topics[0]
self.set_team_configuration({
u'max_team_size': 10,
u'topics': topics
})
team = self.create_teams(topic, 1)[0]
self.teams_page.visit()
# Get the base URL (the URL without any trailing fragment)
url = self.browser.current_url
fragment_index = url.find('#')
if fragment_index >= 0:
url = url[0:fragment_index]
self.browser.get(
'{url}#{route}'.format(
url=url,
route=route.format(
topic_id=topic['id'],
team_id=team['id']
))
)
self.teams_page.wait_for_page()
self.teams_page.wait_for_ajax()
self.assertTrue(self.teams_page.q(css=selector).present)
self.assertTrue(self.teams_page.q(css=selector).visible)
@attr(shard=5)
class MyTeamsTest(TeamsTabBase):
"""
Tests for the "My Teams" tab of the Teams page.
"""
def setUp(self):
super(MyTeamsTest, self).setUp()
self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
self.my_teams_page = MyTeamsPage(self.browser, self.course_id)
self.page_viewed_event = {
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'my-teams',
'topic_id': None,
'team_id': None
}
}
def test_not_member_of_any_teams(self):
"""
Scenario: Visiting the My Teams page when user is not a member of any team should not display any teams.
Given I am enrolled in a course with a team configuration and a topic but am not a member of a team
When I visit the My Teams page
        Then I should see no teams
And I should see a message that I belong to no teams.
"""
with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
self.my_teams_page.visit()
self.assertEqual(len(self.my_teams_page.team_cards), 0, msg='Expected to see no team cards')
self.assertEqual(
self.my_teams_page.q(css='.page-content-main').text,
[u'You are not currently a member of any team.']
)
def test_member_of_a_team(self):
"""
Scenario: Visiting the My Teams page when user is a member of a team should display the teams.
Given I am enrolled in a course with a team configuration and a topic and am a member of a team
When I visit the My Teams page
Then I should see a pagination header showing the number of teams
And I should see all the expected team cards
And I should not see a pagination footer
"""
teams = self.create_teams(self.topic, 1)
self.create_membership(self.user_info['username'], teams[0]['id'])
with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
self.my_teams_page.visit()
self.verify_teams(self.my_teams_page, teams)
def test_multiple_team_members(self):
"""
        Scenario: Visiting the My Teams page when user is a member of a multi-member team should display the member count.
Given I am a member of a team with multiple members
When I visit the My Teams page
Then I should see the correct number of team members on my membership
"""
teams = self.create_teams(self.topic, 1)
self.create_memberships(4, teams[0]['id'])
self.my_teams_page.visit()
self.assertEqual(self.my_teams_page.team_memberships[0], '4 / 10 Members')
@attr(shard=5)
@ddt.ddt
class BrowseTopicsTest(TeamsTabBase):
"""
Tests for the Browse tab of the Teams page.
"""
def setUp(self):
super(BrowseTopicsTest, self).setUp()
self.topics_page = BrowseTopicsPage(self.browser, self.course_id)
@ddt.data(('name', False), ('team_count', True))
@ddt.unpack
def test_sort_topics(self, sort_order, reverse):
"""
Scenario: the user should be able to sort the list of topics by name or team count
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
Then I should see a list of topics for the course
When I choose a sort order
Then I should see the paginated list of topics in that order
"""
topics = self.create_topics(TOPICS_PER_PAGE + 1)
self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
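        # Give each topic a distinct team count, assigned in a random order, so
        # the expected sort result is unambiguous.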
for i, topic in enumerate(random.sample(topics, len(topics))):
self.create_teams(topic, i)
topic['team_count'] = i
self.topics_page.visit()
self.topics_page.sort_topics_by(sort_order)
topic_names = self.topics_page.topic_names
self.assertEqual(len(topic_names), TOPICS_PER_PAGE)
self.assertEqual(
topic_names,
[t['name'] for t in sorted(topics, key=lambda t: t[sort_order], reverse=reverse)][:TOPICS_PER_PAGE]
)
def test_sort_topics_update(self):
"""
Scenario: the list of topics should remain sorted after updates
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics and choose a sort order
Then I should see the paginated list of topics in that order
When I create a team in one of those topics
And I return to the topics list
Then I should see the topics in the correct sorted order
"""
topics = self.create_topics(3)
self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
self.topics_page.visit()
self.topics_page.sort_topics_by('team_count')
topic_name = self.topics_page.topic_names[-1]
topic = [t for t in topics if t['name'] == topic_name][0]
self.topics_page.browse_teams_for_topic(topic_name)
browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
browse_teams_page.wait_for_page()
browse_teams_page.click_create_team_link()
create_team_page = TeamManagementPage(self.browser, self.course_id, topic)
create_team_page.create_team()
team_page = TeamPage(self.browser, self.course_id)
team_page.wait_for_page()
team_page.click_all_topics()
self.topics_page.wait_for_page()
self.topics_page.wait_for_ajax()
self.assertEqual(topic_name, self.topics_page.topic_names[0])
def test_list_topics(self):
"""
Scenario: a list of topics should be visible in the "Browse" tab
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
Then I should see a list of topics for the course
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(2)})
self.topics_page.visit()
self.assertEqual(len(self.topics_page.topic_cards), 2)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-2 out of 2 total'))
self.assertFalse(self.topics_page.pagination_controls_visible())
self.assertFalse(self.topics_page.is_previous_page_button_enabled())
self.assertFalse(self.topics_page.is_next_page_button_enabled())
def test_topic_pagination(self):
"""
Scenario: a list of topics should be visible in the "Browse" tab, paginated 12 per page
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
Then I should see only the first 12 topics
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(20)})
self.topics_page.visit()
self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 20 total'))
self.assertTrue(self.topics_page.pagination_controls_visible())
self.assertFalse(self.topics_page.is_previous_page_button_enabled())
self.assertTrue(self.topics_page.is_next_page_button_enabled())
def test_go_to_numbered_page(self):
"""
        Scenario: the user should be able to navigate topics by page number
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
And I enter a valid page number in the page number input
Then I should see that page of topics
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(25)})
self.topics_page.visit()
self.topics_page.go_to_page(3)
self.assertEqual(len(self.topics_page.topic_cards), 1)
self.assertTrue(self.topics_page.is_previous_page_button_enabled())
self.assertFalse(self.topics_page.is_next_page_button_enabled())
def test_go_to_invalid_page(self):
"""
Scenario: browsing topics should not respond to invalid page numbers
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
And I enter an invalid page number in the page number input
Then I should stay on the current page
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
self.topics_page.visit()
self.topics_page.go_to_page(3)
self.assertEqual(self.topics_page.get_current_page_number(), 1)
def test_page_navigation_buttons(self):
"""
        Scenario: the user should be able to page through topics with the next and previous page buttons
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
When I press the next page button
Then I should move to the next page
When I press the previous page button
Then I should move to the previous page
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
self.topics_page.visit()
self.topics_page.press_next_page_button()
self.assertEqual(len(self.topics_page.topic_cards), 1)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 13-13 out of 13 total'))
self.topics_page.press_previous_page_button()
self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 13 total'))
def test_topic_pagination_one_page(self):
"""
        Scenario: Browsing topics when there are fewer topics than the page size (12)
        should show all topics on one page
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse topics
        And I should see the correct number of topic cards
And I should see the correct page header
And I should not see a pagination footer
"""
self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(10)})
self.topics_page.visit()
self.assertEqual(len(self.topics_page.topic_cards), 10)
self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
self.assertFalse(self.topics_page.pagination_controls_visible())
def test_topic_description_truncation(self):
"""
Scenario: excessively long topic descriptions should be truncated so
as to fit within a topic card.
Given I am enrolled in a course with a team configuration and a topic
with a long description
When I visit the Teams page
And I browse topics
Then I should see a truncated topic description
"""
initial_description = "A" + " really" * 50 + " long description"
self.set_team_configuration(
{u"max_team_size": 1, u"topics": [{"name": "", "id": "", "description": initial_description}]}
)
self.topics_page.visit()
truncated_description = self.topics_page.topic_descriptions[0]
self.assertLess(len(truncated_description), len(initial_description))
self.assertTrue(truncated_description.endswith('...'))
self.assertIn(truncated_description.split('...')[0], initial_description)
def test_go_to_teams_list(self):
"""
Scenario: Clicking on a Topic Card should take you to the
teams list for that Topic.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page
And I browse topics
And I click on the arrow link to view teams for the first topic
Then I should be on the browse teams page
"""
topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
self.set_team_configuration(
{u"max_team_size": 1, u"topics": [topic]}
)
self.topics_page.visit()
self.topics_page.browse_teams_for_topic('Example Topic')
browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
browse_teams_page.wait_for_page()
self.assertEqual(browse_teams_page.header_name, 'Example Topic')
self.assertEqual(browse_teams_page.header_description, 'Description')
def test_page_viewed_event(self):
"""
Scenario: Visiting the browse topics page should fire a page viewed event.
Given I am enrolled in a course with a team configuration and a topic
When I visit the browse topics page
Then my browser should post a page viewed event
"""
topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
self.set_team_configuration(
{u"max_team_size": 1, u"topics": [topic]}
)
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'browse',
'topic_id': None,
'team_id': None
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events):
self.topics_page.visit()
@attr(shard=5)
@ddt.ddt
class BrowseTeamsWithinTopicTest(TeamsTabBase):
"""
Tests for browsing Teams within a Topic on the Teams page.
"""
TEAMS_PAGE_SIZE = 10
def setUp(self):
super(BrowseTeamsWithinTopicTest, self).setUp()
self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
self.max_team_size = 10
self.set_team_configuration({
'course_id': self.course_id,
'max_team_size': self.max_team_size,
'topics': [self.topic]
})
self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
self.topics_page = BrowseTopicsPage(self.browser, self.course_id)
def teams_with_default_sort_order(self, teams):
"""Return a list of teams sorted according to the default ordering
(last_activity_at, with a secondary sort by open slots).
"""
return sorted(
sorted(teams, key=lambda t: len(t['membership']), reverse=True),
key=lambda t: parse(t['last_activity_at']).replace(microsecond=0),
reverse=True
)
def verify_page_header(self):
"""Verify that the page header correctly reflects the current topic's name and description."""
self.assertEqual(self.browse_teams_page.header_name, self.topic['name'])
self.assertEqual(self.browse_teams_page.header_description, self.topic['description'])
def verify_search_header(self, search_results_page, search_query):
"""Verify that the page header correctly reflects the current topic's name and description."""
self.assertEqual(search_results_page.header_name, 'Team Search')
self.assertEqual(
search_results_page.header_description,
u'Showing results for "{search_query}"'.format(search_query=search_query)
)
def verify_on_page(self, teams_page, page_num, total_teams, pagination_header_text, footer_visible):
"""
Verify that we are on the correct team list page.
Arguments:
teams_page (BaseTeamsPage): The teams page object that should be the current page.
page_num (int): The one-indexed page number that we expect to be on
total_teams (list): An unsorted list of all the teams for the
current topic
pagination_header_text (str): Text we expect to see in the
pagination header.
footer_visible (bool): Whether we expect to see the pagination
footer controls.
"""
sorted_teams = self.teams_with_default_sort_order(total_teams)
self.assertTrue(teams_page.get_pagination_header_text().startswith(pagination_header_text))
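        # Check only the slice of teams expected on this (one-indexed) page.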
self.verify_teams(
teams_page,
sorted_teams[(page_num - 1) * self.TEAMS_PAGE_SIZE:page_num * self.TEAMS_PAGE_SIZE]
)
self.assertEqual(
teams_page.pagination_controls_visible(),
footer_visible,
            msg='Expected paging footer to be ' + ('visible' if footer_visible else 'invisible')
)
@ddt.data(
('open_slots', 'last_activity_at', True),
('last_activity_at', 'open_slots', True)
)
@ddt.unpack
def test_sort_teams(self, sort_order, secondary_sort_order, reverse):
"""
Scenario: the user should be able to sort the list of teams by open slots or last activity
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse teams within a topic
Then I should see a list of teams for that topic
When I choose a sort order
Then I should see the paginated list of teams in that order
"""
teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
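        # Give each team a distinct number of members (and therefore of open
        # slots), in a random order, so the sort result is deterministic.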
for i, team in enumerate(random.sample(teams, len(teams))):
for _ in range(i):
user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
self.create_membership(user_info['username'], team['id'])
team['open_slots'] = self.max_team_size - i
# Re-authenticate as staff after creating users
AutoAuthPage(
self.browser,
course_id=self.course_id,
staff=True
).visit()
self.browse_teams_page.visit()
self.browse_teams_page.sort_teams_by(sort_order)
team_names = self.browse_teams_page.team_names
self.assertEqual(len(team_names), self.TEAMS_PAGE_SIZE)
sorted_teams = [
team['name']
for team in sorted(
sorted(teams, key=lambda t: t[secondary_sort_order], reverse=reverse),
key=lambda t: t[sort_order],
reverse=reverse
)
][:self.TEAMS_PAGE_SIZE]
self.assertEqual(team_names, sorted_teams)
def test_default_sort_order(self):
"""
Scenario: the list of teams should be sorted by last activity by default
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse teams within a topic
Then I should see a list of teams for that topic, sorted by last activity
"""
self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
self.browse_teams_page.visit()
self.assertEqual(self.browse_teams_page.sort_order, 'last activity')
def test_no_teams(self):
"""
Scenario: Visiting a topic with no teams should not display any teams.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see a pagination header showing no teams
And I should see no teams
And I should see a button to add a team
And I should not see a pagination footer
"""
self.browse_teams_page.visit()
self.verify_page_header()
self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
self.assertEqual(len(self.browse_teams_page.team_cards), 0, msg='Expected to see no team cards')
self.assertFalse(
self.browse_teams_page.pagination_controls_visible(),
msg='Expected paging footer to be invisible'
)
def test_teams_one_page(self):
"""
        Scenario: Visiting a topic with fewer teams than the page size should show
all those teams on one page.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see a pagination header showing the number of teams
And I should see all the expected team cards
And I should see a button to add a team
And I should not see a pagination footer
"""
teams = self.teams_with_default_sort_order(
self.create_teams(self.topic, self.TEAMS_PAGE_SIZE, time_between_creation=1)
)
self.browse_teams_page.visit()
self.verify_page_header()
self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
self.verify_teams(self.browse_teams_page, teams)
self.assertFalse(
self.browse_teams_page.pagination_controls_visible(),
msg='Expected paging footer to be invisible'
)
def test_teams_navigation_buttons(self):
"""
Scenario: The user should be able to page through a topic's team list
using navigation buttons when it is longer than the page size.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see that I am on the first page of results
When I click on the next page button
Then I should see that I am on the second page of results
And when I click on the previous page button
Then I should see that I am on the first page of results
"""
teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1, time_between_creation=1)
self.browse_teams_page.visit()
self.verify_page_header()
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)
self.browse_teams_page.press_next_page_button()
self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-11 out of 11 total', True)
self.browse_teams_page.press_previous_page_button()
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)
def test_teams_page_input(self):
"""
Scenario: The user should be able to page through a topic's team list
using the page input when it is longer than the page size.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see that I am on the first page of results
When I input the second page
Then I should see that I am on the second page of results
When I input the first page
Then I should see that I am on the first page of results
"""
teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 10, time_between_creation=1)
self.browse_teams_page.visit()
self.verify_page_header()
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)
self.browse_teams_page.go_to_page(2)
self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-20 out of 20 total', True)
self.browse_teams_page.go_to_page(1)
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)
def test_browse_team_topics(self):
"""
Scenario: User should be able to navigate to "browse all teams" and "search team description" links.
Given I am enrolled in a course with teams enabled
When I visit the Teams page for a topic
Then I should see the correct page header
And I should see the link to "browse teams in other topics"
        When I navigate to that link
Then I should see the topic browse page
"""
self.browse_teams_page.visit()
self.verify_page_header()
self.browse_teams_page.click_browse_all_teams_link()
self.topics_page.wait_for_page()
def test_search(self):
"""
Scenario: User should be able to search for a team
Given I am enrolled in a course with teams enabled
When I visit the Teams page for that topic
And I search for 'banana'
Then I should see the search result page
And the search header should be shown
And 0 results should be shown
And my browser should fire a page viewed event for the search page
And a searched event should have been fired
"""
# Note: all searches will return 0 results with the mock search server
# used by Bok Choy.
search_text = 'banana'
self.create_teams(self.topic, 5)
self.browse_teams_page.visit()
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'search-teams',
'topic_id': self.topic['id'],
'team_id': None
}
}, {
'event_type': 'edx.team.searched',
'event': {
'search_text': search_text,
'topic_id': self.topic['id'],
'number_of_results': 0
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events, in_order=False):
search_results_page = self.browse_teams_page.search(search_text)
self.verify_search_header(search_results_page, search_text)
self.assertTrue(search_results_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
def test_page_viewed_event(self):
"""
Scenario: Visiting the browse page should fire a page viewed event.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page
Then my browser should post a page viewed event for the teams page
"""
self.create_teams(self.topic, 5)
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'single-topic',
'topic_id': self.topic['id'],
'team_id': None
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events):
self.browse_teams_page.visit()
def test_team_name_xss(self):
"""
Scenario: Team names should be HTML-escaped on the teams page
Given I am enrolled in a course with teams enabled
When I visit the Teams page for a topic, with a team name containing JS code
Then I should not see any alerts
"""
self.post_team_data({
'course_id': self.course_id,
'topic_id': self.topic['id'],
'name': '<script>alert("XSS")</script>',
'description': 'Description',
'language': 'aa',
'country': 'AF'
})
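        # If the team name were rendered unescaped, the injected <script> would
        # trigger a JS alert; get_modal_alert should instead time out waiting for one.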
with self.assertRaises(TimeoutException):
self.browser.get(self.browse_teams_page.url)
alert = get_modal_alert(self.browser)
alert.accept()
class TeamFormActions(TeamsTabBase):
"""
    Base class for the team create, edit, and delete tests.
"""
TEAM_DESCRIPTION = 'The Avengers are a fictional team of superheroes.'
topic = {'name': 'Example Topic', 'id': 'example_topic', 'description': 'Description'}
TEAMS_NAME = 'Avengers'
def setUp(self):
super(TeamFormActions, self).setUp()
self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)
def verify_page_header(self, title, description, breadcrumbs):
"""
Verify that the page header correctly reflects the
create team header, description and breadcrumb.
"""
self.assertEqual(self.team_management_page.header_page_name, title)
self.assertEqual(self.team_management_page.header_page_description, description)
self.assertEqual(self.team_management_page.header_page_breadcrumbs, breadcrumbs)
def verify_and_navigate_to_create_team_page(self):
"""Navigates to the create team page and verifies."""
self.browse_teams_page.click_create_team_link()
self.verify_page_header(
title='Create a New Team',
description='Create a new team if you can\'t find an existing team to join, '
'or if you would like to learn with friends you know.',
breadcrumbs=u'All Topics {topic_name}'.format(topic_name=self.topic['name'])
)
def verify_and_navigate_to_edit_team_page(self):
"""Navigates to the edit team page and verifies."""
self.assertEqual(self.team_page.team_name, self.team['name'])
self.assertTrue(self.team_page.edit_team_button_present)
self.team_page.click_edit_team_button()
self.team_management_page.wait_for_page()
# Edit page header.
self.verify_page_header(
title='Edit Team',
description='If you make significant changes, make sure you notify '
'members of the team before making these changes.',
breadcrumbs=u'All Topics {topic_name} {team_name}'.format(
topic_name=self.topic['name'],
team_name=self.team['name']
)
)
def verify_team_info(self, name, description, location, language):
"""Verify the team information on team page."""
self.assertEqual(self.team_page.team_name, name)
self.assertEqual(self.team_page.team_description, description)
self.assertEqual(self.team_page.team_location, location)
self.assertEqual(self.team_page.team_language, language)
def fill_create_or_edit_form(self):
"""Fill the create/edit team form fields with appropriate values."""
self.team_management_page.value_for_text_field(
field_id='name',
value=self.TEAMS_NAME,
press_enter=False
)
self.team_management_page.set_value_for_textarea_field(
field_id='description',
value=self.TEAM_DESCRIPTION
)
self.team_management_page.value_for_dropdown_field(field_id='language', value='English')
self.team_management_page.value_for_dropdown_field(field_id='country', value='Pakistan')
def verify_all_fields_exist(self):
"""
Verify the fields for create/edit page.
"""
self.assertEqual(
self.team_management_page.message_for_field('name'),
'A name that identifies your team (maximum 255 characters).'
)
self.assertEqual(
self.team_management_page.message_for_textarea_field('description'),
'A short description of the team to help other learners understand '
'the goals or direction of the team (maximum 300 characters).'
)
self.assertEqual(
self.team_management_page.message_for_field('country'),
'The country that team members primarily identify with.'
)
self.assertEqual(
self.team_management_page.message_for_field('language'),
'The language that team members primarily use to communicate with each other.'
)
@attr(shard=5)
@ddt.ddt
class CreateTeamTest(TeamFormActions):
"""
Tests for creating a new Team within a Topic on the Teams page.
"""
def setUp(self):
super(CreateTeamTest, self).setUp()
self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
self.browse_teams_page.visit()
def test_user_can_see_create_team_page(self):
"""
Scenario: The user should be able to see the create team page via teams list page.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
        Then I should see the Create Team page link at the bottom
And When I click create team link
Then I should see the create team page.
And I should see the create team header
And I should also see the help messages for fields.
"""
self.verify_and_navigate_to_create_team_page()
self.verify_all_fields_exist()
def test_user_can_see_error_message_for_missing_data(self):
"""
        Scenario: The user should see an error message when required fields are missing.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Create Team page for that topic
Then I should see the Create Team header and form
And When I click create team button without filling required fields
Then I should see the error message and highlighted fields.
"""
self.verify_and_navigate_to_create_team_page()
# `submit_form` clicks on a button, but that button doesn't always
# have the click event handler registered on it in time. That's why
# this test is flaky. Unfortunately, I don't know of a straightforward
# way to write something that waits for that event handler to be bound
# to the button element. So I used time.sleep as well, even though
# the bok choy docs explicitly ask us not to:
# https://bok-choy.readthedocs.io/en/latest/guidelines.html
# Sorry! For the story to address this anti-pattern, see TNL-5820
time.sleep(0.5)
self.team_management_page.submit_form()
self.team_management_page.wait_for(
lambda: self.team_management_page.validation_message_text,
"Validation message text never loaded."
)
self.assertEqual(
self.team_management_page.validation_message_text,
'Check the highlighted fields below and try again.'
)
self.assertTrue(self.team_management_page.error_for_field(field_id='name'))
self.assertTrue(self.team_management_page.error_for_field(field_id='description'))
def test_user_can_see_error_message_for_incorrect_data(self):
"""
        Scenario: The user should see an error message when a required field exceeds its maximum length.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Create Team page for that topic
Then I should see the Create Team header and form
        When I add more than 255 characters in the name field
And I click Create button
Then I should see the error message for exceeding length.
"""
self.verify_and_navigate_to_create_team_page()
# Fill the name field with >255 characters to see validation message.
self.team_management_page.value_for_text_field(
field_id='name',
value='EdX is a massive open online course (MOOC) provider and online learning platform. '
'It hosts online university-level courses in a wide range of disciplines to a worldwide '
'audience, some at no charge. It also conducts research into learning based on how '
                  'people use its platform. EdX was created for students and institutions that seek to '
                  'transform themselves through cutting-edge technologies, innovative pedagogy, and '
                  'rigorous courses. More than 70 schools, nonprofits, corporations, and international '
                  'organizations offer or plan to offer courses on the edX website. As of 22 October 2014, '
'edX has more than 4 million users taking more than 500 courses online.',
press_enter=False
)
self.team_management_page.submit_form()
self.assertEqual(
self.team_management_page.validation_message_text,
'Check the highlighted fields below and try again.'
)
self.assertTrue(self.team_management_page.error_for_field(field_id='name'))
def test_user_can_create_new_team_successfully(self):
"""
Scenario: The user should be able to create new team.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Create Team page for that topic
Then I should see the Create Team header and form
When I fill all the fields present with appropriate data
And I click Create button
Then I expect analytics events to be emitted
And I should see the page for my team
        And I should see the message that says "You are a member of this team"
And the new team should be added to the list of teams within the topic
And the number of teams should be updated on the topic card
And if I switch to "My Team", the newly created team is displayed
"""
AutoAuthPage(self.browser, course_id=self.course_id).visit()
self.browse_teams_page.visit()
self.verify_and_navigate_to_create_team_page()
self.fill_create_or_edit_form()
expected_events = [
{
'event_type': 'edx.team.created'
},
{
'event_type': 'edx.team.learner_added',
'event': {
'add_method': 'added_on_create',
}
}
]
with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
self.team_management_page.submit_form()
# Verify that the page is shown for the new team
team_page = TeamPage(self.browser, self.course_id)
team_page.wait_for_page()
self.assertEqual(team_page.team_name, self.TEAMS_NAME)
self.assertEqual(team_page.team_description, self.TEAM_DESCRIPTION)
self.assertEqual(team_page.team_user_membership_text, 'You are a member of this team.')
# Verify the new team was added to the topic list
self.teams_page.click_specific_topic("Example Topic")
self.teams_page.verify_topic_team_count(1)
self.teams_page.click_all_topics()
self.teams_page.verify_team_count_in_first_topic(1)
# Verify that if one switches to "My Team" without reloading the page, the newly created team is shown.
self.verify_my_team_count(1)
def test_user_can_cancel_the_team_creation(self):
"""
Scenario: The user should be able to cancel the creation of new team.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Create Team page for that topic
Then I should see the Create Team header and form
When I click Cancel button
Then I should see teams list page without any new team.
And if I switch to "My Team", it shows no teams
"""
self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
self.verify_and_navigate_to_create_team_page()
# We add a sleep here to allow time for the click event handler to bind
# to the cancel button. Using time.sleep in bok-choy tests is,
# generally, an anti-pattern. So don't copy this :).
# For the story to address this anti-pattern, see TNL-5820
time.sleep(0.5)
self.team_management_page.cancel_team()
self.browse_teams_page.wait_for_page()
self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
self.teams_page.click_all_topics()
self.teams_page.verify_team_count_in_first_topic(0)
self.verify_my_team_count(0)
def test_page_viewed_event(self):
"""
Scenario: Visiting the create team page should fire a page viewed event.
Given I am enrolled in a course with a team configuration and a topic
When I visit the create team page
Then my browser should post a page viewed event
"""
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'new-team',
'topic_id': self.topic['id'],
'team_id': None
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events):
self.verify_and_navigate_to_create_team_page()
@attr(shard=21)
@ddt.ddt
class DeleteTeamTest(TeamFormActions):
"""
Tests for deleting teams.
"""
def setUp(self):
super(DeleteTeamTest, self).setUp()
self.set_team_configuration(
{'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
global_staff=True
)
self.team = self.create_teams(self.topic, num_teams=1)[0]
self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
        # Need to have a membership to confirm it gets deleted as well
self.create_membership(self.user_info['username'], self.team['id'])
self.team_page.visit()
def test_cancel_delete(self):
"""
Scenario: The user should be able to cancel the Delete Team dialog
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the Delete Team button
When I click the delete team button
And I cancel the prompt
And I refresh the page
Then I should still see the team
"""
self.delete_team(cancel=True)
self.team_management_page.wait_for_page()
self.browser.refresh()
self.team_management_page.wait_for_page()
self.assertEqual(
' '.join(('All Topics', self.topic['name'], self.team['name'])),
self.team_management_page.header_page_breadcrumbs
)
@ddt.data('Moderator', 'Community TA', 'Administrator', None)
def test_delete_team(self, role):
"""
        Scenario: The user should be able to delete a team.
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the Delete Team button
When I click the delete team button
And I confirm the prompt
Then I should see the browse teams page
And the team should not be present
"""
# If role is None, remain logged in as global staff
if role is not None:
AutoAuthPage(
self.browser,
course_id=self.course_id,
staff=False,
roles=role
).visit()
self.team_page.visit()
self.delete_team(require_notification=False)
browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
browse_teams_page.wait_for_page()
self.assertNotIn(self.team['name'], browse_teams_page.team_names)
def delete_team(self, **kwargs):
"""
Delete a team. Passes `kwargs` to `confirm_prompt`.
Expects edx.team.deleted event to be emitted, with correct course_id.
Also expects edx.team.learner_removed event to be emitted for the
membership that is removed as a part of the delete operation.
"""
self.team_page.click_edit_team_button()
self.team_management_page.wait_for_page()
self.team_management_page.delete_team_button.click()
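        # A cancelled dialog should emit no events; a confirmed delete should
        # emit the edx.team.deleted and edx.team.learner_removed events checked below.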
if 'cancel' in kwargs and kwargs['cancel'] is True:
confirm_prompt(self.team_management_page, **kwargs)
else:
expected_events = [
{
'event_type': 'edx.team.deleted',
'event': {
'team_id': self.team['id']
}
},
{
'event_type': 'edx.team.learner_removed',
'event': {
'team_id': self.team['id'],
'remove_method': 'team_deleted',
'user_id': self.user_info['user_id']
}
}
]
with self.assert_events_match_during(
event_filter=self.only_team_events, expected_events=expected_events
):
confirm_prompt(self.team_management_page, **kwargs)
def test_delete_team_updates_topics(self):
"""
Scenario: Deleting a team should update the team count on the topics page
Given I am staff user for a course with a team
And I delete a team
When I navigate to the browse topics page
        Then the team count for the deleted team's topic should be updated
"""
self.delete_team(require_notification=False)
BrowseTeamsPage(self.browser, self.course_id, self.topic).click_all_topics()
topics_page = BrowseTopicsPage(self.browser, self.course_id)
topics_page.wait_for_page()
self.teams_page.verify_topic_team_count(0)
@attr(shard=17)
@ddt.ddt
class EditTeamTest(TeamFormActions):
"""
Tests for editing the team.
"""
def setUp(self):
super(EditTeamTest, self).setUp()
self.set_team_configuration(
{'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
global_staff=True
)
self.team = self.create_teams(self.topic, num_teams=1)[0]
self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
self.team_page.visit()
def test_staff_can_navigate_to_edit_team_page(self):
"""
Scenario: The user should be able to see and navigate to the edit team page.
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the edit team page
And I should see the edit team header
And I should also see the help messages for fields
"""
self.verify_and_navigate_to_edit_team_page()
self.verify_all_fields_exist()
def test_staff_can_edit_team_successfully(self):
"""
Scenario: The staff should be able to edit team successfully.
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the edit team page
And an analytics event should be fired
When I edit all the fields with appropriate data
And I click Update button
Then I should see the page for my team with updated data
"""
self.verify_team_info(
name=self.team['name'],
description=self.team['description'],
location='Afghanistan',
language='Afar'
)
self.verify_and_navigate_to_edit_team_page()
self.fill_create_or_edit_form()
expected_events = [
{
'event_type': 'edx.team.changed',
'event': {
'team_id': self.team['id'],
'field': 'country',
'old': 'AF',
'new': 'PK',
'truncated': [],
}
},
{
'event_type': 'edx.team.changed',
'event': {
'team_id': self.team['id'],
'field': 'name',
'old': self.team['name'],
'new': self.TEAMS_NAME,
'truncated': [],
}
},
{
'event_type': 'edx.team.changed',
'event': {
'team_id': self.team['id'],
'field': 'language',
'old': 'aa',
'new': 'en',
'truncated': [],
}
},
{
'event_type': 'edx.team.changed',
'event': {
'team_id': self.team['id'],
'field': 'description',
'old': self.team['description'],
'new': self.TEAM_DESCRIPTION,
'truncated': [],
}
},
]
with self.assert_events_match_during(
event_filter=self.only_team_events,
expected_events=expected_events,
):
self.team_management_page.submit_form()
self.team_page.wait_for_page()
self.verify_team_info(
name=self.TEAMS_NAME,
description=self.TEAM_DESCRIPTION,
location='Pakistan',
language='English'
)
def test_staff_can_cancel_the_team_edit(self):
"""
        Scenario: The user should be able to cancel editing a team.
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the edit team page
Then I should see the Edit Team header
When I click Cancel button
        Then I should see the team page without changes.
"""
self.verify_team_info(
name=self.team['name'],
description=self.team['description'],
location='Afghanistan',
language='Afar'
)
self.verify_and_navigate_to_edit_team_page()
self.fill_create_or_edit_form()
self.team_management_page.cancel_team()
self.team_page.wait_for_page()
self.verify_team_info(
name=self.team['name'],
description=self.team['description'],
location='Afghanistan',
language='Afar'
)
def test_student_cannot_see_edit_button(self):
"""
Scenario: The student should not see the edit team button.
        Given I am a student for a course with a team
When I visit the Team profile page
Then I should not see the Edit Team button
"""
AutoAuthPage(self.browser, course_id=self.course_id).visit()
self.team_page.visit()
self.assertFalse(self.team_page.edit_team_button_present)
@ddt.data('Moderator', 'Community TA', 'Administrator')
def test_discussion_privileged_user_can_edit_team(self, role):
"""
        Scenario: A user with the specified privileged role should see the edit team button.
        Given I am a user with a privileged role for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
"""
kwargs = {
'course_id': self.course_id,
'staff': False
}
if role is not None:
kwargs['roles'] = role
AutoAuthPage(self.browser, **kwargs).visit()
self.team_page.visit()
self.teams_page.wait_for_page()
self.assertTrue(self.team_page.edit_team_button_present)
self.verify_team_info(
name=self.team['name'],
description=self.team['description'],
location='Afghanistan',
language='Afar'
)
self.verify_and_navigate_to_edit_team_page()
self.fill_create_or_edit_form()
self.team_management_page.submit_form()
self.team_page.wait_for_page()
self.verify_team_info(
name=self.TEAMS_NAME,
description=self.TEAM_DESCRIPTION,
location='Pakistan',
language='English'
)
def test_page_viewed_event(self):
"""
Scenario: Visiting the edit team page should fire a page viewed event.
Given I am enrolled in a course with a team configuration and a topic
When I visit the edit team page
Then my browser should post a page viewed event
"""
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'edit-team',
'topic_id': self.topic['id'],
'team_id': self.team['id']
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events):
self.verify_and_navigate_to_edit_team_page()
@attr(shard=17)
@ddt.ddt
class EditMembershipTest(TeamFormActions):
"""
    Tests for administering teams from the team membership page
"""
def setUp(self):
super(EditMembershipTest, self).setUp()
self.set_team_configuration(
{'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
global_staff=True
)
self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)
self.team = self.create_teams(self.topic, num_teams=1)[0]
        # Make sure a user exists on this team so we can edit the membership.
self.create_membership(self.user_info['username'], self.team['id'])
self.edit_membership_page = EditMembershipPage(self.browser, self.course_id, self.team)
self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
def edit_membership_helper(self, role, cancel=False):
"""
Helper for common functionality in edit membership tests.
Checks for all relevant assertions about membership being removed,
        including verifying that edx.team.learner_removed events are emitted.
"""
if role is not None:
AutoAuthPage(
self.browser,
course_id=self.course_id,
staff=False,
roles=role
).visit()
self.team_page.visit()
self.team_page.click_edit_team_button()
self.team_management_page.wait_for_page()
self.assertTrue(
self.team_management_page.membership_button_present
)
self.team_management_page.click_membership_button()
self.edit_membership_page.wait_for_page()
self.edit_membership_page.click_first_remove()
if cancel:
self.edit_membership_page.cancel_delete_membership_dialog()
self.assertEqual(self.edit_membership_page.team_members, 1)
else:
expected_events = [
{
'event_type': 'edx.team.learner_removed',
'event': {
'team_id': self.team['id'],
'remove_method': 'removed_by_admin',
'user_id': self.user_info['user_id']
}
}
]
with self.assert_events_match_during(
event_filter=self.only_team_events, expected_events=expected_events
):
self.edit_membership_page.confirm_delete_membership_dialog()
self.assertEqual(self.edit_membership_page.team_members, 0)
self.edit_membership_page.wait_for_page()
@ddt.data('Moderator', 'Community TA', 'Administrator', None)
def test_remove_membership(self, role):
"""
Scenario: The user should be able to remove a membership
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the Edit Membership button
And When I click the edit membership button
Then I should see the edit membership page
And When I click the remove button and confirm the dialog
Then my membership should be removed, and I should remain on the page
"""
self.edit_membership_helper(role, cancel=False)
@ddt.data('Moderator', 'Community TA', 'Administrator', None)
def test_cancel_remove_membership(self, role):
"""
        Scenario: The user should be able to cancel removing a membership.
Given I am staff user for a course with a team
When I visit the Team profile page
Then I should see the Edit Team button
And When I click edit team button
Then I should see the Edit Membership button
And When I click the edit membership button
Then I should see the edit membership page
And When I click the remove button and cancel the dialog
Then my membership should not be removed, and I should remain on the page
"""
self.edit_membership_helper(role, cancel=True)
@attr(shard=17)
@ddt.ddt
class TeamPageTest(TeamsTabBase):
"""Tests for viewing a specific team"""
SEND_INVITE_TEXT = 'Send this link to friends so that they can join too.'
def setUp(self):
super(TeamPageTest, self).setUp()
self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
def _set_team_configuration_and_membership(
self,
max_team_size=10,
membership_team_index=0,
visit_team_index=0,
create_membership=True,
another_user=False):
"""
        Set the team configuration, create two teams, and optionally create a membership.
        Arguments:
            max_team_size (int): maximum number of users a team can have
            membership_team_index (int): index of the team the user will join
            visit_team_index (int): index of the team the user will visit
            create_membership (bool): whether to create a team membership for the user
            another_user (bool): whether a different user should visit the team
"""
#pylint: disable=attribute-defined-outside-init
self.set_team_configuration(
{'course_id': self.course_id, 'max_team_size': max_team_size, 'topics': [self.topic]}
)
self.teams = self.create_teams(self.topic, 2)
if create_membership:
self.create_membership(self.user_info['username'], self.teams[membership_team_index]['id'])
if another_user:
AutoAuthPage(self.browser, course_id=self.course_id).visit()
self.team_page = TeamPage(self.browser, self.course_id, self.teams[visit_team_index])
def setup_thread(self):
"""
Create and return a thread for this test's discussion topic.
"""
thread = Thread(
id="test_thread_{}".format(uuid4().hex),
commentable_id=self.teams[0]['discussion_topic_id'],
body="Dummy text body.",
context="standalone",
)
thread_fixture = MultipleThreadFixture([thread])
thread_fixture.push()
return thread
def setup_discussion_user(self, role=None, staff=False):
"""Set this test's user to have the given role in its
discussions. Role is one of 'Community TA', 'Moderator',
'Administrator', or 'Student'.
"""
kwargs = {
'course_id': self.course_id,
'staff': staff
}
if role is not None:
kwargs['roles'] = role
#pylint: disable=attribute-defined-outside-init
self.user_info = AutoAuthPage(self.browser, **kwargs).visit().user_info
def verify_teams_discussion_permissions(self, should_have_permission):
"""Verify that the teams discussion component is in the correct state
for the test user. If `should_have_permission` is True, assert that
the user can see controls for posting replies, voting, editing, and
deleting. Otherwise, assert that those controls are hidden.
"""
thread = self.setup_thread()
self.team_page.visit()
self.assertEqual(self.team_page.discussion_id, self.teams[0]['discussion_topic_id'])
discussion_page = self.team_page.discussion_page
discussion_page.wait_for_page()
self.assertTrue(discussion_page.is_discussion_expanded())
self.assertEqual(discussion_page.get_num_displayed_threads(), 1)
discussion_page.show_thread(thread['id'])
thread_page = discussion_page.thread_page
assertion = self.assertTrue if should_have_permission else self.assertFalse
assertion(thread_page.q(css='.post-header-actions').present)
assertion(thread_page.q(css='.add-response').present)
def test_discussion_on_my_team_page(self):
"""
Scenario: Team Page renders a discussion for a team to which I belong.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic of which I am a member
When the team has a discussion with a thread
And I visit the Team page for that team
Then I should see a discussion with the correct discussion_id
And I should see the existing thread
And I should see controls to change the state of the discussion
"""
self._set_team_configuration_and_membership()
self.verify_teams_discussion_permissions(True)
@ddt.data(True, False)
def test_discussion_on_other_team_page(self, is_staff):
"""
Scenario: Team Page renders a team discussion for a team to which I do
not belong.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic of which I am not a member
When the team has a discussion with a thread
And I visit the Team page for that team
Then I should see a discussion with the correct discussion_id
And I should see the team's thread
And I should not see controls to change the state of the discussion
"""
self._set_team_configuration_and_membership(create_membership=False)
self.setup_discussion_user(staff=is_staff)
self.verify_teams_discussion_permissions(False)
@ddt.data('Moderator', 'Community TA', 'Administrator')
def test_discussion_privileged(self, role):
self._set_team_configuration_and_membership(create_membership=False)
self.setup_discussion_user(role=role)
self.verify_teams_discussion_permissions(True)
def assert_team_details(self, num_members, is_member=True, max_size=10):
"""
        Verifies that the user can see all the information presented on the detail page according to their membership status.
        Arguments:
            num_members (int): number of users in the team
            is_member (bool): True if the requesting user is a member, else False (default True)
            max_size (int): maximum number of users the team can have
"""
self.assertEqual(
self.team_page.team_capacity_text,
self.team_page.format_capacity_text(num_members, max_size)
)
self.assertEqual(self.team_page.team_location, 'Afghanistan')
self.assertEqual(self.team_page.team_language, 'Afar')
self.assertEqual(self.team_page.team_members, num_members)
if num_members > 0:
self.assertTrue(self.team_page.team_members_present)
else:
self.assertFalse(self.team_page.team_members_present)
if is_member:
self.assertEqual(self.team_page.team_user_membership_text, 'You are a member of this team.')
self.assertTrue(self.team_page.team_leave_link_present)
self.assertTrue(self.team_page.new_post_button_present)
else:
self.assertEqual(self.team_page.team_user_membership_text, '')
self.assertFalse(self.team_page.team_leave_link_present)
self.assertFalse(self.team_page.new_post_button_present)
def test_team_member_can_see_full_team_details(self):
"""
Scenario: Team member can see full info for team.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic of which I am a member
When I visit the Team page for that team
Then I should see the full team detail
And I should see the team members
And I should see my team membership text
And I should see the language & country
And I should see the Leave Team and Invite Team
"""
self._set_team_configuration_and_membership()
self.team_page.visit()
self.assert_team_details(
num_members=1,
)
def test_other_users_can_see_limited_team_details(self):
"""
Scenario: Users who are not member of this team can only see limited info for this team.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic of which I am not a member
When I visit the Team page for that team
Then I should not see full team detail
And I should see the team members
And I should not see my team membership text
And I should not see the Leave Team and Invite Team links
"""
self._set_team_configuration_and_membership(create_membership=False)
self.team_page.visit()
self.assert_team_details(is_member=False, num_members=0)
def test_user_can_navigate_to_members_profile_page(self):
"""
Scenario: User can navigate to profile page via team member profile image.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic of which I am a member
When I visit the Team page for that team
Then I should see profile images for the team members
When I click on the first profile image
Then I should be taken to the user's profile page
And I should see the username on profile page
"""
self._set_team_configuration_and_membership()
self.team_page.visit()
learner_name = self.team_page.first_member_username
self.team_page.click_first_profile_image()
learner_profile_page = LearnerProfilePage(self.browser, learner_name)
learner_profile_page.wait_for_page()
learner_profile_page.wait_for_field('username')
self.assertTrue(learner_profile_page.field_is_visible('username'))
def test_join_team(self):
"""
        Scenario: User can join a team if not already a member.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic
And I visit the Team page for that team
Then I should see Join Team button
And I should not see New Post button
When I click on Join Team button
Then there should be no Join Team button and no message
And an analytics event should be emitted
And I should see the updated information under Team Details
And I should see New Post button
And if I switch to "My Team", the team I have joined is displayed
"""
self._set_team_configuration_and_membership(create_membership=False)
teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
teams_page.visit()
teams_page.view_first_team()
self.assertTrue(self.team_page.join_team_button_present)
expected_events = [
{
'event_type': 'edx.team.learner_added',
'event': {
'add_method': 'joined_from_team_view'
}
}
]
with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
self.team_page.click_join_team_button()
self.assertFalse(self.team_page.join_team_button_present)
self.assertFalse(self.team_page.join_team_message_present)
self.assert_team_details(num_members=1, is_member=True)
# Verify that if one switches to "My Team" without reloading the page, the newly joined team is shown.
self.teams_page.click_all_topics()
self.verify_my_team_count(1)
def test_already_member_message(self):
"""
        Scenario: User should see the `You are already in a team` message if the user is a
        member of another team.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic
And I am already a member of a team
And I visit a team other than mine
Then I should see `You are already in a team` message
"""
self._set_team_configuration_and_membership(membership_team_index=0, visit_team_index=1)
self.team_page.visit()
self.assertEqual(self.team_page.join_team_message, 'You already belong to another team.')
self.assert_team_details(num_members=0, is_member=False)
def test_team_full_message(self):
"""
Scenario: User should see `Team is full` message when team is full.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic
And team has no space left
And I am not a member of any team
And I visit the team
Then I should see `Team is full` message
"""
self._set_team_configuration_and_membership(
create_membership=True,
max_team_size=1,
membership_team_index=0,
visit_team_index=0,
another_user=True
)
self.team_page.visit()
self.assertEqual(self.team_page.join_team_message, 'This team is full.')
self.assert_team_details(num_members=1, is_member=False, max_size=1)
def test_leave_team(self):
"""
Scenario: User can leave a team.
Given I am enrolled in a course with a team configuration, a topic,
and a team belonging to that topic
And I am a member of team
And I visit the team
And I should not see Join Team button
And I should see New Post button
Then I should see Leave Team link
When I click on Leave Team link
Then user should be removed from team
And an analytics event should be emitted
And I should see Join Team button
And I should not see New Post button
And if I switch to "My Team", the team I have left is not displayed
"""
self._set_team_configuration_and_membership()
self.team_page.visit()
self.assertFalse(self.team_page.join_team_button_present)
self.assert_team_details(num_members=1)
expected_events = [
{
'event_type': 'edx.team.learner_removed',
'event': {
'remove_method': 'self_removal'
}
}
]
with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
# I think we're seeing the same problem that we're seeing in
# CreateTeamTest.test_user_can_see_error_message_for_missing_data.
# We click on the "leave team" link after it's loaded, but before
# its JavaScript event handler is added. Adding this sleep gives
# enough time for that event handler to bind to the link. Sorry!
# For the story to address this anti-pattern, see TNL-5820
time.sleep(0.5)
self.team_page.click_leave_team_link()
self.assert_team_details(num_members=0, is_member=False)
self.assertTrue(self.team_page.join_team_button_present)
# Verify that if one switches to "My Team" without reloading the page, the old team no longer shows.
self.teams_page.click_all_topics()
self.verify_my_team_count(0)
def test_page_viewed_event(self):
"""
Scenario: Visiting the team profile page should fire a page viewed event.
Given I am enrolled in a course with a team configuration and a topic
When I visit the team profile page
Then my browser should post a page viewed event
"""
self._set_team_configuration_and_membership()
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'single-team',
'topic_id': self.topic['id'],
'team_id': self.teams[0]['id']
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events):
self.team_page.visit()
|
agpl-3.0
| -7,881,095,357,731,732,000 | 41.560545 | 119 | 0.618576 | false |
purushothamc/myibitsolutions
|
arrays/fill_range_in_spiral.py
|
1
|
1051
|
def generateMatrix(A):
    """Return an A x A matrix filled with 1..A*A in clockwise spiral order."""
    n = A
    top, bottom = 0, n - 1
    left, right = 0, n - 1
    # Direction cycle: 0 -> right, 1 -> down, 2 -> left, 3 -> up.
    direction = 0
    result = [[0 for j in xrange(n)] for i in xrange(n)]
    count = 1
    while top <= bottom and left <= right:
        if direction == 0:
            # Fill the top row from left to right.
            for i in xrange(left, right + 1):
                result[top][i] = count
                count += 1
            top += 1
        elif direction == 1:
            # Fill the right column from top to bottom.
            for i in xrange(top, bottom + 1):
                result[i][right] = count
                count += 1
            right -= 1
        elif direction == 2:
            # Fill the bottom row from right to left.
            for i in xrange(right, left - 1, -1):
                result[bottom][i] = count
                count += 1
            bottom -= 1
        elif direction == 3:
            # Fill the left column from bottom to top.
            for i in xrange(bottom, top - 1, -1):
                result[i][left] = count
                count += 1
            left += 1
        direction = (direction + 1) % 4
    return result
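# A minimal usage sketch (added for illustration, not part of the original
# solution): running the module directly prints the spiral matrix for A = 3.
if __name__ == '__main__':
    # Expected clockwise spiral for a 3 x 3 matrix:
    # [[1, 2, 3],
    #  [8, 9, 4],
    #  [7, 6, 5]]
    for row in generateMatrix(3):
        print row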
|
gpl-3.0
| 9,140,552,636,594,468,000 | 29.057143 | 56 | 0.432921 | false |
zcbenz/cefode-chromium
|
native_client_sdk/src/build_tools/tests/update_nacl_manifest_test.py
|
1
|
25472
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import datetime
import hashlib
import os
import posixpath
import subprocess
import sys
import tempfile
import unittest
import urlparse
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(BUILD_TOOLS_DIR)
import manifest_util
import update_nacl_manifest
from update_nacl_manifest import CANARY_BUNDLE_NAME
HTTPS_BASE_URL = 'https://commondatastorage.googleapis.com' \
'/nativeclient_mirror/nacl/nacl_sdk/'
OS_CR = ('cros',)
OS_M = ('mac',)
OS_ML = ('mac', 'linux')
OS_MW = ('mac', 'win')
OS_MLW = ('mac', 'linux', 'win')
OS_ALL = ('all',)
POST_STABLE = 'post_stable'
STABLE = 'stable'
BETA = 'beta'
DEV = 'dev'
CANARY = 'canary'
def GetArchiveURL(basename, version):
return urlparse.urljoin(HTTPS_BASE_URL, posixpath.join(version, basename))
def GetPlatformArchiveUrl(host_os, version):
basename = 'naclsdk_%s.tar.bz2' % (host_os,)
return GetArchiveURL(basename, version)
def MakeGsUrl(rel_path):
return update_nacl_manifest.GS_BUCKET_PATH + rel_path
def GetPathFromGsUrl(url):
assert url.startswith(update_nacl_manifest.GS_BUCKET_PATH)
return url[len(update_nacl_manifest.GS_BUCKET_PATH):]
def GetPathFromHttpsUrl(url):
assert url.startswith(HTTPS_BASE_URL)
return url[len(HTTPS_BASE_URL):]
def MakeArchive(url, host_os):
archive = manifest_util.Archive(host_os)
archive.url = url
# dummy values that won't succeed if we ever use them, but will pass
# validation. :)
archive.checksum = {'sha1': 'foobar'}
archive.size = 1
return archive
def MakePlatformArchive(host_os, version):
return MakeArchive(GetPlatformArchiveUrl(host_os, version), host_os)
def MakeNonPlatformArchive(basename, version):
return MakeArchive(GetArchiveURL(basename, version), 'all')
def MakeNonPepperBundle(name, with_archives=False):
bundle = manifest_util.Bundle(name)
bundle.version = 1
bundle.revision = 1
bundle.description = 'Dummy bundle'
bundle.recommended = 'yes'
bundle.stability = 'stable'
if with_archives:
for host_os in OS_MLW:
archive = manifest_util.Archive(host_os)
archive.url = 'http://example.com'
archive.checksum = {'sha1': 'blah'}
archive.size = 2
bundle.AddArchive(archive)
return bundle
def MakePepperBundle(major_version, revision=0, version=None, stability='dev'):
assert (version is None or
version.split('.')[0] == 'trunk' or
version.split('.')[0] == str(major_version))
if stability == CANARY:
bundle_name = CANARY_BUNDLE_NAME
else:
bundle_name = 'pepper_' + str(major_version)
bundle = manifest_util.Bundle(bundle_name)
bundle.version = major_version
bundle.revision = revision
bundle.description = 'Chrome %s bundle, revision %s' % (major_version,
revision)
bundle.repath = 'pepper_' + str(major_version)
bundle.recommended = 'no'
bundle.stability = stability
return bundle
def MakePlatformBundle(major_version, revision=0, version=None, host_oses=None,
stability='dev'):
bundle = MakePepperBundle(major_version, revision, version, stability)
if host_oses:
for host_os in host_oses:
bundle.AddArchive(MakePlatformArchive(host_os, version))
return bundle
class MakeManifest(manifest_util.SDKManifest):
def __init__(self, *args):
manifest_util.SDKManifest.__init__(self)
for bundle in args:
self.AddBundle(bundle)
def AddBundle(self, bundle):
self.MergeBundle(bundle, allow_existing=False)
class MakeHistory(object):
def __init__(self):
# used for a dummy timestamp
self.datetime = datetime.datetime.utcnow()
self.history = []
def Add(self, host_oses, channel, version):
for host_os in host_oses:
timestamp = self.datetime.strftime('%Y-%m-%d %H:%M:%S.%f')
self.history.append((host_os, channel, version, timestamp))
self.datetime += datetime.timedelta(0, -3600) # one hour earlier
self.datetime += datetime.timedelta(-1) # one day earlier
class MakeFiles(dict):
def AddOnlineManifest(self, manifest_string):
self['naclsdk_manifest2.json'] = manifest_string
def Add(self, bundle, add_archive_for_os=OS_MLW, add_json_for_os=OS_MLW):
for archive in bundle.GetArchives():
if not archive.host_os in add_archive_for_os:
continue
self.AddArchive(bundle, archive, archive.host_os in add_json_for_os)
def AddArchive(self, bundle, archive, add_json=True):
path = GetPathFromHttpsUrl(archive.url)
self[path] = 'My Dummy archive'
if add_json:
# add .json manifest snippet, it should look like a normal Bundle, but
# only has one archive.
new_bundle = manifest_util.Bundle('')
new_bundle.CopyFrom(bundle)
del new_bundle.archives[:]
new_bundle.AddArchive(archive)
self[path + '.json'] = new_bundle.GetDataAsString()
class TestDelegate(update_nacl_manifest.Delegate):
def __init__(self, manifest, history, files, version_mapping):
self.manifest = manifest
self.history = history
self.files = files
self.version_mapping = version_mapping
self.dryrun = 0
self.called_gsutil_cp = False
def GetRepoManifest(self):
return self.manifest
def GetHistory(self):
return self.history
def GetTrunkRevision(self, version):
return self.version_mapping[version]
def GsUtil_ls(self, url):
path = GetPathFromGsUrl(url)
result = []
for filename, _ in self.files.iteritems():
if filename.startswith(path):
result.append(MakeGsUrl(filename))
return result
def GsUtil_cat(self, url):
path = GetPathFromGsUrl(url)
if path not in self.files:
raise subprocess.CalledProcessError(1, 'gsutil cat %s' % (url,))
return self.files[path]
def GsUtil_cp(self, src, dest, stdin=None):
self.called_gsutil_cp = True
dest_path = GetPathFromGsUrl(dest)
if src == '-':
self.files[dest_path] = stdin
else:
src_path = GetPathFromGsUrl(src)
if src_path not in self.files:
raise subprocess.CalledProcessError(1, 'gsutil cp %s %s' % (src, dest))
self.files[dest_path] = self.files[src_path]
def Print(self, *args):
# eat all informational messages
pass
# Shorthand for premade bundles/versions
V18_0_1025_163 = '18.0.1025.163'
V18_0_1025_175 = '18.0.1025.175'
V18_0_1025_184 = '18.0.1025.184'
V19_0_1084_41 = '19.0.1084.41'
V19_0_1084_67 = '19.0.1084.67'
V21_0_1145_0 = '21.0.1145.0'
V21_0_1166_0 = '21.0.1166.0'
V26_0_1386_0 = '26.0.1386.0'
VTRUNK_138079 = 'trunk.138079'
B18_0_1025_163_R1_MLW = MakePlatformBundle(18, 1, V18_0_1025_163, OS_MLW)
B18_0_1025_184_R1_MLW = MakePlatformBundle(18, 1, V18_0_1025_184, OS_MLW)
B18_R1_NONE = MakePlatformBundle(18)
B19_0_1084_41_R1_MLW = MakePlatformBundle(19, 1, V19_0_1084_41, OS_MLW)
B19_0_1084_67_R1_MLW = MakePlatformBundle(19, 1, V19_0_1084_67, OS_MLW)
B19_R1_NONE = MakePlatformBundle(19)
BCANARY_R1_NONE = MakePlatformBundle(0, stability=CANARY)
B21_0_1145_0_R1_MLW = MakePlatformBundle(21, 1, V21_0_1145_0, OS_MLW)
B21_0_1166_0_R1_MW = MakePlatformBundle(21, 1, V21_0_1166_0, OS_MW)
B26_R1_NONE = MakePlatformBundle(26)
B26_0_1386_0_R1_MLW = MakePlatformBundle(26, 1, V26_0_1386_0, OS_MLW)
BTRUNK_138079_R1_MLW = MakePlatformBundle(21, 1, VTRUNK_138079, OS_MLW)
NON_PEPPER_BUNDLE_NOARCHIVES = MakeNonPepperBundle('foo')
NON_PEPPER_BUNDLE_ARCHIVES = MakeNonPepperBundle('bar', with_archives=True)
class TestUpdateManifest(unittest.TestCase):
def setUp(self):
self.history = MakeHistory()
self.files = MakeFiles()
self.version_mapping = {}
self.delegate = None
self.uploaded_manifest = None
self.manifest = None
def _MakeDelegate(self):
self.delegate = TestDelegate(self.manifest, self.history.history,
self.files, self.version_mapping)
def _Run(self, host_oses, extra_archives=None, fixed_bundle_versions=None):
update_nacl_manifest.Run(self.delegate, host_oses, extra_archives,
fixed_bundle_versions)
def _HasUploadedManifest(self):
return 'naclsdk_manifest2.json' in self.files
def _ReadUploadedManifest(self):
self.uploaded_manifest = manifest_util.SDKManifest()
self.uploaded_manifest.LoadDataFromString(
self.files['naclsdk_manifest2.json'])
def _AssertUploadedManifestHasBundle(self, bundle, stability):
if stability == CANARY:
bundle_name = CANARY_BUNDLE_NAME
else:
bundle_name = bundle.name
uploaded_manifest_bundle = self.uploaded_manifest.GetBundle(bundle_name)
# Bundles that we create in the test (and in the manifest snippets) have
# their stability set to "dev". update_nacl_manifest correctly updates it.
# So we have to force the stability of |bundle| so they compare equal.
test_bundle = copy.copy(bundle)
test_bundle.stability = stability
if stability == CANARY:
test_bundle.name = CANARY_BUNDLE_NAME
self.assertEqual(uploaded_manifest_bundle, test_bundle)
def _AddCsvHistory(self, history):
import csv
import cStringIO
history_stream = cStringIO.StringIO(history)
self.history.history = [(platform, channel, version, date)
for platform, channel, version, date in csv.reader(history_stream)]
def testNoUpdateNeeded(self):
self.manifest = MakeManifest(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self.assertFalse(self._HasUploadedManifest())
# Add another bundle, make sure it still doesn't update
self.manifest.AddBundle(B19_0_1084_41_R1_MLW)
self._Run(OS_MLW)
self.assertFalse(self._HasUploadedManifest())
def testSimpleUpdate(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testOnePlatformHasNewerRelease(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_M, BETA, V18_0_1025_175) # Mac has newer version
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testMultipleMissingPlatformsInHistory(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_ML, BETA, V18_0_1025_184)
self.history.Add(OS_M, BETA, V18_0_1025_175)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testUpdateOnlyOneBundle(self):
self.manifest = MakeManifest(B18_R1_NONE, B19_0_1084_41_R1_MLW)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self._AssertUploadedManifestHasBundle(B19_0_1084_41_R1_MLW, DEV)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 2)
def testUpdateTwoBundles(self):
self.manifest = MakeManifest(B18_R1_NONE, B19_R1_NONE)
self.history.Add(OS_MLW, DEV, V19_0_1084_41)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self.files.Add(B19_0_1084_41_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self._AssertUploadedManifestHasBundle(B19_0_1084_41_R1_MLW, DEV)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 2)
def testUpdateWithMissingPlatformsInArchives(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_184)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_184_R1_MLW, add_archive_for_os=OS_M)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testUpdateWithMissingManifestSnippets(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_184)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_184_R1_MLW, add_json_for_os=OS_ML)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testRecommendedIsStable(self):
for channel in STABLE, BETA, DEV, CANARY:
self.setUp()
bundle = copy.deepcopy(B18_R1_NONE)
self.manifest = MakeManifest(bundle)
self.history.Add(OS_MLW, channel, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
uploaded_bundle = self.uploaded_manifest.GetBundle('pepper_18')
if channel == STABLE:
self.assertEqual(uploaded_bundle.recommended, 'yes')
else:
self.assertEqual(uploaded_bundle.recommended, 'no')
def testNoUpdateWithNonPepperBundle(self):
self.manifest = MakeManifest(NON_PEPPER_BUNDLE_NOARCHIVES,
B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self.assertFalse(self._HasUploadedManifest())
def testUpdateWithHistoryWithExtraneousPlatforms(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_ML, BETA, V18_0_1025_184)
self.history.Add(OS_CR, BETA, V18_0_1025_184)
self.history.Add(OS_CR, BETA, V18_0_1025_175)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testSnippetWithStringRevisionAndVersion(self):
# This test exists because some manifest snippets were uploaded with
# strings for their revisions and versions. I want to make sure the
# resulting manifest is still consistent with the old format.
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
bundle_string_revision = MakePlatformBundle('18', '1234', V18_0_1025_163,
OS_MLW)
self.files.Add(bundle_string_revision)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
uploaded_bundle = self.uploaded_manifest.GetBundle(
bundle_string_revision.name)
self.assertEqual(uploaded_bundle.revision, 1234)
self.assertEqual(uploaded_bundle.version, 18)
def testUpdateCanary(self):
# Note that the bundle in naclsdk_manifest2.json will be called
# CANARY_BUNDLE_NAME, whereas the bundle in the manifest "snippet" will be
# called "pepper_21".
canary_bundle = copy.deepcopy(BCANARY_R1_NONE)
self.manifest = MakeManifest(canary_bundle)
self.history.Add(OS_MW, CANARY, V21_0_1145_0)
self.files.Add(B21_0_1145_0_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B21_0_1145_0_R1_MLW, CANARY)
def testUpdateCanaryUseTrunkArchives(self):
canary_bundle = copy.deepcopy(BCANARY_R1_NONE)
self.manifest = MakeManifest(canary_bundle)
self.history.Add(OS_MW, CANARY, V21_0_1166_0)
self.files.Add(B21_0_1166_0_R1_MW)
self.files.Add(BTRUNK_138079_R1_MLW)
self.version_mapping[V21_0_1166_0] = VTRUNK_138079
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
test_bundle = copy.deepcopy(B21_0_1166_0_R1_MW)
test_bundle.AddArchive(BTRUNK_138079_R1_MLW.GetArchive('linux'))
self._AssertUploadedManifestHasBundle(test_bundle, CANARY)
def testCanaryUseOnlyTrunkArchives(self):
self.manifest = MakeManifest(copy.deepcopy(BCANARY_R1_NONE))
history = """win,canary,21.0.1163.0,2012-06-04 12:35:44.784446
mac,canary,21.0.1163.0,2012-06-04 11:54:09.433166"""
self._AddCsvHistory(history)
self.version_mapping['21.0.1163.0'] = 'trunk.140240'
my_bundle = MakePlatformBundle(21, 140240, '21.0.1163.0', OS_MLW)
self.files.Add(my_bundle)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(my_bundle, CANARY)
def testCanaryShouldOnlyUseCanaryVersions(self):
canary_bundle = copy.deepcopy(BCANARY_R1_NONE)
self.manifest = MakeManifest(canary_bundle)
self.history.Add(OS_MW, CANARY, V21_0_1166_0)
self.history.Add(OS_MW, BETA, V19_0_1084_41)
self.files.Add(B19_0_1084_41_R1_MLW)
self.version_mapping[V21_0_1166_0] = VTRUNK_138079
self._MakeDelegate()
self.assertRaises(Exception, self._Run, OS_MLW)
def testMissingCanaryFollowedByStableShouldWork(self):
history = """win,canary,21.0.1160.0,2012-06-01 19:44:35.936109
mac,canary,21.0.1160.0,2012-06-01 18:20:02.003123
mac,stable,19.0.1084.52,2012-06-01 17:59:21.559710
win,canary,21.0.1159.2,2012-06-01 02:31:43.877688
mac,stable,19.0.1084.53,2012-06-01 01:39:57.549149
win,canary,21.0.1158.0,2012-05-31 20:16:55.615236
win,canary,21.0.1157.0,2012-05-31 17:41:29.516013
mac,canary,21.0.1158.0,2012-05-31 17:41:27.591354
mac,beta,20.0.1132.21,2012-05-30 23:45:38.535586
linux,beta,20.0.1132.21,2012-05-30 23:45:37.025015
cf,beta,20.0.1132.21,2012-05-30 23:45:36.767529
win,beta,20.0.1132.21,2012-05-30 23:44:56.675123
win,canary,21.0.1156.1,2012-05-30 22:28:01.872056
mac,canary,21.0.1156.1,2012-05-30 21:20:29.920390
win,canary,21.0.1156.0,2012-05-30 12:46:48.046627
mac,canary,21.0.1156.0,2012-05-30 12:14:21.305090"""
self.manifest = MakeManifest(copy.deepcopy(BCANARY_R1_NONE))
self._AddCsvHistory(history)
self.version_mapping = {
'21.0.1160.0': 'trunk.139984',
'21.0.1159.2': 'trunk.139890',
'21.0.1158.0': 'trunk.139740',
'21.0.1157.0': 'unknown',
'21.0.1156.1': 'trunk.139576',
'21.0.1156.0': 'trunk.139984'}
self.files.Add(MakePlatformBundle(21, 139890, '21.0.1159.2', OS_MLW))
self.files.Add(MakePlatformBundle(21, 0, '21.0.1157.1', ('linux', 'win')))
my_bundle = MakePlatformBundle(21, 139576, '21.0.1156.1', OS_MLW)
self.files.Add(my_bundle)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(my_bundle, CANARY)
def testExtensionWorksAsBz2(self):
# Allow old bundles with just .bz2 extension to work
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
bundle = copy.deepcopy(B18_0_1025_163_R1_MLW)
archive_url = bundle.GetArchive('mac').url
bundle.GetArchive('mac').url = archive_url.replace('.tar', '')
self.files.Add(bundle)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(bundle, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testOnlyOneStableBundle(self):
self.manifest = MakeManifest(B18_R1_NONE, B19_R1_NONE)
self.history.Add(OS_MLW, STABLE, V18_0_1025_163)
self.history.Add(OS_MLW, STABLE, V19_0_1084_41)
self.files.Add(B18_0_1025_163_R1_MLW)
self.files.Add(B19_0_1084_41_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
p18_bundle = self.uploaded_manifest.GetBundle(B18_R1_NONE.name)
self.assertEqual(p18_bundle.stability, POST_STABLE)
p19_bundle = self.uploaded_manifest.GetBundle(B19_R1_NONE.name)
self.assertEqual(p19_bundle.stability, STABLE)
def testDontPushIfNoChange(self):
# Make an online manifest that already has this bundle.
online_manifest = MakeManifest(B18_0_1025_163_R1_MLW)
self.files.AddOnlineManifest(online_manifest.GetDataAsString())
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, DEV, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self.assertFalse(self.delegate.called_gsutil_cp)
def testDontPushIfRollback(self):
# Make an online manifest that has a newer bundle
online_manifest = MakeManifest(B18_0_1025_184_R1_MLW)
self.files.AddOnlineManifest(online_manifest.GetDataAsString())
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, DEV, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self.assertFalse(self.delegate.called_gsutil_cp)
def testRunWithFixedBundleVersions(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self.files.Add(B18_0_1025_184_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW, None, [('pepper_18', '18.0.1025.184')])
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_184_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testRunWithMissingFixedBundleVersions(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW, None, [('pepper_18', '18.0.1025.184')])
# Nothing should be uploaded if the user gives a missing fixed version.
self.assertFalse(self.delegate.called_gsutil_cp)
def testDontIncludeRandomBundles(self):
self.manifest = MakeManifest(B26_R1_NONE)
self.history.Add(OS_MLW, BETA, V26_0_1386_0)
self.files.Add(B26_0_1386_0_R1_MLW)
some_other_bundle = MakePepperBundle(26, 1, V26_0_1386_0, BETA)
some_other_archive = MakeNonPlatformArchive('some_other.tar.bz2',
V26_0_1386_0)
some_other_bundle.AddArchive(some_other_archive)
self.files.AddArchive(some_other_bundle, some_other_archive)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
uploaded_bundle = self.uploaded_manifest.GetBundle('pepper_26')
self.assertEqual(1, len(uploaded_bundle.GetHostOSArchives()))
def testNaclportsBundle(self):
self.manifest = MakeManifest(B26_R1_NONE)
self.history.Add(OS_MLW, BETA, V26_0_1386_0)
self.files.Add(B26_0_1386_0_R1_MLW)
# NaclPorts "bundle".
naclports_bundle = MakePepperBundle(26, 1, V26_0_1386_0, BETA)
naclports_archive = MakeNonPlatformArchive('naclports.tar.bz2',
V26_0_1386_0)
naclports_bundle.AddArchive(naclports_archive)
self.files.AddArchive(naclports_bundle, naclports_archive)
self._MakeDelegate()
self._Run(OS_MLW, [('naclports.tar.bz2', '26.0.1386.0')])
self._ReadUploadedManifest()
uploaded_bundle = self.uploaded_manifest.GetBundle('pepper_26')
self.assertEqual(2, len(uploaded_bundle.GetHostOSArchives()))
class TestUpdateVitals(unittest.TestCase):
def setUp(self):
f = tempfile.NamedTemporaryFile('w', prefix="test_update_nacl_manifest")
self.test_file = f.name
f.close()
test_data = "Some test data"
self.sha1 = hashlib.sha1(test_data).hexdigest()
self.data_len = len(test_data)
with open(self.test_file, 'w') as f:
f.write(test_data)
def tearDown(self):
os.remove(self.test_file)
def testUpdateVitals(self):
archive = manifest_util.Archive(manifest_util.GetHostOS())
path = os.path.abspath(self.test_file)
if sys.platform == 'win32':
# On Windows, the path must start with three slashes, i.e.
# (file:///C:\whatever)
path = '/' + path
archive.url = 'file://' + path
bundle = MakePlatformBundle(18)
bundle.AddArchive(archive)
manifest = MakeManifest(bundle)
archive = manifest.GetBundles()[0]['archives'][0]
self.assertTrue('size' not in archive)
self.assertTrue('checksum' not in archive)
self.assertRaises(manifest_util.Error, manifest.Validate)
manifest.Validate(add_missing_info=True)
self.assertEqual(archive['size'], self.data_len)
self.assertEqual(archive['checksum']['sha1'], self.sha1)
class TestRealDelegate(unittest.TestCase):
def setUp(self):
self.delegate = update_nacl_manifest.RealDelegate()
def testGetTrunkRevision(self):
revision_dict = {
'21.0.1180.80': '151582',
'23.0.1271.89': '167132',
'24.0.1305.4': '164971',
}
for version, revision in revision_dict.iteritems():
self.assertEqual('trunk.%s' % revision,
self.delegate.GetTrunkRevision(version))
if __name__ == '__main__':
sys.exit(unittest.main())
|
bsd-3-clause
| 7,287,103,956,500,293,000 | 35.181818 | 79 | 0.696569 | false |