| repo_name stringlengths 5-92 | path stringlengths 4-232 | copies stringclasses 19 values | size stringlengths 4-7 | content stringlengths 721-1.04M | license stringclasses 15 values | hash int64 -9,223,277,421,539,062,000 to 9,223,102,107B | line_mean float64 6.51-99.9 | line_max int64 15-997 | alpha_frac float64 0.25-0.97 | autogenerated bool 1 class |
---|---|---|---|---|---|---|---|---|---|---|
edmorley/treeherder
|
treeherder/services/pulse/consumers.py
|
1
|
5645
|
import logging
import newrelic.agent
from kombu import (Exchange,
Queue)
from kombu.mixins import ConsumerMixin
from treeherder.etl.common import fetch_json
from treeherder.etl.tasks.pulse_tasks import (store_pulse_jobs,
store_pulse_pushes)
from .exchange import get_exchange
logger = logging.getLogger(__name__)
# Used for making API calls to Pulse Guardian, such as detecting bindings on
# the current ingestion queue.
PULSE_GUARDIAN_URL = "https://pulseguardian.mozilla.org/"
class PulseConsumer(ConsumerMixin):
"""
Consume jobs from Pulse exchanges
"""
def __init__(self, connection):
self.connection = connection
self.consumers = []
self.queue = None
self.queue_name = "queue/{}/{}".format(connection.userid, self.queue_suffix)
def get_consumers(self, Consumer, channel):
return [
Consumer(**c) for c in self.consumers
]
def bind_to(self, exchange, routing_key):
if not self.queue:
self.queue = Queue(
name=self.queue_name,
channel=self.connection.channel(),
exchange=exchange,
routing_key=routing_key,
durable=True,
auto_delete=False,
)
self.consumers.append(dict(queues=self.queue,
callbacks=[self.on_message]))
# just in case the queue does not already exist on Pulse
self.queue.declare()
else:
self.queue.bind_to(exchange=exchange, routing_key=routing_key)
def unbind_from(self, exchange, routing_key):
self.queue.unbind_from(exchange, routing_key)
def close(self):
self.connection.release()
def prune_bindings(self, new_bindings):
# get the existing bindings for the queue
bindings = []
try:
bindings = self.get_bindings(self.queue_name)["bindings"]
except Exception:
logger.error("Unable to fetch existing bindings for %s. Data ingestion may proceed, "
"but no bindings will be pruned", self.queue_name)
# Now prune any bindings from the queue that were not
# established above.
# This indicates that they are no longer in the config, and should
# therefore be removed from the durable queue bindings list.
for binding in bindings:
if binding["source"]:
binding_str = self.get_binding_str(binding["source"],
binding["routing_key"])
if binding_str not in new_bindings:
self.unbind_from(Exchange(binding["source"]),
binding["routing_key"])
logger.info("Unbound from: %s", binding_str)
def get_binding_str(self, exchange, routing_key):
"""Use consistent string format for binding comparisons"""
return "{} {}".format(exchange, routing_key)
def get_bindings(self, queue_name):
"""Get list of bindings from the pulse API"""
return fetch_json("{}queue/{}/bindings".format(PULSE_GUARDIAN_URL, queue_name))
class JobConsumer(PulseConsumer):
queue_suffix = "jobs"
@newrelic.agent.background_task(name='pulse-listener-jobs.on_message', group='Pulse Listener')
def on_message(self, body, message):
exchange = message.delivery_info['exchange']
routing_key = message.delivery_info['routing_key']
logger.info('received job message from %s#%s', exchange, routing_key)
store_pulse_jobs.apply_async(
args=[body, exchange, routing_key],
queue='store_pulse_jobs'
)
message.ack()
class PushConsumer(PulseConsumer):
queue_suffix = "resultsets"
@newrelic.agent.background_task(name='pulse-listener-pushes.on_message', group='Pulse Listener')
def on_message(self, body, message):
exchange = message.delivery_info['exchange']
routing_key = message.delivery_info['routing_key']
logger.info('received push message from %s#%s', exchange, routing_key)
store_pulse_pushes.apply_async(
args=[body, exchange, routing_key],
queue='store_pulse_pushes'
)
message.ack()
def bind_to(consumer, exchange, routing_key):
# bind the given consumer to the current exchange with a routing key
consumer.bind_to(exchange=exchange, routing_key=routing_key)
# get the binding key for this consumer
binding = consumer.get_binding_str(exchange.name, routing_key)
logger.info("Pulse queue {} bound to: {}".format(consumer.queue_name, binding))
return binding
def prepare_consumer(connection, consumer_cls, sources, build_routing_key=None):
consumer = consumer_cls(connection)
bindings = []
for source in sources:
# split source string into exchange and routing key sections
exchange, _, routing_keys = source.partition('.')
        # build an Exchange object from our connection and the exchange name
exchange = get_exchange(connection, exchange)
# split the routing keys up using the delimiter
for routing_key in routing_keys.split(':'):
if build_routing_key is not None: # build routing key
routing_key = build_routing_key(routing_key)
binding = bind_to(consumer, exchange, routing_key)
bindings.append(binding)
# prune stale queues using the binding strings
consumer.prune_bindings(bindings)
return consumer
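# Minimal usage sketch, assuming a Pulse connection URL and a single jobs
# source string; both values below are illustrative assumptions, not taken
# from this module or its configuration.
if __name__ == '__main__':
    from kombu import Connection
    with Connection('amqps://guest:guest@pulse.mozilla.org:5671') as connection:
        consumer = prepare_consumer(
            connection,
            JobConsumer,
            ['exchange/taskcluster-treeherder/v1/jobs.#'],
        )
        try:
            consumer.run()  # ConsumerMixin.run() blocks, dispatching to on_message()
        finally:
            consumer.close()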
|
mpl-2.0
| -7,936,321,859,886,278,000 | 35.419355 | 100 | 0.620903 | false |
tigeorgia/fixmystreet
|
apps/users/migrations/0001_initial.py
|
1
|
2962
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='FMSUser',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('username', models.CharField(unique=True, max_length=20, verbose_name='username')),
('email', models.EmailField(unique=True, max_length=254, verbose_name='email address')),
('first_name', models.CharField(max_length=70, verbose_name='first name')),
('last_name', models.CharField(max_length=70, verbose_name='last name')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('is_staff', models.BooleanField(default=False, verbose_name='staff')),
('is_active', models.BooleanField(default=True, verbose_name='active')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='FMSSettings',
fields=[
('user', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('language', models.CharField(default=b'ka', max_length=2, verbose_name='language', choices=[(b'ka', 'Georgian'), (b'en', 'English')])),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='fmsuser',
name='groups',
field=models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups'),
preserve_default=True,
),
migrations.AddField(
model_name='fmsuser',
name='user_permissions',
field=models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions'),
preserve_default=True,
),
]
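# Usage sketch: Django applies this file through its migration framework.
# Assuming the app label is 'users' (inferred from the apps/users/ path),
# the standard command would be:
#
#     python manage.py migrate users 0001_initial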
|
gpl-2.0
| 3,277,017,067,668,940,000 | 49.20339 | 257 | 0.597907 | false |
WalkingMachine/sara_behaviors
|
sara_flexbe_behaviors/src/sara_flexbe_behaviors/action_receive_bag_sm.py
|
1
|
3679
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###########################################################
# WARNING: Generated code! #
# ************************** #
# Manual changes may get lost if file is generated again. #
# Only code inside the [MANUAL] tags will be kept. #
###########################################################
from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger
from sara_flexbe_states.run_trajectory import RunTrajectory
from sara_flexbe_states.set_gripper_state import SetGripperState
from sara_flexbe_states.sara_say import SaraSay
from sara_flexbe_states.torque_reader import ReadTorque
# Additional imports can be added inside the following tags
# [MANUAL_IMPORT]
# [/MANUAL_IMPORT]
'''
Created on Thu Jul 27 2017
@author: Redouane Laref
'''
class Action_Receive_BagSM(Behavior):
'''
Action for receiving the bag for help me carry scenario.
'''
def __init__(self):
super(Action_Receive_BagSM, self).__init__()
self.name = 'Action_Receive_Bag'
# parameters of this behavior
# references to used behaviors
# Additional initialization code can be added inside the following tags
# [MANUAL_INIT]
# [/MANUAL_INIT]
# Behavior comments:
# O 128 372
        # Takes the bag and brings it back to its idle pose
def create(self):
# x:867 y:64, x:469 y:60
_state_machine = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['Closed_Gripper_Width', 'Open_Gripper_Width', 'Closed_Gripper_Width'])
_state_machine.userdata.Closed_Gripper_Width = 1
_state_machine.userdata.Open_Gripper_Width = 255
_state_machine.userdata.effort = 50
# Additional creation code can be added inside the following tags
# [MANUAL_CREATE]
# [/MANUAL_CREATE]
with _state_machine:
# x:101 y:292
OperatableStateMachine.add('place arm',
RunTrajectory(file="receive_bag", duration=6),
transitions={'done': 'opengripper'},
autonomy={'done': Autonomy.Off})
# x:468 y:286
OperatableStateMachine.add('close_gripper',
SetGripperState(width=0, effort=1),
transitions={'object': 'thank you', 'no_object': 'thank you'},
autonomy={'object': Autonomy.Off, 'no_object': Autonomy.Off},
remapping={'object_size': 'object_size'})
# x:638 y:216
OperatableStateMachine.add('thank you',
SaraSay(sentence="Thank you", input_keys=[], emotion=1, block=True),
transitions={'done': 'place back arm'},
autonomy={'done': Autonomy.Off})
# x:653 y:81
OperatableStateMachine.add('place back arm',
RunTrajectory(file="sac_transport", duration=0),
transitions={'done': 'finished'},
autonomy={'done': Autonomy.Off})
# x:263 y:293
OperatableStateMachine.add('Torque_Reader',
ReadTorque(watchdog=5, Joint="right_elbow_pitch_joint", Threshold=0.5, min_time=1),
transitions={'threshold': 'close_gripper', 'watchdog': 'close_gripper', 'fail': 'failed'},
autonomy={'threshold': Autonomy.Off, 'watchdog': Autonomy.Off, 'fail': Autonomy.Off},
remapping={'torque': 'torque'})
# x:196 y:148
OperatableStateMachine.add('opengripper',
SetGripperState(width=0.25, effort=1),
transitions={'object': 'Torque_Reader', 'no_object': 'Torque_Reader'},
autonomy={'object': Autonomy.Off, 'no_object': Autonomy.Off},
remapping={'object_size': 'object_size'})
return _state_machine
# Private functions can be added inside the following tags
# [MANUAL_FUNC]
# [/MANUAL_FUNC]
|
bsd-3-clause
| 1,963,174,731,779,528,200 | 32.144144 | 157 | 0.633052 | false |
fparrel/regepe
|
vps/gpxparser.py
|
1
|
8080
|
# For xml parsing
try:
from etree.ElementTree import ElementTree
except ImportError:
from xml.etree.ElementTree import ElementTree
# For date parsing
from datetime import datetime
import time
# Model classes
from model import Bounds,Point,Track
#i18n
from flask_babel import gettext
# gpx creator="KeyMaze 500-700 PC Software" -> speed unit = hectometres per hour
class GpxPoint:
def __init__(self,trkptxmlelement,xmlns):
"Create a Point from a gpx:pt xmlelement and the xml namespace"
self.lat = float(trkptxmlelement.get('lat'))
self.lon = float(trkptxmlelement.get('lon'))
elestr = trkptxmlelement.findtext(xmlns+'ele')
if not elestr==None:
            # replace ',' with '.' in case of bad formatting (MobiDream)
self.ele = float(elestr.replace(',','.'))
else:
self.ele = None
spdstr = trkptxmlelement.findtext(xmlns+'speed')
if not spdstr==None:
self.spd = float(spdstr.replace(',','.'))
else:
self.spd = None
coursestr = trkptxmlelement.findtext(xmlns+'course')
if not coursestr==None:
self.course = float(coursestr.replace(',','.'))
else:
self.course = None
datetimestr = trkptxmlelement.findtext(xmlns+'time')
if not datetimestr==None:
# date in format YY-mm-dd
if datetimestr.find('T')==8:
datetimestr = '20' + datetimestr
datetimestr = datetimestr[:19]
# Fix a GPS Action Replay bug
if not datetimestr.find('Z')==-1:
datetimestr = datetimestr[:datetimestr.find('Z')]
try:
# Python > 2.4
self.datetime = datetime.strptime(datetimestr,'%Y-%m-%dT%H:%M:%S')
except AttributeError:
try:
# Python 2.4
self.datetime = datetime(*(time.strptime(datetimestr,'%Y-%m-%dT%H:%M:%S')[0:6]))
except ValueError:
raise Exception(gettext('Cannot convert date %s') % datetimestr)
else:
self.datetime = None
for e in trkptxmlelement:
if e.tag==xmlns+'extensions':
for sube in e:
if sube.tag.endswith('TrackPointExtension'):
for subsube in sube:
if subsube.tag.endswith('hr'):
self.hr = int(subsube.text)
def ToPoint(self):
pt = Point(self.lat,self.lon,self.ele,self.spd,self.course,self.datetime)
if hasattr(self,'hr'):
pt.hr = self.hr
return pt
class GpxTrkSeg:
"Keeps a Track Segement (list of points) got from a gpx file (trkseg tag)"
# ptlist: list of points
# bounds: bounds of the list of points
def __init__(self,trksegxmlelement,xmlns):
"Create a TrackSeg from a gpx:trkseg xmlelement and the xml namespace"
self.bounds = Bounds()
self.ptlist = []
for trkpt in trksegxmlelement:
pt = GpxPoint(trkpt,xmlns)
if not(pt.lat==0.0 and pt.lon==0.0): # Fix for Garmin Connect's bug
self.ptlist.append(pt)
self.bounds.Extend(pt.lat,pt.lon)
def __add__(self,other):
out = GpxTrkSeg([],'')
out.ptlist.extend(self.ptlist)
out.ptlist.extend(other.ptlist)
for pt in out.ptlist:
out.bounds.Extend(pt.lat,pt.lon)
return out
class GpxTrack:
"Keeps a Track got from a gpx file (trk tag)"
# trkseglist: list of track segment
# name: name of the track
# bounds: bounds of track
def __init__(self,trkxmlelement,xmlns):
self.bounds = Bounds()
self.trkseglist = []
index = 0
for e in trkxmlelement:
if e.tag==xmlns+'name':
self.name = e.text
if e.tag==xmlns+'trkseg':
index = index + 1
trackseg = GpxTrkSeg(e,xmlns)
self.trkseglist.append(trackseg)
self.bounds.Extend(trackseg.bounds)
class GpxRoutePoint:
# lat
# lon
# ele
# name
def __init__(self,rteptxmlelement,xmlns):
"Create a Route Point from a gpx:rtept or a gpx:wpt xmlelement and the xml namespace"
self.lat = float(rteptxmlelement.get('lat'))
self.lon = float(rteptxmlelement.get('lon'))
elestr = rteptxmlelement.findtext(xmlns+'ele')
if not elestr==None:
            # replace ',' with '.' in case of bad formatting (MobiDream)
self.ele = float(elestr.replace(',','.'))
else:
self.ele = None
self.name = rteptxmlelement.findtext(xmlns+'name')
def ToPoint(self):
return Point(self.lat,self.lon,self.ele,None,None,None)
def __str__(self):
return 'GpxRoutePoint(%f,%f,%f,%s)'%(self.lat,self.lon,self.ele,self.name)
class GpxRoute:
"Keeps a Route got from a gpx file (rte tag)"
# ptlist: list of GpxRoutePt
# name: name of the route
# bounds: bounds of route
def __init__(self,rtexmlelement,xmlns):
self.ptlist = []
self.bounds = Bounds()
for e in rtexmlelement:
if e.tag==xmlns+'name':
self.name = e.text
elif e.tag==xmlns+'rtept':
pt = GpxRoutePoint(e,xmlns)
self.ptlist.append(pt)
self.bounds.Extend(pt.lat,pt.lon)
class GpxWpts:
def __init__(self,rtexmlelement,xmlns):
self.ptlist = []
self.bounds = Bounds()
for e in rtexmlelement:
if e.tag==xmlns+'wpt':
pt = GpxRoutePoint(e,xmlns)
self.ptlist.append(pt)
self.bounds.Extend(pt.lat,pt.lon)
class GpxFile:
"Keeps data contained in a gpx file (gpx tag)"
# tracklist: list of GpxTrack
# routelist: list of GpxRoute
# bounds: bounds of gpx file
def ParseBounds(self,boundsxmlelement):
self.bounds.minlat = float(boundsxmlelement.get('minlat'))
self.bounds.maxlat = float(boundsxmlelement.get('maxlat'))
self.bounds.minlon = float(boundsxmlelement.get('minlon'))
self.bounds.maxlon = float(boundsxmlelement.get('maxlon'))
def __init__(self,gpxxmlelement,xmlns):
self.bounds = Bounds()
self.tracklist = []
self.routelist = []
for e in gpxxmlelement:
if e.tag==xmlns+'bounds':
self.ParseBounds(e)
if e.tag==xmlns+'trk':
track = GpxTrack(e,xmlns)
self.tracklist.append(track)
self.bounds.Extend(track.bounds)
if e.tag==xmlns+'rte':
route = GpxRoute(e,xmlns)
self.routelist.append(route)
if e.tag==xmlns+'wpt':
route = GpxWpts(gpxxmlelement,xmlns)
self.routelist.append(route)
def ParseGpxFile(inputfile,trk_id,trk_seg_id):
tree = ElementTree()
tree.parse(inputfile)
xmlns = str(tree.getroot())
xmlns = xmlns[xmlns.find('{'):xmlns.find('}')+1]
gpx = GpxFile(tree.getroot(),xmlns)
if len(gpx.tracklist)<1:
# Try with <rte>
if len(gpx.routelist)<1:
raise Exception(gettext('No track found in file'))
else:
return map(GpxRoutePoint.ToPoint,gpx.routelist[trk_id].ptlist)
return map(GpxPoint.ToPoint,reduce(lambda x,y:x+y,gpx.tracklist[trk_id].trkseglist).ptlist)
#return map(GpxPoint.ToPoint,gpx.tracklist[trk_id].trkseglist[trk_seg_id].ptlist)
## UNIT TEST CODE ##
def main():
#ptlist = ParseGpxFile('D:/Documents/Downloads/Racemment_importa_de_JAN0712_181820.GPX',0,0)
ptlist = ParseGpxFile('D:/Userfiles/fparrel/Downloads/2015-01-12 1951__20150112_1951.gpx',0,0)
#ptlist = ParseGpxFile('gpx/FPARREL_832004951_20091022_172903.gpx',0,0)
#ptlist = ParseGpxFile('Test.gpx',0,0)
for pt in ptlist:
print pt
if hasattr(pt,'hr'):
print pt.hr
#raw_input('Press Enter')
if __name__ == '__main__':
main()
|
gpl-3.0
| -4,651,196,639,429,337,000 | 34.911111 | 100 | 0.576485 | false |
Outernet-Project/outernet-broadman
|
broadman/db.py
|
1
|
1973
|
"""
Functions for working with broadcast database
Copyright 2015, Outernet Inc.
Some rights reserved.
This software is free software licensed under the terms of GPLv3. See COPYING
file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
"""
import sqlite3
import datetime
import sqlize as sql
from . import path
OperationalError = sqlite3.OperationalError
ProgrammingError = sqlite3.ProgrammingError
class DB:
TABLE = 'broadcasts'
SCHEMA = """
create table if not exists broadcasts (
content_id text,
server_id text,
commit_hash text,
title text,
url text,
size integer,
collected timestamp,
packed timestamp,
aired timestamp,
removed timestamp,
expires timestamp
);
"""
def __init__(self, db=path.BROADCAST):
self.con = sqlite3.connect(db)
self.con.row_factory = sqlite3.Row
self.create_table()
def create_table(self):
self.con.executescript(self.SCHEMA)
def add_content(self, id, server, commit, title, url, size, collected,
packed, aired, expires=None):
q = sql.Insert(self.TABLE, cols=(
'content_id', 'server_id', 'commit_hash', 'title', 'url', 'size',
'collected', 'packed', 'aired', 'expires'))
self.con.execute(str(q), {
'content_id': id,
'server_id': server,
'commit_hash': commit,
'title': title,
'url': url,
'size': size,
'collected': collected,
'packed': packed,
'aired': aired,
'expires': expires,
})
self.con.commit()
self.con.close()
def remove_content(self, id):
q = sql.Update(self.TABLE, 'content_id=:id', removed=':time')
self.con.execute(str(q), {'id': id, 'time': datetime.datetime.today()})
self.con.commit()
self.con.close()
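# Minimal usage sketch against an in-memory SQLite database; every value
# below is an illustrative assumption, not data from this project.
if __name__ == '__main__':
    db = DB(db=':memory:')
    now = datetime.datetime.utcnow()
    db.add_content(id='abc123', server='srv-1', commit='deadbeef',
                   title='Example item', url='http://example.com/item',
                   size=1024, collected=now, packed=now, aired=now)
    # Note: add_content() and remove_content() both close the connection,
    # so one DB instance is good for a single call.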
|
gpl-3.0
| 4,505,186,145,183,440,000 | 26.027397 | 79 | 0.580335 | false |
mgeorgehansen/FIFE_Technomage
|
tests/analyzers/dep_analyzer.py
|
1
|
7256
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2009 by the FIFE team
# http://www.fifengine.de
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
import sys, re, os
try:
    from shutil import copyfile
except ImportError:
def copyfile(src,dest):
srcf = open(src)
destf = open(dest,"w")
destf.write( srcf.read() )
destf.close()
srcf.close()
if '.' not in sys.path:
sys.path.append('.')
from utils.util_scripts.path import path
from _allowed_dependencies import ALLOWED_MODULE_DEPS
_S = os.path.sep
ROOTDIRNAME = 'engine%score' % _S
FILE_DEPS_OUT = 'doc%sdependencies%sfiledeps' % (_S, _S)
DIR_DEPS_OUT = 'doc%sdependencies%sdirdeps' % (_S, _S)
MODULE_DEPS_OUT = 'doc%sdependencies%smoduledeps' % (_S, _S)
SKIPPED_PROVIDERS = []
SKIPPED_USERS = []
reInc = re.compile('#include "(.*?)"')
def add_to_dictset(d, key, val):
try:
d[key].add(val)
except KeyError:
d[key] = set([val])
# Return an error string when a dependency problem is detected (empty string otherwise)
unknown_modules = []
def check_dep_error(allowed_deps, user, provider):
global unknown_modules
msg = ''
try:
if not provider in allowed_deps[user]:
            msg = 'Illegal dependency between %s -> %s, %s can use only:\n' % (user, provider, user)
for d in allowed_deps[user]:
msg += ' %s\n' % d
except KeyError:
print unknown_modules
if user not in unknown_modules:
msg = 'Unknown module %s found in static check\n' % user
msg += ' please adjust dep_analyzer script to match new structure'
unknown_modules.append(user)
return msg
def get_file2inc(sources):
file2inc = {}
for f in sources:
inComment = False
for line in open(f):
if not inComment and line.find('/*') != -1:
inComment = True
continue
elif inComment:
if line.find('*/') != -1:
inComment = False
continue
elif line.strip().startswith('//'):
continue
m = reInc.search(line)
if m:
add_to_dictset(file2inc, f, m.group(1).replace( '/', _S ))
return file2inc
def fill_dep_infos(file2inc, fileUser2provider, dirUser2provider, moduleUser2provider, unknownIncludes, dirclusters):
for f, incs in file2inc.items():
#if f.find('engine.cpp') != -1:
# import pdb; pdb.set_trace()
skip = False
for user in SKIPPED_USERS:
if f.find(user) != -1:
skip = True
break
if skip:
continue
for i in incs:
user = str(f.dirname()).replace(ROOTDIRNAME + _S, '')
header = path((f.dirname() / path(i)).abspath().split(ROOTDIRNAME + _S)[1])
if not header.isfile():
header = path(ROOTDIRNAME) + _S + path(i)
if not header.isfile():
add_to_dictset(unknownIncludes, str(f), str(i))
continue
provider = str(header.dirname()).replace(ROOTDIRNAME + _S, '')
skip = False
for skipped in SKIPPED_PROVIDERS:
if header.find(skipped) != -1:
skip = True
break
if skip:
continue
add_to_dictset(dirUser2provider, user, provider)
usermodule = user.split(_S)[0]
providermodule = provider.split(_S)[0]
userfile = user.split(_S)[-1].split('.')[0]
providerfile = provider.split(_S)[-1].split('.')[0]
add_to_dictset(dirclusters, usermodule, user)
add_to_dictset(dirclusters, providermodule, provider)
add_to_dictset(moduleUser2provider, usermodule, providermodule)
add_to_dictset(fileUser2provider, userfile, providerfile)
def write_dot_file(fname, contents):
lines = []
a = lines.append
a('digraph "source tree" {')
a(' overlap=scale;')
a(' size="8,10";')
a(' ratio="fill";')
a(' fontsize="16";')
a(' fontname="Helvetica";')
a(' clusterrank="local";')
if type(contents) in (list, tuple):
lines += contents
else:
lines.append(contents)
a('}')
open(fname, 'w').write('\n'.join(lines))
def get_cluster_str(ind, elements, label):
lines = []
a = lines.append
a('subgraph cluster_%d {' % ind)
a(' style=filled;')
a(' color=lightgrey;')
a(' node [style=filled,color=white];')
a(' %s' % '; '.join('"%s"' % d for d in elements))
a(' label = "%s";' % label)
a('}')
return '\n'.join(lines)
def run_dot(basename,type):
dotname = basename + ".dot"
outname = basename + "." + type
dotchanged = True
try:
olddot = open(dotname + "~").read()
dotchanged = olddot != open(dotname).read()
dotchanged = dotchanged or not os.path.exists(outname)
except IOError: pass
if not dotchanged:
return
print "Generating: ",outname
cmd = 'dot -T%(type)s %(dotname)s > %(outname)s' % locals()
os.system(cmd)
copyfile(dotname,dotname + "~")
def analyze(write_postscript=False):
root = path(ROOTDIRNAME)
headers = list(root.walkfiles('*.h'))
sources = headers + list(root.walkfiles('*.cpp'))
file2inc = get_file2inc(sources)
moduleUser2provider = {}
dirUser2provider = {}
fileUser2provider = {}
unknownIncludes = {}
dirclusters = {}
fill_dep_infos(file2inc, fileUser2provider, dirUser2provider, moduleUser2provider, unknownIncludes, dirclusters)
# write module dep graph
out = []
illegalModuleDeps = []
for user, providers in sorted(moduleUser2provider.items()):
for provider in sorted(providers):
if user != provider:
out.append(' "' + user + '" -> "' + provider + '"')
msg = check_dep_error(ALLOWED_MODULE_DEPS, user, provider)
if msg:
illegalModuleDeps.append(msg)
write_dot_file('%s.dot' % MODULE_DEPS_OUT, out)
if write_postscript:
run_dot(MODULE_DEPS_OUT, "ps")
run_dot(MODULE_DEPS_OUT,"png")
# write dir dep graph
out = []
for cluster, subdirs in sorted(dirclusters.items()):
out.append(get_cluster_str(len(out), sorted(subdirs), cluster))
for user, providers in sorted(dirUser2provider.items()):
for provider in sorted(providers):
if user != provider:
out.append(' "' + user + '" -> "' + provider + '"')
write_dot_file('%s.dot' % DIR_DEPS_OUT, out)
if write_postscript:
run_dot(DIR_DEPS_OUT, "ps")
# write file dep graph
out = []
for user, providers in sorted(file2inc.items()):
for provider in sorted(providers):
if user != provider:
out.append(' "' + user + '" -> "' + provider + '"')
write_dot_file('%s.dot' % FILE_DEPS_OUT, out)
# os.system('dot -Tps %s.dot > %s.ps' % (MODULE_DEPS_OUT, MODULE_DEPS_OUT))
# write raw dep info
#out = []
#for f, file2inc
result = '\n'.join(illegalModuleDeps)
if result:
print result
else:
print "no dependency analyzer errors found"
return result
_ANALYZE_FN_ = analyze
if __name__ == '__main__':
analyze(True)
|
lgpl-2.1
| 2,913,993,431,824,117,000 | 28.258065 | 117 | 0.645535 | false |
genevolv/dbrev
|
py/freevolv/models/dbrev/table.py
|
1
|
8372
|
''' dbrev.table can be thought of as a bean or a template. It
has only attributes with getters and setters.
'''
import logging
LOG = logging.getLogger(__name__)
# LOG.setLevel(logging.INFO)
# Long lines expected.
# pylint: disable=C0301
# Cyclic imports protected by functions
# pylint: disable=R0401
class Table(object):
'''Table class generated from TABLES table.'''
def __init__(self, database_name=None, schema_name=None, name=None, py_singular=None, cap_words_singular=None, py_plural=None, cap_words_plural=None, supertype_schema=None, supertype_name=None, primary_key_name=None):
self.database_name = database_name
self.schema_name = schema_name
self.name = name
self.py_singular = py_singular
self.cap_words_singular = cap_words_singular
self.py_plural = py_plural
self.cap_words_plural = cap_words_plural
self.supertype_schema = supertype_schema
self.supertype_name = supertype_name
self.primary_key_name = primary_key_name
self._database = None
self._primary_key = None
self._schema = None
self._supertype = None
self._columns = None
self._foreign_keys = None
self._tables = None
self._unique_key_columns = None
self._unique_keys = None
def __str__(self):
out = 'Table('
if self.database_name != None:
out += 'database_name:' + str(self.database_name) + ','
if self.schema_name != None:
out += 'schema_name:' + str(self.schema_name) + ','
if self.name != None:
out += 'name:' + str(self.name) + ','
if self.py_singular != None:
out += 'py_singular:' + str(self.py_singular) + ','
if self.cap_words_singular != None:
out += 'cap_words_singular:' + str(self.cap_words_singular) + ','
if self.py_plural != None:
out += 'py_plural:' + str(self.py_plural) + ','
if self.cap_words_plural != None:
out += 'cap_words_plural:' + str(self.cap_words_plural) + ','
if self.supertype_schema != None:
out += 'supertype_schema:' + str(self.supertype_schema) + ','
if self.supertype_name != None:
out += 'supertype_name:' + str(self.supertype_name) + ','
if self.primary_key_name != None:
out += 'primary_key_name:' + str(self.primary_key_name)
if out[-1:] == ',':
out = out[:-1]
out += ')'
return out
def get_database(self):
''' Getter method for database.'''
if self.database_name != None and self._database == None:
from freevolv.models.dbrev import databases_table
self._database = databases_table.DatabasesTable.get_instance() \
.get_one(name=self.database_name)
return self._database
def set_database(self, database):
''' Setter method for database.'''
self._database = database
database = property(get_database, set_database)
def get_primary_key(self):
''' Getter method for primary_key.'''
if self.database_name != None and self.schema_name != None and self.name != None and self.primary_key_name != None and self._primary_key == None:
from freevolv.models.dbrev import unique_keys_table
self._primary_key = unique_keys_table.UniqueKeysTable.get_instance() \
.get_one(database_name=self.database_name, schema_name=self.schema_name, table_name=self.name, name=self.primary_key_name)
return self._primary_key
def set_primary_key(self, primary_key):
''' Setter method for primary_key.'''
self._primary_key = primary_key
primary_key = property(get_primary_key, set_primary_key)
def get_schema(self):
''' Getter method for schema.'''
if self.database_name != None and self.schema_name != None and self._schema == None:
from freevolv.models.dbrev import schemas_table
self._schema = schemas_table.SchemasTable.get_instance() \
.get_one(database_name=self.database_name, name=self.schema_name)
return self._schema
def set_schema(self, schema):
''' Setter method for schema.'''
self._schema = schema
schema = property(get_schema, set_schema)
def get_supertype(self):
''' Getter method for supertype.'''
if self.database_name != None and self.supertype_schema != None and self.supertype_name != None and self._supertype == None:
from freevolv.models.dbrev import tables_table
self._supertype = tables_table.TablesTable.get_instance() \
.get_one(database_name=self.database_name, schema_name=self.supertype_schema, name=self.supertype_name)
return self._supertype
def set_supertype(self, supertype):
''' Setter method for supertype.'''
self._supertype = supertype
supertype = property(get_supertype, set_supertype)
def get_columns(self):
''' Getter method for columns.'''
if self.database_name != None and self.schema_name != None and self.name != None and self._columns == None:
from freevolv.models.dbrev import columns_table
self._columns = columns_table.ColumnsTable.get_instance() \
.get(database_name=self.database_name, schema_name=self.schema_name, table_name=self.name)
return self._columns
def set_columns(self, columns):
''' Setter method for columns.'''
self._columns = columns
columns = property(get_columns, set_columns)
def get_foreign_keys(self):
''' Getter method for foreign_keys.'''
if self.database_name != None and self.schema_name != None and self.name != None and self._foreign_keys == None:
from freevolv.models.dbrev import foreign_keys_table
self._foreign_keys = foreign_keys_table.ForeignKeysTable.get_instance() \
.get(database_name=self.database_name, schema_name=self.schema_name, table_name=self.name)
return self._foreign_keys
def set_foreign_keys(self, foreign_keys):
''' Setter method for foreign_keys.'''
self._foreign_keys = foreign_keys
foreign_keys = property(get_foreign_keys, set_foreign_keys)
def get_tables(self):
''' Getter method for tables.'''
if self.database_name != None and self.schema_name != None and self.name != None and self._tables == None:
from freevolv.models.dbrev import tables_table
self._tables = tables_table.TablesTable.get_instance() \
.get(database_name=self.database_name, supertype_schema=self.schema_name, supertype_name=self.name)
return self._tables
def set_tables(self, tables):
''' Setter method for tables.'''
self._tables = tables
tables = property(get_tables, set_tables)
def get_unique_key_columns(self):
''' Getter method for unique_key_columns.'''
if self.database_name != None and self.schema_name != None and self.name != None and self._unique_key_columns == None:
from freevolv.models.dbrev import unique_key_columns_table
self._unique_key_columns = unique_key_columns_table.UniqueKeyColumnsTable.get_instance() \
.get(database_name=self.database_name, schema_name=self.schema_name, table_name=self.name)
return self._unique_key_columns
def set_unique_key_columns(self, unique_key_columns):
''' Setter method for unique_key_columns.'''
self._unique_key_columns = unique_key_columns
unique_key_columns = property(get_unique_key_columns, set_unique_key_columns)
def get_unique_keys(self):
''' Getter method for unique_keys.'''
if self.database_name != None and self.schema_name != None and self.name != None and self._unique_keys == None:
from freevolv.models.dbrev import unique_keys_table
self._unique_keys = unique_keys_table.UniqueKeysTable.get_instance() \
.get(database_name=self.database_name, schema_name=self.schema_name, table_name=self.name)
return self._unique_keys
def set_unique_keys(self, unique_keys):
''' Setter method for unique_keys.'''
self._unique_keys = unique_keys
unique_keys = property(get_unique_keys, set_unique_keys)
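# Minimal usage sketch of the plain-bean pattern described in the module
# docstring; the attribute values here are illustrative assumptions.
if __name__ == '__main__':
    t = Table(database_name='mydb', schema_name='public', name='users',
              primary_key_name='users_pk')
    print(t)  # -> Table(database_name:mydb,schema_name:public,name:users,...)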
|
bsd-2-clause
| -5,223,640,026,249,078,000 | 47.674419 | 221 | 0.627807 | false |
openstack/ironic
|
ironic/drivers/modules/agent_power.py
|
1
|
8765
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The agent power interface.
"""
import time
from oslo_config import cfg
from oslo_log import log
import tenacity
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import states
from ironic.conductor import utils as cond_utils
from ironic.drivers import base
from ironic.drivers.modules import agent_client
CONF = cfg.CONF
LOG = log.getLogger(__name__)
_POWER_WAIT = 30
class AgentPower(base.PowerInterface):
"""Power interface using the running agent for power actions."""
def __init__(self):
super(AgentPower, self).__init__()
if not CONF.deploy.fast_track:
raise exception.InvalidParameterValue(
_('[deploy]fast_track must be True to enable the agent '
'power interface'))
self._client = agent_client.AgentClient()
def get_properties(self):
"""Return the properties of the interface.
:returns: dictionary of <property name>:<property description> entries.
"""
return {}
def validate(self, task):
"""Validate the driver-specific Node deployment info.
:param task: A TaskManager instance containing the node to act on.
:raises: InvalidParameterValue on malformed parameter(s)
"""
# NOTE(dtantsur): the fast_track option is mutable, so we have to check
# it again on validation.
if not CONF.deploy.fast_track:
raise exception.InvalidParameterValue(
_('[deploy]fast_track must be True to enable the agent '
'power interface'))
# TODO(dtantsur): support ACTIVE nodes
if not cond_utils.agent_is_alive(task.node):
raise exception.InvalidParameterValue(
_('Agent seems offline for node %s, the agent power interface '
'cannot be used') % task.node.uuid)
def supports_power_sync(self, task):
"""Check if power sync is supported for the given node.
Not supported for the agent power since it is not possible to power
on/off nodes.
:param task: A TaskManager instance containing the node to act on
with a **shared** lock.
:returns: boolean, whether power sync is supported.
"""
return False
def get_supported_power_states(self, task):
"""Get a list of the supported power states.
Only contains REBOOT.
:param task: A TaskManager instance containing the node to act on.
:returns: A list with the supported power states defined
in :mod:`ironic.common.states`.
"""
return [states.REBOOT, states.SOFT_REBOOT]
def get_power_state(self, task):
"""Return the power state of the task's node.
Essentially, the only known state is POWER ON, everything else is
an error (or more precisely ``None``).
:param task: A TaskManager instance containing the node to act on.
:returns: A power state. One of :mod:`ironic.common.states`.
"""
# TODO(dtantsur): support ACTIVE nodes
if cond_utils.agent_is_alive(task.node):
return states.POWER_ON
else:
LOG.error('Node %s is not fast-track-able, cannot determine '
'its power state via the "agent" power interface',
task.node.uuid)
return None
def set_power_state(self, task, power_state, timeout=None):
"""Set the power state of the task's node.
:param task: A TaskManager instance containing the node to act on.
:param power_state: Power state from :mod:`ironic.common.states`.
Only REBOOT and SOFT_REBOOT are supported and are synonymous.
:param timeout: timeout (in seconds) positive integer (> 0) for any
power state. ``None`` indicates to use default timeout.
:raises: PowerStateFailure on non-supported power state.
"""
if power_state in (states.REBOOT, states.SOFT_REBOOT):
return self.reboot(task)
else:
LOG.error('Power state %(state)s is not implemented for node '
'%(node)s using the "agent" power interface',
{'node': task.node.uuid, 'state': power_state})
raise exception.PowerStateFailure(pstate=power_state)
def reboot(self, task, timeout=None):
"""Perform a reboot of the task's node.
Only soft reboot is implemented.
:param task: A TaskManager instance containing the node to act on.
:param timeout: timeout (in seconds) positive integer (> 0) for any
power state. ``None`` indicates to use default timeout.
"""
node = task.node
self._client.reboot(node)
info = node.driver_internal_info
# NOTE(dtantsur): wipe the agent token, otherwise the rebooted agent
# won't be able to heartbeat. This is mostly a precaution since the
# calling code in conductor is expected to handle it.
if not info.get('agent_secret_token_pregenerated'):
info.pop('agent_secret_token', None)
# NOTE(dtantsur): the URL may change on reboot, wipe it as well (but
# only after we call reboot).
info.pop('agent_url', None)
node.driver_internal_info = info
node.save()
LOG.debug('Requested reboot of node %(node)s via the agent, waiting '
'%(wait)d seconds for the node to power down',
{'node': task.node.uuid, 'wait': _POWER_WAIT})
time.sleep(_POWER_WAIT)
if (node.provision_state in (states.DEPLOYING, states.CLEANING)
and (node.driver_internal_info.get('deployment_reboot')
or node.driver_internal_info.get('cleaning_reboot'))):
# NOTE(dtantsur): we need to downgrade the lock otherwise
# heartbeats won't be processed. It should not have side effects
# for nodes in DEPLOYING/CLEANING.
task.downgrade_lock()
try:
self._wait_for_reboot(task, timeout)
finally:
# The caller probably expects a lock, so re-acquire it
task.upgrade_lock()
def _wait_for_reboot(self, task, timeout):
wait = CONF.agent.post_deploy_get_power_state_retry_interval
if not timeout:
timeout = CONF.agent.post_deploy_get_power_state_retries * wait
@tenacity.retry(
stop=tenacity.stop_after_delay(timeout),
retry=(tenacity.retry_if_result(lambda result: not result)
| tenacity.retry_if_exception_type(
exception.AgentConnectionFailed)),
wait=tenacity.wait_fixed(wait),
reraise=True)
def _wait_until_rebooted(task):
try:
status = self._client.get_commands_status(
task.node, retry_connection=False, expect_errors=True)
except exception.AgentConnectionFailed:
LOG.debug('Still waiting for the agent to come back on the '
'node %s', task.node.uuid)
raise
if any(cmd['command_name'] == agent_client.REBOOT_COMMAND
for cmd in status):
LOG.debug('Still waiting for the agent to power off on the '
'node %s', task.node.uuid)
return False
return True
try:
_wait_until_rebooted(task)
except exception.AgentConnectionFailed as exc:
msg = _('Agent failed to come back on %(node)s with the "agent" '
'power interface: %(exc)s') % {
'node': task.node.uuid, 'exc': exc}
LOG.error(msg)
raise exception.PowerStateFailure(msg)
except Exception as exc:
LOG.error('Could not reboot node %(node)s with the "agent" power '
'interface: %(exc)s',
{'node': task.node.uuid, 'exc': exc})
raise exception.PowerStateFailure(
_('Unexpected error when rebooting through the agent: %s')
% exc)
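# Self-contained sketch of the retry pattern used by _wait_for_reboot above:
# tenacity keeps calling while the result is falsy (or the watched exception
# is raised), waiting a fixed interval, up to a total time budget. The
# numbers and names below are illustrative assumptions.
if __name__ == '__main__':
    import tenacity
    calls = {'count': 0}
    @tenacity.retry(stop=tenacity.stop_after_delay(10),
                    retry=tenacity.retry_if_result(lambda result: not result),
                    wait=tenacity.wait_fixed(1),
                    reraise=True)
    def _poll():
        calls['count'] += 1
        return calls['count'] >= 3  # succeeds on the third attempt
    _poll()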
|
apache-2.0
| -4,038,878,199,996,820,000 | 38.840909 | 79 | 0.607872 | false |
christophreimer/pytesmo
|
tests/test_sat/test_ers.py
|
1
|
4902
|
# Copyright (c) 2013,Vienna University of Technology,
# Department of Geodesy and Geoinformation
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Vienna University of Technology, Department of
# Geodesy and Geoinformation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL VIENNA UNIVERSITY OF TECHNOLOGY,
# DEPARTMENT OF GEODESY AND GEOINFORMATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
Created on Nov 7, 2013
@author: Christoph Paulik christoph.paulik@geo.tuwien.ac.at
'''
import os
import unittest
from pytesmo.io.sat import ers
from datetime import datetime
import numpy as np
class TestERSNetCDF(unittest.TestCase):
def setUp(self):
self.ers_folder = os.path.join(os.path.dirname(__file__),
'..', 'test-data', 'sat',
'ers', '55R11')
self.ers_grid_folder = os.path.join(os.path.dirname(__file__),
'..', 'test-data', 'sat',
'ascat', 'netcdf', 'grid')
# init the ERS_SSM reader with the paths
self.ers_SSM_reader = ers.ERS_SSM(
self.ers_folder, self.ers_grid_folder)
def test_read_ssm(self):
gpi = 2329253
result = self.ers_SSM_reader.read_ssm(gpi, absolute_values=True)
assert result.gpi == gpi
np.testing.assert_approx_equal(
result.longitude, 14.28413, significant=4)
np.testing.assert_approx_equal(
result.latitude, 45.698074, significant=4)
assert list(result.data.columns) == ['orbit_dir', 'proc_flag',
'sm', 'sm_noise',
'sm_por_gldas',
'sm_noise_por_gldas',
'sm_por_hwsd',
'sm_noise_por_hwsd',
'frozen_prob',
'snow_prob']
assert len(result.data) == 478
assert result.data.ix[15].name == datetime(1992, 1, 27, 21, 11, 42, 55)
assert result.data.ix[15]['sm'] == 57
assert result.data.ix[15]['sm_noise'] == 7
assert result.data.ix[15]['frozen_prob'] == 18
assert result.data.ix[15]['snow_prob'] == 0
assert result.data.ix[15]['orbit_dir'].decode('utf-8') == 'A'
assert result.data.ix[15]['proc_flag'] == 0
np.testing.assert_approx_equal(
result.data.ix[15]['sm_por_gldas'], 0.3090667, significant=6)
np.testing.assert_approx_equal(
result.data.ix[15]['sm_noise_por_gldas'], 0.03795555,
significant=6)
np.testing.assert_approx_equal(
result.data.ix[15]['sm_por_hwsd'], 0.2452333, significant=6)
np.testing.assert_approx_equal(
result.data.ix[15]['sm_noise_por_hwsd'], 0.03011637, significant=6)
assert result.topo_complex == 14
assert result.wetland_frac == 0
np.testing.assert_approx_equal(
result.porosity_gldas, 0.54222, significant=5)
np.testing.assert_approx_equal(
result.porosity_hwsd, 0.430234, significant=5)
def test_neighbor_search(self):
self.ers_SSM_reader._load_grid_info()
gpi, distance = self.ers_SSM_reader.grid.find_nearest_gpi(3.25, 46.13)
assert gpi == 2346869
np.testing.assert_approx_equal(distance, 2267.42, significant=2)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
| 3,637,355,955,954,336,000 | 43.563636 | 81 | 0.612403 | false |
andrewromanenco/pyjvm
|
pyjvm/platform/java/lang/object.py
|
1
|
3672
|
# PyJVM (pyjvm.org) Java Virtual Machine implemented in pure Python
# Copyright (C) 2014 Andrew Romanenco (andrew@romanenco.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''See natives.txt in documentation'''
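# Naming convention (inferred from the functions below; see natives.txt for
# the authoritative rule): each native is looked up by the Java method's
# qualified name plus its JVM type descriptor, with the non-identifier
# characters (. / ( ) ;) turned into underscores, e.g.
# java.lang.Object.wait(J)V -> java_lang_Object_wait__J_V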
import time
from pyjvm.jvmo import JArray
from pyjvm.thread import SkipThreadCycle
def java_lang_Object_getClass___Ljava_lang_Class_(frame, args):
assert len(args) > 0
assert type(args[0]) is tuple
assert args[0][0] == "ref" and args[0][1] > 0
o = frame.vm.heap[args[0][1]]
klass = o.java_class
ref = frame.vm.get_class_class(klass)
frame.stack.append(ref)
def java_lang_Object_hashCode___I(frame, args):
assert type(args[0]) is tuple
frame.stack.append(args[0][1]) # address in heap is object's hash
def java_lang_Object_wait__J_V(frame, args):
ref = args[0]
waiting_time = args[1]
assert ref is not None
# NPE
o = frame.vm.heap[ref[1]]
assert o is not None
t = frame.thread
if t.is_notified:
t.waiting_notify = False
if "@monitor" in o.fields:
frame.stack.append(ref)
frame.stack.append(waiting_time)
raise SkipThreadCycle()
else:
o.waiting_list.remove(t)
o.fields["@monitor"] = t
o.fields["@monitor_count"] = t.monitor_count_cache
t.is_notified = False
return
if t.waiting_notify:
if t.sleep_until > 0:
now = int(time.time()) * 1000
if now <= t.sleep_until:
if "@monitor" in o.fields:
frame.stack.append(ref)
frame.stack.append(waiting_time)
raise SkipThreadCycle()
else:
o.waiting_list.remove(t)
o.fields["@monitor"] = t
o.fields["@monitor_count"] = t.monitor_count_cache
t.is_notified = False
t.waiting_notify = False
return
frame.stack.append(ref)
frame.stack.append(waiting_time)
raise SkipThreadCycle()
else:
assert "@monitor" in o.fields
assert o.fields["@monitor"] == frame.thread
o.waiting_list.append(t)
t.waiting_notify = True
if waiting_time[1] > 0:
now = int(time.time()) * 1000
t.sleep_until = now + waiting_time[1]
t.monitor_count_cache = o.fields["@monitor_count"]
del o.fields["@monitor"]
del o.fields["@monitor_count"]
frame.stack.append(ref)
frame.stack.append(waiting_time)
raise SkipThreadCycle()
def java_lang_Object_clone___Ljava_lang_Object_(frame, args):
# TODO NPE
o = frame.vm.heap[args[0][1]]
if o.java_class.is_array:
clone = JArray(o.java_class, frame.vm)
clone.values = o.values[:]
ref = frame.vm.add_to_heap(clone)
frame.stack.append(ref)
else:
clone = o.java_class.get_instance(frame.vm)
clone.fields = o.fields.copy()
ref = frame.vm.add_to_heap(clone)
frame.stack.append(ref)
|
gpl-3.0
| -3,382,427,627,073,679,000 | 33.971429 | 71 | 0.601852 | false |
credativUK/vdirsyncer
|
tests/cli/test_config.py
|
1
|
4939
|
import io
from textwrap import dedent
import pytest
from vdirsyncer import cli
@pytest.fixture
def read_config(tmpdir):
def inner(cfg):
f = io.StringIO(dedent(cfg.format(base=str(tmpdir))))
return cli.utils.read_config(f)
return inner
def test_read_config(read_config, monkeypatch):
errors = []
monkeypatch.setattr('vdirsyncer.cli.cli_logger.error', errors.append)
general, pairs, storages = read_config(u'''
[general]
status_path = /tmp/status/
[pair bob]
a = bob_a
b = bob_b
foo = bar
bam = true
[storage bob_a]
type = filesystem
path = /tmp/contacts/
fileext = .vcf
yesno = false
number = 42
[storage bob_b]
type = carddav
[bogus]
lol = true
''')
assert general == {'status_path': '/tmp/status/'}
assert pairs == {'bob': ('bob_a', 'bob_b', {'bam': True, 'foo': 'bar'})}
assert storages == {
'bob_a': {'type': 'filesystem', 'path': '/tmp/contacts/', 'fileext':
'.vcf', 'yesno': False, 'number': 42,
'instance_name': 'bob_a'},
'bob_b': {'type': 'carddav', 'instance_name': 'bob_b'}
}
assert len(errors) == 1
assert errors[0].startswith('Unknown section')
assert 'bogus' in errors[0]
def test_storage_instance_from_config(monkeypatch):
def lol(**kw):
assert kw == {'foo': 'bar', 'baz': 1}
return 'OK'
import vdirsyncer.storage
monkeypatch.setitem(vdirsyncer.cli.utils.storage_names._storages,
'lol', lol)
config = {'type': 'lol', 'foo': 'bar', 'baz': 1}
assert cli.utils.storage_instance_from_config(config) == 'OK'
def test_parse_pairs_args():
pairs = {
'foo': ('bar', 'baz', {'conflict_resolution': 'a wins'},
{'storage_option': True}),
'one': ('two', 'three', {'collections': 'a,b,c'}, {}),
'eins': ('zwei', 'drei', {'ha': True}, {})
}
assert sorted(
cli.parse_pairs_args(['foo/foocoll', 'one', 'eins'], pairs)
) == [
('eins', set()),
('foo', {'foocoll'}),
('one', set()),
]
def test_missing_general_section(read_config):
with pytest.raises(cli.CliError) as excinfo:
read_config(u'''
[pair my_pair]
a = my_a
b = my_b
[storage my_a]
type = filesystem
path = {base}/path_a/
fileext = .txt
[storage my_b]
type = filesystem
path = {base}/path_b/
fileext = .txt
''')
assert 'Invalid general section.' in excinfo.value.msg
def test_wrong_general_section(read_config):
with pytest.raises(cli.CliError) as excinfo:
read_config(u'''
[general]
wrong = true
''')
assert 'Invalid general section.' in excinfo.value.msg
assert excinfo.value.problems == [
'general section doesn\'t take the parameters: wrong',
'general section is missing the parameters: status_path'
]
def test_invalid_storage_name():
f = io.StringIO(dedent(u'''
[general]
status_path = {base}/status/
[storage foo.bar]
'''))
with pytest.raises(cli.CliError) as excinfo:
cli.utils.read_config(f)
assert 'invalid characters' in str(excinfo.value).lower()
def test_parse_config_value(capsys):
invalid = object()
def x(s):
try:
rv = cli.utils.parse_config_value(s)
except ValueError:
return invalid
else:
warnings = capsys.readouterr()[1]
return rv, len(warnings.splitlines())
assert x('123 # comment!') is invalid
assert x('True') == ('True', 1)
assert x('False') == ('False', 1)
assert x('Yes') == ('Yes', 1)
assert x('None') == ('None', 1)
assert x('"True"') == ('True', 0)
assert x('"False"') == ('False', 0)
assert x('"123 # comment!"') == ('123 # comment!', 0)
assert x('true') == (True, 0)
assert x('false') == (False, 0)
assert x('null') == (None, 0)
assert x('3.14') == (3.14, 0)
assert x('') == ('', 0)
assert x('""') == ('', 0)
def test_invalid_collections_arg():
f = io.StringIO(dedent(u'''
[general]
status_path = /tmp/status/
[pair foobar]
a = foo
b = bar
collections = [null]
[storage foo]
type = filesystem
path = /tmp/foo/
fileext = .txt
[storage bar]
type = filesystem
path = /tmp/bar/
fileext = .txt
'''))
with pytest.raises(cli.utils.CliError) as excinfo:
cli.utils.read_config(f)
assert (
'Section `pair foobar`: `collections` parameter must be a list of '
'collection names (strings!) or `null`.'
) in str(excinfo.value)
|
mit
| 157,188,139,893,423,170 | 24.723958 | 76 | 0.524803 | false |
DOAJ/doaj
|
portality/formcontext/render.py
|
1
|
23506
|
# DEPRECATED - this file is dead, and should be removed by the end of the redesign project
from portality.formcontext.formhelper import FormHelperBS3
from portality.formcontext.choices import Choices
from copy import deepcopy
class Renderer(object):
def __init__(self):
self.FIELD_GROUPS = {}
self.fh = FormHelperBS3()
self._error_fields = []
self._disabled_fields = []
self._disable_all_fields = False
self._highlight_completable_fields = False
def check_field_group_exists(self, field_group_name):
""" Return true if the field group exists in this form """
group_def = self.FIELD_GROUPS.get(field_group_name)
if group_def is None:
return False
else:
return True
def render_field_group(self, form_context, field_group_name=None, group_cfg=None):
if field_group_name is None:
return self._render_all(form_context)
# get the group definition
group_def = self.FIELD_GROUPS.get(field_group_name)
if group_def is None:
return ""
# build the frag
frag = ""
for entry in group_def:
field_name = list(entry.keys())[0]
config = entry.get(field_name)
config = deepcopy(config)
config = self._rewrite_extra_fields(form_context, config)
field = form_context.form[field_name]
if field_name in self.disabled_fields or self._disable_all_fields is True:
config["disabled"] = "disabled"
if self._highlight_completable_fields is True:
valid = field.validate(form_context.form)
config["complete_me"] = not valid
if group_cfg is not None:
config.update(group_cfg)
frag += self.fh.render_field(field, **config)
return frag
@property
def error_fields(self):
return self._error_fields
def set_error_fields(self, fields):
self._error_fields = fields
@property
def disabled_fields(self):
return self._disabled_fields
def set_disabled_fields(self, fields):
self._disabled_fields = fields
def disable_all_fields(self, disable):
self._disable_all_fields = disable
def _rewrite_extra_fields(self, form_context, config):
if "extra_input_fields" in config:
config = deepcopy(config)
for opt, field_ref in config.get("extra_input_fields").items():
extra_field = form_context.form[field_ref]
config["extra_input_fields"][opt] = extra_field
return config
def _render_all(self, form_context):
frag = ""
for field in form_context.form:
frag += self.fh.render_field(form_context, field.short_name)
return frag
def find_field(self, field, field_group):
for index, item in enumerate(self.FIELD_GROUPS[field_group]):
if field in item:
return index
def insert_field_after(self, field_to_insert, after_this_field, field_group):
self.FIELD_GROUPS[field_group].insert(
self.find_field(after_this_field, field_group) + 1,
field_to_insert
)
class BasicJournalInformationRenderer(Renderer):
def __init__(self):
super(BasicJournalInformationRenderer, self).__init__()
# allow the subclass to define the order the groups should be considered in. This is useful for
# numbering questions and determining first errors
self.NUMBERING_ORDER = ["basic_info", "editorial_process", "openness", "content_licensing", "copyright"]
self.ERROR_CHECK_ORDER = deepcopy(self.NUMBERING_ORDER)
# define the basic field groups
self.FIELD_GROUPS = {
"basic_info" : [
{"title" : {"class": "input-xlarge"}},
{"url" : {"class": "input-xlarge"}},
{"alternative_title" : {"class": "input-xlarge"}},
{"pissn" : {"class": "input-small", "size": "9", "maxlength": "9"}},
{"eissn" : {"class": "input-small", "size": "9", "maxlength": "9"}},
{"publisher" : {"class": "input-xlarge"}},
{"society_institution" : {"class": "input-xlarge"}},
{"platform" : {"class": "input-xlarge"}},
{"contact_name" : {}},
{"contact_email" : {}},
{"confirm_contact_email" : {}},
{"country" : {"class": "input-large"}},
{"processing_charges" : {}},
{"processing_charges_url" : {"class": "input-xlarge"}},
{"processing_charges_amount" : {"class": "input-mini"}},
{"processing_charges_currency" : {"class": "input-large"}},
{"submission_charges" : {}},
{"submission_charges_url" : {"class": "input-xlarge"}},
{"submission_charges_amount" : {"class": "input-mini"}},
{"submission_charges_currency" : {"class": "input-large"}},
{"waiver_policy" : {}},
{"waiver_policy_url" : {"class": "input-xlarge"}},
{
"digital_archiving_policy" : {
"extra_input_fields" : {
Choices.digital_archiving_policy_val("other") : "digital_archiving_policy_other",
Choices.digital_archiving_policy_val("library") : "digital_archiving_policy_library"
}
}
},
{"digital_archiving_policy_url" : {"class": "input-xlarge"}},
{"crawl_permission" : {}},
{
"article_identifiers" : {
"extra_input_fields": {
Choices.article_identifiers_val("other") : "article_identifiers_other"
}
}
},
{"download_statistics" : {}},
{"download_statistics_url" : {"class": "input-xlarge"}},
{"first_fulltext_oa_year" : {"class": "input-mini"}},
{
"fulltext_format" : {
"extra_input_fields": {
Choices.fulltext_format_val("other") : "fulltext_format_other"
}
}
},
{"keywords" : {"class": "input-xlarge"}},
{"languages" : {"class": "input-xlarge"}}
],
"editorial_process" : [
{"editorial_board_url" : {"class": "input-xlarge"}},
{"review_process" : {"class" : "form-control input-xlarge"}},
{"review_process_url" : {"class": "input-xlarge"}},
{"aims_scope_url" : {"class": "input-xlarge"}},
{"instructions_authors_url" : {"class": "input-xlarge"}},
{"plagiarism_screening" : {}},
{"plagiarism_screening_url" : {"class": "input-xlarge"}},
{"publication_time" : {"class": "input-tiny"}}
],
"openness" : [
{"oa_statement_url" : {"class": "input-xlarge"}}
],
"content_licensing" : [
{"license_embedded" : {}},
{"license_embedded_url" : {"class": "input-xlarge"}},
{
"license" : {
"extra_input_fields": {
Choices.licence_val("other") : "license_other"
}
}
},
{"license_checkbox" : {}},
{"license_url" : {"class": "input-xlarge"}},
{"open_access" : {}},
{
"deposit_policy" : {
"extra_input_fields": {
Choices.open_access_val("other") : "deposit_policy_other"
}
}
}
],
"copyright" : [
{
"copyright" : {}
},
{"copyright_url" : {"class": "input-xlarge"}},
{
"publishing_rights" : {}
},
{"publishing_rights_url" : {"class": "input-xlarge"}}
]
}
def check_field_groups(self):
'''
Check whether field groups which are being referenced in various renderer lists actually exist.
Should only be called in self.__init__ by non-abstract classes,
i.e. the bottom of the inheritance tree, the ones that would
actually get used to render forms.
Otherwise the check becomes meaningless (and always fails) as it will check whether
all groups are defined in a class that isn't supposed to have all
the definitions - being abstract, it may only have a few common ones.
'''
for group in self.NUMBERING_ORDER:
try:
self.FIELD_GROUPS[group]
except KeyError as e:
raise KeyError(
'Can\'t number a group which does not exist. '
'Field group "{0}" is not defined in self.FIELD_GROUPS '
'but is present in self.NUMBERING_ORDER. '
'This is in renderer {1}.'.format(str(e), self.__class__.__name__)
)
for group in self.ERROR_CHECK_ORDER:
try:
self.FIELD_GROUPS[group]
except KeyError as e:
raise KeyError(
'Can\'t check a group which does not exist for errors. '
'Field group "{0}" is not defined in self.FIELD_GROUPS '
'but is present in self.ERROR_CHECK_ORDER. '
'This is in renderer {1}.'.format(str(e), self.__class__.__name__)
)
def number_questions(self):
q = 1
for g in self.NUMBERING_ORDER:
cfg = self.FIELD_GROUPS.get(g)
for obj in cfg:
field = list(obj.keys())[0]
obj[field]["q_num"] = str(q)
q += 1
def question_number(self, field):
for g in self.FIELD_GROUPS:
cfg = self.FIELD_GROUPS.get(g)
for obj in cfg:
f = list(obj.keys())[0]
if f == field and "q_num" in obj[f]:
return obj[f]["q_num"]
return ""
def set_error_fields(self, fields):
super(BasicJournalInformationRenderer, self).set_error_fields(fields)
# find the first error in the form and tag it
found = False
for g in self.ERROR_CHECK_ORDER:
cfg = self.FIELD_GROUPS.get(g)
# If a group is specified as part of the error checks but is
# not defined in self.FIELD_GROUPS then do not try to check
# it for errors - there are no fields to check.
if cfg:
for obj in cfg:
field = list(obj.keys())[0]
if field in self.error_fields:
obj[field]["first_error"] = True
found = True
break
if found:
break
class ApplicationRenderer(BasicJournalInformationRenderer):
def __init__(self):
super(ApplicationRenderer, self).__init__()
# allow the subclass to define the order the groups should be considered in. This is useful for
# numbering questions and determining first errors
self.NUMBERING_ORDER.append("submitter_info")
self.ERROR_CHECK_ORDER = deepcopy(self.NUMBERING_ORDER) # in this case these can be the same
self.FIELD_GROUPS["submitter_info"] = [
{"suggester_name" : {"label_width" : 5}},
{"suggester_email" : {"label_width" : 5, "class": "input-xlarge"}},
{"suggester_email_confirm" : {"label_width" : 5, "class": "input-xlarge"}},
]
self.insert_field_after(
field_to_insert={"articles_last_year" : {"class": "input-mini"}},
after_this_field="submission_charges_currency",
field_group="basic_info"
)
self.insert_field_after(
field_to_insert={"articles_last_year_url" : {"class": "input-xlarge"}},
after_this_field="articles_last_year",
field_group="basic_info"
)
self.insert_field_after(
field_to_insert={"metadata_provision" : {}},
after_this_field="article_identifiers",
field_group="basic_info"
)
class PublicApplicationRenderer(ApplicationRenderer):
def __init__(self):
super(PublicApplicationRenderer, self).__init__()
# explicitly call number questions, as it is not called by default (because other implementations may want
# to mess with the group order and field groups first)
self.number_questions()
self.check_field_groups()
class PublisherUpdateRequestRenderer(ApplicationRenderer):
def __init__(self):
super(PublisherUpdateRequestRenderer, self).__init__()
self.NUMBERING_ORDER.remove("submitter_info")
self.ERROR_CHECK_ORDER = deepcopy(self.NUMBERING_ORDER)
del self.FIELD_GROUPS["submitter_info"]
# explicitly call number questions, as it is not called by default (because other implementations may want
        # to mess with the group order and field groups first)
self.number_questions()
self.check_field_groups()
self._highlight_completable_fields = True
class PublisherUpdateRequestReadOnlyRenderer(ApplicationRenderer):
def __init__(self):
super(PublisherUpdateRequestReadOnlyRenderer, self).__init__()
self.ERROR_CHECK_ORDER = []
self.number_questions()
self.check_field_groups()
class ManEdApplicationReviewRenderer(ApplicationRenderer):
def __init__(self):
super(ManEdApplicationReviewRenderer, self).__init__()
# extend the list of field groups
self.FIELD_GROUPS["status"] = [
{"application_status" : {"class" : "form-control input-large"}}
]
self.FIELD_GROUPS["account"] = [
{"owner" : {"class" : "input-large"}}
]
self.FIELD_GROUPS["subject"] = [
{"subject" : {}}
]
self.FIELD_GROUPS["editorial"] = [
{"editor_group" : {"class" : "input-large"}},
{"editor" : {"class" : "form-control input-large"}},
]
self.FIELD_GROUPS["notes"] = [
{
"notes" : {
"render_subfields_horizontal" : True,
"container_class" : "deletable",
"subfield_display-note" : "8",
"subfield_display-date" : "3",
"label_width" : 0
}
}
]
self.FIELD_GROUPS["seal"] = [
{"doaj_seal" : {}}
]
self.FIELD_GROUPS["continuations"] = [
{"replaces" : {"class": "input-xlarge"}},
{"is_replaced_by" : {"class": "input-xlarge"}},
{"discontinued_date" : {}}
]
self.ERROR_CHECK_ORDER = ["status", "account", "editorial", "continuations", "subject"] + self.ERROR_CHECK_ORDER + ["notes"] # but do NOT include the new groups in self.NUMBERING_ORDER, don"t want them numbered
self.number_questions()
self.check_field_groups()
self._highlight_completable_fields = True
class EditorApplicationReviewRenderer(ApplicationRenderer):
def __init__(self):
super(EditorApplicationReviewRenderer, self).__init__()
# extend the list of field groups
self.FIELD_GROUPS["status"] = [
{"application_status" : {"class" : "form-control input-large"}}
]
self.FIELD_GROUPS["subject"] = [
{"subject" : {}}
]
self.FIELD_GROUPS["editorial"] = [
{"editor_group" : {"class" : "input-large"}},
{"editor" : {"class" : "form-control input-large"}},
]
self.FIELD_GROUPS["notes"] = [
{
"notes" : {
"render_subfields_horizontal" : True,
"subfield_display-note" : "8",
"subfield_display-date" : "3",
"label_width" : 0
}
}
]
self.ERROR_CHECK_ORDER = ["status", "editorial", "subject"] + self.ERROR_CHECK_ORDER + ["notes"]
# don"t want the extra groups numbered so not added to self.NUMBERING_ORDER
self.number_questions()
self.check_field_groups()
self._highlight_completable_fields = True
class AssEdApplicationReviewRenderer(ApplicationRenderer):
def __init__(self):
super(AssEdApplicationReviewRenderer, self).__init__()
# extend the list of field groups
self.FIELD_GROUPS["status"] = [
{"application_status" : {"class" : "form-control input-large"}}
]
self.FIELD_GROUPS["subject"] = [
{"subject" : {}}
]
self.FIELD_GROUPS["notes"] = [
{
"notes" : {
"render_subfields_horizontal" : True,
"subfield_display-note" : "8",
"subfield_display-date" : "3",
"label_width" : 0
}
}
]
self.ERROR_CHECK_ORDER = ["status", "subject"] + self.ERROR_CHECK_ORDER + ["notes"]
self.number_questions()
self.check_field_groups()
self._highlight_completable_fields = True
class JournalRenderer(BasicJournalInformationRenderer):
def __init__(self):
super(JournalRenderer, self).__init__()
self.FIELD_GROUPS["subject"] = [
{"subject" : {}}
]
self.FIELD_GROUPS["old_journal_fields"] = [
{"author_pays": {}},
{"author_pays_url": {"class": "input-xlarge"}},
{"oa_end_year": {"class": "input-mini"}},
]
def render_field_group(self, form_context, field_group_name=None, **kwargs):
if field_group_name == "old_journal_fields":
display_old_journal_fields = False
for old_field_def in self.FIELD_GROUPS["old_journal_fields"]:
old_field_name = list(old_field_def.keys())[0]
old_field = getattr(form_context.form, old_field_name)
if old_field:
if old_field.data and old_field.data != 'None':
display_old_journal_fields = True
if not display_old_journal_fields:
return ""
# otherwise let it fall through and render the old journal fields
return super(JournalRenderer, self).render_field_group(form_context, field_group_name, **kwargs)
class ManEdJournalReviewRenderer(JournalRenderer):
def __init__(self):
super(ManEdJournalReviewRenderer, self).__init__()
# extend the list of field groups
self.FIELD_GROUPS["account"] = [
{"owner" : {"class" : "input-large"}}
]
self.FIELD_GROUPS["editorial"] = [
{"editor_group" : {"class" : "input-large"}},
{"editor" : {"class" : "form-control input-large"}},
]
self.FIELD_GROUPS["notes"] = [
{
"notes" : {
"render_subfields_horizontal" : True,
"container_class" : "deletable",
"subfield_display-note" : "8",
"subfield_display-date" : "3",
"label_width" : 0
}
}
]
self.FIELD_GROUPS["make_all_fields_optional"] = [
{"make_all_fields_optional": {}}
]
self.FIELD_GROUPS["seal"] = [
{"doaj_seal" : {}}
]
self.FIELD_GROUPS["continuations"] = [
{"replaces" : {"class": "input-xlarge"}},
{"is_replaced_by" : {"class": "input-xlarge"}},
{"discontinued_date" : {}}
]
self.ERROR_CHECK_ORDER = ["make_all_fields_optional", "account", "editorial", "continuations", "subject"] + self.ERROR_CHECK_ORDER + ["notes"]
self.number_questions()
self.check_field_groups()
self._highlight_completable_fields = True
class ManEdJournalBulkEditRenderer(Renderer):
def __init__(self):
super(ManEdJournalBulkEditRenderer, self).__init__()
self.FIELD_GROUPS = {
"main" : [
{"publisher" : {"class": "input-xlarge"}},
{"platform" : {"class": "input-xlarge"}},
{"country" : {"class": "input-large"}},
{"owner" : {"class" : "input-large"}},
{"contact_name" : {"class" : "input-large"}},
{"contact_email" : {"class" : "input-large"}},
{"doaj_seal" : {"class" : "form-control input-large"}}
]
}
class EditorJournalReviewRenderer(JournalRenderer):
def __init__(self):
self.display_old_journal_fields = False # an instance var flag for the template
super(EditorJournalReviewRenderer, self).__init__()
self.FIELD_GROUPS["editorial"] = [
{"editor_group" : {"class" : "input-large"}},
{"editor" : {"class" : "form-control input-large"}},
]
self.FIELD_GROUPS["notes"] = [
{
"notes" : {
"render_subfields_horizontal" : True,
"subfield_display-note" : "8",
"subfield_display-date" : "3",
"label_width" : 0
}
}
]
self.ERROR_CHECK_ORDER = ["editorial", "subject"] + self.ERROR_CHECK_ORDER + ["notes"]
# don't want the extra groups numbered so not added to self.NUMBERING_ORDER
self.number_questions()
self.check_field_groups()
self._highlight_completable_fields = True
class AssEdJournalReviewRenderer(JournalRenderer):
def __init__(self):
super(AssEdJournalReviewRenderer, self).__init__()
# extend the list of field groups
self.FIELD_GROUPS["notes"] = [
{
"notes" : {
"render_subfields_horizontal" : True,
"subfield_display-note" : "8",
"subfield_display-date" : "3",
"label_width" : 0
}
}
]
self.ERROR_CHECK_ORDER = ["subject"] + self.ERROR_CHECK_ORDER + ["notes"]
self.number_questions()
self.check_field_groups()
self._highlight_completable_fields = True
class ReadOnlyJournalRenderer(JournalRenderer):
def __init__(self):
super(ReadOnlyJournalRenderer, self).__init__()
# extend the list of field groups
self.FIELD_GROUPS["notes"] = [
{
"notes" : {
"render_subfields_horizontal" : True,
"subfield_display-note" : "8",
"subfield_display-date" : "3",
"label_width" : 0
}
}
]
self.ERROR_CHECK_ORDER = []
self.number_questions()
self.check_field_groups()
|
apache-2.0
| -1,921,230,620,043,289,300 | 35.5 | 219 | 0.51506 | false |
babble/babble
|
include/jython/Lib/test/test_jser2.py
|
1
|
2399
|
from test import test_support
import unittest
import java
from org.python.util import PythonObjectInputStream
def serialize(o, special=0):
b = java.io.ByteArrayOutputStream()
objs = java.io.ObjectOutputStream(b)
objs.writeObject(o)
if not special:
OIS = java.io.ObjectInputStream
else:
OIS = PythonObjectInputStream
objs = OIS(java.io.ByteArrayInputStream(b.toByteArray()))
return objs.readObject()
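# Hedged round-trip sketch (added comment): serialize writes the object to a
# Java ObjectOutputStream backed by a byte array and reads it back, so e.g.
# serialize([1, "a", 3.0]) should compare equal to the original list. The
# special=1 path swaps in PythonObjectInputStream, which is only required for
# Python subclasses of Java classes (see test_jsubcl below).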
from jser2_classes import A, AJ, N, NL, NT
class TestJavaSerialisation(unittest.TestCase):
def test_list(self):
l = [1,"a", 3.0]
l1 = serialize(l)
self.assertEqual(l, l1)
def test_dict(self):
d = {'a': 3.0}
d1 = serialize(d)
self.assertEqual(d, d1)
def test_tuple(self):
t = (1, 'a')
t1 = serialize(t)
self.assertEqual(t, t1)
def test_oldstyle(self):
a = A('x')
a1 = serialize(a)
self.assertEqual(a, a1)
# wasn't working in 2.1 either
#def test_oldstyle_cls(self):
# A1 = serialize(A)
# self.assert_(A is A1)
def test_jsubcl(self):
aj = AJ('x')
aj1 = serialize(aj, special=1)
self.assertEqual(aj, aj1)
def test_singletons(self):
for v in (None, Ellipsis):
v1 = serialize(v)
self.assert_(v is v1)
v1 = serialize((v,))[0]
self.assert_(v is v1)
def test_NotImplemented(self):
# XXX serialize(NotImplemented) is None because of __tojava__
v1 = serialize((NotImplemented,))[0]
self.assert_(v1 is NotImplemented)
def test_type(self):
list1 = serialize(list)
self.assert_(list1 is list)
list1 = serialize((list,))[0]
self.assert_(list1 is list)
def test_user_type(self):
N1 = serialize(N)
self.assert_(N1 is N)
N1 = serialize((N,))[0]
self.assert_(N1 is N)
def test_newstyle(self):
n = N('x')
n1 = serialize(n)
self.assertEqual(n, n1)
def test_newstyle_list(self):
nl = NL('x',1,2,3)
nl1 = serialize(nl)
self.assertEqual(nl, nl1)
def test_newstyle_tuple(self):
nt = NT('x',1,2,3)
nt1 = serialize(nt)
self.assertEqual(nt, nt1)
def test_main():
test_support.run_unittest(TestJavaSerialisation)
if __name__ == "__main__":
test_main()
|
apache-2.0
| 2,428,962,986,861,832,000 | 24.521277 | 69 | 0.571905 | false |
fracturica/shardlib
|
shardlib/comp_analysis/SIMCompAnalysis.py
|
1
|
23592
|
import dataProcessing as dp
import plotFuncs as pf
import numpy as np
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
from mpl_toolkits.axes_grid1 import host_subplot
import mpl_toolkits.axisartist as AA
from matplotlib.path import Path
from mpl_toolkits.mplot3d import Axes3D
import matplotlib as mpl
from compAnalysisBase import CompAnalysisBase
class SIMCompAnalysis(CompAnalysisBase):
def __init__(self, leavesQueue, criteria, sifs):
self.queue = leavesQueue
self.sifs = sifs
self.crit = criteria
def printQueueItems(self, items):
self.queue.printTitle()
for i in sorted(items):
self.queue.printQueueItem(i)
def getItemNodeDict(self, items, queue):
qdict = queue.getQueueDict()
return dict([(i, qdict[i]) for i in items])
def calcAlphaVal(self, sif, item):
vals = len(self.dataDicts[0][0][sif][item])
if vals > 1000:
return 0.1
else:
return 1
class BoxCompPlot(SIMCompAnalysis):
def createCompBoxPlot(self, items, errType, fig):
self.items = items
self.errType = errType
self.createDataDictAndEstBoxPlot()
self.createDataStrBoxPlot()
self.createFigure(fig)
def createDataStrBoxPlot(self):
dd = self.getItemNodeDict(self.items, self.queue)
optKey = self.getLeavesOptKey()
data = [dd, optKey, 'Number in Queue', '']
self.dataStr = [data]
def getLeavesOptKey(self):
return sorted(self.est.items(), key=lambda x: abs(x[1]))[0][0]
def createDataDictAndEstBoxPlot(self):
dataDict = {s: {} for s in self.sifs}
est = {i: {} for i in self.items}
dd = self.getItemNodeDict(self.items, self.queue)
for i in self.items:
node = dd[i]
errs, est[i] = self.getNodeErrsEst(node)
for s in self.sifs:
dataDict[s][i] = errs[s]
self.est = {i: est[i][self.crit[1]] for i in self.items}
self.dataDicts = [dataDict]
def getNodeErrsEst(self, node):
adn = dp.AnalysisNodeData(node, self.sifs)
adn.performOperations()
est = adn.getEstimates()[self.crit[0]]
errs = adn.getErrors()[self.errType]
return errs, est
class HistCompPlot(SIMCompAnalysis):
def createCompHistPlot(self, items, errType, xlim, fig):
self.fig = fig
self.items = items
self.errType = errType
self.xlim = xlim
self.createDataStr()
self.createDataDict()
self.createFigure()
def createDataStr(self):
dd = self.getItemNodeDict(self.items.keys(), self.queue)
xlabel = 'errors "{0}"'.format(self.errType)
data = [dd, None, xlabel, 'hist']
self.dataStr = [data]
def createDataDict(self):
data = {s: {} for s in self.sifs}
for i in self.items.keys():
node = self.dataStr[0][0][i]
errs = self.getNodeErrors(node)
for s in self.sifs:
data[s][i] = errs[s]
self.dataDicts = [data]
def getNodeErrors(self, node):
adn = dp.AnalysisNodeData(node, self.sifs)
adn.performOperations()
errs = adn.getErrors()[self.errType]
return errs
def setAxesXlim(self):
for ax in self.axes:
ax.set_xlim(self.xlim)
def setAxesYlim(self):
ymin, ymax = 10e16, 10e-16
for ax in self.axes:
y1, y2 = ax.get_ylim()
ymin = y1 if y1 < ymin else ymin
ymax = y2 if y2 > ymax else ymax
for ax in self.axes:
ax.set_ylim((ymin, ymax))
def setLegend(self, handles):
text = 'Node: '
labels = [text + str(i) for i in sorted(handles.keys())]
handles = [handles[i] for i in sorted(handles.keys())]
self.axes[0].legend(handles, labels, bbox_to_anchor=(1.02, 1),
loc=2, borderaxespad=0)
def createFigure(self):
self.axes = []
self.createFigureAxes()
handles = {}
for k in range(len(self.axes)):
s = self.sifs[k]
for i in self.items.keys():
n, b, p = self.axes[k].hist(
self.dataDicts[0][s][i],
self.items[i],
normed=True,
alpha=0.5)
handles[i] = p[0]
self.setAxesXlim()
self.setAxesYlim()
self.setLegend(handles)
self.setXlabels()
self.printQueueItems(self.items.keys())
class CorrCompPlot(SIMCompAnalysis):
def createCompCorrPlot(self, items, quantityType, ylim, fig):
self.fig = fig
self.items = items
self.qt = quantityType
self.ylim = ylim
self.createDataStr()
self.createDataDict()
self.createFigure()
def createDataStr(self):
dd = self.getItemNodeDict(self.items, self.queue)
data = [dd, None, 'analytical values', 'analysis vs analytical']
self.dataStr = [data]
def createDataDict(self):
dataX = {s: {} for s in self.sifs}
dataY = {s: {} for s in self.sifs}
for i in self.items:
node = self.dataStr[0][0][i]
anSol, res = self.getNodeParams(node)
for s in self.sifs:
dataX[s][i] = anSol[s]
dataY[s][i] = res[s]
self.dataDicts = [[dataX, dataY]]
def getNodeParams(self, node):
adn = dp.AnalysisNodeData(node, self.sifs)
adn.performOperations()
anSol = adn.getAnSol()
res = adn.getDataByType(self.qt)
return anSol, res
def getReferenceXYVals(self):
minV = {s: 10e16 for s in self.sifs}
maxV = {s: -10e16 for s in self.sifs}
for s in self.sifs:
for i in self.items:
mn = min(self.dataDicts[0][0][s][i])
mx = max(self.dataDicts[0][0][s][i])
minV[s] = mn if mn < minV[s] else minV[s]
maxV[s] = mx if mx > maxV[s] else maxV[s]
if self.qt == 'results':
refX = {s: [minV[s], maxV[s]] for s in self.sifs}
return refX, refX
elif self.qt in ['difference', 'normedDiff']:
refX = {s: [max(0, minV[s]), maxV[s]] for s in self.sifs}
refY = {s: [0, 0] for s in self.sifs}
return refX, refY
else:
raise NotImplementedError
def getXYVals(self, sif, item):
if self.qt == 'results':
X = self.dataDicts[0][0][sif][item]
Y = self.dataDicts[0][1][sif][item]
elif self.qt in ['difference', 'normedDiff']:
X = np.abs(self.dataDicts[0][0][sif][item])
Y = self.dataDicts[0][1][sif][item]
else:
raise NotImplementedError
return X, Y
def createPlot(self):
self.handles = {}
refX, refY = self.getReferenceXYVals()
for k in range(len(self.axes)):
s = self.sifs[k]
for i in self.items:
alpha = self.calcAlphaVal(s, i)
X, Y = self.getXYVals(s, i)
p, = self.axes[k].plot(X, Y, '.', alpha=alpha)
self.handles[i] = p
r, = self.axes[k].plot(refX[s], refY[s], 'k', lw=1.5)
self.handles['reference'] = r
def setXLim(self):
refX, refY = self.getReferenceXYVals()
for k in range(len(self.axes)):
s = self.sifs[k]
self.axes[k].set_xlim(refX[s])
def setLegend(self):
text = 'Node: '
labels = [text + str(i) for i in self.items]
handles = [self.handles[i] for i in self.items]
if 'reference' in self.handles.keys():
handles.append(self.handles['reference'])
labels.append('ref line')
self.axes[0].legend(handles, labels, bbox_to_anchor=(1.02, 1),
loc=2, borderaxespad=0)
def setYLim(self):
if isinstance(self.ylim, (list, tuple)):
for ax in self.axes:
ax.set_ylim(self.ylim)
def createFigure(self):
self.axes = []
self.createFigureAxes()
self.createPlot()
self.setXLim()
self.setLegend()
self.printQueueItems(self.items)
self.setYLim()
class RangeCompPlot(SIMCompAnalysis):
def createCompRangePlot(self, items, opts, fig):
self.fig = fig
self.items = items
self.opts = opts
self.createDataStr()
self.createDataDict()
self.createFigure()
def createDataStr(self):
self.dataStr = []
qdict = self.queue.getQueueDict()
for k in sorted(self.items.keys()):
optSim = self.getOptSim(qdict[k])
data = [{k: qdict[k]}, optSim, 'angles',
self.getSubplotTitle(qdict[k])]
self.dataStr.append(data)
def getOptSim(self, node):
if self.opts['optSim']:
sims = node.getSuccessfulMembers()
optSim = pf.getSimIdsWithLowestErrorPerDH(
sims, self.crit[0], self.crit[1]).values()[0][0]
return optSim
else:
return None
def createDataDict(self):
self.dataDicts = []
for item in self.dataStr:
node = item[0].values()[0]
self.dataDicts.append(self.getNodeParams(node))
def getNodeParams(self, node):
adn = dp.AnalysisNodeData(node, self.sifs)
adn.performOperations()
angles = adn.getAngles()
results = adn.getResults()
ansol = adn.getAnSol()
errors = adn.getErrors()[self.opts['errors']]
return angles, results, ansol, errors
def createSlices(self):
self.slices = []
i = 0
for k in sorted(self.items.keys()):
numInt = self.items[k]
angles = self.dataDicts[i][0]
sl = self.createSliceIndices(angles, numInt)
self.slices.append(sl)
i += 1
def createSliceIndices(self, vals, numInts):
intLen = (max(vals) - min(vals)) / float(numInts)
indices = [[] for i in range(numInts)]
for x in vals:
i = int(x / intLen)
if i < numInts - 1:
indices[i].append(x)
else:
indices[-1].append(x)
if [] in indices:
            raise ValueError('Empty interval produced; try reducing the number of intervals.')
sliceInd = [[] for i in range(numInts)]
for i in range(numInts):
minVal = indices[i][0]
maxVal = indices[i][-1]
ind0 = np.where(vals == minVal)[0][0]
ind1 = np.where(vals == maxVal)[-1][-1] + 1
sliceInd[i].append(ind0)
sliceInd[i].append(ind1)
sliceInd[-1][1] += 1
return sliceInd
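    # Worked example (added comment, values hypothetical): for angles
    # [0, 45, 90, 135, 180] and numInts=2, intLen is 90.0, the values bin to
    # [[0, 45], [90, 135, 180]], and the returned index pairs are
    # [[0, 2], [2, 6]] - the final pair is widened by one so the last slice
    # also covers the closing data point.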
def createFigure(self):
self.axes = []
self.createFigureAxes()
if self.opts['range']:
self.createSlices()
self.plotRangeArea()
if self.opts['dataPoints']:
self.createDataPointsPlot()
if self.opts['analytical']:
self.createAnSolPlot()
if self.opts['optSim']:
self.createOptSimPlot()
self.setXLim()
self.createLegend()
self.setSubplotTitles()
self.setYlimits()
def createLegend(self):
h, l = self.axes[0].get_legend_handles_labels()
ind = len(self.dataStr) - 1
self.axes[ind].legend(h, l, bbox_to_anchor=(1, 1.02), loc=2)
def setXLim(self):
for n in range(len(self.dataStr)):
i = self.getItemKey(n)
for sif in self.sifs:
ax = self.getAxes(i, sif)
angles = self.dataDicts[n][0]
ax.set_xlim((min(angles), max(angles)))
def createOptSimPlot(self):
for n in range(len(self.dataDicts)):
i = self.getItemKey(n)
ad = dp.AnalysisData(self.dataStr[n][1])
ad.calcAnSol()
ad.calculateStats()
angles = ad.getAngles()
for sif in self.sifs:
ax = self.getAxes(i, sif)
res = ad.getResults()[sif]
ax.plot(angles, res, 'lime', lw=1,
label='optSim')
def createDataPointsPlot(self):
for n in range(len(self.dataStr)):
i = self.getItemKey(n)
for sif in self.sifs:
angles = self.dataDicts[n][0]
ax = self.getAxes(i, sif)
for dt in self.opts['data']:
dInd, color = self.getDataIndAndColor(dt)
data = self.dataDicts[n][dInd][sif]
alpha = self.calcAlphaValRP(n)
ax.plot(angles, data,
linestyle='-', marker='.',
color=color, alpha=alpha,
label=dt)
def calcAlphaValRP(self, n):
vals = len(self.dataDicts[n][0])
if vals > 1000:
return 0.05
else:
return 0.3
def createAnSolPlot(self):
for n in range(len(self.items.keys())):
i = self.getItemKey(n)
for sif in self.sifs:
ax = self.getAxes(i, sif)
angles = self.dataDicts[n][0]
anSol = self.dataDicts[n][2][sif]
ax.plot(angles, anSol, 'k', lw=2,
label='analytical')
def getAxes(self, item, sif):
itemInd = sorted(self.items.keys()).index(item)
itemLen = len(self.items)
ax = self.axes[itemLen * self.sifs.index(sif) + itemInd]
return ax
def getItemKey(self, n):
return sorted(self.items.keys())[n]
def plotRangeArea(self):
for n in range(len(self.items)):
i = self.getItemKey(n)
for sif in self.sifs:
axes = self.getAxes(i, sif)
self.plotRangeAreaPerAxes(axes, n, sif)
def getDataIndAndColor(self, dataType):
dataInds = {'results': 1, 'errors': 3}
colors = {'results': 'b', 'errors': 'r'}
return dataInds[dataType], colors[dataType]
def createVerts(self, slices, angles, values, func):
x, y, verts = [], [], []
valsl = [values[s[0] - 1 if s[0] > 0 else 0:s[1]] for s in slices]
angsl = [angles[s[0] - 1 if s[0] > 0 else 0:s[1]] for s in slices]
for a in angsl:
x.append(a[0])
x.append(a[-1])
for v in valsl:
y.append(func(v))
y.append(func(v))
verts = [[xi, yi] for xi, yi in zip(x, y)]
return verts
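    # Sketch of the output (added comment): with func=min each slice
    # contributes one horizontal segment [first_angle, min] -> [last_angle,
    # min], so the verts trace a per-interval step curve of minima that
    # plotRangeAreaPerAxes later closes against the matching max curve.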
def createVerts2(self, slices, angles, values, func):
x, y, verts = [], [], []
valsl = [values[s[0]:s[1]] for s in slices]
angsl = [angles[s[0]:s[1]] for s in slices]
for an, va in zip(angsl, valsl):
y.append(func(va))
ind = np.where(va == y[-1])[0][0]
x.append(an[ind])
x.append(angles[-1])
x.insert(0, angles[0])
yavg = 0.5 * (y[0] + y[-1])
y.append(yavg)
y.insert(0, yavg)
verts = [[xi, yi] for xi, yi in zip(x, y)]
return verts
def plotRangeAreaPerAxes(self, axes, itemInd, sif):
vertMethods = {1: self.createVerts, 2: self.createVerts2}
vertFunc = vertMethods[self.opts['rangeType']]
slices = self.slices[itemInd]
angles = self.dataDicts[itemInd][0]
for dt in self.opts['data']:
dInd, color = self.getDataIndAndColor(dt)
values = self.dataDicts[itemInd][dInd][sif]
verts1 = vertFunc(slices, angles, values, min)
verts2 = vertFunc(slices, angles, values, max)[::-1]
verts = verts1 + verts2 + [verts2[-1]]
codes = self.createClosedPathCodes(verts)
p = Path(verts, codes)
patch = mpl.patches.PathPatch(
p,
facecolor=color,
edgecolor='none',
alpha=0.2,
label=dt +
' range')
axes.add_patch(patch)
patch = mpl.patches.PathPatch(p, edgecolor=color,
fill=False, lw=0.75, alpha=0.6)
axes.add_patch(patch)
def createClosedPathCodes(self, verts):
codes = [Path.MOVETO]
for i in range(len(verts) - 2):
codes.append(Path.LINETO)
codes.append(Path.CLOSEPOLY)
return codes
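    # Minimal matplotlib Path sketch (added comment): for a triangle the
    # verts/codes pair built above would be
    #   verts = [[0, 0], [1, 0], [1, 1], [0, 0]]
    #   codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]
    # i.e. MOVETO the first vertex, LINETO each interior vertex, then
    # CLOSEPOLY back to the start.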
class BoundsCompPlot(SIMCompAnalysis):
def createBoundsPlot(self, items, targets, fig, tol=0.1, iterLim=100):
self.items = items
self.targets = targets
self.fig = fig
self.iterLim = iterLim
self.tol = tol
self.createDataStr()
self.createDataDicts()
self.printStats()
self.createFigure()
def createDataStr(self):
self.dataStr = []
qdict = self.queue.getQueueDict()
for i in self.items:
dd = [{i: qdict[i]}, None, 'angles',
self.getSubplotTitle(qdict[i])]
self.dataStr.append(dd)
def createDataDicts(self):
self.dataDicts = []
for n in range(len(self.items)):
i = self.items[n]
log = {s: {t: {'sigma': [], 'pip': []}
for t in self.targets.keys()}
for s in self.sifs}
node = self.dataStr[n][0][i]
adn = dp.AnalysisNodeData(node, self.sifs)
adn.performOperations()
sigmaUp = 2 * adn.getAnSolParams()['sigma']
sigmaLow = 0
for s in self.sifs:
for t in self.targets.keys():
log[s][t] = self.findSigmaBound(
adn, sigmaUp, sigmaLow, s,
self.targets[t], log[s][t])
self.dataDicts.append([adn, log])
def printStats(self):
for n in range(len(self.dataStr)):
i = self.items[n]
print self.dataStr[n][3]
log = self.dataDicts[n][1]
for s in self.sifs:
sigmas, bounds, its = [], [], []
for t in log[s].keys():
u = log[s][t]
sigmas.append(u['sigma'][-1])
bounds.append(u['pip'][-1])
its.append(len(u['sigma']))
info = '{0}sigma=[{1:.4}, {2:.4}] | bounds=[{3:.4}%, {4:.4}%] | iterations=[{5}, {6}]'.format(
' {0} '.format(s), sigmas[0], sigmas[1], bounds[0], bounds[1], its[0], its[1])
print info
def createFigure(self):
self.axes = []
self.createFigureAxes()
self.createPlot()
self.setXLimits()
self.setYlimits()
self.setSubplotTitles()
def setXLimits(self):
for n in range(len(self.dataStr)):
i = self.items[n]
adn = self.dataDicts[n][0]
a = adn.getAngles()
lims = (min(a), max(a))
for s in self.sifs:
ax = self.getAxes(i, s)
ax.set_xlim(lims)
def getAxes(self, item, sif):
itemLen = len(self.items)
itemInd = self.items.index(item)
ax = self.axes[itemLen * self.sifs.index(sif) + itemInd]
return ax
def getAlphaVal(self, item):
n = self.items.index(item)
adn = self.dataDicts[n][0]
if len(adn.getAngles()) > 1000:
return 0.1
else:
return 1
def createPlot(self):
for n in range(len(self.dataStr)):
i = self.items[n]
adn = self.dataDicts[n][0]
logs = self.dataDicts[n][1]
alpha = self.getAlphaVal(i)
for s in self.sifs:
ax = self.getAxes(i, s)
sigmaUpper = logs[s]['upper']['sigma'][-1]
sigmaLower = logs[s]['lower']['sigma'][-1]
ins, outs = self.getInOutPoints(adn,
sigmaLower, sigmaUpper, s)
ax.plot(ins[0], ins[1], 'b.',
label='inside bounds', alpha=alpha)
ax.plot(outs[0], outs[1], 'r.',
label='outside bounds', alpha=alpha)
angles = adn.getAngles()
anSol = adn.getAnSol()[s]
ax.plot(angles, anSol, 'k', lw=1.5,
label='analytical')
lowerBound = adn.calcSIFsForSigmaAndSIF(
sigmaLower, s)
upperBound = adn.calcSIFsForSigmaAndSIF(
sigmaUpper, s)
ax.plot(angles, upperBound, 'lime', lw=1.5,
label='bounds')
ax.plot(angles, lowerBound, 'lime', lw=1.5)
def findSigmaBound(self, adn, sigmaUp, sigmaLow,
sif, target, log):
sigma = 0.5 * (sigmaUp + sigmaLow)
pip = self.getPercentPointsInPoly(adn, sigma, sif)
log['pip'].append(pip)
log['sigma'].append(sigma)
if ((pip >= target - self.tol and pip <= target + self.tol) or
(len(log['sigma']) == self.iterLim)):
return log
elif pip < target - self.tol:
sigmaLow = sigma
return self.findSigmaBound(adn, sigmaUp, sigmaLow,
sif, target, log)
elif pip > target + self.tol:
sigmaUp = sigma
return self.findSigmaBound(adn, sigmaUp, sigmaLow,
sif, target, log)
else:
raise ValueError('unexpected condition reached')
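    # Added note: findSigmaBound is a bisection search on sigma. Widening the
    # bound curve can only capture more points, so pip is monotonic in sigma
    # and halving [sigmaLow, sigmaUp] converges on the target percentage. A
    # hypothetical trace for target=95, tol=0.1 could be
    #   sigma: 1.00 -> 0.50 -> 0.75 -> ... until pip lands in 95 +/- 0.1
    # or the iterLim recursion cap cuts the search off.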
def getPercentPointsInPoly(self, adn, sigma, sif):
allnum, numin, numout = self.countPointInOutOfContour(
adn, sigma, sif)
assert abs(numin + numout - allnum) < 10e-8
return float(numin) / float(allnum) * 100
def countPointInOutOfContour(self, adn, sigma, sif):
tfl = self.getInOutOfContour(adn, sigma, sif)
numin = np.sum(tfl)
allnum = len(tfl)
numout = allnum - numin
return allnum, numin, numout
def getInOutOfContour(self, adn, sigma, sif):
angles = adn.getAngles()
results = abs(adn.getResults()[sif])
points = [[xi, yi] for xi, yi in zip(angles, results)]
yVals = abs(np.array(adn.calcSIFsForSigmaAndSIF(sigma, sif)))
return self.getInOutPointsArray(angles, yVals, points)
def getInOutPointsArray(self, angles, yVals, points):
path = Path(self.createVertsForPolyPath(angles, yVals))
return path.contains_points(points, radius=0)
def getInOutPoints(self, adn, sigmaLow, sigmaUp, sif):
inoutLow = self.getInOutOfContour(adn, sigmaLow, sif)
inoutUp = self.getInOutOfContour(adn, sigmaUp, sif)
angles = adn.getAngles()
res = adn.getResults()[sif]
inAngles, inVals = [], []
outAngles, outVals = [], []
for i in range(len(inoutUp)):
if inoutLow[i] or not inoutUp[i]:
outAngles.append(angles[i])
outVals.append(res[i])
else:
inAngles.append(angles[i])
inVals.append(res[i])
return [[inAngles, inVals], [outAngles, outVals]]
def createVertsForPolyPath(self, x, y):
verts = [[xi, yi] for xi, yi in zip(x, y)]
verts.insert(0, [verts[0][0], -10e16])
verts.append([verts[-1][0], -10e16])
return verts
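    # Illustrative in/out test (added comment, values hypothetical): the two
    # sentinel vertices at y = -10e16 close the polygon far below the data,
    # so the region under the SIF curve counts as "inside":
    #   path = Path(self.createVertsForPolyPath([0, 90, 180], [1.0, 2.0, 1.5]))
    #   path.contains_points([[90, 1.0]])  # -> [True]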
|
mit
| 8,652,777,072,238,680,000 | 33.951111 | 110 | 0.525348 | false |
DTOcean/dtocean-core
|
dtocean_core/tools/external.py
|
1
|
3947
|
# -*- coding: utf-8 -*-
# Copyright (C) 2016-2018 Mathew Topper
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import subprocess
from . import Tool
from ..utils.process import script
class WECSimulatorTool(Tool):
"""Start dtocean-wec"""
@classmethod
def get_name(cls):
return "WEC Simulator"
@classmethod
def declare_inputs(cls):
'''A class method to declare all the variables required as inputs by
this interface.
Returns:
list: List of inputs identifiers
Example:
The returned value can be None or a list of identifier strings which
appear in the data descriptions. For example::
inputs = ["My:first:variable",
"My:second:variable",
]
'''
return None
@classmethod
def declare_outputs(cls):
'''A class method to declare all the output variables provided by
this interface.
Returns:
list: List of output identifiers
Example:
The returned value can be None or a list of identifier strings which
appear in the data descriptions. For example::
outputs = ["My:first:variable",
"My:third:variable",
]
'''
return None
@classmethod
def declare_optional(cls):
'''A class method to declare all the variables which should be flagged
as optional.
Returns:
list: List of optional variable identifiers
Note:
Currently only inputs marked as optional have any logical effect.
However, this may change in future releases hence the general
approach.
Example:
The returned value can be None or a list of identifier strings which
appear in the declare_inputs output. For example::
optional = ["My:first:variable",
]
'''
return None
@classmethod
def declare_id_map(cls):
'''Declare the mapping for variable identifiers in the data description
to local names for use in the interface. This helps isolate changes in
the data description or interface from effecting the other.
Returns:
dict: Mapping of local to data description variable identifiers
Example:
The returned value must be a dictionary containing all the inputs and
outputs from the data description and a local alias string. For
example::
id_map = {"var1": "My:first:variable",
"var2": "My:second:variable",
"var3": "My:third:variable"
}
'''
id_map = {}
return id_map
def configure(self, kwargs=None):
"""Does nothing in this case"""
return
def connect(self, **kwargs):
script_path = script("dtocean-wec.exe")
if script_path is None: return
si = subprocess.STARTUPINFO()
si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.call(script_path, startupinfo=si)
return
|
gpl-3.0
| -1,000,664,811,130,914,200 | 27.395683 | 79 | 0.584241 | false |
bowen0701/algorithms_data_structures
|
lc0314_binary_tree_vertical_order_traversal.py
|
1
|
3585
|
"""Leetcode 314. Binary Tree Vertical Order Traversal
Medium
URL: https://leetcode.com/problems/binary-tree-vertical-order-traversal/
Given a binary tree, return the vertical order traversal of its nodes' values.
(ie, from top to bottom, column by column).
If two nodes are in the same row and column, the order should be from left to
right.
Examples 1:
Input: [3,9,20,null,null,15,7]
3
/\
/ \
9 20
/\
/ \
15 7
Output:
[
[9],
[3,15],
[20],
[7]
]
Examples 2:
Input: [3,9,8,4,0,1,7]
3
/\
/ \
9 8
/\ /\
/ \/ \
4 01 7
Output:
[
[4],
[9],
[3,0,1],
[8],
[7]
]
Examples 3:
Input: [3,9,8,4,0,1,7,null,null,null,2,5] (0's right child is 2 and 1's left
child is 5)
3
/\
/ \
9 8
/\ /\
/ \/ \
4 01 7
/\
/ \
5 2
Output:
[
[4],
[9,5],
[3,0,1],
[8,2],
[7]
]
"""
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, val):
self.val = val
self.left = None
self.right = None
class SolutionOrderValsDictQueue(object):
def verticalOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
Time complexity: O(n).
Space complexity: O(n).
"""
from collections import defaultdict
from collections import deque
if not root:
return []
# Create dict: vertical order->list(node values).
vorder_vals_d = defaultdict(list)
# Use queue to add root and left/right with their orders to dict.
queue = deque([(root, 0)])
while queue:
current, vorder = queue.pop()
vorder_vals_d[vorder].append(current.val)
if current.left:
queue.appendleft((current.left, vorder - 1))
if current.right:
queue.appendleft((current.right, vorder + 1))
# Return sorted list(node values) based on vertical order.
vorder_vals = [vals for vorder, vals in sorted(vorder_vals_d.items())]
return vorder_vals
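    # Added trace for example 1 (tree [3,9,20,null,null,15,7]): the BFS
    # above fills vorder_vals_d as {-1: [9], 0: [3, 15], 1: [20], 2: [7]},
    # and sorting on the keys yields [[9], [3, 15], [20], [7]].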
def main():
# 3
# /\
# / \
# 9 20
# /\
# / \
# 15 7
# Output:
# [
# [9],
# [3,15],
# [20],
# [7]
# ]
root = TreeNode(3)
root.left = TreeNode(9)
root.right = TreeNode(20)
root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
print SolutionOrderValsDictQueue().verticalOrder(root)
# 3
# /\
# / \
# 9 8
# /\ /\
# / \/ \
# 4 01 7
# Output:
# [
# [4],
# [9],
# [3,0,1],
# [8],
# [7]
# ]
root = TreeNode(3)
root.left = TreeNode(9)
root.right = TreeNode(8)
root.left.left = TreeNode(4)
root.left.right = TreeNode(0)
root.right.left = TreeNode(1)
root.right.right = TreeNode(7)
print SolutionOrderValsDictQueue().verticalOrder(root)
# 3
# /\
# / \
# 9 8
# /\ /\
# / \/ \
# 4 01 7
# /\
# / \
# 5 2
# Output:
# [
# [4],
# [9,5],
# [3,0,1],
# [8,2],
# [7]
# ]
root = TreeNode(3)
root.left = TreeNode(9)
root.right = TreeNode(8)
root.left.left = TreeNode(4)
root.left.right = TreeNode(0)
root.right.left = TreeNode(1)
root.right.right = TreeNode(7)
root.left.right.right = TreeNode(2)
root.right.left.left = TreeNode(5)
print SolutionOrderValsDictQueue().verticalOrder(root)
if __name__ == '__main__':
main()
|
bsd-2-clause
| 4,864,136,224,572,877,000 | 17.769634 | 78 | 0.499861 | false |
DirectXMan12/nova-hacking
|
nova/tests/db/test_migration_utils.py
|
1
|
21712
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 Boris Pavlovic (boris@pavlovic.me).
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import warnings
from migrate.changeset import UniqueConstraint
from sqlalchemy.dialects import mysql
from sqlalchemy import Boolean, Index, Integer, DateTime, String
from sqlalchemy import MetaData, Table, Column
from sqlalchemy.engine import reflection
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.exc import SAWarning
from sqlalchemy.sql import select
from sqlalchemy.types import UserDefinedType, NullType
from nova.db.sqlalchemy import api as db
from nova.db.sqlalchemy import utils
from nova import exception
from nova.tests.db import test_migrations
class CustomType(UserDefinedType):
"""Dummy column type for testing unsupported types."""
def get_col_spec(self):
return "CustomType"
class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
"""Class for testing utils that are used in db migrations."""
def test_utils_drop_unique_constraint(self):
table_name = "__test_tmp_table__"
uc_name = 'uniq_foo'
values = [
{'id': 1, 'a': 3, 'foo': 10},
{'id': 2, 'a': 2, 'foo': 20},
{'id': 3, 'a': 1, 'foo': 30}
]
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('a', Integer),
Column('foo', Integer),
UniqueConstraint('a', name='uniq_a'),
UniqueConstraint('foo', name=uc_name))
test_table.create()
engine.execute(test_table.insert(), values)
# NOTE(boris-42): This method is generic UC dropper.
utils.drop_unique_constraint(engine, table_name, uc_name, 'foo')
s = test_table.select().order_by(test_table.c.id)
rows = engine.execute(s).fetchall()
for i in xrange(0, len(values)):
v = values[i]
self.assertEqual((v['id'], v['a'], v['foo']), rows[i])
# NOTE(boris-42): Update data about Table from DB.
meta = MetaData()
meta.bind = engine
test_table = Table(table_name, meta, autoload=True)
constraints = filter(lambda c: c.name == uc_name,
test_table.constraints)
self.assertEqual(len(constraints), 0)
self.assertEqual(len(test_table.constraints), 1)
test_table.drop()
def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self):
table_name = "__test_tmp_table__"
uc_name = 'uniq_foo'
values = [
{'id': 1, 'a': 3, 'foo': 10},
{'id': 2, 'a': 2, 'foo': 20},
{'id': 3, 'a': 1, 'foo': 30}
]
engine = self.engines['sqlite']
meta = MetaData(bind=engine)
test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('a', Integer),
Column('foo', CustomType, default=0),
UniqueConstraint('a', name='uniq_a'),
UniqueConstraint('foo', name=uc_name))
test_table.create()
engine.execute(test_table.insert(), values)
warnings.simplefilter("ignore", SAWarning)
# NOTE(boris-42): Missing info about column `foo` that has
# unsupported type CustomType.
self.assertRaises(exception.NovaException,
utils.drop_unique_constraint,
engine, table_name, uc_name, 'foo')
# NOTE(boris-42): Wrong type of foo instance. it should be
# instance of sqlalchemy.Column.
self.assertRaises(exception.NovaException,
utils.drop_unique_constraint,
engine, table_name, uc_name, 'foo', foo=Integer())
foo = Column('foo', CustomType, default=0)
utils.drop_unique_constraint(engine, table_name, uc_name, 'foo',
foo=foo)
s = test_table.select().order_by(test_table.c.id)
rows = engine.execute(s).fetchall()
for i in xrange(0, len(values)):
v = values[i]
self.assertEqual((v['id'], v['a'], v['foo']), rows[i])
# NOTE(boris-42): Update data about Table from DB.
meta = MetaData(bind=engine)
test_table = Table(table_name, meta, autoload=True)
constraints = filter(lambda c: c.name == uc_name,
test_table.constraints)
self.assertEqual(len(constraints), 0)
self.assertEqual(len(test_table.constraints), 1)
test_table.drop()
def _populate_db_for_drop_duplicate_entries(self, engine, meta,
table_name):
values = [
{'id': 11, 'a': 3, 'b': 10, 'c': 'abcdef'},
{'id': 12, 'a': 5, 'b': 10, 'c': 'abcdef'},
{'id': 13, 'a': 6, 'b': 10, 'c': 'abcdef'},
{'id': 14, 'a': 7, 'b': 10, 'c': 'abcdef'},
{'id': 21, 'a': 1, 'b': 20, 'c': 'aa'},
{'id': 31, 'a': 1, 'b': 20, 'c': 'bb'},
{'id': 41, 'a': 1, 'b': 30, 'c': 'aef'},
{'id': 42, 'a': 2, 'b': 30, 'c': 'aef'},
{'id': 43, 'a': 3, 'b': 30, 'c': 'aef'}
]
test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('a', Integer),
Column('b', Integer),
Column('c', String(255)),
Column('deleted', Integer, default=0),
Column('deleted_at', DateTime),
Column('updated_at', DateTime))
test_table.create()
engine.execute(test_table.insert(), values)
return test_table, values
def test_drop_old_duplicate_entries_from_table(self):
table_name = "__test_tmp_table__"
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
test_table, values = self.\
_populate_db_for_drop_duplicate_entries(engine, meta,
table_name)
utils.drop_old_duplicate_entries_from_table(engine, table_name,
False, 'b', 'c')
uniq_values = set()
expected_ids = []
for value in sorted(values, key=lambda x: x['id'], reverse=True):
uniq_value = (('b', value['b']), ('c', value['c']))
if uniq_value in uniq_values:
continue
uniq_values.add(uniq_value)
expected_ids.append(value['id'])
real_ids = [row[0] for row in
engine.execute(select([test_table.c.id])).fetchall()]
self.assertEqual(len(real_ids), len(expected_ids))
for id_ in expected_ids:
self.assertTrue(id_ in real_ids)
def test_drop_old_duplicate_entries_from_table_soft_delete(self):
table_name = "__test_tmp_table__"
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table, values = self.\
_populate_db_for_drop_duplicate_entries(engine, meta,
table_name)
utils.drop_old_duplicate_entries_from_table(engine, table_name,
True, 'b', 'c')
uniq_values = set()
expected_values = []
soft_deleted_values = []
for value in sorted(values, key=lambda x: x['id'], reverse=True):
uniq_value = (('b', value['b']), ('c', value['c']))
if uniq_value in uniq_values:
soft_deleted_values.append(value)
continue
uniq_values.add(uniq_value)
expected_values.append(value)
base_select = table.select()
rows_select = base_select.\
where(table.c.deleted != table.c.id)
row_ids = [row['id'] for row in
engine.execute(rows_select).fetchall()]
self.assertEqual(len(row_ids), len(expected_values))
for value in expected_values:
self.assertTrue(value['id'] in row_ids)
deleted_rows_select = base_select.\
where(table.c.deleted == table.c.id)
deleted_rows_ids = [row['id'] for row in
engine.execute(deleted_rows_select).fetchall()]
self.assertEqual(len(deleted_rows_ids),
len(values) - len(row_ids))
for value in soft_deleted_values:
self.assertTrue(value['id'] in deleted_rows_ids)
def test_check_shadow_table(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('c', String(256)))
table.create()
#check missing shadow table
self.assertRaises(NoSuchTableError,
utils.check_shadow_table, engine, table_name)
shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
Column('id', Integer),
Column('a', Integer))
shadow_table.create()
# check missing column
self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name)
# check when all is ok
c = Column('c', String(256))
shadow_table.create_column(c)
self.assertTrue(utils.check_shadow_table(engine, table_name))
# check extra column
d = Column('d', Integer)
shadow_table.create_column(d)
self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name)
def test_check_shadow_table_different_types(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer))
table.create()
shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', String(256)))
shadow_table.create()
self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name)
def test_check_shadow_table_with_unsupported_type(self):
table_name = 'abc'
engine = self.engines['sqlite']
meta = MetaData(bind=engine)
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('c', CustomType))
table.create()
shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('c', CustomType))
shadow_table.create()
self.assertTrue(utils.check_shadow_table(engine, table_name))
def test_create_shadow_table_by_table_instance(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('b', String(256)))
table.create()
utils.create_shadow_table(engine, table=table)
self.assertTrue(utils.check_shadow_table(engine, table_name))
def test_create_shadow_table_by_name(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer),
Column('b', String(256)))
table.create()
utils.create_shadow_table(engine, table_name=table_name)
self.assertTrue(utils.check_shadow_table(engine, table_name))
def test_create_shadow_table_not_supported_type(self):
table_name = 'abc'
engine = self.engines['sqlite']
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', CustomType))
table.create()
self.assertRaises(exception.NovaException,
utils.create_shadow_table,
engine, table_name=table_name)
utils.create_shadow_table(engine, table_name=table_name,
a=Column('a', CustomType()))
self.assertTrue(utils.check_shadow_table(engine, table_name))
def test_create_shadow_both_table_and_table_name_are_none(self):
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
self.assertRaises(exception.NovaException,
utils.create_shadow_table, engine)
def test_create_shadow_both_table_and_table_name_are_specified(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer))
table.create()
self.assertRaises(exception.NovaException,
utils.create_shadow_table,
engine, table=table, table_name=table_name)
def test_create_duplicate_shadow_table(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', Integer))
table.create()
utils.create_shadow_table(engine, table_name=table_name)
self.assertRaises(exception.ShadowTableExists,
utils.create_shadow_table,
engine, table_name=table_name)
def test_change_deleted_column_type_doesnt_drop_index(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData(bind=engine)
indexes = {
'idx_a_deleted': ['a', 'deleted'],
'idx_b_deleted': ['b', 'deleted'],
'idx_a': ['a']
}
index_instances = [Index(name, *columns)
for name, columns in indexes.iteritems()]
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('a', String(255)),
Column('b', String(255)),
Column('deleted', Boolean),
*index_instances)
table.create()
utils.change_deleted_column_type_to_id_type(engine, table_name)
utils.change_deleted_column_type_to_boolean(engine, table_name)
insp = reflection.Inspector.from_engine(engine)
real_indexes = insp.get_indexes(table_name)
self.assertEqual(len(real_indexes), 3)
for index in real_indexes:
name = index['name']
self.assertIn(name, indexes)
self.assertEqual(set(index['column_names']),
set(indexes[name]))
def test_change_deleted_column_type_to_id_type_integer(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('deleted', Boolean))
table.create()
utils.change_deleted_column_type_to_id_type(engine, table_name)
table = utils.get_table(engine, table_name)
self.assertTrue(isinstance(table.c.deleted.type, Integer))
def test_change_deleted_column_type_to_id_type_string(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', String(255), primary_key=True),
Column('deleted', Boolean))
table.create()
utils.change_deleted_column_type_to_id_type(engine, table_name)
table = utils.get_table(engine, table_name)
self.assertTrue(isinstance(table.c.deleted.type, String))
def test_change_deleted_column_type_to_id_type_custom(self):
table_name = 'abc'
engine = self.engines['sqlite']
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('foo', CustomType),
Column('deleted', Boolean))
table.create()
self.assertRaises(exception.NovaException,
utils.change_deleted_column_type_to_id_type,
engine, table_name)
fooColumn = Column('foo', CustomType())
utils.change_deleted_column_type_to_id_type(engine, table_name,
foo=fooColumn)
table = utils.get_table(engine, table_name)
# NOTE(boris-42): There is no way to check has foo type CustomType.
# but sqlalchemy will set it to NullType.
self.assertTrue(isinstance(table.c.foo.type, NullType))
self.assertTrue(isinstance(table.c.deleted.type, Integer))
def test_change_deleted_column_type_to_boolean(self):
table_name = 'abc'
for key, engine in self.engines.items():
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('deleted', Integer))
table.create()
utils.change_deleted_column_type_to_boolean(engine, table_name)
table = utils.get_table(engine, table_name)
expected_type = Boolean if key != "mysql" else mysql.TINYINT
self.assertTrue(isinstance(table.c.deleted.type, expected_type))
def test_change_deleted_column_type_to_boolean_type_custom(self):
table_name = 'abc'
engine = self.engines['sqlite']
meta = MetaData()
meta.bind = engine
table = Table(table_name, meta,
Column('id', Integer, primary_key=True),
Column('foo', CustomType),
Column('deleted', Integer))
table.create()
self.assertRaises(exception.NovaException,
utils.change_deleted_column_type_to_boolean,
engine, table_name)
fooColumn = Column('foo', CustomType())
utils.change_deleted_column_type_to_boolean(engine, table_name,
foo=fooColumn)
table = utils.get_table(engine, table_name)
# NOTE(boris-42): There is no way to check has foo type CustomType.
# but sqlalchemy will set it to NullType.
self.assertTrue(isinstance(table.c.foo.type, NullType))
self.assertTrue(isinstance(table.c.deleted.type, Boolean))
|
apache-2.0
| -9,101,313,713,474,050,000 | 41.077519 | 79 | 0.517272 | false |
all-of-us/raw-data-repository
|
rdr_service/alembic/versions/cd009f1475ff_deceased_report_cause_of_death.py
|
1
|
1935
|
"""deceased report cause of death
Revision ID: cd009f1475ff
Revises: a4ab3afcb460
Create Date: 2020-09-08 11:48:27.208496
"""
from alembic import op
import sqlalchemy as sa
import rdr_service.model.utils
from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus
from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus
from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus
from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus
from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity
from rdr_service.model.base import add_table_history_table, drop_table_history_table
from rdr_service.model.code import CodeType
from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus
# revision identifiers, used by Alembic.
revision = 'cd009f1475ff'
down_revision = 'a4ab3afcb460'
branch_labels = None
depends_on = None
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_rdr():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('deceased_report', sa.Column('cause_of_death', sa.String(length=1024), nullable=True))
# ### end Alembic commands ###
def downgrade_rdr():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('deceased_report', 'cause_of_death')
# ### end Alembic commands ###
def upgrade_metrics():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_metrics():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
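# Usage sketch (added comment; commands assume the standard Alembic CLI):
#   alembic upgrade cd009f1475ff    # adds deceased_report.cause_of_death
#   alembic downgrade a4ab3afcb460  # drops the column again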
|
bsd-3-clause
| 7,756,733,859,795,774,000 | 31.25 | 125 | 0.749871 | false |
tboyce021/home-assistant
|
homeassistant/components/apple_tv/__init__.py
|
2
|
12223
|
"""The Apple TV integration."""
import asyncio
import logging
from random import randrange
from pyatv import connect, exceptions, scan
from pyatv.const import Protocol
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.components.remote import DOMAIN as REMOTE_DOMAIN
from homeassistant.const import (
CONF_ADDRESS,
CONF_NAME,
CONF_PROTOCOL,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from .const import CONF_CREDENTIALS, CONF_IDENTIFIER, CONF_START_OFF, DOMAIN
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Apple TV"
BACKOFF_TIME_UPPER_LIMIT = 300 # Five minutes
NOTIFICATION_TITLE = "Apple TV Notification"
NOTIFICATION_ID = "apple_tv_notification"
SOURCE_REAUTH = "reauth"
SIGNAL_CONNECTED = "apple_tv_connected"
SIGNAL_DISCONNECTED = "apple_tv_disconnected"
PLATFORMS = [MP_DOMAIN, REMOTE_DOMAIN]
async def async_setup(hass, config):
"""Set up the Apple TV integration."""
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry for Apple TV."""
manager = AppleTVManager(hass, entry)
hass.data.setdefault(DOMAIN, {})[entry.unique_id] = manager
async def on_hass_stop(event):
"""Stop push updates when hass stops."""
await manager.disconnect()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
async def setup_platforms():
"""Set up platforms and initiate connection."""
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_setup(entry, component)
for component in PLATFORMS
]
)
await manager.init()
hass.async_create_task(setup_platforms())
return True
async def async_unload_entry(hass, entry):
"""Unload an Apple TV config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, platform)
for platform in PLATFORMS
]
)
)
if unload_ok:
manager = hass.data[DOMAIN].pop(entry.unique_id)
await manager.disconnect()
return unload_ok
class AppleTVEntity(Entity):
"""Device that sends commands to an Apple TV."""
def __init__(self, name, identifier, manager):
"""Initialize device."""
self.atv = None
self.manager = manager
self._name = name
self._identifier = identifier
async def async_added_to_hass(self):
"""Handle when an entity is about to be added to Home Assistant."""
@callback
def _async_connected(atv):
"""Handle that a connection was made to a device."""
self.atv = atv
self.async_device_connected(atv)
self.async_write_ha_state()
@callback
def _async_disconnected():
"""Handle that a connection to a device was lost."""
self.async_device_disconnected()
self.atv = None
self.async_write_ha_state()
self.async_on_remove(
async_dispatcher_connect(
self.hass, f"{SIGNAL_CONNECTED}_{self._identifier}", _async_connected
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SIGNAL_DISCONNECTED}_{self._identifier}",
_async_disconnected,
)
)
def async_device_connected(self, atv):
"""Handle when connection is made to device."""
def async_device_disconnected(self):
"""Handle when connection was lost to device."""
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self):
"""Return a unique ID."""
return self._identifier
@property
def should_poll(self):
"""No polling needed for Apple TV."""
return False
class AppleTVManager:
"""Connection and power manager for an Apple TV.
An instance is used per device to share the same power state between
several platforms. It also manages scanning and connection establishment
in case of problems.
"""
def __init__(self, hass, config_entry):
"""Initialize power manager."""
self.config_entry = config_entry
self.hass = hass
self.atv = None
self._is_on = not config_entry.options.get(CONF_START_OFF, False)
self._connection_attempts = 0
self._connection_was_lost = False
self._task = None
async def init(self):
"""Initialize power management."""
if self._is_on:
await self.connect()
def connection_lost(self, _):
"""Device was unexpectedly disconnected.
This is a callback function from pyatv.interface.DeviceListener.
"""
_LOGGER.warning('Connection lost to Apple TV "%s"', self.atv.name)
if self.atv:
self.atv.close()
self.atv = None
self._connection_was_lost = True
self._dispatch_send(SIGNAL_DISCONNECTED)
self._start_connect_loop()
def connection_closed(self):
"""Device connection was (intentionally) closed.
This is a callback function from pyatv.interface.DeviceListener.
"""
if self.atv:
self.atv.close()
self.atv = None
self._dispatch_send(SIGNAL_DISCONNECTED)
self._start_connect_loop()
async def connect(self):
"""Connect to device."""
self._is_on = True
self._start_connect_loop()
async def disconnect(self):
"""Disconnect from device."""
_LOGGER.debug("Disconnecting from device")
self._is_on = False
try:
if self.atv:
self.atv.push_updater.listener = None
self.atv.push_updater.stop()
self.atv.close()
self.atv = None
if self._task:
self._task.cancel()
self._task = None
except Exception: # pylint: disable=broad-except
_LOGGER.exception("An error occurred while disconnecting")
def _start_connect_loop(self):
"""Start background connect loop to device."""
if not self._task and self.atv is None and self._is_on:
self._task = asyncio.create_task(self._connect_loop())
else:
_LOGGER.debug(
"Not starting connect loop (%s, %s)", self.atv is None, self._is_on
)
async def _connect_loop(self):
"""Connect loop background task function."""
_LOGGER.debug("Starting connect loop")
# Try to find device and connect as long as the user has said that
# we are allowed to connect and we are not already connected.
while self._is_on and self.atv is None:
try:
conf = await self._scan()
if conf:
await self._connect(conf)
except exceptions.AuthenticationError:
self._auth_problem()
break
except asyncio.CancelledError:
pass
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Failed to connect")
self.atv = None
if self.atv is None:
self._connection_attempts += 1
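                # Jittered exponential backoff: the delay is drawn uniformly
                # from [0, 2**attempts) and capped at BACKOFF_TIME_UPPER_LIMIT,
                # e.g. attempt 3 sleeps 0-7 s and attempt 10 at most 300 s.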
backoff = min(
randrange(2 ** self._connection_attempts), BACKOFF_TIME_UPPER_LIMIT
)
_LOGGER.debug("Reconnecting in %d seconds", backoff)
await asyncio.sleep(backoff)
_LOGGER.debug("Connect loop ended")
self._task = None
def _auth_problem(self):
"""Problem to authenticate occurred that needs intervention."""
_LOGGER.debug("Authentication error, reconfigure integration")
name = self.config_entry.data.get(CONF_NAME)
identifier = self.config_entry.unique_id
        self.hass.components.persistent_notification.create(
            "An irrecoverable connection problem occurred when connecting to "
            f"`{name}`. Please go to the Integrations page and reconfigure it",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
# Add to event queue as this function is called from a task being
# cancelled from disconnect
asyncio.create_task(self.disconnect())
self.hass.async_create_task(
self.hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_REAUTH},
data={CONF_NAME: name, CONF_IDENTIFIER: identifier},
)
)
async def _scan(self):
"""Try to find device by scanning for it."""
identifier = self.config_entry.unique_id
address = self.config_entry.data[CONF_ADDRESS]
protocol = Protocol(self.config_entry.data[CONF_PROTOCOL])
_LOGGER.debug("Discovering device %s", identifier)
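        # First try a targeted scan against the last known address; only when
        # that fails does the code below fall back to a full network scan.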
atvs = await scan(
self.hass.loop, identifier=identifier, protocol=protocol, hosts=[address]
)
if atvs:
return atvs[0]
_LOGGER.debug(
"Failed to find device %s with address %s, trying to scan",
identifier,
address,
)
atvs = await scan(self.hass.loop, identifier=identifier, protocol=protocol)
if atvs:
return atvs[0]
_LOGGER.debug("Failed to find device %s, trying later", identifier)
return None
async def _connect(self, conf):
"""Connect to device."""
credentials = self.config_entry.data[CONF_CREDENTIALS]
session = async_get_clientsession(self.hass)
for protocol, creds in credentials.items():
conf.set_credentials(Protocol(int(protocol)), creds)
_LOGGER.debug("Connecting to device %s", self.config_entry.data[CONF_NAME])
self.atv = await connect(conf, self.hass.loop, session=session)
self.atv.listener = self
self._dispatch_send(SIGNAL_CONNECTED, self.atv)
self._address_updated(str(conf.address))
await self._async_setup_device_registry()
self._connection_attempts = 0
if self._connection_was_lost:
_LOGGER.info(
'Connection was re-established to Apple TV "%s"', self.atv.service.name
)
self._connection_was_lost = False
async def _async_setup_device_registry(self):
attrs = {
"identifiers": {(DOMAIN, self.config_entry.unique_id)},
"manufacturer": "Apple",
"name": self.config_entry.data[CONF_NAME],
}
if self.atv:
dev_info = self.atv.device_info
attrs["model"] = "Apple TV " + dev_info.model.name.replace("Gen", "")
attrs["sw_version"] = dev_info.version
if dev_info.mac:
attrs["connections"] = {(dr.CONNECTION_NETWORK_MAC, dev_info.mac)}
device_registry = await dr.async_get_registry(self.hass)
device_registry.async_get_or_create(
config_entry_id=self.config_entry.entry_id, **attrs
)
@property
def is_connecting(self):
"""Return true if connection is in progress."""
return self._task is not None
def _address_updated(self, address):
"""Update cached address in config entry."""
_LOGGER.debug("Changing address to %s", address)
self.hass.config_entries.async_update_entry(
self.config_entry, data={**self.config_entry.data, CONF_ADDRESS: address}
)
def _dispatch_send(self, signal, *args):
"""Dispatch a signal to all entities managed by this manager."""
async_dispatcher_send(
self.hass, f"{signal}_{self.config_entry.unique_id}", *args
)
|
apache-2.0
| 6,061,294,520,486,808,000 | 31.335979 | 87 | 0.599607 | false |
Sjc1000/PyRC
|
UI/NickName.py
|
1
|
1047
|
#!/usr/bin/env python3
from gi.repository import Gtk, Pango
class NickName():
servers = {}
active_server = None
active_channel = None
def __init__(self, MainWindow):
self.MainWindow = MainWindow
self.position = [0, 9, 1, 1]
def prebuild(self):
self.MainWindow.ui_plugins['ServerList'].position[3] -= 1
return None
def build(self):
self.text = Gtk.Label()
self.text.set_text('')
self.MainWindow.grid.attach(self.text, *self.position)
return None
def add_server(self, server):
self.servers[server] = ''
return None
def change_nickname(self, server, new_nickname):
self.servers[server] = new_nickname
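        # NOTE: the nickname is spliced into Pango markup verbatim; names
        # containing markup characters such as "<" would need escaping first.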
self.text.set_markup('<span weight="ultrabold">' + new_nickname + '</span>')
return None
def activate_path(self, server, channel, clicked=False):
self.active_channel = channel
self.active_server = server
self.change_nickname(server, self.servers[server])
return None
|
gpl-2.0
| 5,602,756,022,163,896,000 | 25.2 | 84 | 0.613181 | false |
mysociety/barnetplanning
|
applications/migrations/0002_auto__add_field_application_ward_mapit_id.py
|
1
|
1626
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Application.ward_mapit_id'
db.add_column('applications_application', 'ward_mapit_id', self.gf('django.db.models.fields.IntegerField')(null=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Application.ward_mapit_id'
db.delete_column('applications_application', 'ward_mapit_id')
models = {
'applications.application': {
'Meta': {'object_name': 'Application'},
'address': ('django.db.models.fields.TextField', [], {}),
'council_reference': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info_url': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'received': ('django.db.models.fields.DateField', [], {}),
'ward_mapit_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
}
}
complete_apps = ['applications']
|
agpl-3.0
| 2,371,838,082,828,122,600 | 42.945946 | 146 | 0.583026 | false |
DanielFSFE/test1
|
adventure.py
|
1
|
13152
|
import random
class Game(object):
"""
holds all information for a game. Later it may be
possible to run several game instances at once
"""
number = 0
def __init__(self):
self.number = Game.number
Game.number += 1
self.rooms = {} # dictionary of rooms, key is room number
self.items = {} # dictionary of items, key is item number
self.monsters = {} # dictionary of monsters, key is m. number
self.effects = {}
class Player(object):
number = 0
def __init__(self, game, where=0, name="hero"):
"""need game object, like the room class"""
self.number = Player.number
Player.number += 1
self.name = name
self.inventory = [] # list of itemnumbers (player carry items)
self.maxcarry = 100 # kg
self.carry = 0 # kg
self.where = where # start room number
class Item(object):
number = 0
def __init__(self, game, description="", mass=0):
self.number = Item.number
Item.number += 1
        self.effect = None
        if mass == 0.0:
            mass = random.randint(1, 50)
        self.mass = mass
        self.description = description
        if self.description == "":
            self.description = random.choice(("helmet", "chestplate", "pants",
                "shoes", "potion of instant healing", "potion of strength",
                "potion of speed", "potion of regeneration", "gold", "sword",
                "bow", "arrows", "shield", "teleport pill"))
        if self.description == "teleport pill":
            self.effect = "teleport"
        game.items[self.number] = self  # add item into game dict
def info(self):
txt = "Item Number {}: ".format(self.number)
txt += self.description + "\n"
return txt
class Monster(object):
number = 0
def __init__(self, game, adjective="", description="", boss=False):
self.number = Monster.number
Monster.number += 1
game.monsters[self.number] = self # add monster into game dict
self.adjective = adjective
self.description = description
self.hitpoints = random.randint(5,15)
if description == "":
if boss:
self.adjective = random.choice((" deadly"," dangerous",
" creepy"," ugly"," killer"))
self.description = random.choice(("Unicorn","Cat",
"Teddy Bear","Hamster","Rabbit"))
self.hitpoints *= 5
else:
self.description = random.choice(("goblin","ork","troll",
"mice","rat","dwarf","cave drake"))
def info(self):
txt = "Monster number {}: {} {} with {} hitpoints\n".format(
self.number, self.adjective, self.description, self.hitpoints)
return txt
class Effect(object):
def __init__(self, g, effectname, description="", roomnumber=-1,
affectplayer = True, summonalies = 0, summonenemy = 0,
teleport = -1, summonitem = -1, destroyitem = 0,
highweight = 0, lowweight = 0, healplayer = 0,
damageplayer = 0, killenemy = 0):
self.effectname = effectname
self.roomnumber = roomnumber
self.description = description
self.affectplayer = affectplayer
self.summonalies = summonalies
self.sommonenemy = summonenemy
self.teleport = teleport
self.summonitem = summonitem
self.destroyitem = destroyitem
self.highweight = highweight
self.lowweight = lowweight
self.healplayer = healplayer
self.damageplayer = damageplayer
self.killenemy = killenemy
g.effects[self.effectname] = self
def action(self, g, p):
"""g = Game p = Player"""
        print("The effect does its job")
        if self.teleport != -1:
            while True:
                # dict keys are the room numbers; skip room 4 ("end of the world")
                target = random.choice(list(g.rooms))
                if target == 4:
                    continue
                break
            p.where = target
class Room(object):
number = 0
def __init__(self, game, description="", connections=[],
itemchances=[0.5,0.25,0.1],
monsterchances=[0.3,0.2,0.1,0.05],
bosschances=[0.0], explored=False):
"""need game instance"""
self.number = Room.number
game.rooms[self.number] = self # add room into game dict
Room.number += 1
self.explored = explored # True or False
self.description = description
self.connections = connections
self.itemchances = itemchances
self.monsterchances = monsterchances
self.bosschances = bosschances
self.effect = random.randint(1,100)
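        # Each entry in itemchances/monsterchances/bosschances is an
        # independent spawn probability, so a list of length n can yield up
        # to n items or monsters in this room.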
# create items
self.itemnumbers = [] # list of indexes of items in this room
#self.game = game
for chance in self.itemchances:
if random.random()< chance:
newItem = Item(game)
self.itemnumbers.append(newItem.number) # add reference
self.monsternumbers = [] # list of indexes of monsters in this room
for chance in self.monsterchances:
if random.random() < chance:
newMonster = Monster(game)
self.monsternumbers.append(newMonster.number) # add reference
for chance in self.bosschances:
if random.random() < chance:
newMonster = Monster(game,boss=True)
self.monsternumbers.append(newMonster.number) # add reference
def info(self, game):
"""return string with all information about this room"""
txt = "Room number {}: ".format(self.number)
txt += self.description + "\n"
        # items ?
        if len(self.itemnumbers) > 0:
            txt += "You see {} items here: \n".format(len(self.itemnumbers))
for i in self.itemnumbers:
txt += game.items[i].info()
else:
txt += "This room has no items\n"
# monsters ?
if len(self.monsternumbers) > 0:
txt +="You see {} monster(s) here:\n".format(len(self.monsternumbers))
for m in self.monsternumbers:
txt += game.monsters[m].info()
else:
txt += "No monsters in this room, fortunately.\n"
# doors
txt += "You see {} door(s).\n".format(len(self.connections))
txt += "\n"
return txt
# this function use print, replace later with gui commands
def output(txt):
"""can be later replaced by gui or graphical output"""
print(txt)
def select_number(list_of_numbers):
"""The player select *one* number of a list of numbers"""
answer = ""
while ((not answer.isdecimal()) or int(answer) not in list_of_numbers):
answer=input("Please type selected number and press ENTER: ")
return int(answer)
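# e.g. select_number([1, 2, 3]) keeps prompting until "1", "2" or "3" is typed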
def show_inventory(game, player):
output("\n\n")
output("==== Your inventory ====")
output("")
output("number, description, mass (kg)")
output("-------------------------")
output("")
tmpmass = 0.0
for i in player.inventory:
output("{}...{}...{}".format(i, game.items[i].description,
game.items[i].mass))
tmpmass += game.items[i].mass
output("")
output("You're currently carrying {} kg, that is {:.2f}% of your capacity".format(
tmpmass, (tmpmass / player.maxcarry)*100))
def drop_item(game, player):
for i in player.inventory:
output("")
print(i,"...........",game.items[i].description)
output("")
output("Please type selected number and press ENTER")
try:
selection=int(input(">>> "))
except:
output("")
output("Wrong input")
return
if len(player.inventory) > 0:
# drop chosen item in inventory
player.inventory.remove(selection)
game.rooms[player.where].itemnumbers.append(selection)
def pickup_item(game, player):
for i in game.rooms[player.where].itemnumbers:
output("")
print(i,"...........",game.items[i].description)
output("")
output("Please type selected number and press ENTER")
try:
selection=int(input(">>> "))
except:
output("")
output("Wrong input")
return
# can player carry this item
tmpmass=0
for j in player.inventory:
tmpmass += game.items[j].mass
if tmpmass + game.items[selection].mass < player.maxcarry:
if len(game.rooms[player.where].itemnumbers) > 0:
# pick up chosen item in this room
player.inventory.append(selection)
game.rooms[player.where].itemnumbers.remove(selection)
else:
output("")
output("You're carrying too much!")
output("You have to drop something to carry more stuff!")
def use_item(game, player):
for i in player.inventory:
output("")
print(i,"...........",game.items[i].description)
output("")
output("Please type selected number and press ENTER")
try:
selection=int(input(">>> "))
except:
output("")
output("Wrong input")
return
if len(player.inventory) > 0:
# use chosen item in inventory
player.inventory.remove(selection)
#game.rooms[player.where].itemnumbers.append(selection)
# this funciton use input, replace later with gui command
def nextAction(game, player):
"""ask the user to select only one of many options"""
    output("What do you want to do today?")
connections = game.rooms[player.where].connections
names = [] # temp list of room names
for c in connections:
if game.rooms[c].explored:
names.append(game.rooms[c].description)
else:
names.append("unknown room")
output("0.........other actions")
for d in enumerate(names, 1): # make list of tuples, start with 1
output("{}.........{}".format(d[0], d[1]))
#answer = ""
#while ((not answer.isdecimal()) or (int(answer) < 0) or
# (int(answer) > len(connections))):
# answer = input("please type number and press ENTER:>")
answer = select_number(range(len(names)+1))
if answer != 0:
return connections[int(answer)-1] # return new room number
# other menu options, player remain in same room
output("")
output("What do you want to do today?")
actions = {"d":"drop item",
"i":"inspect inventory",
"p":"pick up item",
"u":"use item",
"c":"cancel"}
for a in actions:
output("{}....{}".format(a, actions[a]))
answer = ""
while answer not in actions:
answer = input("please type selected letter and press ENTER: ")
if answer == "i":
show_inventory(game, player)
elif answer == "d":
drop_item(game, player)
elif answer == "p":
pickup_item(game, player)
elif answer == "u":
use_item(game, player)
return player.where # return the same room number
# create a game instance
g = Game()
# add rooms with description and connections.
# Each room will have a unique number and add himself to game
# room number 0
Room(g,"starting lobby", [1, 4], explored = True)
# room number 1
Room(g,"first room", [0,2,6])
# room number 2
Room(g,"storage room", [1,5,7])
# room number 3
# the boss room has 1 to 6 minions and 1 to 3 bosses
Room(g,"boss chamber", [6], monsterchances=[1.0,0.9,0.8,0.5,0.5,0.5],
bosschances = [1.0,0.15,0.05])
# room number 4
Room(g,"end of the world (game over)", [], explored=True)
# room number 5
Room(g,"npc room", [2,9,10])
# room number 6
Room(g,"gear room", [1,3,10])
# room number 7
Room(g,"trader", [2,5,8])
# room number 8
Room(g,"enemy room", [3,7], monsterchances=[1.0,1.0,1.0,0.9,0.7,0.5,0.2])
# room number 9
Room(g,"empty room", [5,12], itemchances=[])
# room number 10
Room(g,"mini boss", [5,6], monsterchances=[1.0,0.5,0.5,0.5], bosschances = [0.5])
# room number 11
Room(g,"random room", [10,12])
#room number 12
Room(g,"random room", [11,9])
# items
i=Item(g,"potion of instant healing",mass=0.25)
g.rooms[6].itemnumbers.append(i.number) # puts item i in room 6
# you can use another item for i
i=Item(g,"wheel of cheese",mass=0.50)
g.rooms[2].itemnumbers.append(i.number)
# add effects
e = Effect(g,"teleport",teleport=1)
e.description = "You wake up in a strange room"
# start player in lobby (room 0)
# where = 0 # the actual room number
p = Player(g, where=0) # create player in room 0
# main loop
while len(g.rooms[p.where].connections) > 0:
if not g.rooms[p.where].explored:
output("You explore a new room!")
g.rooms[p.where].explored = True # explore this room
output("\n\nYou are now here:\n\n{}".format(g.rooms[p.where].info(g)))
p.where = nextAction(g, p)
output("\n"*1)
output("\n"*1)
output("Thank you for playing. Have a nice real life")
|
gpl-2.0
| -631,206,587,626,773,600 | 34.260054 | 86 | 0.577783 | false |
lukas-bednar/jenkins-job-builder
|
tests/cmd/subcommands/test_update.py
|
1
|
6946
|
# Joint copyright:
# - Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import six
from jenkins_jobs import builder
from jenkins_jobs import cmd
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.Jenkins.get_plugins_info', mock.MagicMock)
class UpdateTests(CmdTestsBase):
@mock.patch('jenkins_jobs.cmd.Builder.update_jobs')
def test_update_jobs(self, update_jobs_mock):
"""
Test update_job is called
"""
# don't care about the value returned here
update_jobs_mock.return_value = ([], 0)
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = self.parser.parse_args(['update', path])
cmd.execute(args, self.config)
update_jobs_mock.assert_called_with([path], [], n_workers=mock.ANY)
@mock.patch('jenkins_jobs.builder.Jenkins.is_job', return_value=True)
@mock.patch('jenkins_jobs.builder.Jenkins.get_jobs')
@mock.patch('jenkins_jobs.builder.Jenkins.get_job_md5')
@mock.patch('jenkins_jobs.builder.Jenkins.update_job')
def test_update_jobs_decode_job_output(self, update_job_mock,
get_job_md5_mock, get_jobs_mock,
is_job_mock):
"""
Test that job xml output has been decoded before attempting to update
"""
# don't care about the value returned here
update_job_mock.return_value = ([], 0)
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = self.parser.parse_args(['update', path])
cmd.execute(args, self.config)
self.assertTrue(isinstance(update_job_mock.call_args[0][1],
six.text_type))
@mock.patch('jenkins_jobs.builder.Jenkins.is_job', return_value=True)
@mock.patch('jenkins_jobs.builder.Jenkins.get_jobs')
@mock.patch('jenkins_jobs.builder.Builder.delete_job')
@mock.patch('jenkins_jobs.cmd.Builder')
def test_update_jobs_and_delete_old(self, builder_mock, delete_job_mock,
get_jobs_mock, is_job_mock):
"""
Test update behaviour with --delete-old option
Test update of jobs with the --delete-old option enabled, where only
some jobs result in has_changed() to limit the number of times
update_job is called, and have the get_jobs() method return additional
jobs not in the input yaml to test that the code in cmd will call
delete_job() after update_job() when '--delete-old' is set but only
for the extra jobs.
"""
# set up some test data
jobs = ['old_job001', 'old_job002']
extra_jobs = [{'name': name} for name in jobs]
builder_obj = builder.Builder('http://jenkins.example.com',
'doesnot', 'matter',
plugins_list={})
# get the instance created by mock and redirect some of the method
# mocks to call real methods on a the above test object.
b_inst = builder_mock.return_value
b_inst.plugins_list = builder_obj.plugins_list
b_inst.update_jobs.side_effect = builder_obj.update_jobs
b_inst.delete_old_managed.side_effect = builder_obj.delete_old_managed
def _get_jobs():
return builder_obj.parser.jobs + extra_jobs
get_jobs_mock.side_effect = _get_jobs
# override cache to ensure Jenkins.update_job called a limited number
# of times
self.cache_mock.return_value.has_changed.side_effect = (
[True] * 2 + [False] * 2)
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = self.parser.parse_args(['update', '--delete-old', path])
with mock.patch('jenkins_jobs.builder.Jenkins.update_job') as update:
with mock.patch('jenkins_jobs.builder.Jenkins.is_managed',
return_value=True):
cmd.execute(args, self.config)
self.assertEqual(2, update.call_count,
"Expected Jenkins.update_job to be called '%d' "
"times, got '%d' calls instead.\n"
"Called with: %s" % (2, update.call_count,
update.mock_calls))
calls = [mock.call(name) for name in jobs]
self.assertEqual(2, delete_job_mock.call_count,
"Expected Jenkins.delete_job to be called '%d' "
"times got '%d' calls instead.\n"
"Called with: %s" % (2, delete_job_mock.call_count,
delete_job_mock.mock_calls))
delete_job_mock.assert_has_calls(calls, any_order=True)
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins')
def test_update_timeout_not_set(self, jenkins_mock):
"""Check that timeout is left unset
Test that the Jenkins object has the timeout set on it only when
provided via the config option.
"""
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = self.parser.parse_args(['update', path])
with mock.patch('jenkins_jobs.cmd.Builder.update_job') as update_mock:
update_mock.return_value = ([], 0)
cmd.execute(args, self.config)
# unless the timeout is set, should only call with 3 arguments
# (url, user, password)
self.assertEqual(len(jenkins_mock.call_args[0]), 3)
@mock.patch('jenkins_jobs.builder.jenkins.Jenkins')
def test_update_timeout_set(self, jenkins_mock):
"""Check that timeout is set correctly
Test that the Jenkins object has the timeout set on it only when
provided via the config option.
"""
path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
args = self.parser.parse_args(['update', path])
self.config.set('jenkins', 'timeout', '0.2')
with mock.patch('jenkins_jobs.cmd.Builder.update_job') as update_mock:
update_mock.return_value = ([], 0)
cmd.execute(args, self.config)
# when timeout is set, the fourth argument to the Jenkins api init
# should be the value specified from the config
self.assertEqual(jenkins_mock.call_args[0][3], 0.2)
|
apache-2.0
| 6,729,732,295,201,519,000 | 42.685535 | 78 | 0.610135 | false |
Spoken-tutorial/spoken-website
|
impersonate/decorators.py
|
1
|
1214
|
# -*- coding: utf-8 -*-
import django
from django.conf import settings
from django.utils.http import urlquote
from django.utils.encoding import force_str
from django.shortcuts import redirect, resolve_url
from django.contrib.auth import REDIRECT_FIELD_NAME
from .helpers import get_redir_path, check_allow_impersonate, is_authenticated
def get_login_url():
return force_str(resolve_url(settings.LOGIN_URL))
def allowed_user_required(view_func):
def _checkuser(request, *args, **kwargs):
if not is_authenticated(request.user):
return redirect(u'{0}?{1}={2}'.format(
get_login_url(),
REDIRECT_FIELD_NAME,
urlquote(request.get_full_path()),
))
if getattr(request.user, 'is_impersonate', False):
# Do not allow an impersonated session to use the
# impersonate views.
return redirect(get_redir_path())
if check_allow_impersonate(request):
# user is allowed to impersonate
return view_func(request, *args, **kwargs)
else:
# user not allowed impersonate at all
return redirect(get_redir_path())
return _checkuser
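# Illustrative usage (the view name is hypothetical):
#
#   @allowed_user_required
#   def impersonate_start(request, uid):
#       ...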
|
gpl-3.0
| -5,703,036,976,990,993,000 | 31.810811 | 78 | 0.638386 | false |
PinMeTo/big-zelda-kegerator
|
measuring/measur.py
|
1
|
5542
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Author: Andreas Älveborn
# URL: https://github.com/aelveborn/Wii-Scale
#
# This file is part of Wii-Scale
# Copyright (C) 2015 Andreas Älveborn
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import wiiboard
import time
import sys
import getopt
from bluetooth import *
from socketIO_client import SocketIO, LoggingNamespace
# Global
board = None
sleep = True
sensitivity = 0.1 #kg
calibrate = 0 #kg
config_address = None
port = 3001
host = "localhost"
class CalculateWeight:
def formatWeight(self, weight):
return round(weight, 2)
    def weight(self, data):
        global calibrate
        # average all collected readings, then apply the calibration offset
        total = sum(data) / len(data)
        total = total + calibrate
        return self.formatWeight(total)
class WebSocketIO:
def __init__(self):
global host
global port
self.socketIO = SocketIO(host, port, LoggingNamespace)
self.socketIO.on('wiiscale-connect', self.receive_connect)
self.socketIO.on('wiiscale-disconnect', self.receive_disconnect)
def wait(self):
print "wait"
self.socketIO.wait(seconds = 1)
def send_status(self, status):
print status
self.socketIO.emit('wiiscale-status', {'status': status})
def send_weight(self, keg1, keg2, keg3):
print "keg1 %.2f, keg2 %.2f, keg3 %.2f" % (keg1, keg2, keg3)
self.socketIO.emit('wiiscale-weight', { 'keg1': keg1,
'keg2':keg2,
'keg3':keg3})
# def send_connection_status(self, status):
# self.socketIO.emit('wiiscale-connection', {'status': status})
# Accepts True or False as argument
def receive_connect(self):
global sleep
sleep = False
def receive_disconnect(self):
global board
global sleep
sleep = True
board.disconnect()
def options(argv):
try:
opts, args = getopt.getopt(argv, "h:p:c:a:", ["host=", "port=", "calibrate=", "address="])
except getopt.GetoptError:
print "wii-scale.py -h <host> -p <port> -c <calibration kg> -a <mac-addres>"
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--host"):
global host
if arg:
host = arg.strip()
elif opt in ("-p", "--port"):
global port
try:
port = int(arg)
except:
pass
elif opt in ("-c", "--calibrate"):
global calibrate
try:
calibrate = int(arg)
except:
pass
elif opt in ("-a", "--address"):
global config_address
if arg:
config_address = arg.strip()
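# Example invocation (address and calibration values are illustrative):
#   ./measur.py --host localhost --port 3001 -c 2 -a 00:11:22:33:44:55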
def main(argv):
options(argv)
print "Wii-Scale started"
global sleep
global port
global config_address
global calibrate
global board
ready = False
sleep = False
connected = False
calculate = CalculateWeight()
socket = WebSocketIO()
board = wiiboard.Wiiboard()
# time.sleep(10)
# Scale
while(True):
# Check if connection status changed
if connected is not board.isConnected():
connected = board.isConnected()
if connected:
socket.send_status("CONNECTED")
else:
socket.send_status("DISCONNECTED")
#Turn off lights
time.sleep(0.1) # This is needed for wiiboard.py
board.setLight(False)
# Waiting for disconnect/sleep command
# socket.wait()
if sleep:
time.sleep(10)
#continue
# Reset
done = False
keg1 = []
keg2 = []
keg3 = []
firstStep = True
skipReadings = 5
# Connect to balance board
if not board.isConnected():
# Re initialize each run due to bug in wiiboard
# Note: Seems to be working though :/
board = wiiboard.Wiiboard()
socket.send_status("SYNC")
if not config_address:
address = board.discover()
else:
address = config_address
if not address:
sleep = True
socket.send_status("NO DEVICE FOUND")
continue
socket.send_status("CONNECTING")
board.connect(address)
if board.isConnected():
connected = True
socket.send_status("CONNECTED")
#Board is connected and ready
if board.isConnected():
# Post ready status once
if not ready:
ready = True
time.sleep(0.1) # This is needed for wiiboard.py
board.setLight(True)
socket.send_status("READY")
#Measure weight
if board.mass.totalWeight > sensitivity:
while(not done):
time.sleep(0.1)
if firstStep:
firstStep = False
socket.send_status("MEASURING")
# Skips the first readings when the user steps on the balance board
skipReadings -= 1
if(skipReadings < 0):
keg1.append(board.mass.topLeft)
keg2.append(board.mass.bottomRight)
keg3.append(board.mass.bottomLeft)
#socket.send_weight(board.mass.topLeft)
#socket.send_weight(calculate.weight(keg1), calculate.weight(keg2), calculate.weight(keg3))
socket.send_weight(board.mass.topLeft, board.mass.bottomLeft, board.mass.bottomRight)
if board.mass.totalWeight <= sensitivity and not firstStep:
done = True
socket.send_status("DONE")
ready = False
if __name__ == "__main__":
main(sys.argv[1:])
|
mit
| -8,745,683,553,835,731,000 | 22.574468 | 97 | 0.677256 | false |
rgayon/plaso
|
tests/parsers/syslog.py
|
1
|
8396
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the syslog parser."""
from __future__ import unicode_literals
import unittest
from plaso.parsers import syslog
from tests.parsers import test_lib
class SyslogParserTest(test_lib.ParserTestCase):
"""Tests for the syslog parser."""
def testParseRsyslog(self):
"""Tests the Parse function on a rsyslog file."""
parser = syslog.SyslogParser()
knowledge_base_values = {'year': 2020}
storage_writer = self._ParseFile(
['syslog_rsyslog'], parser,
knowledge_base_values=knowledge_base_values)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 5)
def testParseRsyslogTraditional(self):
"""Tests the Parse function on a traditional rsyslog file."""
parser = syslog.SyslogParser()
knowledge_base_values = {'year': 2016}
storage_writer = self._ParseFile(
['syslog_rsyslog_traditional'], parser,
knowledge_base_values=knowledge_base_values)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 8)
events = list(storage_writer.GetSortedEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2016-01-22 07:54:32.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.data_type, 'syslog:line')
self.assertEqual(event_data.hostname, 'myhostname.myhost.com')
self.assertEqual(event_data.reporter, 'Job')
self.assertIsNone(event_data.severity)
def testParseDarwin(self):
"""Tests the Parse function on an Darwin-style syslog file."""
parser = syslog.SyslogParser()
knowledge_base_values = {'year': 2016}
storage_writer = self._ParseFile(
['syslog_osx'], parser,
knowledge_base_values=knowledge_base_values)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 2)
def testParseChromeOS(self):
"""Tests the Parse function."""
parser = syslog.SyslogParser()
knowledge_base_values = {'year': 2016}
storage_writer = self._ParseFile(
['syslog_chromeos'], parser,
knowledge_base_values=knowledge_base_values)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 8)
events = list(storage_writer.GetSortedEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2016-10-25 19:37:23.297265')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, 'periodic_scheduler')
self.assertEqual(event_data.severity, 'INFO')
expected_message = (
'INFO [periodic_scheduler, pid: 13707] cleanup_logs: job completed')
self._TestGetMessageStrings(
event_data, expected_message, expected_message)
event = events[2]
self.CheckTimestamp(event.timestamp, '2016-10-25 19:37:24.987014')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, 'kernel')
self.assertEqual(event_data.severity, 'DEBUG')
# Testing year increment.
event = events[4]
self.CheckTimestamp(event.timestamp, '2016-10-25 19:37:24.993079')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, 'kernel')
self.assertEqual(event_data.severity, 'DEBUG')
event = events[6]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, 'kernel')
self.assertEqual(event_data.severity, 'ERR')
event = events[7]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, 'aprocess')
self.assertEqual(event_data.severity, 'INFO')
expected_message = (
'INFO [aprocess] [ 316.587330] cfg80211: This is a multi-line\t'
'message that screws up many syslog parsers.')
expected_short_message = (
'INFO [aprocess] [ 316.587330] cfg80211: This is a multi-line\t'
'message that sc...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
def testParse(self):
"""Tests the Parse function."""
parser = syslog.SyslogParser()
knowledge_base_values = {'year': 2012}
storage_writer = self._ParseFile(
['syslog'], parser, knowledge_base_values=knowledge_base_values)
self.assertEqual(storage_writer.number_of_warnings, 1)
self.assertEqual(storage_writer.number_of_events, 16)
events = list(storage_writer.GetSortedEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2012-01-22 07:52:33.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.data_type, 'syslog:line')
self.assertEqual(event_data.hostname, 'myhostname.myhost.com')
self.assertEqual(event_data.reporter, 'client')
self.assertIsNone(event_data.severity)
expected_message = (
'[client, pid: 30840] INFO No new content in ímynd.dd.')
self._TestGetMessageStrings(
event_data, expected_message, expected_message)
event = events[6]
self.CheckTimestamp(event.timestamp, '2012-02-29 01:15:43.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, '---')
self.assertIsNone(event_data.severity)
# Testing year increment.
event = events[9]
self.CheckTimestamp(event.timestamp, '2013-03-23 23:01:18.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(
event_data.body,
'This syslog message has a fractional value for seconds.')
self.assertEqual(event_data.reporter, 'somrandomexe')
self.assertIsNone(event_data.severity)
event = events[11]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, '/sbin/anacron')
self.assertIsNone(event_data.severity)
event = events[10]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.reporter, 'aprocess')
self.assertIsNone(event_data.severity)
expected_message = (
'[aprocess, pid: 10100] This is a multi-line message that screws up'
'\tmany syslog parsers.')
expected_short_message = (
'[aprocess, pid: 10100] This is a multi-line message that screws up'
'\tmany syslo...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[14]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertIsNone(event_data.hostname)
self.assertEqual(event_data.reporter, 'kernel')
self.assertIsNone(event_data.severity)
expected_message = (
'[kernel] [997.390602] sda2: rw=0, want=65, limit=2')
expected_short_message = (
'[kernel] [997.390602] sda2: rw=0, want=65, limit=2')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
# Testing non-leap year.
parser = syslog.SyslogParser()
knowledge_base_values = {'year': 2013}
storage_writer = self._ParseFile(
['syslog'], parser,
knowledge_base_values=knowledge_base_values)
self.assertEqual(storage_writer.number_of_warnings, 2)
self.assertEqual(storage_writer.number_of_events, 15)
def testParseWithTimeZone(self):
"""Tests the Parse function with a time zone."""
parser = syslog.SyslogParser()
knowledge_base_values = {'year': 2016}
storage_writer = self._ParseFile(
['syslog_rsyslog_traditional'], parser,
knowledge_base_values=knowledge_base_values, timezone='CET')
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 8)
events = list(storage_writer.GetSortedEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2016-01-22 06:54:32.000000')
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.assertEqual(event_data.data_type, 'syslog:line')
self.assertEqual(event_data.hostname, 'myhostname.myhost.com')
self.assertEqual(event_data.reporter, 'Job')
self.assertIsNone(event_data.severity)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| 6,114,675,966,977,430,000 | 33.547325 | 76 | 0.694937 | false |
gallandarakhneorg/autolatex
|
plugins/gedit3/autolatexeditor/utils/gedit_runner.py
|
1
|
4936
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# autolatex/utils/gedit_runner.py
# Copyright (C) 2013-14 Stephane Galland <galland@arakhne.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from gi.repository import GObject, Gtk, Gedit
from autolatex.utils import runner as autolatex_runner
# List of all the runners
_all_runners = []
def kill_all_runners():
autolatex_runner.kill_all_runners()
class Runner(autolatex_runner.Listener):
def __init__(self, caller, label, show_progress, directory, directive, params):
autolatex_runner.Listener.__init__(self)
self._caller = caller
self._info_bar_label = label
self._show_progress = bool(show_progress)
self._gedit_tab = None
self._info_bar = None
self._sig_info_bar_response = 0
self._sig_info_bar_remove = 0
self._automatic_bar_creation = False
self._last_fraction = 0
self._last_comment = None
self._thread = autolatex_runner.Runner(self, directory, directive, params)
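        # This Runner is also the autolatex_runner.Listener for the worker
        # thread created above: progress arrives via the on_runner_* callbacks
        # and UI updates are marshalled onto the GTK main loop with idle_add.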
def start(self):
if self._thread:
self._thread.start()
def cancel(self):
if self._thread:
self._thread.cancel()
self._thread = None
if self._info_bar:
self._info_bar.set_response_sensitive(
Gtk.ResponseType.CANCEL,
False)
def get_runner_progress(self):
return self._show_progress and self._info_bar_label is not None
def on_runner_add_ui(self):
self._gedit_tab = self._caller.window.get_active_tab()
GObject.idle_add(self._add_info_bar)
def on_runner_remove_ui(self):
GObject.idle_add(self._hide_info_bar)
def on_runner_progress(self, amount, comment):
GObject.idle_add(self._update_info_bar, amount, comment)
def on_runner_finalize_execution(self, retcode, output, latex_warnings):
self._automatic_bar_creation = False
GObject.idle_add(self._caller._update_action_validity,
True, output, latex_warnings)
def _add_info_bar(self):
if self._gedit_tab:
self._info_bar = Gedit.ProgressInfoBar()
self._info_bar.set_stock_image(Gtk.STOCK_EXECUTE)
self._info_bar.set_text(self._info_bar_label)
self._sig_info_bar_response = self._info_bar.connect(
"response",
self._on_cancel_action)
self._sig_info_bar_remove = self._info_bar.connect(
"parent-set",
self._on_parent_remove_action)
self._gedit_tab.set_info_bar(self._info_bar)
self._info_bar.show()
            self._gedit_tab.grab_focus()
def _hide_info_bar(self):
if self._info_bar:
self._info_bar.hide()
            self._info_bar.disconnect(self._sig_info_bar_response)
            self._info_bar.disconnect(self._sig_info_bar_remove)
            self._info_bar.destroy()
            self._info_bar = None
            self._gedit_tab.grab_focus()
def _on_cancel_action(self, widget, response, data=None):
if response == Gtk.ResponseType.CANCEL:
self.cancel()
def _on_parent_remove_action(self, widget, oldParent=None, data=None):
        # The progress bar was removed by another info bar
        bar = self._info_bar
        if bar and bar.get_parent() is None:
self._hide_info_bar()
self._automatic_bar_creation = True
GObject.idle_add(self._update_info_bar,
self._last_fraction, self._last_comment)
def __has_info_child(self):
if self._gedit_tab:
for child in self._gedit_tab.get_children():
if isinstance(child, Gtk.InfoBar):
return True # Search says: has info bar
return False # Search says: no info bar
return True # Assume that the panel is inside
def _update_info_bar(self, progress_value, comment):
#print "MOVE TO "+str(progress_value)+"/"+str(comment)
self._last_fraction = progress_value
self._last_comment = comment
if self._automatic_bar_creation and not self._info_bar and not self.__has_info_child():
self._automatic_bar_creation = False
GObject.idle_add(self._add_info_bar)
GObject.idle_add(self.__set_info_bar_data, progress_value, comment)
def __set_info_bar_data(self, progress_value, comment):
#print "MOVE TO "+str(progress_value)+"/"+str(comment)
if self._info_bar:
self._info_bar.set_fraction(progress_value)
if comment:
self._info_bar.set_text(comment)
self._info_bar.show()
|
gpl-2.0
| -5,963,256,746,123,358,000 | 34.510791 | 91 | 0.674635 | false |
Q-Leap-Networks/pyside-segfault
|
wizard.py
|
1
|
3656
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import argparse
from Qt import QtGui, QtCore, loadUi
from QtCore import Slot
from HWWizard import HWWizard
class Host:
def __init__(self, name, autodetect):
self.name = name
self.autodetect = autodetect
class Name:
def __init__(self, name):
self.name = name
class PropSet:
def __init__(self, name_id, feature_id):
self.name_id = name_id
self.feature_id = feature_id
class Prop:
def __init__(self, name_id, val):
self.name_id = name_id
self.val = val
class MainWindow(QtGui.QMainWindow):
def __init__(self, parent=None):
super().__init__(parent)
loadUi("MainWindow.ui", self)
self.hosts = {
1: Host("beo-01", {"# CPU cores": "1",
"# CPU sockets": "1",
"Size of RAM (GB)": "1",
"IB Adapter": "True",
"IPMI Adapter": "False"}),
2: Host("beo-02", {"# CPU cores": "1",
"# CPU sockets": "1",
"Size of RAM (GB)": "1",
"IB Adapter": "True",
"IPMI Adapter": "False"}),
3: Host("beo-03", {"# CPU cores": "1",
"# CPU sockets": "1",
"Size of RAM (GB)": "1",
"IB Adapter": "True",
"IPMI Adapter": "False"}),
4: Host("beo-04", {"# CPU cores": "1",
"# CPU sockets": "1",
"Size of RAM (GB)": "2",
"IB Adapter": "True",
"IPMI Adapter": "False"}),
5: Host("beo-05", {"# CPU cores": "1",
"# CPU sockets": "1",
"Size of RAM (GB)": "2",
"IB Adapter": "True",
"IPMI Adapter": "False"}),
}
self.hw_prop_set_names = {
1: Name("Existing"),
2: Name("Similar"),
3: Name("Bad"),
}
self.hw_prop_names = {
1: Name("# CPU cores"),
2: Name("# CPU sockets"),
3: Name("Size of RAM (GB)"),
4: Name("IB Adapter"),
5: Name("IPMI Adapter"),
6: Name("Other"),
}
self.hw_props = {
1: Prop(1, "1"),
2: Prop(2, "1"),
3: Prop(3, "1"),
4: Prop(4, "True"),
5: Prop(5, "False"),
6: Prop(6, "something"),
7: Prop(1, "2"),
}
self.hw_prop_sets = {
1: PropSet(1, 1),
2: PropSet(1, 2),
3: PropSet(1, 3),
4: PropSet(1, 4),
5: PropSet(1, 5),
6: PropSet(2, 1),
7: PropSet(2, 2),
8: PropSet(2, 3),
9: PropSet(2, 4),
10: PropSet(2, 5),
11: PropSet(2, 6),
12: PropSet(3, 7),
13: PropSet(3, 2),
14: PropSet(3, 3),
15: PropSet(3, 4),
16: PropSet(3, 5),
}
@Slot()
def on_wizardButton_clicked(self):
wiz = HWWizard(self)
wiz.exec_()
def main():
app = QtGui.QApplication(sys.argv)
app.setOrganizationName("test")
app.setOrganizationDomain("test")
app.setApplicationName("test")
w = MainWindow()
w.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
gpl-2.0
| -8,578,192,647,075,169,000 | 29.722689 | 57 | 0.390591 | false |
fle-internal/content-curation
|
contentcuration/contentcuration/utils/celery_signals.py
|
1
|
2657
|
import ast
import logging
import os
import traceback
from celery.signals import after_task_publish, task_failure, task_success
from celery.utils.log import get_task_logger
from django.core.exceptions import ObjectDoesNotExist
from contentcuration.models import Task, User
# because Celery connects signals upon import, we don't want to put signals into other modules that may be
# imported multiple times. Instead, we follow the advice here and use AppConfig.init to import the module:
# https://stackoverflow.com/questions/7115097/the-right-place-to-keep-my-signals-py-file-in-a-django-project/21612050#21612050
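# Illustrative AppConfig hookup (class and module names are hypothetical):
#
#   class ContentCurationConfig(AppConfig):
#       name = "contentcuration"
#
#       def ready(self):
#           import contentcuration.utils.celery_signals  # noqa: F401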
logger = get_task_logger(__name__)
@after_task_publish.connect
def before_start(sender, headers, body, **kwargs):
"""
Create a Task object before the task actually started,
set the task object status to be PENDING, with the signal
after_task_publish to indicate that the task has been
sent to the broker.
"""
task_id = headers["id"]
options = ast.literal_eval(headers["kwargsrepr"])
# We use the existence of the task_type kwarg to know if it's an async task.
    if "task_type" not in options:
        return
Task.objects.filter(task_id=task_id).update(status="PENDING")
logger.info("Task object {} updated with status PENDING.".format(task_id))
@task_failure.connect
def on_failure(sender, **kwargs):
try:
task = Task.objects.get(task_id=sender.request.id)
task.status = "FAILURE"
exception_data = {
'task_args': kwargs['args'],
'task_kwargs': kwargs['kwargs'],
'traceback': traceback.format_tb(kwargs['traceback'])
}
task.metadata['error'] = exception_data
task.save()
except ObjectDoesNotExist:
pass # If the object doesn't exist, that likely means the task was created outside of create_async_task
@task_success.connect
def on_success(sender, result, **kwargs):
try:
logger.info("on_success called, process is {}".format(os.getpid()))
task_id = sender.request.id
task = Task.objects.get(task_id=task_id)
        task.status = "SUCCESS"
task.metadata['result'] = result
# We're finished, so go ahead and record 100% progress so that getters expecting it get a value
# even though there is no longer a Celery task to query.
if task.is_progress_tracking:
task.metadata['progress'] = 100
task.save()
logger.info("Task with ID {} succeeded".format(task_id))
except ObjectDoesNotExist:
pass # If the object doesn't exist, that likely means the task was created outside of create_async_task
|
mit
| -5,897,802,746,775,080,000 | 38.073529 | 126 | 0.687618 | false |
kbussell/django-docusign
|
setup.py
|
1
|
2412
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python packaging."""
import os
import sys
from setuptools import setup
#: Absolute path to directory containing setup.py file.
here = os.path.abspath(os.path.dirname(__file__))
#: Boolean, ``True`` if environment is running Python version 2.
IS_PYTHON2 = sys.version_info[0] == 2
NAME = 'django-docusign'
DESCRIPTION = 'Django application for DocuSign signature SAAS platform.'
README = open(os.path.join(here, 'README.rst')).read()
VERSION = open(os.path.join(here, 'VERSION')).read().strip()
AUTHOR = u'Benoît Bryon'
EMAIL = u'novafloss@people-doc.com'
LICENSE = 'BSD'
URL = 'https://{name}.readthedocs.io/'.format(name=NAME)
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
]
KEYWORDS = [
'docusign',
'signature',
'backend',
'pydocusign',
'django-anysign',
]
PACKAGES = [NAME.replace('-', '_')]
REQUIREMENTS = [
'Django>=1.8,<1.10',
'django-anysign>=1.0',
'pydocusign>=0.13.1,<1.0',
'setuptools',
]
if IS_PYTHON2:
REQUIREMENTS.append('mock')
ENTRY_POINTS = {}
TEST_REQUIREMENTS = []
CMDCLASS = {}
SETUP_REQUIREMENTS = [
'setuptools'
]
# Tox integration.
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
"""Test command that runs tox."""
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import tox # import here, cause outside the eggs aren't loaded.
errno = tox.cmdline(self.test_args)
sys.exit(errno)
TEST_REQUIREMENTS.append('tox')
CMDCLASS['test'] = Tox
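# With the command class registered above, `python setup.py test` delegates
# the whole run to tox (illustrative; assumes tox is installed).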
if __name__ == '__main__': # Do not run setup() when we import this module.
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=README,
classifiers=CLASSIFIERS,
keywords=' '.join(KEYWORDS),
author=AUTHOR,
author_email=EMAIL,
url=URL,
license=LICENSE,
packages=PACKAGES,
include_package_data=True,
zip_safe=False,
install_requires=REQUIREMENTS,
entry_points=ENTRY_POINTS,
tests_require=TEST_REQUIREMENTS,
cmdclass=CMDCLASS,
setup_requires=SETUP_REQUIREMENTS,
)
|
bsd-3-clause
| 1,158,806,017,354,079,000 | 24.378947 | 76 | 0.636665 | false |
pastebt/yeast
|
testcase/test_acore.py
|
1
|
4379
|
import sys
import unittest
from StringIO import StringIO
sys.path.append('../yeast')
import ahttp
import acore
class FAKE_USER(acore.Acore):
def read_all(self, arw, size=0, seps=()):
for y in arw.aread(size, seps):
yield y
class FAKE(acore.ARW):
def __init__(self, data):
acore.ARW.__init__(self, user=FAKE_USER())
self.fileno = 4
self.src = iter(data)
def _read(self):
try:
return next(self.src)
except StopIteration:
self.sep_hit = acore.EOR()
return ''
#class FAKE(ahttp.AHTTP):
# def __init__(self, data):
# self.src = iter(data)
# self.rdata, self.sep = '', ''
# self.sock = FakeSock()
#
# def read(self, sock):
# return next(self.src)
#
# def get_http(self, sock, body_size_limit=0):
# for y in self.get_http_head(sock):
# yield y
# for y in self.get_http_body(sock, body_size_limit):
# yield y
class TestARW(unittest.TestCase):
def test_read_all1(self):
f = FAKE(("1234\r\n56",))
ret = [y for y in f.read_all(seps=('\n',))]
#print ret
self.assertEqual(f.request_data, '1234\r\n')
self.assertEqual(f._data_buf, '56')
def test_read_all2(self):
f = FAKE(("1234\r\n56",))
f._data_buf = '0'
ret = [y for y in f.read_all(seps=('\n',))]
self.assertEqual(f.request_data, '01234\r\n')
self.assertEqual(f._data_buf, '56')
def test_read_all3(self):
f = FAKE(("1234\r\n56",))
f._data_buf = 'abc\r\naa'
ret = [y for y in f.read_all(seps=('\n',))]
self.assertEqual(f.request_data, 'abc\r\n')
self.assertEqual(f._data_buf, 'aa')
def test_read_all4(self):
f = FAKE(("\n1234\r\n56",))
f._data_buf = 'abc\r'
ret = [y for y in f.read_all(seps=('\r\n',))]
self.assertEqual(f.request_data, 'abc\r\n')
self.assertEqual(f._data_buf, '1234\r\n56')
def test_read_all5(self):
f = FAKE(("\n1234\r\n56",))
f._data_buf = 'abc'
ret = [y for y in f.read_all(seps=('\r\n',))]
self.assertEqual(f.request_data, 'abc\n1234\r\n')
self.assertEqual(f._data_buf, '56')
def test_read_all6(self):
f = FAKE(("\r\n1234\r\n56",))
f._data_buf = 'abc'
ret = [y for y in f.read_all(seps=('\r\n',))]
self.assertEqual(f.request_data, 'abc\r\n')
self.assertEqual(f._data_buf, '1234\r\n56')
def test_read_all7(self):
# Winner's index is smaller
f = FAKE(("\n1234\r\n56",))
f._data_buf = 'abc'
ret = [y for y in f.read_all(seps=('\r\n', '\n'))]
self.assertEqual(f.request_data, 'abc\n')
self.assertEqual(f._data_buf, '1234\r\n56')
def test_read_all8(self):
f = FAKE(("\n1234\r\n56",))
f._data_buf = 'abc'
ret = [y for y in f.read_all(size=2, seps=('\r\n', '\n'))]
self.assertEqual(f.request_data, 'ab')
self.assertEqual(f._data_buf, 'c')
def test_read_all9(self):
f = FAKE(("\n1234\r\n56",))
f._data_buf = 'abc'
ret = [y for y in f.read_all(size=4, seps=('\r\n', '\n'))]
self.assertEqual(f.sep_hit, '\n')
self.assertEqual(f.request_data, 'abc\n')
self.assertEqual(f._data_buf, '1234\r\n56')
def test_read_all10(self):
f = FAKE(("\n1234\r\n56",))
f._data_buf = 'abc'
ret = [y for y in f.read_all(size=5, seps=('\r\n', '\n'))]
self.assertEqual(f.sep_hit, '\n')
self.assertEqual(f.request_data, 'abc\n')
self.assertEqual(f._data_buf, '1234\r\n56')
def test_read_all11(self):
f = FAKE(("\r\n1234\r\n56",))
f._data_buf = 'abc'
ret = [y for y in f.read_all(size=4, seps=('\r\n',))]
self.assertEqual(f.sep_hit, '')
self.assertEqual(f.request_data, 'abc\r')
self.assertEqual(f._data_buf, '\n1234\r\n56')
def test_read_all12(self):
f = FAKE(("1234\r\n56",))
ret = [y for y in f.read_all(seps=('\r\n',))]
self.assertEqual(f.sep_hit, '\r\n')
self.assertEqual(f.request_data, '1234\r\n')
ret = [y for y in f.read_all()]
self.assertEqual(f.request_data, '56')
if __name__ == '__main__':
#unittest.main()
unittest.main(defaultTest='TestARW.test_read_all12')
|
gpl-2.0
| -6,596,595,997,514,402,000 | 30.503597 | 66 | 0.530486 | false |
tilde-lab/tilde
|
tests/apps/perovskite_tilting/tilt_data.py
|
1
|
4199
|
#!/usr/bin/env python
# Euler tilting angles extraction test
# Author: Evgeny Blokhin
"""
Data for this test are published in:
[1] Surf.Sci.602, 3674 (2008), http://dx.doi.org/10.1016/j.susc.2008.10.002
[2] Evgeny Blokhin's MSc. thesis (in Russian), http://dx.doi.org/10.13140/RG.2.1.4276.2727
[3] PRB83, 134108 (2011), http://dx.doi.org/10.1103/PhysRevB.83.134108
[4] PRB88, 241407 (2013), http://dx.doi.org/10.1103/PhysRevB.88.241407
"""
from __future__ import print_function
import os
import unittest
import six
import set_path
from tilde.core.api import API
data_dir = os.path.realpath(os.path.dirname(__file__) + '/outputs')
# NB: in Euler notation delta is gamma, delta plus/minus phi is alpha
# or in another terminology: phi is gamma, phi plus/minus psi is alpha
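# Illustrative reading of the test data below (our gloss, not from the cited
# papers): each entry maps an octahedron corner number to its three tilting
# angles in degrees, e.g. corner 5 of check_last_point.cryst.out is expected
# to tilt by (0.04, 12.26, 7.93).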
test_data = {
'check_last_point.cryst.out': {
'comment': '[1], Table 1, calculated, Euler notation',
'data': {
5: [0.04, 12.26, 7.93], # the 1st test: corner number can be 5 or 7 FIXME
}
},
'y4h4srhfo3_62_pbe0_9hf_cis_go.cryst.out': {
'comment': '[2], Table 10, HfO2-terminated, dissociative water adsorption, monolayer coverage, Euler notation (bare slab reference data: delta=1.9, phi=9.729, psi=1.867)',
'data': {
17: [1.56, 15.07, 8.91],
}
},
'srhfo3_62_pbe0_110_9sr_go.cryst.out': {
'comment': '[1], Table 5, SrO termination, 110 surface, relaxed, Euler notation',
'data': {
13: [14.73, 12.03, 5.24],
15: [1.54, 8.74, 12.48],
}
},
'sto140afd_f3.cryst.out': {
'comment': '[3], Table 6, LCAO-PBE0 optimized basis set',
'data': {
3: [0.0, 0.0, 0.85],
}
},
'5ti_d_x2_scanned_freqs.cryst.out': {
'comment': '[4], page 241407-2, at the left, second paragraph',
'data': {
9: [0.0, 0.0, 0.36],
}
}
}
for k, v in six.iteritems(test_data):
if not os.path.exists(data_dir + os.sep + k):
        raise RuntimeError(k + ': missing file for test!')
work = API()
class Data_Perovskite_Tilting_Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.results = {}
for k, v in six.iteritems(test_data):
cls.results[k] = {}
for calc, error in work.parse(data_dir + os.sep + k):
if error:
raise RuntimeError(k + ': ' + error)
calc, error = work.classify(calc)
if error:
raise RuntimeError(k + ': ' + error)
calc = work.postprocess(calc)
target_category_num = 4 # perovskite category, pre-defined in init-data.sql
assert target_category_num in calc.info['tags']
cls.results[k] = [ v['data'], calc.apps['perovskite_tilting']['data'] ]
def test_all(self):
for k, v in six.iteritems(self.results):
centers = v[0].keys()
for center in centers:
self.assertEqual(v[0][center], v[1][center]) # the 1st test: corner number can be 5 or 7 FIXME
if __name__ == "__main__":
    for k, v in six.iteritems(test_data):
for calc, error in work.parse(data_dir + os.sep + k):
if error:
raise RuntimeError(k + ': ' + error)
calc, error = work.classify(calc)
if error:
raise RuntimeError(k + ': ' + error)
calc = work.postprocess(calc)
target_category_num = 4 # perovskite category, pre-defined in init-data.sql
assert target_category_num in calc.info['tags']
if not 'perovskite_tilting' in calc.apps:
raise RuntimeError(k + ': invalid result!')
print("\nSource", v['comment'], "(" + k + ")")
for center in v['data'].keys():
if not center in calc.apps['perovskite_tilting']['data']:
raise RuntimeError(k + ': invalid result!')
print('Octahedron N', center)
print('expected:', v['data'][center])
print('got :', calc.apps['perovskite_tilting']['data'][center])
print(__doc__)
|
mit
| 5,262,636,224,820,790,000 | 33.702479 | 179 | 0.551322 | false |
hidext/oemedical
|
oemedical_socioeconomics/oemedical_socioeconomics/oemedical_socioeconomics.py
|
1
|
6182
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
class OeMedicalSocioeconomics(orm.Model):
_inherit = 'oemedical.patient'
_columns = {
'ses': fields.selection(
[(None, ''),
('0', 'Lower'),
('1', 'Lower-middle'),
('2', 'Middle'),
('3', 'Middle-upper'),
('4', 'Higher')],
'Socioeconomics', help="SES - Socioeconomic Status", sort=False),
'housing': fields.selection(
[(None, ''),
('0', 'Shanty, deficient sanitary conditions'),
('1', 'Small, crowded but with good sanitary conditions'),
('2', 'Comfortable and good sanitary conditions'),
('3', 'Roomy and excellent sanitary conditions'),
('4', 'Luxury and excellent sanitary conditions')],
'Housing conditions',
help="Housing and sanitary living conditions", sort=False),
'hostile_area': fields.boolean(
'Hostile Area',
help="Check if patient lives in a zone of high hostility (eg, war)"
),
'sewers': fields.boolean('Sanitary Sewers'),
'water': fields.boolean('Running Water'),
        'trash': fields.boolean('Trash collection'),
'electricity': fields.boolean('Electrical supply'),
'gas': fields.boolean('Gas supply'),
'telephone': fields.boolean('Telephone'),
'television': fields.boolean('Television'),
'internet': fields.boolean('Internet'),
'single_parent': fields.boolean('Single parent family'),
'domestic_violence': fields.boolean('Domestic violence'),
'working_children': fields.boolean('Working children'),
'teenage_pregnancy': fields.boolean('Teenage pregnancy'),
'sexual_abuse': fields.boolean('Sexual abuse'),
'drug_addiction': fields.boolean('Drug addiction'),
'school_withdrawal': fields.boolean('School withdrawal'),
'prison_past': fields.boolean('Has been in prison'),
'prison_current': fields.boolean('Is currently in prison'),
'relative_in_prison': fields.boolean(
'Relative in prison',
help="Check if someone from the nuclear family - parents sibblings"
" is or has been in prison"),
'ses_notes': fields.text('Extra info'),
'fam_apgar_help': fields.selection(
[(None, ''),
('0', 'None'),
('1', 'Moderately'),
('2', 'Very much')],
'Help from family',
help="Is the patient satisfied with the level of help coming from"
" the family when there is a problem?", sort=False),
'fam_apgar_discussion': fields.selection(
[(None, ''),
('0', 'None'),
('1', 'Moderately'),
('2', 'Very much')],
'Problems discussion',
help="Is the patient satisfied with the level talking over the"
" problems as family?", sort=False),
'fam_apgar_decisions': fields.selection(
[(None, ''),
('0', 'None'),
('1', 'Moderately'),
('2', 'Very much')],
'Decision making',
help="Is the patient satisfied with the level of making important"
" decisions as a group ?", sort=False),
'fam_apgar_timesharing': fields.selection(
[(None, ''),
('0', 'None'),
('1', 'Moderately'),
('2', 'Very much')],
'Time sharing',
help="Is the patient satisfied with the level of time that they"
" spend together?", sort=False),
'fam_apgar_affection': fields.selection(
[(None, ''),
('0', 'None'),
('1', 'Moderately'),
('2', 'Very much')],
'Family affection',
help="Is the patient satisfied with the level of affection coming"
" from the family ?", sort=False),
'fam_apgar_score': fields.integer(
'Score',
help="Total Family APGAR 7 - 10 : Functional Family 4 - 6 :"
"Some level of disfunction\n"
"0 - 3 : Severe disfunctional family\n"),
'income': fields.selection(
[(None, ''),
('h', 'High'),
('m', 'Medium / Average'),
('l', 'Low')],
'Income', sort=False),
'education': fields.selection(
[(None, ''),
('0', 'None'),
('1', 'Incomplete Primary School'),
('2', 'Primary School'),
('3', 'Incomplete Secondary School'),
('4', 'Secondary School'),
('5', 'University')],
'Education Level',
help="Education Level", sort=False),
'works_at_home': fields.boolean(
'Works at home',
help="Check if the patient works at his / her house"),
'hours_outside': fields.integer(
'Hours outside home',
help="Number of hours a day the patient spend outside the house"),
}
|
agpl-3.0
| -6,396,801,684,019,565,000 | 43.157143 | 79 | 0.531381 | false |
dann/python-hookable
|
docs/conf.py
|
1
|
9257
|
# -*- coding: utf-8 -*-
#
# hookable documentation build configuration file, created by
# sphinx-quickstart on Wed Jan 4 21:12:53 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'hookable'
copyright = u'2012, dann'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.01'
# The full version, including alpha/beta/rc tags.
release = '0.01'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'hookabledoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'hookable.tex', u'hookable Documentation',
u'dann', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'hookable', u'hookable Documentation',
[u'dann'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'hookable', u'hookable Documentation',
u'dann', 'hookable', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'hookable'
epub_author = u'dann'
epub_publisher = u'dann'
epub_copyright = u'2012, dann'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
|
bsd-3-clause
| -1,558,838,466,794,074,600 | 31.031142 | 215 | 0.703684 | false |
joopert/nad_receiver
|
nad_receiver/__init__.py
|
1
|
11483
|
"""
NAD has an RS232 interface to control the receiver.
Not all receivers have all functions.
Functions can be found on the NAD website: http://nadelectronics.com/software
"""
import codecs
import socket
from time import sleep
from typing import Any, Dict, Iterable, Optional, Union
from nad_receiver.nad_commands import CMDS
from nad_receiver.nad_transport import (NadTransport, SerialPortTransport, TelnetTransportWrapper,
DEFAULT_TIMEOUT)
import logging
logging.basicConfig()
_LOGGER = logging.getLogger("nad_receiver")
# Uncomment this line to see all communication with the device:
# _LOGGER.setLevel(logging.DEBUG)
class NADReceiver:
"""NAD receiver."""
transport: NadTransport
def __init__(self, serial_port: str) -> None:
"""Create RS232 connection."""
self.transport = SerialPortTransport(serial_port)
def exec_command(self, domain: str, function: str, operator: str, value: Optional[str] =None) -> Optional[str]:
"""
Write a command to the receiver and read the value it returns.
The receiver will always return a value, also when setting a value.
"""
if operator in CMDS[domain][function]['supported_operators']:
if operator == '=' and value is None:
raise ValueError('No value provided')
cmd = ''.join([CMDS[domain][function]['cmd'], operator]) # type: ignore
assert isinstance(cmd, str)
if value:
cmd = cmd + value
else:
raise ValueError('Invalid operator provided %s' % operator)
try:
msg = self.transport.communicate(cmd)
_LOGGER.debug(f"sent: '{cmd}' reply: '{msg}'")
return msg.split('=')[1]
except IndexError:
pass
return None
def main_dimmer(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.Dimmer."""
return self.exec_command('main', 'dimmer', operator, value)
def main_mute(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.Mute."""
return self.exec_command('main', 'mute', operator, value)
def main_power(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.Power."""
return self.exec_command('main', 'power', operator, value)
def main_volume(self, operator: str, value: Optional[str] =None) -> Optional[float]:
"""
Execute Main.Volume.
Returns float
"""
volume = self.exec_command('main', 'volume', operator, str(value))
if volume is None:
return None
try:
res = float(volume)
return res
        except ValueError:
pass
return None
def main_ir(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.IR."""
return self.exec_command('main', 'ir', operator, value)
def main_listeningmode(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.ListeningMode."""
return self.exec_command('main', 'listeningmode', operator, value)
def main_sleep(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.Sleep."""
return self.exec_command('main', 'sleep', operator, value)
def main_tape_monitor(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.Tape1."""
return self.exec_command('main', 'tape_monitor', operator, value)
def main_speaker_a(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.SpeakerA."""
return self.exec_command('main', 'speaker_a', operator, value)
def main_speaker_b(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.SpeakerB."""
return self.exec_command('main', 'speaker_b', operator, value)
def main_source(self, operator: str, value: Optional[str]=None) -> Optional[Union[int, str]]:
"""
Execute Main.Source.
Returns int
"""
source = self.exec_command('main', 'source', operator, str(value))
if source is None:
return None
try:
# try to return as integer, some receivers return numbers
return int(source)
except ValueError:
# return source as string
return source
return None
def main_version(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.Version."""
return self.exec_command('main', 'version', operator, value)
def main_model(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Main.Model."""
return self.exec_command('main', 'model', operator, value)
def tuner_am_frequency(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Tuner.AM.Frequence."""
return self.exec_command('tuner', 'am_frequency', operator, value)
def tuner_am_preset(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Tuner.AM.Preset."""
return self.exec_command('tuner', 'am_preset', operator, value)
def tuner_band(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Tuner.Band."""
return self.exec_command('tuner', 'band', operator, value)
def tuner_fm_frequency(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Tuner.FM.Frequence."""
return self.exec_command('tuner', 'fm_frequency', operator, value)
def tuner_fm_mute(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Tuner.FM.Mute."""
return self.exec_command('tuner', 'fm_mute', operator, value)
def tuner_fm_preset(self, operator: str, value: Optional[str] =None) -> Optional[str]:
"""Execute Tuner.FM.Preset."""
return self.exec_command('tuner', 'fm_preset', operator, value)
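# Minimal usage sketch (the serial device path is an assumption, not part of
# this module): each helper combines a command name from nad_commands.CMDS
# with an operator and optional value, producing strings such as
# "Main.Power=On", and returns whatever value the receiver echoes back.
#
#   receiver = NADReceiver('/dev/ttyUSB0')  # assumed port
#   receiver.main_power('=', 'On')
#   receiver.main_volume('+')               # step the volume up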
class NADReceiverTelnet(NADReceiver):
"""
Support NAD amplifiers that use telnet for communication.
    Supports all commands from the RS232 base class.
Known supported model: Nad T787.
"""
def __init__(self, host: str, port: int =23, timeout: int =DEFAULT_TIMEOUT):
"""Create NADTelnet."""
self.transport = TelnetTransportWrapper(host, port, timeout)
class NADReceiverTCP:
"""
Support NAD amplifiers that use tcp for communication.
Known supported model: Nad D 7050.
"""
POLL_VOLUME = "0001020204"
POLL_POWER = "0001020209"
POLL_MUTED = "000102020a"
POLL_SOURCE = "0001020203"
CMD_POWERSAVE = "00010207000001020207"
CMD_OFF = "0001020900"
CMD_ON = "0001020901"
CMD_VOLUME = "00010204"
CMD_MUTE = "0001020a01"
CMD_UNMUTE = "0001020a00"
CMD_SOURCE = "00010203"
SOURCES = {'Coaxial 1': '00', 'Coaxial 2': '01', 'Optical 1': '02',
'Optical 2': '03', 'Computer': '04', 'Airplay': '05',
'Dock': '06', 'Bluetooth': '07'}
SOURCES_REVERSED = {value: key for key, value in
SOURCES.items()}
PORT = 50001
BUFFERSIZE = 1024
def __init__(self, host: str) -> None:
"""Setup globals."""
self._host = host
def _send(self, message: str, read_reply: bool =False) -> Optional[str]:
"""Send a command string to the amplifier."""
        sock: Optional[socket.socket] = None
for tries in range(0, 3):
try:
sock = socket.create_connection((self._host, self.PORT),
timeout=5)
break
except socket.timeout:
print("Socket connection timed out.")
return None
except (ConnectionError, BrokenPipeError):
if tries == 2:
print("socket connect failed.")
return None
sleep(0.1)
if not sock:
return None
with sock:
sock.send(codecs.decode(message.encode(), encoding='hex_codec'))
if read_reply:
sleep(0.1)
reply = ''
tries = 0
max_tries = 20
                while len(reply) < len(message) and tries < max_tries:
                    try:
                        reply += codecs.encode(sock.recv(self.BUFFERSIZE), 'hex')\
                            .decode("utf-8")
                    except (ConnectionError, BrokenPipeError):
                        pass
                    tries += 1
                # only return once the full reply has been read (or retries ran out)
                return reply
            return None
def status(self) -> Optional[Dict[str, Any]]:
"""
Return the status of the device.
Returns a dictionary with keys 'volume' (int 0-200) , 'power' (bool),
'muted' (bool) and 'source' (str).
"""
nad_reply = self._send(self.POLL_VOLUME +
self.POLL_POWER +
self.POLL_MUTED +
self.POLL_SOURCE, read_reply=True)
if nad_reply is None:
return None
# split reply into parts of 10 characters
num_chars = 10
nad_status = [nad_reply[i:i + num_chars]
for i in range(0, len(nad_reply), num_chars)]
return {'volume': int(nad_status[0][-2:], 16),
'power': nad_status[1][-2:] == '01',
'muted': nad_status[2][-2:] == '01',
'source': self.SOURCES_REVERSED[nad_status[3][-2:]]}
def power_off(self) -> None:
"""Power the device off."""
status = self.status()
if not status:
return None
if status['power']:
# Setting power off when it is already off can cause hangs
self._send(self.CMD_POWERSAVE + self.CMD_OFF)
def power_on(self) -> None:
"""Power the device on."""
status = self.status()
if not status:
return None
if not status['power']:
self._send(self.CMD_ON, read_reply=True)
sleep(0.5) # Give NAD7050 some time before next command
def set_volume(self, volume: int) -> None:
"""Set volume level of the device. Accepts integer values 0-200."""
if 0 <= volume <= 200:
volume_hex = format(volume, "02x") # Convert to hex
self._send(self.CMD_VOLUME + volume_hex)
def mute(self) -> None:
"""Mute the device."""
self._send(self.CMD_MUTE, read_reply=True)
def unmute(self) -> None:
"""Unmute the device."""
self._send(self.CMD_UNMUTE)
def select_source(self, source: str) -> None:
"""Select a source from the list of sources."""
status = self.status()
if not status:
return None
if status['power']: # Changing source when off may hang NAD7050
# Setting the source to the current source will hang the NAD7050
if status['source'] != source:
if source in self.SOURCES:
self._send(self.CMD_SOURCE + self.SOURCES[source],
read_reply=True)
def available_sources(self) -> Iterable[str]:
"""Return a list of available sources."""
return list(self.SOURCES.keys())
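# Minimal usage sketch for the TCP variant (the host below is an assumed
# placeholder): replies arrive as hex strings in 10-character frames whose
# last two characters carry the value, so volume 100 is sent as
# CMD_VOLUME + '64'.
#
#   d7050 = NADReceiverTCP('192.168.1.10')  # assumed host
#   state = d7050.status()  # e.g. {'volume': 100, 'power': True, 'muted': False, 'source': 'Optical 1'}
#   d7050.set_volume(150)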
|
mit
| 423,061,647,140,104,700 | 35.804487 | 115 | 0.570844 | false |
Tesora/tesora-python-troveclient
|
troveclient/tests/osc/v1/test_database_flavors.py
|
1
|
1636
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from troveclient.osc.v1 import database_flavors
from troveclient.tests.osc.v1 import fakes
class TestFlavors(fakes.TestDatabasev1):
fake_flavors = fakes.FakeFlavors()
def setUp(self):
super(TestFlavors, self).setUp()
self.mock_client = self.app.client_manager.database
self.flavor_client = self.app.client_manager.database.flavors
class TestFlavorList(TestFlavors):
columns = database_flavors.ListDatabaseFlavors.columns
values = (1, 'm1.tiny', 512, '', '', '')
def setUp(self):
super(TestFlavorList, self).setUp()
self.cmd = database_flavors.ListDatabaseFlavors(self.app, None)
self.data = [self.fake_flavors.get_flavors_1()]
self.flavor_client.list.return_value = self.data
def test_flavor_list_defaults(self):
parsed_args = self.check_parser(self.cmd, [], [])
columns, values = self.cmd.take_action(parsed_args)
self.flavor_client.list.assert_called_once_with()
self.assertEqual(self.columns, columns)
self.assertEqual([self.values], values)
|
apache-2.0
| 2,906,566,030,713,005,600 | 38.902439 | 77 | 0.702323 | false |
erigones/esdc-ce
|
ans/roles/cluster/library/pcs_resource.py
|
1
|
15877
|
#!/usr/bin/python
# Copyright: (c) 2018, Ondrej Famera <ondrej-xa2iel8u@famera.cz>
# GNU General Public License v3.0+ (see LICENSE-GPLv3.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0 (see LICENSE-APACHE2.txt or http://www.apache.org/licenses/LICENSE-2.0)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
author: "Ondrej Famera (@OndrejHome)"
module: pcs_resource
short_description: "wrapper module for 'pcs resource' "
description:
- "Module for creating, deleting and updating clusters resources using 'pcs' utility."
- "This module should be executed for same resorce only on one of the nodes in cluster at a time."
version_added: "2.4"
options:
state:
description:
- "'present' - ensure that cluster resource exists"
- "'absent' - ensure cluster resource doesn't exist"
required: false
default: present
choices: ['present', 'absent']
name:
description:
- "name of cluster resource - cluster resource identifier"
required: true
resource_class:
description:
- class of cluster resource
    required: false
    default: 'ocf'
    choices: ['ocf', 'systemd', 'stonith', 'master']
resource_type:
description:
- cluster resource type
required: false
options:
description:
- "additional options passed to 'pcs' command"
required: false
force_resource_update:
description:
- "skip checking for cluster changes when updating existing resource configuration
- use 'scope=resources' when pushing the change to cluster. Useful in busy clusters,
        dangerous when there are concurrent updates as they can be lost."
required: false
default: no
type: bool
cib_file:
description:
- "Apply changes to specified file containing cluster CIB instead of running cluster."
- "This module requires the file to already contain cluster configuration."
required: false
notes:
- tested on CentOS 6.8, 7.3
- module can create and delete clones, groups and master resources indirectly -
resource can specify --clone, --group, --master option which will cause them to create
or become part of clone/group/master
'''
EXAMPLES = '''
- name: ensure Dummy('ocf:pacemaker:Dummy') resource with name 'test' is present
pcs_resource:
name: 'test'
resource_type: 'ocf:pacemaker:Dummy'
- name: ensure that resource with name 'vip' is not present
pcs_resource:
name: 'vip'
state: 'absent'
- name: ensure resource 'test2' of IPaddr2('ocf:heartbeat:IPaddr2') type exists an has 5 second monitor interval
pcs_resource:
name: 'test2'
resource_type: 'ocf:heartbeat:IPaddr2'
options: 'ip=192.168.1.2 op monitor interval=5'
- name: create resource in group 'testgrp'
pcs_resource:
name: 'test3'
resource_type: 'ocf:pacemaker:Dummy'
options: '--group testgrp'
- name: create complex Master/Slave resource 'test-master' of 'ocf:pacemaker:Dummy' type
pcs_resource:
name: 'test'
resource_type: 'ocf:pacemaker:Dummy'
options: >
fake=some_value --master meta master-max=1 master-node-max=1 clone-max=2 clone-node-max=1 notify=true
op monitor interval=60s meta resource-stickiness=100
'''
# TODO if resource exists and is not part of a group, then specifying a group won't put it into that group
# same problem is with clone and master - it might be better to make this functionality into separate module
import sys
import os.path
import xml.etree.ElementTree as ET
import tempfile
from distutils.spawn import find_executable
from ansible.module_utils.basic import AnsibleModule
# determine if we have 'to_native' function that we can use for 'ansible --diff' output
to_native_support = False
try:
from ansible.module_utils._text import to_native
to_native_support = True
except ImportError:
pass
def replace_element(elem, replacement):
elem.clear()
elem.text = replacement.text
elem.tail = replacement.tail
elem.tag = replacement.tag
elem.attrib = replacement.attrib
elem[:] = replacement[:]
def compare_resources(module, res1, res2):
    # we now have 2 nodes that we can compare, so lets dump them into files for comparing
n1_file_fd, n1_tmp_path = tempfile.mkstemp()
n2_file_fd, n2_tmp_path = tempfile.mkstemp()
n1_file = open(n1_tmp_path, 'w')
n2_file = open(n2_tmp_path, 'w')
# dump the XML resource definitions into temporary files
sys.stdout = n1_file
ET.dump(res1)
sys.stdout = n2_file
ET.dump(res2)
sys.stdout = sys.__stdout__
# close files
n1_file.close()
n2_file.close()
# normalize the files and store results in new files - this also removes some unimportant spaces and stuff
n3_file_fd, n3_tmp_path = tempfile.mkstemp()
n4_file_fd, n4_tmp_path = tempfile.mkstemp()
rc, out, err = module.run_command('xmllint --format --output ' + n3_tmp_path + ' ' + n1_tmp_path)
rc, out, err = module.run_command('xmllint --format --output ' + n4_tmp_path + ' ' + n2_tmp_path)
# add files that should be cleaned up
module.add_cleanup_file(n1_tmp_path)
module.add_cleanup_file(n2_tmp_path)
module.add_cleanup_file(n3_tmp_path)
module.add_cleanup_file(n4_tmp_path)
# now compare files
diff = ''
rc, out, err = module.run_command('diff ' + n3_tmp_path + ' ' + n4_tmp_path)
if rc != 0:
        # if there was a difference then show the diff
n3_file = open(n3_tmp_path, 'r+')
n4_file = open(n4_tmp_path, 'r+')
if to_native_support:
# produce diff only where we have to_native function which give sensible output
# without 'to_native' whole text is wrapped as single line and not diffed
# seems that to_native was added in ansible-2.2 (commit 57701d7)
diff = {
'before_header': '',
'before': to_native(b''.join(n3_file.readlines())),
'after_header': '',
'after': to_native(b''.join(n4_file.readlines())),
}
return rc, diff
def find_resource(cib, resource_id):
my_resource = None
tags = ['group', 'clone', 'master', 'primitive']
for elem in list(cib):
if elem.attrib.get('id') == resource_id:
return elem
elif elem.tag in tags:
my_resource = find_resource(elem, resource_id)
if my_resource is not None:
break
return my_resource
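# Illustrative CIB fragment walked by find_resource above (shape only, the
# ids are hypothetical):
#   <resources>
#     <group id="testgrp">
#       <primitive id="test3" class="ocf" provider="pacemaker" type="Dummy"/>
#     </group>
#   </resources>
# The search recurses through group/clone/master wrappers until an element
# whose 'id' attribute matches, so members of groups and clones are found as
# well as top-level primitives.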
def run_module():
module = AnsibleModule(
argument_spec=dict(
state=dict(default="present", choices=['present', 'absent']),
name=dict(required=True),
resource_class=dict(default="ocf", choices=['ocf', 'systemd', 'stonith', 'master']),
resource_type=dict(required=False),
options=dict(default="", required=False),
force_resource_update=dict(default=False, type='bool', required=False),
cib_file=dict(required=False),
),
supports_check_mode=True
)
state = module.params['state']
resource_name = module.params['name']
resource_class = module.params['resource_class']
cib_file = module.params['cib_file']
if state == 'present' and (not module.params['resource_type']):
module.fail_json(msg='When creating cluster resource you must specify the resource_type')
result = {}
if find_executable('pcs') is None:
module.fail_json(msg="'pcs' executable not found. Install 'pcs'.")
module.params['cib_file_param'] = ''
if cib_file is not None:
# use cib_file if specified
if os.path.isfile(cib_file):
try:
current_cib = ET.parse(cib_file)
except Exception as e:
module.fail_json(msg="Error encountered parsing the cib_file - %s" % (e))
current_cib_root = current_cib.getroot()
module.params['cib_file_param'] = '-f ' + cib_file
else:
module.fail_json(msg="%(cib_file)s is not a file or doesn't exists" % module.params)
else:
# get running cluster configuration
rc, out, err = module.run_command('pcs cluster cib')
if rc == 0:
current_cib_root = ET.fromstring(out)
else:
module.fail_json(msg='Failed to load cluster configuration', out=out, error=err)
# try to find the resource that we seek
resource = None
cib_resources = current_cib_root.find('./configuration/resources')
resource = find_resource(cib_resources, resource_name)
if state == 'present' and resource is None:
# resource should be present, but we don't see it in configuration - lets create it
result['changed'] = True
if not module.check_mode:
if resource_class == 'stonith':
cmd = 'pcs %(cib_file_param)s stonith create %(name)s %(resource_type)s %(options)s' % module.params
elif resource_class == 'master':
cmd = 'pcs %(cib_file_param)s resource master %(name)s %(resource_type)s %(options)s' % module.params
else:
cmd = 'pcs %(cib_file_param)s resource create %(name)s %(resource_type)s %(options)s' % module.params
rc, out, err = module.run_command(cmd)
if rc != 0 and "Call cib_replace failed (-62): Timer expired" in err:
# EL6: special retry when we failed to create resource because of timer waiting on cib expired
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to create resource using command '" + cmd + "'", output=out, error=err)
elif state == 'present' and resource is not None and resource_class == 'master':
# modify the master resource params directly
cmd = 'pcs resource meta %(name)s %(options)s' % module.params
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to modify resource using command '" + cmd + "'", output=out, error=err)
elif state == 'present' and resource is not None:
        # resource should be present and we have found resource with such ID - lets compare it with definition if it needs a change
        # lets simulate what the resource would look like if it was created using command we have
clean_cib_fd, clean_cib_path = tempfile.mkstemp()
module.add_cleanup_file(clean_cib_path)
module.do_cleanup_files()
# we must be sure that clean_cib_path is empty
if resource_class == 'stonith':
cmd = 'pcs -f ' + clean_cib_path + ' stonith create %(name)s %(resource_type)s %(options)s' % module.params
else:
cmd = 'pcs -f ' + clean_cib_path + ' resource create %(name)s %(resource_type)s %(options)s' % module.params
rc, out, err = module.run_command(cmd)
if rc == 0:
# we have a comparable resource created in clean cluster, so lets select it and compare it
clean_cib = ET.parse(clean_cib_path)
clean_cib_root = clean_cib.getroot()
clean_resource = None
cib_clean_resources = clean_cib_root.find('./configuration/resources')
clean_resource = find_resource(cib_clean_resources, resource_name)
if clean_resource is not None:
# remove the meta_attribute element from original cluster cib when empty to make comparison clean - Issue #10
for elem in list(resource):
if elem.tag == 'meta_attributes' and len(list(elem)) == 0:
resource.remove(elem)
rc, diff = compare_resources(module, resource, clean_resource)
if rc == 0:
                    # if no differences were found there is no need to update the resource
module.exit_json(changed=False)
else:
# otherwise lets replace the resource with new one
result['changed'] = True
result['diff'] = diff
if not module.check_mode:
replace_element(resource, clean_resource)
# when we use cib_file then we can dump the changed CIB directly into file
if cib_file is not None:
try:
                            current_cib.write(cib_file)
except Exception as e:
module.fail_json(msg="Error encountered writing result to cib_file - %s" % (e))
module.exit_json(changed=True)
# when not using cib_file then we continue preparing changes for cib-push into running cluster
new_cib = ET.ElementTree(current_cib_root)
new_cib_fd, new_cib_path = tempfile.mkstemp()
module.add_cleanup_file(new_cib_path)
new_cib.write(new_cib_path)
push_scope = 'scope=resources' if module.params['force_resource_update'] else ''
push_cmd = 'pcs cluster cib-push ' + push_scope + ' ' + new_cib_path
rc, out, err = module.run_command(push_cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to push updated configuration to cluster using command '" + push_cmd + "'", output=out, error=err)
else:
module.fail_json(msg="Unable to find simulated resource, This is most probably a bug.")
else:
module.fail_json(msg="Unable to simulate resource with given definition using command '" + cmd + "'", output=out, error=err)
elif state == 'absent' and resource is not None:
# resource should not be present but we have found something - lets remove that
result['changed'] = True
if not module.check_mode:
if resource_class == 'stonith':
cmd = 'pcs %(cib_file_param)s stonith delete %(name)s' % module.params
else:
cmd = 'pcs %(cib_file_param)s resource delete %(name)s' % module.params
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to delete resource using command '" + cmd + "'", output=out, error=err)
else:
        # resource should not be present and is not there, nothing to do
result['changed'] = False
# END of module
module.exit_json(**result)
def main():
run_module()
if __name__ == '__main__':
main()
|
apache-2.0
| -6,543,868,427,845,486,000 | 43.598315 | 159 | 0.580336 | false |
jolid/script.module.donnie
|
lib/donnie/hgtv.py
|
1
|
3410
|
import urllib2, urllib, sys, os, re, random, copy
import urlresolver
from BeautifulSoup import BeautifulSoup, Tag, NavigableString
import xbmc,xbmcplugin,xbmcgui,xbmcaddon
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
from scrapers import CommonScraper
net = Net()
''' ###########################################################
Usage and helper functions
############################################################'''
class HGTVServiceSracper(CommonScraper):
def __init__(self, settingsid, DB=None):
if DB:
self.DB=DB
self.service='hgtv'
self.name = 'HGTV'
self.raiseError = False
self.referrer = 'http://www.hgtv.com/'
self.base_url = 'http://www.hgtv.com/'
self.user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
self.provides = []
self.settingsid = settingsid
self._loadsettings()
def _getShows(self, silent=False):
if self.isFresh('tvshows'):
self._getRecentShows(silent=silent)
return
print "Getting All shows for " + self.service
url = self.base_url + '/full-episodes/package/index.html'
print "Scrapping: " + url
pDialog = xbmcgui.DialogProgress()
if not silent:
pDialog.create('Downloading shows from ' + self.service)
pagedata = self.getURL(url, append_base_url=False)
if pagedata=='':
return False
soup = BeautifulSoup(pagedata)
shows = soup.findAll('a', {'class' : 'banner'})
for show in shows:
percent = int((100 * shows.index(show))/len(shows))
img = show.find('img')
name = img['alt']
year = img['src']
year = re.search('HGTV/(.+?)/', year).group(1)
href = show['href']
print [name, href, year]
if not silent:
pDialog.update(percent, url, name)
#self.addShowToDB(name, href, character, year)
        print 'Download complete!'
def _getRecentShows(self, silent=False):
print "Getting recent shows for: " + self.service
''' Do work here
'''
        print 'Download complete!'
def _getEpisodes(self, showid, show, url, pDialog, percent, silent):
print "getting episodes for " + show
''' Do work here
'''
return True
def _getMovies(self, silent=False):
if self.isFresh('movies'):
self._getRecentMovies(silent=silent)
return
print "Getting All movies for " + self.service
''' Do work here
'''
        print 'Download complete!'
def _getRecentMovies(self, silent):
print "Getting recent movies for: " + self.service
''' Do work here
'''
        print 'Download complete!'
def _getStreams(self, episodeid=None, movieid=None):
streams = []
''' Do work here
'''
return streams
def _resolveStream(self, stream):
raw_url = stream.replace(self.service + '://', '')
resolved_url = ''
''' Do work here
Try to resolve with urlresolver otherwise insert call to local resolver here
'''
return resolved_url
    def _resolveIMDB(self, uri): #Often needed if a site's movie index does not include imdb links but the movie page does
imdb = ''
print uri
pagedata = self.getURL(uri, append_base_url=True)
if pagedata=='':
return
imdb = re.search('http://www.imdb.com/title/(.+?)/', pagedata).group(1)
return imdb
def whichHost(self, host): #Sometimes needed
table = { 'Watch Blah' : 'blah.com',
'Watch Blah2' : 'blah2.com',
}
try:
host_url = table[host]
return host_url
except:
return 'Unknown'
|
gpl-2.0
| -1,216,272,242,217,462,300 | 20.049383 | 118 | 0.637243 | false |
factorybuild/stbgui
|
lib/python/Screens/InfoBarGenerics.py
|
1
|
119933
|
from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, iPlayableService, eServiceReference, eEPGCache, eActionMap, getDesktop, eDVBDB
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
return self.__class__.__name__ == "InfoBar"
def setResumePoint(session):
global resumePointCache, resumePointCacheLast
service = session.nav.getCurrentService()
ref = session.nav.getCurrentlyPlayingServiceOrGroup()
if (service is not None) and (ref is not None): # and (ref.type != 1):
# ref type 1 has its own memory...
seek = service.seek()
if seek:
pos = seek.getPlayPosition()
if not pos[0]:
key = ref.toString()
lru = int(time())
l = seek.getLength()
if l:
l = l[1]
else:
l = None
resumePointCache[key] = [lru, pos[1], l]
if len(resumePointCache) > 50:
candidate = key
for k,v in resumePointCache.items():
if v[0] < lru:
candidate = k
del resumePointCache[candidate]
if lru - resumePointCacheLast > 3600:
saveResumePoints()
def delResumePoint(ref):
global resumePointCache, resumePointCacheLast
try:
del resumePointCache[ref.toString()]
except KeyError:
pass
if int(time()) - resumePointCacheLast > 3600:
saveResumePoints()
def getResumePoint(session):
global resumePointCache
ref = session.nav.getCurrentlyPlayingServiceOrGroup()
if (ref is not None) and (ref.type != 1):
try:
entry = resumePointCache[ref.toString()]
entry[0] = int(time()) # update LRU timestamp
return entry[1]
except KeyError:
return None
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
def __init__(self):
self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
def __init__(self):
self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
self.hideUnhandledKeySymbolTimer = eTimer()
self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
self.checkUnusedTimer = eTimer()
self.checkUnusedTimer.callback.append(self.checkUnused)
self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
self.flags = (1<<1)
self.uflags = 0
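		# Presumed enigma2 key-event flags: 0=make, 1=break, 2=repeat, 3=long,
		# 4=ascii. self.flags collects the event types seen for the current
		# key, self.uflags those that no action consumed; when both match
		# after the break event, the key went entirely unhandled.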
#this function is called on every keypress!
def actionA(self, key, flag):
self.unhandledKeyDialog.hide()
if flag != 4:
if self.flags & (1<<1):
self.flags = self.uflags = 0
self.flags |= (1<<flag)
if flag == 1: # break
self.checkUnusedTimer.start(0, True)
return 0
#this function is only called when no other action has handled this key
def actionB(self, key, flag):
if flag != 4:
self.uflags |= (1<<flag)
def checkUnused(self):
if self.flags == self.uflags:
self.unhandledKeyDialog.show()
self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
def __init__(self):
self.onExecBegin.append(self.__onExecBegin)
self.onExecEnd.append(self.__onExecEnd)
self.screenSaverTimer = eTimer()
self.screenSaverTimer.callback.append(self.screensaverTimeout)
self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
self.screensaver.hide()
def __onExecBegin(self):
self.ScreenSaverTimerStart()
def __onExecEnd(self):
if self.screensaver.shown:
self.screensaver.hide()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
self.screenSaverTimer.stop()
def ScreenSaverTimerStart(self):
time = int(config.usage.screen_saver.value)
flag = self.seekstate[0]
if not flag:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
ref = ref.toString().split(":")
flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
if time and flag:
self.screenSaverTimer.startLongTimer(time)
else:
self.screenSaverTimer.stop()
def screensaverTimeout(self):
if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
self.hide()
if hasattr(self, "pvrStateDialog"):
self.pvrStateDialog.hide()
self.screensaver.show()
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)
def keypressScreenSaver(self, key, flag):
if flag:
self.screensaver.hide()
self.show()
self.ScreenSaverTimerStart()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class HideVBILine(Screen):
def __init__(self, session):
self.skin = """<screen position="0,0" size="%s,%s" flags="wfNoBorder" zPosition="1"/>""" % (getDesktop(0).size().width(), getDesktop(0).size().height() / 360 + 1)
Screen.__init__(self, session)
class SecondInfoBar(Screen):
def __init__(self, session, skinName):
Screen.__init__(self, session)
self.skinName = skinName
class InfoBarShowHide(InfoBarScreenSaver):
""" InfoBar show/hide control, accepts toggleShow and hide actions, might start
fancy animations. """
STATE_HIDDEN = 0
STATE_HIDING = 1
STATE_SHOWING = 2
STATE_SHOWN = 3
FLAG_HIDE_VBI = 512
def __init__(self):
self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
{
"toggleShow": self.okButtonCheck,
"hide": self.keyHide,
"toggleShowLong" : self.toggleShowLong,
"hideLong" : self.hideLong,
}, 1) # lower prio to make it possible to override ok and cancel..
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.serviceStarted,
})
InfoBarScreenSaver.__init__(self)
self.__state = self.STATE_SHOWN
self.__locked = 0
self.hideTimer = eTimer()
self.hideTimer.callback.append(self.doTimerHide)
self.hideTimer.start(5000, True)
self.onShow.append(self.__onShow)
self.onHide.append(self.__onHide)
self.onShowHideNotifiers = []
self.actualSecondInfoBarScreen = None
if isStandardInfoBar(self):
self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar, "SecondInfoBar")
self.secondInfoBarScreen.show()
self.secondInfoBarScreenSimple = self.session.instantiateDialog(SecondInfoBar, "SecondInfoBarSimple")
self.secondInfoBarScreenSimple.show()
self.actualSecondInfoBarScreen = config.usage.show_simple_second_infobar.value and self.secondInfoBarScreenSimple.skinAttributes and self.secondInfoBarScreenSimple or self.secondInfoBarScreen
self.hideVBILineScreen = self.session.instantiateDialog(HideVBILine)
self.hideVBILineScreen.show()
self.onLayoutFinish.append(self.__layoutFinished)
self.onExecBegin.append(self.__onExecBegin)
def __onExecBegin(self):
self.clearScreenPath()
self.showHideVBI()
def __layoutFinished(self):
if self.actualSecondInfoBarScreen:
self.secondInfoBarScreen.hide()
self.secondInfoBarScreenSimple.hide()
self.hideVBILineScreen.hide()
def __onShow(self):
self.__state = self.STATE_SHOWN
for x in self.onShowHideNotifiers:
x(True)
self.startHideTimer()
def __onHide(self):
self.__state = self.STATE_HIDDEN
if self.actualSecondInfoBarScreen:
self.actualSecondInfoBarScreen.hide()
for x in self.onShowHideNotifiers:
x(False)
def toggleShowLong(self):
if not config.usage.ok_is_channelselection.value:
self.toggleSecondInfoBar()
def hideLong(self):
if config.usage.ok_is_channelselection.value:
self.toggleSecondInfoBar()
def toggleSecondInfoBar(self):
if self.actualSecondInfoBarScreen and not self.shown and not self.actualSecondInfoBarScreen.shown and self.secondInfoBarScreenSimple.skinAttributes and self.secondInfoBarScreen.skinAttributes:
self.actualSecondInfoBarScreen.hide()
config.usage.show_simple_second_infobar.value = not config.usage.show_simple_second_infobar.value
config.usage.show_simple_second_infobar.save()
self.actualSecondInfoBarScreen = config.usage.show_simple_second_infobar.value and self.secondInfoBarScreenSimple or self.secondInfoBarScreen
self.showSecondInfoBar()
def keyHide(self):
if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
if config.usage.pip_hideOnExit.value == "popup":
self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
else:
self.hidePipOnExitCallback(True)
elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
self.toggleShow()
elif self.__state == self.STATE_SHOWN:
self.hide()
def hidePipOnExitCallback(self, answer):
if answer == True:
self.showPiP()
def connectShowHideNotifier(self, fnc):
if not fnc in self.onShowHideNotifiers:
self.onShowHideNotifiers.append(fnc)
def disconnectShowHideNotifier(self, fnc):
if fnc in self.onShowHideNotifiers:
self.onShowHideNotifiers.remove(fnc)
def serviceStarted(self):
if self.execing:
if config.usage.show_infobar_on_zap.value:
self.doShow()
self.showHideVBI()
def startHideTimer(self):
if self.__state == self.STATE_SHOWN and not self.__locked:
self.hideTimer.stop()
if self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.shown:
idx = config.usage.show_second_infobar.index - 1
else:
idx = config.usage.infobar_timeout.index
if idx:
self.hideTimer.startLongTimer(idx)
def doShow(self):
self.show()
self.startHideTimer()
def doTimerHide(self):
self.hideTimer.stop()
if self.__state == self.STATE_SHOWN:
self.hide()
def okButtonCheck(self):
if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
if isinstance(self, InfoBarTimeshift) and self.timeshiftEnabled() and isinstance(self, InfoBarSeek) and self.seekstate == self.SEEK_STATE_PAUSE:
return
self.openServiceList()
else:
self.toggleShow()
def toggleShow(self):
if self.__state == self.STATE_HIDDEN:
self.showFirstInfoBar()
else:
self.showSecondInfoBar()
def showSecondInfoBar(self):
if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
if not (hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
self.showDefaultEPG()
elif self.actualSecondInfoBarScreen and config.usage.show_second_infobar.value and not self.actualSecondInfoBarScreen.shown:
self.show()
self.actualSecondInfoBarScreen.show()
self.startHideTimer()
else:
self.hide()
self.hideTimer.stop()
def showFirstInfoBar(self):
if self.__state == self.STATE_HIDDEN or self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.shown:
self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.hide()
self.show()
else:
self.hide()
self.hideTimer.stop()
def lockShow(self):
self.__locked = self.__locked + 1
if self.execing:
self.show()
self.hideTimer.stop()
def unlockShow(self):
self.__locked = self.__locked - 1
if self.execing:
self.startHideTimer()
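# Decide whether the VBI lines should be covered: for file playback, honour the
# FLAG_HIDE_VBI flag of the referenced service or an ".hidvbi." marker in the
# filename; for live services, ask the service itself via iServiceInformation.sHideVBI.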
def checkHideVBI(self):
service = self.session.nav.getCurrentlyPlayingServiceReference()
servicepath = service and service.getPath()
if servicepath and servicepath.startswith("/"):
if service.toString().startswith("1:"):
info = eServiceCenter.getInstance().info(service)
service = info and info.getInfoString(service, iServiceInformation.sServiceref)
return service and eDVBDB.getInstance().getFlag(eServiceReference(service)) & self.FLAG_HIDE_VBI and True
else:
return ".hidvbi." in servicepath.lower()
service = self.session.nav.getCurrentService()
info = service and service.info()
return info and info.getInfo(iServiceInformation.sHideVBI)
def showHideVBI(self):
if self.checkHideVBI():
self.hideVBILineScreen.show()
else:
self.hideVBILineScreen.hide()
def ToggleHideVBI(self):
service = self.session.nav.getCurrentlyPlayingServiceReference()
servicepath = service and service.getPath()
if not servicepath:
if eDVBDB.getInstance().getFlag(service) & self.FLAG_HIDE_VBI:
eDVBDB.getInstance().removeFlag(service, self.FLAG_HIDE_VBI)
else:
eDVBDB.getInstance().addFlag(service, self.FLAG_HIDE_VBI)
eDVBDB.getInstance().reloadBouquets()
self.showHideVBI()
class BufferIndicator(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self["status"] = Label()
self.mayShow = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evBuffering: self.bufferChanged,
iPlayableService.evStart: self.__evStart,
iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
})
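# Show the buffering percentage of a streaming service; only shown between
# service start and the actual GStreamer play start, hidden again afterwards.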
def bufferChanged(self):
if self.mayShow:
service = self.session.nav.getCurrentService()
info = service and service.info()
if info:
value = info.getInfo(iServiceInformation.sBuffer)
if value and value != 100:
self["status"].setText(_("Buffering %d%%") % value)
if not self.shown:
self.show()
def __evStart(self):
self.mayShow = True
self.hide()
def __evGstreamerPlayStarted(self):
self.mayShow = False
self.hide()
class InfoBarBuffer():
def __init__(self):
self.bufferScreen = self.session.instantiateDialog(BufferIndicator)
self.bufferScreen.hide()
class NumberZap(Screen):
def quit(self):
self.Timer.stop()
self.close()
def keyOK(self):
self.Timer.stop()
self.close(self.service, self.bouquet)
def handleServiceName(self):
if self.searchNumber:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
if not self.startBouquet:
self.startBouquet = self.bouquet
def keyBlue(self):
self.startTimer()
if self.searchNumber:
if self.startBouquet == self.bouquet:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
else:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
def keyNumberGlobal(self, number):
self.startTimer(repeat=True)
self.numberString = self.numberString + str(number)
self["number"].text = self["number_summary"].text = self.numberString
self.handleServiceName()
if len(self.numberString) >= 5:
self.keyOK()
def __init__(self, session, number, searchNumberFunction = None):
Screen.__init__(self, session)
self.numberString = str(number)
self.searchNumber = searchNumberFunction
self.startBouquet = None
self["channel"] = Label(_("Channel:"))
self["number"] = Label(self.numberString)
self["servicename"] = Label()
self["channel_summary"] = StaticText(_("Channel:"))
self["number_summary"] = StaticText(self.numberString)
self["servicename_summary"] = StaticText()
self.handleServiceName()
self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
{
"cancel": self.quit,
"ok": self.keyOK,
"blue": self.keyBlue,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal
})
self.Timer = eTimer()
self.Timer.callback.append(self.endTimer)
self.Timer.start(250)
self.startTimer()
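# The 250 ms tick timer auto-confirms the typed number: a single key press
# waits 12 ticks (~3 s), repeated presses use the shorter, escalating target table.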
def startTimer(self, repeat=False):
self.timer_target = repeat and self.timer_counter < 6 and [4,4,4,5,8,10][self.timer_counter] or 12
self.timer_counter = 0
def endTimer(self):
self.timer_counter += 1
if self.timer_counter > self.timer_target:
self.keyOK()
class InfoBarNumberZap:
""" Handles an initial number for NumberZapping """
def __init__(self):
self["NumberActions"] = NumberActionMap( [ "NumberActions"],
{
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal,
})
def keyNumberGlobal(self, number):
if number == 0:
if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
self.pipDoHandle0Action()
elif len(self.servicelist.history) > 1:
self.checkTimeshiftRunning(self.recallPrevService)
else:
if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
ts = self.getTimeshift()
if ts and ts.isTimeshiftActive():
return
self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)
def recallPrevService(self, reply):
if reply:
self.servicelist.recallPrevService()
def numberEntered(self, service = None, bouquet = None):
if service:
self.selectAndStartService(service, bouquet)
def searchNumberHelper(self, serviceHandler, num, bouquet):
servicelist = serviceHandler.list(bouquet)
if servicelist:
serviceIterator = servicelist.getNext()
while serviceIterator.valid():
if num == serviceIterator.getChannelNum():
return serviceIterator
serviceIterator = servicelist.getNext()
return None
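# Resolve a channel number to a service: first search the current bouquet, then
# (with multi-bouquet enabled) walk all bouquets. Markers and directories are
# not accepted as playable matches, except numbered markers.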
def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
bouquet = bouquet or self.servicelist.getRoot()
service = None
serviceHandler = eServiceCenter.getInstance()
if not firstBouquetOnly:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if config.usage.multibouquet.value and not service:
bouquet = self.servicelist.bouquet_root
bouquetlist = serviceHandler.list(bouquet)
if bouquetlist:
bouquet = bouquetlist.getNext()
while bouquet.valid():
if bouquet.flags & eServiceReference.isDirectory and not bouquet.flags & eServiceReference.isInvisible:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if service:
playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
if not playable:
service = None
break
if config.usage.alternative_number_mode.value or firstBouquetOnly:
break
bouquet = bouquetlist.getNext()
return service, bouquet
def selectAndStartService(self, service, bouquet):
if service and not service.flags & eServiceReference.isMarker:
if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
self.servicelist.clearPath()
if self.servicelist.bouquet_root != bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(bouquet)
self.servicelist.setCurrentSelection(service) #select the service in servicelist
self.servicelist.zap(enable_pipzap = True)
self.servicelist.correctChannelNumber()
self.servicelist.startRoot = None
def zapToNumber(self, number):
service, bouquet = self.searchNumber(number)
self.selectAndStartService(service, bouquet)
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
""" ChannelSelection - handles the channelSelection dialog and the initial
channelChange actions which open the channelSelection dialog """
def __init__(self):
#instantiate forever
self.servicelist = self.session.instantiateDialog(ChannelSelection)
if config.misc.initialchannelselection.value:
self.onShown.append(self.firstRun)
self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
{
"keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
"keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
"keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
"keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
"historyBack": (self.historyBack, _("Switch to previous channel in history")),
"historyNext": (self.historyNext, _("Switch to next channel in history")),
"keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
"keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
"openServiceList": (self.openServiceList, _("Open service list")),
"openSatellites": (self.openSatellites, _("Open satellites list")),
})
def showTvChannelList(self, zap=False):
self.servicelist.setModeTv()
if zap:
self.servicelist.zap()
def showRadioChannelList(self, zap=False):
self.servicelist.setModeRadio()
if zap:
self.servicelist.zap()
def firstRun(self):
self.onShown.remove(self.firstRun)
config.misc.initialchannelselection.value = False
config.misc.initialchannelselection.save()
self.switchChannelDown()
def historyBack(self):
self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)
def historyBackCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyBack()
def historyNext(self):
self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)
def historyNextCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyNext()
def keyUpCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapDown()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelUp()
def keyDownCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapUp()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelDown()
def keyLeftCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelUp()
else:
self.zapUp()
def keyRightCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelDown()
else:
self.zapDown()
def keyChannelUpCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapDown()
else:
self.openServiceList()
def keyChannelDownCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapUp()
else:
self.openServiceList()
def getKeyUpHelptext(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to next channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
return value
def getKeyDownHelpText(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to previous channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
return value
def getKeyLeftHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
else:
value = _("Switch to previous channel")
return value
def getKeyRightHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
else:
value = _("Switch to next channel")
return value
def getKeyChannelUpHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to next channel") or _("Open service list")
def getKeyChannelDownHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to previous channel") or _("Open service list")
def switchChannelUp(self):
if "keep" not in config.usage.servicelist_cursor_behavior.value:
self.servicelist.moveUp()
self.session.execDialog(self.servicelist)
def switchChannelDown(self):
if "keep" not in config.usage.servicelist_cursor_behavior.value:
self.servicelist.moveDown()
self.session.execDialog(self.servicelist)
def zapUp(self):
if self.servicelist.inBouquet():
prev = self.servicelist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value:
if self.servicelist.atBegin():
self.servicelist.prevBouquet()
self.servicelist.moveUp()
cur = self.servicelist.getCurrentSelection()
if cur:
if self.servicelist.dopipzap:
isPlayable = self.session.pip.isPlayableForPipService(cur)
else:
isPlayable = isPlayableForCur(cur)
if cur and (cur.toString() == prev or isPlayable):
break
else:
self.servicelist.moveUp()
self.servicelist.zap(enable_pipzap = True)
def zapDown(self):
if self.servicelist.inBouquet():
prev = self.servicelist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
self.servicelist.nextBouquet()
else:
self.servicelist.moveDown()
cur = self.servicelist.getCurrentSelection()
if cur:
if self.servicelist.dopipzap:
isPlayable = self.session.pip.isPlayableForPipService(cur)
else:
isPlayable = isPlayableForCur(cur)
if cur and (cur.toString() == prev or isPlayable):
break
else:
self.servicelist.moveDown()
self.servicelist.zap(enable_pipzap = True)
def openSatellites(self):
self.servicelist.showSatellites()
self.session.execDialog(self.servicelist)
def openFavouritesList(self):
self.servicelist.showFavourites()
self.openServiceList()
def openServiceList(self):
self.session.execDialog(self.servicelist)
class InfoBarMenu:
""" Handles a menu action, to open the (main) menu """
def __init__(self):
self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
{
"mainMenu": (self.mainMenu, _("Enter main menu...")),
})
self.session.infobar = None
def mainMenu(self):
print "loading mainmenu XML..."
menu = mdom.getroot()
assert menu.tag == "menu", "root element in menu must be 'menu'!"
self.session.infobar = self
# so we can access the currently active infobar from screens opened from within the mainmenu
# at the moment used from the SubserviceSelection
self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
def mainMenuClosed(self, *val):
self.session.infobar = None
class InfoBarSimpleEventView:
""" Opens the Eventview for now/next """
def __init__(self):
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"showEventInfo": (self.openEventView, _("Show event details")),
"showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
def showEventInfoWhenNotVisible(self):
if self.shown:
self.openEventView()
else:
self.toggleShow()
return 1
def openEventView(self):
epglist = [ ]
self.epglist = epglist
service = self.session.nav.getCurrentService()
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
info = service and service.info()
if not info:
return
ptr = info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr = info.getEvent(1)
if ptr:
epglist.append(ptr)
if epglist:
self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0] = epglist[1]
epglist[1] = tmp
setEvent(epglist[0])
class SimpleServicelist:
def __init__(self, services):
self.setServices(services)
def setServices(self, services):
self.services = services
self.length = len(services)
self.current = 0
def selectService(self, service):
if not self.length:
self.current = -1
return False
else:
self.current = 0
while self.services[self.current].ref != service:
self.current += 1
if self.current >= self.length:
return False
return True
def nextService(self):
if not self.length:
return
if self.current+1 < self.length:
self.current += 1
else:
self.current = 0
def prevService(self):
if not self.length:
return
if self.current-1 > -1:
self.current -= 1
else:
self.current = self.length - 1
def currentService(self):
if not self.length or self.current >= self.length:
return None
return self.services[self.current]
class InfoBarEPG:
""" EPG - Opens an EPG list when the showEPGList action fires """
def __init__(self):
self.is_now_next = False
self.dlg_stack = [ ]
self.bouquetSel = None
self.eventView = None
self.epglist = []
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
})
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
"showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
"showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
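# Collect EVENTINFO plugins that do not expect a preselected event; the built-in
# EPG entries are only appended for the (single) main infobar, or when getAll is set.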
def getEPGPluginList(self, getAll=False):
pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
from Components.ServiceEventTracker import InfoBarCount
if getAll or InfoBarCount == 1:
pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
return pluginlist
def showEventInfoWhenNotVisible(self):
if self.shown:
self.openEventView()
else:
self.toggleShow()
return 1
def zapToService(self, service, preview = False, zapback = False):
if self.servicelist.startServiceRef is None:
self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if service is not None:
if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
self.servicelist.clearPath()
if self.servicelist.bouquet_root != self.epg_bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(self.epg_bouquet)
self.servicelist.setCurrentSelection(service) #select the service in servicelist
if not zapback or preview:
self.servicelist.zap(enable_pipzap = True)
if (self.servicelist.dopipzap or zapback) and not preview:
self.servicelist.zapBack()
if not preview:
self.servicelist.startServiceRef = None
self.servicelist.startRoot = None
def getBouquetServices(self, bouquet):
services = [ ]
servicelist = eServiceCenter.getInstance().list(bouquet)
if servicelist is not None:
while True:
service = servicelist.getNext()
if not service.valid(): #check if end of list
break
if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
continue
services.append(ServiceReference(service))
return services
def openBouquetEPG(self, bouquet, withCallback=True):
services = self.getBouquetServices(bouquet)
if services:
self.epg_bouquet = bouquet
if withCallback:
self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
else:
self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)
def changeBouquetCB(self, direction, epg):
if self.bouquetSel:
if direction > 0:
self.bouquetSel.down()
else:
self.bouquetSel.up()
bouquet = self.bouquetSel.getCurrent()
services = self.getBouquetServices(bouquet)
if services:
self.epg_bouquet = bouquet
epg.setServices(services)
def selectBouquet(self, bouquetref, epg):
services = self.getBouquetServices(bouquetref)
if services:
self.epg_bouquet = bouquetref
self.serviceSel.setServices(services)
epg.setServices(services)
def setService(self, service):
if service:
self.serviceSel.selectService(service)
def closed(self, ret=False):
closedScreen = self.dlg_stack.pop()
if self.bouquetSel and closedScreen == self.bouquetSel:
self.bouquetSel = None
elif self.eventView and closedScreen == self.eventView:
self.eventView = None
if ret:
dlgs = len(self.dlg_stack)
if dlgs > 0:
self.dlg_stack[dlgs-1].close(dlgs > 1)
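# Multi-EPG entry point: depending on config.usage.multiepg_ask_bouquet either
# ask which bouquet to show or silently start with the currently active one.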
def openMultiServiceEPG(self, withCallback=True):
bouquets = self.servicelist.getBouquetList()
if bouquets is None:
cnt = 0
else:
cnt = len(bouquets)
if config.usage.multiepg_ask_bouquet.value:
self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
else:
self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)
def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
if cnt > 1: # show bouquet list
if withCallback:
self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
self.dlg_stack.append(self.bouquetSel)
else:
self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
elif cnt == 1:
self.openBouquetEPG(bouquets[0][1], withCallback)
def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
root = self.servicelist.getRoot()
rootstr = root.toCompareString()
current = 0
for bouquet in bouquets:
if bouquet[1].toCompareString() == rootstr:
break
current += 1
if current >= cnt:
current = 0
if cnt > 1: # create bouquet list for bouq+/-
self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
if cnt >= 1:
self.openBouquetEPG(root, withCallback)
def changeServiceCB(self, direction, epg):
if self.serviceSel:
if direction > 0:
self.serviceSel.nextService()
else:
self.serviceSel.prevService()
epg.setService(self.serviceSel.currentService())
def SingleServiceEPGClosed(self, ret=False):
self.serviceSel = None
def openSingleServiceEPG(self):
ref = self.servicelist.getCurrentSelection()
if ref:
if self.servicelist.getMutableList(): # bouquet in channellist
current_path = self.servicelist.getRoot()
services = self.getBouquetServices(current_path)
self.serviceSel = SimpleServicelist(services)
if self.serviceSel.selectService(ref):
self.epg_bouquet = current_path
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB, parent=self)
else:
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
else:
self.session.open(EPGSelection, ref)
def runPlugin(self, plugin):
plugin(session = self.session, servicelist = self.servicelist)
def showEventInfoPlugins(self):
pluginlist = self.getEPGPluginList()
if pluginlist:
self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order", windowTitle=_("Events info menu"))
else:
self.openSingleServiceEPG()
def EventInfoPluginChosen(self, answer):
if answer is not None:
answer[1]()
def openSimilarList(self, eventid, refstr):
self.session.open(EPGSelection, refstr, None, eventid)
def getNowNext(self):
epglist = [ ]
service = self.session.nav.getCurrentService()
info = service and service.info()
ptr = info and info.getEvent(0)
if ptr and ptr.getEventName() != "":
epglist.append(ptr)
ptr = info and info.getEvent(1)
if ptr and ptr.getEventName() != "":
epglist.append(ptr)
self.epglist = epglist
def __evEventInfoChanged(self):
if self.is_now_next and len(self.dlg_stack) == 1:
self.getNowNext()
if self.eventView and self.epglist:
self.eventView.setEvent(self.epglist[0])
def showDefaultEPG(self):
self.openEventView()
def showSingleEPG(self):
self.openSingleServiceEPG()
def showMultiEPG(self):
self.openMultiServiceEPG()
def openEventView(self):
from Components.ServiceEventTracker import InfoBarCount
if InfoBarCount > 1:
epglist = [ ]
self.epglist = epglist
service = self.session.nav.getCurrentService()
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
info = service and service.info()
if not info:
return
ptr = info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr = info.getEvent(1)
if ptr:
epglist.append(ptr)
if epglist:
self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
else:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.getNowNext()
epglist = self.epglist
if not epglist:
self.is_now_next = False
epg = eEPGCache.getInstance()
ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
if ptr:
epglist.append(ptr)
ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
if ptr:
epglist.append(ptr)
else:
self.is_now_next = True
if epglist:
self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
self.dlg_stack.append(self.eventView)
if not epglist:
print "no epg for the service avail.. so we show multiepg instead of eventinfo"
self.openMultiServiceEPG(False)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0] = epglist[1]
epglist[1] = tmp
setEvent(epglist[0])
class InfoBarRdsDecoder:
"""provides RDS and Rass support/display"""
def __init__(self):
self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
self.session.instantiateSummaryDialog(self.rds_display)
self.rass_interactive = None
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.__serviceStopped,
iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
})
self["RdsActions"] = ActionMap(["InfobarRdsActions"],
{
"startRassInteractive": self.startRassInteractive
},-1)
self["RdsActions"].setEnabled(False)
self.onLayoutFinish.append(self.rds_display.show)
self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
def RassInteractivePossibilityChanged(self, state):
self["RdsActions"].setEnabled(state)
def RassSlidePicChanged(self):
if not self.rass_interactive:
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
if decoder:
decoder.showRassSlidePicture()
def __serviceStopped(self):
if self.rass_interactive is not None:
rass_interactive = self.rass_interactive
self.rass_interactive = None
rass_interactive.close()
def startRassInteractive(self):
self.rds_display.hide()
self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
def RassInteractiveClosed(self, *val):
if self.rass_interactive is not None:
self.rass_interactive = None
self.RassSlidePicChanged()
self.rds_display.show()
class InfoBarSeek:
"""handles actions like seeking, pause"""
SEEK_STATE_PLAY = (0, 0, 0, ">")
SEEK_STATE_PAUSE = (1, 0, 0, "||")
SEEK_STATE_EOF = (1, 0, 0, "END")
def __init__(self, actionmap = "InfobarSeekActions"):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evEOF: self.__evEOF,
iPlayableService.evSOF: self.__evSOF,
})
self.fast_winding_hint_message_showed = False
class InfoBarSeekActionMap(HelpableActionMap):
def __init__(self, screen, *args, **kwargs):
HelpableActionMap.__init__(self, screen, *args, **kwargs)
self.screen = screen
def action(self, contexts, action):
print "action:", action
if action[:5] == "seek:":
time = int(action[5:])
self.screen.doSeekRelative(time * 90000)
return 1
elif action[:8] == "seekdef:":
key = int(action[8:])
time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
-config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
-config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
self.screen.doSeekRelative(time * 90000)
return 1
else:
return HelpableActionMap.action(self, contexts, action)
self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
{
"playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
"pauseService": (self.pauseService, _("Pause playback")),
"unPauseService": (self.unPauseService, _("Continue playback")),
"okButton": (self.okButton, _("Continue playback")),
"seekFwd": (self.seekFwd, _("Seek forward")),
"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
"seekBack": (self.seekBack, _("Seek backward")),
"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
"jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
"jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
}, prio=-1)
# give them a little more priority to win over color buttons
self["SeekActions"].setEnabled(False)
self.seekstate = self.SEEK_STATE_PLAY
self.lastseekstate = self.SEEK_STATE_PLAY
self.onPlayStateChanged = [ ]
self.lockedBecauseOfSkipping = False
self.__seekableStatusChanged()
def makeStateForward(self, n):
return (0, n, 0, ">> %dx" % n)
def makeStateBackward(self, n):
return (0, -n, 0, "<< %dx" % n)
def makeStateSlowMotion(self, n):
return (0, 0, n, "/%d" % n)
def isStateForward(self, state):
return state[1] > 1
def isStateBackward(self, state):
return state[1] < 0
def isStateSlowMotion(self, state):
return state[1] == 0 and state[2] > 1
def getHigher(self, n, lst):
for x in lst:
if x > n:
return x
return False
def getLower(self, n, lst):
lst = lst[:]
lst.reverse()
for x in lst:
if x < n:
return x
return False
def showAfterSeek(self):
if isinstance(self, InfoBarShowHide):
if isStandardInfoBar(self) and self.timeshiftEnabled():
for c in self.onPlayStateChanged:
c(self.seekstate)
else:
self.doShow()
def up(self):
pass
def down(self):
pass
def getSeek(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
seek = service.seek()
if seek is None or not seek.isCurrentlySeekable():
return None
return seek
def isSeekable(self):
if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
return False
return True
def __seekableStatusChanged(self):
# print "seekable status changed!"
if not self.isSeekable():
self["SeekActions"].setEnabled(False)
# print "not seekable, return to play"
self.setSeekState(self.SEEK_STATE_PLAY)
else:
self["SeekActions"].setEnabled(True)
# print "seekable"
def __serviceStarted(self):
self.fast_winding_hint_message_showed = False
self.setSeekState(self.SEEK_STATE_PLAY)
self.__seekableStatusChanged()
def setSeekState(self, state):
service = self.session.nav.getCurrentService()
if service is None:
return False
if not self.isSeekable():
if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
state = self.SEEK_STATE_PLAY
pauseable = service.pause()
if pauseable is None:
print "not pauseable."
state = self.SEEK_STATE_PLAY
self.seekstate = state
if pauseable is not None:
if self.seekstate[0]:
print "resolved to PAUSE"
pauseable.pause()
elif self.seekstate[1]:
if not pauseable.setFastForward(self.seekstate[1]):
print "resolved to FAST FORWARD"
else:
self.seekstate = self.SEEK_STATE_PLAY
print "FAST FORWARD not possible: resolved to PLAY"
elif self.seekstate[2]:
if not pauseable.setSlowMotion(self.seekstate[2]):
print "resolved to SLOW MOTION"
else:
self.seekstate = self.SEEK_STATE_PAUSE
print "SLOW MOTION not possible: resolved to PAUSE"
else:
print "resolved to PLAY"
pauseable.unpause()
for c in self.onPlayStateChanged:
c(self.seekstate)
self.checkSkipShowHideLock()
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
return True
def playpauseService(self):
if self.seekstate != self.SEEK_STATE_PLAY:
self.unPauseService()
else:
self.pauseService()
def okButton(self):
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
elif self.seekstate == self.SEEK_STATE_PAUSE:
self.pauseService()
else:
self.unPauseService()
def pauseService(self):
if self.seekstate == self.SEEK_STATE_PAUSE:
if config.seek.on_pause.value == "play":
self.unPauseService()
elif config.seek.on_pause.value == "step":
self.doSeekRelative(1)
elif config.seek.on_pause.value == "last":
self.setSeekState(self.lastseekstate)
self.lastseekstate = self.SEEK_STATE_PLAY
else:
if self.seekstate != self.SEEK_STATE_EOF:
self.lastseekstate = self.seekstate
self.setSeekState(self.SEEK_STATE_PAUSE)
def unPauseService(self):
print "unpause"
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
self.setSeekState(self.SEEK_STATE_PLAY)
def doSeek(self, pts):
seekable = self.getSeek()
if seekable is None:
return
seekable.seekTo(pts)
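# Relative seek in 90 kHz PTS ticks (90000 ticks = 1 s); when at EOF, playback
# is resumed first so the seek can actually take effect.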
def doSeekRelative(self, pts):
seekable = self.getSeek()
if seekable is None:
return
prevstate = self.seekstate
if self.seekstate == self.SEEK_STATE_EOF:
if prevstate == self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_PAUSE)
else:
self.setSeekState(self.SEEK_STATE_PLAY)
seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
self.showAfterSeek()
def seekFwd(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # treat as unhandled action
if self.seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_PAUSE:
if len(config.seek.speeds_slowmotion.value):
self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
else:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_EOF:
pass
elif self.isStateForward(self.seekstate):
speed = self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
self.setSeekState(self.makeStateForward(speed))
elif self.isStateBackward(self.seekstate):
speed = -self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getLower(speed, config.seek.speeds_backward.value)
if speed:
self.setSeekState(self.makeStateBackward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateSlowMotion(self.seekstate):
speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
self.setSeekState(self.makeStateSlowMotion(speed))
def seekBack(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # treat as unhandled action
seekstate = self.seekstate
if seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
elif seekstate == self.SEEK_STATE_EOF:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
self.doSeekRelative(-6)
elif seekstate == self.SEEK_STATE_PAUSE:
self.doSeekRelative(-1)
elif self.isStateForward(seekstate):
speed = seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getLower(speed, config.seek.speeds_forward.value)
if speed:
self.setSeekState(self.makeStateForward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateBackward(seekstate):
speed = -seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
self.setSeekState(self.makeStateBackward(speed))
elif self.isStateSlowMotion(seekstate):
speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
if speed:
self.setSeekState(self.makeStateSlowMotion(speed))
else:
self.setSeekState(self.SEEK_STATE_PAUSE)
def seekFwdManual(self):
self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
def fwdSeekTo(self, minutes):
print "Seek", minutes, "minutes forward"
self.doSeekRelative(minutes * 60 * 90000)
def seekBackManual(self):
self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
def rwdSeekTo(self, minutes):
print "rwdSeekTo"
self.doSeekRelative(-minutes * 60 * 90000)
def checkSkipShowHideLock(self):
wantlock = self.seekstate != self.SEEK_STATE_PLAY
if config.usage.show_infobar_on_skip.value:
if self.lockedBecauseOfSkipping and not wantlock:
self.unlockShow()
self.lockedBecauseOfSkipping = False
if wantlock and not self.lockedBecauseOfSkipping:
self.lockShow()
self.lockedBecauseOfSkipping = True
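# Remaining play time derived from the 90 kHz PTS positions, scaled by the
# current playback speed; a cut-list end mark (if any) overrides the raw length.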
def calcRemainingTime(self):
seekable = self.getSeek()
if seekable is not None:
length = seekable.getLength()
try:
tmp = self.cueGetEndCutPosition()
if tmp:
length = (False, tmp)
except Exception:
pass
pos = seekable.getPlayPosition()
speednom = self.seekstate[1] or 1
speedden = self.seekstate[2] or 1
if not length[0] and not pos[0]:
if length[1] <= pos[1]:
return 0
remaining = (length[1] - pos[1]) * speedden / (90 * speednom)
return remaining
return False
def __evEOF(self):
if self.seekstate == self.SEEK_STATE_EOF:
return
# if we are seeking forward, we try to end up ~1s before the end, and pause there.
seekstate = self.seekstate
if self.seekstate != self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_EOF)
if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-1)
if seekstate == self.SEEK_STATE_PLAY: # regular EOF
self.doEofInternal(True)
else:
self.doEofInternal(False)
def doEofInternal(self, playing):
pass # Defined in subclasses
def __evSOF(self):
self.setSeekState(self.SEEK_STATE_PLAY)
self.doSeek(0)
# This is needed because some media players use InfoBarSeek but not InfoBarCueSheetSupport
def seekPreviousMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpPreviousMark()
def seekNextMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
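# Shows a small PVR state dialog (pause, FF speed, ...) together with the
# infobar whenever playback is not in plain PLAY state.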
class InfoBarPVRState:
def __init__(self, screen=PVRState, force_show = False):
self.onPlayStateChanged.append(self.__playStateChanged)
self.pvrStateDialog = self.session.instantiateDialog(screen)
self.onShow.append(self._mayShow)
self.onHide.append(self.pvrStateDialog.hide)
self.force_show = force_show
def _mayShow(self):
if self.shown and self.seekstate != self.SEEK_STATE_PLAY:
self.pvrStateDialog.show()
def __playStateChanged(self, state):
playstateString = state[3]
self.pvrStateDialog["state"].setText(playstateString)
# if we return into "PLAY" state, ensure that the dialog gets hidden if there will be no infobar displayed
if not config.usage.show_infobar_on_skip.value and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show:
self.pvrStateDialog.hide()
else:
self._mayShow()
class TimeshiftLive(Screen):
def __init__(self, session):
Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
def __init__(self):
InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show=True)
self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
self.onHide.append(self.timeshiftLiveScreen.hide)
if isStandardInfoBar(self):
self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
self.secondInfoBarScreenSimple and self.secondInfoBarScreenSimple.onShow.append(self.timeshiftLiveScreen.hide)
self.timeshiftLiveScreen.hide()
self.__hideTimer = eTimer()
self.__hideTimer.callback.append(self.__hideTimeshiftState)
self.onFirstExecBegin.append(self.pvrStateDialog.show)
def _mayShow(self):
if self.timeshiftEnabled():
if isStandardInfoBar(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
if self.secondInfoBarScreenSimple and self.secondInfoBarScreenSimple.shown:
self.secondInfoBarScreenSimple.hide()
if self.timeshiftActivated():
self.pvrStateDialog.show()
self.timeshiftLiveScreen.hide()
elif self.showTimeshiftState:
self.pvrStateDialog.hide()
self.timeshiftLiveScreen.show()
self.showTimeshiftState = False
if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
else:
self.__hideTimeshiftState()
def __hideTimeshiftState(self):
self.pvrStateDialog.hide()
self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
# I don't really like this class.
# It calls an otherwise unspecified "movie list" on up/down/movieList,
# so this is no more than an action map.
def __init__(self):
self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
{
"movieList": (self.showMovies, _("Open the movie list")),
"up": (self.up, _("Open the movie list")),
"down": (self.down, _("Open the movie list"))
})
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again FILE record PLAY enable disable enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
# the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
def __init__(self):
self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
{
"timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
"timeshiftStop": (self.stopTimeshift, _("Stop timeshift")) # currently undefined :), probably 'TV'
}, prio=1)
self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
{
"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
}, prio=-1) # priority over record
self["TimeshiftActivateActions"].setEnabled(False)
self.ts_rewind_timer = eTimer()
self.ts_rewind_timer.callback.append(self.rewindService)
self.ts_start_delay_timer = eTimer()
self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
self.ts_current_event_timer = eTimer()
self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
self.save_timeshift_file = False
self.timeshift_was_activated = False
self.showTimeshiftState = False
self.save_timeshift_only_current_event = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evEnd: self.__serviceEnd
})
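# Thin wrappers around the service's timeshift interface; they all evaluate to
# a false value when the current service does not provide timeshift.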
def getTimeshift(self):
service = self.session.nav.getCurrentService()
return service and service.timeshift()
def timeshiftEnabled(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftEnabled()
def timeshiftActivated(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftActive()
def startTimeshift(self, pauseService = True):
print "enable timeshift"
ts = self.getTimeshift()
if ts is None:
if not pauseService and not int(config.usage.timeshift_start_delay.value):
self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
print "no ts interface"
return 0
if ts.isTimeshiftEnabled():
print "hu, timeshift already enabled?"
else:
if not ts.startTimeshift():
# we remove the "relative time" for now.
#self.pvrStateDialog["timeshift"].setRelative(time.time())
if pauseService:
# PAUSE.
#self.setSeekState(self.SEEK_STATE_PAUSE)
self.activateTimeshiftEnd(False)
self.showTimeshiftState = True
else:
self.showTimeshiftState = False
# enable the "TimeshiftEnableActions", which will override
# the startTimeshift actions
self.__seekableStatusChanged()
# get current timeshift filename and calculate new
self.save_timeshift_file = False
self.save_timeshift_in_movie_dir = False
self.setCurrentEventTimer()
self.current_timeshift_filename = ts.getTimeshiftFilename()
self.new_timeshift_filename = self.generateNewTimeshiftFileName()
else:
print "timeshift failed"
def startTimeshiftWithoutPause(self):
self.startTimeshift(False)
def stopTimeshift(self):
ts = self.getTimeshift()
if ts and ts.isTimeshiftEnabled():
if int(config.usage.timeshift_start_delay.value):
ts.switchToLive()
else:
self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
else:
return 0
def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
ts = self.getTimeshift()
if answer and ts:
ts.stopTimeshift()
self.pvrStateDialog.hide()
self.setCurrentEventTimer()
# disable actions
self.__seekableStatusChanged()
# activates timeshift, and seeks to (almost) the end
def activateTimeshiftEnd(self, back = True):
self.showTimeshiftState = True
ts = self.getTimeshift()
print "activateTimeshiftEnd"
if ts is None:
return
if ts.isTimeshiftActive():
print "!! activate timeshift called - but shouldn't this be a normal pause?"
self.pauseService()
else:
print "play, ..."
ts.activateTimeshift() # activate timeshift will automatically pause
self.setSeekState(self.SEEK_STATE_PAUSE)
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-90000) # seek approx. 1 sec before end
self.timeshift_was_activated = True
if back:
self.ts_rewind_timer.start(200, 1)
def rewindService(self):
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
# generates only filename without path
def generateNewTimeshiftFileName(self):
name = "timeshift record"
info = { }
self.getProgramInfoAndEvent(info, name)
serviceref = info["serviceref"]
service_name = ""
if isinstance(serviceref, eServiceReference):
service_name = ServiceReference(serviceref).getServiceName()
begin_date = strftime("%Y%m%d %H%M", localtime(time()))
filename = begin_date + " - " + service_name
if config.recording.filename_composition.value == "short":
filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
elif config.recording.filename_composition.value == "long":
filename += " - " + info["name"] + " - " + info["description"]
else:
filename += " - " + info["name"] # standard
if config.recording.ascii_filenames.value:
filename = ASCIItranslit.legacyEncode(filename)
print "New timeshift filename: ", filename
return filename
# same as activateTimeshiftEnd, but pauses afterwards.
def activateTimeshiftEndAndPause(self):
print "activateTimeshiftEndAndPause"
#state = self.seekstate
self.activateTimeshiftEnd(False)
def callServiceStarted(self):
self.__serviceStarted()
def __seekableStatusChanged(self):
self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
state = self.getSeek() is not None and self.timeshiftEnabled()
self["SeekActions"].setEnabled(state)
if not state:
self.setSeekState(self.SEEK_STATE_PLAY)
self.restartSubtitle()
def __serviceStarted(self):
self.pvrStateDialog.hide()
self.__seekableStatusChanged()
if self.ts_start_delay_timer.isActive():
self.ts_start_delay_timer.stop()
if int(config.usage.timeshift_start_delay.value):
self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)
def checkTimeshiftRunning(self, returnFunction):
if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
message = _("Stop timeshift?")
if not self.save_timeshift_file:
choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
else:
choice = [(_("Yes"), "stop"), (_("No"), "continue")]
message += "\n" + _("Reminder, you have chosen to save timeshift file.")
if self.save_timeshift_only_current_event:
remaining = self.currentEventTime()
if remaining > 0:
message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
else:
returnFunction(True)
def checkTimeshiftRunningCallback(self, returnFunction, answer):
if answer:
if "movie" in answer:
self.save_timeshift_in_movie_dir = True
if "save" in answer:
self.save_timeshift_file = True
ts = self.getTimeshift()
if ts:
ts.saveTimeshiftFile()
del ts
if "continue" not in answer:
self.saveTimeshiftFiles()
returnFunction(answer and answer != "continue")
# renames/moves timeshift files if requested
def __serviceEnd(self):
self.saveTimeshiftFiles()
self.setCurrentEventTimer()
self.timeshift_was_activated = False
def saveTimeshiftFiles(self):
if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
dirname = config.usage.timeshift_path.value
else:
dirname = defaultMoviePath()
filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"
fileList = []
fileList.append((self.current_timeshift_filename, filename))
if fileExists(self.current_timeshift_filename + ".sc"):
fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
if fileExists(self.current_timeshift_filename + ".cuts"):
fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))
moveFiles(fileList)
self.save_timeshift_file = False
self.setCurrentEventTimer()
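# Seconds remaining in the currently running EPG event, or 0 if it cannot be
# determined; used e.g. for the save-timeshift reminder dialog.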
def currentEventTime(self):
remaining = 0
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref:
epg = eEPGCache.getInstance()
event = epg.lookupEventTime(ref, -1, 0)
if event:
now = int(time())
start = event.getBeginTime()
duration = event.getDuration()
end = start + duration
remaining = end - now
return remaining
def saveTimeshiftFileForEvent(self):
if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
message = _("Current event is over.\nSelect an option to save the timeshift file.")
choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)
def saveTimeshiftFileForEventCallback(self, answer):
self.save_timeshift_only_current_event = False
if answer:
ts = self.getTimeshift()
if ts and answer in ("save", "restart", "stop"):
self.stopTimeshiftcheckTimeshiftRunningCallback(True)
if answer in ("save", "restart"):
ts.saveTimeshiftFile()
del ts
self.saveTimeshiftFiles()
if answer == "restart":
self.ts_start_delay_timer.start(1000, True)
self.save_timeshift_file = False
self.save_timeshift_in_movie_dir = False
def setCurrentEventTimer(self, duration=0):
self.ts_current_event_timer.stop()
self.save_timeshift_only_current_event = False
if duration > 0:
self.save_timeshift_only_current_event = True
self.ts_current_event_timer.startLongTimer(duration)
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
EXTENSION_SINGLE = 0
EXTENSION_LIST = 1
def __init__(self):
self.list = []
self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
{
"extensions": (self.showExtensionSelection, _("Show extensions...")),
}, 1) # lower priority
def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
self.list.append((type, extension, key))
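# Assign a colour/number key to the extension: a requested key is honoured if
# still free, otherwise the first unused key from availableKeys is taken.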
def updateExtension(self, extension, key = None):
self.extensionsList.append(extension)
if key is not None:
if self.extensionKeys.has_key(key):
key = None
if key is None:
for x in self.availableKeys:
if not self.extensionKeys.has_key(x):
key = x
break
if key is not None:
self.extensionKeys[key] = len(self.extensionsList) - 1
def updateExtensions(self):
self.extensionsList = []
self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
self.extensionKeys = {}
for x in self.list:
if x[0] == self.EXTENSION_SINGLE:
self.updateExtension(x[1], x[2])
else:
for y in x[1]():
self.updateExtension(y[0], y[1])
def showExtensionSelection(self):
self.updateExtensions()
extensionsList = self.extensionsList[:]
keys = []
list = []
for x in self.availableKeys:
if self.extensionKeys.has_key(x):
entry = self.extensionKeys[x]
extension = self.extensionsList[entry]
if extension[2]():
list.append((extension[0](), extension))
keys.append(x)
extensionsList.remove(extension)
else:
extensionsList.remove(extension)
list.extend([(x[0](), x) for x in extensionsList])
keys += [""] * len(extensionsList)
self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=list, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order", windowTitle=_("Extensions menu"))
def extensionCallback(self, answer):
if answer is not None:
answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
def __init__(self):
self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)
def getPluginName(self, name):
return name
def getPluginList(self):
l = []
for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
args = inspect.getargspec(p.__call__)[0]
if len(args) == 1 or (len(args) == 2 and isinstance(self, InfoBarChannelSelection)):
l.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
l.sort(key = lambda e: e[2]) # sort by name
return l
def runPlugin(self, plugin):
if isinstance(self, InfoBarChannelSelection):
plugin(session = self.session, servicelist = self.servicelist)
else:
plugin(session = self.session)
from Components.Task import job_manager
class InfoBarJobman:
def __init__(self):
self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)
def getJobList(self):
return [((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None) for job in job_manager.getPendingJobs()]
def getJobName(self, job):
return "%s: %s (%d%%)" % (job.getStatustext(), job.name, int(100*job.progress/float(job.end)))
def showJobView(self, job):
from Screens.TaskView import JobView
job_manager.in_background = False
self.session.openWithCallback(self.JobViewCB, JobView, job)
def JobViewCB(self, in_background):
job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
def __init__(self):
try:
self.session.pipshown
except:
self.session.pipshown = False
self.lastPiPService = None
if SystemInfo["PIPAvailable"]:
self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
{
"activatePiP": (self.activePiP, self.activePiPName),
})
if (self.allowPiP):
self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
else:
self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.lastPiPServiceTimeoutTimer = eTimer()
self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
def pipShown(self):
return self.session.pipshown
def pipHandles0Action(self):
return self.pipShown() and config.usage.pip_zero_button.value != "standard"
def getShowHideName(self):
if self.session.pipshown:
return _("Disable Picture in Picture")
else:
return _("Activate Picture in Picture")
def getSwapName(self):
return _("Swap services")
def getMoveName(self):
return _("Move Picture in Picture")
def getTogglePipzapName(self):
slist = self.servicelist
if slist and slist.dopipzap:
return _("Zap focus to main screen")
return _("Zap focus to Picture in Picture")
def togglePipzap(self):
if not self.session.pipshown:
self.showPiP()
slist = self.servicelist
if slist and self.session.pipshown:
slist.togglePipzap()
if slist.dopipzap:
currentServicePath = slist.getCurrentServicePath()
slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
self.session.pip.servicePath = currentServicePath
def showPiP(self):
self.lastPiPServiceTimeoutTimer.stop()
slist = self.servicelist
if self.session.pipshown:
if slist and slist.dopipzap:
self.togglePipzap()
if self.session.pipshown:
lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
if lastPiPServiceTimeout >= 0:
self.lastPiPService = self.session.pip.getCurrentService()
if lastPiPServiceTimeout:
self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
del self.session.pip
self.session.pipshown = False
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
else:
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.show()
newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceOrGroup() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
else:
newservice = self.session.nav.getCurrentlyPlayingServiceOrGroup() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
else:
self.session.pipshown = False
del self.session.pip
if self.session.pipshown and hasattr(self, "screenSaverTimer"):
self.screenSaverTimer.stop()
self.lastPiPService = None
def clearLastPiPService(self):
self.lastPiPService = None
def activePiP(self):
		if (self.servicelist and self.servicelist.dopipzap) or not self.session.pipshown:
self.showPiP()
else:
self.togglePipzap()
def activePiPName(self):
if self.servicelist and self.servicelist.dopipzap:
return _("Disable Picture in Picture")
if self.session.pipshown:
return _("Zap focus to Picture in Picture")
else:
return _("Activate Picture in Picture")
def swapPiP(self):
if self.pipShown():
swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
pipref = self.session.pip.getCurrentService()
if swapservice and pipref and pipref.toString() != swapservice.toString():
slist = self.servicelist
if slist:
currentServicePath = slist.getCurrentServicePath()
currentBouquet = slist.getRoot()
slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
self.session.pip.playService(swapservice)
self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
if slist:
self.session.pip.servicePath = currentServicePath
self.session.pip.servicePath[1] = currentBouquet
if slist and slist.dopipzap:
slist.setCurrentSelection(self.session.pip.getCurrentService())
def movePiP(self):
if self.pipShown():
self.session.open(PiPSetup, pip = self.session.pip)
def pipDoHandle0Action(self):
use = config.usage.pip_zero_button.value
if "swap" == use:
self.swapPiP()
elif "swapstop" == use:
self.swapPiP()
self.showPiP()
elif "stop" == use:
self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
"""Instant Record - handles the instantRecord action in order to
start/stop instant records"""
def __init__(self):
self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
{
"instantRecord": (self.instantRecord, _("Instant recording...")),
})
self.SelectedInstantServiceRef = None
if isStandardInfoBar(self):
self.recording = []
else:
from Screens.InfoBar import InfoBar
InfoBarInstance = InfoBar.instance
if InfoBarInstance:
self.recording = InfoBarInstance.recording
def moveToTrash(self, entry):
print "instantRecord stop and delete recording: ", entry.name
import Tools.Trashcan
trash = Tools.Trashcan.createTrashFolder(entry.Filename)
from MovieSelection import moveServiceFiles
moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
def stopCurrentRecording(self, entry = -1):
def confirm(answer=False):
if answer:
self.session.nav.RecordTimer.removeEntry(self.recording[entry])
if self.deleteRecording:
self.moveToTrash(self.recording[entry])
self.recording.remove(self.recording[entry])
if entry is not None and entry != -1:
msg = _("Stop recording:")
if self.deleteRecording:
msg = _("Stop and delete recording:")
msg += "\n"
msg += " - " + self.recording[entry].name + "\n"
self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
def stopAllCurrentRecordings(self, list):
def confirm(answer=False):
if answer:
for entry in list:
self.session.nav.RecordTimer.removeEntry(entry[0])
self.recording.remove(entry[0])
if self.deleteRecording:
self.moveToTrash(entry[0])
msg = _("Stop recordings:")
if self.deleteRecording:
msg = _("Stop and delete recordings:")
msg += "\n"
for entry in list:
msg += " - " + entry[0].name + "\n"
self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
def getProgramInfoAndEvent(self, info, name):
info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
# try to get event info
event = None
try:
epg = eEPGCache.getInstance()
event = epg.lookupEventTime(info["serviceref"], -1, 0)
if event is None:
if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
else:
service = self.session.nav.getCurrentService()
event = service and service.info().getEvent(0)
except:
pass
info["event"] = event
info["name"] = name
info["description"] = ""
info["eventid"] = None
if event is not None:
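			# parseEvent() returns a tuple; judging from the indices used below, it is
			# laid out as (begin, end, name, description, eventid, ...).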
curEvent = parseEvent(event)
info["name"] = curEvent[2]
info["description"] = curEvent[3]
info["eventid"] = curEvent[4]
info["end"] = curEvent[1]
def startInstantRecording(self, limitEvent = False):
begin = int(time())
end = begin + 3600 # dummy
name = "instant record"
info = { }
self.getProgramInfoAndEvent(info, name)
serviceref = info["serviceref"]
event = info["event"]
if event is not None:
if limitEvent:
end = info["end"]
else:
if limitEvent:
self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
if isinstance(serviceref, eServiceReference):
serviceref = ServiceReference(serviceref)
recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
recording.dontSave = True
		if event is None or not limitEvent:
recording.autoincrease = True
recording.setAutoincreaseEnd()
simulTimerList = self.session.nav.RecordTimer.record(recording)
if simulTimerList is None: # no conflict
recording.autoincrease = False
self.recording.append(recording)
else:
if len(simulTimerList) > 1: # with other recording
name = simulTimerList[1].name
name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
print "[TIMER] conflicts with", name_date
recording.autoincrease = True # start with max available length, then increment
if recording.setAutoincreaseEnd():
self.session.nav.RecordTimer.record(recording)
self.recording.append(recording)
self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
else:
self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
else:
self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
recording.autoincrease = False
def isInstantRecordRunning(self):
print "self.recording:", self.recording
if self.recording:
for x in self.recording:
if x.isRunning():
return True
return False
def recordQuestionCallback(self, answer):
print "pre:\n", self.recording
if answer is None or answer[1] == "no":
return
list = []
recording = self.recording[:]
for x in recording:
			if x not in self.session.nav.RecordTimer.timer_list:
self.recording.remove(x)
elif x.dontSave and x.isRunning():
list.append((x, False))
self.deleteRecording = False
if answer[1] == "changeduration":
if len(self.recording) == 1:
self.changeDuration(0)
else:
self.session.openWithCallback(self.changeDuration, TimerSelection, list)
elif answer[1] == "addrecordingtime":
if len(self.recording) == 1:
self.addRecordingTime(0)
else:
self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
elif answer[1] == "changeendtime":
if len(self.recording) == 1:
self.setEndtime(0)
else:
self.session.openWithCallback(self.setEndtime, TimerSelection, list)
elif answer[1] == "timer":
import TimerEdit
self.session.open(TimerEdit.TimerEditList)
elif answer[1] == "stop":
if len(self.recording) == 1:
self.stopCurrentRecording(0)
else:
self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
elif answer[1] == "stopdelete":
self.deleteRecording = True
if len(self.recording) == 1:
self.stopCurrentRecording(0)
else:
self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
elif answer[1] == "stopall":
self.stopAllCurrentRecordings(list)
elif answer[1] == "stopdeleteall":
self.deleteRecording = True
self.stopAllCurrentRecordings(list)
elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime"))
if answer[1] == "manualduration":
self.changeDuration(len(self.recording)-1)
elif answer[1] == "manualendtime":
self.setEndtime(len(self.recording)-1)
elif "timeshift" in answer[1]:
ts = self.getTimeshift()
if ts:
ts.saveTimeshiftFile()
self.save_timeshift_file = True
if "movie" in answer[1]:
self.save_timeshift_in_movie_dir = True
if "event" in answer[1]:
remaining = self.currentEventTime()
if remaining > 0:
self.setCurrentEventTimer(remaining-15)
print "after:\n", self.recording
def setEndtime(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
dlg.setTitle(_("Please change recording endtime"))
def TimeDateInputClosed(self, ret):
if len(ret) > 1:
if ret[0]:
print "stopping recording at", strftime("%F %T", localtime(ret[1]))
if self.recording[self.selectedEntry].end != ret[1]:
self.recording[self.selectedEntry].autoincrease = False
self.recording[self.selectedEntry].end = ret[1]
self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
def changeDuration(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5 ", maxSize=True, type=Input.NUMBER)
def addRecordingTime(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
			self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want to add to the recording?"), text="5  ", maxSize=True, type=Input.NUMBER)
def inputAddRecordingTime(self, value):
if value:
print "added", int(value), "minutes for recording."
entry = self.recording[self.selectedEntry]
if int(value) != 0:
entry.autoincrease = False
entry.end += 60 * int(value)
self.session.nav.RecordTimer.timeChanged(entry)
def inputCallback(self, value):
if value:
print "stopping recording after", int(value), "minutes."
entry = self.recording[self.selectedEntry]
if int(value) != 0:
entry.autoincrease = False
entry.end = int(time()) + 60 * int(value)
self.session.nav.RecordTimer.timeChanged(entry)
def isTimerRecordRunning(self):
identical = timers = 0
for timer in self.session.nav.RecordTimer.timer_list:
if timer.isRunning() and not timer.justplay:
timers += 1
if self.recording:
for x in self.recording:
if x.isRunning() and x == timer:
identical += 1
return timers > identical
def instantRecord(self, serviceRef=None):
self.SelectedInstantServiceRef = serviceRef
pirr = preferredInstantRecordPath()
if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
if not pirr:
pirr = ""
self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
"\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
return
if isStandardInfoBar(self):
common = ((_("Add recording (stop after current event)"), "event"),
(_("Add recording (indefinitely)"), "indefinitely"),
(_("Add recording (enter recording duration)"), "manualduration"),
(_("Add recording (enter recording endtime)"), "manualendtime"),)
else:
common = ()
if self.isInstantRecordRunning():
title =_("A recording is currently running.\nWhat do you want to do?")
list = common + \
((_("Change recording (duration)"), "changeduration"),
(_("Change recording (add time)"), "addrecordingtime"),
(_("Change recording (endtime)"), "changeendtime"),)
list += ((_("Stop recording"), "stop"),)
if config.usage.movielist_trashcan.value:
list += ((_("Stop and delete recording"), "stopdelete"),)
if len(self.recording) > 1:
list += ((_("Stop all current recordings"), "stopall"),)
if config.usage.movielist_trashcan.value:
list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
if self.isTimerRecordRunning():
list += ((_("Stop timer recording"), "timer"),)
list += ((_("Do nothing"), "no"),)
else:
title=_("Start recording?")
list = common
if self.isTimerRecordRunning():
list += ((_("Stop timer recording"), "timer"),)
if isStandardInfoBar(self):
list += ((_("Do not record"), "no"),)
if isStandardInfoBar(self) and self.timeshiftEnabled():
list = list + ((_("Save timeshift file"), "timeshift"),
(_("Save timeshift file in movie directory"), "timeshift_movie"))
if self.currentEventTime() > 0:
list += ((_("Save timeshift only for current event"), "timeshift_event"),)
if list:
self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
else:
return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
def __init__(self):
self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
{
"audioSelection": (self.audioSelection, _("Audio options...")),
})
def audioSelection(self):
from Screens.AudioSelection import AudioSelection
self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
def audioSelected(self, ret=None):
print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
def __init__(self):
self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
{
"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
})
self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
{
"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
}, -1)
self["SubserviceQuickzapAction"].setEnabled(False)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
})
self.onClose.append(self.__removeNotifications)
self.bsel = None
def __removeNotifications(self):
self.session.nav.event.remove(self.checkSubservicesAvail)
def checkSubservicesAvail(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
if not subservices or subservices.getNumberOfSubservices() == 0:
self["SubserviceQuickzapAction"].setEnabled(False)
def nextSubservice(self):
self.changeSubservice(+1)
def prevSubservice(self):
self.changeSubservice(-1)
def playSubservice(self, ref):
if ref.getUnsignedData(6) == 0:
ref.setName("")
self.session.nav.playService(ref, checkParentalControl=False, adjust=False)
def changeSubservice(self, direction):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
n = subservices and subservices.getNumberOfSubservices()
if n and n > 0:
selection = -1
ref = self.session.nav.getCurrentlyPlayingServiceReference()
idx = 0
while idx < n:
if subservices.getSubservice(idx).toString() == ref.toString():
selection = idx
break
idx += 1
if selection != -1:
selection += direction
if selection >= n:
selection = 0
elif selection < 0:
selection = n - 1
newservice = subservices.getSubservice(selection)
if newservice.valid():
del subservices
del service
self.playSubservice(newservice)
def subserviceSelection(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
self.bouquets = self.servicelist.getBouquetList()
n = subservices and subservices.getNumberOfSubservices()
selection = 0
if n and n > 0:
ref = self.session.nav.getCurrentlyPlayingServiceReference()
tlist = []
idx = 0
cnt_parent = 0
while idx < n:
i = subservices.getSubservice(idx)
if i.toString() == ref.toString():
selection = idx
tlist.append((i.getName(), i))
if i.getUnsignedData(6):
cnt_parent += 1
idx += 1
if cnt_parent and self.bouquets and len(self.bouquets):
keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
if config.usage.multibouquet.value:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
selection += 3
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
selection += 2
self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
def subserviceSelected(self, service):
del self.bouquets
		if service is not None:
if isinstance(service[1], str):
if service[1] == "quickzap":
from Screens.SubservicesQuickzap import SubservicesQuickzap
self.session.open(SubservicesQuickzap, service[2])
else:
self["SubserviceQuickzapAction"].setEnabled(True)
self.playSubservice(service[1])
def addSubserviceToBouquetCallback(self, service):
if service and len(service) > 1 and isinstance(service[1], eServiceReference):
self.selectedSubservice = service
if self.bouquets is None:
cnt = 0
else:
cnt = len(self.bouquets)
if cnt > 1: # show bouquet list
self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
elif cnt == 1: # add to only one existing bouquet
self.addSubserviceToBouquet(self.bouquets[0][1])
self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO, timeout=5)
def bouquetSelClosed(self, confirmed):
self.bsel = None
del self.selectedSubservice
if confirmed:
self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO, timeout=5)
def addSubserviceToBouquet(self, dest):
self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
if self.bsel:
self.bsel.close(True)
else:
del self.selectedSubservice
class InfoBarRedButton:
def __init__(self):
self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
{
"activateRedButton": (self.activateRedButton, _("Red button...")),
})
self.onHBBTVActivation = [ ]
self.onRedButtonActivation = [ ]
def activateRedButton(self):
service = self.session.nav.getCurrentService()
info = service and service.info()
if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
for x in self.onHBBTVActivation:
x()
elif False: # TODO: other red button services
for x in self.onRedButtonActivation:
x()
class InfoBarTimerButton:
def __init__(self):
self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
{
"timerSelection": (self.timerSelection, _("Timer selection...")),
})
def timerSelection(self):
from Screens.TimerEdit import TimerEditList
self.session.open(TimerEditList)
class InfoBarVmodeButton:
def __init__(self):
self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions",
{
"vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
})
def vmodeSelection(self):
self.session.open(VideoMode)
class VideoMode(Screen):
def __init__(self,session):
Screen.__init__(self, session)
self["videomode"] = Label()
self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
{
"vmodeSelection": self.selectVMode
})
self.Timer = eTimer()
self.Timer.callback.append(self.quit)
self.selectVMode()
def selectVMode(self):
policy = config.av.policy_43
if self.isWideScreen():
policy = config.av.policy_169
idx = policy.choices.index(policy.value)
idx = (idx + 1) % len(policy.choices)
policy.value = policy.choices[idx]
self["videomode"].setText(policy.value)
self.Timer.start(1000, True)
def isWideScreen(self):
from Components.Converter.ServiceInfo import WIDESCREEN
service = self.session.nav.getCurrentService()
info = service and service.info()
return info.getInfo(iServiceInformation.sAspect) in WIDESCREEN
def quit(self):
self.Timer.stop()
self.close()
class InfoBarAdditionalInfo:
def __init__(self):
self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
self["TimeshiftPossible"] = self["RecordingPossible"]
self["ExtensionsAvailable"] = Boolean(fixed=1)
# TODO: these properties should be queried from the input device keymap
self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
self["ShowAudioOnYellow"] = Boolean(fixed=0)
self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
def __init__(self):
self.onExecBegin.append(self.checkNotifications)
Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
self.onClose.append(self.__removeNotification)
def __removeNotification(self):
Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)
def checkNotificationsIfExecing(self):
if self.execing:
self.checkNotifications()
def checkNotifications(self):
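		# Pop the oldest pending notification and show it. A "ZapError" popup with no
		# other notification active is instantiated directly (not opened), so that any
		# keypress can dismiss it via keypressNotification.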
notifications = Notifications.notifications
if notifications:
n = notifications[0]
del notifications[0]
cb = n[0]
if n[3].has_key("onSessionOpenCallback"):
n[3]["onSessionOpenCallback"]()
del n[3]["onSessionOpenCallback"]
if cb:
dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
elif not Notifications.current_notifications and n[4] == "ZapError":
if n[3].has_key("timeout"):
del n[3]["timeout"]
n[3]["enable_input"] = False
dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
self.hide()
dlg.show()
self.notificationDialog = dlg
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
else:
dlg = self.session.open(n[1], *n[2], **n[3])
# remember that this notification is currently active
d = (n[4], dlg)
Notifications.current_notifications.append(d)
dlg.onClose.append(boundFunction(self.__notificationClosed, d))
def closeNotificationInstantiateDialog(self):
if hasattr(self, "notificationDialog"):
self.session.deleteDialog(self.notificationDialog)
del self.notificationDialog
eActionMap.getInstance().unbindAction('', self.keypressNotification)
def keypressNotification(self, key, flag):
if flag:
self.closeNotificationInstantiateDialog()
def __notificationClosed(self, d):
Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.serviceHasEnded
})
def serviceHasEnded(self):
print "service end!"
try:
self.setSeekState(self.SEEK_STATE_PLAY)
except:
pass
class InfoBarCueSheetSupport:
CUT_TYPE_IN = 0
CUT_TYPE_OUT = 1
CUT_TYPE_MARK = 2
CUT_TYPE_LAST = 3
ENABLE_RESUME_SUPPORT = False
def __init__(self, actionmap = "InfobarCueSheetActions"):
self["CueSheetActions"] = HelpableActionMap(self, actionmap,
{
"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
}, prio=1)
self.cut_list = [ ]
self.is_closing = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evCuesheetChanged: self.downloadCuesheet,
})
def __serviceStarted(self):
if self.is_closing:
return
print "new service started! trying to download cuts!"
self.downloadCuesheet()
if self.ENABLE_RESUME_SUPPORT:
for (pts, what) in self.cut_list:
if what == self.CUT_TYPE_LAST:
last = pts
break
else:
last = getResumePoint(self.session)
if last is None:
return
# only resume if at least 10 seconds ahead, or <10 seconds before the end.
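		# (Positions are 90 kHz PTS ticks: 900000 ticks == 10 seconds. E.g. a stored
		# position of 45*90000 would resume playback at 0:00:45.)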
seekable = self.__getSeekable()
if seekable is None:
return # Should not happen?
length = seekable.getLength() or (None,0)
print "seekable.getLength() returns:", length
# Hmm, this implies we don't resume if the length is unknown...
if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
self.resume_point = last
l = last / 90000
if "ask" in config.usage.on_movie_start.value or not length[1]:
Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
elif config.usage.on_movie_start.value == "resume":
# TRANSLATORS: The string "Resuming playback" flashes for a moment
# TRANSLATORS: at the start of a movie, when the user has selected
# TRANSLATORS: "Resume from last position" as start behavior.
# TRANSLATORS: The purpose is to notify the user that the movie starts
# TRANSLATORS: in the middle somewhere and not from the beginning.
# TRANSLATORS: (Some translators seem to have interpreted it as a
# TRANSLATORS: question or a choice, but it is a statement.)
Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)
def playLastCB(self, answer):
		if answer:
self.doSeek(self.resume_point)
self.hideAfterResume()
def hideAfterResume(self):
if isinstance(self, InfoBarShowHide):
self.hide()
def __getSeekable(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
return service.seek()
def cueGetCurrentPosition(self):
seek = self.__getSeekable()
if seek is None:
return None
r = seek.getPlayPosition()
if r[0]:
return None
return long(r[1])
def cueGetEndCutPosition(self):
ret = False
isin = True
for cp in self.cut_list:
if cp[1] == self.CUT_TYPE_OUT:
if isin:
isin = False
ret = cp[0]
elif cp[1] == self.CUT_TYPE_IN:
isin = True
return ret
def jumpPreviousNextMark(self, cmp, start=False):
current_pos = self.cueGetCurrentPosition()
if current_pos is None:
return False
mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
if mark is not None:
pts = mark[0]
else:
return False
self.doSeek(pts)
return True
def jumpPreviousMark(self):
# we add 5 seconds, so if the play position is <5s after
# the mark, the mark before will be used
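		# (cmp is fed mark_position - current_position in 90 kHz PTS ticks; negating it
		# and subtracting 5*90000 means only marks at least 5 seconds behind the
		# current position produce a non-negative diff in getNearestCutPoint.)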
self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)
def jumpNextMark(self):
if not self.jumpPreviousNextMark(lambda x: x-90000):
self.doSeek(-1)
def getNearestCutPoint(self, pts, cmp=abs, start=False):
# can be optimized
beforecut = True
nearest = None
bestdiff = -1
instate = True
if start:
bestdiff = cmp(0 - pts)
if bestdiff >= 0:
nearest = [0, False]
for cp in self.cut_list:
if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
beforecut = False
if cp[1] == self.CUT_TYPE_IN: # Start is here, disregard previous marks
diff = cmp(cp[0] - pts)
if start and diff >= 0:
nearest = cp
bestdiff = diff
else:
nearest = None
bestdiff = -1
if cp[1] == self.CUT_TYPE_IN:
instate = True
elif cp[1] == self.CUT_TYPE_OUT:
instate = False
elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
diff = cmp(cp[0] - pts)
if instate and diff >= 0 and (nearest is None or bestdiff > diff):
nearest = cp
bestdiff = diff
return nearest
def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
current_pos = self.cueGetCurrentPosition()
if current_pos is None:
print "not seekable"
return
nearest_cutpoint = self.getNearestCutPoint(current_pos)
if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
if onlyreturn:
return nearest_cutpoint
if not onlyadd:
self.removeMark(nearest_cutpoint)
elif not onlyremove and not onlyreturn:
self.addMark((current_pos, self.CUT_TYPE_MARK))
if onlyreturn:
return None
def addMark(self, point):
insort(self.cut_list, point)
self.uploadCuesheet()
self.showAfterCuesheetOperation()
def removeMark(self, point):
self.cut_list.remove(point)
self.uploadCuesheet()
self.showAfterCuesheetOperation()
def showAfterCuesheetOperation(self):
if isinstance(self, InfoBarShowHide):
self.doShow()
def __getCuesheet(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
return service.cueSheet()
def uploadCuesheet(self):
cue = self.__getCuesheet()
if cue is None:
print "upload failed, no cuesheet interface"
return
cue.setCutList(self.cut_list)
def downloadCuesheet(self):
cue = self.__getCuesheet()
if cue is None:
print "download failed, no cuesheet interface"
self.cut_list = [ ]
else:
self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
<convert type="EventTime">Progress</convert>
</widget>
</screen>"""
# for picon: (path="piconlcd" will use LCD picons)
# <widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
# <convert type="ServiceName">Reference</convert>
# </widget>
class InfoBarSummarySupport:
def __init__(self):
pass
def createSummary(self):
return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
<convert type="ServicePosition">Position</convert>
</widget>
</screen>"""
class InfoBarMoviePlayerSummarySupport:
def __init__(self):
pass
def createSummary(self):
return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
def __init__(self):
self.teletext_plugin = None
for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
self.teletext_plugin = p
if self.teletext_plugin is not None:
self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
{
"startTeletext": (self.startTeletext, _("View teletext..."))
})
else:
print "no teletext plugin found!"
def startTeletext(self):
self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
def __init__(self):
object.__init__(self)
self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
{
"subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
})
self.selected_subtitle = None
if isStandardInfoBar(self):
self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
else:
from Screens.InfoBar import InfoBar
self.subtitle_window = InfoBar.instance.subtitle_window
self.subtitle_window.hide()
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceChanged,
iPlayableService.evEnd: self.__serviceChanged,
iPlayableService.evUpdatedInfo: self.__updatedInfo
})
def getCurrentServiceSubtitle(self):
service = self.session.nav.getCurrentService()
return service and service.subtitle()
def subtitleSelection(self):
subtitle = self.getCurrentServiceSubtitle()
subtitlelist = subtitle and subtitle.getSubtitleList()
		if self.selected_subtitle or (subtitlelist and len(subtitlelist) > 0):
from Screens.AudioSelection import SubtitleSelection
self.session.open(SubtitleSelection, self)
else:
return 0
def __serviceChanged(self):
if self.selected_subtitle:
self.selected_subtitle = None
self.subtitle_window.hide()
def __updatedInfo(self):
if not self.selected_subtitle:
subtitle = self.getCurrentServiceSubtitle()
cachedsubtitle = subtitle and subtitle.getCachedSubtitle()
if cachedsubtitle:
self.enableSubtitle(cachedsubtitle)
def enableSubtitle(self, selectedSubtitle):
subtitle = self.getCurrentServiceSubtitle()
self.selected_subtitle = selectedSubtitle
if subtitle and self.selected_subtitle:
subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
self.subtitle_window.show()
else:
if subtitle:
subtitle.disableSubtitles(self.subtitle_window.instance)
self.subtitle_window.hide()
def restartSubtitle(self):
if self.selected_subtitle:
self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evTuneFailed: self.__tuneFailed,
iPlayableService.evTunedIn: self.__serviceStarted,
iPlayableService.evStart: self.__serviceStarted
})
self.__serviceStarted()
def __serviceStarted(self):
self.closeNotificationInstantiateDialog()
self.last_error = None
Notifications.RemovePopup(id = "ZapError")
def __tuneFailed(self):
if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
service = self.session.nav.getCurrentService()
info = service and service.info()
error = info and info.getInfo(iServiceInformation.sDVBState)
if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
self.session.nav.currentlyPlayingServiceReference = None
self.session.nav.currentlyPlayingServiceOrGroup = None
if error == self.last_error:
error = None
else:
self.last_error = error
error = {
eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
eDVBServicePMTHandler.eventNewProgramInfo: None,
eDVBServicePMTHandler.eventTuned: None,
eDVBServicePMTHandler.eventSOF: None,
eDVBServicePMTHandler.eventEOF: None,
eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
				}.get(error)  # returns None when the key does not exist in the dict
if error and not config.usage.hide_zap_errors.value:
self.closeNotificationInstantiateDialog()
if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
def __init__(self):
self.inactivityTimer = eTimer()
self.inactivityTimer.callback.append(self.inactivityTimeout)
self.restartInactiveTimer()
self.sleepTimer = eTimer()
self.sleepStartTime = 0
self.sleepTimer.callback.append(self.sleepTimerTimeout)
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)
def keypress(self, key, flag):
if flag:
self.restartInactiveTimer()
def restartInactiveTimer(self):
time = abs(int(config.usage.inactivity_timer.value))
if time:
self.inactivityTimer.startLongTimer(time)
else:
self.inactivityTimer.stop()
def inactivityTimeout(self):
if config.usage.inactivity_timer_blocktime.value:
curtime = localtime(time())
if curtime.tm_year > 1970: #check if the current time is valid
duration = blocktime = extra_time = False
if config.usage.inactivity_timer_blocktime_by_weekdays.value:
weekday = curtime.tm_wday
if config.usage.inactivity_timer_blocktime_day[weekday].value:
blocktime = True
begintime = tuple(config.usage.inactivity_timer_blocktime_begin_day[weekday].value)
endtime = tuple(config.usage.inactivity_timer_blocktime_end_day[weekday].value)
extra_time = config.usage.inactivity_timer_blocktime_extra_day[weekday].value
begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin_day[weekday].value)
endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end_day[weekday].value)
else:
blocktime = True
begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
extra_time = config.usage.inactivity_timer_blocktime_extra.value
begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
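				# The block window may wrap past midnight (begintime > endtime, e.g.
				# 23:00-06:00); both orderings are handled below, and a negative
				# remaining duration is corrected later by adding 24 hours.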
if blocktime and (begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime)):
duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
elif extra_time and (begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
duration = (endtime_extra[0]*3600 + endtime_extra[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
if duration:
if duration < 0:
duration += 24*3600
self.inactivityTimer.startLongTimer(duration)
return
if Screens.Standby.inStandby:
self.inactivityTimeoutCallback(True)
else:
message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)
def inactivityTimeoutCallback(self, answer):
if answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
def sleepTimerState(self):
if self.sleepTimer.isActive():
return (self.sleepStartTime - time()) / 60
return 0
def setSleepTimer(self, sleepTime):
print "[InfoBarPowersaver] set sleeptimer", sleepTime
if sleepTime:
m = abs(sleepTime / 60)
message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
self.sleepTimer.startLongTimer(sleepTime)
self.sleepStartTime = time() + sleepTime
else:
message = _("The sleep timer has been disabled.")
self.sleepTimer.stop()
Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)
def sleepTimerTimeout(self):
if not Screens.Standby.inStandby:
list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
message = _("Your receiver will got to stand by due to the sleeptimer.")
message += "\n" + _("Do you want this?")
self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)
def sleepTimerTimeoutCallback(self, answer):
if answer == "extend":
print "[InfoBarPowersaver] extend sleeptimer"
self.setSleepTimer(900)
elif answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
self.setSleepTimer(0)
def goStandby(self):
if not Screens.Standby.inStandby:
print "[InfoBarPowersaver] goto standby"
self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
def HDMIIn(self):
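		# Service type 8192 (0x2000) denotes the HDMI-input pseudo service referenced
		# by the hard-coded eServiceReference strings below.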
slist = self.servicelist
if slist.dopipzap:
curref = self.session.pip.getCurrentService()
if curref and curref.type != 8192:
self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
else:
self.session.pip.playService(slist.servicelist.getCurrent())
else:
curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if curref and curref.type != 8192:
if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
setResumePoint(self.session)
self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
elif isStandardInfoBar(self):
self.session.nav.playService(slist.servicelist.getCurrent())
else:
self.session.nav.playService(self.cur_service)
|
gpl-2.0
| 7,106,807,168,855,365,000 | 33.652702 | 267 | 0.721661 | false |
hpcugent/easybuild-framework
|
easybuild/toolchains/pgi.py
|
1
|
1780
|
##
# Copyright 2015 Bart Oldeman
#
# This file is triple-licensed under GPLv2 (see below), MIT, and
# BSD three-clause licenses.
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for PGI compiler toolchain.
:author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada)
"""
from easybuild.toolchains.compiler.pgi import Pgi
from easybuild.toolchains.gcccore import GCCcore
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
class PgiToolchain(Pgi):
"""Simple toolchain with just the PGI compilers."""
NAME = 'PGI'
# use GCCcore as subtoolchain rather than GCC, since two 'real' compiler-only toolchains don't mix well,
# in particular in a hierarchical module naming scheme
SUBTOOLCHAIN = [GCCcore.NAME, DUMMY_TOOLCHAIN_NAME]
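    # Illustrative effect (module names are hypothetical and depend on the module
    # naming scheme in use): in a hierarchical module tree, software built with this
    # toolchain lands under a Compiler/PGI/<version> branch, instead of PGI sitting
    # next to a full GCC compiler toolchain at the same level.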
OPTIONAL = False
|
gpl-2.0
| -7,992,043,115,076,133,000 | 38.555556 | 108 | 0.755056 | false |
MacHu-GWU/pyknackhq-project
|
setup.py
|
1
|
4034
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Frequently used classifiers list = [
"Development Status :: 1 - Planning",
"Development Status :: 2 - Pre-Alpha",
"Development Status :: 3 - Alpha",
"Development Status :: 4 - Beta",
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Development Status :: 7 - Inactive",
"Intended Audience :: Customer Service",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Financial and Insurance Industry",
"Intended Audience :: Healthcare Industry",
"Intended Audience :: Information Technology",
"Intended Audience :: Legal Industry",
"Intended Audience :: Manufacturing",
"Intended Audience :: Other Audience",
"Intended Audience :: Religion",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"Intended Audience :: Telecommunications Industry",
"License :: OSI Approved :: BSD License",
"License :: OSI Approved :: MIT License",
"License :: OSI Approved :: Apache Software License",
"License :: OSI Approved :: GNU General Public License (GPL)",
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
"Natural Language :: English",
"Natural Language :: Chinese (Simplified)",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2 :: Only",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3 :: Only",
]
"""
from setuptools import setup, find_packages
from datetime import datetime
import os
GITHUB_ACCOUNT = "MacHu-GWU" # your GitHub account name
RELEASE_TAG = "2015-11-20" # the GitHub release tag
NAME = "pyknackhq" # name your package
VERSION = __import__(NAME).__version__
PACKAGES = [NAME] + ["%s.%s" % (NAME, i) for i in find_packages(NAME)]
PACKAGE_DATA = {
}
SHORT_DESCRIPTION = __import__(NAME).__short_description__ # GitHub Short Description
AUTHOR = "Sanhe Hu"
AUTHOR_EMAIL = "husanhe@gmail.com"
MAINTAINER = AUTHOR
MAINTAINER_EMAIL = AUTHOR_EMAIL
PROJECT_NAME = os.path.basename(os.getcwd()) # the project dir is the project name
URL = "https://github.com/{0}/{1}".format(GITHUB_ACCOUNT, PROJECT_NAME)
DOWNLOAD_URL = "https://github.com/{0}/{1}/tarball/{2}".format(
GITHUB_ACCOUNT, PROJECT_NAME, RELEASE_TAG)
with open("readme.rst", "rb") as f:
LONG_DESCRIPTION = f.read().decode("utf-8")
LICENSE = "MIT"
PLATFORMS = ["Windows", "MacOS", "Unix"]
CLASSIFIERS = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
]
with open("requirements.txt", "rb") as f:
REQUIRES = [i.strip() for i in f.read().decode("utf-8").split("\n")]
setup(
name = NAME,
packages = PACKAGES,
include_package_data = True,
package_data = PACKAGE_DATA,
version = VERSION,
author = AUTHOR,
author_email = AUTHOR_EMAIL,
maintainer = MAINTAINER,
maintainer_email = MAINTAINER_EMAIL,
url = URL,
description = SHORT_DESCRIPTION,
long_description = LONG_DESCRIPTION,
download_url = DOWNLOAD_URL,
classifiers = CLASSIFIERS,
platforms = PLATFORMS,
license = LICENSE,
install_requires = REQUIRES,
)
|
mit
| -7,294,247,463,790,889,000 | 32.907563 | 85 | 0.65171 | false |
INFN-Catania/FedManager
|
fednodes/abstract_classes.py
|
1
|
2405
|
"""
Copyright 2015 INFN (Italy)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = 'maurizio'
from abc import ABCMeta, abstractmethod
from fednodes.messaging import MessageScheduler
class iConsumer(object):
__metaclass__ = ABCMeta
def __init__(self, messageScheduler, configuration):
self._ms = messageScheduler
self._conf = configuration
self.configure()
@abstractmethod
def configure(self):
pass
class iProducer():
__metaclass__ = ABCMeta
def __init__(self, configuration):
# self._ms=messageScheduler
self._conf = configuration
self.configure()
@abstractmethod
def configure(self):
pass
@abstractmethod
def sendMessage(self, fedMessageAsString, topic_target):
pass
# TODO: add 'add_actor' method
class Fednode():
def __init__(self, configuration, message_class, consumer_class, producer_class):
self._configuration = configuration
producer = producer_class(configuration)
self._ms = MessageScheduler(message_class, producer, configuration)
consumer = consumer_class(self._ms, configuration)
def get_configuration(self):
return self._configuration
def get_ms(self):
return self._ms
class iFedMessage():
__metaclass__ = ABCMeta
@abstractmethod
def setSource(self, source):
pass
@abstractmethod
def setId(self, id):
pass
@abstractmethod
def getId(self):
pass
@abstractmethod
def getSource(self):
pass
@abstractmethod
def getTarget(self):
pass
@abstractmethod
def getBody(self):
pass
@abstractmethod
def getBodyUriType(self):
pass
@abstractmethod
def toString(self):
pass
@classmethod
def createMessageFromString(cls, msg):
raise NotImplementedError()
|
apache-2.0
| -6,429,133,711,305,096,000 | 22.125 | 85 | 0.665696 | false |
Azure/azure-sdk-for-python
|
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2018_05_01/_configuration.py
|
1
|
3238
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.credentials import TokenCredential
class PolicyClientConfiguration(Configuration):
"""Configuration for PolicyClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
"""
def __init__(
self,
credential, # type: "TokenCredential"
subscription_id, # type: str
**kwargs # type: Any
):
# type: (...) -> None
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(PolicyClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2018-05-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
|
mit
| -288,816,339,190,468,300 | 44.605634 | 129 | 0.66677 | false |
town-hall-pinball/project-omega
|
pin/service/matrix.py
|
1
|
3862
|
# Copyright (c) 2014 - 2016 townhallpinball.org
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from ..lib import dmd
from ..lib.devices import devices
from ..lib.ui import Canvas
class Matrix(Canvas):
box_when = "closed"
devices = None
selected = None
pulse_color = 0x8
pulse_timer = None
handler = None
def __init__(self, handler=None, box_when=None, devices="switches"):
super(Matrix, self).__init__(left=0, top=0, width=40)
if box_when:
self.box_when = box_when
self.devices = devices
self.handler = handler
self.layout()
def redraw(self):
self.clear()
if self.devices == "switches":
self.dot_column(2, "SD")
self.vline(5, 2, dmd.height - 4, color=0x8)
col = 1
for x in xrange(8, 8 + (8 * 3), 3):
prefix = "S" if self.devices == "switches" else "L"
self.dot_column(x, prefix + str(col))
col += 1
x += 3
if self.devices == "switches":
self.vline(x, 2, dmd.height - 4, color=0x8)
x += 3
self.dot_column(x, "SF")
self.invalidate()
def select(self, switch):
self.handler.cancel(self.pulse_timer)
self.selected = switch
if self.handler and self.selected:
self.pulse_selection()
elif self.handler and not self.selected:
self.redraw()
def pulse_selection(self):
self.pulse_color += 0x2
if self.pulse_color > 0xf:
self.pulse_color = 0x8
self.redraw()
self.pulse_timer = self.handler.wait(0.1, self.pulse_selection)
def cell_rendering(self, device):
if not device:
return "empty"
if device == self.selected:
return "selected"
if self.devices == "switches":
if self.box_when == "closed" and device.is_closed():
return "box"
if self.box_when == "active" and device.active:
return "box"
else:
if device.is_active():
return "box"
return "dot"
def dot_column(self, x, prefix):
        row = 1
for y in xrange(5, 5 + (8 * 3), 3):
ident = prefix + str(row)
device = devices.get(ident)
rendering = self.cell_rendering(device)
if rendering == "box":
self.box(x - 1, y - 1, 3, 3)
elif rendering == "dot":
self.dot(x, y)
elif rendering == "selected":
self.dot(x, y, self.pulse_color)
self.dot(x-1, y-1, self.pulse_color)
self.dot(x-1, y+1, self.pulse_color)
self.dot(x+1, y-1, self.pulse_color)
self.dot(x+1, y+1, self.pulse_color)
row += 1
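# Illustrative wiring sketch (not part of the original module; the handler
# object and device names below are assumptions for demonstration only):
#
#   matrix = Matrix(handler=service_mode, devices="lamps")
#   matrix.select(devices.get("L11"))   # pulse lamp L11 on the display
#   matrix.select(None)                 # clear the selection and redraw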
|
mit
| -4,653,638,042,930,319,000 | 35.093458 | 77 | 0.586225 | false |
fuku-ys/earthquake
|
pyearthquake/OLD.orchestrator/explorer.py
|
1
|
10760
|
from abc import ABCMeta, abstractmethod
import colorama
import random
import json
from eventlet.greenthread import sleep
from eventlet.timeout import Timeout
from eventlet.queue import *
import six
import time
from .. import LOG as _LOG
from ..signal.signal import EventBase, ActionBase
from .digestible import DigestibleBase
LOG = _LOG.getChild('orchestrator.explorer')
@six.add_metaclass(ABCMeta)
class ExplorerBase(object):
def __init__(self):
# self.graph = None
self._event_q = Queue()
self.oc = None
self.state = None
self.initial_state = None
self.visited_terminal_states = {} # key: state, value: count (TODO: MOVE TO LIBEARTHQUAKE.SO)
self.time_slice = 0
def init_with_orchestrator(self, oc, initial_state):
"""
:param oc: OrchestratorBase
:param initial_state: StateBase
:return: None
"""
self.oc = oc
self.initial_state = initial_state
self.state = self.initial_state.make_copy()
LOG.debug(colorama.Back.BLUE +
'set initial state=%s' +
colorama.Style.RESET_ALL, self.state.to_short_str())
# self.graph = Graph(self.state)
def send_event(self, event):
"""
Send event *to* explorer
:param event: EventBase
:return: None
"""
assert isinstance(event, EventBase)
self._event_q.put(event)
def recv_events(self, timeout_msecs):
"""
Let explorer receive events
:param timeout_msecs: int
        :return: list of EventBase received before the timeout fires
"""
events = []
timeout = Timeout(timeout_msecs / 1000.0)
try:
while True:
event = self._event_q.get()
events.append(event)
except Timeout:
pass
finally:
timeout.cancel()
return events
def _worker__print_events_and_digestibles(self, digestibles, new_events, new_digestibles):
if digestibles:
LOG.debug('Before state %s, the following OLD %d digestibles had been yielded', self.state.to_short_str(),
len(digestibles))
for digestible in digestibles: LOG.debug('* %s', digestible)
        LOG.debug('In state %s, the following %d events happened', self.state.to_short_str(), len(new_events))
for e in new_events:
try:
LOG.debug('* %f: %s', e.recv_timestamp, e.abstract_msg)
except Exception:
LOG.debug('* %s', e)
LOG.debug('In state %s, the following NEW %d digestibles were yielded for the above %d events',
self.state.to_short_str(), len(new_digestibles), len(new_events))
for new_digestible in new_digestibles: LOG.debug('* %s', new_digestible)
def worker(self):
digestibles = []
while True:
if self.oc.termination_detector.is_terminal_state(self.state): self.state = self.on_terminal_state()
new_events = self.recv_events(timeout_msecs=self.time_slice)
if not new_events and not digestibles: continue
new_digestibles = []
for e in new_events:
e_handled = False
for w in self.oc.watchers:
if w.handles(e): new_digestibles.extend(w.on_event(self.state, e)); e_handled = True
if not e_handled: new_digestibles.extend(self.oc.default_watcher.on_event(self.state, e))
self._worker__print_events_and_digestibles(digestibles, new_events, new_digestibles)
digestibles.extend(new_digestibles)
if not digestibles: LOG.warn('No DIGESTIBLE, THIS MIGHT CAUSE FALSE DEADLOCK, state=%s',
self.state.to_short_str())
next_state, digestibles = self.do_it(digestibles)
if not digestibles: LOG.warn('No DIGESTIBLE, THIS MIGHT CAUSE FALSE DEADLOCK, next_state=%s',
next_state.to_short_str())
LOG.debug('transit from %s to %s', self.state.to_short_str(), next_state.to_short_str())
self.state = next_state
def do_it(self, digestibles):
"""
select a digestible from digestibles and do it in the state.
returns: (next_state, other_digestibles)
FIXME: rename me!
"""
if not digestibles: return self.state, []
chosen_digestible = self.choose_digestible(digestibles)
LOG.debug('Chosen digestible: %s', chosen_digestible)
assert (any(digestible.event.uuid == chosen_digestible.event.uuid for digestible in digestibles))
digestibles_len_before_remove = len(digestibles)
digestibles.remove(chosen_digestible)
assert len(digestibles) == digestibles_len_before_remove - 1, 'hash race?'
other_digestibles = digestibles
if chosen_digestible:
next_state = self.do_transition(chosen_digestible)
else:
LOG.warn('No DIGESTIBLE chosen, THIS MIGHT CAUSE FALSE DEADLOCK, state=%s', self.state.to_short_str())
next_state = self.state
## NOTE: as other digestibles are also enabled in the NEXT state, we return other digestibles here.
## the worker will handle other digestibles in the next round.
return next_state, other_digestibles
@abstractmethod
def choose_digestible(self, digestibles):
pass
def call_action(self, action):
self.oc.call_action(action)
def do_transition(self, digestible):
assert isinstance(digestible, DigestibleBase)
LOG.debug(colorama.Back.BLUE +
"Invoking the action:\n" +
" action=%s\n" +
" event=%s\n" +
" state=%s\n" +
" digestible=%s\n" +
colorama.Style.RESET_ALL,
digestible.action, digestible.event,
self.state.to_short_str(),
digestible)
self.call_action(digestible.action)
next_state = self.state.make_copy()
next_state.append_digestible(digestible)
LOG.debug(colorama.Back.BLUE +
'State Transition: %s->%s' +
colorama.Style.RESET_ALL, self.state.to_short_str(), next_state.to_short_str())
# self.graph.visit_edge(self.state, next_state, digestible)
## NOTE: worker sets self.state to next_state
return next_state
def stat_on_terminal_state(self, past_all_states, past_visit_count, past_visit_count_sum):
"""
TODO: move to LIBEARTHQUAKE.SO
"""
if past_visit_count == 0:
banner = 'TERMINAL STATE(FRONTIER)'
new_all_states = past_all_states + 1
else:
banner = 'TERMINAL STATE(REVISITED)'
new_all_states = past_all_states
LOG.info(
colorama.Back.RED + '%s state %s, count=%d->%d, count_sum=%d->%d, all_states=%d->%d' + colorama.Style.RESET_ALL,
banner,
self.state.to_short_str(),
past_visit_count, past_visit_count + 1,
past_visit_count_sum, past_visit_count_sum + 1,
past_all_states, new_all_states)
def regist_state_to_libeq(self):
json_dict = self.state.to_jsondict()
json_str = json.dumps(json_dict)
short_str = self.state.to_short_str()
rc = self.oc.libearthquake.EQRegistExecutionHistory_UnstableAPI(short_str, json_str)
assert rc == 0
def on_terminal_state(self):
LOG.debug(colorama.Back.RED +
'*** REACH TERMINAL STATE (%s) ***' +
colorama.Style.RESET_ALL, self.state.to_short_str())
self.regist_state_to_libeq()
## make stat (TODO: move to LIBEARTHQUAKE.SO)
all_states = len(self.visited_terminal_states)
visit_count_sum = sum(self.visited_terminal_states.values())
if self.state in self.visited_terminal_states:
visit_count = self.visited_terminal_states[self.state]
else:
visit_count = 0
self.visited_terminal_states[self.state] = 0
self.stat_on_terminal_state(all_states, visit_count, visit_count_sum)
self.visited_terminal_states[self.state] += 1
## notify termination to watchers
for w in self.oc.watchers: w.on_terminal_state(self.state)
## Reset
next_state = self.initial_state.make_copy()
LOG.debug('Reset to %s', next_state.to_short_str())
## notify reset to watchers
for w in self.oc.watchers: w.on_reset()
return next_state
class DumbExplorer(ExplorerBase):
def __init__(self, sleep_msecs=0):
super(DumbExplorer, self).__init__()
self.sleep_msecs = sleep_msecs
def choose_digestible(self, digestibles):
assert (digestibles)
return digestibles[0]
def call_action(self, action):
if self.sleep_msecs:
sleep(self.sleep_msecs / 1000.0)
super(DumbExplorer, self).call_action(action)
class RandomExplorer(ExplorerBase):
def __init__(self, time_slice):
super(RandomExplorer, self).__init__()
self.time_slice = time_slice # msecs
def choose_digestible(self, digestibles):
assert (digestibles)
r = random.randint(0, len(digestibles) - 1)
chosen_digestible = digestibles[r]
return chosen_digestible
class TimeBoundedRandomExplorer(RandomExplorer):
def __init__(self, time_slice, time_bound):
super(TimeBoundedRandomExplorer, self).__init__(time_slice)
self.saved_time_slice = time_slice
self.time_bound = time_bound # msecs
def choose_digestible(self, digestibles):
assert (digestibles)
now = time.time()
        # build a list (not a lazy filter object) so len() and indexing also work on Python 3
        hurried = [d for d in digestibles
                   if (now - d.event.recv_timestamp) * 1000.0 > self.time_bound]
if len(hurried) > 0:
LOG.debug('Hurried to send the following %d digestibles, now=%s', len(hurried), now)
LOG.debug(hurried)
self.time_slice = 0
chosen_digestible = hurried[0]
else:
self.time_slice = self.saved_time_slice
r = random.randint(0, len(digestibles) - 1)
chosen_digestible = digestibles[r]
return chosen_digestible
class GreedyExplorer(ExplorerBase):
def __init__(self, time_slice):
        super(GreedyExplorer, self).__init__()  # ExplorerBase.__init__ takes no arguments
        self.time_slice = time_slice
raise NotImplementedError(
"GreedyExplorer is under refactoring since July 8, 2015. This will revive when new graph storage is implemented (Issue #23)")
def choose_digestible(self, digestibles):
pass
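# Minimal sketch of a custom explorer (illustrative only; not part of the
# original module, but it assumes the same digestible interface used above):
#
# class OldestFirstExplorer(ExplorerBase):
#     def choose_digestible(self, digestibles):
#         # deterministically replay the oldest pending event first
#         return min(digestibles, key=lambda d: d.event.recv_timestamp)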
|
apache-2.0
| -7,532,178,620,348,961,000 | 37.705036 | 137 | 0.598978 | false |
nickmarton/Vivid
|
vivid/classes/parsers/point_parser.py
|
1
|
2439
|
"""This section introduces the PointParser class."""
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from point import Point
class PointParser(object):
"""
PointParser class. The PointParser class is used for parsing Point object
related expressions.
:ivar _is_Parser: An identifier to use in place of ``type`` or \
``isinstance``.
"""
def __init__(self):
"""
Construct a PointParser object.
"""
self._is_Parser = True
def __call__(self, *args):
"""
Call PointParser object (e.g., ``PointParser(expression)``).
"""
return self._eval(*args)
def _eval(self, string):
"""
Try to evaluate given string
(e.g., "``is_on(P(2.0,2.0),P(1.0,1.0),P(3.0,3.0))``").
:param string: The expression to evaluate; the PointParser object \
unstringifies Point objects in ``string`` parameter and tries to call \
a function of the Point object (also given by ``string`` parameter) \
with unstringified Points as arguments.
:type string: ``str``
:raises ValueError: Function provided in ``string`` parameter is not \
a function in the Point class, some argument is not a Point after \
trying to unstringify or the ``string`` parameter is improperly \
formatted.
"""
fn_start, fn_end = string.find("("), string.rfind(")")
fn_name, fn_args = string[:fn_start], string[fn_start + 1: fn_end]
for fn in dir(Point):
if fn_name == fn:
point_function = getattr(Point, fn)
break
else:
raise ValueError("Function not contained in dir of Point")
import re
parsed_args = []
point_pattern = r'P\(-?\d\.\d+(,-?\d\.\d+)*\)|P\(x(,x)*\)'
match_obj_iter = re.finditer(point_pattern, fn_args)
for match in match_obj_iter:
parsed_args.append(Point.unstringify(match.group()))
fn_args = fn_args.replace(match.group(), '', 1)
if not all([char == "," for char in fn_args]):
raise ValueError("Only Point arguments acceptable")
try:
return point_function(*parsed_args)
        except Exception:
raise ValueError("Bad args provided")
def main():
"""."""
pass
if __name__ == "__main__":
main()
|
mit
| 2,184,414,222,683,700,700 | 29.111111 | 79 | 0.568676 | false |
meyerbe/pycles
|
generate_namelist.py
|
1
|
70404
|
import argparse
import json
import pprint
from sys import exit
import uuid
import ast
def main():
parser = argparse.ArgumentParser(prog='Namelist Generator')
parser.add_argument('case_name')
# Optional Arguments for CGILS
parser.add_argument('--perturbed_temperature', default='False',
help='Specify if perturbed temperature case is to be run (CGILS) as True/False')
parser.add_argument('--control_subsidence', default='False',
help='Specify if control subsidence is to be used in perturbed runs (CGILS) as True/False')
parser.add_argument('--zgils_location', default='False',
help='specify location (6/11/12)')
args = parser.parse_args()
case_name = args.case_name
#Optional Arguments for CGILS
is_p2 = ast.literal_eval(args.perturbed_temperature)
is_ctl_omega = ast.literal_eval(args.control_subsidence)
zgils_loc = ast.literal_eval(args.zgils_location)
print(zgils_loc)
if case_name == 'StableBubble':
namelist = StableBubble()
elif case_name == 'SaturatedBubble':
namelist = SaturatedBubble()
elif case_name == 'ColdPoolDry_single_3D':
namelist = ColdPoolDry_3D('single')
elif case_name == 'ColdPoolDry_double_3D':
namelist = ColdPoolDry_3D('double')
elif case_name == 'ColdPoolDry_triple_3D':
namelist = ColdPoolDry_3D('triple')
elif case_name == 'SullivanPatton':
namelist = SullivanPatton()
elif case_name == 'Bomex':
namelist = Bomex()
elif case_name == 'Gabls':
namelist = Gabls()
elif case_name == 'DYCOMS_RF01':
namelist = DYCOMS_RF01()
elif case_name == 'DYCOMS_RF02':
namelist = DYCOMS_RF02()
elif case_name == 'SMOKE':
namelist = SMOKE()
elif case_name == 'Rico':
namelist = Rico()
elif case_name == 'Isdac':
namelist = Isdac()
elif case_name == 'IsdacCC':
namelist = IsdacCC()
elif case_name == 'Mpace':
namelist = Mpace()
elif case_name == 'Sheba':
namelist = Sheba()
elif case_name == 'CGILS_S6':
namelist = CGILS_S6(is_p2, is_ctl_omega)
elif case_name == 'CGILS_S11':
namelist = CGILS_S11(is_p2, is_ctl_omega)
elif case_name == 'CGILS_S12':
namelist = CGILS_S12(is_p2, is_ctl_omega)
elif case_name == 'ZGILS':
namelist = ZGILS(zgils_loc)
elif case_name == 'DCBLSoares':
namelist = DCBLSoares()
elif case_name == 'DCBLSoares_moist':
namelist = DCBLSoares_moist()
else:
print('Not a valid case name')
exit()
write_file(namelist)
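# Example invocations (illustrative; the case name must match one of the
# branches above, and the CGILS flags are parsed with ast.literal_eval):
#   python generate_namelist.py Bomex
#   python generate_namelist.py CGILS_S6 --perturbed_temperature True --control_subsidence False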
def SullivanPatton():
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 32
namelist['grid']['ny'] = 32
namelist['grid']['nz'] = 32
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 160.0
namelist['grid']['dy'] = 160.0
namelist['grid']['dz'] = 64.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 10.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 7200.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_Dry'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.02
namelist['damping']['Rayleigh']['z_d'] = 500.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['TKE']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 1800.0
namelist['fields_io']['diagnostic_fields'] = ['temperature','buoyancy_frequency','viscosity']
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['meta'] = {}
namelist['meta']['simname'] = 'SullivanPatton'
namelist['meta']['casename'] = 'SullivanPatton'
return namelist
def ColdPoolDry_3D(number):
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 200
namelist['grid']['ny'] = 200
namelist['grid']['nz'] = 120 # height of 12km is sufficient (for dTh3K_z1000_r1000)
namelist['grid']['gw'] = 5
namelist['grid']['dx'] = 100.0
namelist['grid']['dy'] = 100.0
namelist['grid']['dz'] = 100.0
namelist['init'] = {}
namelist['init']['dTh'] = 3.0 # temperature anomaly
namelist['init']['shape'] = 1 # shape of temperature anomaly: 1 = cos2-shape
namelist['init']['h'] = 2000.0 # initial height of temperature anomaly
    rstar = 1000.0                               # assumed default (m); rstar is not defined elsewhere in this script
    namelist['init']['r'] = rstar                # initial radius of temperature anomaly
    namelist['init']['marg'] = 500.              # width of margin (transition zone for temperature anomaly)
    if number == 'single':
        namelist['init']['ic'] = int(namelist['grid']['nx'] / 2)
        namelist['init']['jc'] = int(namelist['grid']['ny'] / 2)
    elif number == 'double':
        d = 1000.0                               # assumed separation of CPs; 'd' is not defined elsewhere in this script
        namelist['init']['sep'] = d              # separation of CPs
        # (ic, jc): point of collision; CP coordinates: (ic+-sep/2, jc)
    elif number == 'triple':
        d = 1000.0                               # assumed separation of CPs in equilateral triangle
        namelist['init']['d'] = d
        namelist['init']['ic'] = int(namelist['grid']['nx'] / 2)  # center index; plain int() avoids needing numpy here
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.3
namelist['time_stepping']['dt_initial'] = 10.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_Dry'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
# namelist['sgs']['scheme'] = 'UniformViscosity'
# namelist['sgs']['UniformViscosity'] = {}
# namelist['sgs']['UniformViscosity']['viscosity'] = 0.0
# namelist['sgs']['UniformViscosity']['diffusivity'] = 0.0
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh' #'None'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 600
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['conditional_stats'] = {}
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['None']
namelist['stats_io']['frequency'] = 100.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 100.0
# namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency','viscosity']
namelist['fields_io']['diagnostic_fields'] = ['temperature', 'theta']
namelist['meta'] = {}
if number == 'single':
namelist['meta']['casename'] = 'ColdPoolDry_single_3D'
namelist['meta']['simname'] = 'ColdPoolDry_single_3D'
elif number == 'double':
namelist['meta']['casename'] = 'ColdPoolDry_double_3D'
namelist['meta']['simname'] = 'ColdPoolDry_double_3D'
elif number == 'triple':
namelist['meta']['casename'] = 'ColdPoolDry_triple_3D'
namelist['meta']['simname'] = 'ColdPoolDry_triple_3D'
namelist['surface'] = {}
# schemes: 'none', 'bulk', 'const'
namelist['surface']['scheme'] = 'none'
namelist['visualization'] = {}
namelist['visualization']['frequency'] = 10000.0
namelist['tracers'] = {}
namelist['tracers']['use_tracers'] = 'passive'
# 1: same tracer in whole domain; 2: different tracer in initial anomaly vs. environment
namelist['tracers']['number'] = 1
namelist['tracers']['kmin'] = 0
namelist['tracers']['kmax'] = 10
return namelist
def SaturatedBubble():
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 100
namelist['grid']['ny'] = 5
namelist['grid']['nz'] = 50
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 200.0
namelist['grid']['dy'] = 200.0
namelist['grid']['dz'] = 200.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.3
namelist['time_stepping']['dt_initial'] = 10.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 1000.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_SA'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'UniformViscosity'
namelist['sgs']['UniformViscosity'] = {}
namelist['sgs']['UniformViscosity']['viscosity'] = 0.0
namelist['sgs']['UniformViscosity']['diffusivity'] = 0.0
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'None'
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['conditional_stats'] = {}
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['None']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 100.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency','viscosity']
namelist['meta'] = {}
namelist['meta']['casename'] = 'SaturatedBubble'
namelist['meta']['simname'] = 'SaturatedBubble'
return namelist
def StableBubble():
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 512
namelist['grid']['ny'] = 7
namelist['grid']['nz'] = 64
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 100.0
namelist['grid']['dy'] = 100.0
namelist['grid']['dz'] = 100.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 10.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 1000.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_Dry'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'UniformViscosity'
namelist['sgs']['UniformViscosity'] = {}
namelist['sgs']['UniformViscosity']['viscosity'] = 75.0
namelist['sgs']['UniformViscosity']['diffusivity'] = 75.0
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'None'
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['conditional_stats'] = {}
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['None']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 100.0
namelist['fields_io']['diagnostic_fields'] = ['temperature','buoyancy_frequency']
namelist['visualization'] = {}
namelist['visualization']['frequency'] = 60.0
namelist['meta'] = {}
namelist['meta']['simname'] = 'StableBubble'
namelist['meta']['casename'] = 'StableBubble'
return namelist
def Bomex():
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 64
namelist['grid']['ny'] = 64
namelist['grid']['nz'] = 75
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 100.0
namelist['grid']['dy'] = 100.0
namelist['grid']['dz'] = 100 / 2.5
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 10.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 21600.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_SA'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['cs'] = 0.17
namelist['sgs']['UniformViscosity'] = {}
namelist['sgs']['UniformViscosity']['viscosity'] = 1.2
namelist['sgs']['UniformViscosity']['diffusivity'] = 3.6
namelist['sgs']['TKE'] = {}
namelist['sgs']['TKE']['ck'] = 0.1
namelist['sgs']['TKE']['cn'] = 0.76
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 600
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Cumulus','TKE']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 1800.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency','viscosity']
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['visualization'] = {}
namelist['visualization']['frequency'] = 1800.0
namelist['meta'] = {}
namelist['meta']['simname'] = 'Bomex'
namelist['meta']['casename'] = 'Bomex'
namelist['ClausiusClapeyron'] = {}
namelist['ClausiusClapeyron']['temperature_min'] = 100.15
namelist['ClausiusClapeyron']['temperature_max'] = 500.0
namelist['initialization'] = {}
namelist['initialization']['random_seed_factor'] = 1
return namelist
def Gabls():
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 64
namelist['grid']['ny'] = 64
namelist['grid']['nz'] = 64
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 6.25
namelist['grid']['dy'] = 6.25
namelist['grid']['dz'] = 6.25
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] =1.0
namelist['time_stepping']['dt_max'] = 2.0
namelist['time_stepping']['t_max'] = 43200.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_Dry'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] ={}
namelist['sgs']['Smagorinsky']['cs'] = 0.17
namelist['sgs']['Smagorinsky']['prt'] = 1.0/3.0
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.02
namelist['damping']['Rayleigh']['z_d'] = 100.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['StableBL']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 1800.0
namelist['fields_io']['diagnostic_fields'] = ['temperature','buoyancy_frequency','viscosity']
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['meta'] = {}
namelist['meta']['simname'] = 'Gabls'
namelist['meta']['casename'] = 'Gabls'
return namelist
def DYCOMS_RF01():
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 96
namelist['grid']['ny'] = 96
namelist['grid']['nz'] = 300
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 35.0
namelist['grid']['dy'] = 35.0
namelist['grid']['dz'] = 5.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 4.0
namelist['time_stepping']['t_max'] = 4.0 * 3600.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_SA'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['microphysics']['cloud_sedimentation'] = False
namelist['microphysics']['ccn'] = 100.0e6
namelist['radiation'] = {}
namelist['radiation']['use_RRTM'] = True
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 60.0
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
#namelist['sgs']['UniformViscosity']['diffusivity'] = 4.0
#namelist['sgs']['UniformViscosity']['viscosity'] = 3*4.0
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = True
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.002
namelist['damping']['Rayleigh']['z_d'] = 500.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['DYCOMS', 'Flux','TKE']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 3600.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency']
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['visualization'] = {}
namelist['visualization']['frequency'] = 1e6
namelist['meta'] = {}
namelist['meta']['simname'] = 'DYCOMS_RF01'
namelist['meta']['casename'] = 'DYCOMS_RF01'
return namelist
def DYCOMS_RF02():
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 128
namelist['grid']['ny'] = 128
namelist['grid']['nz'] = 300
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 50.0
namelist['grid']['dy'] = 50.0
namelist['grid']['dz'] = 5.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 6.0 * 3600.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'SB_Liquid'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['microphysics']['cloud_sedimentation'] = True
namelist['microphysics']['ccn'] = 55.0e6
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = True
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.002
namelist['damping']['Rayleigh']['z_d'] = 500.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['DYCOMS', 'Flux']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 3600.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency']
namelist['visualization'] = {}
namelist['visualization']['frequency'] = 1e6
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['meta'] = {}
namelist['meta']['simname'] = 'DYCOMS_RF02'
namelist['meta']['casename'] = 'DYCOMS_RF02'
return namelist
def SMOKE():
'''
Namelist generator for the smoke cloud case:
Bretherton, C. S., and coauthors, 1999:
    An intercomparison of radiatively-driven entrainment and turbulence in a smoke cloud,
    as simulated by different numerical models. Quart. J. Roy. Meteor. Soc., 125, 391-423.
:return:
'''
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 64
namelist['grid']['ny'] = 64
namelist['grid']['nz'] = 50
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 50.0
namelist['grid']['dy'] = 50.0
namelist['grid']['dz'] = 25.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 4.0 * 3600.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_Dry'
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.002
namelist['damping']['Rayleigh']['z_d'] = 500.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['SMOKE']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 3600.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency','viscosity']
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['meta'] = {}
namelist['meta']['simname'] = 'SMOKE'
namelist['meta']['casename'] = 'SMOKE'
return namelist
def Rico(): # Rico = Rain in Cumulus Over the Ocean
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 128
namelist['grid']['ny'] = 128
namelist['grid']['nz'] = 150
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 100.0
namelist['grid']['dy'] = 100.0
namelist['grid']['dz'] = 40.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0*24.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant'
namelist['microphysics'] = {}
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['microphysics']['cloud_sedimentation'] = False
namelist['microphysics']['ccn'] = 70.0e6
namelist['microphysics']['scheme'] = 'SB_Liquid'
namelist['microphysics']['SB_Liquid'] = {}
namelist['microphysics']['SB_Liquid']['nu_droplet'] = 0
namelist['microphysics']['SB_Liquid']['mu_rain'] = 1
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 1
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 800
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Cumulus']
namelist['stats_io']['frequency'] = 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 1800.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency','viscosity']
namelist['meta'] = {}
namelist['meta']['simname'] = 'Rico'
namelist['meta']['casename'] = 'Rico'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
return namelist
def Isdac():
namelist = {}
namelist["grid"] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 64
namelist['grid']['ny'] = 64
namelist['grid']['nz'] = 250
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 50.0
namelist['grid']['dy'] = 50.0
namelist['grid']['dz'] = 10.0
namelist["mpi"] = {}
namelist["mpi"]["nprocx"] = 1
namelist["mpi"]["nprocy"] = 1
namelist["mpi"]["nprocz"] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.5
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0 * 8.0
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'Arctic_1M'
namelist['microphysics']['phase_partitioning'] = 'Arctic'
namelist['microphysics']['n0_ice'] = 1.0e7
namelist["sgs"] = {}
namelist["sgs"]['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = True
namelist['radiation'] = {}
namelist['radiation']['use_RRTM'] = False
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 60.0
namelist['radiation']['RRTM']['buffer_points'] = 15
namelist['radiation']['RRTM']['patch_pressure'] = 600.0*100.0
namelist['radiation']['RRTM']['adjes'] = 0.0
namelist["diffusion"] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 600
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = "stats"
namelist['stats_io']['auxiliary'] = 'None'
namelist['stats_io']['frequency'] = 30.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = "fields"
namelist['fields_io']['frequency'] = 36000.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency','viscosity']
namelist['meta'] = {}
namelist['meta']['simname'] = 'Isdac'
namelist['meta']['casename'] = 'Isdac'
return namelist
def IsdacCC():
namelist = {}
namelist["grid"] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 64
namelist['grid']['ny'] = 64
namelist['grid']['nz'] = 250
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 50.0
namelist['grid']['dy'] = 50.0
namelist['grid']['dz'] = 10.0
namelist["mpi"] = {}
namelist["mpi"]["nprocx"] = 1
namelist["mpi"]["nprocy"] = 1
namelist["mpi"]["nprocz"] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.5
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0 * 8.0
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'Arctic_1M'
namelist['microphysics']['phase_partitioning'] = 'Arctic'
namelist['microphysics']['n0_ice'] = 1.0e7
namelist['sgs'] = {}
namelist["sgs"]['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = True
namelist["diffusion"] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 600
namelist['initial'] = {}
namelist['initial']['SST'] = 265.0 #initial surface temperature
namelist['initial']['dTi'] = 7.0 #temperature jump at the inversion
namelist['initial']['rh0'] = 0.8 #Surface relative humidity
namelist['initial']['gamma'] = 5.0/1000. #free tropospheric lapse rate
namelist['initial']['rh'] = 0.6 #free tropospheric relative humidity
namelist['initial']['z_top'] = 820.0 #top of mixed layer
namelist['initial']['dzi'] = 30.0 #inversion height
namelist['initial']['dSST'] = 8.0 #SST change (climate change)
namelist['initial']['divergence'] = 5.0e-6 # LS divergence
namelist['initial']['fix_dqt'] = True
namelist['surface'] = {}
namelist['surface']['sensible'] = 0.0 #surface sensible heat flux Wm-2
namelist['radiation'] = {}
namelist['radiation']['use_RRTM'] = True
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 60.0
namelist['radiation']['RRTM']['buffer_points'] = 15
namelist['radiation']['RRTM']['patch_pressure'] = 600.0*100.0
namelist['radiation']['RRTM']['adjes'] = 0.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = "stats"
namelist['stats_io']['auxiliary'] = 'None'
namelist['stats_io']['frequency'] = 30.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = "fields"
namelist['fields_io']['frequency'] = 36000.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy_frequency','viscosity']
namelist['meta'] = {}
namelist['meta']['simname'] = 'IsdacCC'
namelist['meta']['casename'] = 'IsdacCC'
return namelist
def Mpace():
namelist = {}
namelist["grid"] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 64
namelist['grid']['ny'] = 64
namelist['grid']['nz'] = 250
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 50.0
namelist['grid']['dy'] = 50.0
namelist['grid']['dz'] = 10.0
namelist["mpi"] = {}
namelist["mpi"]["nprocx"] = 1
namelist["mpi"]["nprocy"] = 1
namelist["mpi"]["nprocz"] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.5
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0 * 12.0
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'Arctic_1M'
namelist['microphysics']['phase_partitioning'] = 'Arctic'
namelist['microphysics']['n0_ice'] = 1.0e7
namelist["sgs"] = {}
namelist["sgs"]['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = True
namelist['radiation'] = {}
namelist['radiation']['use_RRTM'] = True
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 60.0
namelist['radiation']['RRTM']['buffer_points'] = 15
namelist['radiation']['RRTM']['patch_pressure'] = 600.0*100.0
namelist['radiation']['RRTM']['dyofyr'] = 283
namelist['radiation']['RRTM']['daily_mean_sw'] = False
namelist['radiation']['RRTM']['hourz'] = 17.0
namelist['radiation']['RRTM']['latitude'] = 71.75
namelist['radiation']['RRTM']['longitude'] = 151.0
namelist["diffusion"] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 600
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = "stats"
namelist['stats_io']['auxiliary'] = 'None'
namelist['stats_io']['frequency'] = 30.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = "fields"
namelist['fields_io']['frequency'] = 36000.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature']
namelist['meta'] = {}
namelist['meta']['simname'] = 'Mpace'
namelist['meta']['casename'] = 'Mpace'
return namelist
def Sheba():
namelist = {}
namelist["grid"] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 64
namelist['grid']['ny'] = 64
namelist['grid']['nz'] = 250
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 50.0
namelist['grid']['dy'] = 50.0
namelist['grid']['dz'] = 10.0
namelist["mpi"] = {}
namelist["mpi"]["nprocx"] = 1
namelist["mpi"]["nprocy"] = 1
namelist["mpi"]["nprocz"] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.5
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0 * 12.0
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'Arctic_1M'
namelist['microphysics']['phase_partitioning'] = 'Arctic'
namelist['microphysics']['n0_ice'] = 1.0e7
namelist["sgs"] = {}
namelist["sgs"]['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = True
namelist['radiation'] = {}
namelist['radiation']['use_RRTM'] = True
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 60.0
namelist['radiation']['RRTM']['buffer_points'] = 15
namelist['radiation']['RRTM']['stretch_factor'] = 1.2
namelist['radiation']['RRTM']['patch_pressure'] = 500.0*100.0
namelist['radiation']['RRTM']['dyofyr'] = 127
namelist['radiation']['RRTM']['daily_mean_sw'] = False
namelist['radiation']['RRTM']['hourz'] = 12.0
namelist['radiation']['RRTM']['latitude'] = 76.0
namelist['radiation']['RRTM']['longitude'] = 195.0
namelist['radiation']['RRTM']['adir'] = 0.827
namelist["diffusion"] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 5
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 600
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = "stats"
namelist['stats_io']['auxiliary'] = 'None'
namelist['stats_io']['frequency'] = 30.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = "fields"
namelist['fields_io']['frequency'] = 36000.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature']
namelist['meta'] = {}
namelist['meta']['simname'] = 'Sheba'
namelist['meta']['casename'] = 'Sheba'
return namelist
def CGILS_S6(is_p2,is_ctl_omega):
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 96
namelist['grid']['ny'] = 96
namelist['grid']['nz'] = 180
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 100.0
namelist['grid']['dy'] = 100.0
namelist['grid']['dz'] = 30.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0*24.0*10.0 # 10 days
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'variable'
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.02
namelist['damping']['Rayleigh']['z_d'] = 600.0
namelist['microphysics'] = {}
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['microphysics']['cloud_sedimentation'] = True
namelist['microphysics']['ccn'] = 100.0e6
namelist['microphysics']['scheme'] = 'SB_Liquid'
namelist['microphysics']['SB_Liquid'] = {}
namelist['microphysics']['SB_Liquid']['nu_droplet'] = 0
namelist['microphysics']['SB_Liquid']['mu_rain'] = 1
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] ={}
namelist['sgs']['Smagorinsky']['iles'] = False
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 1
namelist['radiation'] = {}
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 90.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Cumulus']
namelist['stats_io']['frequency'] = 5 * 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 86400.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy']
namelist['meta'] = {}
namelist['meta']['CGILS'] = {}
namelist['meta']['casename'] = 'CGILS'
namelist['meta']['CGILS']['location'] = 6
namelist['meta']['CGILS']['P2'] = is_p2
namelist['meta']['CGILS']['CTL_omega'] = is_ctl_omega
simname = 'CGILS_S' + str(namelist['meta']['CGILS']['location'] )
if namelist['meta']['CGILS']['P2']:
if namelist['meta']['CGILS']['CTL_omega']:
simname += '_P2'
else:
simname += '_P2S'
else:
simname += '_CTL'
namelist['meta']['simname'] = simname
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['restart']['delete_old'] = True
    namelist['restart']['times_retained'] = list(range(86400, 86400*11, 86400))  # list() keeps the value JSON-serializable on Python 3
namelist['conditional_stats'] ={}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 43200.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
return namelist
def CGILS_S11(is_p2,is_ctl_omega):
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 96
namelist['grid']['ny'] = 96
namelist['grid']['nz'] = 180
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 50.0
namelist['grid']['dy'] = 50.0
namelist['grid']['dz'] = 20.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0*24.0*10.0 # 10 days
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'variable'
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.02
namelist['damping']['Rayleigh']['z_d'] = 600.0
namelist['microphysics'] = {}
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['microphysics']['cloud_sedimentation'] = True
namelist['microphysics']['ccn'] = 100.0e6
namelist['microphysics']['scheme'] = 'SB_Liquid'
namelist['microphysics']['SB_Liquid'] = {}
namelist['microphysics']['SB_Liquid']['nu_droplet'] = 0
namelist['microphysics']['SB_Liquid']['mu_rain'] = 1
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] ={}
namelist['sgs']['Smagorinsky']['iles'] = False
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 1
namelist['radiation'] = {}
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 90.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Flux']
namelist['stats_io']['frequency'] = 5 * 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 86400.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy']
namelist['meta'] = {}
namelist['meta']['CGILS'] = {}
namelist['meta']['casename'] = 'CGILS'
namelist['meta']['CGILS']['location'] = 11
namelist['meta']['CGILS']['P2'] = is_p2
namelist['meta']['CGILS']['CTL_omega'] = is_ctl_omega
simname = 'CGILS_S' + str(namelist['meta']['CGILS']['location'])
if namelist['meta']['CGILS']['P2']:
if namelist['meta']['CGILS']['CTL_omega']:
simname += '_P2'
else:
simname += '_P2S'
else:
simname += '_CTL'
namelist['meta']['simname'] = simname
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['restart']['delete_old'] = True
namelist['restart']['times_retained'] = list(range(86400, 86400*11, 86400))  # list() so json.dump can serialize it on Python 3
namelist['conditional_stats'] = {}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 43200.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
return namelist
def CGILS_S12(is_p2,is_ctl_omega):
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 96
namelist['grid']['ny'] = 96
namelist['grid']['nz'] = 200
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 25.0
namelist['grid']['dy'] = 25.0
namelist['grid']['dz'] = 10.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0*24.0*10.0 # 10 days
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'variable'
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.02
namelist['damping']['Rayleigh']['z_d'] = 500.0
namelist['microphysics'] = {}
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['microphysics']['cloud_sedimentation'] = True
namelist['microphysics']['ccn'] = 100.0e6
namelist['microphysics']['scheme'] = 'SB_Liquid'
namelist['microphysics']['SB_Liquid'] = {}
namelist['microphysics']['SB_Liquid']['nu_droplet'] = 0
namelist['microphysics']['SB_Liquid']['mu_rain'] = 1
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = False
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 1
namelist['radiation'] = {}
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 90.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Flux']
namelist['stats_io']['frequency'] = 5 * 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 86400.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy']
namelist['meta'] = {}
namelist['meta']['CGILS'] = {}
namelist['meta']['casename'] = 'CGILS'
namelist['meta']['CGILS']['location'] = 12
namelist['meta']['CGILS']['P2'] = is_p2
namelist['meta']['CGILS']['CTL_omega'] = is_ctl_omega
simname = 'CGILS_S' + str(namelist['meta']['CGILS']['location'])
if namelist['meta']['CGILS']['P2']:
if namelist['meta']['CGILS']['CTL_omega']:
simname += '_P2'
else:
simname += '_P2S'
else:
simname += '_CTL'
namelist['meta']['simname'] = simname
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['restart']['delete_old'] = True
namelist['restart']['times_retained'] = list(range(86400, 86400*11, 86400))  # list() so json.dump can serialize it on Python 3
namelist['conditional_stats'] = {}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 43200.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
return namelist
def ZGILS(zgils_loc):
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
namelist['grid']['nx'] = 86
namelist['grid']['ny'] = 86
namelist['grid']['nz'] = 216
namelist['grid']['gw'] = 3
namelist['grid']['dx'] = 75.0
namelist['grid']['dy'] = 75.0
namelist['grid']['dz'] = 20.0
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3
namelist['time_stepping']['cfl_limit'] = 0.7
namelist['time_stepping']['dt_initial'] = 1.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 3600.0*24.0*20.0 # 20 days
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'variable'
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh'
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.2
namelist['damping']['Rayleigh']['z_d'] = 500.0
namelist['microphysics'] = {}
namelist['microphysics']['phase_partitioning'] = 'liquid_only'
namelist['microphysics']['cloud_sedimentation'] = True
namelist['microphysics']['ccn'] = 100.0e6
namelist['microphysics']['scheme'] = 'SB_Liquid'
namelist['microphysics']['SB_Liquid'] = {}
namelist['microphysics']['SB_Liquid']['nu_droplet'] = 0
namelist['microphysics']['SB_Liquid']['mu_rain'] = 1
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['iles'] = False
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 5
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 5
namelist['scalar_transport']['order_sedimentation'] = 1
namelist['surface_budget'] = {}
if zgils_loc == 12:
namelist['surface_budget']['ocean_heat_flux'] = 70.0
elif zgils_loc == 11:
namelist['surface_budget']['ocean_heat_flux'] = 90.0
elif zgils_loc == 6:
namelist['surface_budget']['ocean_heat_flux'] = 60.0
# To run a fixed_sst case set fixed_sst_time > t_max of simulation
namelist['surface_budget']['fixed_sst_time'] = 24.0 * 3600.0 * 30.0 # 30 days, i.e. > t_max, so SST stays fixed for the whole run
namelist['radiation'] = {}
namelist['radiation']['RRTM'] = {}
namelist['radiation']['RRTM']['frequency'] = 90.0
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Flux']
namelist['stats_io']['frequency'] = 5 * 60.0
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 86400.0
namelist['fields_io']['diagnostic_fields'] = ['ql','temperature','buoyancy']
namelist['meta'] = {}
namelist['meta']['ZGILS'] = {}
namelist['meta']['casename'] = 'ZGILS'
namelist['meta']['ZGILS']['location'] = zgils_loc
simname = 'ZGILS_S' + str(namelist['meta']['ZGILS']['location'])
namelist['meta']['simname'] = simname
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['restart']['delete_old'] = True
namelist['restart']['times_retained'] = list(range(86400, 86400*21, 86400))  # list() so json.dump can serialize it on Python 3
namelist['conditional_stats'] = {}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 43200.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
return namelist
def DCBLSoares():
# adopted from: "An eddy-diffusivity/mass-flux parametrization for dry and shallow cumulus convection",
# By P. M. M. SOARES, P. M. A. MIRANDA, A. P. SIEBESMA and J. TEIXEIRA, Q. J. R. Meteorol. Soc. (2004)
# modifications: qt initial profile and flux set to zero, since no dry thermodynamics without condensation given
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
# Soares (2004): domain size = 6400 x 6400 m, domain height = 3000 (?) m; dx = ?, dy = ?, dz = 20 m
# Nieuwstadt: domain size = ?, domain height = 2400m; dx = dy = 60 m, dz = 50-60 m
# IOP Paper, old code: domain size = 6400 x 6400 m, domain height = 3750 m
namelist['grid']['nx'] = 256 # IOP
namelist['grid']['ny'] = 256 # IOP
namelist['grid']['nz'] = 150 # IOP
namelist['grid']['gw'] = 3 # for 2nd order
namelist['grid']['dx'] = 25.0 # IOP
namelist['grid']['dy'] = 25.0 # IOP
namelist['grid']['dz'] = 25.0 # IOP
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3 # seems to be 3 in all cases???
namelist['time_stepping']['cfl_limit'] = 0.3 # default: 0.7; IOP: 0.3
namelist['time_stepping']['dt_initial'] = 10.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 6*3600.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant' # 'constant' or 'variable', for Clausius Clapeyron calculation
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_Dry' # Bomex: 'None_SA'; options: 'None_Dry' (no qt as Progn. Var.), 'None_SA', 'SB_Liquid'
namelist['microphysics']['phase_partitioning'] = 'liquid_only' # seems to be this in all cases???
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['cs'] = 0.17
namelist['sgs']['UniformViscosity'] = {}
namelist['sgs']['UniformViscosity']['viscosity'] = 1.2
namelist['sgs']['UniformViscosity']['diffusivity'] = 3.6
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False # seems to be set to False for all cases???
# 2 = second_order_m
# 32 = second_order_ml_m
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 2
# 2 = second_order_a
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 2
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh' # no more 'DampingToDomainMean' ???
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.02
namelist['damping']['Rayleigh']['z_d'] = 800.0 # ??? depth of damping layer?
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
# profile outputs
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Flux'] # AuxiliaryStatistics
namelist['stats_io']['frequency'] = 900.0
# field outputs
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 1800.0
namelist['fields_io']['diagnostic_fields'] = ['temperature','viscosity'] # defines diagnostic variable output fields (progn. variables output in restart files?!)
# Conditional Statistics
namelist['conditional_stats'] = {}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['meta'] = {}
namelist['meta']['simname'] = 'DCBLSoares'
namelist['meta']['casename'] = 'DCBLSoares'
namelist['restart'] = {}
namelist['restart']['output'] = False
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['visualization'] = {}
namelist['visualization']['frequency'] = 1800.0
namelist['stochastic_noise'] = {}
namelist['stochastic_noise']['flag'] = False
namelist['stochastic_noise']['amplitude'] = 0.05
namelist['tracers'] = {}
namelist['tracers']['use_tracers'] = 'passive'
namelist['tracers']['kmin'] = 0
namelist['tracers']['kmax'] = 10
return namelist
def DCBLSoares_moist():
# adopted from: "An eddy-diffusivity/mass-flux parametrization for dry and shallow cumulus convection",
# By P. M. M. SOARES, P. M. A. MIRANDA, A. P. SIEBESMA and J. TEIXEIRA, Q. J. R. Meteorol. Soc. (2004)
# modifications: qt initial profile and flux set to zero, since no dry thermodynamics without condensation given
namelist = {}
namelist['grid'] = {}
namelist['grid']['dims'] = 3
# Soares (2004): domain size = 6400 x 6400 m, domain height = 3000 (?) m; dx = ?, dy = ?, dz = 20 m
# Nieuwstadt: domain size = ?, domain height = 2400m; dx = dy = 60 m, dz = 50-60 m
# IOP Paper, old code: domain size = 6400 x 6400 m, domain height = 3750 m
namelist['grid']['nx'] = 256 # IOP
namelist['grid']['ny'] = 256 # IOP
namelist['grid']['nz'] = 150 # IOP
namelist['grid']['gw'] = 3 # for 2nd order
namelist['grid']['dx'] = 25.0 # IOP
namelist['grid']['dy'] = 25.0 # IOP
namelist['grid']['dz'] = 25.0 # IOP
namelist['mpi'] = {}
namelist['mpi']['nprocx'] = 1
namelist['mpi']['nprocy'] = 1
namelist['mpi']['nprocz'] = 1
namelist['time_stepping'] = {}
namelist['time_stepping']['ts_type'] = 3 # seems to be 3 in all cases???
namelist['time_stepping']['cfl_limit'] = 0.3 # default: 0.7; IOP: 0.3
namelist['time_stepping']['dt_initial'] = 10.0
namelist['time_stepping']['dt_max'] = 10.0
namelist['time_stepping']['t_max'] = 6*3600.0
namelist['thermodynamics'] = {}
namelist['thermodynamics']['latentheat'] = 'constant' # 'constant' or 'variable', for Clausius Clapeyron calculation
namelist['microphysics'] = {}
namelist['microphysics']['scheme'] = 'None_SA' # DCBL: 'None_Dry', Bomex: 'None_SA'; options: 'None_Dry' (no qt as Progn. Var.), 'None_SA', 'SB_Liquid'
namelist['microphysics']['phase_partitioning'] = 'liquid_only' # seems to be this in all cases???
namelist['sgs'] = {}
namelist['sgs']['scheme'] = 'Smagorinsky'
namelist['sgs']['Smagorinsky'] = {}
namelist['sgs']['Smagorinsky']['cs'] = 0.17
namelist['sgs']['UniformViscosity'] = {}
namelist['sgs']['UniformViscosity']['viscosity'] = 1.2
namelist['sgs']['UniformViscosity']['diffusivity'] = 3.6
namelist['sgs']['TKE'] = {}
namelist['sgs']['TKE']['ck'] = 0.1
namelist['sgs']['TKE']['cn'] = 0.76
namelist['diffusion'] = {}
namelist['diffusion']['qt_entropy_source'] = False # seems to be set to False for all cases???
# 2 = second_order_m
# 32 = second_order_ml_m
namelist['momentum_transport'] = {}
namelist['momentum_transport']['order'] = 4
# 2 = second_order_a
namelist['scalar_transport'] = {}
namelist['scalar_transport']['order'] = 4
namelist['damping'] = {}
namelist['damping']['scheme'] = 'Rayleigh' # no more 'DampingToDomainMean' ???
namelist['damping']['Rayleigh'] = {}
namelist['damping']['Rayleigh']['gamma_r'] = 0.02
namelist['damping']['Rayleigh']['z_d'] = 800.0 # ??? depth of damping layer?
namelist['output'] = {}
namelist['output']['output_root'] = './'
namelist['restart'] = {}
namelist['restart']['output'] = True
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
# profile outputs
namelist['stats_io'] = {}
namelist['stats_io']['stats_dir'] = 'stats'
namelist['stats_io']['auxiliary'] = ['Fluxes'] # AuxiliaryStatistics
namelist['stats_io']['frequency'] = 600.0
# field outputs
namelist['fields_io'] = {}
namelist['fields_io']['fields_dir'] = 'fields'
namelist['fields_io']['frequency'] = 1800.0
namelist['fields_io']['diagnostic_fields'] = ['temperature','viscosity'] # defines diagnostic variable output fields (progn. variables output in restart files?!)
# Conditional Statistics
namelist['conditional_stats'] = {}
namelist['conditional_stats']['classes'] = ['Spectra']
namelist['conditional_stats']['frequency'] = 600.0
namelist['conditional_stats']['stats_dir'] = 'cond_stats'
namelist['meta'] = {}
namelist['meta']['simname'] = 'DCBLSoares_moist'
namelist['meta']['casename'] = 'DCBLSoares_moist'
namelist['restart'] = {}
namelist['restart']['output'] = False
namelist['restart']['init_from'] = False
namelist['restart']['input_path'] = './'
namelist['restart']['frequency'] = 600.0
namelist['visualization'] = {}
namelist['visualization']['frequency'] = 1800.0
namelist['tracers'] = {}
namelist['tracers']['use_tracers'] = 'passive'
namelist['tracers']['tracer_profile'] = 'smooth'
namelist['tracers']['kmin'] = 0
namelist['tracers']['kmax'] = 10
namelist['ClausiusClapeyron'] = {}
namelist['ClausiusClapeyron']['temperature_min'] = 100.15
namelist['ClausiusClapeyron']['temperature_max'] = 500.0
return namelist
def write_file(namelist):
# a simname is required to name the output file, so check for it directly
if 'simname' not in namelist.get('meta', {}):
    print('Casename not specified in namelist dictionary!')
    print('FatalError')
    exit()
namelist['meta']['uuid'] = str(uuid.uuid4())
pprint.pprint(namelist)
with open(namelist['meta']['simname'] + '.in', 'w') as fh:
    json.dump(namelist, fh, sort_keys=True, indent=4)
return
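# Usage sketch (illustrative, not part of the original script): each case
# function above returns a plain namelist dict, and write_file() serializes
# it to '<simname>.in' as JSON. Assuming the CGILS_S12 signature shown above:
#
#     namelist = CGILS_S12(is_p2=False, is_ctl_omega=True)  # simname 'CGILS_S12_CTL'
#     write_file(namelist)                                  # writes CGILS_S12_CTL.in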
if __name__ == '__main__':
main()
|
gpl-3.0
| -4,516,272,211,340,390,400 | 32.287943 | 167 | 0.590066 | false |
capitalone/cloud-custodian
|
c7n/resources/sagemaker.py
|
1
|
23243
|
# Copyright 2016-2017 Capital One Services, LLC
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from c7n.actions import BaseAction
from c7n.exceptions import PolicyValidationError
from c7n.manager import resources
from c7n.query import QueryResourceManager, TypeInfo
from c7n.utils import local_session, type_schema
from c7n.tags import RemoveTag, Tag, TagActionFilter, TagDelayedAction
from c7n.filters.vpc import SubnetFilter, SecurityGroupFilter
from c7n.filters.kms import KmsRelatedFilter
@resources.register('sagemaker-notebook')
class NotebookInstance(QueryResourceManager):
class resource_type(TypeInfo):
service = 'sagemaker'
enum_spec = ('list_notebook_instances', 'NotebookInstances', None)
detail_spec = (
'describe_notebook_instance', 'NotebookInstanceName',
'NotebookInstanceName', None)
arn = id = 'NotebookInstanceArn'
name = 'NotebookInstanceName'
date = 'CreationTime'
cfn_type = 'AWS::SageMaker::NotebookInstance'
permissions = ('sagemaker:ListTags',)
def augment(self, resources):
client = local_session(self.session_factory).client('sagemaker')
def _augment(r):
# List tags for the Notebook-Instance & set as attribute
tags = self.retry(client.list_tags,
ResourceArn=r['NotebookInstanceArn'])['Tags']
r['Tags'] = tags
return r
# Describe notebook-instance & then list tags
resources = super(NotebookInstance, self).augment(resources)
return list(map(_augment, resources))
NotebookInstance.filter_registry.register('marked-for-op', TagActionFilter)
@resources.register('sagemaker-job')
class SagemakerJob(QueryResourceManager):
class resource_type(TypeInfo):
service = 'sagemaker'
enum_spec = ('list_training_jobs', 'TrainingJobSummaries', None)
detail_spec = (
'describe_training_job', 'TrainingJobName', 'TrainingJobName', None)
arn = id = 'TrainingJobArn'
name = 'TrainingJobName'
date = 'CreationTime'
permission_augment = (
'sagemaker:DescribeTrainingJob', 'sagemaker:ListTags')
def __init__(self, ctx, data):
super(SagemakerJob, self).__init__(ctx, data)
self.queries = QueryFilter.parse(
self.data.get('query', [
{'StatusEquals': 'InProgress'}]))
def resources(self, query=None):
for q in self.queries:
if q is None:
continue
query = query or {}
for k, v in q.items():
query[k] = v
return super(SagemakerJob, self).resources(query=query)
def augment(self, jobs):
client = local_session(self.session_factory).client('sagemaker')
def _augment(j):
tags = self.retry(client.list_tags,
ResourceArn=j['TrainingJobArn'])['Tags']
j['Tags'] = tags
return j
jobs = super(SagemakerJob, self).augment(jobs)
return list(map(_augment, jobs))
@resources.register('sagemaker-transform-job')
class SagemakerTransformJob(QueryResourceManager):
class resource_type(TypeInfo):
arn_type = "transform-job"
service = 'sagemaker'
enum_spec = ('list_transform_jobs', 'TransformJobSummaries', None)
detail_spec = (
'describe_transform_job', 'TransformJobName', 'TransformJobName', None)
arn = id = 'TransformJobArn'
name = 'TransformJobName'
date = 'CreationTime'
filter_name = 'TransformJobArn'
permission_augment = ('sagemaker:DescribeTransformJob', 'sagemaker:ListTags')
def __init__(self, ctx, data):
super(SagemakerTransformJob, self).__init__(ctx, data)
self.queries = QueryFilter.parse(
self.data.get('query', [
{'StatusEquals': 'InProgress'}]))
def resources(self, query=None):
for q in self.queries:
if q is None:
continue
query = query or {}
for k, v in q.items():
query[k] = v
return super(SagemakerTransformJob, self).resources(query=query)
def augment(self, jobs):
client = local_session(self.session_factory).client('sagemaker')
def _augment(j):
tags = self.retry(client.list_tags,
ResourceArn=j['TransformJobArn'])['Tags']
j['Tags'] = tags
return j
return list(map(_augment, super(SagemakerTransformJob, self).augment(jobs)))
class QueryFilter:
JOB_FILTERS = ('StatusEquals', 'NameContains',)
@classmethod
def parse(cls, data):
results = []
names = set()
for d in data:
if not isinstance(d, dict):
raise PolicyValidationError(
"Job Query Filter Invalid structure %s" % d)
for k, v in d.items():
if isinstance(v, list):
raise ValueError(
'Job query filter invalid structure %s' % v)
query = cls(d).validate().query()
if query['Name'] in names:
# Cannot filter multiple times on the same key
continue
names.add(query['Name'])
if isinstance(query['Value'], list):
results.append({query['Name']: query['Value'][0]})
continue
results.append({query['Name']: query['Value']})
if 'StatusEquals' not in names:
            # add default StatusEquals (same {key: value} shape as above)
            results.append({'StatusEquals': 'InProgress'})
return results
def __init__(self, data):
self.data = data
self.key = None
self.value = None
def validate(self):
        if len(self.data) != 1:
raise PolicyValidationError(
"Job Query Filter Invalid %s" % self.data)
self.key = list(self.data.keys())[0]
self.value = list(self.data.values())[0]
if self.key not in self.JOB_FILTERS and not self.key.startswith('tag:'):
raise PolicyValidationError(
"Job Query Filter invalid filter name %s" % (
self.data))
if self.value is None:
raise PolicyValidationError(
"Job Query Filters must have a value, use tag-key"
" w/ tag name as value for tag present checks"
" %s" % self.data)
return self
def query(self):
value = self.value
if isinstance(self.value, str):
value = [self.value]
return {'Name': self.key, 'Value': value}
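# Illustrative example (added; not in the original source): QueryFilter.parse
# normalizes policy query data into per-key kwargs for the SageMaker list
# calls, dropping duplicate keys and defaulting the status filter:
#
#     QueryFilter.parse([{'NameContains': 'ml-job'}])
#     # -> [{'NameContains': 'ml-job'}, {'StatusEquals': 'InProgress'}]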
@resources.register('sagemaker-endpoint')
class SagemakerEndpoint(QueryResourceManager):
class resource_type(TypeInfo):
service = 'sagemaker'
enum_spec = ('list_endpoints', 'Endpoints', None)
detail_spec = (
'describe_endpoint', 'EndpointName',
'EndpointName', None)
arn = id = 'EndpointArn'
name = 'EndpointName'
date = 'CreationTime'
cfn_type = 'AWS::SageMaker::Endpoint'
permissions = ('sagemaker:ListTags',)
def augment(self, endpoints):
client = local_session(self.session_factory).client('sagemaker')
def _augment(e):
tags = self.retry(client.list_tags,
ResourceArn=e['EndpointArn'])['Tags']
e['Tags'] = tags
return e
# Describe endpoints & then list tags
endpoints = super(SagemakerEndpoint, self).augment(endpoints)
return list(map(_augment, endpoints))
SagemakerEndpoint.filter_registry.register('marked-for-op', TagActionFilter)
@resources.register('sagemaker-endpoint-config')
class SagemakerEndpointConfig(QueryResourceManager):
class resource_type(TypeInfo):
service = 'sagemaker'
enum_spec = ('list_endpoint_configs', 'EndpointConfigs', None)
detail_spec = (
'describe_endpoint_config', 'EndpointConfigName',
'EndpointConfigName', None)
arn = id = 'EndpointConfigArn'
name = 'EndpointConfigName'
date = 'CreationTime'
cfn_type = 'AWS::SageMaker::EndpointConfig'
permissions = ('sagemaker:ListTags',)
def augment(self, endpoints):
client = local_session(self.session_factory).client('sagemaker')
def _augment(e):
tags = self.retry(client.list_tags,
ResourceArn=e['EndpointConfigArn'])['Tags']
e['Tags'] = tags
return e
endpoints = super(SagemakerEndpointConfig, self).augment(endpoints)
return list(map(_augment, endpoints))
SagemakerEndpointConfig.filter_registry.register('marked-for-op', TagActionFilter)
@resources.register('sagemaker-model')
class Model(QueryResourceManager):
class resource_type(TypeInfo):
service = 'sagemaker'
enum_spec = ('list_models', 'Models', None)
detail_spec = (
'describe_model', 'ModelName',
'ModelName', None)
arn = id = 'ModelArn'
name = 'ModelName'
date = 'CreationTime'
cfn_type = 'AWS::SageMaker::Model'
permissions = ('sagemaker:ListTags',)
def augment(self, resources):
client = local_session(self.session_factory).client('sagemaker')
def _augment(r):
tags = self.retry(client.list_tags,
ResourceArn=r['ModelArn'])['Tags']
r.setdefault('Tags', []).extend(tags)
return r
return list(map(_augment, resources))
Model.filter_registry.register('marked-for-op', TagActionFilter)
@SagemakerEndpoint.action_registry.register('tag')
@SagemakerEndpointConfig.action_registry.register('tag')
@NotebookInstance.action_registry.register('tag')
@SagemakerJob.action_registry.register('tag')
@SagemakerTransformJob.action_registry.register('tag')
@Model.action_registry.register('tag')
class TagNotebookInstance(Tag):
"""Action to create tag(s) on a SageMaker resource
    (notebook-instance, endpoint, endpoint-config, training job, transform job, model)
:example:
.. code-block:: yaml
policies:
- name: tag-sagemaker-notebook
resource: sagemaker-notebook
filters:
- "tag:target-tag": absent
actions:
- type: tag
key: target-tag
value: target-value
- name: tag-sagemaker-endpoint
resource: sagemaker-endpoint
filters:
- "tag:required-tag": absent
actions:
- type: tag
key: required-tag
value: required-value
- name: tag-sagemaker-endpoint-config
resource: sagemaker-endpoint-config
filters:
- "tag:required-tag": absent
actions:
- type: tag
key: required-tag
value: required-value
- name: tag-sagemaker-job
resource: sagemaker-job
filters:
- "tag:required-tag": absent
actions:
- type: tag
key: required-tag
value: required-value
"""
permissions = ('sagemaker:AddTags',)
def process_resource_set(self, client, resources, tags):
mid = self.manager.resource_type.id
for r in resources:
client.add_tags(ResourceArn=r[mid], Tags=tags)
@SagemakerEndpoint.action_registry.register('remove-tag')
@SagemakerEndpointConfig.action_registry.register('remove-tag')
@NotebookInstance.action_registry.register('remove-tag')
@SagemakerJob.action_registry.register('remove-tag')
@SagemakerTransformJob.action_registry.register('remove-tag')
@Model.action_registry.register('remove-tag')
class RemoveTagNotebookInstance(RemoveTag):
"""Remove tag(s) from SageMaker resources
    (notebook-instance, endpoint, endpoint-config, training job, transform job, model)
:example:
.. code-block:: yaml
policies:
- name: sagemaker-notebook-remove-tag
resource: sagemaker-notebook
filters:
- "tag:BadTag": present
actions:
- type: remove-tag
tags: ["BadTag"]
- name: sagemaker-endpoint-remove-tag
resource: sagemaker-endpoint
filters:
- "tag:expired-tag": present
actions:
- type: remove-tag
tags: ["expired-tag"]
- name: sagemaker-endpoint-config-remove-tag
resource: sagemaker-endpoint-config
filters:
- "tag:expired-tag": present
actions:
- type: remove-tag
tags: ["expired-tag"]
- name: sagemaker-job-remove-tag
resource: sagemaker-job
filters:
- "tag:expired-tag": present
actions:
- type: remove-tag
tags: ["expired-tag"]
"""
permissions = ('sagemaker:DeleteTags',)
def process_resource_set(self, client, resources, keys):
for r in resources:
client.delete_tags(ResourceArn=r[self.id_key], TagKeys=keys)
@SagemakerEndpoint.action_registry.register('mark-for-op')
@SagemakerEndpointConfig.action_registry.register('mark-for-op')
@NotebookInstance.action_registry.register('mark-for-op')
@Model.action_registry.register('mark-for-op')
class MarkNotebookInstanceForOp(TagDelayedAction):
"""Mark SageMaker resources for deferred action
    (notebook-instance, endpoint, endpoint-config, model)
:example:
.. code-block:: yaml
policies:
- name: sagemaker-notebook-invalid-tag-stop
resource: sagemaker-notebook
filters:
- "tag:InvalidTag": present
actions:
- type: mark-for-op
op: stop
days: 1
- name: sagemaker-endpoint-failure-delete
resource: sagemaker-endpoint
filters:
- 'EndpointStatus': 'Failed'
actions:
- type: mark-for-op
op: delete
days: 1
- name: sagemaker-endpoint-config-invalid-size-delete
            resource: sagemaker-endpoint-config
            filters:
              - type: value
                key: ProductionVariants[].InstanceType
                value: 'ml.m4.10xlarge'
                op: contains
actions:
- type: mark-for-op
op: delete
days: 1
"""
@NotebookInstance.action_registry.register('start')
class StartNotebookInstance(BaseAction):
"""Start sagemaker-notebook(s)
:example:
.. code-block:: yaml
policies:
- name: start-sagemaker-notebook
resource: sagemaker-notebook
actions:
- start
"""
schema = type_schema('start')
permissions = ('sagemaker:StartNotebookInstance',)
valid_origin_states = ('Stopped',)
def process(self, resources):
resources = self.filter_resources(resources, 'NotebookInstanceStatus',
self.valid_origin_states)
if not len(resources):
return
client = local_session(self.manager.session_factory).client('sagemaker')
for n in resources:
try:
client.start_notebook_instance(
NotebookInstanceName=n['NotebookInstanceName'])
except client.exceptions.ResourceNotFound:
pass
@NotebookInstance.action_registry.register('stop')
class StopNotebookInstance(BaseAction):
"""Stop sagemaker-notebook(s)
:example:
.. code-block:: yaml
policies:
- name: stop-sagemaker-notebook
resource: sagemaker-notebook
filters:
- "tag:DeleteMe": present
actions:
- stop
"""
schema = type_schema('stop')
permissions = ('sagemaker:StopNotebookInstance',)
valid_origin_states = ('InService',)
def process(self, resources):
resources = self.filter_resources(resources, 'NotebookInstanceStatus',
self.valid_origin_states)
if not len(resources):
return
client = local_session(self.manager.session_factory).client('sagemaker')
for n in resources:
try:
client.stop_notebook_instance(
NotebookInstanceName=n['NotebookInstanceName'])
except client.exceptions.ResourceNotFound:
pass
@NotebookInstance.action_registry.register('delete')
class DeleteNotebookInstance(BaseAction):
"""Deletes sagemaker-notebook(s)
:example:
.. code-block:: yaml
policies:
- name: delete-sagemaker-notebook
resource: sagemaker-notebook
filters:
- "tag:DeleteMe": present
actions:
- delete
"""
schema = type_schema('delete')
permissions = ('sagemaker:DeleteNotebookInstance',)
valid_origin_states = ('Stopped', 'Failed',)
def process(self, resources):
resources = self.filter_resources(resources, 'NotebookInstanceStatus',
self.valid_origin_states)
if not len(resources):
return
client = local_session(self.manager.session_factory).client('sagemaker')
for n in resources:
try:
client.delete_notebook_instance(
NotebookInstanceName=n['NotebookInstanceName'])
except client.exceptions.ResourceNotFound:
pass
@NotebookInstance.filter_registry.register('security-group')
class NotebookSecurityGroupFilter(SecurityGroupFilter):
RelatedIdsExpression = "SecurityGroups[]"
@NotebookInstance.filter_registry.register('subnet')
class NotebookSubnetFilter(SubnetFilter):
RelatedIdsExpression = "SubnetId"
@NotebookInstance.filter_registry.register('kms-key')
@SagemakerEndpointConfig.filter_registry.register('kms-key')
class NotebookKmsFilter(KmsRelatedFilter):
"""
    Filter a resource by its associated KMS key, and optionally by the alias
    name of the KMS key, using 'c7n:AliasName'
:example:
.. code-block:: yaml
policies:
- name: sagemaker-kms-key-filters
resource: aws.sagemaker-notebook
filters:
- type: kms-key
key: c7n:AliasName
value: "^(alias/aws/sagemaker)"
op: regex
- name: sagemaker-endpoint-kms-key-filters
resource: aws.sagemaker-endpoint-config
filters:
- type: kms-key
key: c7n:AliasName
value: "alias/aws/sagemaker"
"""
RelatedIdsExpression = "KmsKeyId"
@Model.action_registry.register('delete')
class DeleteModel(BaseAction):
"""Deletes sagemaker-model(s)
:example:
.. code-block:: yaml
policies:
- name: delete-sagemaker-model
resource: sagemaker-model
filters:
- "tag:DeleteMe": present
actions:
- delete
"""
schema = type_schema('delete')
permissions = ('sagemaker:DeleteModel',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('sagemaker')
for m in resources:
try:
client.delete_model(ModelName=m['ModelName'])
except client.exceptions.ResourceNotFound:
pass
@SagemakerJob.action_registry.register('stop')
class SagemakerJobStop(BaseAction):
"""Stops a SageMaker job
:example:
.. code-block:: yaml
policies:
- name: stop-ml-job
resource: sagemaker-job
filters:
- TrainingJobName: ml-job-10
actions:
- stop
"""
schema = type_schema('stop')
permissions = ('sagemaker:StopTrainingJob',)
def process(self, jobs):
client = local_session(self.manager.session_factory).client('sagemaker')
for j in jobs:
try:
client.stop_training_job(TrainingJobName=j['TrainingJobName'])
except client.exceptions.ResourceNotFound:
pass
@SagemakerEndpoint.action_registry.register('delete')
class SagemakerEndpointDelete(BaseAction):
"""Delete a SageMaker endpoint
:example:
.. code-block:: yaml
policies:
- name: delete-sagemaker-endpoint
resource: sagemaker-endpoint
filters:
- EndpointName: sagemaker-ep--2018-01-01-00-00-00
actions:
- type: delete
"""
permissions = (
'sagemaker:DeleteEndpoint',
'sagemaker:DeleteEndpointConfig')
schema = type_schema('delete')
def process(self, endpoints):
client = local_session(self.manager.session_factory).client('sagemaker')
for e in endpoints:
try:
client.delete_endpoint(EndpointName=e['EndpointName'])
except client.exceptions.ResourceNotFound:
pass
@SagemakerEndpointConfig.action_registry.register('delete')
class SagemakerEndpointConfigDelete(BaseAction):
"""Delete a SageMaker endpoint
:example:
.. code-block:: yaml
policies:
- name: delete-sagemaker-endpoint-config
resource: sagemaker-endpoint-config
filters:
- EndpointConfigName: sagemaker-2018-01-01-00-00-00-T00
actions:
- delete
"""
schema = type_schema('delete')
permissions = ('sagemaker:DeleteEndpointConfig',)
def process(self, endpoints):
client = local_session(self.manager.session_factory).client('sagemaker')
for e in endpoints:
try:
client.delete_endpoint_config(
EndpointConfigName=e['EndpointConfigName'])
except client.exceptions.ResourceNotFound:
pass
@SagemakerTransformJob.action_registry.register('stop')
class SagemakerTransformJobStop(BaseAction):
"""Stops a SageMaker Transform job
:example:
.. code-block:: yaml
policies:
            - name: stop-transform-job
resource: sagemaker-transform-job
filters:
- TransformJobName: ml-job-10
actions:
- stop
"""
schema = type_schema('stop')
permissions = ('sagemaker:StopTransformJob',)
def process(self, jobs):
client = local_session(self.manager.session_factory).client('sagemaker')
for j in jobs:
try:
client.stop_transform_job(TransformJobName=j['TransformJobName'])
except client.exceptions.ResourceNotFound:
pass
|
apache-2.0
| 4,352,266,930,033,509,000 | 30.451962 | 85 | 0.584907 | false |
jvs/stride
|
stride/grammar.py
|
1
|
12650
|
from sourcer import *
from functools import reduce  # builtin on Python 2; explicit import keeps the reduce() call in Expression working on Python 3
__all__ = [
'Collection',
'CommandHandler',
'Contract',
'Data',
'Definition',
'Dollar',
'For',
'Func',
'If',
'Import',
'Introduction',
'KeyValuePair',
'ModifiedStatement',
'Name',
'NamedElement',
'Namespace',
'Operation',
'Parameter',
'Quantification',
'Record',
'ReturnStmt',
'Test',
'Tokens',
'TypeDeclaration',
'Update',
'Var',
'While',
'parse_program',
'reserved',
]
def _memoize_indent(f):
table = {}
def wrapper(indent=''):
if indent not in table:
table[indent] = f(indent)
return table[indent]
return wrapper
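# Note (added): the indent-parameterized grammar rules later in this module
# (Expression, Body, LoopBody, ...) are wrapped with @_memoize_indent so that
# a single parser object is built per indentation string and then reused,
# rather than being reconstructed on every call during the parse.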
class TokenDefs(TokenSyntax):
def __init__(self):
self.Word = r'[_a-zA-Z][_a-zA-Z0-9]*'
self.Newline = r'[\n\r]+'
self.Indent = r'(?<=\n) +(?=[^ \n\r#])'
self.Space = Skip(r'[ \t]+')
self.LongSymbol = AnyString(
'+=', '-=', '*=', '//=', '/=',
'==', '!=', '<=', '>=',
':=', '->', '//', '...',
)
self.Symbol = AnyChar('()[]{}.,:;=+*/-<>$@')
self.RationalNumber = r'[0-9]*\.[0-9]+'
self.WholeNumber = r'[0-9]+'
self.DoubleQuotedText = r'"([^"\n\r\\]|\\[^\n\r])*"'
self.SingleQuotedText = r"'([^'\n\r\\]|\\[^\n\r])*'"
self.Comment = Skip(r'#[^\n\r]*')
Tokens = TokenDefs()
reserved = frozenset([
'and',
'by',
'assert',
'for',
'else',
'if',
'implies',
'in',
'is',
'match',
'not',
'of',
'opt',
'or',
'requires',
'then',
'to',
'try',
'using',
'yield',
])
Name = Content(Tokens.Word) ^ (lambda x: x not in reserved)
Number = Tokens.RationalNumber | Tokens.WholeNumber
String = Tokens.DoubleQuotedText | Tokens.SingleQuotedText
DataKeyword = AnyOf(
'generator',
'interface',
'resource',
'struct',
'table',
'union',
'val',
'view',
)
FuncKeyword = AnyOf(
'command',
'func',
'operator',
)
class Data(Struct):
def parse(self, indent=''):
self.keyword = DataKeyword
self.name = Opt(Name)
self.params = Opt(Params)
self.body = Opt(InlineStruct(indent) | Body(indent))
def InlineStruct(indent):
return Wrap(':') >> (Statement(indent) // Comma)
class Func(Struct):
def __init__(self,
keyword = 'func',
is_predicate = False,
name = None,
params = None,
returns = None,
body = None):
self.keyword = keyword
self.is_predicate = is_predicate
self.name = name
self.params = params or []
self.returns = returns
self.body = body
def parse(self, indent=''):
self.keyword = FuncKeyword
self.is_predicate = Opt('is') * bool
self.name = Opt(Name)
self.params = Params
self.returns = Opt(TypeAnnotation())
self.body = Opt(Initializer(indent) | Body(indent))
class Parameter(Struct):
def __init__(self, name, type=None, default=None):
self.name = name
self.type = type
self.default = default
def parse(self):
self.name = Name
self.type = Opt(TypeAnnotation())
self.default = Opt(Initializer())
class If(Struct):
def parse(self, indent=''):
self.test = Follow('if') >> Expression(indent)
self.true_case = LeadingBlock('then', indent) | FlexBody(indent)
self.false_case = Opt(TrailingBlock('else', indent))
def LeadingBlock(keyword, indent=''):
return Seek(keyword) >> (FlexBody(indent) | Seek(Expression(indent)))
def TrailingBlock(keyword, indent=''):
return MaintainIndent(indent) >> LeadingBlock(keyword, indent)
def MaintainIndent(indent):
skip = Tokens.Newline // Opt(Tokens.Indent)
token = Content(Tokens.Indent) | Return('')
return (skip >> token) ^ (lambda token: len(indent) <= len(token))
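# Note (added): MaintainIndent succeeds only when the next non-blank line is
# indented at least as deeply as the enclosing block; this is what lets
# TrailingBlock require an 'else' clause to line up with its 'if'.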
class Test(Struct):
def parse(self, indent=''):
self.keyword = 'test'
self.description = Opt(String)
self.body = FlexBody(indent)
class Namespace(Struct):
def parse(self):
self.path = 'namespace' >> Path
class Import(Struct):
def parse(self):
self.path = 'import' >> Path
self.alias = Opt(Wrap('as') >> Name)
class CommandHandler(Struct):
def parse(self, indent=''):
self.path = 'on' >> Path
self.params = Params
self.body = Initializer(indent) | Body(indent)
Seek = lambda x: OptNewlines >> x
Follow = lambda x: x << OptNewlines
Wrap = lambda x: OptNewlines >> x << OptNewlines
WrapParens = lambda x: '(' >> Wrap(x) << ')'
WrapSquare = lambda x: '[' >> Wrap(x) << ']'
OptNewlines = List(Tokens.Newline | Tokens.Indent)
Comma = Wrap(',')
Path = Content(Tokens.Word) // Wrap('.')
NotInOperator = Follow('not') >> 'in' >> Return('not in')
WrappedName = WrapParens(
NotInOperator
| Content(Tokens.Word)
| Content(Tokens.Symbol)
| Content(Tokens.LongSymbol)
)
Cont = lambda f, x: Return(x) ** f
class Record(Struct):
def parse(self):
Element = NamedElement | Expression('') | '...'
self.elements = WrapParens(Element / Comma)
class Collection(Struct):
def parse(self):
Element = KeyValuePair | Expression('')
self.elements = WrapSquare(Element / Comma)
class For(Struct):
def parse(self, indent=''):
self.target = Follow('for') >> LeftHandSide(indent)
self.source = Wrap('in') >> Expression(indent)
self.body = LoopBody(indent)
class While(Struct):
def parse(self, indent=''):
self.test = Follow('while') >> Expression(indent)
self.body = LoopBody(indent)
class Try(Struct):
def parse(self, indent=''):
self.handler = 'try' >> Opt(Definition | Expression(indent))
self.body = FlexBody(indent)
class Match(Struct):
def parse(self, indent=''):
self.subject = 'match' >> Expression(indent)
self.cases = Body(indent)
class Case(Struct):
def parse(self, indent=''):
self.is_predicate = 'case' >> Opt('is') * bool
self.test = Expression(indent)
self.body = FlexBody(indent)
class Dollar(Struct):
def parse(self):
self.target = '$' >> (Name | Number | String)
class Quantification(Struct):
def parse(self, indent=''):
OpenParams = (Parameter / Comma) * (lambda x: [x])
self.quantifier = Or('exists', 'forall')
self.params = Params | OpenParams
self.body = TypeAnnotation(indent) | Body(indent)
@_memoize_indent
def LoopBody(indent=''):
InlineExpr = KeyValuePair | Expression(indent)
InlineBody = Wrap(':') >> InlineExpr
return InlineBody | Body(indent)
@_memoize_indent
def FlexBody(indent=''):
InlineBody = Wrap(':') >> Expression(indent)
return InlineBody | Body(indent)
@_memoize_indent
def Expression(indent=''):
Reset = ForwardRef(lambda: Expression(''))
Recur = ForwardRef(lambda: Expression(indent))
PrefixOps = AnyOf('assert', 'claim', 'found', 'fn', 'spawn', 'yield')
BuildPrefix = lambda x: Operation(None, x[0], x[1])
PrefixExpression = Transform((Follow(PrefixOps), Recur), BuildPrefix)
Basic = (
WrappedName
| WrapParens(Reset)
| Record
| Collection
| Cont(If, indent)
| Cont(For, indent)
| Cont(While, indent)
| Cont(Try, indent)
| Cont(Match, indent)
| Cont(Case, indent)
| (Follow('of') >> Recur)
| Cont(Func, indent)
| Cont(Data, indent)
| PrefixExpression
| Quantification
| Name
| Number
| String
| Dollar
)
ApplicationOps = AnyOf(
('.', Content(Tokens.Word)),
(Return('[]'), WrapSquare(Reset)),
(Return(''), Basic),
)
def BuildApplications(pair):
build = lambda x, y: Operation(x, y[0], y[1])
return reduce(build, pair[1], pair[0])
Application = Transform((Basic, List(ApplicationOps)), BuildApplications)
InfixLeftW = lambda *args: InfixLeft(*(Wrap(i) for i in args))
UnaryOps = AnyOf('borrow', 'not', 'opt', 'own')
return OperatorPrecedence(
Application,
Prefix('-'),
InfixLeftW('*', '/', '//'),
InfixLeft(Wrap('+'), Follow('-')),
InfixLeftW('to'),
InfixLeftW('by'),
InfixLeftW('<', '<=', '>=', '>', 'is', 'in', NotInOperator),
InfixLeftW('==', '!='),
Prefix(Follow(UnaryOps)),
InfixLeftW('and'),
InfixLeftW('or'),
InfixRight(Wrap('implies'), Wrap('->')),
)
class NamedElement(Struct):
def parse(self):
self.name = Name | Number | String
self.value = Wrap(':') >> Expression('')
class KeyValuePair(Struct):
def parse(self):
self.key = Expression('')
self.value = Wrap(':') >> Expression('')
Params = Some(WrapParens(Parameter / Comma))
@_memoize_indent
def TypeAnnotation(indent=''):
return Wrap(':') >> Expression(indent)
@_memoize_indent
def Initializer(indent=''):
return initializer_clause(indent, '=')
@_memoize_indent
def Assignment(indent=''):
return initializer_clause(indent, ':=')
def initializer_clause(indent, operator):
return Wrap(operator) >> Expression(indent)
def flatten(list_of_lists):
return [j for i in list_of_lists for j in i]
def Block(indent=''):
Line = CurrentIndent(indent) ** InlineStatements
return (Line // Some(Tokens.Newline)) * flatten
def InlineStatements(indent):
return Statement(indent) / Some(';')
class AnnotatedStatement(Struct):
def parse(self, indent=''):
self.left = '@' >> Expression(indent)
self.right = Seek(Cont(Statement, indent))
Modifier = AnyOf('expose', 'extend', 'private')
class ModifiedStatement(Struct):
def parse(self, indent=''):
self.modifiers = Some(Modifier)
self.statement = UnmodifiedStatement(indent)
@_memoize_indent
def UnmodifiedStatement(indent=''):
return (Namespace
| Import
| Cont(ReturnStmt, indent)
| Cont(Test, indent)
| Cont(CommandHandler, indent)
| Cont(Definition, indent)
| Cont(TypeDeclaration, indent)
| Cont(Update, indent)
| Cont(Var, indent)
| Cont(Contract, indent)
| Expression(indent))
@_memoize_indent
def Statement(indent=''):
return (Cont(AnnotatedStatement, indent)
| Cont(ModifiedStatement, indent)
| UnmodifiedStatement(indent))
@_memoize_indent
def LeftHandSide(indent=''):
Intro = Cont(Introduction, indent)
Targets = Intro / Comma
return WrapParens(Targets) | Targets
class Definition(Struct):
def __init__(self, left, right):
self.left = left
self.right = right
def parse(self, indent=''):
self.left = LeftHandSide(indent)
self.right = Initializer(indent)
class Introduction(Struct):
def parse(self, indent=''):
self.name = Name
self.type = Opt(TypeAnnotation(indent))
class TypeDeclaration(Struct):
def parse(self, indent=''):
self.name = Name
self.type = TypeAnnotation(indent)
class Update(Struct):
def parse(self, indent=''):
self.left = Expression(indent)
self.operator = Wrap(AnyOf(':=', '+=', '-=', '*=', '//=', '/='))
self.right = Expression(indent)
class Var(Struct):
def parse(self, indent=''):
self.left = 'var' >> LeftHandSide(indent)
self.right = Initializer(indent) | Assignment(indent)
class Contract(Struct):
def parse(self, indent=''):
self.keyword = Follow(AnyOf('uses', 'requires', 'returns', 'yields'))
self.body = Expression(indent)
class ReturnStmt(Struct):
def __init__(self, value):
self.value = value
def parse(self, indent=''):
self.value = 'return' >> Opt(Expression(indent))
@_memoize_indent
def Body(indent=''):
return CurlyBody | (IncreaseIndent(indent) ** Block)
def CurrentIndent(indent):
default = None >> Return('')
return default if indent == '' else Term(indent)
def IncreaseIndent(current):
token = Tokens.Newline >> Expect(Content(Tokens.Indent))
return token ^ (lambda token: len(current) < len(token))
CurlyStmt = Statement() << List(Seek(';'))
CurlyBody = Seek('{') >> List(Wrap(CurlyStmt)) << Seek('}')
Heading = Some(Tokens.Newline) // Tokens.Indent
Program = Opt(Heading) >> Block() << OptNewlines
def parse_program(source):
assert isinstance(source, basestring)
return tokenize_and_parse(Tokens, Program, source)
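# Usage sketch (illustrative, added): parse_program returns whatever sourcer's
# tokenize_and_parse yields for the Program rule, i.e. a list of statement
# nodes such as Definition and Expression structs:
#
#     stmts = parse_program('x = 1\ny = x + 1\n')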
|
mit
| -8,854,931,243,257,825,000 | 22.958333 | 77 | 0.580158 | false |
msduketown/SublimeKodi
|
libs/polib/polib.py
|
1
|
61741
|
# -* coding: utf-8 -*-
#
# License: MIT (see LICENSE file provided)
# vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4:
"""
**polib** allows you to manipulate, create, modify gettext files (pot, po and
mo files). You can load existing files, iterate through it's entries, add,
modify entries, comments or metadata, etc. or create new po files from scratch.
**polib** provides a simple and pythonic API via the :func:`~polib.pofile` and
:func:`~polib.mofile` convenience functions.
"""
__author__ = 'David Jean Louis <izimobil@gmail.com>'
__version__ = '1.0.6'
__all__ = ['pofile', 'POFile', 'POEntry', 'mofile', 'MOFile', 'MOEntry',
'default_encoding', 'escape', 'unescape', 'detect_encoding', ]
import array
import codecs
import os
import re
import struct
import sys
import textwrap
try:
import io
except ImportError:
# replacement of io.open() for python < 2.6
# we use codecs instead
class io(object):
@staticmethod
def open(fpath, mode='r', encoding=None):
return codecs.open(fpath, mode, encoding)
# the default encoding to use when encoding cannot be detected
default_encoding = 'utf-8'
# python 2/3 compatibility helpers {{{
if sys.version_info[:2] < (3, 0):
PY3 = False
text_type = unicode
def b(s):
return s
def u(s):
return unicode(s, "unicode_escape")
else:
PY3 = True
text_type = str
def b(s):
return s.encode("latin-1")
def u(s):
return s
# }}}
# _pofile_or_mofile {{{
def _pofile_or_mofile(f, type, **kwargs):
"""
Internal function used by :func:`polib.pofile` and :func:`polib.mofile` to
honor the DRY concept.
"""
# get the file encoding
enc = kwargs.get('encoding')
if enc is None:
enc = detect_encoding(f, type == 'mofile')
# parse the file
kls = type == 'pofile' and _POFileParser or _MOFileParser
parser = kls(
f,
encoding=enc,
check_for_duplicates=kwargs.get('check_for_duplicates', False),
klass=kwargs.get('klass')
)
instance = parser.parse()
instance.wrapwidth = kwargs.get('wrapwidth', 78)
return instance
# }}}
# _is_file {{{
def _is_file(filename_or_contents):
"""
Safely returns the value of os.path.exists(filename_or_contents).
Arguments:
``filename_or_contents``
either a filename, or a string holding the contents of some file.
In the latter case, this function will always return False.
"""
try:
return os.path.exists(filename_or_contents)
except (ValueError, UnicodeEncodeError):
return False
# }}}
# function pofile() {{{
def pofile(pofile, **kwargs):
"""
Convenience function that parses the po or pot file ``pofile`` and returns
a :class:`~polib.POFile` instance.
Arguments:
``pofile``
string, full or relative path to the po/pot file or its content (data).
``wrapwidth``
integer, the wrap width, only useful when the ``-w`` option was passed
to xgettext (optional, default: ``78``).
``encoding``
string, the encoding to use (e.g. "utf-8") (default: ``None``, the
encoding will be auto-detected).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
``klass``
class which is used to instantiate the return value (optional,
        default: ``None``, the return value will be a :class:`~polib.POFile`
instance).
"""
return _pofile_or_mofile(pofile, 'pofile', **kwargs)
# }}}
# function mofile() {{{
def mofile(mofile, **kwargs):
"""
Convenience function that parses the mo file ``mofile`` and returns a
:class:`~polib.MOFile` instance.
Arguments:
``mofile``
string, full or relative path to the mo file or its content (data).
``wrapwidth``
integer, the wrap width, only useful when the ``-w`` option was passed
to xgettext to generate the po file that was used to format the mo file
(optional, default: ``78``).
``encoding``
string, the encoding to use (e.g. "utf-8") (default: ``None``, the
encoding will be auto-detected).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
``klass``
class which is used to instantiate the return value (optional,
        default: ``None``, the return value will be a :class:`~polib.MOFile`
instance).
"""
return _pofile_or_mofile(mofile, 'mofile', **kwargs)
# }}}
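# Minimal usage sketch (added; assumes a local 'messages.po' file):
#
#     po = pofile('messages.po')
#     for entry in po:
#         print(entry.msgid, '->', entry.msgstr)
#     po.save('messages-copy.po')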
# function detect_encoding() {{{
def detect_encoding(file, binary_mode=False):
"""
Try to detect the encoding used by the ``file``. The ``file`` argument can
be a PO or MO file path or a string containing the contents of the file.
If the encoding cannot be detected, the function will return the value of
``default_encoding``.
Arguments:
``file``
string, full or relative path to the po/mo file or its content.
``binary_mode``
boolean, set this to True if ``file`` is a mo file.
"""
PATTERN = r'"?Content-Type:.+? charset=([\w_\-:\.]+)'
rxt = re.compile(u(PATTERN))
rxb = re.compile(b(PATTERN))
def charset_exists(charset):
"""Check whether ``charset`` is valid or not."""
try:
codecs.lookup(charset)
except LookupError:
return False
return True
if not _is_file(file):
match = rxt.search(file)
if match:
enc = match.group(1).strip()
if charset_exists(enc):
return enc
else:
# For PY3, always treat as binary
if binary_mode or PY3:
mode = 'rb'
rx = rxb
else:
mode = 'r'
rx = rxt
f = open(file, mode)
        for line in f:
            match = rx.search(line)
if match:
f.close()
enc = match.group(1).strip()
if not isinstance(enc, text_type):
enc = enc.decode('utf-8')
if charset_exists(enc):
return enc
f.close()
return default_encoding
# }}}
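# Example (added, illustrative): the argument may be a path or raw contents;
# both calls below return 'UTF-8', assuming the file header declares it:
#
#     detect_encoding('messages.po')
#     detect_encoding('msgstr "Content-Type: text/plain; charset=UTF-8\\n"')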
# function escape() {{{
def escape(st):
"""
Escapes the characters ``\\\\``, ``\\t``, ``\\n``, ``\\r`` and ``"`` in
the given string ``st`` and returns it.
"""
return st.replace('\\', r'\\')\
.replace('\t', r'\t')\
.replace('\r', r'\r')\
.replace('\n', r'\n')\
.replace('\"', r'\"')
# }}}
# function unescape() {{{
def unescape(st):
"""
Unescapes the characters ``\\\\``, ``\\t``, ``\\n``, ``\\r`` and ``"`` in
the given string ``st`` and returns it.
"""
def unescape_repl(m):
m = m.group(1)
if m == 'n':
return '\n'
if m == 't':
return '\t'
if m == 'r':
return '\r'
if m == '\\':
return '\\'
return m # handles escaped double quote
return re.sub(r'\\(\\|n|t|r|")', unescape_repl, st)
# }}}
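# Round-trip sketch (added, illustrative):
#
#     >>> escape('a\tb\n"c"')
#     'a\\tb\\n\\"c\\"'
#     >>> unescape(escape('a\tb\n"c"')) == 'a\tb\n"c"'
#     True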
# class _BaseFile {{{
class _BaseFile(list):
"""
Common base class for the :class:`~polib.POFile` and :class:`~polib.MOFile`
    classes. This class should **not** be instantiated directly.
"""
def __init__(self, *args, **kwargs):
"""
Constructor, accepts the following keyword arguments:
``pofile``
string, the path to the po or mo file, or its content as a string.
``wrapwidth``
integer, the wrap width, only useful when the ``-w`` option was
passed to xgettext (optional, default: ``78``).
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file, (optional, default: ``False``).
"""
list.__init__(self)
# the opened file handle
pofile = kwargs.get('pofile', None)
if pofile and _is_file(pofile):
self.fpath = pofile
else:
self.fpath = kwargs.get('fpath')
# the width at which lines should be wrapped
self.wrapwidth = kwargs.get('wrapwidth', 78)
# the file encoding
self.encoding = kwargs.get('encoding', default_encoding)
# whether to check for duplicate entries or not
self.check_for_duplicates = kwargs.get('check_for_duplicates', False)
# header
self.header = ''
# both po and mo files have metadata
self.metadata = {}
self.metadata_is_fuzzy = 0
def __unicode__(self):
"""
Returns the unicode representation of the file.
"""
ret = []
entries = [self.metadata_as_entry()] + \
[e for e in self if not e.obsolete]
for entry in entries:
ret.append(entry.__unicode__(self.wrapwidth))
for entry in self.obsolete_entries():
ret.append(entry.__unicode__(self.wrapwidth))
ret = u('\n').join(ret)
assert isinstance(ret, text_type)
#if type(ret) != text_type:
# return unicode(ret, self.encoding)
return ret
if PY3:
def __str__(self):
return self.__unicode__()
else:
def __str__(self):
"""
Returns the string representation of the file.
"""
return unicode(self).encode(self.encoding)
def __contains__(self, entry):
"""
        Overridden ``list`` method to implement the membership test (``in``
        and ``not in``).
The method considers that an entry is in the file if it finds an entry
that has the same msgid (the test is **case sensitive**) and the same
msgctxt (or none for both entries).
Argument:
``entry``
an instance of :class:`~polib._BaseEntry`.
"""
return self.find(entry.msgid, by='msgid', msgctxt=entry.msgctxt) \
is not None
def __eq__(self, other):
return str(self) == str(other)
def append(self, entry):
"""
        Overridden method that checks for duplicate entries: if a user tries
        to add an entry that is already in the file, the method raises a
        ``ValueError`` exception.
Argument:
``entry``
an instance of :class:`~polib._BaseEntry`.
"""
if self.check_for_duplicates and entry in self:
raise ValueError('Entry "%s" already exists' % entry.msgid)
super(_BaseFile, self).append(entry)
def insert(self, index, entry):
"""
        Overridden method that checks for duplicate entries: if a user tries
        to add an entry that is already in the file, the method raises a
        ``ValueError`` exception.
Arguments:
``index``
index at which the entry should be inserted.
``entry``
an instance of :class:`~polib._BaseEntry`.
"""
if self.check_for_duplicates and entry in self:
raise ValueError('Entry "%s" already exists' % entry.msgid)
super(_BaseFile, self).insert(index, entry)
def metadata_as_entry(self):
"""
        Returns the file metadata as a :class:`~polib.POEntry` instance.
"""
e = POEntry(msgid='')
mdata = self.ordered_metadata()
if mdata:
strs = []
for name, value in mdata:
                # serialize each metadata field as a "name: value" line
strs.append('%s: %s' % (name, value))
e.msgstr = '\n'.join(strs) + '\n'
if self.metadata_is_fuzzy:
e.flags.append('fuzzy')
return e
def save(self, fpath=None, repr_method='__unicode__'):
"""
Saves the po file to ``fpath``.
If it is an existing file and no ``fpath`` is provided, then the
existing file is rewritten with the modified data.
Keyword arguments:
``fpath``
string, full or relative path to the file.
``repr_method``
string, the method to use for output.
"""
if self.fpath is None and fpath is None:
raise IOError('You must provide a file path to save() method')
contents = getattr(self, repr_method)()
if fpath is None:
fpath = self.fpath
if repr_method == 'to_binary':
fhandle = open(fpath, 'wb')
else:
fhandle = io.open(fpath, 'w', encoding=self.encoding)
if not isinstance(contents, text_type):
contents = contents.decode(self.encoding)
fhandle.write(contents)
fhandle.close()
# set the file path if not set
if self.fpath is None and fpath:
self.fpath = fpath
def find(self, st, by='msgid', include_obsolete_entries=False,
msgctxt=False):
"""
Find the entry whose msgid (or the property identified by the ``by``
argument) matches the string ``st``.
Keyword arguments:
``st``
string, the string to search for.
``by``
string, the property to use for comparison (default: ``msgid``).
``include_obsolete_entries``
boolean, whether to also search in entries that are obsolete.
``msgctxt``
string, allows specifying a message context for the search.
"""
if include_obsolete_entries:
entries = self[:]
else:
entries = [e for e in self if not e.obsolete]
for e in entries:
if getattr(e, by) == st:
if msgctxt is not False and e.msgctxt != msgctxt:
continue
return e
return None
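# Illustrative usage sketch (not part of the original module); the values
# searched for are placeholders:
#
#     entry = po.find('Hello world')               # match on msgid
#     entry = po.find('greeting', by='msgctxt')    # match on another field
#     if entry is not None:
#         print(entry.msgstr)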
def ordered_metadata(self):
"""
Convenience method that returns an ordered version of the metadata
dictionary. The return value is a list of tuples (metadata_name,
metadata_value).
"""
# copy the dict first
metadata = self.metadata.copy()
data_order = [
'Project-Id-Version',
'Report-Msgid-Bugs-To',
'POT-Creation-Date',
'PO-Revision-Date',
'Last-Translator',
'Language-Team',
'Language',
'MIME-Version',
'Content-Type',
'Content-Transfer-Encoding',
'Plural-Forms'
]
ordered_data = []
for data in data_order:
try:
value = metadata.pop(data)
ordered_data.append((data, value))
except KeyError:
pass
# the rest of the metadata will be alphabetically ordered since there
# are no specs for this AFAIK
for data in sorted(metadata.keys()):
value = metadata[data]
ordered_data.append((data, value))
return ordered_data
def to_binary(self):
"""
Return the binary representation of the file.
"""
offsets = []
entries = self.translated_entries()
# the keys are sorted in the .mo file; msgfmt compares on msgctxt when
# it exists, falling back to msgid
entries.sort(key=lambda o: o.msgctxt or o.msgid)
# add the metadata entry first
mentry = self.metadata_as_entry()
entries = [mentry] + entries
entries_len = len(entries)
ids, strs = b(''), b('')
for e in entries:
# For each string, we need size and file offset. Each string is
# NUL terminated; the NUL does not count into the size.
msgid = b('')
if e.msgctxt:
# Contexts are stored by storing the concatenation of the
# context, a <EOT> byte, and the original string
msgid = self._encode(e.msgctxt + '\4')
if e.msgid_plural:
msgstr = []
for index in sorted(e.msgstr_plural.keys()):
msgstr.append(e.msgstr_plural[index])
msgid += self._encode(e.msgid + '\0' + e.msgid_plural)
msgstr = self._encode('\0'.join(msgstr))
else:
msgid += self._encode(e.msgid)
msgstr = self._encode(e.msgstr)
offsets.append((len(ids), len(msgid), len(strs), len(msgstr)))
ids += msgid + b('\0')
strs += msgstr + b('\0')
# The header is 7 32-bit unsigned integers.
keystart = 7 * 4 + 16 * entries_len
# and the values start after the keys
valuestart = keystart + len(ids)
koffsets = []
voffsets = []
# The string table first has the list of keys, then the list of values.
# Each entry has first the size of the string, then the file offset.
for o1, l1, o2, l2 in offsets:
koffsets += [l1, o1 + keystart]
voffsets += [l2, o2 + valuestart]
offsets = koffsets + voffsets
output = struct.pack(
"Iiiiiii",
# Magic number
MOFile.MAGIC,
# Version
0,
# number of entries
entries_len,
# start of key index
7 * 4,
# start of value index
7 * 4 + entries_len * 8,
# size and offset of hash table, we don't use hash tables
0, keystart
)
if PY3 and sys.version_info.minor > 1: # python 3.2 or later
output += array.array("i", offsets).tobytes()
else:
output += array.array("i", offsets).tostring()
output += ids
output += strs
return output
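# Layout sketch of the generated .mo data, for N translated entries plus
# the metadata entry (so entries_len = N + 1):
#
#     bytes 0..27   header: 7 unsigned 32-bit integers
#     then          entries_len * 8 bytes of (length, offset) msgid pairs
#     then          entries_len * 8 bytes of (length, offset) msgstr pairs
#     keystart = 7 * 4 + 16 * entries_len   -> NUL-terminated msgids
#     valuestart = keystart + len(ids)      -> NUL-terminated msgstrs
#
# e.g. with entries_len = 2 the msgid data starts at byte 28 + 32 = 60.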
def _encode(self, mixed):
"""
Encodes the given ``mixed`` argument with the file encoding if and
only if it's a unicode string, and returns the encoded string.
"""
if isinstance(mixed, text_type):
mixed = mixed.encode(self.encoding)
return mixed
# }}}
# class POFile {{{
class POFile(_BaseFile):
"""
Po (or Pot) file reader/writer.
This class inherits the :class:`~polib._BaseFile` class and, by extension,
the python ``list`` type.
"""
def __unicode__(self):
"""
Returns the unicode representation of the po file.
"""
ret, headers = '', self.header.split('\n')
for header in headers:
if header[:1] in [',', ':']:
ret += '#%s\n' % header
else:
ret += '# %s\n' % header
if not isinstance(ret, text_type):
ret = ret.decode(self.encoding)
return ret + _BaseFile.__unicode__(self)
def save_as_mofile(self, fpath):
"""
Saves the binary representation of the file to given ``fpath``.
Keyword argument:
``fpath``
string, full or relative path to the mo file.
"""
_BaseFile.save(self, fpath, 'to_binary')
def percent_translated(self):
"""
Convenience method that returns the percentage of translated
messages.
"""
total = len([e for e in self if not e.obsolete])
if total == 0:
return 100
translated = len(self.translated_entries())
return int(translated * 100 / float(total))
def translated_entries(self):
"""
Convenience method that returns the list of translated entries.
"""
return [e for e in self if e.translated()]
def untranslated_entries(self):
"""
Convenience method that returns the list of untranslated entries.
"""
return [e for e in self if not e.translated() and not e.obsolete
and 'fuzzy' not in e.flags]
def fuzzy_entries(self):
"""
Convenience method that returns the list of fuzzy entries.
"""
return [e for e in self if 'fuzzy' in e.flags]
def obsolete_entries(self):
"""
Convenience method that returns the list of obsolete entries.
"""
return [e for e in self if e.obsolete]
def merge(self, refpot):
"""
Convenience method that merges the current pofile with the pot file
provided. It behaves exactly like the gettext msgmerge utility:
* translator comments of this file are preserved, while extracted
comments and occurrences are taken from the reference pot file;
* entries that are no longer in the reference pot file are marked
obsolete;
* the fuzzy flags are preserved.
Keyword argument:
``refpot``
object POFile, the reference catalog.
"""
# Store entries in dict/set for faster access
self_entries = dict((entry.msgid, entry) for entry in self)
refpot_msgids = set(entry.msgid for entry in refpot)
# Merge entries that are in the refpot
for entry in refpot:
e = self_entries.get(entry.msgid)
if e is None:
e = POEntry()
self.append(e)
e.merge(entry)
# ok, now we must "obsolete" entries that are not in the refpot anymore
for entry in self:
if entry.msgid not in refpot_msgids:
entry.obsolete = True
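# Illustrative usage sketch mirroring msgmerge (not part of the original
# module); file names are placeholders:
#
#     po = pofile('locale/fr.po')
#     refpot = pofile('messages.pot')
#     po.merge(refpot)   # entries absent from the pot are marked obsolete
#     po.save()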
# }}}
# class MOFile {{{
class MOFile(_BaseFile):
"""
Mo file reader/writer.
This class inherits the :class:`~polib._BaseFile` class and, by
extension, the python ``list`` type.
"""
MAGIC = 0x950412de
MAGIC_SWAPPED = 0xde120495
def __init__(self, *args, **kwargs):
"""
Constructor, accepts all keywords arguments accepted by
:class:`~polib._BaseFile` class.
"""
_BaseFile.__init__(self, *args, **kwargs)
self.magic_number = None
self.version = 0
def save_as_pofile(self, fpath):
"""
Saves the mofile as a pofile to ``fpath``.
Keyword argument:
``fpath``
string, full or relative path to the file.
"""
_BaseFile.save(self, fpath)
def save(self, fpath=None):
"""
Saves the mofile to ``fpath``.
Keyword argument:
``fpath``
string, full or relative path to the file.
"""
_BaseFile.save(self, fpath, 'to_binary')
def percent_translated(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return 100
def translated_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return self
def untranslated_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return []
def fuzzy_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return []
def obsolete_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return []
# }}}
# class _BaseEntry {{{
class _BaseEntry(object):
"""
Base class for :class:`~polib.POEntry` and :class:`~polib.MOEntry` classes.
This class should **not** be instantiated directly.
"""
def __init__(self, *args, **kwargs):
"""
Constructor, accepts the following keyword arguments:
``msgid``
string, the entry msgid.
``msgstr``
string, the entry msgstr.
``msgid_plural``
string, the entry msgid_plural.
``msgstr_plural``
dict, the entry msgstr_plural entries (plural index -> string).
``msgctxt``
string, the entry context (msgctxt).
``obsolete``
bool, whether the entry is "obsolete" or not.
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
"""
self.msgid = kwargs.get('msgid', '')
self.msgstr = kwargs.get('msgstr', '')
self.msgid_plural = kwargs.get('msgid_plural', '')
self.msgstr_plural = kwargs.get('msgstr_plural', {})
self.msgctxt = kwargs.get('msgctxt', None)
self.obsolete = kwargs.get('obsolete', False)
self.encoding = kwargs.get('encoding', default_encoding)
def __unicode__(self, wrapwidth=78):
"""
Returns the unicode representation of the entry.
"""
if self.obsolete:
delflag = '#~ '
else:
delflag = ''
ret = []
# write the msgctxt if any
if self.msgctxt is not None:
ret += self._str_field("msgctxt", delflag, "", self.msgctxt,
wrapwidth)
# write the msgid
ret += self._str_field("msgid", delflag, "", self.msgid, wrapwidth)
# write the msgid_plural if any
if self.msgid_plural:
ret += self._str_field("msgid_plural", delflag, "",
self.msgid_plural, wrapwidth)
if self.msgstr_plural:
# write the msgstr_plural if any
msgstrs = self.msgstr_plural
keys = list(msgstrs)
keys.sort()
for index in keys:
msgstr = msgstrs[index]
plural_index = '[%s]' % index
ret += self._str_field("msgstr", delflag, plural_index, msgstr,
wrapwidth)
else:
# otherwise write the msgstr
ret += self._str_field("msgstr", delflag, "", self.msgstr,
wrapwidth)
ret.append('')
ret = u('\n').join(ret)
return ret
if PY3:
def __str__(self):
return self.__unicode__()
else:
def __str__(self):
"""
Returns the string representation of the entry.
"""
return unicode(self).encode(self.encoding)
def __eq__(self, other):
return str(self) == str(other)
def _str_field(self, fieldname, delflag, plural_index, field,
wrapwidth=78):
lines = field.splitlines(True)
if len(lines) > 1:
lines = [''] + lines # start with initial empty line
else:
escaped_field = escape(field)
specialchars_count = 0
for c in ['\\', '\n', '\r', '\t', '"']:
specialchars_count += field.count(c)
# comparison must take into account fieldname length + one space
# + 2 quotes (eg. msgid "<string>")
flength = len(fieldname) + 3
if plural_index:
flength += len(plural_index)
real_wrapwidth = wrapwidth - flength + specialchars_count
if wrapwidth > 0 and len(field) > real_wrapwidth:
# Wrap the line but take field name into account
lines = [''] + [unescape(item) for item in wrap(
escaped_field,
wrapwidth - 2, # 2 for quotes ""
drop_whitespace=False,
break_long_words=False
)]
else:
lines = [field]
if fieldname.startswith('previous_'):
# quick and dirty trick to get the real field name
fieldname = fieldname[9:]
ret = ['%s%s%s "%s"' % (delflag, fieldname, plural_index,
escape(lines.pop(0)))]
for mstr in lines:
ret.append('%s"%s"' % (delflag, escape(mstr)))
return ret
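# For example (illustrative, values are placeholders):
#     self._str_field('msgid', '', '', 'Hello')  ->  ['msgid "Hello"']
# and with the '#~ ' delflag used for obsolete entries:
#     self._str_field('msgid', '#~ ', '', 'Hello')  ->  ['#~ msgid "Hello"']
# Long fields are wrapped, yielding an initial empty string line followed
# by the quoted continuation lines.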
# }}}
# class POEntry {{{
class POEntry(_BaseEntry):
"""
Represents a po file entry.
"""
def __init__(self, *args, **kwargs):
"""
Constructor, accepts the following keyword arguments:
``comment``
string, the entry comment.
``tcomment``
string, the entry translator comment.
``occurrences``
list, the entry occurrences.
``flags``
list, the entry flags.
``previous_msgctxt``
string, the entry previous context.
``previous_msgid``
string, the entry previous msgid.
``previous_msgid_plural``
string, the entry previous msgid_plural.
``linenum``
integer, the line number of the entry
"""
_BaseEntry.__init__(self, *args, **kwargs)
self.comment = kwargs.get('comment', '')
self.tcomment = kwargs.get('tcomment', '')
self.occurrences = kwargs.get('occurrences', [])
self.flags = kwargs.get('flags', [])
self.previous_msgctxt = kwargs.get('previous_msgctxt', None)
self.previous_msgid = kwargs.get('previous_msgid', None)
self.previous_msgid_plural = kwargs.get('previous_msgid_plural', None)
self.linenum = kwargs.get('linenum', None)
def __unicode__(self, wrapwidth=78):
"""
Returns the unicode representation of the entry.
"""
if self.obsolete:
return _BaseEntry.__unicode__(self, wrapwidth)
ret = []
# comments first, if any (with text wrapping as xgettext does)
comments = [('comment', '#. '), ('tcomment', '# ')]
for c in comments:
val = getattr(self, c[0])
if val:
for comment in val.split('\n'):
if wrapwidth > 0 and len(comment) + len(c[1]) > wrapwidth:
ret += wrap(
comment,
wrapwidth,
initial_indent=c[1],
subsequent_indent=c[1],
break_long_words=False
)
else:
ret.append('%s%s' % (c[1], comment))
# occurrences (with text wrapping as xgettext does)
if self.occurrences:
filelist = []
for fpath, lineno in self.occurrences:
if lineno:
filelist.append('%s:%s' % (fpath, lineno))
else:
filelist.append(fpath)
filestr = ' '.join(filelist)
if wrapwidth > 0 and len(filestr) + 3 > wrapwidth:
# textwrap splits words that contain a hyphen, which is not
# what we want for filenames, so the dirty hack is to
# temporarily replace hyphens with a char that a filename cannot
# contain, like "*"
ret += [l.replace('*', '-') for l in wrap(
filestr.replace('-', '*'),
wrapwidth,
initial_indent='#: ',
subsequent_indent='#: ',
break_long_words=False
)]
else:
ret.append('#: ' + filestr)
# flags (TODO: wrapping ?)
if self.flags:
ret.append('#, %s' % ', '.join(self.flags))
# previous context and previous msgid/msgid_plural
fields = ['previous_msgctxt', 'previous_msgid',
'previous_msgid_plural']
for f in fields:
val = getattr(self, f)
if val:
ret += self._str_field(f, "#| ", "", val, wrapwidth)
ret.append(_BaseEntry.__unicode__(self, wrapwidth))
ret = u('\n').join(ret)
assert isinstance(ret, text_type)
return ret
def __cmp__(self, other):
"""
Called by comparison operations if rich comparison is not defined.
"""
# First: Obsolete test
if self.obsolete != other.obsolete:
if self.obsolete:
return -1
else:
return 1
# Work on a copy to protect original
occ1 = sorted(self.occurrences[:])
occ2 = sorted(other.occurrences[:])
pos = 0
for entry1 in occ1:
try:
entry2 = occ2[pos]
except IndexError:
return 1
pos = pos + 1
if entry1[0] != entry2[0]:
if entry1[0] > entry2[0]:
return 1
else:
return -1
if entry1[1] != entry2[1]:
if entry1[1] > entry2[1]:
return 1
else:
return -1
# Finally: Compare message ID
if self.msgid > other.msgid:
return 1
elif self.msgid < other.msgid:
return -1
return 0
def __gt__(self, other):
return self.__cmp__(other) > 0
def __lt__(self, other):
return self.__cmp__(other) < 0
def __ge__(self, other):
return self.__cmp__(other) >= 0
def __le__(self, other):
return self.__cmp__(other) <= 0
def __eq__(self, other):
return self.__cmp__(other) == 0
def __ne__(self, other):
return self.__cmp__(other) != 0
def translated(self):
"""
Returns ``True`` if the entry has been translated or ``False``
otherwise.
"""
if self.obsolete or 'fuzzy' in self.flags:
return False
if self.msgstr != '':
return True
if self.msgstr_plural:
for pos in self.msgstr_plural:
if self.msgstr_plural[pos] == '':
return False
return True
return False
def merge(self, other):
"""
Merge the current entry with the given pot entry.
"""
self.msgid = other.msgid
self.msgctxt = other.msgctxt
self.occurrences = other.occurrences
self.comment = other.comment
fuzzy = 'fuzzy' in self.flags
self.flags = other.flags[:] # clone flags
if fuzzy:
self.flags.append('fuzzy')
self.msgid_plural = other.msgid_plural
self.obsolete = other.obsolete
self.previous_msgctxt = other.previous_msgctxt
self.previous_msgid = other.previous_msgid
self.previous_msgid_plural = other.previous_msgid_plural
if other.msgstr_plural:
for pos in other.msgstr_plural:
try:
# keep existing translation at pos if any
self.msgstr_plural[pos]
except KeyError:
self.msgstr_plural[pos] = ''
def __hash__(self):
return hash((self.msgid, self.msgstr))
# }}}
# class MOEntry {{{
class MOEntry(_BaseEntry):
"""
Represents a mo file entry.
"""
def __init__(self, *args, **kwargs):
"""
Constructor, accepts the following keyword arguments,
for consistency with :class:`~polib.POEntry`:
``comment``
``tcomment``
``occurrences``
``flags``
``previous_msgctxt``
``previous_msgid``
``previous_msgid_plural``
Note: even though these keyword arguments are accepted,
they hold no real meaning in the context of MO files
and are simply ignored.
"""
_BaseEntry.__init__(self, *args, **kwargs)
self.comment = ''
self.tcomment = ''
self.occurrences = []
self.flags = []
self.previous_msgctxt = None
self.previous_msgid = None
self.previous_msgid_plural = None
def __hash__(self):
return hash((self.msgid, self.msgstr))
# }}}
# class _POFileParser {{{
class _POFileParser(object):
"""
A finite state machine to efficiently and correctly parse the po
file format.
"""
def __init__(self, pofile, *args, **kwargs):
"""
Constructor.
Keyword arguments:
``pofile``
string, path to the po file or its content
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
"""
enc = kwargs.get('encoding', default_encoding)
if _is_file(pofile):
try:
self.fhandle = io.open(pofile, 'rt', encoding=enc)
except LookupError:
enc = default_encoding
self.fhandle = io.open(pofile, 'rt', encoding=enc)
else:
self.fhandle = pofile.splitlines()
klass = kwargs.get('klass')
if klass is None:
klass = POFile
self.instance = klass(
pofile=pofile,
encoding=enc,
check_for_duplicates=kwargs.get('check_for_duplicates', False)
)
self.transitions = {}
self.current_line = 0
self.current_entry = POEntry(linenum=self.current_line)
self.current_state = 'st'
self.current_token = None
# two memo flags used in handlers
self.msgstr_index = 0
self.entry_obsolete = 0
# Configure the state machine, by adding transitions.
# Signification of symbols:
# * ST: Beginning of the file (start)
# * HE: Header
# * TC: a translation comment
# * GC: a generated comment
# * OC: a file/line occurrence
# * FL: a flags line
# * CT: a message context
# * PC: a previous msgctxt
# * PM: a previous msgid
# * PP: a previous msgid_plural
# * MI: a msgid
# * MP: a msgid plural
# * MS: a msgstr
# * MX: a msgstr plural
# * MC: a msgid or msgstr continuation line
all_states = ['st', 'he', 'gc', 'oc', 'fl', 'ct', 'pc', 'pm', 'pp', 'tc',
'ms', 'mp', 'mx', 'mi']
self.add('tc', ['st', 'he'], 'he')
self.add('tc', ['gc', 'oc', 'fl', 'tc', 'pc', 'pm', 'pp', 'ms',
'mp', 'mx', 'mi'], 'tc')
self.add('gc', all_states, 'gc')
self.add('oc', all_states, 'oc')
self.add('fl', all_states, 'fl')
self.add('pc', all_states, 'pc')
self.add('pm', all_states, 'pm')
self.add('pp', all_states, 'pp')
self.add('ct', ['st', 'he', 'gc', 'oc', 'fl', 'tc', 'pc', 'pm',
'pp', 'ms', 'mx'], 'ct')
self.add('mi', ['st', 'he', 'gc', 'oc', 'fl', 'ct', 'tc', 'pc',
'pm', 'pp', 'ms', 'mx'], 'mi')
self.add('mp', ['tc', 'gc', 'pc', 'pm', 'pp', 'mi'], 'mp')
self.add('ms', ['mi', 'mp', 'tc'], 'ms')
self.add('mx', ['mi', 'mx', 'mp', 'tc'], 'mx')
self.add('mc', ['ct', 'mi', 'mp', 'ms', 'mx', 'pm', 'pp', 'pc'], 'mc')
def parse(self):
"""
Run the state machine, parse the file line by line and call process()
with the current matched symbol.
"""
keywords = {
'msgctxt': 'ct',
'msgid': 'mi',
'msgstr': 'ms',
'msgid_plural': 'mp',
}
prev_keywords = {
'msgid_plural': 'pp',
'msgid': 'pm',
'msgctxt': 'pc',
}
tokens = []
for line in self.fhandle:
self.current_line += 1
line = line.strip()
if line == '':
continue
tokens = line.split(None, 2)
nb_tokens = len(tokens)
if tokens[0] == '#~|':
continue
if tokens[0] == '#~' and nb_tokens > 1:
line = line[3:].strip()
tokens = tokens[1:]
nb_tokens -= 1
self.entry_obsolete = 1
else:
self.entry_obsolete = 0
# Take care of keywords like
# msgid, msgid_plural, msgctxt & msgstr.
if tokens[0] in keywords and nb_tokens > 1:
line = line[len(tokens[0]):].lstrip()
if re.search(r'([^\\]|^)"', line[1:-1]):
raise IOError('Syntax error in po file %s (line %s): '
'unescaped double quote found' %
(self.instance.fpath, self.current_line))
self.current_token = line
self.process(keywords[tokens[0]])
continue
self.current_token = line
if tokens[0] == '#:':
if nb_tokens <= 1:
continue
# we are on an occurrences line
self.process('oc')
elif line[:1] == '"':
# we are on a continuation line
if re.search(r'([^\\]|^)"', line[1:-1]):
raise IOError('Syntax error in po file %s (line %s): '
'unescaped double quote found' %
(self.instance.fpath, self.current_line))
self.process('mc')
elif line[:7] == 'msgstr[':
# we are on a msgstr plural
self.process('mx')
elif tokens[0] == '#,':
if nb_tokens <= 1:
continue
# we are on a flags line
self.process('fl')
elif tokens[0] == '#' or tokens[0].startswith('##'):
if line == '#':
line += ' '
# we are on a translator comment line
self.process('tc')
elif tokens[0] == '#.':
if nb_tokens <= 1:
continue
# we are on a generated comment line
self.process('gc')
elif tokens[0] == '#|':
if nb_tokens <= 1:
raise IOError('Syntax error in po file %s (line %s)' %
(self.instance.fpath, self.current_line))
# Remove the marker and any whitespace right after that.
line = line[2:].lstrip()
self.current_token = line
if tokens[1].startswith('"'):
# Continuation of a previous-translation field.
self.process('mc')
continue
if nb_tokens == 2:
# Invalid continuation line.
raise IOError('Syntax error in po file %s (line %s): '
'invalid continuation line' %
(self.instance.fpath, self.current_line))
# we are on a "previous translation" comment line,
if tokens[1] not in prev_keywords:
# Unknown keyword in previous translation comment.
raise IOError('Syntax error in po file %s (line %s): '
'unknown keyword %s' %
(self.instance.fpath, self.current_line,
tokens[1]))
# Remove the keyword and any whitespace
# between it and the starting quote.
line = line[len(tokens[1]):].lstrip()
self.current_token = line
self.process(prev_keywords[tokens[1]])
# Patch to fix parsing of Kodi po files
elif tokens[0].startswith("#"):
continue
else:
raise IOError('Syntax error in po file %s (line %s)' %
(self.instance.fpath, self.current_line))
if self.current_entry and len(tokens) > 0 and \
not tokens[0].startswith('#'):
# since entries are added when another entry is found, we must add
# the last entry here (only if there are lines). Trailing comments
# are ignored
self.instance.append(self.current_entry)
# before returning the instance, check if there's metadata and if
# so extract it in a dict
metadataentry = self.instance.find('')
if metadataentry: # metadata found
# remove the entry
self.instance.remove(metadataentry)
self.instance.metadata_is_fuzzy = metadataentry.flags
key = None
for msg in metadataentry.msgstr.splitlines():
try:
key, val = msg.split(':', 1)
self.instance.metadata[key] = val.strip()
except (ValueError, KeyError):
if key is not None:
self.instance.metadata[key] += '\n' + msg.strip()
# close opened file
if not isinstance(self.fhandle, list): # must be file
self.fhandle.close()
return self.instance
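# Illustrative usage sketch (the file name is a placeholder); the module's
# pofile() factory normally wraps this class, but the parser can also be
# driven directly:
#
#     parser = _POFileParser('messages.po')
#     po = parser.parse()   # returns a POFile (or the provided klass) instance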
def add(self, symbol, states, next_state):
"""
Add a transition to the state machine.
Keyword arguments:
``symbol``
string, the matched token (two chars symbol).
``states``
list, a list of states (two chars symbols).
``next_state``
the next state the fsm will have after the action.
"""
for state in states:
action = getattr(self, 'handle_%s' % next_state)
self.transitions[(symbol, state)] = (action, next_state)
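# For example, the constructor call add('tc', ['st', 'he'], 'he') registers
# handle_he as the action for a translator-comment token seen while in the
# start or header states, i.e.:
#     self.transitions[('tc', 'st')] == (self.handle_he, 'he')
#     self.transitions[('tc', 'he')] == (self.handle_he, 'he')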
def process(self, symbol):
"""
Process the transition corresponding to the current state and the
symbol provided.
Keyword arguments:
``symbol``
string, the matched token (two chars symbol).
"""
try:
(action, state) = self.transitions[(symbol, self.current_state)]
if action():
self.current_state = state
except Exception:
raise IOError('Syntax error in po file (line %s)' %
self.current_line)
# state handlers
def handle_he(self):
"""Handle a header comment."""
if self.instance.header != '':
self.instance.header += '\n'
self.instance.header += self.current_token[2:]
return True
def handle_tc(self):
"""Handle a translator comment."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
if self.current_entry.tcomment != '':
self.current_entry.tcomment += '\n'
tcomment = self.current_token.lstrip('#')
if tcomment.startswith(' '):
tcomment = tcomment[1:]
self.current_entry.tcomment += tcomment
return True
def handle_gc(self):
"""Handle a generated comment."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
if self.current_entry.comment != '':
self.current_entry.comment += '\n'
self.current_entry.comment += self.current_token[3:]
return True
def handle_oc(self):
"""Handle a file:num occurence."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
occurrences = self.current_token[3:].split()
for occurrence in occurrences:
if occurrence != '':
try:
fil, line = occurrence.split(':')
if not line.isdigit():
fil = fil + line
line = ''
self.current_entry.occurrences.append((fil, line))
except (ValueError, AttributeError):
self.current_entry.occurrences.append((occurrence, ''))
return True
def handle_fl(self):
"""Handle a flags line."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
self.current_entry.flags += [c.strip() for c in
self.current_token[3:].split(',')]
return True
def handle_pp(self):
"""Handle a previous msgid_plural line."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
self.current_entry.previous_msgid_plural = \
unescape(self.current_token[1:-1])
return True
def handle_pm(self):
"""Handle a previous msgid line."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
self.current_entry.previous_msgid = \
unescape(self.current_token[1:-1])
return True
def handle_pc(self):
"""Handle a previous msgctxt line."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
self.current_entry.previous_msgctxt = \
unescape(self.current_token[1:-1])
return True
def handle_ct(self):
"""Handle a msgctxt."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
self.current_entry.msgctxt = unescape(self.current_token[1:-1])
return True
def handle_mi(self):
"""Handle a msgid."""
if self.current_state in ['mc', 'ms', 'mx']:
self.instance.append(self.current_entry)
self.current_entry = POEntry(linenum=self.current_line)
self.current_entry.obsolete = self.entry_obsolete
self.current_entry.msgid = unescape(self.current_token[1:-1])
return True
def handle_mp(self):
"""Handle a msgid plural."""
self.current_entry.msgid_plural = unescape(self.current_token[1:-1])
return True
def handle_ms(self):
"""Handle a msgstr."""
self.current_entry.msgstr = unescape(self.current_token[1:-1])
return True
def handle_mx(self):
"""Handle a msgstr plural."""
index, value = self.current_token[7], self.current_token[11:-1]
self.current_entry.msgstr_plural[int(index)] = unescape(value)
self.msgstr_index = int(index)
return True
def handle_mc(self):
"""Handle a msgid or msgstr continuation line."""
token = unescape(self.current_token[1:-1])
if self.current_state == 'ct':
self.current_entry.msgctxt += token
elif self.current_state == 'mi':
self.current_entry.msgid += token
elif self.current_state == 'mp':
self.current_entry.msgid_plural += token
elif self.current_state == 'ms':
self.current_entry.msgstr += token
elif self.current_state == 'mx':
self.current_entry.msgstr_plural[self.msgstr_index] += token
elif self.current_state == 'pp':
self.current_entry.previous_msgid_plural += token
elif self.current_state == 'pm':
self.current_entry.previous_msgid += token
elif self.current_state == 'pc':
self.current_entry.previous_msgctxt += token
# don't change the current state
return False
# }}}
# class _MOFileParser {{{
class _MOFileParser(object):
"""
A class to parse binary mo files.
"""
def __init__(self, mofile, *args, **kwargs):
"""
Constructor.
Keyword arguments:
``mofile``
string, path to the mo file or its content
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
"""
self.fhandle = open(mofile, 'rb')
klass = kwargs.get('klass')
if klass is None:
klass = MOFile
self.instance = klass(
fpath=mofile,
encoding=kwargs.get('encoding', default_encoding),
check_for_duplicates=kwargs.get('check_for_duplicates', False)
)
def __del__(self):
"""
Make sure the file is closed; this prevents warnings about unclosed
files when running tests with python >= 3.2.
"""
if self.fhandle:
self.fhandle.close()
def parse(self):
"""
Build the instance with the file handle provided in the
constructor.
"""
# parse magic number
magic_number = self._readbinary('<I', 4)
if magic_number == MOFile.MAGIC:
ii = '<II'
elif magic_number == MOFile.MAGIC_SWAPPED:
ii = '>II'
else:
raise IOError('Invalid mo file, magic number is incorrect!')
self.instance.magic_number = magic_number
# parse the version number and the number of strings
version, numofstrings = self._readbinary(ii, 8)
# from MO file format specs: "A program seeing an unexpected major
# revision number should stop reading the MO file entirely"
if version not in (0, 1):
raise IOError('Invalid mo file, unexpected major revision number')
self.instance.version = version
# original strings and translation strings hash table offset
msgids_hash_offset, msgstrs_hash_offset = self._readbinary(ii, 8)
# move to msgid hash table and read length and offset of msgids
self.fhandle.seek(msgids_hash_offset)
msgids_index = []
for i in range(numofstrings):
msgids_index.append(self._readbinary(ii, 8))
# move to msgstr hash table and read length and offset of msgstrs
self.fhandle.seek(msgstrs_hash_offset)
msgstrs_index = []
for i in range(numofstrings):
msgstrs_index.append(self._readbinary(ii, 8))
# build entries
encoding = self.instance.encoding
for i in range(numofstrings):
self.fhandle.seek(msgids_index[i][1])
msgid = self.fhandle.read(msgids_index[i][0])
self.fhandle.seek(msgstrs_index[i][1])
msgstr = self.fhandle.read(msgstrs_index[i][0])
if i == 0 and not msgid: # metadata
raw_metadata, metadata = msgstr.split(b('\n')), {}
for line in raw_metadata:
tokens = line.split(b(':'), 1)
if tokens[0] != b(''):
try:
k = tokens[0].decode(encoding)
v = tokens[1].decode(encoding)
metadata[k] = v.strip()
except IndexError:
metadata[k] = u('')
self.instance.metadata = metadata
continue
# test if we have a plural entry
msgid_tokens = msgid.split(b('\0'))
if len(msgid_tokens) > 1:
entry = self._build_entry(
msgid=msgid_tokens[0],
msgid_plural=msgid_tokens[1],
msgstr_plural=dict((k, v) for k, v in
enumerate(msgstr.split(b('\0'))))
)
else:
entry = self._build_entry(msgid=msgid, msgstr=msgstr)
self.instance.append(entry)
# close opened file
self.fhandle.close()
return self.instance
def _build_entry(self, msgid, msgstr=None, msgid_plural=None,
msgstr_plural=None):
msgctxt_msgid = msgid.split(b('\x04'))
encoding = self.instance.encoding
if len(msgctxt_msgid) > 1:
kwargs = {
'msgctxt': msgctxt_msgid[0].decode(encoding),
'msgid': msgctxt_msgid[1].decode(encoding),
}
else:
kwargs = {'msgid': msgid.decode(encoding)}
if msgstr:
kwargs['msgstr'] = msgstr.decode(encoding)
if msgid_plural:
kwargs['msgid_plural'] = msgid_plural.decode(encoding)
if msgstr_plural:
for k in msgstr_plural:
msgstr_plural[k] = msgstr_plural[k].decode(encoding)
kwargs['msgstr_plural'] = msgstr_plural
return MOEntry(**kwargs)
def _readbinary(self, fmt, numbytes):
"""
Private method that unpacks ``numbytes`` bytes of data using the
struct format ``fmt``. It returns a tuple, or a single value if the
tuple has length 1.
"""
data = self.fhandle.read(numbytes)
tup = struct.unpack(fmt, data)
if len(tup) == 1:
return tup[0]
return tup
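# Example (illustrative): the first four bytes of a little-endian mo file
# unpack to the magic number:
#     struct.unpack('<I', b'\xde\x12\x04\x95')[0] == 0x950412de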
# }}}
# class TextWrapper {{{
class TextWrapper(textwrap.TextWrapper):
"""
Subclass of textwrap.TextWrapper that backports the
drop_whitespace option.
"""
def __init__(self, *args, **kwargs):
drop_whitespace = kwargs.pop('drop_whitespace', True)
textwrap.TextWrapper.__init__(self, *args, **kwargs)
self.drop_whitespace = drop_whitespace
def _wrap_chunks(self, chunks):
"""_wrap_chunks(chunks : [string]) -> [string]
Wrap a sequence of text chunks and return a list of lines of
length 'self.width' or less. (If 'break_long_words' is false,
some lines may be longer than this.) Chunks correspond roughly
to words and the whitespace between them: each chunk is
indivisible (modulo 'break_long_words'), but a line break can
come between any two chunks. Chunks should not have internal
whitespace; i.e. a chunk is either all whitespace or a "word".
Whitespace chunks will be removed from the beginning and end of
lines, but apart from that whitespace is preserved.
"""
lines = []
if self.width <= 0:
raise ValueError("invalid width %r (must be > 0)" % self.width)
# Arrange in reverse order so items can be efficiently popped
# from a stack of chunks.
chunks.reverse()
while chunks:
# Start the list of chunks that will make up the current line.
# cur_len is just the length of all the chunks in cur_line.
cur_line = []
cur_len = 0
# Figure out which static string will prefix this line.
if lines:
indent = self.subsequent_indent
else:
indent = self.initial_indent
# Maximum width for this line.
width = self.width - len(indent)
# First chunk on line is whitespace -- drop it, unless this
# is the very beginning of the text (ie. no lines started yet).
if self.drop_whitespace and chunks[-1].strip() == '' and lines:
del chunks[-1]
while chunks:
l = len(chunks[-1])
# Can at least squeeze this chunk onto the current line.
if cur_len + l <= width:
cur_line.append(chunks.pop())
cur_len += l
# Nope, this line is full.
else:
break
# The current line is full, and the next chunk is too big to
# fit on *any* line (not just this one).
if chunks and len(chunks[-1]) > width:
self._handle_long_word(chunks, cur_line, cur_len, width)
# If the last chunk on this line is all whitespace, drop it.
if self.drop_whitespace and cur_line and not cur_line[-1].strip():
del cur_line[-1]
# Convert current line back to a string and store it in list
# of all lines (return value).
if cur_line:
lines.append(indent + ''.join(cur_line))
return lines
# }}}
# function wrap() {{{
def wrap(text, width=70, **kwargs):
"""
Wrap a single paragraph of text, returning a list of wrapped lines.
"""
if sys.version_info < (2, 6):
return TextWrapper(width=width, **kwargs).wrap(text)
return textwrap.wrap(text, width=width, **kwargs)
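# Example (illustrative):
#     wrap('spam ham eggs', width=8)  ->  ['spam ham', 'eggs']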
# }}}
|
gpl-3.0
| -7,919,191,279,515,197,000 | 32.830685 | 79 | 0.524076 | false |
google-research/language
|
language/boolq/utils/ops_test.py
|
1
|
2375
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from language.boolq.utils import ops
import tensorflow.compat.v1 as tf
class OpsTest(tf.test.TestCase):
def test_lowercase(self):
with self.test_session() as sess:
test_str = [["Abc%@||", "DZ dzD", ""]]
self.assertEqual(
sess.run(ops.lowercase_op(tf.convert_to_tensor(test_str))).tolist(),
[[x.lower() for x in test_str[0]]])
def test_lowercase_unicode(self):
with self.test_session() as sess:
test_str = ["ŠČŽɬЩЮɦ"]
self.assertEqual(
sess.run(ops.lowercase_op(tf.convert_to_tensor(test_str))).tolist(),
[test_str[0].lower()])
def test_bucket_by_quantiles(self):
with self.test_session() as sess:
data = tf.data.Dataset.from_tensor_slices(list(range(10))).repeat()
data = data.apply(ops.bucket_by_quantiles(
len_fn=lambda x: x, batch_size=4, n_buckets=2,
hist_bounds=[2, 4, 6, 8]))
it = data.make_initializable_iterator()
sess.run(it.initializer)
sess.run(tf.local_variables_initializer())
next_op = it.get_next()
      # Let the model gather statistics; it sees 4*5=20 examples = 2 epochs,
      # so each bin should have a count of 4
for _ in range(5):
sess.run(next_op)
counts = sess.run(tf.local_variables()[0])
self.assertEqual(counts.tolist(), [4, 8, 12, 16, 20])
# At this point the model should perfectly quantize the input
for _ in range(4):
out = sess.run(next_op)
if out[0] < 5:
self.assertAllInRange(out, 0, 5)
else:
self.assertAllInRange(out, 5, 10)
if __name__ == "__main__":
tf.test.main()
|
apache-2.0
| -7,743,939,903,547,159,000 | 33.318841 | 78 | 0.650338 | false |
yephper/django
|
django/db/models/sql/datastructures.py
|
1
|
5769
|
"""
Useful auxiliary data structures for query construction. Not useful outside
the SQL domain.
"""
from django.db.models.sql.constants import INNER, LOUTER
class EmptyResultSet(Exception):
pass
class MultiJoin(Exception):
"""
Used by join construction code to indicate the point at which a
multi-valued join was attempted (if the caller wants to treat that
exceptionally).
"""
def __init__(self, names_pos, path_with_names):
self.level = names_pos
# The path travelled; this includes the path to the multijoin.
self.names_with_path = path_with_names
class Empty(object):
pass
class Join(object):
"""
Used by sql.Query and sql.SQLCompiler to generate JOIN clauses into the
FROM entry. For example, the SQL generated could be
LEFT OUTER JOIN "sometable" T1 ON ("othertable"."sometable_id" = "sometable"."id")
This class is primarily used in Query.alias_map. All entries in alias_map
must be Join compatible by providing the following attributes and methods:
- table_name (string)
- table_alias (possible alias for the table, can be None)
- join_type (can be None for those entries that aren't joined from
anything)
- parent_alias (which table is this join's parent, can be None similarly
to join_type)
- as_sql()
- relabeled_clone()
"""
def __init__(self, table_name, parent_alias, table_alias, join_type,
join_field, nullable):
# Join table
self.table_name = table_name
self.parent_alias = parent_alias
# Note: table_alias is not necessarily known at instantiation time.
self.table_alias = table_alias
# LOUTER or INNER
self.join_type = join_type
# A list of 2-tuples to use in the ON clause of the JOIN.
# Each 2-tuple will create one join condition in the ON clause.
self.join_cols = join_field.get_joining_columns()
# Along which field (or ForeignObjectRel in the reverse join case)
self.join_field = join_field
# Is this join nullable?
self.nullable = nullable
def as_sql(self, compiler, connection):
"""
Generates the full
LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params
clause for this join.
"""
join_conditions = []
params = []
qn = compiler.quote_name_unless_alias
qn2 = connection.ops.quote_name
# Add a join condition for each pair of joining columns.
for index, (lhs_col, rhs_col) in enumerate(self.join_cols):
join_conditions.append('%s.%s = %s.%s' % (
qn(self.parent_alias),
qn2(lhs_col),
qn(self.table_alias),
qn2(rhs_col),
))
# Add a single condition inside parentheses for whatever
# get_extra_restriction() returns.
extra_cond = self.join_field.get_extra_restriction(
compiler.query.where_class, self.table_alias, self.parent_alias)
if extra_cond:
extra_sql, extra_params = compiler.compile(extra_cond)
join_conditions.append('(%s)' % extra_sql)
params.extend(extra_params)
if not join_conditions:
# This might be a rel on the other end of an actual declared field.
declared_field = getattr(self.join_field, 'field', self.join_field)
raise ValueError(
"Join generated an empty ON clause. %s did not yield either "
"joining columns or extra restrictions." % declared_field.__class__
)
on_clause_sql = ' AND '.join(join_conditions)
alias_str = '' if self.table_alias == self.table_name else (' %s' % self.table_alias)
sql = '%s %s%s ON (%s)' % (self.join_type, qn(self.table_name), alias_str, on_clause_sql)
return sql, params
def relabeled_clone(self, change_map):
new_parent_alias = change_map.get(self.parent_alias, self.parent_alias)
new_table_alias = change_map.get(self.table_alias, self.table_alias)
return self.__class__(
self.table_name, new_parent_alias, new_table_alias, self.join_type,
self.join_field, self.nullable)
def __eq__(self, other):
if isinstance(other, self.__class__):
return (
self.table_name == other.table_name and
self.parent_alias == other.parent_alias and
self.join_field == other.join_field
)
return False
def demote(self):
new = self.relabeled_clone({})
new.join_type = INNER
return new
def promote(self):
new = self.relabeled_clone({})
new.join_type = LOUTER
return new
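# Illustrative sketch (internal API; the variable name is a placeholder):
# given a Join instance, demote()/promote() return relabeled copies with
# the join type switched:
#     inner = join.demote()    # join_type == INNER
#     outer = join.promote()   # join_type == LOUTER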
class BaseTable(object):
"""
The BaseTable class is used for base table references in FROM clause. For
example, the SQL "foo" in
SELECT * FROM "foo" WHERE somecond
could be generated by this class.
"""
join_type = None
parent_alias = None
def __init__(self, table_name, alias):
self.table_name = table_name
self.table_alias = alias
def as_sql(self, compiler, connection):
alias_str = '' if self.table_alias == self.table_name else (' %s' % self.table_alias)
base_sql = compiler.quote_name_unless_alias(self.table_name)
return base_sql + alias_str, []
def relabeled_clone(self, change_map):
return self.__class__(self.table_name, change_map.get(self.table_alias, self.table_alias))
|
bsd-3-clause
| 3,852,059,462,190,992,400 | 36.46 | 98 | 0.596637 | false |
Zephrys/monica
|
monica/monica.py
|
1
|
8780
|
r"""
monica is a command line chef that brings you tasty food
Usage:
monica surprise
monica restaurant <restaurant-id>
monica search [QUERY ...]
monica reviews <restaurant-id>
monica budget <budget>
monica cuisine (<cuisine-id>| list)
monica configure
monica (-h |--help)
monica
Options:
-h --help Show this screen.
--version Show version.
"""
import requests
from docopt import docopt
import json
from config import configure
try:
from config import config
except:
print 'No Configuration File Found'
from config import flag
from tabulate import tabulate
import random
__version__ = '0.1.0'
headers = {'Accept' : 'application/json', 'user_key': config['api_key'], 'User-Agent': 'curl/7.35.0'}
def url_shorten(longurl):
url = 'https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyA76APOb611GHyJS_7ly_l-0Btvr798LcE'
try:
response = requests.post(url, headers = {'Content-Type' : 'application/json'}, data = json.dumps({'longUrl': longurl}))
if response.status_code == 200:
data = response.json()
return data['id']
else:
return "Couldn't Shorten"
except:
return "Couldnt Shorten"
def surprise():
url = 'https://developers.zomato.com/api/v2.1/search?lat=%s&lon=%s&count=100' %(config['lat'], config['lon'])
try:
response = requests.get(url, headers=headers)
if response.status_code == 200:
data = response.json()
restaurants = data['restaurants']
while True:
if restaurants == []:
print 'Sorry nothing in your budget :('
return
choice = random.choice(restaurants)
budget = choice['restaurant']['average_cost_for_two']
if float(budget)/2 <= config['budget']:
restaurant = choice['restaurant']
break
else:
restaurants.remove(choice)
table = [[restaurant["id"] , restaurant["name"], restaurant["currency"] + " " + str(float(restaurant['average_cost_for_two'])/2) , restaurant["user_rating"]["aggregate_rating"], restaurant["location"]["locality"]]]
print tabulate(table, headers=["ID", "Name", "Budget", "Rating", "Locality"], tablefmt='fancy_grid')
else:
print 'Api Issues!'
except:
print 'Network Issues!'
def cuisine(cuisine):
if cuisine == 'list':
url = "https://developers.zomato.com/api/v2.1/cuisines?city_id=%s&lat%s&lon=%s" %(config['city_id'], config['lat'], config['lon'])
try:
response = requests.get(url, headers=headers)
if response.status_code == 200:
data = response.json()
cuisines = data['cuisines']
cuisine_list = []
for cuisine in cuisines:
cuisine = cuisine['cuisine']
cuisine_list.append([cuisine["cuisine_id"], cuisine["cuisine_name"]])
print tabulate(cuisine_list, headers=["ID", "Cuisine Name"],tablefmt='fancy_grid')
else:
print 'Api Error'
except:
print 'Network Error'
return
else:
url = "https://developers.zomato.com/api/v2.1/search?count=10&lat=%s&lon=%s&cuisines=%s&sort=cost" %(config['lat'], config['lon'], cuisine)
try:
response = requests.get(url, headers=headers)
if response.status_code == 200:
data = response.json()
count = data['results_found']
if count == 0:
print "Nothing Found!"
else:
restaurants = data["restaurants"]
restaurants_list = []
for restaurant in restaurants:
restaurant = restaurant['restaurant']
restaurants_list.append([restaurant["id"] , restaurant["name"], restaurant["currency"]
+ " " + str(float(restaurant['average_cost_for_two'])/2) , restaurant["user_rating"]["aggregate_rating"], restaurant["location"]["locality"]])
print tabulate(restaurants_list, headers=["ID", "Name", "Budget", "Rating", "Locality"],tablefmt='fancy_grid')
else:
print "API Issues"
except:
print 'Network Issues'
def restaurant(resid):
try:
url = 'https://developers.zomato.com/api/v2.1/restaurant?res_id=' + str(resid)
r = requests.get(url,headers=headers)
restaurants = []
if r.status_code != 200:
print "API Issues"
return
res = r.json()
rest = {}
rest['id'] = res['id']
rest['name'] = res['name']
rest['budget'] = float(res['average_cost_for_two'])/2
rest['menu'] = url_shorten(res['menu_url'])
rest['rating'] = res['user_rating']['aggregate_rating']
rest['locality'] = res['location']['locality']
restaurants.append(rest)
print tabulate([[i['id'], i['name'], i['budget'], i['rating'], i['locality']] for i in restaurants], headers=['ID', 'Name', 'Budget', 'Rating', 'Locality'],tablefmt='fancy_grid')
print "Find the menu at:\t", rest['menu']
except:
print "Network Issues!"
return
def reviews(id):
url = "https://developers.zomato.com/api/v2.1/reviews?res_id=%s&count=5"%(id)
try:
response = requests.get(url, headers=headers)
except:
print 'Network Issues!'
return
if response.status_code == 200:
data = response.json()
count= data["reviews_count"]
if count == 0:
print 'No Reviews!'
else:
for review in data["user_reviews"]:
review = review["review"]
print review["rating"]
print review["review_text"]
print "Posted: ",
print review["review_time_friendly"]
print "--------------"
else:
print 'Api Issues'
def search(query):
try:
url = 'https://developers.zomato.com/api/v2.1/search?q=' + str(" ".join(query)) + '&count=10&lat=' + str(config['lat']) + '&lon=' + str(config['lon'])
r = requests.get(url,headers=headers)
restaurants = []
if r.status_code != 200:
print "Api Issues"
return
if len(r.json()['restaurants']) <= 0:
print "Api Issues"
return
for res in r.json()['restaurants']:
rest = {}
rest['id'] = res['restaurant']['id']
rest['name'] = res['restaurant']['name']
rest['budget'] = res['restaurant']['currency'] + ' ' + str(float(res['restaurant']['average_cost_for_two'])/2)
rest['rating'] = res['restaurant']['user_rating']['aggregate_rating']
rest['locality'] = res['restaurant']['location']['locality']
restaurants.append(rest)
print tabulate([[i['id'], i['name'], i['budget'], i['rating'], i['locality']] for i in restaurants], headers=['ID', 'Name', 'Budget', 'Rating', 'Locality'],tablefmt='fancy_grid')
except:
print "Network Error!"
def budget(max_budget):
try:
url1 = 'https://developers.zomato.com/api/v2.1/search?q=&count=100&lat=' + str(config['lat']) + '&lon=' + str(config['lon']) + '&sort=cost&order=desc'
url2 = 'https://developers.zomato.com/api/v2.1/search?q=&count=100&lat=' + str(config['lat']) + '&lon=' + str(config['lon']) + '&sort=cost&order=asc'
r1 = requests.get(url1,headers=headers)
r2 = requests.get(url2, headers=headers)
restaurants = []
if r1.status_code != 200 or r2.status_code !=200:
print "API Issues"
return
if len(r1.json()['restaurants']) <= 0 and len(r2.json()['restaurants']) <= 0:
print "API Issues"
return
data = r1.json()['restaurants'] + r2.json()['restaurants']
for res in data:
if float(res['restaurant']['average_cost_for_two'])/2 <= int(max_budget):
rest = {}
rest['id'] = res['restaurant']['id']
rest['name'] = res['restaurant']['name']
rest['budget'] = res['restaurant']['currency'] + ' ' + str(float(res['restaurant']['average_cost_for_two'])/2)
rest['rating'] = res['restaurant']['user_rating']['aggregate_rating']
rest['locality'] = res['restaurant']['location']['locality']
restaurants.append(rest)
else:
continue
print tabulate([[i['id'], i['name'], i['budget'], i['rating'], i['locality']] for i in restaurants][:10], headers=['ID', 'Name', 'Budget', 'Rating', 'Locality'],tablefmt='fancy_grid')
except:
print "Network Issues"
return
def main():
'''monica helps you order food from the command line'''
arguments = docopt(__doc__, version=__version__)
if arguments['configure'] and flag:
configure()
if arguments['cuisine']:
if arguments['list']:
cuisine('list')
else:
cuisine(arguments['<cuisine-id>'])
elif arguments['surprise']:
surprise()
elif arguments['reviews']:
reviews(arguments['<restaurant-id>'])
elif arguments['search']:
search(arguments['QUERY'])
elif arguments['budget']:
try:
money = arguments['<budget>']
money = float(money)
budget(money)
except:
print 'Budget should be a number!'
elif arguments['restaurant']:
restaurant(arguments['<restaurant-id>'])
else:
print (__doc__)
if __name__ == '__main__':
main()
|
mit
| -6,934,554,040,084,423,000 | 34.695122 | 220 | 0.609795 | false |
netvl/contrib-python-qubell-client
|
qubell/api/private/environment.py
|
1
|
9095
|
# Copyright (c) 2013 Qubell Inc., http://qubell.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from qubell.api.globals import ZONE_NAME, DEFAULT_ENV_NAME
from qubell.api.tools import lazyproperty
__author__ = "Vasyl Khomenko"
__copyright__ = "Copyright 2013, Qubell.com"
__license__ = "Apache"
__email__ = "vkhomenko@qubell.com"
import logging as log
import simplejson as json
import copy
from qubell.api.private import exceptions
from qubell.api.private.common import QubellEntityList, Entity
from qubell.api.provider.router import ROUTER as router
class Environment(Entity):
def __init__(self, organization, id):
self.organization = organization
self.organizationId = self.organization.organizationId
self.environmentId = self.id = id
#todo: make as properties
self.policies = []
self.markers = []
self.properties = []
@lazyproperty
def zoneId(self):
return self.json()['backend']
@lazyproperty
def services(self):
from qubell.api.private.instance import InstanceList
return InstanceList(list_json_method=self.list_services_json, organization=self)
@property
def name(self):
return self.json()['name']
@property
def isDefault(self):
return self.json()['isDefault']
def __getattr__(self, key):
resp = self.json()
if key not in resp:
raise exceptions.NotFoundError('Cannot get property %s' % key)
return resp[key] or False
@staticmethod
def new(organization, name, zone_id=None, default=False):
log.info("Creating environment: %s" % name)
if not zone_id:
zone_id = organization.zone.zoneId
data = {'isDefault': default,
'name': name,
'backend': zone_id,
'organizationId': organization.organizationId}
log.debug(data)
resp = router.post_organization_environment(org_id=organization.organizationId, data=json.dumps(data)).json()
env = Environment(organization, id=resp['id'])
log.info("Environment created: %s (%s)" % (name,env.environmentId))
return env
def restore(self, config, clean=False, timeout=10):
config = copy.deepcopy(config)
if clean:
self.clean()
for marker in config.pop('markers', []):
self.add_marker(marker)
for policy in config.pop('policies', []):
self.add_policy(policy)
for property in config.pop('properties', []):
self.add_property(**property)
for service in config.pop('services', []):
service.pop('type', None) # discard the type, it is not needed for the lookup
serv = self.organization.get_service(id=service.pop('id', None), name=service.pop('name'))
if not serv in self.services:
self.add_service(serv)
for service in self.services:
service.ready()
def json(self):
return router.get_environment(org_id=self.organizationId, env_id=self.environmentId).json()
def delete(self):
router.delete_environment(org_id=self.organizationId, env_id=self.environmentId)
return True
def set_as_default(self):
data = json.dumps({'environmentId': self.id})
return router.put_organization_default_environment(org_id=self.organizationId, data=data).json()
def list_available_services_json(self):
return router.get_environment_available_services(org_id=self.organizationId, env_id=self.environmentId).json()
def list_services_json(self):
return self.json()['services']
_put_environment = lambda self, data: router.put_environment(org_id=self.organizationId, env_id=self.environmentId, data=data)
def add_service(self, service):
resp = None
if service not in self.services:
time.sleep(3) # TODO: Need to wait until strategy comes up
data = self.json()
data['serviceIds'].append(service.instanceId)
data['services'].append(service.json())
log.info("Adding service %s (%s) to environment %s (%s)" % (service.name, service.id, self.name, self.id))
resp = self._put_environment(data=json.dumps(data))
if service.is_secure_vault:
user_data = service.userData
if 'defaultKey' in user_data:
key = user_data['defaultKey']
else:
key = service.regenerate()['id']
self.add_policy(
{"action": "provisionVms",
"parameter": "publicKeyId",
"value": key})
return resp.json() if resp else None
def remove_service(self, service):
data = self.json()
data['serviceIds'].remove(service.instanceId)
data['services'] = [s for s in data['services'] if s['id'] != service.id]
log.info("Removing service %s (%s) from environment %s (%s)" % (service.name, service.id, self.name, self.id))
resp = self._put_environment(data=json.dumps(data))
return resp.json()
def add_marker(self, marker):
time.sleep(0.5) # TODO: Need to wait until strategy comes up
data = self.json()
data['markers'].append({'name': marker})
log.info("Adding marker %s to environment %s (%s)" % (marker, self.name, self.id))
resp = self._put_environment(data=json.dumps(data))
self.markers.append(marker)
return resp.json()
def remove_marker(self, marker):
data = self.json()
data['markers'].remove({'name': marker})
log.info("Removing marker %s from environment %s (%s)" % (marker, self.name, self.id))
resp = self._put_environment(data=json.dumps(data))
self.markers.remove(marker)
return resp.json()
def add_property(self, name, type, value):
time.sleep(0.5) # TODO: Need to wait until strategy comes up
data = self.json()
data['properties'].append({'name': name, 'type': type, 'value': value})
log.info("Adding property %s to environment %s (%s)" % (name, self.name, self.id))
resp = self._put_environment(data=json.dumps(data))
self.properties.append({'name': name, 'type': type, 'value': value})
return resp.json()
set_property = add_property
def remove_property(self, name):
data = self.json()
props = [p for p in data['properties'] if p['name'] == name]
if len(props) < 1:
log.error('Unable to remove property %s. Not found.' % name)
return None
data['properties'].remove(props[0])
log.info("Removing property %s from environment %s (%s)" % (name, self.name, self.id))
return self._put_environment(data=json.dumps(data)).json()
def clean(self):
data = self.json()
data['serviceIds'] = []
data['services'] = []
log.info("Cleaning environment %s (%s)" % (self.name, self.id))
return self._put_environment(data=json.dumps(data)).json()
def add_policy(self, new):
time.sleep(0.5) # TODO: Need to wait until strategy comes up
data = self.json()
data['policies'].append(new)
log.info("Adding policy %s.%s to environment %s (%s)" % (new.get('action'), new.get('parameter'), self.name, self.id))
resp = self._put_environment(data=json.dumps(data))
self.policies.append(new)
return resp.json()
def remove_policy(self):
raise NotImplementedError
def set_backend(self, zone):
        raise exceptions.ApiError("Changing the environment backend is not supported since 24.x")
class EnvironmentList(QubellEntityList):
base_clz = Environment
@property
def default(self):
"""
        Returns the environment marked as default.
        When ZONE_NAME is set, the default flag makes no sense; a special environment
        bound to the proper zone is returned instead.
"""
if ZONE_NAME:
log.info("Getting or creating default environment for zone with name '{0}'".format(DEFAULT_ENV_NAME()))
zone_id = self.organization.zones[ZONE_NAME].id
return self.organization.get_or_create_environment(name=DEFAULT_ENV_NAME(), zone=zone_id)
        def_envs = [env_j["id"] for env_j in self.json() if env_j["isDefault"]]
        if len(def_envs) > 1:
log.warning('Found more than one default environment. Picking last.')
return self[def_envs[-1]]
elif len(def_envs) == 1:
return self[def_envs[0]]
raise exceptions.NotFoundError('Unable to get default environment')
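# --- Hedged usage sketch (comments only: these calls hit a live Qubell API). ---
# `organization` is an assumed, already-authenticated SDK object; the attribute
# name `organization.environments` is an assumption, not verified API surface.
#
#   env = organization.environments.default            # EnvironmentList.default above
#   env.add_marker('smoke-test')                       # PUTs the updated environment JSON
#   env.add_property('region', 'string', 'us-east-1')  # set_property is an alias of add_property
#   env.clean()                                        # detaches all services from the environment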
|
apache-2.0
| 8,695,219,347,984,874,000 | 37.538136 | 130 | 0.624739 | false |
tsheets/api_python
|
tsheets/model.py
|
1
|
5132
|
import pytz
from . import helpers
import dateutil.parser
from datetime import datetime, date
class Model(object):
_accessors = {}
_default_type = "anything"
def __init__(self, **kwargs):
self._dynamic_accessors = []
if kwargs:
self.__class__.mass_assign(self, kwargs)
@classmethod
def add_field(cls, fname, type_f, options={}):
setattr(cls, fname, None)
if cls not in Model._accessors:
Model._accessors[cls] = []
exclude = options.get('exclude', [])
Model._accessors[cls].append({'name': fname, 'type': type_f, 'exclude': exclude})
@classmethod
def add_default_type(cls, data_type):
cls._default_type = data_type
@classmethod
def from_raw(cls, hash):
instance = cls()
return cls.mass_assign(instance, hash)
@classmethod
def mass_assign(cls, instance, hash):
dynamic = instance._dynamic_accessors
        for k, v in hash.items():
            casted = cls.cast_raw(v, k)
            # Attributes not declared via add_field are tracked as dynamic.
            if not hasattr(instance, k):
                dynamic.append({'name': k})
            setattr(instance, k, casted)
instance._dynamic_accessors = dynamic
return instance
@classmethod
def type_for(cls, field_name):
accessor = Model._accessors.get(cls, [])
for i in accessor:
if i["name"] == field_name:
return i["type"]
return cls._default_type
@classmethod
def type_for_key(cls, key):
return cls.type_for(key)
@classmethod
def cast_raw(cls, value, key, type=None):
if value is None:
return None
if type:
type_symbol = type
else:
type_symbol = cls.type_for_key(key)
if isinstance(type_symbol, list):
value = [cls.cast_raw(i, key, type_symbol[0]) for i in value]
return value
elif type_symbol == str:
return value
elif type_symbol == int:
return int(value)
        elif type_symbol == datetime:
            try:
                return dateutil.parser.parse(value)
            except Exception:
                return None
        elif type_symbol == date:
            try:
                return datetime.strptime(value, "%Y-%m-%d").date()
            except Exception:
                return None
elif type_symbol == bool:
return value == True
elif type_symbol == dict:
return value
elif type_symbol == float:
return float(value)
elif type_symbol == object:
if not value:
return {}
return value
elif type_symbol == "anything":
return value
else:
return helpers.to_class(type_symbol)().from_raw(value)
    def cast_to_raw(self, value, key, type=None):
type_symbol = type or self.__class__.type_for_key(key)
if isinstance(type_symbol, list):
value = [self.cast_to_raw(i, key, type_symbol[0]) for i in value]
return value
elif type_symbol == str:
return value
elif type_symbol == int:
return value
        elif type_symbol == datetime:
            if not value:
                return ""
            try:
                if not value.tzinfo:
                    return value.replace(tzinfo=pytz.UTC).replace(microsecond=0).isoformat()
                return value.isoformat()
            except Exception:
                return None
        elif type_symbol == date:
            if not value:
                return ""
            try:
                return value.strftime("%Y-%m-%d")
            except Exception:
                return None
elif type_symbol == bool:
return value
elif type_symbol == dict:
return value
elif type_symbol == float:
return value
elif type_symbol == object:
if not value:
return ""
return value
elif type_symbol == "anything":
return value
else:
if not value:
return None
return value.to_raw()
def to_raw(self, mode=None):
attributes = self.get_attributes(mode)
obj = {}
for k, v in attributes.items():
obj[k] = self.cast_to_raw(v, k)
return obj
def allowed_for_mode(self, mode, acc):
return (mode is None) or (not bool(acc['exclude'])) or not (mode in acc['exclude'])
def attribute_for_accessors(self, accessor):
sum = {}
for acc in accessor:
sum[acc['name']] = self.__getattribute__(acc['name'])
return sum
    def get_attributes(self, mode=None):
        _accessors = Model._accessors[self.__class__] if self.__class__ in Model._accessors else []
        acc = [a for a in _accessors if self.allowed_for_mode(mode, a)]
        return self.attribute_for_accessors(acc)
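# --- Hedged example: a minimal Model subclass round-trip. ---
# `Timesheet` and its fields are illustrative assumptions, not the real TSheets
# schema; the block only exercises add_field/from_raw/to_raw defined above.
if __name__ == "__main__":
    class Timesheet(Model):
        pass
    Timesheet.add_field('id', int)
    Timesheet.add_field('day', date)
    Timesheet.add_field('notes', str, options={'exclude': ['create']})
    ts = Timesheet.from_raw({'id': '7', 'day': '2016-01-13', 'notes': 'lunch'})
    print(ts.to_raw())               # {'id': 7, 'day': '2016-01-13', 'notes': 'lunch'}
    print(ts.to_raw(mode='create'))  # 'notes' is dropped by its exclude option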
|
mit
| 2,925,916,794,490,887,000 | 29.366864 | 99 | 0.521239 | false |
rupakc/Kaggle-Compendium
|
Santas Stolen Sleigh/SantaUtil.py
|
1
|
6924
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 13 23:21:29 2016
Defines a set of utility functions to be used for prediction
@author: Rupak Chakraborty
"""
import math
from trip import Trip
from gift import Gift
import random
import time
import pandas as pd
import operator
RADIUS_EARTH = 6773  # Earth radius in km used throughout (the conventional mean radius is ~6371 km)
NORTH_POLE_LAT = 90
NORTH_POLE_LONG = 0
EMPTY_SLEIGH_WEIGHT = 10
SLEIGH_CAPACITY = 1000
random.seed(time.time())
gift_filename = "Santa's Stolen Sleigh/gifts.csv"
"""
Calculates the haversine distance between two given points
The two points are the values of the latitude and longitude
in degrees
Params:
--------
lat_first - Latitude of the first point
long_first - Longitude of the first point
lat_second - Latitude of second point
long_second - Longitude of the second point
Returns:
---------
The haversine distance between the two given points i.e. a float
"""
def haversineDistance(lat_first,long_first,lat_second,long_second):
lat_first = math.radians(lat_first)
long_first = math.radians(long_first)
lat_second = math.radians(lat_second)
long_second = math.radians(long_second)
sine_squared_lat = math.pow(math.sin((lat_first-lat_second)/2.0),2.0)
sine_squared_long = math.pow(math.sin((long_first-long_second)/2.0),2.0)
cos_lat_term = math.cos(lat_first)*math.cos(lat_second)*sine_squared_long
total_term = cos_lat_term + sine_squared_lat
distance = 2*RADIUS_EARTH*math.asin(math.sqrt(total_term))
return distance
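# Hedged sanity check: the pole-to-equator arc is a quarter great circle, so the
# call below should return roughly (pi / 2) * RADIUS_EARTH for whatever radius
# constant is configured above.
#
#   haversineDistance(90, 0, 0, 0)  # ~= 1.5708 * RADIUS_EARTH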
"""
Defines the fitness function for the trip list i.e. all deliveries
The total fitness is defined as the weighted sum of distances
Params:
--------
trip_list: A List of trips which Santa needs to take (A list containing the trip object)
Returns:
---------
Total Cost of the given trip list (i.e. Fitness)
"""
def tripFitness(trip_list):
total_cost = 0
for trip in trip_list:
total_cost = total_cost + trip.trip_cost
return total_cost
"""
Given a list of gifts calculates the cost of the trip (i.e. Weighted Distance)
Params:
--------
gift_list: A list of gifts in the order in which they have to be delivered
Returns:
---------
Cost of the trip with the given order of gifts (i.e. A Floating point number)
"""
def tripCost(gift_list):
gift_size = len(gift_list)
initial_gift_weight = tripWeightUtil(gift_list,0,gift_size-1)
weighted_distance = initial_gift_weight*haversineDistance(NORTH_POLE_LAT,NORTH_POLE_LONG,gift_list[0].latitude,gift_list[0].longitude)
for i in range(gift_size-1):
remaining_weight = tripWeightUtil(gift_list,i+1,gift_size-1)
distance = haversineDistance(gift_list[i].latitude,gift_list[i].longitude,gift_list[i+1].latitude,gift_list[i+1].longitude)
weighted_distance = weighted_distance + remaining_weight*distance
returning_distance = haversineDistance(gift_list[gift_size-1].latitude,gift_list[gift_size-1].longitude,NORTH_POLE_LAT,NORTH_POLE_LONG)
weighted_distance = weighted_distance + EMPTY_SLEIGH_WEIGHT*returning_distance
return weighted_distance
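# Worked restatement of the cost computed above (hedged paraphrase of the code):
# with the North Pole as p_0 and gifts delivered in order at p_1..p_n, where
# d(.,.) is the haversine distance,
#
#   cost = sum_{i=0..n-1} d(p_i, p_{i+1}) * (weight of gifts not yet delivered)
#        + d(p_n, North Pole) * EMPTY_SLEIGH_WEIGHT
#
# i.e. each leg is weighted by the cargo still on board, and only the fixed
# empty-sleigh weight is charged on the return leg in this implementation.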
"""
Utility function to calculate the cumulative weight of gifts in a given range
Both ends of the range are included
Params:
--------
gift_list : List of gift objects
start_index : Starting index for gift list
end_index : Ending index of the gift list
Returns:
---------
Returns the sum of weights in a given range
"""
def tripWeightUtil(gift_list,start_index,end_index):
total_weight = 0
while start_index <= end_index:
total_weight = total_weight + gift_list[start_index].weight
start_index = start_index + 1
return total_weight
"""
Applies the mutation operator on trip list i.e. swaps two trips
Params:
-------
trip_list: List containing the trips taken by Santa
Returns:
--------
A new list containing the trip list with values swapped
"""
def mutateTripList(trip_list):
i,j = generateSwapIndices(len(trip_list))
temp = trip_list[i]
trip_list[i] = trip_list[j]
trip_list[j] = temp
return trip_list
"""
Applies the mutation operator on the gift list i.e. swaps two gifts in a list
Params:
-------
gift_list: List containing the gifts taken by Santa
Returns:
--------
A new list containing the gift list with values swapped
"""
def mutateGiftList(gift_list):
i,j = generateSwapIndices(len(gift_list))
temp = gift_list[i]
gift_list[i] = gift_list[j]
gift_list[j] = temp
return gift_list
"""
Utility function to generate two distinct random integers from zero to a given range
Params:
--------
max_size: Integer containing the maximum limit for generation of the random integers
Returns:
--------
Two distinct random integers between 0 and a given max_size
"""
def generateSwapIndices(max_size):
    a = random.randint(0, max_size - 1)
    b = random.randint(0, max_size - 1)
    # Re-roll until the indices are distinct (assumes max_size > 1)
    while b == a:
        b = random.randint(0, max_size - 1)
    return a, b
"""
Returns the dataFrame containing the gift information
Params:
-------
String containing the filename from which the information is to be extracted
Returns:
--------
Pandas Dataframe object containing the gift information
"""
def getGiftList(filename):
    giftFrame = pd.read_csv(filename)
    gift_list = []
    for i in range(len(giftFrame)):
        gift_series = giftFrame.iloc[i]
        gift = Gift(gift_series.GiftId, gift_series.Latitude, gift_series.Longitude, gift_series.Weight)
        gift_list.append(gift)
    return gift_list
"""
Sorts a given map by its values and returns a list containing the sorted tuples
"""
def sortMapByValues(map_to_sort):
sorted_map = sorted(map_to_sort.items(), key=operator.itemgetter(1),reverse=False)
return sorted_map
"""
Sorts the given population by its fitness value
Params:
-------
initial_population: List containing the initial population
Returns:
--------
List of tuples containing the indices of the initial population and its fitness
"""
def sortPopulationByFitness(initial_population):
    i = 0
fitness_population_map = {}
for trip_gene in initial_population:
fitness_population_map[i] = tripFitness(trip_gene)
i = i + 1
ordered_fitness_list = sortMapByValues(fitness_population_map)
return ordered_fitness_list
"""
Given all the trips in a list returns the one with the maximum cost and its index
Params:
---------
trip_list: List of trips to be taken for delivery
Returns:
--------
The trip with the maximum cost and its corresponding index
"""
def maximumTripCost(trip_list):
    index = 0
    max_trip = trip_list[0]
    for i, trip in enumerate(trip_list):
        if trip.trip_cost > max_trip.trip_cost:
            max_trip = trip
            index = i
    return index, max_trip
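# --- Hedged demo of the pure helpers above; needs no gifts.csv file. ---
# The printed distance depends on the RADIUS_EARTH constant and is a sanity
# check, not a verified competition value.
if __name__ == "__main__":
    print(haversineDistance(NORTH_POLE_LAT, NORTH_POLE_LONG, 0, 0))  # ~pi/2 * RADIUS_EARTH
    print(generateSwapIndices(10))  # two distinct indices in [0, 9]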
|
mit
| 4,864,461,835,213,508,000 | 24.09058 | 139 | 0.677643 | false |
michhar/ms-pythonbot
|
msbot/__init__.py
|
1
|
1583
|
"""
The flask application package.
"""
#####################################################################
# Create the Flask app
#####################################################################
from flask import Flask
from .callback_utils import Callbacks
import os
from flask_pyoidc.flask_pyoidc import OIDCAuthentication
app = Flask(__name__)
### Flask-pyoidc ###
PORT = os.getenv('SERVER_PORT', '3978')
config = {
'SERVER_NAME': os.getenv('SERVER_NAME', 'localhost'), # + ':' + PORT,
'SECRET_KEY': 'dev',
'PREFERRED_URL_SCHEME': 'https',
'DEBUG': True
}
app.config.update(config)
client_info = {
'client_id': os.getenv('MICROSOFT_CLIENT_ID', 'foo'),
'client_secret': os.getenv('MICROSOFT_CLIENT_SECRET', 'bar'),
'scope': 'https://api.botframework.com/.default'
}
provider_config = {
'issuer': 'https://api.botframework.com',
'authorization_endpoint': 'https://login.microsoftonline.com/botframework.com/oauth2/v2.0/token',
'token_endpoint': 'https://login.microsoftonline.com/botframework.com/oauth2/v2.0/token',
# 'userinfo_endpoint': 'https://login.microsoftonline.com/common/oauth2/v2.0/userinfo',
# 'grant_type': 'client_credentials',
'scope': 'https://api.botframework.com/.default'
}
auth = OIDCAuthentication(app,
provider_configuration_info=provider_config,
client_registration_info=client_info)
app_backend = Callbacks()
import msbot.views
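# --- Hedged run sketch: how this module-level `app` is typically served. ---
# The host value is an assumption; PORT is read above (default '3978'):
#
#   if __name__ == '__main__':
#       app.run(host='0.0.0.0', port=int(PORT))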
|
mit
| -4,044,530,430,755,034,000 | 29.442308 | 109 | 0.561592 | false |
DataONEorg/d1_python
|
lib_common/src/d1_common/system_metadata.py
|
1
|
14840
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for handling the DataONE SystemMetadata type.
DataONE API methods such as `MNStorage.create()` require a Science Object and System
Metadata pair.
Examples:
Example v2 SystemMetadata XML document with all optional values included:
::
<v2:systemMetadata xmlns:v2="http://ns.dataone.org/service/types/v2.0">
<!--Optional:-->
<serialVersion>11</serialVersion>
<identifier>string</identifier>
<formatId>string</formatId>
<size>11</size>
<checksum algorithm="string">string</checksum>
<!--Optional:-->
<submitter>string</submitter>
<rightsHolder>string</rightsHolder>
<!--Optional:-->
<accessPolicy>
<!--1 or more repetitions:-->
<allow>
<!--1 or more repetitions:-->
<subject>string</subject>
<!--1 or more repetitions:-->
<permission>read</permission>
</allow>
</accessPolicy>
<!--Optional:-->
<replicationPolicy replicationAllowed="true" numberReplicas="3">
<!--Zero or more repetitions:-->
<preferredMemberNode>string</preferredMemberNode>
<!--Zero or more repetitions:-->
<blockedMemberNode>string</blockedMemberNode>
</replicationPolicy>
<!--Optional:-->
<obsoletes>string</obsoletes>
<obsoletedBy>string</obsoletedBy>
<archived>true</archived>
<dateUploaded>2014-09-18T17:18:33</dateUploaded>
<dateSysMetadataModified>2006-08-19T11:27:14-06:00</dateSysMetadataModified>
<originMemberNode>string</originMemberNode>
<authoritativeMemberNode>string</authoritativeMemberNode>
<!--Zero or more repetitions:-->
<replica>
<replicaMemberNode>string</replicaMemberNode>
<replicationStatus>failed</replicationStatus>
<replicaVerified>2013-05-21T19:02:49-06:00</replicaVerified>
</replica>
<!--Optional:-->
<seriesId>string</seriesId>
<!--Optional:-->
<mediaType name="string">
<!--Zero or more repetitions:-->
<property name="string">string</property>
</mediaType>
<!--Optional:-->
<fileName>string</fileName>
</v2:systemMetadata>
"""
import datetime
import logging
import os
import d1_common.checksum
import d1_common.date_time
import d1_common.type_conversions
import d1_common.types.dataoneTypes
import d1_common.wrap.access_policy
import d1_common.xml
logger = logging.getLogger(__name__)
SYSMETA_ROOT_CHILD_LIST = [
"serialVersion",
"identifier",
"formatId",
"size",
"checksum",
"submitter",
"rightsHolder",
"accessPolicy",
"replicationPolicy",
"obsoletes",
"obsoletedBy",
"archived",
"dateUploaded",
"dateSysMetadataModified",
"originMemberNode",
"authoritativeMemberNode",
"replica",
"seriesId",
"mediaType",
"fileName",
]
def is_sysmeta_pyxb(sysmeta_pyxb):
"""Args: sysmeta_pyxb: Object that may or may not be a SystemMetadata PyXB object.
Returns:
bool:
- ``True`` if ``sysmeta_pyxb`` is a SystemMetadata PyXB object.
- ``False`` if ``sysmeta_pyxb`` is not a PyXB object or is a PyXB object of a
type other than SystemMetadata.
"""
return (
d1_common.type_conversions.is_pyxb_d1_type(sysmeta_pyxb)
and d1_common.type_conversions.pyxb_get_type_name(sysmeta_pyxb)
== "SystemMetadata"
)
def normalize_in_place(sysmeta_pyxb, reset_timestamps=False, reset_filename=False):
"""Normalize SystemMetadata PyXB object in-place.
Args:
sysmeta_pyxb:
SystemMetadata PyXB object to normalize.
reset_timestamps: bool
``True``: Timestamps in the SystemMetadata are set to a standard value so that
objects that are compared after normalization register as equivalent if only
their timestamps differ.
Notes:
The SystemMetadata is normalized by removing any redundant information and
ordering all sections where there are no semantics associated with the order. The
normalized SystemMetadata is intended to be semantically equivalent to the
un-normalized one.
"""
if sysmeta_pyxb.accessPolicy is not None:
sysmeta_pyxb.accessPolicy = d1_common.wrap.access_policy.get_normalized_pyxb(
sysmeta_pyxb.accessPolicy
)
if getattr(sysmeta_pyxb, "mediaType", False):
d1_common.xml.sort_value_list_pyxb(sysmeta_pyxb.mediaType.property_)
if getattr(sysmeta_pyxb, "replicationPolicy", False):
d1_common.xml.sort_value_list_pyxb(
sysmeta_pyxb.replicationPolicy.preferredMemberNode
)
d1_common.xml.sort_value_list_pyxb(
sysmeta_pyxb.replicationPolicy.blockedMemberNode
)
d1_common.xml.sort_elements_by_child_values(
sysmeta_pyxb.replica,
["replicaVerified", "replicaMemberNode", "replicationStatus"],
)
sysmeta_pyxb.archived = bool(sysmeta_pyxb.archived)
if reset_timestamps:
epoch_dt = datetime.datetime(1970, 1, 1, tzinfo=d1_common.date_time.UTC())
sysmeta_pyxb.dateUploaded = epoch_dt
sysmeta_pyxb.dateSysMetadataModified = epoch_dt
for replica_pyxb in getattr(sysmeta_pyxb, "replica", []):
replica_pyxb.replicaVerified = epoch_dt
else:
sysmeta_pyxb.dateUploaded = d1_common.date_time.round_to_nearest(
sysmeta_pyxb.dateUploaded
)
sysmeta_pyxb.dateSysMetadataModified = d1_common.date_time.round_to_nearest(
sysmeta_pyxb.dateSysMetadataModified
)
for replica_pyxb in getattr(sysmeta_pyxb, "replica", []):
replica_pyxb.replicaVerified = d1_common.date_time.round_to_nearest(
replica_pyxb.replicaVerified
)
if reset_filename:
sysmeta_pyxb.fileName = None
def are_equivalent_pyxb(a_pyxb, b_pyxb, ignore_timestamps=False, ignore_filename=False):
"""Determine if SystemMetadata PyXB objects are semantically equivalent.
Normalize then compare SystemMetadata PyXB objects for equivalency.
Args:
a_pyxb, b_pyxb : SystemMetadata PyXB objects to compare
ignore_timestamps: bool
``True``: Timestamps are ignored during the comparison.
ignore_filename: bool
``True``: FileName elements are ignored during the comparison.
This is necessary in cases where GMN returns a generated filename because one
was not provided in the SysMeta.
Returns: bool:
``True`` if SystemMetadata PyXB objects are semantically equivalent.
Notes:
The SystemMetadata is normalized by removing any redundant information and
ordering all sections where there are no semantics associated with the order. The
normalized SystemMetadata is intended to be semantically equivalent to the
un-normalized one.
"""
normalize_in_place(a_pyxb, ignore_timestamps, ignore_filename)
normalize_in_place(b_pyxb, ignore_timestamps, ignore_filename)
a_xml = d1_common.xml.serialize_to_xml_str(a_pyxb)
b_xml = d1_common.xml.serialize_to_xml_str(b_pyxb)
are_equivalent = d1_common.xml.are_equivalent(a_xml, b_xml)
if not are_equivalent:
logger.debug("XML documents not equivalent:")
logger.debug(d1_common.xml.format_diff_xml(a_xml, b_xml))
return are_equivalent
def are_equivalent_xml(a_xml, b_xml, ignore_timestamps=False):
"""Determine if two SystemMetadata XML docs are semantically equivalent.
Normalize then compare SystemMetadata XML docs for equivalency.
Args:
a_xml, b_xml: bytes
UTF-8 encoded SystemMetadata XML docs to compare
ignore_timestamps: bool
``True``: Timestamps in the SystemMetadata are ignored so that objects that are
compared register as equivalent if only their timestamps differ.
Returns: bool:
``True`` if SystemMetadata XML docs are semantically equivalent.
Notes:
The SystemMetadata is normalized by removing any redundant information and
ordering all sections where there are no semantics associated with the order. The
normalized SystemMetadata is intended to be semantically equivalent to the
un-normalized one.
"""
"""Normalizes then compares SystemMetadata XML docs for equivalency.
``a_xml`` and ``b_xml`` should be utf-8 encoded DataONE System Metadata XML
documents.
"""
return are_equivalent_pyxb(
d1_common.xml.deserialize(a_xml),
d1_common.xml.deserialize(b_xml),
ignore_timestamps,
)
def clear_elements(sysmeta_pyxb, clear_replica=True, clear_serial_version=True):
"""{clear_replica} causes any replica information to be removed from the object.
{clear_replica} ignores any differences in replica information, as this information
is often different between MN and CN.
"""
if clear_replica:
sysmeta_pyxb.replica = None
if clear_serial_version:
sysmeta_pyxb.serialVersion = None
sysmeta_pyxb.replicationPolicy = None
def update_elements(dst_pyxb, src_pyxb, el_list):
"""Copy elements specified in ``el_list`` from ``src_pyxb`` to ``dst_pyxb``
Only elements that are children of root are supported. See
SYSMETA_ROOT_CHILD_LIST.
If an element in ``el_list`` does not exist in ``src_pyxb``, it is removed from
``dst_pyxb``.
"""
invalid_element_set = set(el_list) - set(SYSMETA_ROOT_CHILD_LIST)
if invalid_element_set:
raise ValueError(
'Passed one or more invalid elements. invalid="{}"'.format(
", ".join(sorted(list(invalid_element_set)))
)
)
for el_str in el_list:
setattr(dst_pyxb, el_str, getattr(src_pyxb, el_str, None))
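# Hedged example: copy only the obsolescence chain between two sysmeta objects.
# Both element names appear in SYSMETA_ROOT_CHILD_LIST above, so no ValueError
# is raised:
#
#   update_elements(dst_pyxb, src_pyxb, ["obsoletes", "obsoletedBy"])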
def generate_system_metadata_pyxb(
pid,
format_id,
sciobj_stream,
submitter_str,
rights_holder_str,
authoritative_mn_urn,
# SeriesID and obsolescence
sid=None,
obsoletes_pid=None,
obsoleted_by_pid=None,
is_archived=False,
#
serial_version=1,
uploaded_datetime=None,
modified_datetime=None,
file_name=None,
origin_mn_urn=None,
# Access Policy
is_private=False,
access_list=None,
# Media Type
media_name=None,
media_property_list=None,
# Replication Policy
is_replication_allowed=False,
preferred_mn_list=None,
blocked_mn_list=None,
#
pyxb_binding=None,
):
"""Generate a System Metadata PyXB object
Args:
pid:
format_id:
sciobj_stream:
submitter_str:
rights_holder_str:
authoritative_mn_urn:
pyxb_binding:
sid:
obsoletes_pid:
obsoleted_by_pid:
is_archived:
serial_version:
uploaded_datetime:
modified_datetime:
file_name:
origin_mn_urn:
access_list:
is_private:
media_name:
media_property_list:
is_replication_allowed:
preferred_mn_list:
blocked_mn_list:
Returns:
systemMetadata PyXB object
"""
pyxb_binding = pyxb_binding or d1_common.types.dataoneTypes
sysmeta_pyxb = pyxb_binding.systemMetadata()
sysmeta_pyxb.identifier = pid
sysmeta_pyxb.seriesId = sid
sysmeta_pyxb.formatId = format_id
sysmeta_pyxb.checksum, sysmeta_pyxb.size = gen_checksum_and_size(sciobj_stream)
sysmeta_pyxb.submitter = submitter_str
sysmeta_pyxb.rightsHolder = rights_holder_str
sysmeta_pyxb.authoritativeMemberNode = authoritative_mn_urn
sysmeta_pyxb.originMemberNode = origin_mn_urn or authoritative_mn_urn
sysmeta_pyxb.obsoletes = obsoletes_pid
sysmeta_pyxb.obsoletedBy = obsoleted_by_pid
sysmeta_pyxb.archived = is_archived
sysmeta_pyxb.serialVersion = serial_version
sysmeta_pyxb.dateUploaded = uploaded_datetime or d1_common.date_time.utc_now()
sysmeta_pyxb.dateSysMetadataModified = (
modified_datetime or sysmeta_pyxb.dateUploaded
)
sysmeta_pyxb.fileName = file_name
sysmeta_pyxb.replica = None
gen_access_policy(pyxb_binding, sysmeta_pyxb, is_private, access_list)
sysmeta_pyxb.replicationPolicy = gen_replication_policy(
pyxb_binding, preferred_mn_list, blocked_mn_list, is_replication_allowed
)
if media_name or media_property_list:
sysmeta_pyxb.mediaType = gen_media_type(
pyxb_binding, media_name, media_property_list
)
return sysmeta_pyxb
def gen_checksum_and_size(sciobj_stream):
sciobj_stream.seek(0)
checksum_pyxb = d1_common.checksum.create_checksum_object_from_stream(sciobj_stream)
sciobj_stream.seek(0, os.SEEK_END)
sciobj_size = sciobj_stream.tell()
sciobj_stream.seek(0)
return checksum_pyxb, sciobj_size
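# Hedged example for gen_checksum_and_size (io.BytesIO stands in for an open
# science-object stream; the stream is rewound to position 0 afterwards):
#
#   import io
#   checksum_pyxb, size = gen_checksum_and_size(io.BytesIO(b"science bytes"))
#   # size == 13; checksum_pyxb is a DataONE Checksum PyXB object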
def gen_access_policy(pyxb_binding, sysmeta_pyxb, is_private, access_list):
with d1_common.wrap.access_policy.wrap_sysmeta_pyxb(
sysmeta_pyxb, pyxb_binding
) as ap:
if not is_private:
ap.add_public_read()
if access_list is not None:
for subj_str, perm_str in access_list:
ap.add_perm(subj_str, perm_str)
ap.update()
def gen_replication_policy(
pyxb_binding,
preferred_mn_list=None,
blocked_mn_list=None,
is_replication_allowed=False,
):
rp_pyxb = pyxb_binding.replicationPolicy()
rp_pyxb.preferredMemberNode = preferred_mn_list
rp_pyxb.blockedMemberNode = blocked_mn_list
rp_pyxb.replicationAllowed = is_replication_allowed
rp_pyxb.numberReplicas = 3 if is_replication_allowed else 0
return rp_pyxb
def gen_media_type(pyxb_binding, media_name, media_property_list=None):
assert (
media_name is not None
), "When a media_property_list is set, the media_name must also be set"
media_type_pyxb = pyxb_binding.MediaType(name=media_name)
for name_str, value_str in media_property_list or []:
media_type_pyxb.property_.append(
pyxb_binding.MediaTypeProperty(value_str, name=name_str)
)
return media_type_pyxb
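# --- Hedged usage sketch for generate_system_metadata_pyxb. ---
# All identifiers below (PID, format ID, subjects, node URN) are illustrative:
#
#   import io
#   sysmeta_pyxb = generate_system_metadata_pyxb(
#       pid="urn:uuid:example-pid",
#       format_id="text/plain",
#       sciobj_stream=io.BytesIO(b"hello"),
#       submitter_str="CN=exampleUser,DC=dataone,DC=org",
#       rights_holder_str="CN=exampleUser,DC=dataone,DC=org",
#       authoritative_mn_urn="urn:node:EXAMPLE_MN",
#   )
#   # The result serializes via d1_common.xml.serialize_to_xml_str(sysmeta_pyxb).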
|
apache-2.0
| 5,772,235,243,419,589,000 | 31.121212 | 88 | 0.674124 | false |
rhiever/MarkovNetwork
|
MarkovNetwork/MarkovNetwork.py
|
1
|
10471
|
# -*- coding: utf-8 -*-
"""
Copyright 2016 Randal S. Olson
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import print_function
import numpy as np
class MarkovNetwork(object):
"""A Markov Network for neural computing."""
max_markov_gate_inputs = 4
max_markov_gate_outputs = 4
def __init__(self, num_input_states, num_memory_states, num_output_states,
random_genome_length=10000, seed_num_markov_gates=4,
probabilistic=True, genome=None):
"""Sets up a Markov Network
Parameters
----------
num_input_states: int
The number of input states in the Markov Network
num_memory_states: int
The number of internal memory states in the Markov Network
num_output_states: int
The number of output states in the Markov Network
random_genome_length: int (default: 10000)
Length of the genome if it is being randomly generated
This parameter is ignored if "genome" is not None
seed_num_markov_gates: int (default: 4)
The number of Markov Gates with which to seed the Markov Network
It is important to ensure that randomly-generated Markov Networks have at least a few Markov Gates to begin with
May sometimes result in fewer Markov Gates if the Markov Gates are randomly seeded in the same location
This parameter is ignored if "genome" is not None
probabilistic: bool (default: True)
Flag indicating whether the Markov Gates are probabilistic or deterministic
genome: array-like (default: None)
An array representation of the Markov Network to construct
All values in the array must be integers in the range [0, 255]
If None, then a random Markov Network will be generated
Returns
-------
None
"""
self.num_input_states = num_input_states
self.num_memory_states = num_memory_states
self.num_output_states = num_output_states
self.states = np.zeros(num_input_states + num_memory_states + num_output_states, dtype=np.bool)
self.markov_gates = []
self.markov_gate_input_ids = []
self.markov_gate_output_ids = []
if genome is None:
self.genome = np.random.randint(0, 256, random_genome_length).astype(np.uint8)
# Seed the random genome with seed_num_markov_gates Markov Gates
for _ in range(seed_num_markov_gates):
start_index = np.random.randint(0, int(len(self.genome) * 0.8))
self.genome[start_index] = 42
self.genome[start_index + 1] = 213
else:
self.genome = np.array(genome, dtype=np.uint8)
self._setup_markov_network(probabilistic)
def _setup_markov_network(self, probabilistic):
"""Interprets the internal genome into the corresponding Markov Gates
Parameters
----------
probabilistic: bool
Flag indicating whether the Markov Gates are probabilistic or deterministic
Returns
-------
None
"""
for index_counter in range(self.genome.shape[0] - 1):
# Sequence of 42 then 213 indicates a new Markov Gate
if self.genome[index_counter] == 42 and self.genome[index_counter + 1] == 213:
internal_index_counter = index_counter + 2
# Determine the number of inputs and outputs for the Markov Gate
num_inputs = (self.genome[internal_index_counter] % MarkovNetwork.max_markov_gate_inputs) + 1
internal_index_counter += 1
num_outputs = (self.genome[internal_index_counter] % MarkovNetwork.max_markov_gate_outputs) + 1
internal_index_counter += 1
# Make sure that the genome is long enough to encode this Markov Gate
if (internal_index_counter +
(MarkovNetwork.max_markov_gate_inputs + MarkovNetwork.max_markov_gate_outputs) +
(2 ** num_inputs) * (2 ** num_outputs)) > self.genome.shape[0]:
continue
# Determine the states that the Markov Gate will connect its inputs and outputs to
input_state_ids = self.genome[internal_index_counter:internal_index_counter + MarkovNetwork.max_markov_gate_inputs][:num_inputs]
input_state_ids = np.mod(input_state_ids, self.states.shape[0])
internal_index_counter += MarkovNetwork.max_markov_gate_inputs
output_state_ids = self.genome[internal_index_counter:internal_index_counter + MarkovNetwork.max_markov_gate_outputs][:num_outputs]
output_state_ids = np.mod(output_state_ids, self.states.shape[0])
internal_index_counter += MarkovNetwork.max_markov_gate_outputs
self.markov_gate_input_ids.append(input_state_ids)
self.markov_gate_output_ids.append(output_state_ids)
# Interpret the probability table for the Markov Gate
markov_gate = np.copy(self.genome[internal_index_counter:internal_index_counter + (2 ** num_inputs) * (2 ** num_outputs)])
markov_gate = markov_gate.reshape((2 ** num_inputs, 2 ** num_outputs))
if probabilistic: # Probabilistic Markov Gates
markov_gate = markov_gate.astype(np.float64) / np.sum(markov_gate, axis=1, dtype=np.float64)[:, None]
# Precompute the cumulative sums for the activation function
markov_gate = np.cumsum(markov_gate, axis=1, dtype=np.float64)
else: # Deterministic Markov Gates
row_max_indices = np.argmax(markov_gate, axis=1)
markov_gate[:, :] = 0
markov_gate[np.arange(len(row_max_indices)), row_max_indices] = 1
self.markov_gates.append(markov_gate)
def activate_network(self, num_activations=1):
"""Activates the Markov Network
Parameters
----------
num_activations: int (default: 1)
The number of times the Markov Network should be activated
Returns
-------
None
"""
# Save original input values
original_input_values = np.copy(self.states[:self.num_input_states])
for _ in range(num_activations):
# NOTE: This routine can be refactored to use NumPy if larger MNs are being used
# See implementation at https://github.com/rhiever/MarkovNetwork/blob/a381aa9919bb6898b56f678e08127ba6e0eef98f/MarkovNetwork/MarkovNetwork.py#L162:L169
for markov_gate, mg_input_ids, mg_output_ids in zip(self.markov_gates, self.markov_gate_input_ids,
self.markov_gate_output_ids):
mg_input_index, marker = 0, 1
# Create an integer from bytes representation (loop is faster than previous implementation)
for mg_input_id in reversed(mg_input_ids):
if self.states[mg_input_id]:
mg_input_index += marker
marker *= 2
# Determine the corresponding output values for this Markov Gate
roll = np.random.uniform() # sets a roll value
markov_gate_subarray = markov_gate[mg_input_index] # selects a Markov Gate subarray
# Searches for the first value where markov_gate > roll
for i, markov_gate_element in enumerate(markov_gate_subarray):
if markov_gate_element >= roll:
mg_output_index = i
break
# Converts the index into a string of '1's and '0's (binary representation)
                mg_output_values = bin(mg_output_index)  # bin() is much faster than np.binary_repr()
# diff_len deals with the lack of the width argument there was on np.binaryrepr()
diff_len = mg_output_ids.shape[0] - (len(mg_output_values) - 2)
# Loops through 'mg_output_values' and alter 'self.states'
for i, mg_output_value in enumerate(mg_output_values[2:]):
if mg_output_value == '1':
self.states[mg_output_ids[i + diff_len]] = True
# Replace original input values
self.states[:self.num_input_states] = original_input_values
def update_input_states(self, input_values):
"""Updates the input states with the provided inputs
Parameters
----------
input_values: array-like
An array of integers containing the inputs for the Markov Network
len(input_values) must be equal to num_input_states
Returns
-------
None
"""
if len(input_values) != self.num_input_states:
raise ValueError('Invalid number of input values provided')
self.states[:self.num_input_states] = input_values
def get_output_states(self):
"""Returns an array of the current output state's values
Parameters
----------
None
Returns
-------
output_states: array-like
An array of the current output state's values
"""
return np.array(self.states[-self.num_output_states:])
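# --- Hedged demo: build and tick a small random network. ---
# Note: the class relies on np.bool/np.uint8 semantics from the NumPy of its
# era; on NumPy >= 1.24 (where np.bool was removed) substitute plain bool above
# for this demo to run.
if __name__ == "__main__":
    mn = MarkovNetwork(num_input_states=4, num_memory_states=5, num_output_states=2)
    mn.update_input_states([1, 0, 0, 1])
    mn.activate_network(num_activations=3)
    print(mn.get_output_states())  # array of 2 output booleans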
|
mit
| 3,013,961,939,012,819,500 | 44.724891 | 163 | 0.615892 | false |
huggingface/transformers
|
src/transformers/models/deberta_v2/modeling_deberta_v2.py
|
1
|
61698
|
# coding=utf-8
# Copyright 2020 Microsoft and the Hugging Face Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch DeBERTa-v2 model. """
import math
from collections.abc import Sequence
import numpy as np
import torch
from torch import _softmax_backward_data, nn
from torch.nn import CrossEntropyLoss, LayerNorm
from ...activations import ACT2FN
from ...file_utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward
from ...modeling_outputs import (
BaseModelOutput,
MaskedLMOutput,
QuestionAnsweringModelOutput,
SequenceClassifierOutput,
TokenClassifierOutput,
)
from ...modeling_utils import PreTrainedModel
from ...utils import logging
from .configuration_deberta_v2 import DebertaV2Config
logger = logging.get_logger(__name__)
_CONFIG_FOR_DOC = "DebertaV2Config"
_TOKENIZER_FOR_DOC = "DebertaV2Tokenizer"
_CHECKPOINT_FOR_DOC = "microsoft/deberta-v2-xlarge"
DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST = [
"microsoft/deberta-v2-xlarge",
"microsoft/deberta-v2-xxlarge",
"microsoft/deberta-v2-xlarge-mnli",
"microsoft/deberta-v2-xxlarge-mnli",
]
# Copied from transformers.models.deberta.modeling_deberta.ContextPooler
class ContextPooler(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.pooler_hidden_size, config.pooler_hidden_size)
self.dropout = StableDropout(config.pooler_dropout)
self.config = config
def forward(self, hidden_states):
# We "pool" the model by simply taking the hidden state corresponding
# to the first token.
context_token = hidden_states[:, 0]
context_token = self.dropout(context_token)
pooled_output = self.dense(context_token)
pooled_output = ACT2FN[self.config.pooler_hidden_act](pooled_output)
return pooled_output
@property
def output_dim(self):
return self.config.hidden_size
# Copied from transformers.models.deberta.modeling_deberta.XSoftmax with deberta->deberta_v2
class XSoftmax(torch.autograd.Function):
"""
Masked Softmax which is optimized for saving memory
Args:
input (:obj:`torch.tensor`): The input tensor that will apply softmax.
mask (:obj:`torch.IntTensor`): The mask matrix where 0 indicate that element will be ignored in the softmax calculation.
dim (int): The dimension that will apply softmax
Example::
>>> import torch
>>> from transformers.models.deberta_v2.modeling_deberta_v2 import XSoftmax
>>> # Make a tensor
>>> x = torch.randn([4,20,100])
>>> # Create a mask
>>> mask = (x>0).int()
>>> y = XSoftmax.apply(x, mask, dim=-1)
"""
@staticmethod
def forward(self, input, mask, dim):
self.dim = dim
rmask = ~(mask.bool())
output = input.masked_fill(rmask, float("-inf"))
output = torch.softmax(output, self.dim)
output.masked_fill_(rmask, 0)
self.save_for_backward(output)
return output
@staticmethod
def backward(self, grad_output):
(output,) = self.saved_tensors
inputGrad = _softmax_backward_data(grad_output, output, self.dim, output)
return inputGrad, None, None
# Copied from transformers.models.deberta.modeling_deberta.DropoutContext
class DropoutContext(object):
def __init__(self):
self.dropout = 0
self.mask = None
self.scale = 1
self.reuse_mask = True
# Copied from transformers.models.deberta.modeling_deberta.get_mask
def get_mask(input, local_context):
if not isinstance(local_context, DropoutContext):
dropout = local_context
mask = None
else:
dropout = local_context.dropout
dropout *= local_context.scale
mask = local_context.mask if local_context.reuse_mask else None
if dropout > 0 and mask is None:
mask = (1 - torch.empty_like(input).bernoulli_(1 - dropout)).bool()
if isinstance(local_context, DropoutContext):
if local_context.mask is None:
local_context.mask = mask
return mask, dropout
# Copied from transformers.models.deberta.modeling_deberta.XDropout
class XDropout(torch.autograd.Function):
"""Optimized dropout function to save computation and memory by using mask operation instead of multiplication."""
@staticmethod
def forward(ctx, input, local_ctx):
mask, dropout = get_mask(input, local_ctx)
ctx.scale = 1.0 / (1 - dropout)
if dropout > 0:
ctx.save_for_backward(mask)
return input.masked_fill(mask, 0) * ctx.scale
else:
return input
@staticmethod
def backward(ctx, grad_output):
if ctx.scale > 1:
(mask,) = ctx.saved_tensors
return grad_output.masked_fill(mask, 0) * ctx.scale, None
else:
return grad_output, None
# Copied from transformers.models.deberta.modeling_deberta.StableDropout
class StableDropout(nn.Module):
"""
Optimized dropout module for stabilizing the training
Args:
drop_prob (float): the dropout probabilities
"""
def __init__(self, drop_prob):
super().__init__()
self.drop_prob = drop_prob
self.count = 0
self.context_stack = None
def forward(self, x):
"""
Call the module
Args:
x (:obj:`torch.tensor`): The input tensor to apply dropout
"""
if self.training and self.drop_prob > 0:
return XDropout.apply(x, self.get_context())
return x
def clear_context(self):
self.count = 0
self.context_stack = None
def init_context(self, reuse_mask=True, scale=1):
if self.context_stack is None:
self.context_stack = []
self.count = 0
for c in self.context_stack:
c.reuse_mask = reuse_mask
c.scale = scale
def get_context(self):
if self.context_stack is not None:
if self.count >= len(self.context_stack):
self.context_stack.append(DropoutContext())
ctx = self.context_stack[self.count]
ctx.dropout = self.drop_prob
self.count += 1
return ctx
else:
return self.drop_prob
# Copied from transformers.models.deberta.modeling_deberta.DebertaSelfOutput with DebertaLayerNorm->LayerNorm
class DebertaV2SelfOutput(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
self.LayerNorm = LayerNorm(config.hidden_size, config.layer_norm_eps)
self.dropout = StableDropout(config.hidden_dropout_prob)
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.LayerNorm(hidden_states + input_tensor)
return hidden_states
# Copied from transformers.models.deberta.modeling_deberta.DebertaAttention with Deberta->DebertaV2
class DebertaV2Attention(nn.Module):
def __init__(self, config):
super().__init__()
self.self = DisentangledSelfAttention(config)
self.output = DebertaV2SelfOutput(config)
self.config = config
def forward(
self,
hidden_states,
attention_mask,
return_att=False,
query_states=None,
relative_pos=None,
rel_embeddings=None,
):
self_output = self.self(
hidden_states,
attention_mask,
return_att,
query_states=query_states,
relative_pos=relative_pos,
rel_embeddings=rel_embeddings,
)
if return_att:
self_output, att_matrix = self_output
if query_states is None:
query_states = hidden_states
attention_output = self.output(self_output, query_states)
if return_att:
return (attention_output, att_matrix)
else:
return attention_output
# Copied from transformers.models.bert.modeling_bert.BertIntermediate with Bert->DebertaV2
class DebertaV2Intermediate(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
if isinstance(config.hidden_act, str):
self.intermediate_act_fn = ACT2FN[config.hidden_act]
else:
self.intermediate_act_fn = config.hidden_act
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.intermediate_act_fn(hidden_states)
return hidden_states
# Copied from transformers.models.deberta.modeling_deberta.DebertaOutput with DebertaLayerNorm->LayerNorm
class DebertaV2Output(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
self.LayerNorm = LayerNorm(config.hidden_size, config.layer_norm_eps)
self.dropout = StableDropout(config.hidden_dropout_prob)
self.config = config
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.LayerNorm(hidden_states + input_tensor)
return hidden_states
# Copied from transformers.models.deberta.modeling_deberta.DebertaLayer with Deberta->DebertaV2
class DebertaV2Layer(nn.Module):
def __init__(self, config):
super().__init__()
self.attention = DebertaV2Attention(config)
self.intermediate = DebertaV2Intermediate(config)
self.output = DebertaV2Output(config)
def forward(
self,
hidden_states,
attention_mask,
return_att=False,
query_states=None,
relative_pos=None,
rel_embeddings=None,
):
attention_output = self.attention(
hidden_states,
attention_mask,
return_att=return_att,
query_states=query_states,
relative_pos=relative_pos,
rel_embeddings=rel_embeddings,
)
if return_att:
attention_output, att_matrix = attention_output
intermediate_output = self.intermediate(attention_output)
layer_output = self.output(intermediate_output, attention_output)
if return_att:
return (layer_output, att_matrix)
else:
return layer_output
class ConvLayer(nn.Module):
def __init__(self, config):
super().__init__()
kernel_size = getattr(config, "conv_kernel_size", 3)
groups = getattr(config, "conv_groups", 1)
self.conv_act = getattr(config, "conv_act", "tanh")
self.conv = nn.Conv1d(
config.hidden_size, config.hidden_size, kernel_size, padding=(kernel_size - 1) // 2, groups=groups
)
self.LayerNorm = LayerNorm(config.hidden_size, config.layer_norm_eps)
self.dropout = StableDropout(config.hidden_dropout_prob)
self.config = config
def forward(self, hidden_states, residual_states, input_mask):
out = self.conv(hidden_states.permute(0, 2, 1).contiguous()).permute(0, 2, 1).contiguous()
rmask = (1 - input_mask).bool()
out.masked_fill_(rmask.unsqueeze(-1).expand(out.size()), 0)
out = ACT2FN[self.conv_act](self.dropout(out))
layer_norm_input = residual_states + out
output = self.LayerNorm(layer_norm_input).to(layer_norm_input)
if input_mask is None:
output_states = output
else:
if input_mask.dim() != layer_norm_input.dim():
if input_mask.dim() == 4:
input_mask = input_mask.squeeze(1).squeeze(1)
input_mask = input_mask.unsqueeze(2)
input_mask = input_mask.to(output.dtype)
output_states = output * input_mask
return output_states
class DebertaV2Encoder(nn.Module):
"""Modified BertEncoder with relative position bias support"""
def __init__(self, config):
super().__init__()
self.layer = nn.ModuleList([DebertaV2Layer(config) for _ in range(config.num_hidden_layers)])
self.relative_attention = getattr(config, "relative_attention", False)
if self.relative_attention:
self.max_relative_positions = getattr(config, "max_relative_positions", -1)
if self.max_relative_positions < 1:
self.max_relative_positions = config.max_position_embeddings
self.position_buckets = getattr(config, "position_buckets", -1)
pos_ebd_size = self.max_relative_positions * 2
if self.position_buckets > 0:
pos_ebd_size = self.position_buckets * 2
self.rel_embeddings = nn.Embedding(pos_ebd_size, config.hidden_size)
self.norm_rel_ebd = [x.strip() for x in getattr(config, "norm_rel_ebd", "none").lower().split("|")]
if "layer_norm" in self.norm_rel_ebd:
self.LayerNorm = LayerNorm(config.hidden_size, config.layer_norm_eps, elementwise_affine=True)
self.conv = ConvLayer(config) if getattr(config, "conv_kernel_size", 0) > 0 else None
def get_rel_embedding(self):
rel_embeddings = self.rel_embeddings.weight if self.relative_attention else None
if rel_embeddings is not None and ("layer_norm" in self.norm_rel_ebd):
rel_embeddings = self.LayerNorm(rel_embeddings)
return rel_embeddings
def get_attention_mask(self, attention_mask):
if attention_mask.dim() <= 2:
extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
attention_mask = extended_attention_mask * extended_attention_mask.squeeze(-2).unsqueeze(-1)
attention_mask = attention_mask.byte()
elif attention_mask.dim() == 3:
attention_mask = attention_mask.unsqueeze(1)
return attention_mask
def get_rel_pos(self, hidden_states, query_states=None, relative_pos=None):
if self.relative_attention and relative_pos is None:
q = query_states.size(-2) if query_states is not None else hidden_states.size(-2)
relative_pos = build_relative_position(
q, hidden_states.size(-2), bucket_size=self.position_buckets, max_position=self.max_relative_positions
)
return relative_pos
def forward(
self,
hidden_states,
attention_mask,
output_hidden_states=True,
output_attentions=False,
query_states=None,
relative_pos=None,
return_dict=True,
):
if attention_mask.dim() <= 2:
input_mask = attention_mask
else:
input_mask = (attention_mask.sum(-2) > 0).byte()
attention_mask = self.get_attention_mask(attention_mask)
relative_pos = self.get_rel_pos(hidden_states, query_states, relative_pos)
all_hidden_states = () if output_hidden_states else None
all_attentions = () if output_attentions else None
if isinstance(hidden_states, Sequence):
next_kv = hidden_states[0]
else:
next_kv = hidden_states
rel_embeddings = self.get_rel_embedding()
output_states = next_kv
for i, layer_module in enumerate(self.layer):
if output_hidden_states:
all_hidden_states = all_hidden_states + (output_states,)
output_states = layer_module(
next_kv,
attention_mask,
output_attentions,
query_states=query_states,
relative_pos=relative_pos,
rel_embeddings=rel_embeddings,
)
if output_attentions:
output_states, att_m = output_states
if i == 0 and self.conv is not None:
output_states = self.conv(hidden_states, output_states, input_mask)
if query_states is not None:
query_states = output_states
if isinstance(hidden_states, Sequence):
next_kv = hidden_states[i + 1] if i + 1 < len(self.layer) else None
else:
next_kv = output_states
if output_attentions:
all_attentions = all_attentions + (att_m,)
if output_hidden_states:
all_hidden_states = all_hidden_states + (output_states,)
if not return_dict:
return tuple(v for v in [output_states, all_hidden_states, all_attentions] if v is not None)
return BaseModelOutput(
last_hidden_state=output_states, hidden_states=all_hidden_states, attentions=all_attentions
)
def make_log_bucket_position(relative_pos, bucket_size, max_position):
sign = np.sign(relative_pos)
mid = bucket_size // 2
abs_pos = np.where((relative_pos < mid) & (relative_pos > -mid), mid - 1, np.abs(relative_pos))
log_pos = np.ceil(np.log(abs_pos / mid) / np.log((max_position - 1) / mid) * (mid - 1)) + mid
bucket_pos = np.where(abs_pos <= mid, relative_pos, log_pos * sign).astype(np.int)
return bucket_pos
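# Hedged note on the bucketing above: near positions (|p| <= bucket_size // 2)
# keep their exact relative distance, while farther positions are squashed
# logarithmically so each sign uses at most bucket_size distinct ids.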
def build_relative_position(query_size, key_size, bucket_size=-1, max_position=-1):
"""
Build relative position according to the query and key
We assume the absolute position of query :math:`P_q` is range from (0, query_size) and the absolute position of key
:math:`P_k` is range from (0, key_size), The relative positions from query to key is :math:`R_{q \\rightarrow k} =
P_q - P_k`
Args:
query_size (int): the length of query
key_size (int): the length of key
bucket_size (int): the size of position bucket
max_position (int): the maximum allowed absolute position
Return:
:obj:`torch.LongTensor`: A tensor with shape [1, query_size, key_size]
"""
q_ids = np.arange(0, query_size)
k_ids = np.arange(0, key_size)
rel_pos_ids = q_ids[:, None] - np.tile(k_ids, (q_ids.shape[0], 1))
if bucket_size > 0 and max_position > 0:
rel_pos_ids = make_log_bucket_position(rel_pos_ids, bucket_size, max_position)
rel_pos_ids = torch.tensor(rel_pos_ids, dtype=torch.long)
rel_pos_ids = rel_pos_ids[:query_size, :]
rel_pos_ids = rel_pos_ids.unsqueeze(0)
return rel_pos_ids
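# Hedged shape check for build_relative_position (a pure helper, safe to run):
#
#   rel = build_relative_position(4, 6)
#   # rel.shape == torch.Size([1, 4, 6]); entry [0, q, k] equals q - k before
#   # any bucketing is applied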
@torch.jit.script
# Copied from transformers.models.deberta.modeling_deberta.c2p_dynamic_expand
def c2p_dynamic_expand(c2p_pos, query_layer, relative_pos):
return c2p_pos.expand([query_layer.size(0), query_layer.size(1), query_layer.size(2), relative_pos.size(-1)])
@torch.jit.script
# Copied from transformers.models.deberta.modeling_deberta.p2c_dynamic_expand
def p2c_dynamic_expand(c2p_pos, query_layer, key_layer):
return c2p_pos.expand([query_layer.size(0), query_layer.size(1), key_layer.size(-2), key_layer.size(-2)])
@torch.jit.script
# Copied from transformers.models.deberta.modeling_deberta.pos_dynamic_expand
def pos_dynamic_expand(pos_index, p2c_att, key_layer):
return pos_index.expand(p2c_att.size()[:2] + (pos_index.size(-2), key_layer.size(-2)))
class DisentangledSelfAttention(nn.Module):
"""
Disentangled self-attention module
Parameters:
config (:obj:`DebertaV2Config`):
A model config class instance with the configuration to build a new model. The schema is similar to
`BertConfig`, for more details, please refer :class:`~transformers.DebertaV2Config`
"""
def __init__(self, config):
super().__init__()
if config.hidden_size % config.num_attention_heads != 0:
raise ValueError(
f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
f"heads ({config.num_attention_heads})"
)
self.num_attention_heads = config.num_attention_heads
_attention_head_size = config.hidden_size // config.num_attention_heads
self.attention_head_size = getattr(config, "attention_head_size", _attention_head_size)
self.all_head_size = self.num_attention_heads * self.attention_head_size
self.query_proj = nn.Linear(config.hidden_size, self.all_head_size, bias=True)
self.key_proj = nn.Linear(config.hidden_size, self.all_head_size, bias=True)
self.value_proj = nn.Linear(config.hidden_size, self.all_head_size, bias=True)
self.share_att_key = getattr(config, "share_att_key", False)
self.pos_att_type = config.pos_att_type if config.pos_att_type is not None else []
self.relative_attention = getattr(config, "relative_attention", False)
if self.relative_attention:
self.position_buckets = getattr(config, "position_buckets", -1)
self.max_relative_positions = getattr(config, "max_relative_positions", -1)
if self.max_relative_positions < 1:
self.max_relative_positions = config.max_position_embeddings
self.pos_ebd_size = self.max_relative_positions
if self.position_buckets > 0:
self.pos_ebd_size = self.position_buckets
self.pos_dropout = StableDropout(config.hidden_dropout_prob)
if not self.share_att_key:
if "c2p" in self.pos_att_type or "p2p" in self.pos_att_type:
self.pos_key_proj = nn.Linear(config.hidden_size, self.all_head_size, bias=True)
if "p2c" in self.pos_att_type or "p2p" in self.pos_att_type:
self.pos_query_proj = nn.Linear(config.hidden_size, self.all_head_size)
self.dropout = StableDropout(config.attention_probs_dropout_prob)
def transpose_for_scores(self, x, attention_heads):
new_x_shape = x.size()[:-1] + (attention_heads, -1)
x = x.view(*new_x_shape)
return x.permute(0, 2, 1, 3).contiguous().view(-1, x.size(1), x.size(-1))
def forward(
self,
hidden_states,
attention_mask,
return_att=False,
query_states=None,
relative_pos=None,
rel_embeddings=None,
):
"""
Call the module
Args:
hidden_states (:obj:`torch.FloatTensor`):
Input states to the module usually the output from previous layer, it will be the Q,K and V in
`Attention(Q,K,V)`
attention_mask (:obj:`torch.ByteTensor`):
An attention mask matrix of shape [`B`, `N`, `N`] where `B` is the batch size, `N` is the maximum
sequence length in which element [i,j] = `1` means the `i` th token in the input can attend to the `j`
th token.
return_att (:obj:`bool`, optional):
Whether return the attention matrix.
query_states (:obj:`torch.FloatTensor`, optional):
The `Q` state in `Attention(Q,K,V)`.
relative_pos (:obj:`torch.LongTensor`):
The relative position encoding between the tokens in the sequence. It's of shape [`B`, `N`, `N`] with
values ranging in [`-max_relative_positions`, `max_relative_positions`].
rel_embeddings (:obj:`torch.FloatTensor`):
The embedding of relative distances. It's a tensor of shape [:math:`2 \\times
\\text{max_relative_positions}`, `hidden_size`].
"""
if query_states is None:
query_states = hidden_states
query_layer = self.transpose_for_scores(self.query_proj(query_states), self.num_attention_heads)
key_layer = self.transpose_for_scores(self.key_proj(hidden_states), self.num_attention_heads)
value_layer = self.transpose_for_scores(self.value_proj(hidden_states), self.num_attention_heads)
rel_att = None
# Take the dot product between "query" and "key" to get the raw attention scores.
scale_factor = 1
if "c2p" in self.pos_att_type:
scale_factor += 1
if "p2c" in self.pos_att_type:
scale_factor += 1
if "p2p" in self.pos_att_type:
scale_factor += 1
scale = math.sqrt(query_layer.size(-1) * scale_factor)
attention_scores = torch.bmm(query_layer, key_layer.transpose(-1, -2)) / scale
if self.relative_attention:
rel_embeddings = self.pos_dropout(rel_embeddings)
rel_att = self.disentangled_attention_bias(
query_layer, key_layer, relative_pos, rel_embeddings, scale_factor
)
if rel_att is not None:
attention_scores = attention_scores + rel_att
attention_scores = attention_scores.view(
-1, self.num_attention_heads, attention_scores.size(-2), attention_scores.size(-1)
)
# bsz x height x length x dimension
attention_probs = XSoftmax.apply(attention_scores, attention_mask, -1)
attention_probs = self.dropout(attention_probs)
context_layer = torch.bmm(
attention_probs.view(-1, attention_probs.size(-2), attention_probs.size(-1)), value_layer
)
context_layer = (
context_layer.view(-1, self.num_attention_heads, context_layer.size(-2), context_layer.size(-1))
.permute(0, 2, 1, 3)
.contiguous()
)
new_context_layer_shape = context_layer.size()[:-2] + (-1,)
context_layer = context_layer.view(*new_context_layer_shape)
if return_att:
return (context_layer, attention_probs)
else:
return context_layer
def disentangled_attention_bias(self, query_layer, key_layer, relative_pos, rel_embeddings, scale_factor):
if relative_pos is None:
q = query_layer.size(-2)
relative_pos = build_relative_position(
q, key_layer.size(-2), bucket_size=self.position_buckets, max_position=self.max_relative_positions
)
if relative_pos.dim() == 2:
relative_pos = relative_pos.unsqueeze(0).unsqueeze(0)
elif relative_pos.dim() == 3:
relative_pos = relative_pos.unsqueeze(1)
# bsz x height x query x key
elif relative_pos.dim() != 4:
raise ValueError(f"Relative position ids must be of dim 2 or 3 or 4. {relative_pos.dim()}")
att_span = self.pos_ebd_size
relative_pos = relative_pos.long().to(query_layer.device)
rel_embeddings = rel_embeddings[self.pos_ebd_size - att_span : self.pos_ebd_size + att_span, :].unsqueeze(0)
if self.share_att_key:
pos_query_layer = self.transpose_for_scores(
self.query_proj(rel_embeddings), self.num_attention_heads
).repeat(query_layer.size(0) // self.num_attention_heads, 1, 1)
pos_key_layer = self.transpose_for_scores(self.key_proj(rel_embeddings), self.num_attention_heads).repeat(
query_layer.size(0) // self.num_attention_heads, 1, 1
)
else:
if "c2p" in self.pos_att_type or "p2p" in self.pos_att_type:
pos_key_layer = self.transpose_for_scores(
self.pos_key_proj(rel_embeddings), self.num_attention_heads
).repeat(
query_layer.size(0) // self.num_attention_heads, 1, 1
) # .split(self.all_head_size, dim=-1)
if "p2c" in self.pos_att_type or "p2p" in self.pos_att_type:
pos_query_layer = self.transpose_for_scores(
self.pos_query_proj(rel_embeddings), self.num_attention_heads
).repeat(
query_layer.size(0) // self.num_attention_heads, 1, 1
) # .split(self.all_head_size, dim=-1)
score = 0
# content->position
if "c2p" in self.pos_att_type:
scale = math.sqrt(pos_key_layer.size(-1) * scale_factor)
c2p_att = torch.bmm(query_layer, pos_key_layer.transpose(-1, -2))
c2p_pos = torch.clamp(relative_pos + att_span, 0, att_span * 2 - 1)
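# Shift relative distances from [-att_span, att_span) into [0, 2*att_span) so they can index the
# relative-position axis of `c2p_att` in the gather below.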
c2p_att = torch.gather(
c2p_att,
dim=-1,
index=c2p_pos.squeeze(0).expand([query_layer.size(0), query_layer.size(1), relative_pos.size(-1)]),
)
score += c2p_att / scale
# position->content
if "p2c" in self.pos_att_type or "p2p" in self.pos_att_type:
scale = math.sqrt(pos_query_layer.size(-1) * scale_factor)
if key_layer.size(-2) != query_layer.size(-2):
r_pos = build_relative_position(
key_layer.size(-2),
key_layer.size(-2),
bucket_size=self.position_buckets,
max_position=self.max_relative_positions,
).to(query_layer.device)
r_pos = r_pos.unsqueeze(0)
else:
r_pos = relative_pos
p2c_pos = torch.clamp(-r_pos + att_span, 0, att_span * 2 - 1)
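# position->content swaps the roles of query and key, so the relative position is negated before
# applying the same shift-and-clamp index mapping as in the c2p branch.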
if query_layer.size(-2) != key_layer.size(-2):
pos_index = relative_pos[:, :, :, 0].unsqueeze(-1)
if "p2c" in self.pos_att_type:
p2c_att = torch.bmm(key_layer, pos_query_layer.transpose(-1, -2))
p2c_att = torch.gather(
p2c_att,
dim=-1,
index=p2c_pos.squeeze(0).expand([query_layer.size(0), key_layer.size(-2), key_layer.size(-2)]),
).transpose(-1, -2)
if query_layer.size(-2) != key_layer.size(-2):
p2c_att = torch.gather(
p2c_att,
dim=-2,
index=pos_index.expand(p2c_att.size()[:2] + (pos_index.size(-2), key_layer.size(-2))),
)
score += p2c_att / scale
# position->position
if "p2p" in self.pos_att_type:
pos_query = pos_query_layer[:, :, att_span:, :]
p2p_att = torch.matmul(pos_query, pos_key_layer.transpose(-1, -2))
p2p_att = p2p_att.expand(query_layer.size()[:2] + p2p_att.size()[2:])
if query_layer.size(-2) != key_layer.size(-2):
p2p_att = torch.gather(
p2p_att,
dim=-2,
index=pos_index.expand(query_layer.size()[:2] + (pos_index.size(-2), p2p_att.size(-1))),
)
p2p_att = torch.gather(
p2p_att,
dim=-1,
index=c2p_pos.expand(
[query_layer.size(0), query_layer.size(1), query_layer.size(2), relative_pos.size(-1)]
),
)
score += p2p_att
return score
# Copied from transformers.models.deberta.modeling_deberta.DebertaEmbeddings with DebertaLayerNorm->LayerNorm
class DebertaV2Embeddings(nn.Module):
"""Construct the embeddings from word, position and token_type embeddings."""
def __init__(self, config):
super().__init__()
pad_token_id = getattr(config, "pad_token_id", 0)
self.embedding_size = getattr(config, "embedding_size", config.hidden_size)
self.word_embeddings = nn.Embedding(config.vocab_size, self.embedding_size, padding_idx=pad_token_id)
self.position_biased_input = getattr(config, "position_biased_input", True)
if not self.position_biased_input:
self.position_embeddings = None
else:
self.position_embeddings = nn.Embedding(config.max_position_embeddings, self.embedding_size)
if config.type_vocab_size > 0:
self.token_type_embeddings = nn.Embedding(config.type_vocab_size, self.embedding_size)
if self.embedding_size != config.hidden_size:
self.embed_proj = nn.Linear(self.embedding_size, config.hidden_size, bias=False)
self.LayerNorm = LayerNorm(config.hidden_size, config.layer_norm_eps)
self.dropout = StableDropout(config.hidden_dropout_prob)
self.config = config
# position_ids (1, len position emb) is contiguous in memory and exported when serialized
self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
def forward(self, input_ids=None, token_type_ids=None, position_ids=None, mask=None, inputs_embeds=None):
if input_ids is not None:
input_shape = input_ids.size()
else:
input_shape = inputs_embeds.size()[:-1]
seq_length = input_shape[1]
if position_ids is None:
position_ids = self.position_ids[:, :seq_length]
if token_type_ids is None:
token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)
if inputs_embeds is None:
inputs_embeds = self.word_embeddings(input_ids)
if self.position_embeddings is not None:
position_embeddings = self.position_embeddings(position_ids.long())
else:
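# No absolute position table is configured; zeros keep `position_embeddings` defined, and they
# are never added to the input because `position_biased_input` is False in this branch.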
position_embeddings = torch.zeros_like(inputs_embeds)
embeddings = inputs_embeds
if self.position_biased_input:
embeddings += position_embeddings
if self.config.type_vocab_size > 0:
token_type_embeddings = self.token_type_embeddings(token_type_ids)
embeddings += token_type_embeddings
if self.embedding_size != self.config.hidden_size:
embeddings = self.embed_proj(embeddings)
embeddings = self.LayerNorm(embeddings)
if mask is not None:
if mask.dim() != embeddings.dim():
if mask.dim() == 4:
mask = mask.squeeze(1).squeeze(1)
mask = mask.unsqueeze(2)
mask = mask.to(embeddings.dtype)
embeddings = embeddings * mask
embeddings = self.dropout(embeddings)
return embeddings
# Copied from transformers.models.deberta.modeling_deberta.DebertaPreTrainedModel with Deberta->DebertaV2
class DebertaV2PreTrainedModel(PreTrainedModel):
"""
An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
models.
"""
config_class = DebertaV2Config
base_model_prefix = "deberta"
_keys_to_ignore_on_load_missing = ["position_ids"]
_keys_to_ignore_on_load_unexpected = ["position_embeddings"]
def __init__(self, config):
super().__init__(config)
self._register_load_state_dict_pre_hook(self._pre_load_hook)
def _init_weights(self, module):
"""Initialize the weights."""
if isinstance(module, nn.Linear):
# Slightly different from the TF version which uses truncated_normal for initialization
# cf https://github.com/pytorch/pytorch/pull/5617
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
def _pre_load_hook(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
"""
Removes the classifier if it doesn't have the correct number of labels.
"""
self_state = self.state_dict()
if (
("classifier.weight" in self_state)
and ("classifier.weight" in state_dict)
and self_state["classifier.weight"].size() != state_dict["classifier.weight"].size()
):
logger.warning(
f"The checkpoint classifier head has a shape {state_dict['classifier.weight'].size()} and this model "
f"classifier head has a shape {self_state['classifier.weight'].size()}. Ignoring the checkpoint "
f"weights. You should train your model on new data."
)
del state_dict["classifier.weight"]
if "classifier.bias" in state_dict:
del state_dict["classifier.bias"]
DEBERTA_START_DOCSTRING = r"""
The DeBERTa model was proposed in `DeBERTa: Decoding-enhanced BERT with Disentangled Attention
<https://arxiv.org/abs/2006.03654>`_ by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen. It's built on top of
BERT/RoBERTa with two improvements, i.e. disentangled attention and an enhanced mask decoder. With those two
improvements, it outperforms BERT/RoBERTa on a majority of tasks with 80GB of pretraining data.
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to
general usage and behavior.
Parameters:
config (:class:`~transformers.DebertaV2Config`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
DEBERTA_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using :class:`transformers.DebertaV2Tokenizer`. See
:func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
1]``:
- 0 corresponds to a `sentence A` token,
- 1 corresponds to a `sentence B` token.
`What are token type IDs? <../glossary.html#token-type-ids>`__
position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
config.max_position_embeddings - 1]``.
`What are position IDs? <../glossary.html#position-ids>`__
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert `input_ids` indices into associated vectors
than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
"The bare DeBERTa Model transformer outputting raw hidden-states without any specific head on top.",
DEBERTA_START_DOCSTRING,
)
# Copied from transformers.models.deberta.modeling_deberta.DebertaModel with Deberta->DebertaV2
class DebertaV2Model(DebertaV2PreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.embeddings = DebertaV2Embeddings(config)
self.encoder = DebertaV2Encoder(config)
self.z_steps = 0
self.config = config
self.init_weights()
def get_input_embeddings(self):
return self.embeddings.word_embeddings
def set_input_embeddings(self, new_embeddings):
self.embeddings.word_embeddings = new_embeddings
def _prune_heads(self, heads_to_prune):
"""
Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer}. See base
class PreTrainedModel.
"""
raise NotImplementedError("The prune function is not implemented in DeBERTa model.")
@add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=SequenceClassifierOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
inputs_embeds=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
device = input_ids.device if input_ids is not None else inputs_embeds.device
if attention_mask is None:
attention_mask = torch.ones(input_shape, device=device)
if token_type_ids is None:
token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)
embedding_output = self.embeddings(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
mask=attention_mask,
inputs_embeds=inputs_embeds,
)
encoder_outputs = self.encoder(
embedding_output,
attention_mask,
output_hidden_states=True,
output_attentions=output_attentions,
return_dict=return_dict,
)
encoded_layers = encoder_outputs[1]
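# `z_steps` is 0 by default (set in __init__), so this block is normally skipped; when enabled it
# re-applies the final encoder layer, feeding the deepest hidden state back in as the query states.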
if self.z_steps > 1:
hidden_states = encoded_layers[-2]
layers = [self.encoder.layer[-1] for _ in range(self.z_steps)]
query_states = encoded_layers[-1]
rel_embeddings = self.encoder.get_rel_embedding()
attention_mask = self.encoder.get_attention_mask(attention_mask)
rel_pos = self.encoder.get_rel_pos(embedding_output)
for layer in layers[1:]:
query_states = layer(
hidden_states,
attention_mask,
return_att=False,
query_states=query_states,
relative_pos=rel_pos,
rel_embeddings=rel_embeddings,
)
encoded_layers.append(query_states)
sequence_output = encoded_layers[-1]
if not return_dict:
return (sequence_output,) + encoder_outputs[(1 if output_hidden_states else 2) :]
return BaseModelOutput(
last_hidden_state=sequence_output,
hidden_states=encoder_outputs.hidden_states if output_hidden_states else None,
attentions=encoder_outputs.attentions,
)
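# A minimal usage sketch (assuming the public "microsoft/deberta-v2-xlarge" checkpoint; not part of this file):
#
#     from transformers import DebertaV2Tokenizer, DebertaV2Model
#     tokenizer = DebertaV2Tokenizer.from_pretrained("microsoft/deberta-v2-xlarge")
#     model = DebertaV2Model.from_pretrained("microsoft/deberta-v2-xlarge")
#     inputs = tokenizer("DeBERTa uses disentangled attention.", return_tensors="pt")
#     outputs = model(**inputs)
#     last_hidden = outputs.last_hidden_state  # (batch_size, sequence_length, hidden_size)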
@add_start_docstrings("""DeBERTa Model with a `language modeling` head on top. """, DEBERTA_START_DOCSTRING)
# Copied from transformers.models.deberta.modeling_deberta.DebertaForMaskedLM with Deberta->DebertaV2
class DebertaV2ForMaskedLM(DebertaV2PreTrainedModel):
_keys_to_ignore_on_load_unexpected = [r"pooler"]
_keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"]
def __init__(self, config):
super().__init__(config)
self.deberta = DebertaV2Model(config)
self.cls = DebertaV2OnlyMLMHead(config)
self.init_weights()
def get_output_embeddings(self):
return self.cls.predictions.decoder
def set_output_embeddings(self, new_embeddings):
self.cls.predictions.decoder = new_embeddings
@add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=MaskedLMOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
inputs_embeds=None,
labels=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Labels for computing the masked language modeling loss. Indices should be in ``[-100, 0, ...,
config.vocab_size]`` (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are ignored
(masked); the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
outputs = self.deberta(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
prediction_scores = self.cls(sequence_output)
masked_lm_loss = None
if labels is not None:
loss_fct = CrossEntropyLoss() # -100 index = padding token
masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))
if not return_dict:
output = (prediction_scores,) + outputs[1:]
return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output
return MaskedLMOutput(
loss=masked_lm_loss,
logits=prediction_scores,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
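# A minimal sketch of preparing masked-LM labels (`mask_positions` is a hypothetical boolean tensor
# marking the tokens that were replaced by the mask token):
#
#     labels = input_ids.clone()
#     labels[~mask_positions] = -100  # -100 is CrossEntropyLoss's default ignore_index
#     loss = model(input_ids=input_ids, attention_mask=attention_mask, labels=labels).loss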
# Copied from transformers.models.bert.BertPredictionHeadTransform with bert -> deberta
class DebertaV2PredictionHeadTransform(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
if isinstance(config.hidden_act, str):
self.transform_act_fn = ACT2FN[config.hidden_act]
else:
self.transform_act_fn = config.hidden_act
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.transform_act_fn(hidden_states)
hidden_states = self.LayerNorm(hidden_states)
return hidden_states
# Copied from transformers.models.bert.BertLMPredictionHead with bert -> deberta
class DebertaV2LMPredictionHead(nn.Module):
def __init__(self, config):
super().__init__()
self.transform = DebertaV2PredictionHeadTransform(config)
# The output weights are the same as the input embeddings, but there is
# an output-only bias for each token.
self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
self.bias = nn.Parameter(torch.zeros(config.vocab_size))
# Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings`
self.decoder.bias = self.bias
def forward(self, hidden_states):
hidden_states = self.transform(hidden_states)
hidden_states = self.decoder(hidden_states)
return hidden_states
# Copied from transformers.models.bert.BertOnlyMLMHead with bert -> deberta
class DebertaV2OnlyMLMHead(nn.Module):
def __init__(self, config):
super().__init__()
self.predictions = DebertaV2LMPredictionHead(config)
def forward(self, sequence_output):
prediction_scores = self.predictions(sequence_output)
return prediction_scores
@add_start_docstrings(
"""
DeBERTa Model transformer with a sequence classification/regression head on top (a linear layer on top of the
pooled output) e.g. for GLUE tasks.
""",
DEBERTA_START_DOCSTRING,
)
# Copied from transformers.models.deberta.modeling_deberta.DebertaForSequenceClassification with Deberta->DebertaV2
class DebertaV2ForSequenceClassification(DebertaV2PreTrainedModel):
def __init__(self, config):
super().__init__(config)
num_labels = getattr(config, "num_labels", 2)
self.num_labels = num_labels
self.deberta = DebertaV2Model(config)
self.pooler = ContextPooler(config)
output_dim = self.pooler.output_dim
self.classifier = nn.Linear(output_dim, num_labels)
drop_out = getattr(config, "cls_dropout", None)
drop_out = self.config.hidden_dropout_prob if drop_out is None else drop_out
self.dropout = StableDropout(drop_out)
self.init_weights()
def get_input_embeddings(self):
return self.deberta.get_input_embeddings()
def set_input_embeddings(self, new_embeddings):
self.deberta.set_input_embeddings(new_embeddings)
@add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=SequenceClassifierOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
inputs_embeds=None,
labels=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss);
if :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
outputs = self.deberta(
input_ids,
token_type_ids=token_type_ids,
attention_mask=attention_mask,
position_ids=position_ids,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
encoder_layer = outputs[0]
pooled_output = self.pooler(encoder_layer)
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
loss = None
if labels is not None:
if self.num_labels == 1:
# regression task
loss_fn = nn.MSELoss()
logits = logits.view(-1).to(labels.dtype)
loss = loss_fn(logits, labels.view(-1))
elif labels.dim() == 1 or labels.size(-1) == 1:
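# Hard class labels; entries below zero are treated as padding and excluded from the loss below.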
label_index = (labels >= 0).nonzero()
labels = labels.long()
if label_index.size(0) > 0:
labeled_logits = torch.gather(logits, 0, label_index.expand(label_index.size(0), logits.size(1)))
labels = torch.gather(labels, 0, label_index.view(-1))
loss_fct = CrossEntropyLoss()
loss = loss_fct(labeled_logits.view(-1, self.num_labels).float(), labels.view(-1))
else:
loss = torch.tensor(0).to(logits)
else:
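# Labels are soft distributions over classes; compute the cross-entropy against them manually.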
log_softmax = nn.LogSoftmax(-1)
loss = -((log_softmax(logits) * labels).sum(-1)).mean()
if not return_dict:
output = (logits,) + outputs[1:]
return ((loss,) + output) if loss is not None else output
else:
return SequenceClassifierOutput(
loss=loss,
logits=logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
@add_start_docstrings(
"""
DeBERTa Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for
Named-Entity-Recognition (NER) tasks.
""",
DEBERTA_START_DOCSTRING,
)
# Copied from transformers.models.deberta.modeling_deberta.DebertaForTokenClassification with Deberta->DebertaV2
class DebertaV2ForTokenClassification(DebertaV2PreTrainedModel):
_keys_to_ignore_on_load_unexpected = [r"pooler"]
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.deberta = DebertaV2Model(config)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.classifier = nn.Linear(config.hidden_size, config.num_labels)
self.init_weights()
@add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=TokenClassifierOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
inputs_embeds=None,
labels=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Labels for computing the token classification loss. Indices should be in ``[0, ..., config.num_labels -
1]``.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
outputs = self.deberta(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
sequence_output = self.dropout(sequence_output)
logits = self.classifier(sequence_output)
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
# Only keep active parts of the loss
if attention_mask is not None:
active_loss = attention_mask.view(-1) == 1
active_logits = logits.view(-1, self.num_labels)
active_labels = torch.where(
active_loss, labels.view(-1), torch.tensor(loss_fct.ignore_index).type_as(labels)
)
loss = loss_fct(active_logits, active_labels)
else:
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
if not return_dict:
output = (logits,) + outputs[1:]
return ((loss,) + output) if loss is not None else output
return TokenClassifierOutput(
loss=loss,
logits=logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
@add_start_docstrings(
"""
DeBERTa Model with a span classification head on top for extractive question-answering tasks like SQuAD (linear
layers on top of the hidden-states output to compute `span start logits` and `span end logits`).
""",
DEBERTA_START_DOCSTRING,
)
# Copied from transformers.models.deberta.modeling_deberta.DebertaForQuestionAnswering with Deberta->DebertaV2
class DebertaV2ForQuestionAnswering(DebertaV2PreTrainedModel):
_keys_to_ignore_on_load_unexpected = [r"pooler"]
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.deberta = DebertaV2Model(config)
self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels)
self.init_weights()
@add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=QuestionAnsweringModelOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
inputs_embeds=None,
start_positions=None,
end_positions=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for position (index) of the start of the labelled span for computing the token classification loss.
Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
sequence are not taken into account for computing the loss.
end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for position (index) of the end of the labelled span for computing the token classification loss.
Positions are clamped to the length of the sequence (:obj:`sequence_length`). Positions outside of the
sequence are not taken into account for computing the loss.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
outputs = self.deberta(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1).contiguous()
end_logits = end_logits.squeeze(-1).contiguous()
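# qa_outputs emits two logits per token; after the split they form independent score distributions
# over candidate start and end positions.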
total_loss = None
if start_positions is not None and end_positions is not None:
# If we are on multi-GPU, the split can add an extra dimension; squeeze it away
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
# sometimes the start/end positions are outside our model inputs; we ignore these terms
ignored_index = start_logits.size(1)
start_positions = start_positions.clamp(0, ignored_index)
end_positions = end_positions.clamp(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
if not return_dict:
output = (start_logits, end_logits) + outputs[1:]
return ((total_loss,) + output) if total_loss is not None else output
return QuestionAnsweringModelOutput(
loss=total_loss,
start_logits=start_logits,
end_logits=end_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
|
apache-2.0
| 7,409,934,508,913,914,000 | 39.564103 | 128 | 0.620604 | false |
googleapis/python-compute
|
tests/unit/gapic/compute_v1/test_reservations.py
|
1
|
72826
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests.sessions import Session
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.reservations import ReservationsClient
from google.cloud.compute_v1.services.reservations import pagers
from google.cloud.compute_v1.services.reservations import transports
from google.cloud.compute_v1.services.reservations.transports.base import (
_GOOGLE_AUTH_VERSION,
)
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert ReservationsClient._get_default_mtls_endpoint(None) is None
assert (
ReservationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
)
assert (
ReservationsClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
ReservationsClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
ReservationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert ReservationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [ReservationsClient,])
def test_reservations_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "compute.googleapis.com:443"
@pytest.mark.parametrize("client_class", [ReservationsClient,])
def test_reservations_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "compute.googleapis.com:443"
def test_reservations_client_get_transport_class():
transport = ReservationsClient.get_transport_class()
available_transports = [
transports.ReservationsRestTransport,
]
assert transport in available_transports
transport = ReservationsClient.get_transport_class("rest")
assert transport == transports.ReservationsRestTransport
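# A minimal sketch of constructing a client with an explicit REST transport, mirroring the tests
# below (anonymous credentials, suitable only for mocked calls):
#
#     transport = transports.ReservationsRestTransport(
#         credentials=ga_credentials.AnonymousCredentials()
#     )
#     client = ReservationsClient(transport=transport)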
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(ReservationsClient, transports.ReservationsRestTransport, "rest"),],
)
@mock.patch.object(
ReservationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReservationsClient)
)
def test_reservations_client_client_options(
client_class, transport_class, transport_name
):
# Check that if a transport instance is provided we won't create a new one.
with mock.patch.object(ReservationsClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if a transport is provided via str we will create a new one.
with mock.patch.object(ReservationsClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(ReservationsClient, transports.ReservationsRestTransport, "rest", "true"),
(ReservationsClient, transports.ReservationsRestTransport, "rest", "false"),
],
)
@mock.patch.object(
ReservationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReservationsClient)
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_reservations_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. The endpoint is autoswitched to the default
# mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(ReservationsClient, transports.ReservationsRestTransport, "rest"),],
)
def test_reservations_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(ReservationsClient, transports.ReservationsRestTransport, "rest"),],
)
def test_reservations_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_aggregated_list_rest(
transport: str = "rest", request_type=compute.AggregatedListReservationsRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.ReservationAggregatedList(
id="id_value",
items={
"key_value": compute.ReservationsScopedList(
reservations=[compute.Reservation(commitment="commitment_value")]
)
},
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
unreachables=["unreachables_value"],
warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED),
)
# Wrap the value into a proper Response obj
json_return_value = compute.ReservationAggregatedList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.aggregated_list(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.AggregatedListPager)
assert response.id == "id_value"
assert response.items == {
"key_value": compute.ReservationsScopedList(
reservations=[compute.Reservation(commitment="commitment_value")]
)
}
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
assert response.unreachables == ["unreachables_value"]
assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED)
def test_aggregated_list_rest_from_dict():
test_aggregated_list_rest(request_type=dict)
def test_aggregated_list_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.ReservationAggregatedList()
# Wrap the value into a proper Response obj
json_return_value = compute.ReservationAggregatedList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.aggregated_list(project="project_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
def test_aggregated_list_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.aggregated_list(
compute.AggregatedListReservationsRequest(), project="project_value",
)
def test_aggregated_list_pager():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Set the response as a series of pages
response = (
compute.ReservationAggregatedList(
items={
"a": compute.ReservationsScopedList(),
"b": compute.ReservationsScopedList(),
"c": compute.ReservationsScopedList(),
},
next_page_token="abc",
),
compute.ReservationAggregatedList(items={}, next_page_token="def",),
compute.ReservationAggregatedList(
items={"g": compute.ReservationsScopedList(),}, next_page_token="ghi",
),
compute.ReservationAggregatedList(
items={
"h": compute.ReservationsScopedList(),
"i": compute.ReservationsScopedList(),
},
),
)
# Two responses for two calls
response = response + response
# Wrap the values into proper Response objs
response = tuple(compute.ReservationAggregatedList.to_json(x) for x in response)
return_values = tuple(Response() for i in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
metadata = ()
pager = client.aggregated_list(request={})
assert pager._metadata == metadata
assert isinstance(pager.get("a"), compute.ReservationsScopedList)
assert pager.get("h") is None
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, tuple) for i in results)
for result in results:
assert isinstance(result, tuple)
assert tuple(type(t) for t in result) == (
str,
compute.ReservationsScopedList,
)
assert pager.get("a") is None
assert isinstance(pager.get("h"), compute.ReservationsScopedList)
pages = list(client.aggregated_list(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
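# Iterating the pager itself yields (zone, ReservationsScopedList) tuples flattened across pages,
# while `.pages` yields one ReservationAggregatedList per mocked HTTP response, as the assertions
# above demonstrate.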
def test_delete_rest(
transport: str = "rest", request_type=compute.DeleteReservationRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
error=compute.Error(errors=[compute.Errors(code="code_value")]),
http_error_message="http_error_message_value",
http_error_status_code=2374,
id="id_value",
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id="target_id_value",
target_link="target_link_value",
user="user_value",
warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)],
zone="zone_value",
)
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.error == compute.Error(errors=[compute.Errors(code="code_value")])
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == "id_value"
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == "target_id_value"
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.warnings == [
compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)
]
assert response.zone == "zone_value"
def test_delete_rest_from_dict():
test_delete_rest(request_type=dict)
def test_delete_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete(
project="project_value", zone="zone_value", reservation="reservation_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
assert "reservation_value" in http_call[1] + str(body)
def test_delete_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete(
compute.DeleteReservationRequest(),
project="project_value",
zone="zone_value",
reservation="reservation_value",
)
def test_get_rest(transport: str = "rest", request_type=compute.GetReservationRequest):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Reservation(
commitment="commitment_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
id="id_value",
kind="kind_value",
name="name_value",
satisfies_pzs=True,
self_link="self_link_value",
specific_reservation=compute.AllocationSpecificSKUReservation(
count="count_value"
),
specific_reservation_required=True,
status=compute.Reservation.Status.CREATING,
zone="zone_value",
)
# Wrap the value into a proper Response obj
json_return_value = compute.Reservation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Reservation)
assert response.commitment == "commitment_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.id == "id_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.satisfies_pzs is True
assert response.self_link == "self_link_value"
assert response.specific_reservation == compute.AllocationSpecificSKUReservation(
count="count_value"
)
assert response.specific_reservation_required is True
assert response.status == compute.Reservation.Status.CREATING
assert response.zone == "zone_value"
def test_get_rest_from_dict():
test_get_rest(request_type=dict)
def test_get_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Reservation()
# Wrap the value into a proper Response obj
json_return_value = compute.Reservation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get(
project="project_value", zone="zone_value", reservation="reservation_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
assert "reservation_value" in http_call[1] + str(body)
def test_get_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get(
compute.GetReservationRequest(),
project="project_value",
zone="zone_value",
reservation="reservation_value",
)
def test_get_iam_policy_rest(
transport: str = "rest", request_type=compute.GetIamPolicyReservationRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Policy(
audit_configs=[
compute.AuditConfig(
audit_log_configs=[
compute.AuditLogConfig(
exempted_members=["exempted_members_value"]
)
]
)
],
bindings=[compute.Binding(binding_id="binding_id_value")],
etag="etag_value",
iam_owned=True,
rules=[compute.Rule(action=compute.Rule.Action.ALLOW)],
version=774,
)
# Wrap the value into a proper Response obj
json_return_value = compute.Policy.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get_iam_policy(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Policy)
assert response.audit_configs == [
compute.AuditConfig(
audit_log_configs=[
compute.AuditLogConfig(exempted_members=["exempted_members_value"])
]
)
]
assert response.bindings == [compute.Binding(binding_id="binding_id_value")]
assert response.etag == "etag_value"
assert response.iam_owned is True
assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)]
assert response.version == 774
def test_get_iam_policy_rest_from_dict():
test_get_iam_policy_rest(request_type=dict)
def test_get_iam_policy_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Policy()
# Wrap the value into a proper Response obj
json_return_value = compute.Policy.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_iam_policy(
project="project_value", zone="zone_value", resource="resource_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
assert "resource_value" in http_call[1] + str(body)
def test_get_iam_policy_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_iam_policy(
compute.GetIamPolicyReservationRequest(),
project="project_value",
zone="zone_value",
resource="resource_value",
)
def test_insert_rest(
transport: str = "rest", request_type=compute.InsertReservationRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
error=compute.Error(errors=[compute.Errors(code="code_value")]),
http_error_message="http_error_message_value",
http_error_status_code=2374,
id="id_value",
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id="target_id_value",
target_link="target_link_value",
user="user_value",
warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)],
zone="zone_value",
)
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.insert(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.error == compute.Error(errors=[compute.Errors(code="code_value")])
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == "id_value"
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == "target_id_value"
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.warnings == [
compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)
]
assert response.zone == "zone_value"
def test_insert_rest_from_dict():
test_insert_rest(request_type=dict)
def test_insert_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
reservation_resource = compute.Reservation(commitment="commitment_value")
client.insert(
project="project_value",
zone="zone_value",
reservation_resource=reservation_resource,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
assert compute.Reservation.to_json(
reservation_resource,
including_default_value_fields=False,
use_integers_for_enums=False,
) in http_call[1] + str(body)
def test_insert_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.insert(
compute.InsertReservationRequest(),
project="project_value",
zone="zone_value",
reservation_resource=compute.Reservation(commitment="commitment_value"),
)
def test_list_rest(
transport: str = "rest", request_type=compute.ListReservationsRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.ReservationList(
id="id_value",
items=[compute.Reservation(commitment="commitment_value")],
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED),
)
# Wrap the value into a proper Response obj
json_return_value = compute.ReservationList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListPager)
assert response.id == "id_value"
assert response.items == [compute.Reservation(commitment="commitment_value")]
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED)
def test_list_rest_from_dict():
test_list_rest(request_type=dict)
def test_list_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.ReservationList()
# Wrap the value into a proper Response obj
json_return_value = compute.ReservationList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list(
project="project_value", zone="zone_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
def test_list_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list(
compute.ListReservationsRequest(),
project="project_value",
zone="zone_value",
)
def test_list_pager():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Set the response as a series of pages
response = (
compute.ReservationList(
items=[
compute.Reservation(),
compute.Reservation(),
compute.Reservation(),
],
next_page_token="abc",
),
compute.ReservationList(items=[], next_page_token="def",),
compute.ReservationList(
items=[compute.Reservation(),], next_page_token="ghi",
),
compute.ReservationList(
items=[compute.Reservation(), compute.Reservation(),],
),
)
# Two responses for two calls
response = response + response
# Wrap the values into proper Response objs
response = tuple(compute.ReservationList.to_json(x) for x in response)
return_values = tuple(Response() for i in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
metadata = ()
pager = client.list(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, compute.Reservation) for i in results)
pages = list(client.list(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
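# The pager pattern above generalizes: each mocked page carries a
# next_page_token, and iterating the pager issues one HTTP call per page
# until an empty token is returned (illustrative sketch, not a full test):
#   pager = client.list(request={})
#   first_page = next(iter(pager.pages))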
def test_resize_rest(
transport: str = "rest", request_type=compute.ResizeReservationRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
error=compute.Error(errors=[compute.Errors(code="code_value")]),
http_error_message="http_error_message_value",
http_error_status_code=2374,
id="id_value",
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id="target_id_value",
target_link="target_link_value",
user="user_value",
warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)],
zone="zone_value",
)
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.resize(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.error == compute.Error(errors=[compute.Errors(code="code_value")])
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == "id_value"
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == "target_id_value"
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.warnings == [
compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)
]
assert response.zone == "zone_value"
def test_resize_rest_from_dict():
test_resize_rest(request_type=dict)
def test_resize_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
reservations_resize_request_resource = compute.ReservationsResizeRequest(
specific_sku_count="specific_sku_count_value"
)
client.resize(
project="project_value",
zone="zone_value",
reservation="reservation_value",
reservations_resize_request_resource=reservations_resize_request_resource,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
assert "reservation_value" in http_call[1] + str(body)
assert compute.ReservationsResizeRequest.to_json(
reservations_resize_request_resource,
including_default_value_fields=False,
use_integers_for_enums=False,
) in http_call[1] + str(body)
def test_resize_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.resize(
compute.ResizeReservationRequest(),
project="project_value",
zone="zone_value",
reservation="reservation_value",
reservations_resize_request_resource=compute.ReservationsResizeRequest(
specific_sku_count="specific_sku_count_value"
),
)
def test_set_iam_policy_rest(
transport: str = "rest", request_type=compute.SetIamPolicyReservationRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Policy(
audit_configs=[
compute.AuditConfig(
audit_log_configs=[
compute.AuditLogConfig(
exempted_members=["exempted_members_value"]
)
]
)
],
bindings=[compute.Binding(binding_id="binding_id_value")],
etag="etag_value",
iam_owned=True,
rules=[compute.Rule(action=compute.Rule.Action.ALLOW)],
version=774,
)
# Wrap the value into a proper Response obj
json_return_value = compute.Policy.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.set_iam_policy(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Policy)
assert response.audit_configs == [
compute.AuditConfig(
audit_log_configs=[
compute.AuditLogConfig(exempted_members=["exempted_members_value"])
]
)
]
assert response.bindings == [compute.Binding(binding_id="binding_id_value")]
assert response.etag == "etag_value"
assert response.iam_owned is True
assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)]
assert response.version == 774
def test_set_iam_policy_rest_from_dict():
test_set_iam_policy_rest(request_type=dict)
def test_set_iam_policy_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Policy()
# Wrap the value into a proper Response obj
json_return_value = compute.Policy.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
zone_set_policy_request_resource = compute.ZoneSetPolicyRequest(
bindings=[compute.Binding(binding_id="binding_id_value")]
)
client.set_iam_policy(
project="project_value",
zone="zone_value",
resource="resource_value",
zone_set_policy_request_resource=zone_set_policy_request_resource,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
assert "resource_value" in http_call[1] + str(body)
assert compute.ZoneSetPolicyRequest.to_json(
zone_set_policy_request_resource,
including_default_value_fields=False,
use_integers_for_enums=False,
) in http_call[1] + str(body)
def test_set_iam_policy_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.set_iam_policy(
compute.SetIamPolicyReservationRequest(),
project="project_value",
zone="zone_value",
resource="resource_value",
zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(
bindings=[compute.Binding(binding_id="binding_id_value")]
),
)
def test_test_iam_permissions_rest(
transport: str = "rest", request_type=compute.TestIamPermissionsReservationRequest
):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.TestPermissionsResponse(
permissions=["permissions_value"],
)
# Wrap the value into a proper Response obj
json_return_value = compute.TestPermissionsResponse.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.test_iam_permissions(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.TestPermissionsResponse)
assert response.permissions == ["permissions_value"]
def test_test_iam_permissions_rest_from_dict():
test_test_iam_permissions_rest(request_type=dict)
def test_test_iam_permissions_rest_flattened():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.TestPermissionsResponse()
# Wrap the value into a proper Response obj
json_return_value = compute.TestPermissionsResponse.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
test_permissions_request_resource = compute.TestPermissionsRequest(
permissions=["permissions_value"]
)
client.test_iam_permissions(
project="project_value",
zone="zone_value",
resource="resource_value",
test_permissions_request_resource=test_permissions_request_resource,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("data")
assert "project_value" in http_call[1] + str(body)
assert "zone_value" in http_call[1] + str(body)
assert "resource_value" in http_call[1] + str(body)
assert compute.TestPermissionsRequest.to_json(
test_permissions_request_resource,
including_default_value_fields=False,
use_integers_for_enums=False,
) in http_call[1] + str(body)
def test_test_iam_permissions_rest_flattened_error():
client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.test_iam_permissions(
compute.TestIamPermissionsReservationRequest(),
project="project_value",
zone="zone_value",
resource="resource_value",
test_permissions_request_resource=compute.TestPermissionsRequest(
permissions=["permissions_value"]
),
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.ReservationsRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.ReservationsRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReservationsClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.ReservationsRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReservationsClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.ReservationsRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = ReservationsClient(transport=transport)
assert client.transport is transport
@pytest.mark.parametrize("transport_class", [transports.ReservationsRestTransport,])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_reservations_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.ReservationsTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_reservations_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.compute_v1.services.reservations.transports.ReservationsTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.ReservationsTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"aggregated_list",
"delete",
"get",
"get_iam_policy",
"insert",
"list",
"resize",
"set_iam_policy",
"test_iam_permissions",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
@requires_google_auth_gte_1_25_0
def test_reservations_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.compute_v1.services.reservations.transports.ReservationsTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReservationsTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
@requires_google_auth_lt_1_25_0
def test_reservations_base_transport_with_credentials_file_old_google_auth():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.compute_v1.services.reservations.transports.ReservationsTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReservationsTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_reservations_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.compute_v1.services.reservations.transports.ReservationsTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReservationsTransport()
adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_reservations_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReservationsClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
@requires_google_auth_lt_1_25_0
def test_reservations_auth_adc_old_google_auth():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReservationsClient()
adc.assert_called_once_with(
scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
def test_reservations_http_transport_client_cert_source_for_mtls():
cred = ga_credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
transports.ReservationsRestTransport(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
def test_reservations_host_no_port():
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com"
),
)
assert client.transport._host == "compute.googleapis.com:443"
def test_reservations_host_with_port():
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com:8000"
),
)
assert client.transport._host == "compute.googleapis.com:8000"
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = ReservationsClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = ReservationsClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = ReservationsClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder,)
actual = ReservationsClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = ReservationsClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = ReservationsClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization,)
actual = ReservationsClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = ReservationsClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = ReservationsClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project,)
actual = ReservationsClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = ReservationsClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = ReservationsClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = ReservationsClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = ReservationsClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = ReservationsClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.ReservationsTransport, "_prep_wrapped_messages"
) as prep:
client = ReservationsClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.ReservationsTransport, "_prep_wrapped_messages"
) as prep:
transport_class = ReservationsClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
|
apache-2.0
| -2,268,545,840,816,535,800 | 38.882804 | 111 | 0.654615 | false |
wangxiaomo/rivercrab
|
setup.py
|
1
|
1426
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
VERSION = '0.8'
LONG_DESCRIPTION = open('README.rst').read()
INSTALL_REQUIRES = [
'beautifulsoup4',
]
PY_MAJOR, PY_MINOR = sys.version_info[:2]
if (PY_MAJOR, PY_MINOR) == (2, 6):
INSTALL_REQUIRES.append('argparse')
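# On tooling that understands PEP 508 environment markers, the version
# check above could be written declaratively instead (hypothetical
# alternative, not used by this package):
#   install_requires=['beautifulsoup4', 'argparse; python_version < "2.7"']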
setup(
name='rivercrab',
version=VERSION,
description='River Crab',
long_description=LONG_DESCRIPTION,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
author='Philip Xu',
author_email='pyx@xrefactor.com',
url='https://bitbucket.org/pyx/rivercrab',
download_url=(
'https://bitbucket.org/pyx/rivercrab/get/v%s.tar.bz2' % VERSION),
scripts=['rivercrab'],
license='BSD-New',
install_requires=INSTALL_REQUIRES,
)
|
bsd-3-clause
| -344,059,370,627,083,260 | 29.340426 | 73 | 0.607994 | false |
crawfordsm/pyspectrograph
|
PySpectrograph/Utilities/makeplots.py
|
1
|
1688
|
#
# MAKEPLOTS--A library for making plots for demaniacs
#
#
#
from pylab import *
import numpy
def plotframe(data):
"""Plot the entire data array
returns a figure
"""
nimg = 10
ywidth = 0.08
    xlen = len(data[0]) // nimg  # integer strip width so slicing also works on Python 3
for i in range(nimg):
yax = 0.90 - ywidth * 1.1 * i
x1 = xlen * i
x2 = xlen * (1 + i)
f = axes([0.1, yax, 0.8, ywidth])
f.imshow(data[:, x1:x2], cmap=cm.gray, aspect='auto', vmin=-5, vmax=50)
f.axis('off')
return f
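# Illustrative call of plotframe (assumes a 2-D numpy array of counts;
# the shape below is hypothetical):
#   data = numpy.zeros((100, 5000))
#   f = plotframe(data)   # ten stacked strips of 500 columns each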
def plotfeature(f, wave, data, w1, w2, z):
"""Plot a section of the data array
as indicated by w1 and w2
"""
w1 = w1 * (1 + z)
w2 = w2 * (1 + z)
if w1 > wave.max():
return f
mask = (w1 < wave) * (wave < w2)
mdata = data[:, mask]
f.imshow(mdata, cmap=cm.gray, aspect='auto', vmin=-5, vmax=50)
    # set up the axis labels
    x1 = wave[mask][0]
    x2 = wave[mask][-1]
    dw = (x2 - x1) / 5
    xtarr = arange(x1, x2 + dw, dw)  # candidate tick positions (currently unused)
    # map the current integer tick positions onto wavelength values
    xtlab = []
    for x in xticks()[0]:
        i = int(x)
        if 0 <= i < len(wave[mask]):
            xtlab.append('%4.2f' % wave[mask][i])
        else:
            xtlab.append('0')
f.set_yticklabels([])
f.set_xticklabels([])
return f
def plotlinefeature(f, wave, flux, w1, w2, z):
w1 = w1 * (1 + z)
w2 = w2 * (1 + z)
mask = (w1 < wave) * (wave < w2)
f = plotline(f, wave[mask], flux[mask])
return f
def plotline(f, wave, flux, color=None):
if color:
f.plot(wave, flux, ls='-', color=color, lw=1.55)
else:
f.plot(wave, flux, ls='-', lw=1.55)
f.set_xlim((wave[0], wave[-1]))
# f.set_yticklabels([])
return f
|
bsd-3-clause
| -7,940,486,904,649,295,000 | 22.774648 | 79 | 0.507109 | false |
shuangshuangwang/spark
|
python/pyspark/sql/tests/test_pandas_udf.py
|
1
|
10216
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from pyspark.sql.functions import udf, pandas_udf, PandasUDFType
from pyspark.sql.types import DoubleType, StructType, StructField, LongType
from pyspark.sql.utils import ParseException, PythonException
from pyspark.rdd import PythonEvalType
from pyspark.testing.sqlutils import ReusedSQLTestCase, have_pandas, have_pyarrow, \
pandas_requirement_message, pyarrow_requirement_message
from pyspark.testing.utils import QuietTest
@unittest.skipIf(
not have_pandas or not have_pyarrow,
pandas_requirement_message or pyarrow_requirement_message) # type: ignore[arg-type]
class PandasUDFTests(ReusedSQLTestCase):
def test_pandas_udf_basic(self):
udf = pandas_udf(lambda x: x, DoubleType())
self.assertEqual(udf.returnType, DoubleType())
self.assertEqual(udf.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
udf = pandas_udf(lambda x: x, DoubleType(), PandasUDFType.SCALAR)
self.assertEqual(udf.returnType, DoubleType())
self.assertEqual(udf.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
udf = pandas_udf(lambda x: x, 'double', PandasUDFType.SCALAR)
self.assertEqual(udf.returnType, DoubleType())
self.assertEqual(udf.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
udf = pandas_udf(lambda x: x, StructType([StructField("v", DoubleType())]),
PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
udf = pandas_udf(lambda x: x, 'v double', PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
udf = pandas_udf(lambda x: x, 'v double',
functionType=PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
udf = pandas_udf(lambda x: x, returnType='v double',
functionType=PandasUDFType.GROUPED_MAP)
self.assertEqual(udf.returnType, StructType([StructField("v", DoubleType())]))
self.assertEqual(udf.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
def test_pandas_udf_decorator(self):
@pandas_udf(DoubleType())
def foo(x):
return x
self.assertEqual(foo.returnType, DoubleType())
self.assertEqual(foo.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
@pandas_udf(returnType=DoubleType())
def foo(x):
return x
self.assertEqual(foo.returnType, DoubleType())
self.assertEqual(foo.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
schema = StructType([StructField("v", DoubleType())])
@pandas_udf(schema, PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
@pandas_udf('v double', PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
@pandas_udf(schema, functionType=PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
@pandas_udf(returnType='double', functionType=PandasUDFType.SCALAR)
def foo(x):
return x
self.assertEqual(foo.returnType, DoubleType())
self.assertEqual(foo.evalType, PythonEvalType.SQL_SCALAR_PANDAS_UDF)
@pandas_udf(returnType=schema, functionType=PandasUDFType.GROUPED_MAP)
def foo(x):
return x
self.assertEqual(foo.returnType, schema)
self.assertEqual(foo.evalType, PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF)
def test_udf_wrong_arg(self):
with QuietTest(self.sc):
with self.assertRaises(ParseException):
@pandas_udf('blah')
def foo(x):
return x
with self.assertRaisesRegexp(ValueError, 'Invalid return type.*None'):
@pandas_udf(functionType=PandasUDFType.SCALAR)
def foo(x):
return x
with self.assertRaisesRegexp(ValueError, 'Invalid function'):
@pandas_udf('double', 100)
def foo(x):
return x
with self.assertRaisesRegexp(ValueError, '0-arg pandas_udfs.*not.*supported'):
pandas_udf(lambda: 1, LongType(), PandasUDFType.SCALAR)
with self.assertRaisesRegexp(ValueError, '0-arg pandas_udfs.*not.*supported'):
@pandas_udf(LongType(), PandasUDFType.SCALAR)
def zero_with_type():
return 1
with self.assertRaisesRegexp(TypeError, 'Invalid return type'):
@pandas_udf(returnType=PandasUDFType.GROUPED_MAP)
def foo(df):
return df
with self.assertRaisesRegexp(TypeError, 'Invalid return type'):
@pandas_udf(returnType='double', functionType=PandasUDFType.GROUPED_MAP)
def foo(df):
return df
with self.assertRaisesRegexp(ValueError, 'Invalid function'):
@pandas_udf(returnType='k int, v double', functionType=PandasUDFType.GROUPED_MAP)
def foo(k, v, w):
return k
def test_stopiteration_in_udf(self):
def foo(x):
raise StopIteration()
def foofoo(x, y):
raise StopIteration()
exc_message = "Caught StopIteration thrown from user's code; failing the task"
df = self.spark.range(0, 100)
# plain udf (test for SPARK-23754)
self.assertRaisesRegexp(
PythonException,
exc_message,
df.withColumn('v', udf(foo)('id')).collect
)
# pandas scalar udf
self.assertRaisesRegexp(
PythonException,
exc_message,
df.withColumn(
'v', pandas_udf(foo, 'double', PandasUDFType.SCALAR)('id')
).collect
)
# pandas grouped map
self.assertRaisesRegexp(
PythonException,
exc_message,
df.groupBy('id').apply(
pandas_udf(foo, df.schema, PandasUDFType.GROUPED_MAP)
).collect
)
self.assertRaisesRegexp(
PythonException,
exc_message,
df.groupBy('id').apply(
pandas_udf(foofoo, df.schema, PandasUDFType.GROUPED_MAP)
).collect
)
# pandas grouped agg
self.assertRaisesRegexp(
PythonException,
exc_message,
df.groupBy('id').agg(
pandas_udf(foo, 'double', PandasUDFType.GROUPED_AGG)('id')
).collect
)
def test_pandas_udf_detect_unsafe_type_conversion(self):
import pandas as pd
import numpy as np
values = [1.0] * 3
pdf = pd.DataFrame({'A': values})
df = self.spark.createDataFrame(pdf).repartition(1)
@pandas_udf(returnType="int")
def udf(column):
return pd.Series(np.linspace(0, 1, len(column)))
# Since 0.11.0, PyArrow supports the feature to raise an error for unsafe cast.
with self.sql_conf({
"spark.sql.execution.pandas.convertToArrowArraySafely": True}):
with self.assertRaisesRegexp(Exception,
"Exception thrown when converting pandas.Series"):
df.select(['A']).withColumn('udf', udf('A')).collect()
# Disabling Arrow safe type check.
with self.sql_conf({
"spark.sql.execution.pandas.convertToArrowArraySafely": False}):
df.select(['A']).withColumn('udf', udf('A')).collect()
def test_pandas_udf_arrow_overflow(self):
import pandas as pd
df = self.spark.range(0, 1)
@pandas_udf(returnType="byte")
def udf(column):
return pd.Series([128] * len(column))
# When enabling safe type check, Arrow 0.11.0+ disallows overflow cast.
with self.sql_conf({
"spark.sql.execution.pandas.convertToArrowArraySafely": True}):
with self.assertRaisesRegexp(Exception,
"Exception thrown when converting pandas.Series"):
df.withColumn('udf', udf('id')).collect()
# Disabling safe type check, let Arrow do the cast anyway.
with self.sql_conf({"spark.sql.execution.pandas.convertToArrowArraySafely": False}):
df.withColumn('udf', udf('id')).collect()
if __name__ == "__main__":
from pyspark.sql.tests.test_pandas_udf import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
apache-2.0
| -3,917,267,020,206,477,000 | 39.701195 | 97 | 0.625979 | false |
robert-giaquinto/text-analysis
|
src/topic_model/build_cdtm_data.py
|
1
|
2688
|
from __future__ import division, print_function, absolute_import
import os
import argparse
def day_counts(keys_file, rounding_days=None):
"""
    keys_file: input file with a float date (normalized so the first
    journals begin at zero) in the 4th column.
    rounding_days: number of days to round each date to
    (e.g. 7 rounds to the nearest week).
"""
rval = {}
with open(keys_file, "r") as f:
for line in f:
fields = line.split("\t")
day = int(round(float(fields[3])))
if rounding_days is not None:
day = round(day / rounding_days) * rounding_days
if day in rval:
rval[day] += 1
else:
rval[day] = 1
return rval
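# Worked example of the rounding rule in day_counts (illustrative values):
#   day=10, rounding_days=7 -> round(10 / 7) * 7 = 7
#   day=11, rounding_days=7 -> round(11 / 7) * 7 = 14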
def build_data(keys_file, out_file, ldac_file, rounding_days=None):
days = day_counts(keys_file, rounding_days)
n_days = len(days)
print("Found", n_days, "unique timestamps")
print("Writing day ", end='')
    # text mode and items() keep this working on both Python 2 and 3
    with open(out_file, 'w') as out, open(ldac_file, "r") as ldac:
        out.write(str(n_days) + '\n')
        for day, n_docs in sorted(days.items()):
print(day, end=', ')
out.write(str(day) + '\n')
out.write(str(n_docs) + '\n')
for i in range(n_docs):
bow = ldac.readline()
out.write(bow)
print('\nDone!')
def main():
parser = argparse.ArgumentParser(description='build cdtm data')
parser.add_argument('--data_dir', type=str, help='Data directory where input and output files should be/go.')
parser.add_argument('--train_keys', type=str, help='train keys file.')
parser.add_argument('--test_keys', type=str, help='test keys file.')
parser.add_argument('--train_out', type=str, help='train out file.')
parser.add_argument('--test_out', type=str, help='test out file.')
parser.add_argument('--rounding_days', type=int, default=1, help='number of days to round relative date to (to reduce number of time points)')
args = parser.parse_args()
print('build_cdtm_data.py')
print(args)
train_keys = os.path.join(args.data_dir, args.train_keys)
test_keys = os.path.join(args.data_dir, args.test_keys)
train_out = os.path.join(args.data_dir, args.train_out)
test_out = os.path.join(args.data_dir, args.test_out)
train_ldac = os.path.join(args.data_dir, 'train-mult.dat')
test_ldac = os.path.join(args.data_dir, 'test-mult.dat')
build_data(train_keys, train_out, train_ldac, args.rounding_days)
build_data(test_keys, test_out, test_ldac, args.rounding_days)
if __name__ == "__main__":
main()
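# Example invocation (hypothetical file names):
#   python build_cdtm_data.py --data_dir ./data \
#       --train_keys train_keys.tsv --test_keys test_keys.tsv \
#       --train_out train-cdtm.dat --test_out test-cdtm.dat \
#       --rounding_days 7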
|
mit
| 6,814,561,433,682,251,000 | 36.859155 | 146 | 0.59747 | false |
PhloxAR/phloxar
|
PhloxAR/dc1394/mode.py
|
1
|
12891
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals
from .core import *
from ctypes import c_int32, c_uint32, c_uint64, c_float, byref, pointer
__all__ = [
'Mode', 'Format7', 'mode_map'
]
class Mode(object):
"""
Video mode for a DC1394 camera.
Do not instantiate this class directly. Instead use one of the modes
in 'Camera.modes' or 'Camera.modes_dict' and assign it to 'Camera.mode'.
"""
_mode_id = None
_cam = None
_color_coding = None
_dtype = None
def __init__(self, cam, mode_id):
self._mode_id = mode_id
self._cam = cam
self._dtype_shape()
def __repr__(self):
return self.name
def __eq__(self, other):
return self._mode_id == other.mode_id
def _dtype_shape(self):
"""
Data type and shape.
"""
import numpy
w = c_int32()
h = c_int32()
dll.dc1394_get_image_size_from_video_mode(self._cam, self._mode_id,
byref(w), byref(h))
        # use a list so a third (color) axis can be appended below
        self._shape = [h.value, w.value]
cc = color_coding_t()
dll.dc1394_get_color_coding_from_video_mode(
self._cam, self._mode_id, byref(cc))
self._color_coding = color_codings[cc.value]
self._dtype = '<u1'
if '8' in self._color_coding:
self._dtype = '>u1'
elif '16' in self._color_coding:
self._dtype = '>u2'
elif 'YUV' in self._color_coding:
print("Warning: YUV image format!")
# the data depth is 8 bit in the buffer,
            # but 12 or 16 bit in a color pixel.
self._dtype = ">u1"
else:
print("Nonstandard image format: %s" % mode[-1])
self._dtype = ">u1"
if "RGB" in self._color_coding:
self._shape.append(3)
@property
def mode_id(self):
return self._mode_id
@property
def name(self):
"""
A descriptive name for this mode.
"""
return video_modes[self._mode_id]
    @property
    def framerate(self):
        """
        Allowed framerates if the camera is in this mode.
        """
        # Query the framerates supported in this mode (assumes the
        # framerates_t struct and the framerates map are provided by
        # .core, as in pydc1394).
        fpss = framerates_t()
        dll.dc1394_video_get_supported_framerates(self._cam, self._mode_id,
                                                  byref(fpss))
        return [framerates[i] for i in fpss.framerates[:fpss.num]]
@property
def shape(self):
"""
The size in pixels of frames acquired in this mode.
"""
return self._shape
@property
def color_coding(self):
"""
        The type of color coding of pixels.
"""
return self._color_coding
@property
def scalable(self):
"""
Is this video scalable?
"""
return bool(dll.dc1394_is_video_mode_scalable(self._mode_id))
@property
def dtype(self):
"""
The numpy data type of an image of this mode.
"""
return self._dtype
class Exif(Mode):
pass
class Format7(Mode):
"""
    Format7 modes are flexible modes that support:
    * acquiring and transferring only a subsection of the frame for
    faster acquisition: region-of-interest (ROI)
    * binning the pixels of the sensor for faster acquisition and
    reduced readout noise. The binning strategy in the different
    Format7 modes is defined by the vendor.
    Many aspects of Format7 modes can be altered while an acquisition is
    in progress. A notable exception from this is the size of the packet.
    Use 'max_image_size', 'unit_size', 'unit_position', 'color_codings',
    and 'data_depth' to obtain information about the mode and then set
    its parameters via the attributes 'image_size', 'image_position',
    'color_coding', and 'packet_size', or all of them via the 'roi'
    attribute or with a call to 'setup'.
    All settings are sent to the hardware right away.
"""
@property
def frame_interval(self):
"""
The current frame interval in this format7 mode in seconds.
Read-only.
Use the 'Camera.framerate' and 'Camera.shutter'
features (if present) to influence the framerate.
"""
fi = c_float()
dll.dc1394_get_frame_interval(self._cam, self._mode_id, byref(fi))
return fi.value
@property
def max_image_size(self):
"""
The maximum size (horizontal and vertical) of the ROI in pixels.
Read-only.
"""
hsize = c_uint32()
vsize = c_uint32()
dll.dc1394_format7_get_max_image_size(self._cam, self._mode_id,
byref(hsize), byref(vsize))
return hsize.value, vsize.value
@property
def image_size(self):
"""
The current size (horizontal and vertical) of the ROI in pixels.
The image size can only be a multiple of the :attr:`unit_size`, and
cannot be smaller than it.
"""
hsize = c_uint32()
vsize = c_uint32()
dll.dc1394_format7_get_image_size(self._cam, self._mode_id,
byref(hsize), byref(vsize))
return hsize.value, vsize.value
    @image_size.setter
    def image_size(self, size):
        # property setters receive one value: unpack the (width, height)
        # pair and push the new ROI size to the hardware
        width, height = size
        dll.dc1394_format7_set_image_size(self._cam, self._mode_id,
                                          width, height)
@property
def image_position(self):
"""
The start position of the upper left corner of the ROI in
pixels (horizontal and vertical).
The image position can only be a multiple of the unit position
(zero is acceptable).
"""
x = c_uint32()
y = c_uint32()
dll.dc1394_format7_get_image_position(self._cam, self._mode_id,
byref(x), byref(y))
return x.value, y.value
@image_position.setter
def image_position(self, pos):
x, y = pos
dll.dc1394_format7_set_image_position(self._cam, self._mode_id, x, y)
@property
def color_codings(self):
"""
        Allowed color codings in this mode. Read-only.
"""
pos_codings = color_codings_t()
dll.dc1394_format7_get_color_codings(self._cam, self._mode_id,
byref(pos_codings))
return [color_codings[i] for i in pos_codings.codings[:pos_codings.num]]
@property
def color_coding(self):
"""
        The current color coding.
"""
cc = color_coding_t()
dll.dc1394_format7_get_color_coding(self._cam, self._mode_id, byref(cc))
return color_codings[cc.value]
@color_coding.setter
def color_coding(self, color):
code = color_codings[color]
dll.dc1394_format7_set_color_coding(self._cam, self._mode_id, code)
@property
def unit_position(self):
"""
Horizontal and vertical 'image_position' multiples.
Read-only.
"""
h_unit = c_uint32()
v_unit = c_uint32()
dll.dc1394_format7_get_unit_position(self._cam, self._mode_id,
byref(h_unit), byref(v_unit))
return h_unit.value, v_unit.value
@property
def unit_size(self):
"""
Horizontal and vertical :attr:`image_size` multiples. Read-only.
"""
h_unit = c_uint32()
v_unit = c_uint32()
dll.dc1394_format7_get_unit_size(self._cam, self._mode_id,
byref(h_unit), byref(v_unit))
return h_unit.value, v_unit.value
@property
def roi(self):
"""
Get and set all Format7 parameters at once.
The following definitions can be used to set ROI of Format7 in
a simpler fashion:
* QUERY_FROM_CAMERA (-1) will use the current value used by the
camera,
        * USE_MAX_AVAIL (-2) will set the value to its maximum, and
* USE_RECOMMENDED (-3) can be used for the bytes-per-packet
setting.
"""
w, h, x, y = c_int32(), c_int32(), c_int32(), c_int32()
cco, packet_size = color_coding_t(), c_int32()
dll.dc1394_format7_get_roi(self._cam, self._mode_id, pointer(cco),
byref(packet_size),
byref(x), byref(y), byref(w), byref(h))
return ((w.value, h.value), (x.value, y.value),
color_codings[cco.value], packet_size.value)
@roi.setter
def roi(self, args):
size, position, color, packet_size = args
dll.dc1394_format7_set_roi(self._cam, self._mode_id,
color_codings[color], packet_size,
position[0], position[1], size[0], size[1])
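    # Illustrative use of the roi attribute (values are hypothetical):
    #   size, position, coding, packet = (320, 240), (80, 60), 'Y8', -3
    #   mode.roi = (size, position, coding, packet)  # -3 = use recommended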
@property
def dtype(self):
self._dtype_shape()
return self._dtype
@property
def shape(self):
self._dtype_shape()
return self._shape
@property
def recommended_packet_size(self):
"""
Recommended number of bytes per packet. Read-only.
"""
packet_size = c_uint32()
dll.dc1394_format7_get_recommended_packet_size(self._cam, self._mode_id,
byref(packet_size))
return packet_size.value
@property
def packet_parameters(self):
"""
Maximum number and unit size of bytes per packet. Read-only.
Get the parameters of the packet size: its maximal size and its
unit size. The packet size is always a multiple of the unit
bytes and cannot be zero.
"""
packet_size_max = c_uint32()
packet_size_unit = c_uint32()
dll.dc1394_format7_get_packet_parameters(self._cam, self._mode_id,
byref(packet_size_unit),
byref(packet_size_max))
return packet_size_unit.value, packet_size_max.value
@property
def packet_size(self):
"""
Current number of bytes per packet.
"""
packet_size = c_uint32()
dll.dc1394_format7_get_packet_size(self._cam, self._mode_id,
byref(packet_size))
return packet_size.value
@packet_size.setter
def packet_size(self, packet_size):
dll.dc1394_format7_set_packet_size(self._cam, self._mode_id,
int(packet_size))
@property
def total_bytes(self):
"""
Current total number of bytes per frame. Read-only.
This includes padding (to reach an entire number of packets).
Use :attr:`packet_size` to influence its value.
"""
ppf = c_uint64()
dll.dc1394_format7_get_total_bytes(self._cam, self._mode_id, byref(ppf))
return ppf.value
@property
def pixel_number(self):
"""
The number of pixels per frame. Read-only.
"""
px = c_uint32()
dll.dc1394_format7_get_pixel_number(self._cam, self._mode_id, byref(px))
return px.value
@property
def data_depth(self):
"""
The number of bits per pixel. Read-only.
Need not be a multiple of 8.
"""
dd = c_uint32()
dll.dc1394_format7_get_data_depth(self._cam, self._mode_id, byref(dd))
return dd.value
def setup(self, image_size=(QUERY_FROM_CAMERA, QUERY_FROM_CAMERA),
image_position=(QUERY_FROM_CAMERA, QUERY_FROM_CAMERA),
color_coding=QUERY_FROM_CAMERA, packet_size=USE_RECOMMANDED):
"""
        Set up this Format7 mode.
        Similar to setting :attr:`roi`, but size and position are made
        multiples of :attr:`unit_size` and :attr:`unit_position`. All
        arguments are optional and default to not changing the current
        value. :attr:`packet_size` defaults to the recommended value.
"""
wu, hu = self.unit_size
xu, yu = self.unit_position
position = xu*int(image_position[0]/xu), yu*int(image_position[1]/yu)
size = wu*int(image_size[0]/wu), hu*int(image_size[1]/hu)
self.roi = size, position, color_coding, packet_size
return self.roi
mode_map = {
64: Mode,
65: Mode,
66: Mode,
67: Mode,
68: Mode,
69: Mode,
70: Mode,
71: Mode,
72: Mode,
73: Mode,
74: Mode,
75: Mode,
76: Mode,
77: Mode,
78: Mode,
79: Mode,
80: Mode,
81: Mode,
82: Mode,
83: Mode,
84: Mode,
85: Mode,
86: Mode,
87: Exif,
88: Format7,
89: Format7,
90: Format7,
91: Format7,
92: Format7,
93: Format7,
94: Format7,
95: Format7,
}
def create_mode(cam, m):
if isinstance(m, tuple):
m = "%sx%s_%s" % m
return Mode(cam, video_modes[m])
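# Minimal usage sketch (assumptions: `cam` is a raw camera handle and 88
# is a Format7 mode id from mode_map; names and values are illustrative):
#   mode = mode_map[88](cam, 88)             # -> a Format7 instance
#   mode.setup(image_size=(640, 480),
#              image_position=(0, 0),
#              color_coding='Y8')            # snapped to unit multiples
#   print(mode.shape, mode.dtype, mode.packet_size)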
|
apache-2.0
| -2,508,890,066,847,390,700 | 29.839713 | 80 | 0.551082 | false |
bdestombe/flopy-1
|
flopy/utils/mflistfile.py
|
1
|
25207
|
"""
This is a set of classes for reading budget information out of MODFLOW-style
listing files. Cumulative and incremental budgets are returned as numpy
recarrays, which can then be easily plotted.
"""
import collections
import os
import re
import sys
from datetime import timedelta
import numpy as np
from ..utils.utils_def import totim_to_datetime
class ListBudget(object):
"""
MODFLOW family list file handling
Parameters
----------
file_name : str
the list file name
budgetkey : str
the text string identifying the budget table. (default is None)
timeunit : str
the time unit to return in the recarray. (default is 'days')
Notes
-----
The ListBudget class should not be instantiated directly. Access is
through derived classes: MfListBudget (MODFLOW), SwtListBudget (SEAWAT)
    and SwrListBudget (MODFLOW with the SWR process).
Examples
--------
>>> mf_list = MfListBudget("my_model.list")
>>> incremental, cumulative = mf_list.get_budget()
>>> df_in, df_out = mf_list.get_dataframes(start_datetime="10-21-2015")
"""
def __init__(self, file_name, budgetkey=None, timeunit='days'):
# Set up file reading
assert os.path.exists(file_name)
self.file_name = file_name
if sys.version_info[0] == 2:
self.f = open(file_name, 'r')
elif sys.version_info[0] == 3:
self.f = open(file_name, 'r', encoding='ascii', errors='replace')
self.tssp_lines = 0
# Assign the budgetkey, which should have been overriden
if budgetkey is None:
self.set_budget_key()
else:
self.budgetkey = budgetkey
self.totim = []
self.timeunit = timeunit
self.idx_map = []
self.entries = []
self.null_entries = []
self.time_line_idx = 20
if timeunit.upper() == 'SECONDS':
self.timeunit = 'S'
self.time_idx = 0
elif timeunit.upper() == 'MINUTES':
self.timeunit = 'M'
self.time_idx = 1
elif timeunit.upper() == 'HOURS':
self.timeunit = 'H'
self.time_idx = 2
elif timeunit.upper() == 'DAYS':
self.timeunit = 'D'
self.time_idx = 3
elif timeunit.upper() == 'YEARS':
self.timeunit = 'Y'
self.time_idx = 4
else:
raise Exception('need to reset time_idxs attribute to '
'use units other than days and check usage of '
'timedelta')
# Fill budget recarrays
self._load()
self._isvalid = False
if len(self.idx_map) > 0:
self._isvalid = True
# Close the open file
self.f.close()
# return
return
def set_budget_key(self):
raise Exception('Must be overridden...')
def isvalid(self):
"""
Get a boolean indicating if budget data are available in the file.
Returns
-------
out : boolean
Boolean indicating if budget data are available in the file.
Examples
--------
>>> mf_list = MfListBudget('my_model.list')
>>> valid = mf_list.isvalid()
"""
return self._isvalid
def get_record_names(self):
"""
Get a list of water budget record names in the file.
Returns
-------
out : list of strings
List of unique text names in the binary file.
Examples
--------
>>> mf_list = MfListBudget('my_model.list')
>>> names = mf_list.get_record_names()
"""
if not self._isvalid:
return None
return self.inc.dtype.names
def get_times(self):
"""
Get a list of unique water budget times in the list file.
Returns
-------
out : list of floats
List contains unique water budget simulation times (totim) in list file.
Examples
--------
>>> mf_list = MfListBudget('my_model.list')
>>> times = mf_list.get_times()
"""
if not self._isvalid:
return None
return self.inc['totim'].tolist()
def get_kstpkper(self):
"""
Get a list of unique stress periods and time steps in the list file
water budgets.
Returns
----------
out : list of (kstp, kper) tuples
List of unique kstp, kper combinations in list file. kstp and
kper values are zero-based.
Examples
--------
>>> mf_list = MfListBudget("my_model.list")
>>> kstpkper = mf_list.get_kstpkper()
"""
if not self._isvalid:
return None
kstpkper = []
for kstp, kper in zip(self.inc['time_step'],
self.inc['stress_period']):
kstpkper.append((kstp, kper))
return kstpkper
def get_incremental(self, names=None):
"""
Get a recarray with the incremental water budget items in the list file.
Parameters
----------
names : str or list of strings
Selection of column names to return. If names is not None then
totim, time_step, stress_period, and selection(s) will be returned.
(default is None).
Returns
-------
out : recarray
Numpy recarray with the water budget items in list file. The
recarray also includes totim, time_step, and stress_period.
Examples
--------
>>> mf_list = MfListBudget("my_model.list")
>>> incremental = mf_list.get_incremental()
"""
if not self._isvalid:
return None
if names is None:
return self.inc
else:
if not isinstance(names, list):
names = [names]
names.insert(0, 'stress_period')
names.insert(0, 'time_step')
names.insert(0, 'totim')
return self.inc[names].view(np.recarray)
def get_cumulative(self, names=None):
"""
Get a recarray with the cumulative water budget items in the list file.
Parameters
----------
names : str or list of strings
Selection of column names to return. If names is not None then
totim, time_step, stress_period, and selection(s) will be returned.
(default is None).
Returns
-------
out : recarray
Numpy recarray with the water budget items in list file. The
recarray also includes totim, time_step, and stress_period.
Examples
--------
>>> mf_list = MfListBudget("my_model.list")
>>> cumulative = mf_list.get_cumulative()
"""
if not self._isvalid:
return None
if names is None:
return self.cum
else:
if not isinstance(names, list):
names = [names]
names.insert(0, 'stress_period')
names.insert(0, 'time_step')
names.insert(0, 'totim')
return self.cum[names].view(np.recarray)
def get_budget(self, names=None):
"""
Get the recarrays with the incremental and cumulative water budget items
in the list file.
Parameters
----------
names : str or list of strings
Selection of column names to return. If names is not None then
totim, time_step, stress_period, and selection(s) will be returned.
(default is None).
Returns
-------
out : recarrays
Numpy recarrays with the water budget items in list file. The
recarray also includes totim, time_step, and stress_period. A
separate recarray is returned for the incremental and cumulative
water budget entries.
Examples
--------
>>> mf_list = MfListBudget("my_model.list")
>>> budget = mf_list.get_budget()
"""
if not self._isvalid:
return None
if names is None:
return self.inc, self.cum
else:
if not isinstance(names, list):
names = [names]
names.insert(0, 'stress_period')
names.insert(0, 'time_step')
names.insert(0, 'totim')
return self.inc[names].view(np.recarray), self.cum[names].view(
np.recarray)
def get_data(self, kstpkper=None, idx=None, totim=None, incremental=False):
"""
Get water budget data from the list file for the specified conditions.
Parameters
----------
idx : int
The zero-based record number. The first record is record 0.
(default is None).
kstpkper : tuple of ints
A tuple containing the time step and stress period (kstp, kper).
These are zero-based kstp and kper values. (default is None).
totim : float
The simulation time. (default is None).
incremental : bool
Boolean flag used to determine if incremental or cumulative water
budget data for the specified conditions will be returned. If
incremental=True, incremental water budget data will be returned.
If incremental=False, cumulative water budget data will be
returned. (default is False).
Returns
-------
data : numpy recarray
Array has size (number of budget items, 3). Recarray names are 'index',
'value', 'name'.
See Also
--------
Notes
-----
if both kstpkper and totim are None, will return the last entry
Examples
--------
>>> import matplotlib.pyplot as plt
>>> import flopy
>>> mf_list = flopy.utils.MfListBudget("my_model.list")
>>> data = mf_list.get_data(kstpkper=(0,0))
>>> plt.bar(data['index'], data['value'])
>>> plt.xticks(data['index'], data['name'], rotation=45, size=6)
>>> plt.show()
"""
if not self._isvalid:
return None
ipos = None
if kstpkper is not None:
try:
ipos = self.get_kstpkper().index(kstpkper)
except:
pass
elif totim is not None:
try:
ipos = self.get_times().index(totim)
except:
pass
elif idx is not None:
ipos = idx
else:
ipos = -1
if ipos is None:
print('Could not find specified condition.')
print(' kstpkper = {}'.format(kstpkper))
print(' totim = {}'.format(totim))
return None
if incremental:
t = self.inc[ipos]
else:
t = self.cum[ipos]
dtype = np.dtype(
[('index', np.int32), ('value', np.float32), ('name', '|S25')])
v = np.recarray(shape=(len(self.inc.dtype.names[3:])), dtype=dtype)
for i, name in enumerate(self.inc.dtype.names[3:]):
mult = 1.
if '_OUT' in name:
mult = -1.
v[i]['index'] = i
v[i]['value'] = mult * t[name]
v[i]['name'] = name
return v
def get_dataframes(self, start_datetime='1-1-1970', diff=False):
"""
Get pandas dataframes with the incremental and cumulative water budget
items in the list file.
Parameters
----------
start_datetime : str
If start_datetime is passed as None, the rows are indexed on totim.
Otherwise, a DatetimeIndex is set. (default is 1-1-1970).
diff : bool
If True, matching _IN and _OUT columns are replaced by a single net
(in minus out) column per budget entry. (default is False).
Returns
-------
out : panda dataframes
Pandas dataframes with the incremental and cumulative water budget
items in list file. A separate pandas dataframe is returned for the
incremental and cumulative water budget entries.
Examples
--------
>>> mf_list = MfListBudget("my_model.list")
>>> incrementaldf, cumulativedf = mf_list.get_dataframes()
"""
try:
import pandas as pd
except Exception as e:
raise Exception(
"ListBudget.get_dataframe() error import pandas: " + \
str(e))
if not self._isvalid:
return None
totim = self.get_times()
if start_datetime is not None:
totim = totim_to_datetime(totim,
start=pd.to_datetime(start_datetime),
timeunit=self.timeunit)
df_flux = pd.DataFrame(self.inc, index=totim).loc[:, self.entries]
df_vol = pd.DataFrame(self.cum, index=totim).loc[:, self.entries]
if not diff:
return df_flux, df_vol
else:
in_names = [col for col in df_flux.columns if col.endswith("_IN")]
out_names = [col for col in df_flux.columns if col.endswith("_OUT")]
#print(in_names,out_names)
#print(df_flux.columns)
base_names = [name.replace("_IN",'') for name in in_names]
for name in base_names:
in_name = name + "_IN"
out_name = name + "_OUT"
df_flux.loc[:,name.lower()] = df_flux.loc[:,in_name] - df_flux.loc[:,out_name]
df_flux.pop(in_name)
df_flux.pop(out_name)
df_vol.loc[:,name.lower()] = df_vol.loc[:,in_name] - df_vol.loc[:,out_name]
df_vol.pop(in_name)
df_vol.pop(out_name)
cols = list(df_flux.columns)
cols.sort()
cols = [col.lower() for col in cols]
df_flux.columns = cols
df_vol.columns = cols
return df_flux, df_vol
def _build_index(self, maxentries):
self.idx_map = self._get_index(maxentries)
return
def _get_index(self, maxentries):
# --parse through the file looking for matches and parsing ts and sp
idxs = []
l_count = 1
while True:
seekpoint = self.f.tell()
line = self.f.readline()
if line == '':
break
if self.budgetkey in line:
for l in range(self.tssp_lines):
line = self.f.readline()
try:
ts, sp = self._get_ts_sp(line)
except:
print('unable to cast ts,sp on line number', l_count,
' line: ', line)
break
# print('info found for timestep stress period',ts,sp)
idxs.append([ts, sp, seekpoint])
if maxentries and len(idxs) >= maxentries:
break
return idxs
def _seek_to_string(self, s):
"""
Parameters
----------
s : str
Seek through the file to the next occurrence of s. Return the
seek location when found.
Returns
-------
seekpoint : int
Next location of the string
"""
while True:
seekpoint = self.f.tell()
line = self.f.readline()
if line == '':
break
if s in line:
break
return seekpoint
def _get_ts_sp(self, line):
"""
From the line string, extract the time step and stress period numbers.
"""
# Old method. Was not generic enough.
# ts = int(line[self.ts_idxs[0]:self.ts_idxs[1]])
# sp = int(line[self.sp_idxs[0]:self.sp_idxs[1]])
# Get rid of nasty things
line = line.replace(',', '')
searchstring = 'TIME STEP'
idx = line.index(searchstring) + len(searchstring)
ll = line[idx:].strip().split()
ts = int(ll[0])
searchstring = 'STRESS PERIOD'
idx = line.index(searchstring) + len(searchstring)
ll = line[idx:].strip().split()
sp = int(ll[0])
return ts, sp
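# Illustrative input (a fabricated header line, not from a real listing file):
#   "... AT END OF TIME STEP 3, STRESS PERIOD 12"
# The comma is stripped, then the first token after 'TIME STEP' and the first
# token after 'STRESS PERIOD' are cast to int, so this line yields (3, 12).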
def _set_entries(self):
if len(self.idx_map) < 1:
return None, None
if len(self.entries) > 0:
raise Exception('entries already set:' + str(self.entries))
if not self.idx_map:
raise Exception('must call build_index before call set_entries')
try:
incdict, cumdict = self._get_sp(self.idx_map[0][0],
self.idx_map[0][1],
self.idx_map[0][2])
except:
raise Exception('unable to read budget information from first '
'entry in list file')
self.entries = incdict.keys()
null_entries = collections.OrderedDict()
incdict = collections.OrderedDict()
cumdict = collections.OrderedDict()
for entry in self.entries:
incdict[entry] = []
cumdict[entry] = []
null_entries[entry] = np.NaN
self.null_entries = [null_entries, null_entries]
return incdict, cumdict
def _load(self, maxentries=None):
self._build_index(maxentries)
incdict, cumdict = self._set_entries()
if incdict is None and cumdict is None:
return
totim = []
for ts, sp, seekpoint in self.idx_map:
tinc, tcum = self._get_sp(ts, sp, seekpoint)
for entry in self.entries:
incdict[entry].append(tinc[entry])
cumdict[entry].append(tcum[entry])
# Get the time for this record
seekpoint = self._seek_to_string('TIME SUMMARY AT END')
tslen, sptim, tt = self._get_totim(ts, sp, seekpoint)
totim.append(tt)
# get kstp and kper
idx_array = np.array(self.idx_map)
# build dtype for recarray
dtype_tups = [('totim', np.float32), ("time_step", np.int32),
("stress_period", np.int32)]
for entry in self.entries:
dtype_tups.append((entry, np.float32))
dtype = np.dtype(dtype_tups)
# create recarray
nentries = len(incdict[entry])
self.inc = np.recarray(shape=(nentries,), dtype=dtype)
self.cum = np.recarray(shape=(nentries,), dtype=dtype)
# fill each column of the recarray
for entry in self.entries:
self.inc[entry] = incdict[entry]
self.cum[entry] = cumdict[entry]
# fill the totim, time_step, and stress_period columns for the
# incremental and cumulative recarrays (zero-based kstp,kper)
self.inc['totim'] = np.array(totim)[:]
self.inc["time_step"] = idx_array[:, 0] - 1
self.inc["stress_period"] = idx_array[:, 1] - 1
self.cum['totim'] = np.array(totim)[:]
self.cum["time_step"] = idx_array[:, 0] - 1
self.cum["stress_period"] = idx_array[:, 1] - 1
return
def _get_sp(self, ts, sp, seekpoint):
self.f.seek(seekpoint)
# --read to the start of the "in" budget information
while True:
line = self.f.readline()
if line == '':
print(
'end of file found while seeking budget information for ts,sp',
ts, sp)
return self.null_entries
# --if there are two '=' in this line, then it is a budget line
if len(re.findall('=', line)) == 2:
break
tag = 'IN'
incdict = collections.OrderedDict()
cumdict = collections.OrderedDict()
while True:
if line == '':
# raise Exception('end of file found while seeking budget information')
print(
'end of file found while seeking budget information for ts,sp',
ts, sp)
return self.null_entries
if len(re.findall('=', line)) == 2:
try:
entry, flux, cumu = self._parse_budget_line(line)
except Exception:
print('error parsing budget line in ts,sp', ts, sp)
return self.null_entries
if flux is None:
print(
'error casting in flux for', entry,
' to float in ts,sp',
ts, sp)
return self.null_entries
if cumu is None:
print(
'error casting in cumu for', entry,
' to float in ts,sp',
ts, sp)
return self.null_entries
if entry.endswith(tag.upper()):
if ' - ' in entry.upper():
key = entry.replace(' ', '')
else:
key = entry.replace(' ', '_')
elif 'PERCENT DISCREPANCY' in entry.upper():
key = entry.replace(' ', '_')
else:
key = '{}_{}'.format(entry.replace(' ', '_'), tag)
incdict[key] = flux
cumdict[key] = cumu
else:
if 'OUT:' in line.upper():
tag = 'OUT'
line = self.f.readline()
if entry.upper() == 'PERCENT DISCREPANCY':
break
return incdict, cumdict
def _parse_budget_line(self, line):
# get the budget item name
entry = line.strip().split('=')[0].strip()
# get the cumulative string
idx = line.index('=') + 1
line2 = line[idx:]
ll = line2.strip().split()
cu_str = ll[0]
idx = line2.index('=') + 1
fx_str = line2[idx:].strip()
#
# cu_str = line[self.cumu_idxs[0]:self.cumu_idxs[1]]
# fx_str = line[self.flux_idxs[0]:self.flux_idxs[1]]
flux, cumu = None, None
try:
cumu = float(cu_str)
except:
if 'NAN' in cu_str.strip().upper():
cumu = np.NaN
try:
flux = float(fx_str)
except:
if 'NAN' in fx_str.strip().upper():
flux = np.NaN
return entry, flux, cumu
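# Illustrative input (a fabricated budget line in the usual two-'=' layout):
#   "       STORAGE =   1234.5678         STORAGE =     12.3456"
# The text before the first '=' is the entry name, the first number is the
# cumulative value and the second is the flux, so this line yields
# ('STORAGE', 12.3456, 1234.5678).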
def _get_totim(self, ts, sp, seekpoint):
self.f.seek(seekpoint)
# --read header lines
ihead = 0
while True:
line = self.f.readline()
ihead += 1
if line == '':
print(
'end of file found while seeking time information for ts,sp',
ts, sp)
return np.NaN, np.NaN, np.NaN
elif ihead == 2 and 'SECONDS MINUTES HOURS DAYS YEARS' not in line:
break
elif '-----------------------------------------------------------' in line:
line = self.f.readline()
break
tslen = self._parse_time_line(line)
if tslen is None:
print('error parsing tslen for ts,sp', ts, sp)
return np.NaN, np.NaN, np.NaN
sptim = self._parse_time_line(self.f.readline())
if sptim is None:
print('error parsing sptim for ts,sp', ts, sp)
return np.NaN, np.NaN, np.NaN
totim = self._parse_time_line(self.f.readline())
if totim is None:
print('error parsing totim for ts,sp', ts, sp)
return np.NaN, np.NaN, np.NaN
return tslen, sptim, totim
def _parse_time_line(self, line):
if line == '':
print('end of file found while parsing time information')
return None
try:
time_str = line[self.time_line_idx:]
raw = time_str.split()
idx = self.time_idx
# catch case where itmuni is undefined
# in this case, the table format is different
try:
v = float(raw[0])
except:
time_str = line[45:]
raw = time_str.split()
idx = 0
tval = float(raw[idx])
except:
print('error parsing tslen information', time_str)
return None
return tval
class SwtListBudget(ListBudget):
"""
"""
def set_budget_key(self):
self.budgetkey = 'MASS BUDGET FOR ENTIRE MODEL'
return
class MfListBudget(ListBudget):
"""
"""
def set_budget_key(self):
self.budgetkey = 'VOLUMETRIC BUDGET FOR ENTIRE MODEL'
return
class MfusgListBudget(ListBudget):
"""
"""
def set_budget_key(self):
self.budgetkey = 'VOLUMETRIC BUDGET FOR ENTIRE MODEL'
return
class SwrListBudget(ListBudget):
"""
"""
def set_budget_key(self):
self.budgetkey = 'VOLUMETRIC SURFACE WATER BUDGET FOR ENTIRE MODEL'
self.tssp_lines = 1
return
|
bsd-3-clause
| 148,176,429,000,663,900 | 30.907595 | 101 | 0.509105 | false |
ericdill/travis-little-helper
|
template.py
|
1
|
3049
|
from argparse import ArgumentParser
import yaml
from jinja2 import Environment, FileSystemLoader
import os
TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), 'travis-template')
def main():
p = ArgumentParser()
p.add_argument(
"-tc", "--travis-config",
help="The yaml file specifying the configuration details for the travis yaml file",
nargs="?",
)
p.add_argument(
"-o", "--output-dir",
help="The location to output the completed .travis.yml file. Will be output to \"output-dir/.travis.yml\"",
nargs="?",
default="."
)
p.set_defaults(func=execute)
args = p.parse_args()
execute(args, p)
def execute(args, p):
output_dir = args.output_dir
input_config_yaml = args.travis_config
execute_programmatically(input_config_yaml, output_dir)
def nest_all_the_loops(iterable, matrix=None, matrices=None):
if matrix is None:
matrix = {}
if matrices is None:
matrices = []
local_iterable = iterable.copy()
try:
lib, versions = local_iterable.pop(0)
except IndexError:
matrices.append(matrix.copy())
return
for version in versions:
matrix[lib] = version
nest_all_the_loops(local_iterable, matrix, matrices)
return matrices
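# Illustrative example (made-up library/version pairs): the call
#   nest_all_the_loops([('python', ['2.7', '3.5']), ('numpy', ['1.10'])])
# recursively expands the outer product of the version lists and returns
#   [{'python': '2.7', 'numpy': '1.10'}, {'python': '3.5', 'numpy': '1.10'}]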
def execute_programmatically(input_config_yaml, output_dir):
print("input_config_yaml = %s" % input_config_yaml)
print("output_directory = %s" % output_dir)
travis_config = yaml.load(open(input_config_yaml, 'r'))
print('travis_config = %s' % travis_config)
# turn the env section of the travis config into the outer product of environments
env = travis_config.get('env', {})
print('env from yaml = %s' % env)
env_list = [(k, v) for k, v in env.items()]
print('library matrix = %s' % env_list)
if env_list:
env_outer_prod = nest_all_the_loops(env_list.copy())
matrix = []
for mat in env_outer_prod:
repos = ' '.join(['%s="{%s}"' % (k.upper(), k) for k in sorted(mat.keys()) if k != 'python'])
matrix.append(('%s' % repos).format(**mat))
print('env matrix = %s' % matrix)
travis_config['matrix'] = matrix
travis_config['env'] = {k.lower(): k.upper() for k in env.keys()}
#explicitly format the allow_failures section
allow_failures = travis_config.get('allow_failures', {})
allow_failure_rows = ["%s: %s" % (k, v) for row in allow_failures for k, v in row.items()]
travis_config['allow_failure_rows'] = allow_failure_rows
# create the jinja environment
jinja_env = Environment(loader=FileSystemLoader(TEMPLATE_DIR))
template = jinja_env.get_template('nsls2.tmpl')
try:
os.makedirs(output_dir)
except FileExistsError:
# the file, uh, already exists
pass
travis_yml = template.render(**travis_config)
travis_fname = os.path.join(output_dir, '.travis.yml')
with open(travis_fname, 'w') as f:
f.write(travis_yml)
if __name__ == "__main__":
main()
|
gpl-3.0
| -5,244,402,429,933,902,000 | 32.505495 | 115 | 0.620859 | false |
glaubitz/fs-uae-debian
|
launcher/amitools/fs/validate/DirScan.py
|
1
|
6950
|
from __future__ import absolute_import
from __future__ import print_function
from .BlockScan import BlockScan
from amitools.fs.FSString import FSString
from amitools.fs.FileName import FileName
from amitools.fs.validate.Log import Log
import amitools.fs.DosType as DosType
class DirChainEntry:
"""entry of the hash chain"""
def __init__(self, blk_info):
self.blk_info = blk_info
self.parent_ok = False
self.fn_hash_ok = False
self.valid = False
self.end = False
self.orphaned = False
self.sub = None
def __str__(self):
l = []
if self.parent_ok:
l.append("parent_ok")
if self.fn_hash_ok:
l.append("fn_hash_ok")
if self.valid:
l.append("valid")
if self.end:
l.append("end")
if self.orphaned:
l.append("orphaned")
return "[DCE @%d '%s': %s]" % \
(self.blk_info.blk_num, self.blk_info.name, " ".join(l))
class DirChain:
"""representing a chain of the hashtable in a directory"""
def __init__(self, hash_val):
self.hash_val = hash_val
self.chain = []
def add(self, dce):
self.chain.append(dce)
def get_entries(self):
return self.chain
def __str__(self):
return "{DirChain +%d: #%d}" % (self.hash_val, len(self.chain))
class DirInfo:
"""information structure on a directory"""
def __init__(self, blk_info):
self.blk_info = blk_info
self.chains = {}
self.children = []
def add(self, dc):
self.chains[dc.hash_val] = dc
def add_child(self, c):
self.children.append(c)
def get(self, hash_val):
if hash_val in self.chains:
return self.chains[hash_val]
else:
return None
def get_chains(self):
return self.chains
def __str__(self):
bi = self.blk_info
blk_num = bi.blk_num
name = bi.name
parent_blk = bi.parent_blk
return "<DirInfo @%d '%s' #%d parent:%d child:#%d>" % (blk_num, name, len(self.chains), parent_blk, len(self.children))
class DirScan:
"""directory tree scanner"""
def __init__(self, block_scan, log):
self.log = log
self.block_scan = block_scan
self.root_di = None
self.intl = DosType.is_intl(block_scan.dos_type)
self.files = []
self.dirs = []
def scan_tree(self, root_blk_num, progress=None):
"""scan the root tree"""
# get root block info
root_bi = self.block_scan.get_block(root_blk_num)
if root_bi == None:
self.log.msg(Log.ERROR,"Root block not found?!",root_blk_num)
return None
# do tree scan
if progress != None:
progress.begin("dir")
self.root_di = self.scan_dir(root_bi, progress)
if progress != None:
progress.end()
return self.root_di
def scan_dir(self, dir_bi, progress):
"""check a directory by scanning through the hash table entries and follow the chains
Returns (all_chains_ok, dir_obj)
"""
# create new dir info
di = DirInfo(dir_bi)
self.dirs.append(di)
# run through hash_table of directory and build chains
chains = {}
hash_val = 0
for blk_num in dir_bi.hash_table:
if blk_num != 0:
# build chain
chain = DirChain(hash_val)
self.build_chain(chain, dir_bi, blk_num, progress)
di.add(chain)
hash_val += 1
return di
def build_chain(self, chain, dir_blk_info, blk_num, progress):
"""build a block chain"""
dir_blk_num = dir_blk_info.blk_num
dir_name = dir_blk_info.name
hash_val = chain.hash_val
# remember whether the entry block has already been used in another chain
block_used = self.block_scan.is_block_available(blk_num)
# get entry block
blk_info = self.block_scan.read_block(blk_num)
# create dir chain entry
dce = DirChainEntry(blk_info)
chain.add(dce)
# account
if progress != None:
progress.add()
# block already used?
if block_used:
self.log.msg(Log.ERROR, "dir block already used in chain #%d of dir '%s (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# self reference?
if blk_num == dir_blk_num:
self.log.msg(Log.ERROR, "dir block in its own chain #%d of dir '%s' (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# not a block in range
if blk_info == None:
self.log.msg(Log.ERROR, "out-of-range block terminates chain #%d of dir '%s' (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# check type of entry block
b_type = blk_info.blk_type
if b_type not in (BlockScan.BT_DIR, BlockScan.BT_FILE_HDR):
self.log.msg(Log.ERROR, "invalid block terminates chain #%d of dir '%s' (%d)" % (hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# check referenced block type in chain
blk_type = blk_info.blk_type
if blk_type in (BlockScan.BT_ROOT, BlockScan.BT_FILE_LIST, BlockScan.BT_FILE_DATA):
self.log.msg(Log.ERROR, "invalid block type %d terminates chain #%d of dir '%s' (%d)" % (blk_type, hash_val, dir_name, dir_blk_num), blk_num)
dce.end = True
return
# all following are ok
dce.valid = True
# check parent of block
name = blk_info.name
dce.parent_ok = (blk_info.parent_blk == dir_blk_num)
if not dce.parent_ok:
self.log.msg(Log.ERROR, "invalid parent in '%s' chain #%d of dir '%s' (%d)" % (name, hash_val, dir_name, dir_blk_num), blk_num)
# check name hash
fn = FileName(name, self.intl)
fn_hash = fn.hash()
dce.fn_hash_ok = (fn_hash == hash_val)
if not dce.fn_hash_ok:
self.log.msg(Log.ERROR, "invalid name hash in '%s' chain #%d of dir '%s' (%d)" % (name, hash_val, dir_name, dir_blk_num), blk_num)
# recurse into dir?
if blk_type == BlockScan.BT_DIR:
dce.sub = self.scan_dir(blk_info, progress)
elif blk_type == BlockScan.BT_FILE_HDR:
self.files.append(dce)
# check next block in chain
next_blk = blk_info.next_blk
if next_blk != 0:
self.build_chain(chain, dir_blk_info, next_blk, progress)
else:
dce.end = True
def get_all_file_hdr_blk_infos(self):
"""return all file chain entries"""
result = []
for f in self.files:
result.append(f.blk_info)
return result
def get_all_dir_infos(self):
"""return all dir infos"""
return self.dirs
def dump(self):
"""dump whole dir info structure"""
self.dump_dir_info(self.root_di, 0)
def dump_dir_info(self, di, indent):
"""dump a single dir info structure and its sub dirs"""
istr = " " * indent
print(istr, di)
for hash_value in sorted(di.get_chains().keys()):
dc = di.get(hash_value)
print(istr, " ", dc)
for dce in dc.get_entries():
print(istr, " ", dce)
sub = dce.sub
if sub != None and dce.blk_info.blk_type == BlockScan.BT_DIR:
self.dump_dir_info(sub, indent+1)
|
gpl-2.0
| -242,199,672,101,419,070 | 28.324895 | 147 | 0.602734 | false |
zjurelinac/Linker
|
utils.py
|
1
|
2492
|
"""Module containing utility functions"""
import base64
import hashlib
import random
import re
import string
from datetime import datetime
from math import *
def hashfunc( str ):
"""Returns a hash value of a given string
Takes a string and returns its SHA512 hash value, encoded in base64
"""
return base64.b64encode( hashlib.sha512( str.encode() ).digest() ).decode( 'ascii' )
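# Illustrative: a SHA-512 digest is 64 bytes, so the base64-encoded result is
# always 88 ASCII characters, e.g. len(hashfunc('secret')) == 88.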
def markup_to_html( str ):
"""Transforms a simple markup string into html
Supported are: bold as '**bold part**' and italic as '*italicized part*'
"""
str = re.sub( r'\*\*(.*?)\*\*', r'<b>\1</b>', str )
str = re.sub( r'\*(.*?)\*', r'<i>\1</i>', str )
return str
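# Illustrative: markup_to_html('**bold** and *italic*') returns
# '<b>bold</b> and <i>italic</i>'; bold is substituted first so that '**'
# is not consumed by the single-asterisk pattern.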
def pretty_date( time = False ):
"""Returns a string containing a human-readable date
Takes a datetime object and returns a pretty string like 'an hour ago',
'Yesterday', '3 months ago', 'just now', etc. Values that are not
datetime objects are returned unchanged.
"""
now = datetime.now()
if isinstance( time, datetime ):
diff = now - time
else:
return time
if time == datetime.min:
return "Never"
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return ''
if day_diff == 0:
if second_diff < 10:
return "just now"
if second_diff < 60:
return str( second_diff ) + " seconds ago"
if second_diff < 120:
return "a minute ago"
if second_diff < 3600:
return str( round( second_diff / 60 ) ) + " minutes ago"
if second_diff < 7200:
return "an hour ago"
if second_diff < 86400:
return str( round( second_diff / 3600 ) ) + " hours ago"
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str( day_diff ) + " days ago"
if day_diff < 31:
return str( round( day_diff / 7 ) ) + " weeks ago"
if day_diff < 365:
return str( round( day_diff / 30 ) ) + " months ago"
return str( round( day_diff / 365 ) ) + " years ago"
def random_string( len ):
"""Returns a random string of a given length
"""
return ''.join( [ random.choice( string.ascii_letters + string.digits + '$%' )
for _ in range( 0, len ) ] )
def shorten_array( a, n ):
"""Shortens a given array to at most n elements, appending the number of elements that were cut off
"""
return a[ :n ] + ( [ str( len( a ) - n ) + ' others' ] if len( a ) > n else [] )
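# Illustrative: shorten_array(['a', 'b', 'c', 'd'], 2) returns
# ['a', 'b', '2 others'].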
|
mit
| 6,123,698,279,064,488,000 | 28.666667 | 103 | 0.567014 | false |
hirokiky/django-webcommands
|
webcommands/forms.py
|
1
|
1779
|
from django import forms
from django.utils import six
from webcommands import utils as webcommands_utils
def field_for_option(option):
if option.type == 'string':
field = forms.CharField(label=str(option), max_length=255)
elif option.type == 'int':
field = forms.IntegerField(label=str(option))
elif option.type == 'long':
field = forms.IntegerField(label=str(option))
elif option.type == 'choice':
choices = zip(map(lambda x: x.upper(), option.choices), option.choices)
field = forms.ChoiceField(label=str(option),
choices=choices)
else:
field = forms.CharField(label=str(option), max_length=255)
return field
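# Illustrative sketch (assumes an optparse-style option object with 'type' and
# 'choices' attributes): an option with type 'choice' and choices
# ['json', 'xml'] yields a ChoiceField whose choices are
# [('JSON', 'json'), ('XML', 'xml')].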
class CommandFormMetaClass(type):
def __new__(cls, name, bases, attrs):
super_new = super(CommandFormMetaClass, cls).__new__
new_class = super_new(cls, name, bases, attrs)
if 'command_class' in attrs:
command_class = attrs['command_class']
fields = {str(option): field_for_option(option)
for option in command_class.option_list}
else:
fields = {}
new_class.base_fields = fields
return new_class
class BaseCommandForm(forms.BaseForm):
def execute(self):
pass
class CommandForm(six.with_metaclass(CommandFormMetaClass, BaseCommandForm)):
pass
def commandform_factory(command_class):
"""Factory to return CommandForm correspond to gotten command instance
"""
command_name = command_class.__module__.rsplit('.', 1)[-1]
command_name = webcommands_utils.funcname_to_classname(command_name)
attrs = {'command_class': command_class}
return type(command_name + str('CommandForm'), (CommandForm,), attrs)
|
mit
| 8,279,747,194,203,811,000 | 30.767857 | 79 | 0.641372 | false |
ksurct/MercuryRoboticsEmbedded2016
|
ksurobot/hardware/_wiringpi.py
|
1
|
1766
|
from ctypes import cdll, c_int, CFUNCTYPE, POINTER, pointer, c_ubyte
from enum import Enum, IntEnum
from ..util import get_config
from .utils import Wrapper
libwiringpi = Wrapper('/usr/local/lib/libwiringPi.so.2.32')
wiringPiISR_cb = CFUNCTYPE(None)
wiringPiPiSPIDataRW_data = POINTER(c_ubyte)
class PinModes(IntEnum):
INPUT = 0
OUTPUT = 1
PWM_OUTPUT = 2
GPIO_CLOCK = 3
SOFT_PWM_OUTPUT = 4
SOFT_TONE_OUTPUT = 5
PWM_TONE_OUTPUT = 6
class PullModes(IntEnum):
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
class InterruptModes(IntEnum):
INT_EDGE_SETUP = 0
INT_EDGE_FALLING = 1
INT_EDGE_RISING = 2
INT_EDGE_BOTH = 3
@libwiringpi.wrap([], None)
def wiringPiSetup():
pass
@libwiringpi.wrap([], None)
def wiringPiSetupSys():
pass
@libwiringpi.wrap([], None)
def wiringPiSetupGpio():
pass
@libwiringpi.wrap([], None)
def wiringPiSetupPhys():
pass
@libwiringpi.wrap([c_int, c_int], None)
def pinModeAlt(pin, mode):
pass
@libwiringpi.wrap([c_int], None)
def pwmSetClock(speed):
pass
@libwiringpi.wrap([c_int, c_int], None)
def pinMode(pin, mode):
pass
@libwiringpi.wrap([c_int, c_int], None)
def pullUpDnControl(pin, pud):
pass
@libwiringpi.wrap([c_int], c_int)
def digitalRead(pin):
pass
@libwiringpi.wrap([c_int, c_int], None)
def digitalWrite(pin, value):
pass
@libwiringpi.wrap([c_int, c_int], None)
def pwmWrite(pin, value):
pass
@libwiringpi.wrap([c_int, c_int, wiringPiISR_cb], None)
def wiringPiISR(pin, mode, callback):
pass
@libwiringpi.wrap([c_int, c_int], c_int)
def wiringPiSPISetup (channel, speed):
pass
@libwiringpi.wrap([c_int, wiringPiPiSPIDataRW_data, c_int], c_int)
def wiringPiSPIDataRW (channel, data, len):
pass
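# Illustrative usage (assumes a Raspberry Pi with the wiringPi shared library
# at the path given above; the pin number is hypothetical, and the wrap
# decorator semantics come from the Wrapper helper imported above):
#   wiringPiSetupGpio()
#   pinMode(17, PinModes.OUTPUT)
#   digitalWrite(17, 1)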
|
apache-2.0
| 712,612,528,227,094,300 | 15.819048 | 68 | 0.671574 | false |
vanzhiganov/jftpgw
|
support/cachepurgy.py
|
1
|
2239
|
#!/usr/bin/env python
#
# cachepurgy - script to reduce the size of a jftpgw cache
#
# (C) 2001 Julian Einwag <julian@brightstar.swin.de>
#
from sys import *
from os import *
from string import *
from stat import *
from time import *
maxsize = 40*1024*1024
cachedir = "/tmp/cache"
# This class stores the information of an object in cache (size, age, etc...)
class fileinfo:
def __init__(self, name):
statobj = stat(name)
self.age = time()-statobj[ST_CTIME]
self.size = statobj[ST_SIZE]
self.name = name
self.isempty = 0
if path.isdir(name):
self.isdir = 1
self.isempty = isempty(name)
else:
self.isdir = 0
def __cmp__(self, other):
# We want to have older items first
return cmp(other.age, self.age)
# Checks if a dir is empty
def isempty(dir):
if len(listdir(dir)) == 0:
return 1
else:
return 0
# Calculates the size of the cache
def cachesize(stats):
size = 0
for file in stats:
size = size + file.size
return size
# This removes empty dirs from the cache
def removedirs(stats):
for file in stats:
if file.isdir and file.isempty:
print "Removing directory: ", file.name
rmdir(file.name)
# Cleans the cache
def cleancache(stats):
if cachesize(stats) > maxsize:
if (not stats[0].isdir):
print "Delete: %s" % stats[0].name
try:
unlink(stats[0].name)
except OSError:
stdout.write("File %s does not exist!" % stats[0].name)
# Yeah, I love LISP and recursion
cleancache(stats[1:])
else:
return
def main():
input = popen("find %s -print 2> /dev/null" % cachedir, 'r')
cacheindex = input.readlines()
input.close()
try:
chdir(cachedir)
except OSError:
stderr.write("Cachedir %s does not exist!\n" % cachedir)
exit(1)
cacheindex = map(rstrip, cacheindex)
stats = map(fileinfo, cacheindex)
stats.sort()
cleancache(stats)
removedirs(stats)
if __name__ == '__main__':
main()
|
gpl-2.0
| -3,612,532,231,312,575,000 | 20.528846 | 77 | 0.562305 | false |
teeheee/RobocupSoccerSimulator
|
robotsimul.py
|
1
|
5679
|
#!/usr/bin/env python3
from game import *
from debugger import Debugger
from logSaver import Logger
from gameconfig import gc
from popup_menu import *
import sys
import time
#TODO more comments
#TODO clean up this mess
class App:
def __init__(self):
# flag for shutdown of the simulation
self._running = True
# flags for the keyboard control Interface
self.robotcontrol = False #True for manual control
self.pause = False #True for game paused
self.focusedrobot = 0 #Id of the robot whose sensor values are displayed on the debugger
self._display_surf = None # double-buffered display to minimize lag
self.size = self.width, self.height = 243*3, 182*3 # Window size is fixed TODO: variable window size
self.menu_data = (
'Robot Simulator',
'Restart',
'Quit',
)
self.menu = NonBlockingPopupMenu(self.menu_data)
def on_init(self):
pygame.init()
if gc.GUI["Debugger"]:
width = self.width+self.height
else:
width = self.width
self._display_surf = pygame.display.set_mode((width, self.height), pygame.DOUBLEBUF)
self._game_display = pygame.Surface( self.size )
self.ppcm = 3 #TODO: variable window size
self.center = [self._game_display.get_height() / (2 * self.ppcm),
self._game_display.get_width() / (2 * self.ppcm)]
self._display_surf.set_alpha(None)
self._running = True
game_display_data = {"display": self._game_display,
"ppcm": self.ppcm,
"center": self.center}
self.game = Game(game_display_data)
if gc.GUI["Debugger"]:
self.debugger = Debugger(self._display_surf, self.game.robotProgramHandlers)
self.debugger.setFocusedRobot(self.focusedrobot)
if gc.GUI["Logger"]:
self.logger = Logger(self.game)
self.logger.startLogging()
pygame.mixer.quit()
def on_event(self, event):
for e in self.menu.handle_events(event):
if e.type == USEREVENT:
if e.code == 'MENU':
if e.name is None:
self.menu.hide()
elif e.text == "Quit":
self._running = False
elif e.text == "Restart":
self.game.restart()
else:
print('TODO: handle this Menu event: %s' % (e.text)) #TODO menu handling
elif e.type == MOUSEBUTTONUP:
self.menu.show()
elif e.type == pygame.QUIT:
self._running = False
def on_loop(self):
if not self.pause:
self.logger.tick()
self.game.tick(30) #calculate in ms steps
speed = 0.5
motor = np.array([0.0, 0.0, 0.0, 0.0])
key = pygame.key.get_pressed()
if key[pygame.K_UP]:
motor += np.array([-speed, -speed, speed, speed])
if key[pygame.K_DOWN]:
motor += np.array([speed, speed, -speed, -speed])
if key[pygame.K_RIGHT]:
motor += np.array([speed/2, 0, speed/2, 0])
if key[pygame.K_LEFT]:
motor += np.array([-speed/2, 0, -speed/2, 0])
if key[pygame.K_m]:
motor += np.array([-speed, speed, speed, -speed])
if key[pygame.K_j]:
motor += np.array([speed, -speed, -speed, speed])
if key[pygame.K_1]:
self.focusedrobot = 0
if gc.GUI["Debugger"]:
self.debugger.setFocusedRobot(0)
if key[pygame.K_2]:
self.focusedrobot = 1
if gc.GUI["Debugger"]:
self.debugger.setFocusedRobot(1)
if key[pygame.K_3]:
self.focusedrobot = 2
if gc.GUI["Debugger"]:
self.debugger.setFocusedRobot(2)
if key[pygame.K_4]:
self.focusedrobot = 3
if gc.GUI["Debugger"]:
self.debugger.setFocusedRobot(3)
if key[pygame.K_v]:
if gc.GUI["Debugger"]:
self.debugger.togglePixyMode()
if key[pygame.K_p]:
self.pause = True
else:
self.pause = False
if key[pygame.K_SPACE]:
self.robotcontrol=True
else:
self.robotcontrol=False
if self.robotcontrol:
motor *= 100
self.game.robotInterfaceHandlers[self.focusedrobot].setMotorSpeed(motor[0], motor[1], motor[2], motor[3])
self.game.robotProgramHandlers[self.focusedrobot].block()
else:
self.game.robotProgramHandlers[self.focusedrobot].unBlock()
def on_render(self):
self._display_surf.fill(GREEN)
self.game.draw()
self._display_surf.blit(self._game_display,(0, 0))
if gc.GUI["Debugger"]:
self.debugger.draw()
self.menu.draw()
pygame.display.update()
def on_cleanup(self):
self.game.shutdown()
pygame.quit()
def on_execute(self):
if self.on_init() is False:
self._running = False
while(self._running):
self.on_event(pygame.event.get())
self.on_loop()
if gc.GUI["Fast"] == False:
time.sleep(0.03)
self.on_render()
self.on_cleanup()
if __name__ == "__main__":
#load config file if it is given as argument
if len(sys.argv) == 2:
gc.load(str(sys.argv[1]))
else:
gc.load(None)
theApp = App()
theApp.on_execute()
|
gpl-3.0
| 7,155,189,578,462,399,000 | 32.803571 | 117 | 0.534777 | false |
mikkokeskinen/tunnistamo
|
oidc_apis/models.py
|
1
|
6847
|
from django.core.exceptions import ValidationError
from django.core.validators import RegexValidator
from django.db import models
from django.utils.crypto import get_random_string
from django.utils.translation import ugettext_lazy as _
from multiselectfield import MultiSelectField
from oidc_provider.models import Client
from parler.fields import TranslatedField
from parler.managers import TranslatableQuerySet
from parler.models import TranslatableModel, TranslatedFieldsModel
from oidc_apis.utils import combine_uniquely
from .mixins import AutoFilledIdentifier, ImmutableFields
alphanumeric_validator = RegexValidator(
'^[a-z0-9]*$',
message=_("May contain only lower case letters and digits."))
SCOPE_CHOICES = [
('email', _("E-mail")),
('profile', _("Profile")),
('address', _("Address")),
('github_username', _("GitHub username")),
('ad_groups', _("AD Groups")),
]
class ApiDomain(models.Model):
identifier = models.CharField(
max_length=50, unique=True,
verbose_name=_("identifier"),
help_text=_("API domain identifier, e.g. https://api.hel.fi/auth"))
class Meta:
verbose_name = _("API domain")
verbose_name_plural = _("API domains")
def __str__(self):
return self.identifier
class Api(models.Model):
domain = models.ForeignKey(
ApiDomain,
verbose_name=("domain"),
on_delete=models.CASCADE
)
name = models.CharField(
max_length=50,
validators=[alphanumeric_validator],
verbose_name=_("name")
)
required_scopes = MultiSelectField(
choices=SCOPE_CHOICES, max_length=1000, blank=True,
verbose_name=_("required scopes"),
help_text=_(
"Select the scopes that this API needs information from. "
"Information from the selected scopes will be included to "
"the API Tokens.")
)
oidc_client = models.OneToOneField(
Client, related_name='+',
on_delete=models.CASCADE,
verbose_name=_("OIDC client")
)
class Meta:
unique_together = [('domain', 'name')]
verbose_name = _("API")
verbose_name_plural = _("APIs")
def __str__(self):
return self.identifier
@property
def identifier(self):
return '{domain}/{name}'.format(
domain=self.domain.identifier.rstrip('/'),
name=self.name)
def required_scopes_string(self):
return ' '.join(sorted(self.required_scopes))
required_scopes_string.short_description = _("required scopes")
def clean(self):
if getattr(self, 'oidc_client', None) is None:
self.oidc_client = _get_or_create_oidc_client_for_api(self)
else:
if self.oidc_client.client_id != self.identifier:
raise ValidationError(
{'oidc_client': _(
"OIDC Client ID must match with the identifier")})
super(Api, self).clean()
def save(self, *args, **kwargs):
self.clean()
super(Api, self).save(*args, **kwargs)
def _get_or_create_oidc_client_for_api(api):
(client, _created) = Client.objects.get_or_create(
client_id=api.identifier,
defaults={
'name': api.name,
'client_type': 'confidential',
'client_secret': get_random_string(length=50),
'response_type': 'code',
'jwt_alg': 'RS256',
})
return client
class ApiScopeQuerySet(TranslatableQuerySet):
def by_identifiers(self, identifiers):
return self.filter(identifier__in=identifiers)
def allowed_for_client(self, client):
return self.filter(allowed_apps=client)
class ApiScope(AutoFilledIdentifier, ImmutableFields, TranslatableModel):
immutable_fields = ['api', 'specifier', 'identifier']
identifier = models.CharField(
max_length=150, unique=True, editable=False,
verbose_name=_("identifier"),
help_text=_(
"The scope identifier as known by the API application "
"(i.e. the Resource Server). Generated automatically from "
"the API identifier and the scope specifier."))
api = models.ForeignKey(
Api, related_name='scopes', on_delete=models.CASCADE,
verbose_name=_("API"),
help_text=_("The API that this scope is for."))
specifier = models.CharField(
max_length=30, blank=True,
validators=[alphanumeric_validator],
verbose_name=_("specifier"),
help_text=_(
"If there is a need for multiple scopes per API, "
"this can specify what kind of scope this is about, "
"e.g. \"readonly\". For general API scope "
"just leave this empty."))
name = TranslatedField()
description = TranslatedField()
allowed_apps = models.ManyToManyField(
Client, related_name='granted_api_scopes',
verbose_name=_("allowed applications"),
help_text=_("Select client applications which are allowed "
"to get access to this API scope."))
objects = ApiScopeQuerySet.as_manager()
class Meta:
unique_together = [('api', 'specifier')]
verbose_name = _("API scope")
verbose_name_plural = _("API scopes")
@property
def relative_identifier(self):
return '{api_name}{suffix}'.format(
api_name=self.api.name,
suffix=('.' + self.specifier if self.specifier else '')
)
def _generate_identifier(self):
return '{api_identifier}{suffix}'.format(
api_identifier=self.api.identifier,
suffix=('.' + self.specifier if self.specifier else '')
)
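# Illustrative: for an API with identifier 'https://api.hel.fi/auth/exampleapi'
# (the name 'exampleapi' is made up) and specifier 'readonly', this yields
# 'https://api.hel.fi/auth/exampleapi.readonly'; with an empty specifier the
# scope identifier equals the API identifier.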
@classmethod
def extend_scope(cls, scopes):
required_scopes = cls._get_required_scopes(scopes)
extended_scopes = combine_uniquely(scopes, sorted(required_scopes))
return extended_scopes
@classmethod
def _get_required_scopes(cls, scopes):
api_scopes = ApiScope.objects.by_identifiers(scopes)
apis = {x.api for x in api_scopes}
return set(sum((list(api.required_scopes) for api in apis), []))
class ApiScopeTranslation(TranslatedFieldsModel):
master = models.ForeignKey(
ApiScope, related_name='translations', null=True,
on_delete=models.CASCADE,
verbose_name=_("API scope"))
name = models.CharField(
max_length=200, verbose_name=_("name"))
description = models.CharField(
max_length=1000, verbose_name=_("description"))
class Meta:
unique_together = [('language_code', 'master')]
verbose_name = _("API scope translation")
verbose_name_plural = _("API scope translations")
def __str__(self):
return "{obj}[{lang}]".format(obj=self.master, lang=self.language_code)
|
mit
| 4,226,253,380,893,948,400 | 32.89604 | 79 | 0.621732 | false |
UltrosBot/Ultros-repos
|
control/system/manager.py
|
1
|
2416
|
__author__ = 'Gareth'
import os
from twisted.internet import reactor
import yaml
# import control.system.servers as servers
import control.system.ssl as ssl
from control.utils.log import getLogger
from control.system.singleton import Singleton
CONF_DIR = "control/config/"
DATA_DIR = "control/data/"
LOGS_DIR = "control/logs/"
class Manager(object):
__metaclass__ = Singleton
config = {}
def __init__(self):
self.logger = getLogger("Manager")
try:
self.logger.info("Ensuring directories exist..")
self.create_dirs()
except Exception:
self.logger.exception("Error while creating directories")
return
self.logger.info("Loading configuration..")
if not self.load_config():
self.logger.error("Unable to find control/config/config.yml")
return
try:
self.logger.info("Ensuring SSL cert exists..")
self.create_ssl()
except Exception:
self.logger.exception("Error while creating SSL cert")
return
reactor.run()
def create_dirs(self):
paths = [CONF_DIR, DATA_DIR,
DATA_DIR + "ssl"]
for path in paths:
if not os.path.exists(path):
self.logger.trace("Creating directory: %s" % path)
os.mkdir(path)
def load_config(self):
if not os.path.exists(CONF_DIR + "config.yml"):
return False
self.config = yaml.load(open(CONF_DIR + "config.yml", "r"))
return True
def create_ssl(self):
if not os.path.exists(DATA_DIR + "ssl/ssl.crt"):
self.logger.trace("No SSL cert found; generating..")
self.logger.info("Generating SSL cert. This may take a while.")
ssl.create_self_signed_cert(
DATA_DIR + "ssl",
self.config.get("ssl", {})
)
self.logger.info("Done!")
elif not os.path.exists(DATA_DIR + "ssl/ssl.key"):
self.logger.trace("No private key found; generating..")
self.logger.info("Generating SSL cert. This may take a while.")
ssl.create_self_signed_cert(
DATA_DIR + "ssl",
self.config.get("ssl", {})
)
self.logger.info("Done!")
else:
self.logger.info("SSL cert and key found.")
|
artistic-2.0
| -2,818,796,045,469,644,300 | 28.108434 | 75 | 0.563742 | false |
PeteE/roro
|
python/driver.py
|
1
|
2957
|
import time
import signal
import sys
import smbus
import robot_data_pb2
from oled_display import OledDisplay
class RobotDriver:
SERVO_STOP = 90
def __init__(self, i2c_address=0x04, i2c_bus=1, oled_display=None):
self.i2c_address = i2c_address
self.i2c_bus = smbus.SMBus(i2c_bus)
self.oled_display = oled_display
signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
self.current_state = robot_data_pb2.RobotData()
self.set_state(s0_pos=90, s1_pos=90, led_pattern=robot_data_pb2.RobotData.OFF)
def exit_gracefully(self, signum, frame):
print('Exiting.')
self.set_state(s0_pos=90, s1_pos=90, led_pattern=robot_data_pb2.RobotData.OFF)
if self.oled_display:
self.oled_display.clear()
sys.exit(0)
def get_state(self):
try:
data_length = self.i2c_bus.read_byte(self.i2c_address)
#print('Length: {}'.format(data_length))
i = 0
data = []
while i < data_length:
data.append(self.i2c_bus.read_byte(self.i2c_address))
i+=1
rd = robot_data_pb2.RobotData()
rd.ParseFromString("".join(map(chr, data)))
print(rd)
if self.oled_display:
oled_text = ['RobotState:',
's0: {}, s1: {}'.format(rd.s0_pos, rd.s1_pos),
'sF: {}, sB: {}'.format(rd.sonarf, rd.sonarb),
]
self.oled_display.display_text('\n'.join(oled_text))
except Exception as e:
print('Error getting state from robot.')
def set_state(self, s0_pos, s1_pos, led_pattern):
try:
self.current_state.s0_pos=s0_pos
self.current_state.s1_pos=s1_pos
self.current_state.led_pattern=led_pattern
self.current_state.sonarf=0
self.current_state.sonarb=0
data = self.current_state.SerializeToString()
data_size = len(data)
# write header
self.i2c_bus.write_byte(self.i2c_address, (data_size >> 8) & 0xFF)
self.i2c_bus.write_byte(self.i2c_address, data_size & 0xFF)
# write data
for c in data:
self.i2c_bus.write_byte(self.i2c_address, ord(c))
except Exception as e:
print(e)
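# Illustrative framing (hypothetical 3-byte payload): the two header bytes are
# the big-endian payload length, so a 3-byte message is announced by writing
# 0x00 then 0x03, followed by one write_byte call per payload byte.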
if __name__ == '__main__':
oled = OledDisplay()
driver = RobotDriver(oled_display=oled)
while True:
for i in range(90, 40, -5):
driver.set_state(s0_pos=i, s1_pos=i, led_pattern=robot_data_pb2.RobotData.RAINBOW)
time.sleep(.5)
driver.get_state()
for i in range(40, 90, 5):
driver.set_state(s0_pos=i, s1_pos=i, led_pattern=robot_data_pb2.RobotData.RAINBOW)
time.sleep(.5)
driver.get_state()
|
bsd-3-clause
| 8,227,827,704,568,035,000 | 32.602273 | 94 | 0.554616 | false |
expyriment/expyriment
|
expyriment/io/_mouse.py
|
1
|
25968
|
"""
Mouse input.
This module contains a class implementing pygame mouse input.
"""
__author__ = 'Florian Krause <florian@expyriment.org>, \
Oliver Lindemann <oliver@expyriment.org>'
__version__ = ''
__revision__ = ''
__date__ = ''
from types import FunctionType
import pygame
from . import defaults
from ..misc._timer import get_time
from ..misc import is_android_running
from ._input_output import Input
from .. import _internals, misc
class Mouse(Input):
"""A class implementing a mouse input.
Calling ``expyriment.control.initialize(exp)`` will automatically create a
mouse instance and will reference it in exp.mouse for easy access.
"""
#static class properties for quit_events
_quit_corner_location = None
_corner_rect_size = (30, 30)
_quit_action_events = []
def __init__(self, show_cursor=True, track_button_events=None,
track_motion_events=None):
"""Initialize a mouse input.
Parameters
----------
show_cursor : bool, optional
shows mouse cursor (default = True)
track_button_events : bool, optional
track button events via Pygame queue (default = True)
track_motion_events : bool, optional
track motion events via Pygame queue (default = False)
Notes
-----
(a) It is strongly suggested to avoid tracking motion events
(track_motion_events=True), because it quickly causes an overflow in
the Pygame event queue and you might consequently loose important
events.
(b) Turning the mouse wheel causes button_down_events. Thus,
turning the mouse wheel rather extensively causes an overflow of
the Pygame event queue. You might consider turning off the default
tracking of mouse button events by calling
`experiment.mouse.track_button_events = False`.
(c) See ``process_quit_event`` for the forced quitting of experiments
via mouse events.
"""
Input.__init__(self)
if is_android_running():
Mouse._quit_corner_location = 1
if show_cursor is None:
show_cursor = defaults.mouse_show_cursor
if track_button_events is None:
track_button_events = defaults.mouse_track_button_events
if track_motion_events is None:
track_motion_events = defaults.mouse_track_motion_events
if show_cursor:
self.show_cursor(track_button_events, track_motion_events)
else:
self.track_button_events = track_button_events
self.track_motion_events = track_motion_events
@staticmethod
def set_quit_corner_location(corner, corner_rect_size=(None, None)):
"""Activate the possibility to quit experiment using a mouse event.
Defines the corner on which the user has to click to elicit a
quit dialog.
If quit corner location has been defined, clicking quickly three
times (i.e., within 1 second) in the specified corner forces
experiment to quit.
To switch off the detection of mouse quit events, please call
``Mouse.set_quit_corner_location(corner=None)``.
Changing the corner and rect_size always affects all mouse
instances.
Parameters
----------
corner: int or None
location code (0,1, 2 or 3) of the quit corner; the default value
under Android is 1, otherwise None; see also the notes
below.
corner_rect_size = tuple (int, int), optional
size of the field (rect) that detects the quit action in one
corner of the screen; default = (30, 30)
Notes
-----
Mouse quit events are especially useful for experiments on devices
without hardware keyboard, such as tablet PCs or smartphones.
Corner location codes::
0 = upper left corner, 1 = upper right corner (0) (1)
2 = lower right corner, 3 = lower left corner (3) (2)
otherwise the detection of mouse quit events is deactivated
The detection of mouse quit events is activated by default under
Android.
"""
if corner is not None:
if not isinstance(corner, int) or corner < 0 or corner > 3:
print("Warning: {} is an unknown corner location. Mouse quit "
"event is deactivated.".format(corner))
corner = None
Mouse._quit_corner_location = corner
try:
Mouse._corner_rect_size = (int(corner_rect_size[0]),
int(corner_rect_size[1]))
except:
pass
@staticmethod
def process_quit_event(click_position=None):
"""Check if mouse exit action has been performed.
If a quit corner location has been defined via
``Mouse.set_quit_corner_location()`` (i.e. 0, 1, 2 or 3), clicking
quickly three times (i.e., within 1 second) in one of the corners of
the screen forces the experiment to quit.
The function is called automatically by all mouse get event and wait
methods (similar to ``Keyboard.process_control_keys``). If no mouse
functions are called by your program, this function can be polled to
ensure quitting experiment by mouse.
Parameters
----------
click_position : tuple of int (x,y), optional
clicked location to be processed. If not defined, the Pygame event
queue will be checked for mouse down events and the current
position is taken
Returns
-------
out : bool, optional
True if exit action has been performed
False otherwise
See Also
--------
set_quit_corner_location
"""
if Mouse._quit_corner_location not in (0, 1, 2, 3):
return False
if click_position is None:
# check the Pygame queue
pos = None
# pygame.event.pump() # not sure if it is required!
for event in pygame.event.get(pygame.MOUSEBUTTONDOWN):
if event.button > 0:
screen_size = _internals.active_exp.screen.surface.get_size()
pos = pygame.mouse.get_pos()
pos = (pos[0] - screen_size[0] // 2,
-pos[1] + screen_size[1] // 2)
break
if pos is None:
return False
else:
return Mouse.process_quit_event(click_position=pos)
# determine threshold x & y
if Mouse._quit_corner_location == 0 or Mouse._quit_corner_location == 3: # left
threshold_x = -_internals.active_exp.screen.center_x + \
Mouse._corner_rect_size[0]
else:# right
threshold_x = _internals.active_exp.screen.center_x - \
Mouse._corner_rect_size[0]
if Mouse._quit_corner_location == 0 or Mouse._quit_corner_location == 1: # upper
threshold_y = _internals.active_exp.screen.center_y - \
Mouse._corner_rect_size[1]
else:# lower
threshold_y = -_internals.active_exp.screen.center_y + \
Mouse._corner_rect_size[1]
# check
if (Mouse._quit_corner_location == 0 and \
click_position[0] < threshold_x and \
click_position[1] > threshold_y) \
or (Mouse._quit_corner_location == 1 and \
click_position[0] > threshold_x and \
click_position[1] > threshold_y) \
or (Mouse._quit_corner_location == 2 and \
click_position[0] > threshold_x and \
click_position[1] < threshold_y) \
or (Mouse._quit_corner_location == 3 and \
click_position[0] < threshold_x and \
click_position[1] < threshold_y):
Mouse._quit_action_events.append(get_time())
if len(Mouse._quit_action_events)>=3:
diff = get_time()-Mouse._quit_action_events.pop(0)
if (diff < 1):
# simulate quit key
simulated_key = pygame.event.Event(
pygame.KEYDOWN,
{'key': _internals.active_exp.keyboard.get_quit_key()})
return _internals.active_exp.keyboard.process_control_keys(
key_event=simulated_key)
return False
@property
def track_button_events(self):
"""Getter for track_button_events."""
return self._track_button_events
@track_button_events.setter
def track_button_events(self, value):
"""Setter for track_button_events.
Switch on/off the processing of button and wheel events.
"""
self._track_button_events = value
if value:
pygame.event.set_allowed(pygame.MOUSEBUTTONDOWN)
pygame.event.set_allowed(pygame.MOUSEBUTTONUP)
else:
pygame.event.set_blocked(pygame.MOUSEBUTTONDOWN)
pygame.event.set_blocked(pygame.MOUSEBUTTONUP)
@property
def track_motion_events(self):
"""Getter for track_motion_events.
Switch on/off the buffering of motion events in the Pygame event queue.
Notes
-----
It is strongly suggested to avoid tracking motion events
(track_motion_events=True), because it quickly causes an overflow in
the Pygame event queue and you might consequently loose important
events.
"""
return self._track_motion_events
@track_motion_events.setter
def track_motion_events(self, value):
"""Setter for track_motion_events.
Switch on/off the processing of motion events.
"""
self._track_motion_events = value
if value:
pygame.event.set_allowed(pygame.MOUSEMOTION)
else:
pygame.event.set_blocked(pygame.MOUSEMOTION)
@property
def pressed_buttons(self):
"""Getter for pressed_buttons."""
pygame.event.pump()
return pygame.mouse.get_pressed()
@property
def is_cursor_visible(self):
"""Getter for is_cursor_visible"""
visible = pygame.mouse.set_visible(False)
pygame.mouse.set_visible(visible)
return visible
def get_last_button_down_event(self, process_quit_event=True):
"""Get the last button down event.
All earlier button down events will be removed from the queue.
Parameters
----------
process_quit_event : boolean, optional
if False, the current location will not be processed for mouse
quitting events in the case that a button down event has been
found (default = True).
Returns
-------
btn_id : int
button number (0,1,2) or 3 for wheel up or 4 for wheel down,
if quit screen mouse action has been performed, the method
returns -1
"""
rtn = None
for event in pygame.event.get(pygame.MOUSEBUTTONDOWN):
if event.button > 0:
rtn = event.button - 1
                if rtn == 0:
if process_quit_event and Mouse.process_quit_event(self.position):
return -1
return rtn
def get_last_button_up_event(self):
"""Get the last button up event.
All earlier button up events will be removed from the queue.
Returns
-------
btn_id : int
button number (0,1,2)
if quit screen mouse action has been performed, the method
returns -1
"""
rtn = None
for event in pygame.event.get(pygame.MOUSEBUTTONUP):
if event.button > 0:
rtn = event.button - 1
return rtn
def check_button_pressed(self, button_number):
"""Return False or button id if a specific button is currently pressed.
Parameters
----------
button_number : int
the button number (0,1,2) to be checked
Returns
-------
is_pressed: boolean
"""
btns = self.pressed_buttons
        if 0 <= button_number < len(btns):
            return btns[button_number]
else:
return False
def check_wheel(self):
"""Check the mouse wheel.
Returns
-------
direction : str
"up" or "down" if mouse wheel has been recently rotated
upwards or downwards otherwise it returns None.
"""
evt = self.get_last_button_down_event()
if evt == 3:
return "up"
elif evt == 4:
return "down"
else:
return None
@property
def position(self):
"""Getter for position."""
pygame.event.pump()
screen_size = _internals.active_exp.screen.surface.get_size()
pos = pygame.mouse.get_pos()
return (pos[0] - screen_size[0] // 2, -pos[1] + screen_size[1] // 2)
@position.setter
def position(self, position):
"""Setter for position."""
screen_size = _internals.active_exp.screen.surface.get_size()
pos = (position[0] + screen_size[0] // 2,
- position[1] + screen_size[1] // 2)
pygame.mouse.set_pos(pos)
def set_cursor(self, size, hotspot, xormasks, andmasks):
"""Set the cursor.
Parameters
----------
size : (int, int)
size of the cursor
hotspot : (int, int)
position of the hotspot (0,0 is top left)
xormask : list
sequence of bytes with cursor xor data masks
andmask : list
sequence of bytes with cursor bitmask data
"""
return pygame.mouse.set_cursor(size, hotspot, xormasks, andmasks)
def get_cursor(self):
"""Get the cursor."""
return pygame.mouse.get_cursor()
def clear(self):
"""Clear the event cue from mouse events."""
pygame.event.clear(pygame.MOUSEBUTTONDOWN)
pygame.event.clear(pygame.MOUSEBUTTONUP)
pygame.event.clear(pygame.MOUSEMOTION)
if self._logging:
_internals.active_exp._event_file_log("Mouse,cleared", 2)
def wait_event(self, wait_button=True, wait_motion=True, buttons=None,
duration=None, wait_for_buttonup=False,
callback_function=None, process_control_events=True):
"""Wait for a mouse event (i.e., motion, button press or wheel event).
Button id coding:
- None for no mouse button event or
        - 0, 1, 2 for left, middle and right button, or
        - 3 for wheel up or
        - 4 for wheel down (wheel works only for button-down events).
Parameters
----------
wait_button : bool, optional
            set 'False' to ignore button presses (default=True)
        wait_motion : bool, optional
            set 'False' to ignore mouse motion (default=True)
buttons : int or list, optional
a specific button or list of buttons to wait for
duration : int, optional
the maximal time to wait in ms
wait_for_buttonup : bool, optional
            if True it waits for button-up (default=False)
callback_function : function, optional
function to repeatedly execute during waiting loop
process_control_events : bool, optional
process ``io.keyboard.process_control_keys()`` and
``io.mouse.process_quit_event()`` (default = True)
Returns
-------
event_id : int
            id of the event that ended the waiting
move : bool
True if a motion occurred
pos : (int, int)
mouse position (tuple)
rt : int
reaction time
Notes
------
This will also by default process control events (quit and pause).
        Thus, keyboard events will be cleared from the queue and cannot be
received by a ``Keyboard().check()`` anymore!
See Also
--------
expyriment.design.Experiment.register_wait_callback_function
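        Examples
        --------
        A minimal sketch (assuming an initialized experiment ``exp`` whose
        mouse instance is ``exp.mouse``)::
            btn_id, moved, pos, rt = exp.mouse.wait_event(wait_motion=False,
                                                          buttons=[0],
                                                          duration=5000)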
"""
if _internals.skip_wait_methods:
return None, None, None, None
start = get_time()
self.clear()
old_pos = pygame.mouse.get_pos()
btn_id = None
rt = None
        motion_occurred = False
if buttons is None:
buttons = [0, 1, 2, 3, 4]
else:
try:
buttons = list(buttons)
            except TypeError:
buttons = [buttons]
while True:
if isinstance(callback_function, FunctionType):
rtn_callback = callback_function()
if isinstance(rtn_callback, _internals.CallbackQuitEvent):
btn_id = rtn_callback
rt = int((get_time() - start) * 1000)
break
if _internals.active_exp.is_initialized:
rtn_callback = _internals.active_exp._execute_wait_callback()
if isinstance(rtn_callback, _internals.CallbackQuitEvent):
btn_id = rtn_callback
rt = int((get_time() - start) * 1000)
break
if process_control_events:
if _internals.active_exp.keyboard.process_control_keys():
break
if wait_motion:
                motion_occurred = old_pos != pygame.mouse.get_pos()
if wait_button:
if wait_for_buttonup:
btn_id = self.get_last_button_up_event()
else:
btn_id = self.get_last_button_down_event(
process_quit_event=process_control_events)
                if btn_id == -1:
                    btn_id = None
                    break
                elif btn_id in buttons or motion_occurred:
rt = int((get_time() - start) * 1000)
break
elif (duration is not None and \
int((get_time() - start) * 1000) >= duration):
break
position_in_expy_coordinates = self.position
if self._logging:
_internals.active_exp._event_file_log(
"Mouse,received,{0}-{1},wait_event".format(btn_id, motion_occured))
return btn_id, motion_occured, position_in_expy_coordinates, rt
def wait_press(self, buttons=None, duration=None, wait_for_buttonup=False,
callback_function=None, process_control_events=True):
"""Wait for a mouse button press or mouse wheel event.
Parameters
----------
buttons : int or list, optional
a specific button or list of buttons to wait for
duration : int, optional
maximal time to wait in ms
wait_for_buttonup : bool, optional
if True it waits for button-up
callback_function : function, optional
function to repeatedly execute during waiting loop
process_control_events : bool, optional
process ``io.keyboard.process_control_keys()`` and
            ``io.mouse.process_quit_event()`` (default = True)
Returns
-------
event_id : int
            id of the event that ended the waiting
pos : (int, int)
mouse position (tuple)
rt : int
reaction time
Notes
------
        Button id coding:
        - None for no mouse button event or
        - 0, 1, 2 for left, middle and right button, or
        - 3 for wheel up or
        - 4 for wheel down (wheel works only for button-down events).
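        Examples
        --------
        A minimal sketch (assuming an initialized experiment ``exp``)::
            btn_id, pos, rt = exp.mouse.wait_press(buttons=[0, 2],
                                                   duration=10000)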
"""
rtn = self.wait_event(wait_button=True, wait_motion=False,
buttons=buttons, duration=duration,
wait_for_buttonup=wait_for_buttonup,
callback_function=callback_function,
process_control_events=process_control_events)
return rtn[0], rtn[2], rtn[3]
def wait_motion(self, duration=None, callback_function=None,
process_control_events=True):
"""Wait for a mouse motion.
Parameters
----------
duration : int, optional
maximal time to wait in ms
callback_function : function, optional
function to repeatedly execute during waiting loop
process_control_events : bool, optional
process ``io.keyboard.process_control_keys()`` and
            ``io.mouse.process_quit_event()`` (default = True)
Returns
-------
pos : (int, int)
mouse position (tuple)
rt : int
reaction time
"""
rtn = self.wait_event(wait_button=False, wait_motion=True, buttons=[],
duration=duration, wait_for_buttonup=False,
callback_function=callback_function,
process_control_events=process_control_events)
if isinstance(rtn[0], _internals.CallbackQuitEvent):
return rtn[0], rtn[3]
else:
return rtn[2], rtn[3]
def show_cursor(self, track_button_events=True, track_motion_events=False):
"""Show the cursor.
Parameters
----------
track_button_events : bool, optional
tracking button events (default = True)
track_motion_events : bool, optional
tracking motion events (default = False)
"""
pygame.mouse.set_visible(True)
self.track_button_events = track_button_events
self.track_motion_events = track_motion_events
def hide_cursor(self, track_button_events=False, track_motion_events=False):
"""Hide the cursor.
Parameters
----------
track_button_events : bool, optional
tracking button events (default = False)
track_motion_events : bool, optional
tracking motion events (default = False)
"""
pygame.mouse.set_visible(False)
self.track_button_events = track_button_events
self.track_motion_events = track_motion_events
@staticmethod
def _self_test(exp):
"""Test the mouse.
Returns
-------
polling_time : int
polling time
buttons_work : int
1 -- if mouse test was ended with mouse button,
0 -- if testing has been quit with q
"""
from .. import stimuli
# measure mouse polling time
info = """This will test how timing accurate your mouse is.
[Press RETURN to continue]"""
stimuli.TextScreen("Mouse test (1)", info).present()
exp.keyboard.wait(misc.constants.K_RETURN)
mouse = Mouse()
go = stimuli.TextLine("Keep on moving...")
go.preload()
stimuli.TextLine("Please move the mouse").present()
mouse.wait_motion()
go.present()
exp.clock.reset_stopwatch()
motion = []
while exp.clock.stopwatch_time < 200:
_pos, rt = mouse.wait_motion()
motion.append(rt)
stimuli.TextLine("Thanks").present()
polling_time = misc.statistics.mode(motion)
info = """Your mouse polling time is {0} ms.
[Press RETURN to continue] """.format(polling_time)
text = stimuli.TextScreen("Results", info)
text.present()
exp.keyboard.wait([misc.constants.K_RETURN])
info = """This will test if you mouse buttons work.
Please press all buttons one after the other to see if the corresponding buttons on the screen light up.
When done, click inside one of the buttons on the screen to end the test.
If your mouse buttons do not work, you can quit by pressing q.
[Press RETURN to continue]"""
stimuli.TextScreen("Mouse test (2)", info).present()
exp.keyboard.wait(misc.constants.K_RETURN)
# test mouse clicking
rects = [stimuli.Rectangle(size=[30, 30], position=[-50, 0]),
stimuli.Rectangle(size=[30, 30], position=[0, 0]),
stimuli.Rectangle(size=[30, 30], position=[50, 0])]
canvas = stimuli.Canvas(size=[350, 500])
btn = None
go_on = True
while go_on:
canvas.clear_surface()
for cnt, r in enumerate(rects):
r.unload()
if cnt == btn:
r.colour = misc.constants.C_YELLOW
else:
r.colour = misc.constants.C_RED
r.plot(canvas)
if btn == 3:
text = "Mouse wheel UP"
elif btn == 4:
text = "Mouse wheel DOWN"
else:
text = ""
stimuli.TextLine(text, position=[0, 50]).plot(canvas)
canvas.present()
btn = None
while btn is None:
btn = mouse.get_last_button_down_event()
if btn is not None:
position = mouse.position
for r in rects:
if r.overlapping_with_position(position):
buttons_work = 1
mouse.hide_cursor()
go_on = False
break
elif exp.keyboard.check(keys=misc.constants.K_q):
buttons_work = 0
mouse.hide_cursor()
go_on = False
break
result = {}
result["testsuite_mouse_polling_time"] = str(polling_time) + " ms"
result["testsuite_mouse_buttons_work"] = buttons_work
return result
|
gpl-3.0
| -3,320,490,465,013,967,400 | 33.123522 | 104 | 0.558611 | false |
Factr/newspaper
|
tests/unit_tests.py
|
1
|
24005
|
# -*- coding: utf-8 -*-
"""
All unit tests for the newspaper library should be contained in this file.
"""
import sys
import os
import unittest
import time
import traceback
from collections import defaultdict, OrderedDict
import concurrent.futures
TEST_DIR = os.path.abspath(os.path.dirname(__file__))
PARENT_DIR = os.path.join(TEST_DIR, '..')
# newspaper's unit tests are in their own separate module, so
# insert the parent directory manually to gain scope of the
# core module
sys.path.insert(0, PARENT_DIR)
TEXT_FN = os.path.join(TEST_DIR, 'data', 'text')
HTML_FN = os.path.join(TEST_DIR, 'data', 'html')
URLS_FILE = os.path.join(TEST_DIR, 'data', 'fulltext_url_list.txt')
import newspaper
from newspaper import Article, fulltext, Source, ArticleException, news_pool
from newspaper.configuration import Configuration
from newspaper.urls import get_domain
def print_test(method):
"""
    Utility decorator for verbose test output; prints the tested
    function's name, the time it took, and its status
"""
def run(*args, **kw):
ts = time.time()
print('\ttesting function %r' % method.__name__)
method(*args, **kw)
te = time.time()
print('\t[OK] in %r %2.2f sec' % (method.__name__, te - ts))
return run
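# print_test is applied as a decorator to the test methods below, e.g.:
#     @print_test
#     def test_something(self):
#         ...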
def mock_resource_with(filename, resource_type):
"""
Mocks an HTTP request by pulling text from a pre-downloaded file
"""
VALID_RESOURCES = ['html', 'txt']
if resource_type not in VALID_RESOURCES:
raise Exception('Mocked resource must be one of: %s' %
', '.join(VALID_RESOURCES))
subfolder = 'text' if resource_type == 'txt' else 'html'
resource_path = os.path.join(TEST_DIR, "data/%s/%s.%s" %
(subfolder, filename, resource_type))
with open(resource_path, 'r') as f:
return f.read()
def get_base_domain(url):
"""
For example, the base url of uk.reuters.com => reuters.com
"""
domain = get_domain(url)
tld = '.'.join(domain.split('.')[-2:])
if tld in ['co.uk', 'com.au', 'au.com']: # edge cases
end_chunks = domain.split('.')[-3:]
else:
end_chunks = domain.split('.')[-2:]
base_domain = '.'.join(end_chunks)
return base_domain
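# Illustrative only: assuming get_domain() yields 'www.bbc.co.uk' for a BBC
# UK url, get_base_domain() returns 'bbc.co.uk' (it keeps three chunks for
# the listed two-part TLDs, two chunks otherwise).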
def check_url(*args, **kwargs):
return ExhaustiveFullTextCase.check_url(*args, **kwargs)
@unittest.skipIf('fulltext' not in sys.argv, 'Skipping fulltext tests')
class ExhaustiveFullTextCase(unittest.TestCase):
@staticmethod
def check_url(args):
"""
:param (basestr, basestr) url, res_filename:
:return: (pubdate_failed, fulltext_failed)
"""
url, res_filename = args
pubdate_failed, fulltext_failed = False, False
html = mock_resource_with(res_filename, 'html')
try:
a = Article(url)
a.download(html)
a.parse()
if a.publish_date is None:
pubdate_failed = True
except Exception:
print('<< URL: %s parse ERROR >>' % url)
traceback.print_exc()
pubdate_failed, fulltext_failed = True, True
else:
correct_text = mock_resource_with(res_filename, 'txt')
if not (a.text == correct_text):
# print('Diff: ', simplediff.diff(correct_text, a.text))
                # `correct_text` holds the reason for the failure, if any
print('%s -- %s -- %s' %
('Fulltext failed',
res_filename, correct_text.strip()))
fulltext_failed = True
# TODO: assert statements are commented out for full-text
# extraction tests because we are constantly tweaking the
# algorithm and improving
# assert a.text == correct_text
return pubdate_failed, fulltext_failed
@print_test
def test_exhaustive(self):
with open(URLS_FILE, 'r') as f:
urls = [d.strip() for d in f.readlines() if d.strip()]
domain_counters = {}
def get_filename(url):
domain = get_base_domain(url)
domain_counters[domain] = domain_counters.get(domain, 0) + 1
return '{}{}'.format(domain, domain_counters[domain])
filenames = map(get_filename, urls)
with concurrent.futures.ProcessPoolExecutor() as executor:
test_results = list(executor.map(check_url, zip(urls, filenames)))
total_pubdates_failed, total_fulltext_failed = \
list(map(sum, zip(*test_results)))
print('%s fulltext extractions failed out of %s' %
(total_fulltext_failed, len(urls)))
print('%s pubdate extractions failed out of %s' %
(total_pubdates_failed, len(urls)))
self.assertGreaterEqual(47, total_pubdates_failed)
self.assertGreaterEqual(20, total_fulltext_failed)
class ArticleTestCase(unittest.TestCase):
def setup_stage(self, stage_name):
stages = OrderedDict([
('initial', lambda: None),
('download', lambda: self.article.download(
mock_resource_with('cnn_article', 'html'))),
('parse', lambda: self.article.parse()),
('meta', lambda: None), # Alias for nlp
('nlp', lambda: self.article.nlp())
])
assert stage_name in stages
for name, action in stages.items():
if name == stage_name:
break
action()
def setUp(self):
"""Called before the first test case of this unit begins
"""
self.article = Article(
url='http://www.cnn.com/2013/11/27/travel/weather-'
'thanksgiving/index.html?iref=allsearch')
@print_test
def test_url(self):
self.assertEqual(
'http://www.cnn.com/2013/11/27/travel/weather-'
'thanksgiving/index.html?iref=allsearch',
self.article.url)
@print_test
def test_download_html(self):
self.setup_stage('download')
html = mock_resource_with('cnn_article', 'html')
self.article.download(html)
self.assertEqual(75406, len(self.article.html))
@print_test
def test_meta_refresh_redirect(self):
# TODO: We actually hit example.com in this unit test ... which is bad
# Figure out how to mock an actual redirect
config = Configuration()
config.follow_meta_refresh = True
article = Article(
'', config=config)
html = mock_resource_with('google_meta_refresh', 'html')
article.download(input_html=html)
article.parse()
self.assertEqual(article.title, 'Example Domain')
@print_test
def test_meta_refresh_no_url_redirect(self):
config = Configuration()
config.follow_meta_refresh = True
article = Article(
'', config=config)
html = mock_resource_with('ap_meta_refresh', 'html')
article.download(input_html=html)
article.parse()
self.assertEqual(article.title, 'News from The Associated Press')
@print_test
def test_pre_download_parse(self):
"""Calling `parse()` before `download()` should yield an error
"""
article = Article(self.article.url)
self.assertRaises(ArticleException, article.parse)
@print_test
def test_parse_html(self):
self.setup_stage('parse')
AUTHORS = ['Chien-Ming Wang', 'Dana A. Ford', 'James S.A. Corey',
'Tom Watkins']
TITLE = 'After storm, forecasters see smooth sailing for Thanksgiving'
LEN_IMGS = 46
META_LANG = 'en'
self.article.parse()
self.article.nlp()
text = mock_resource_with('cnn', 'txt')
self.assertEqual(text, self.article.text)
self.assertEqual(text, fulltext(self.article.html))
# NOTE: top_img extraction requires an internet connection
# unlike the rest of this test file
TOP_IMG = ('http://i2.cdn.turner.com/cnn/dam/assets/131129200805-'
'01-weather-1128-story-top.jpg')
self.assertEqual(TOP_IMG, self.article.top_img)
self.assertCountEqual(AUTHORS, self.article.authors)
self.assertEqual(TITLE, self.article.title)
self.assertEqual(LEN_IMGS, len(self.article.imgs))
self.assertEqual(META_LANG, self.article.meta_lang)
self.assertEqual('2013-11-27 00:00:00', str(self.article.publish_date))
@print_test
def test_meta_type_extraction(self):
self.setup_stage('meta')
meta_type = self.article.extractor.get_meta_type(
self.article.clean_doc)
self.assertEqual('article', meta_type)
@print_test
def test_meta_extraction(self):
self.setup_stage('meta')
meta = self.article.extractor.get_meta_data(self.article.clean_doc)
META_DATA = defaultdict(dict, {
'medium': 'news',
'googlebot': 'noarchive',
'pubdate': '2013-11-27T08:36:32Z',
'title': 'After storm, forecasters see smooth sailing for Thanksgiving - CNN.com',
'og': {'site_name': 'CNN',
'description': 'A strong storm struck much of the eastern United States on Wednesday, complicating holiday plans for many of the 43 million Americans expected to travel.',
'title': 'After storm, forecasters see smooth sailing for Thanksgiving',
'url': 'http://www.cnn.com/2013/11/27/travel/weather-thanksgiving/index.html',
'image': 'http://i2.cdn.turner.com/cnn/dam/assets/131129200805-01-weather-1128-story-top.jpg',
'type': 'article'},
'section': 'travel',
'author': 'Dana A. Ford, James S.A. Corey, Chien-Ming Wang, and Tom Watkins, CNN',
'robots': 'index,follow',
'vr': {
'canonical': 'http://edition.cnn.com/2013/11/27/travel/weather-thanksgiving/index.html'},
'source': 'CNN',
'fb': {'page_id': 18793419640, 'app_id': 80401312489},
'keywords': 'winter storm,holiday travel,Thanksgiving storm,Thanksgiving winter storm',
'article': {
'publisher': 'https://www.facebook.com/cnninternational'},
'lastmod': '2013-11-28T02:03:23Z',
'twitter': {'site': {'identifier': '@CNNI', 'id': 2097571},
'card': 'summary',
'creator': {'identifier': '@cnntravel',
'id': 174377718}},
'viewport': 'width=1024',
'news_keywords': 'winter storm,holiday travel,Thanksgiving storm,Thanksgiving winter storm'
})
self.assertDictEqual(META_DATA, meta)
# if the value for a meta key is another dict, that dict ought to be
# filled with keys and values
dict_values = [v for v in list(meta.values()) if isinstance(v, dict)]
self.assertTrue(all([len(d) > 0 for d in dict_values]))
# there are exactly 5 top-level "og:type" type keys
is_dict = lambda v: isinstance(v, dict)
self.assertEqual(5, len([i for i in meta.values() if is_dict(i)]))
# there are exactly 12 top-level "pubdate" type keys
is_string = lambda v: isinstance(v, str)
self.assertEqual(12, len([i for i in meta.values() if is_string(i)]))
@print_test
def test_pre_download_nlp(self):
"""Test running NLP algos before even downloading the article
"""
self.setup_stage('initial')
new_article = Article(self.article.url)
self.assertRaises(ArticleException, new_article.nlp)
@print_test
def test_pre_parse_nlp(self):
"""Test running NLP algos before parsing the article
"""
self.setup_stage('parse')
self.assertRaises(ArticleException, self.article.nlp)
@print_test
def test_nlp_body(self):
self.setup_stage('nlp')
self.article.nlp()
KEYWORDS = ['balloons', 'delays', 'flight', 'forecasters',
'good', 'sailing', 'smooth', 'storm', 'thanksgiving',
'travel', 'weather', 'winds', 'york']
SUMMARY = mock_resource_with('cnn_summary', 'txt')
self.assertEqual(SUMMARY, self.article.summary)
self.assertCountEqual(KEYWORDS, self.article.keywords)
class ContentExtractorTestCase(unittest.TestCase):
"""Test specific element extraction cases"""
def setUp(self):
self.extractor = newspaper.extractors.ContentExtractor(Configuration())
self.parser = newspaper.parsers.Parser
def _get_title(self, html):
doc = self.parser.fromstring(html)
return self.extractor.get_title(doc)
def test_get_title_basic(self):
html = '<title>Test title</title>'
self.assertEqual(self._get_title(html), 'Test title')
def test_get_title_split(self):
html = '<title>Test page » Test title</title>'
self.assertEqual(self._get_title(html), 'Test title')
def test_get_title_split_escaped(self):
html = '<title>Test page » Test title</title>'
self.assertEqual(self._get_title(html), 'Test title')
def test_get_title_quotes(self):
title = 'Test page and «something in quotes»'
html = '<title>{}</title>'.format(title)
self.assertEqual(self._get_title(html), title)
def _get_canonical_link(self, article_url, html):
doc = self.parser.fromstring(html)
return self.extractor.get_canonical_link(article_url, doc)
def test_get_canonical_link_rel_canonical(self):
url = 'http://www.example.com/article.html'
html = '<link rel="canonical" href="{}">'.format(url)
self.assertEqual(self._get_canonical_link('', html), url)
def test_get_canonical_link_rel_canonical_absolute_url(self):
url = 'http://www.example.com/article.html'
html = '<link rel="canonical" href="article.html">'
article_url = 'http://www.example.com/article?foo=bar'
self.assertEqual(self._get_canonical_link(article_url, html), url)
def test_get_canonical_link_og_url_absolute_url(self):
url = 'http://www.example.com/article.html'
html = '<meta property="og:url" content="article.html">'
article_url = 'http://www.example.com/article?foo=bar'
self.assertEqual(self._get_canonical_link(article_url, html), url)
def test_get_canonical_link_hostname_og_url_absolute_url(self):
url = 'http://www.example.com/article.html'
html = '<meta property="og:url" content="www.example.com/article.html">'
article_url = 'http://www.example.com/article?foo=bar'
self.assertEqual(self._get_canonical_link(article_url, html), url)
class SourceTestCase(unittest.TestCase):
@print_test
def test_source_url_input_none(self):
with self.assertRaises(Exception):
Source(url=None)
@unittest.skip("Need to mock download")
@print_test
def test_source_build(self):
"""
builds a source object, validates it has no errors, prints out
all valid categories and feed urls
"""
DESC = ('CNN.com International delivers breaking news from across '
'the globe and information on the latest top stories, '
'business, sports and entertainment headlines. Follow the '
'news as it happens through: special reports, videos, '
'audio, photo galleries plus interactive maps and timelines.')
CATEGORY_URLS = [
'http://cnn.com/ASIA', 'http://connecttheworld.blogs.cnn.com',
'http://cnn.com/HLN', 'http://cnn.com/MIDDLEEAST',
'http://cnn.com', 'http://ireport.cnn.com',
'http://cnn.com/video', 'http://transcripts.cnn.com',
'http://cnn.com/espanol',
'http://partners.cnn.com', 'http://www.cnn.com',
'http://cnn.com/US', 'http://cnn.com/EUROPE',
'http://cnn.com/TRAVEL', 'http://cnn.com/cnni',
'http://cnn.com/SPORT', 'http://cnn.com/mostpopular',
'http://arabic.cnn.com', 'http://cnn.com/WORLD',
'http://cnn.com/LATINAMERICA', 'http://us.cnn.com',
'http://travel.cnn.com', 'http://mexico.cnn.com',
'http://cnn.com/SHOWBIZ', 'http://edition.cnn.com',
'http://amanpour.blogs.cnn.com', 'http://money.cnn.com',
'http://cnn.com/tools/index.html', 'http://cnnespanol.cnn.com',
'http://cnn.com/CNNI', 'http://business.blogs.cnn.com',
'http://cnn.com/AFRICA', 'http://cnn.com/TECH',
'http://cnn.com/BUSINESS']
FEEDS = ['http://rss.cnn.com/rss/edition.rss']
BRAND = 'cnn'
s = Source('http://cnn.com', verbose=False, memoize_articles=False)
# html = mock_resource_with('http://cnn.com', 'cnn_main_site')
s.clean_memo_cache()
s.build()
# TODO: The rest of the source extraction features will be fully tested
# after I figure out a way to sensibly mock the HTTP requests for all
        # of the category and feed URLs
# assert s.brand == BRAND
# assert s.description == DESC
# assert s.size() == 266
# assert s.category_urls() == CATEGORY_URLS
# TODO: A lot of the feed extraction is NOT being tested because feeds
        # are primarily extracted from the HTML of category URLs. We lose this
# effect by just mocking CNN's main page HTML. Warning: tedious fix.
# assert s.feed_urls() == FEEDS
@unittest.skip("Need to mock download")
@print_test
def test_cache_categories(self):
"""Builds two same source objects in a row examines speeds of both
"""
url = 'http://uk.yahoo.com'
html = mock_resource_with('yahoo_main_site', 'html')
s = Source(url)
s.download()
s.parse()
s.set_categories()
saved_urls = s.category_urls()
s.categories = []
s.set_categories()
self.assertCountEqual(saved_urls, s.category_urls())
class UrlTestCase(unittest.TestCase):
@print_test
def test_valid_urls(self):
"""Prints out a list of urls with our heuristic guess if it is a
valid news url purely based on the url
"""
from newspaper.urls import valid_url
with open(os.path.join(TEST_DIR, 'data/test_urls.txt'), 'r') as f:
lines = f.readlines()
test_tuples = [tuple(l.strip().split(' ')) for l in lines]
# tuples are ('1', 'url_goes_here') form, '1' means valid,
# '0' otherwise
for lst, url in test_tuples:
truth_val = bool(int(lst))
try:
self.assertEqual(truth_val, valid_url(url, test=True))
except AssertionError:
print('\t\turl: %s is supposed to be %s' % (url, truth_val))
raise
@unittest.skip("Need to write an actual test")
@print_test
def test_prepare_url(self):
"""Normalizes a url, removes arguments, hashtags. If a relative url, it
merges it with the source domain to make an abs url, etc
"""
pass
class APITestCase(unittest.TestCase):
@print_test
def test_hot_trending(self):
"""Grab google trending, just make sure this runs
"""
newspaper.hot()
@print_test
def test_popular_urls(self):
"""Just make sure this method runs
"""
newspaper.popular_urls()
@unittest.skip("Need to mock download")
class MThreadingTestCase(unittest.TestCase):
@print_test
def test_download_works(self):
config = Configuration()
config.memoize_articles = False
slate_paper = newspaper.build('http://slate.com', config=config)
tc_paper = newspaper.build('http://techcrunch.com', config=config)
espn_paper = newspaper.build('http://espn.com', config=config)
print(('Slate has %d articles TC has %d articles ESPN has %d articles'
% (slate_paper.size(), tc_paper.size(), espn_paper.size())))
papers = [slate_paper, tc_paper, espn_paper]
news_pool.set(papers, threads_per_source=2)
news_pool.join()
print('Downloaded Slate mthread len',
len(slate_paper.articles[0].html))
print('Downloaded ESPN mthread len',
len(espn_paper.articles[-1].html))
print('Downloaded TC mthread len',
len(tc_paper.articles[1].html))
class ConfigBuildTestCase(unittest.TestCase):
"""Test if our **kwargs to config building setup actually works.
NOTE: No need to mock responses as we are just initializing the
objects, not actually calling download(..)
"""
@print_test
def test_article_default_params(self):
a = Article(url='http://www.cnn.com/2013/11/27/'
'travel/weather-thanksgiving/index.html')
self.assertEqual('en', a.config.language)
self.assertTrue(a.config.memoize_articles)
self.assertTrue(a.config.use_meta_language)
@print_test
def test_article_custom_params(self):
a = Article(url='http://www.cnn.com/2013/11/27/travel/'
'weather-thanksgiving/index.html',
language='zh', memoize_articles=False)
self.assertEqual('zh', a.config.language)
self.assertFalse(a.config.memoize_articles)
self.assertFalse(a.config.use_meta_language)
@print_test
def test_source_default_params(self):
s = Source(url='http://cnn.com')
self.assertEqual('en', s.config.language)
self.assertEqual(20000, s.config.MAX_FILE_MEMO)
self.assertTrue(s.config.memoize_articles)
self.assertTrue(s.config.use_meta_language)
@print_test
def test_source_custom_params(self):
s = Source(url="http://cnn.com", memoize_articles=False,
MAX_FILE_MEMO=10000, language='en')
self.assertFalse(s.config.memoize_articles)
self.assertEqual(10000, s.config.MAX_FILE_MEMO)
self.assertEqual('en', s.config.language)
self.assertFalse(s.config.use_meta_language)
class MultiLanguageTestCase(unittest.TestCase):
@print_test
def test_chinese_fulltext_extract(self):
url = 'http://news.sohu.com/20050601/n225789219.shtml'
article = Article(url=url, language='zh')
html = mock_resource_with('chinese_article', 'html')
article.download(html)
article.parse()
text = mock_resource_with('chinese', 'txt')
self.assertEqual(text, article.text)
self.assertEqual(text, fulltext(article.html, 'zh'))
@print_test
def test_arabic_fulltext_extract(self):
url = 'http://arabic.cnn.com/2013/middle_east/8/3/syria.clashes/' \
'index.html'
article = Article(url=url)
html = mock_resource_with('arabic_article', 'html')
article.download(html)
article.parse()
self.assertEqual('ar', article.meta_lang)
text = mock_resource_with('arabic', 'txt')
self.assertEqual(text, article.text)
self.assertEqual(text, fulltext(article.html, 'ar'))
@print_test
def test_spanish_fulltext_extract(self):
url = 'http://ultimahora.es/mallorca/noticia/noticias/local/fiscal' \
'ia-anticorrupcion-estudia-recurre-imputacion-infanta.html'
article = Article(url=url, language='es')
html = mock_resource_with('spanish_article', 'html')
article.download(html)
article.parse()
text = mock_resource_with('spanish', 'txt')
self.assertEqual(text, article.text)
self.assertEqual(text, fulltext(article.html, 'es'))
if __name__ == '__main__':
argv = list(sys.argv)
if 'fulltext' in argv:
argv.remove('fulltext') # remove it here, so it doesn't pass to unittest
unittest.main(verbosity=0, argv=argv)
|
mit
| -8,295,913,205,737,813,000 | 38.283142 | 190 | 0.603158 | false |
dansbecker/what-celebrity
|
data_grabbers.py
|
1
|
4607
|
import concurrent.futures
import indicoio
import json
import os
import socket
import urllib.request
from os.path import join, exists
from PIL import Image, ImageDraw
class Grabber(object):
def __enter__(self):
try:
with open(self._captured_data_path, 'r') as f:
self.captured_data = json.load(f)
        except (IOError, ValueError):
self.captured_data = []
try:
with open(self._failed_to_capture_path, 'r') as f:
self.failed_to_capture = json.load(f)
        except (IOError, ValueError):
self.failed_to_capture = []
return(self)
def __exit__(self, *args):
with open(self._captured_data_path, 'w') as f:
json.dump(self.captured_data, f)
with open(self._failed_to_capture_path, 'w') as f:
json.dump(self.failed_to_capture, f)
def run(self):
with concurrent.futures.ThreadPoolExecutor(max_workers=32) as worker_pool:
list_to_capture = self._make_list_to_capture()
for img_src, search_term in list_to_capture: # img_src can be url or local file path
                # pass the callable and its arguments separately so the work
                # actually runs in the pool; calling _grab_one here would run
                # it synchronously and submit its None return value instead
                worker_pool.submit(self._grab_one, img_src, search_term)
class ImageGrabber(Grabber):
def __init__(self, celeb_urls_dict):
self.celeb_urls_dict = celeb_urls_dict
self._failed_to_capture_path = join('work', 'failed_to_capture_images.json')
self._captured_data_path = join('work', 'captured_image_info.json')
socket.setdefaulttimeout(5)
def _url_to_fname(self, url):
return ''.join([i for i in url if i.isalpha()])
def _make_target_dir(self, celeb_name):
name_for_path = celeb_name.replace(" ", "_").casefold()
path = join('work', name_for_path)
if not exists(path):
os.mkdir(path)
return path
def _grab_one(self, url, search_term):
print(url)
local_img_path = self._get_file_path(url, search_term)
try:
url, _ = urllib.request.urlretrieve(url, local_img_path)
self.captured_data.append((url, local_img_path, search_term))
        except Exception:
self.failed_to_capture.append((url, local_img_path, search_term))
def _get_file_path(self, url, search_term):
search_term_dir = self._make_target_dir(search_term)
local_img_path = join(search_term_dir, self._url_to_fname(url)+".jpg")
return local_img_path
def _make_list_to_capture(self):
output = []
for search_term, url_list in self.celeb_urls_dict.items():
for url in url_list:
if not exists(self._get_file_path(url, search_term)):
output.append((url, search_term))
return output
class FacialFeatsGrabber(Grabber):
def __init__(self):
self._failed_to_capture_path = join('work', 'failed_to_featurize.json')
self._captured_data_path = join('work', 'facial_feats_data.json')
indicoio.config.api_key = os.environ['INDICO_API_KEY']
socket.setdefaulttimeout(5)
def _grab_one(self, local_img_path, search_term):
try:
img = Image.open(local_img_path)
self.captured_data.append( {'celeb': search_term,
'face_feats': indicoio.facial_features(img),
'face_corners': self._get_single_face_corners(img),
'local_img_path': local_img_path
})
        except Exception:
print('failed to grab facial feats for ' + local_img_path)
self.failed_to_capture.append((local_img_path, search_term))
def _get_single_face_corners(self, img):
"""
        returns the x and y coords of the top-left and bottom-right corners of the face in img (a PIL Image object)
"""
try:
face_corners = indicoio.facial_localization(img)[0]
x0, y0 = face_corners['top_left_corner']
x1, y1 = face_corners['bottom_right_corner']
return (x0, y0, x1, y1)
        except Exception:
return ()
def _make_list_to_capture(self):
output = []
already_featurized_paths = [img['local_img_path'] for img in self.captured_data]
celeb_dirs = [d for d in os.listdir('work') if os.path.isdir(join('work', d))]
for celeb in celeb_dirs:
for fname in os.listdir(join('work', celeb)):
local_img_path = join('work', celeb, fname)
if local_img_path not in already_featurized_paths:
output.append((local_img_path, celeb))
return output
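# A minimal usage sketch (illustrative; assumes a `celeb_urls_dict` mapping
# celebrity names to lists of image urls was built elsewhere). The context
# managers persist progress to JSON between runs:
#     with ImageGrabber(celeb_urls_dict) as grabber:
#         grabber.run()
#     with FacialFeatsGrabber() as grabber:
#         grabber.run()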
|
mit
| -6,785,651,822,177,585,000 | 38.042373 | 96 | 0.572173 | false |
ashutosh-mishra/youtube-dl
|
youtube_dl/extractor/spiegel.py
|
1
|
2462
|
import re
import xml.etree.ElementTree
from .common import InfoExtractor
class SpiegelIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?spiegel\.de/video/[^/]*-(?P<videoID>[0-9]+)(?:\.html)?(?:#.*)?$'
_TESTS = [{
u'url': u'http://www.spiegel.de/video/vulkan-tungurahua-in-ecuador-ist-wieder-aktiv-video-1259285.html',
u'file': u'1259285.mp4',
u'md5': u'2c2754212136f35fb4b19767d242f66e',
u'info_dict': {
u"title": u"Vulkanausbruch in Ecuador: Der \"Feuerschlund\" ist wieder aktiv"
}
},
{
u'url': u'http://www.spiegel.de/video/schach-wm-videoanalyse-des-fuenften-spiels-video-1309159.html',
u'file': u'1309159.mp4',
u'md5': u'f2cdf638d7aa47654e251e1aee360af1',
u'info_dict': {
u'title': u'Schach-WM in der Videoanalyse: Carlsen nutzt die Fehlgriffe des Titelverteidigers'
}
}]
def _real_extract(self, url):
m = re.match(self._VALID_URL, url)
video_id = m.group('videoID')
webpage = self._download_webpage(url, video_id)
video_title = self._html_search_regex(
r'<div class="module-title">(.*?)</div>', webpage, u'title')
xml_url = u'http://video2.spiegel.de/flash/' + video_id + u'.xml'
xml_code = self._download_webpage(
xml_url, video_id,
note=u'Downloading XML', errnote=u'Failed to download XML')
idoc = xml.etree.ElementTree.fromstring(xml_code)
formats = [
{
'format_id': n.tag.rpartition('type')[2],
'url': u'http://video2.spiegel.de/flash/' + n.find('./filename').text,
'width': int(n.find('./width').text),
'height': int(n.find('./height').text),
'abr': int(n.find('./audiobitrate').text),
'vbr': int(n.find('./videobitrate').text),
'vcodec': n.find('./codec').text,
'acodec': 'MP4A',
}
for n in list(idoc)
# Blacklist type 6, it's extremely LQ and not available on the same server
if n.tag.startswith('type') and n.tag != 'type6'
]
formats.sort(key=lambda f: f['vbr'])
duration = float(idoc[0].findall('./duration')[0].text)
info = {
'id': video_id,
'title': video_title,
'duration': duration,
'formats': formats,
}
return info
|
unlicense
| -2,908,200,738,147,478,000 | 36.30303 | 112 | 0.542242 | false |
abramhindle/UnnaturalCodeFork
|
python/testdata/launchpad/lib/lp/registry/tests/test_distroseriesparent.py
|
1
|
9245
|
# Copyright 2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for DistroSeriesParent model class."""
__metaclass__ = type
from testtools.matchers import MatchesStructure
from zope.component import getUtility
from zope.interface.verify import verifyObject
from zope.security.interfaces import Unauthorized
from lp.registry.interfaces.distroseriesparent import (
IDistroSeriesParent,
IDistroSeriesParentSet,
)
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.soyuz.interfaces.component import IComponentSet
from lp.testing import (
login,
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import (
DatabaseFunctionalLayer,
ZopelessDatabaseLayer,
)
from lp.testing.sampledata import LAUNCHPAD_ADMIN
class TestDistroSeriesParent(TestCaseWithFactory):
"""Test the `DistroSeriesParent` model."""
layer = ZopelessDatabaseLayer
def test_verify_interface(self):
# Test the interface for the model.
dsp = self.factory.makeDistroSeriesParent()
verified = verifyObject(IDistroSeriesParent, dsp)
self.assertTrue(verified)
def test_properties(self):
# Test the model properties.
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
dsp = self.factory.makeDistroSeriesParent(
derived_series=derived_series,
parent_series=parent_series,
initialized=True)
self.assertThat(
dsp,
MatchesStructure.byEquality(
derived_series=derived_series,
parent_series=parent_series,
initialized=True,
is_overlay=False,
component=None,
pocket=None,
))
def test_properties_overlay(self):
# Test the model properties if the DSP represents an overlay.
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
universe_component = getUtility(IComponentSet).ensure('universe')
dsp = self.factory.makeDistroSeriesParent(
derived_series=derived_series,
parent_series=parent_series,
initialized=True,
is_overlay=True,
component=universe_component,
pocket=PackagePublishingPocket.SECURITY,
)
self.assertThat(
dsp,
MatchesStructure.byEquality(
derived_series=derived_series,
parent_series=parent_series,
initialized=True,
is_overlay=True,
component=universe_component,
pocket=PackagePublishingPocket.SECURITY,
))
def test_getByDerivedSeries(self):
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
self.factory.makeDistroSeriesParent(
derived_series, parent_series)
results = getUtility(IDistroSeriesParentSet).getByDerivedSeries(
derived_series)
self.assertEqual(1, results.count())
self.assertEqual(parent_series, results[0].parent_series)
# Making a second parent should add it to the results.
self.factory.makeDistroSeriesParent(
derived_series, self.factory.makeDistroSeries())
results = getUtility(IDistroSeriesParentSet).getByDerivedSeries(
derived_series)
self.assertEqual(2, results.count())
def test_getByParentSeries(self):
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
self.factory.makeDistroSeriesParent(
derived_series, parent_series)
results = getUtility(IDistroSeriesParentSet).getByParentSeries(
parent_series)
self.assertEqual(1, results.count())
self.assertEqual(derived_series, results[0].derived_series)
# Making a second child should add it to the results.
self.factory.makeDistroSeriesParent(
self.factory.makeDistroSeries(), parent_series)
results = getUtility(IDistroSeriesParentSet).getByParentSeries(
parent_series)
self.assertEqual(2, results.count())
class TestDistroSeriesParentSecurity(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
def test_random_person_is_unauthorized(self):
dsp = self.factory.makeDistroSeriesParent()
person = self.factory.makePerson()
with person_logged_in(person):
self.assertRaises(
Unauthorized,
setattr, dsp, "derived_series", dsp.parent_series)
def assertCanEdit(self, dsp):
dsp.initialized = False
        self.assertEqual(False, dsp.initialized)
def test_distroseries_drivers_can_edit(self):
# Test that distroseries drivers can edit the data.
dsp = self.factory.makeDistroSeriesParent()
person = self.factory.makePerson()
login(LAUNCHPAD_ADMIN)
dsp.derived_series.driver = person
with person_logged_in(person):
self.assertCanEdit(dsp)
def test_admins_can_edit(self):
dsp = self.factory.makeDistroSeriesParent()
login(LAUNCHPAD_ADMIN)
self.assertCanEdit(dsp)
def test_distro_owners_can_edit(self):
dsp = self.factory.makeDistroSeriesParent()
person = self.factory.makePerson()
login(LAUNCHPAD_ADMIN)
dsp.derived_series.distribution.owner = person
with person_logged_in(person):
self.assertCanEdit(dsp)
class TestOverlayTree(TestCaseWithFactory):
"""Test the overlay tree."""
layer = DatabaseFunctionalLayer
def test_getFlattenedOverlayTree(self):
#
# series
# |
# ----------------------------------
# | | | |
# o o | o
# | | | |
# parent11 parent21 parent31 parent41
# | |
# o o
# | | type of relation:
# parent12 parent22 | |
# | | o
# | | |
# | no overlay overlay
# parent13
#
distroseries = self.factory.makeDistroSeries()
parent11 = self.factory.makeDistroSeries()
parent12 = self.factory.makeDistroSeries()
parent21 = self.factory.makeDistroSeries()
universe_component = getUtility(IComponentSet).ensure('universe')
# series -> parent11
dsp_series_parent11 = self.factory.makeDistroSeriesParent(
derived_series=distroseries, parent_series=parent11,
initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# parent11 -> parent12
dsp_parent11_parent12 = self.factory.makeDistroSeriesParent(
derived_series=parent11, parent_series=parent12,
initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# parent12 -> parent13
self.factory.makeDistroSeriesParent(derived_series=parent12,
initialized=True, is_overlay=False)
# series -> parent21
dsp_series_parent21 = self.factory.makeDistroSeriesParent(
derived_series=distroseries, parent_series=parent21,
initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# parent21 -> parent22
dsp_parent21_parent22 = self.factory.makeDistroSeriesParent(
derived_series=parent21, initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# series -> parent31
self.factory.makeDistroSeriesParent(derived_series=distroseries,
initialized=True, is_overlay=False)
# series -> parent41
dsp_series_parent41 = self.factory.makeDistroSeriesParent(
derived_series=distroseries, initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
overlays = getUtility(
IDistroSeriesParentSet).getFlattenedOverlayTree(distroseries)
self.assertContentEqual(
[dsp_series_parent11, dsp_parent11_parent12, dsp_series_parent21,
dsp_parent21_parent22, dsp_series_parent41],
overlays)
def test_getFlattenedOverlayTree_empty(self):
distroseries = self.factory.makeDistroSeries()
self.factory.makeDistroSeriesParent(derived_series=distroseries,
initialized=True, is_overlay=False)
overlays = getUtility(
IDistroSeriesParentSet).getFlattenedOverlayTree(distroseries)
self.assertTrue(overlays.is_empty())
|
agpl-3.0
| -6,653,642,816,786,933,000 | 38.008439 | 77 | 0.630611 | false |
hyperized/ansible
|
lib/ansible/modules/net_tools/basics/uri.py
|
1
|
25199
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Romeo Theriault <romeot () hawaii.edu>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: uri
short_description: Interacts with webservices
description:
- Interacts with HTTP and HTTPS web services and supports Digest, Basic and WSSE
HTTP authentication mechanisms.
- For Windows targets, use the M(win_uri) module instead.
version_added: "1.1"
options:
url:
description:
- HTTP or HTTPS URL in the form (http|https)://host.domain[:port]/path
type: str
required: true
dest:
description:
- A path of where to download the file to (if desired). If I(dest) is a
directory, the basename of the file on the remote server will be used.
type: path
url_username:
description:
- A username for the module to use for Digest, Basic or WSSE authentication.
type: str
aliases: [ user ]
url_password:
description:
- A password for the module to use for Digest, Basic or WSSE authentication.
type: str
aliases: [ password ]
body:
description:
- The body of the http request/response to the web service. If C(body_format) is set
to 'json' it will take an already formatted JSON string or convert a data structure
into JSON. If C(body_format) is set to 'form-urlencoded' it will convert a dictionary
or list of tuples into an 'application/x-www-form-urlencoded' string. (Added in v2.7)
type: raw
body_format:
description:
- The serialization format of the body. When set to C(json) or C(form-urlencoded), encodes the
body argument, if needed, and automatically sets the Content-Type header accordingly.
As of C(2.3) it is possible to override the `Content-Type` header, when
set to C(json) or C(form-urlencoded) via the I(headers) option.
type: str
choices: [ form-urlencoded, json, raw ]
default: raw
version_added: "2.0"
method:
description:
- The HTTP method of the request or response.
- In more recent versions we do not restrict the method at the module level anymore
but it still must be a valid method accepted by the service handling the request.
type: str
default: GET
return_content:
description:
- Whether or not to return the body of the response as a "content" key in
the dictionary result.
- Independently of this option, if the reported Content-type is "application/json", then the JSON is
always loaded into a key called C(json) in the dictionary results.
type: bool
default: no
force_basic_auth:
description:
- Force the sending of the Basic authentication header upon initial request.
- The library used by the uri module only sends authentication information when a webservice
responds to an initial request with a 401 status. Since some basic auth services do not properly
send a 401, logins will fail.
type: bool
default: no
follow_redirects:
description:
- Whether or not the URI module should follow redirects. C(all) will follow all redirects.
C(safe) will follow only "safe" redirects, where "safe" means that the client is only
doing a GET or HEAD on the URI to which it is being redirected. C(none) will not follow
any redirects. Note that C(yes) and C(no) choices are accepted for backwards compatibility,
where C(yes) is the equivalent of C(all) and C(no) is the equivalent of C(safe). C(yes) and C(no)
are deprecated and will be removed in some future version of Ansible.
type: str
choices: ['all', 'no', 'none', 'safe', 'urllib2', 'yes']
default: safe
creates:
description:
- A filename, when it already exists, this step will not be run.
type: path
removes:
description:
- A filename, when it does not exist, this step will not be run.
type: path
status_code:
description:
- A list of valid, numeric, HTTP status codes that signifies success of the request.
type: list
default: [ 200 ]
timeout:
description:
- The socket level timeout in seconds
type: int
default: 30
headers:
description:
- Add custom HTTP headers to a request in the format of a YAML hash. As
of C(2.3) supplying C(Content-Type) here will override the header
generated by supplying C(json) or C(form-urlencoded) for I(body_format).
type: dict
version_added: '2.1'
validate_certs:
description:
- If C(no), SSL certificates will not be validated.
    - This should only be set to C(no) on personally controlled sites using self-signed certificates.
- Prior to 1.9.2 the code defaulted to C(no).
type: bool
default: yes
version_added: '1.9.2'
client_cert:
description:
- PEM formatted certificate chain file to be used for SSL client authentication.
    - This file can also include the key, and if the key is included, I(client_key) is not required
type: path
version_added: '2.4'
client_key:
description:
- PEM formatted file that contains your private key to be used for SSL client authentication.
- If I(client_cert) contains both the certificate and key, this option is not required.
type: path
version_added: '2.4'
src:
description:
- Path to file to be submitted to the remote server.
- Cannot be used with I(body).
type: path
version_added: '2.7'
remote_src:
description:
- If C(no), the module will search for src on originating/master machine.
- If C(yes) the module will use the C(src) path on the remote/target machine.
type: bool
default: no
version_added: '2.7'
force:
description:
- If C(yes) do not get a cached copy.
- Alias C(thirsty) has been deprecated and will be removed in 2.13.
type: bool
default: no
aliases: [ thirsty ]
use_proxy:
description:
- If C(no), it will not use a proxy, even if one is defined in an environment variable on the target hosts.
type: bool
default: yes
unix_socket:
description:
- Path to Unix domain socket to use for connection
version_added: '2.8'
http_agent:
description:
- Header to identify as, generally appears in web server logs.
type: str
default: ansible-httpget
notes:
- The dependency on httplib2 was removed in Ansible 2.1.
- The module returns all the HTTP headers in lower-case.
- For Windows targets, use the M(win_uri) module instead.
seealso:
- module: get_url
- module: win_uri
author:
- Romeo Theriault (@romeotheriault)
extends_documentation_fragment: files
'''
EXAMPLES = r'''
- name: Check that you can connect (GET) to a page and it returns a status 200
uri:
url: http://www.example.com
- name: Check that a page returns a status 200 and fail if the word AWESOME is not in the page contents
uri:
url: http://www.example.com
return_content: yes
register: this
failed_when: "'AWESOME' not in this.content"
- name: Create a JIRA issue
uri:
url: https://your.jira.example.com/rest/api/2/issue/
user: your_username
password: your_pass
method: POST
body: "{{ lookup('file','issue.json') }}"
force_basic_auth: yes
status_code: 201
body_format: json
- name: Login to a form based webpage, then use the returned cookie to access the app in later tasks
uri:
url: https://your.form.based.auth.example.com/index.php
method: POST
body_format: form-urlencoded
body:
name: your_username
password: your_password
enter: Sign in
status_code: 302
register: login
- name: Login to a form based webpage using a list of tuples
uri:
url: https://your.form.based.auth.example.com/index.php
method: POST
body_format: form-urlencoded
body:
- [ name, your_username ]
- [ password, your_password ]
- [ enter, Sign in ]
status_code: 302
register: login
- name: Connect to website using a previously stored cookie
uri:
url: https://your.form.based.auth.example.com/dashboard.php
method: GET
return_content: yes
headers:
Cookie: "{{ login.set_cookie }}"
- name: Queue build of a project in Jenkins
uri:
url: http://{{ jenkins.host }}/job/{{ jenkins.job }}/build?token={{ jenkins.token }}
user: "{{ jenkins.user }}"
password: "{{ jenkins.password }}"
method: GET
force_basic_auth: yes
status_code: 201
- name: POST from contents of local file
uri:
url: https://httpbin.org/post
method: POST
src: file.json
- name: POST from contents of remote file
uri:
url: https://httpbin.org/post
method: POST
src: /path/to/my/file.json
remote_src: yes
- name: Pause play until a URL is reachable from this host
uri:
url: "http://192.0.2.1/some/test"
follow_redirects: none
method: GET
register: _result
until: _result.status == 200
retries: 720 # 720 * 5 seconds = 1hour (60*60/5)
delay: 5 # Every 5 seconds
# There are issues in a supporting Python library that is discussed in
# https://github.com/ansible/ansible/issues/52705 where a proxy is defined
# but you want to bypass proxy use on CIDR masks by using no_proxy
- name: Work around a python issue that doesn't support no_proxy envvar
uri:
follow_redirects: none
validate_certs: false
timeout: 5
url: "http://{{ ip_address }}:{{ port | default(80) }}"
register: uri_data
failed_when: false
changed_when: false
vars:
ip_address: 192.0.2.1
environment: |
{
{% for no_proxy in (lookup('env', 'no_proxy') | regex_replace('\s*,\s*', ' ') ).split() %}
{% if no_proxy | regex_search('\/') and
no_proxy | ipaddr('net') != '' and
no_proxy | ipaddr('net') != false and
ip_address | ipaddr(no_proxy) is not none and
ip_address | ipaddr(no_proxy) != false %}
'no_proxy': '{{ ip_address }}'
{% elif no_proxy | regex_search(':') != '' and
no_proxy | regex_search(':') != false and
no_proxy == ip_address + ':' + (port | default(80)) %}
'no_proxy': '{{ ip_address }}:{{ port | default(80) }}'
{% elif no_proxy | ipaddr('host') != '' and
no_proxy | ipaddr('host') != false and
no_proxy == ip_address %}
'no_proxy': '{{ ip_address }}'
{% elif no_proxy | regex_search('^(\*|)\.') != '' and
no_proxy | regex_search('^(\*|)\.') != false and
no_proxy | regex_replace('\*', '') in ip_address %}
'no_proxy': '{{ ip_address }}'
{% endif %}
{% endfor %}
}
'''
RETURN = r'''
# The return information includes all the HTTP headers in lower-case.
elapsed:
description: The number of seconds that elapsed while performing the download
returned: on success
type: int
sample: 23
msg:
description: The HTTP message from the request
returned: always
type: str
sample: OK (unknown bytes)
redirected:
description: Whether the request was redirected
returned: on success
type: bool
sample: false
status:
description: The HTTP status code from the request
returned: always
type: int
sample: 200
url:
description: The actual URL used for the request
returned: always
type: str
sample: https://www.ansible.com/
'''
import cgi
import datetime
import json
import os
import re
import shutil
import sys
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import PY2, iteritems, string_types
from ansible.module_utils.six.moves.urllib.parse import urlencode, urlsplit
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common._collections_compat import Mapping, Sequence
from ansible.module_utils.urls import fetch_url, url_argument_spec
JSON_CANDIDATES = ('text', 'json', 'javascript')
def format_message(err, resp):
msg = resp.pop('msg')
return err + (' %s' % msg if msg else '')
def write_file(module, url, dest, content, resp):
    # create a tempfile and write the downloaded content into it
fd, tmpsrc = tempfile.mkstemp(dir=module.tmpdir)
f = open(tmpsrc, 'wb')
try:
f.write(content)
except Exception as e:
os.remove(tmpsrc)
msg = format_message("Failed to create temporary content file: %s" % to_native(e), resp)
module.fail_json(msg=msg, **resp)
f.close()
checksum_src = None
checksum_dest = None
# raise an error if there is no tmpsrc file
if not os.path.exists(tmpsrc):
os.remove(tmpsrc)
msg = format_message("Source '%s' does not exist" % tmpsrc, resp)
module.fail_json(msg=msg, **resp)
if not os.access(tmpsrc, os.R_OK):
os.remove(tmpsrc)
msg = format_message("Source '%s' not readable" % tmpsrc, resp)
module.fail_json(msg=msg, **resp)
checksum_src = module.sha1(tmpsrc)
# check if there is no dest file
if os.path.exists(dest):
        # raise an error if we lack write permission on dest
if not os.access(dest, os.W_OK):
os.remove(tmpsrc)
msg = format_message("Destination '%s' not writable" % dest, resp)
module.fail_json(msg=msg, **resp)
if not os.access(dest, os.R_OK):
os.remove(tmpsrc)
msg = format_message("Destination '%s' not readable" % dest, resp)
module.fail_json(msg=msg, **resp)
checksum_dest = module.sha1(dest)
else:
if not os.access(os.path.dirname(dest), os.W_OK):
os.remove(tmpsrc)
msg = format_message("Destination dir '%s' not writable" % os.path.dirname(dest), resp)
module.fail_json(msg=msg, **resp)
if checksum_src != checksum_dest:
try:
shutil.copyfile(tmpsrc, dest)
except Exception as e:
os.remove(tmpsrc)
msg = format_message("failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)), resp)
module.fail_json(msg=msg, **resp)
os.remove(tmpsrc)
def url_filename(url):
fn = os.path.basename(urlsplit(url)[2])
if fn == '':
return 'index.html'
return fn
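# Illustrative behaviour of url_filename (the URLs are assumed examples, derived
# from the code above):
#   url_filename('https://example.com/files/report.csv') -> 'report.csv'
#   url_filename('https://example.com/')                 -> 'index.html'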
def absolute_location(url, location):
"""Attempts to create an absolute URL based on initial URL, and
next URL, specifically in the case of a ``Location`` header.
"""
if '://' in location:
return location
elif location.startswith('/'):
parts = urlsplit(url)
base = url.replace(parts[2], '')
return '%s%s' % (base, location)
elif not location.startswith('/'):
base = os.path.dirname(url)
return '%s/%s' % (base, location)
else:
return location
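# A few worked cases for absolute_location (illustrative inputs, not from the
# module docs):
#   absolute_location('http://example.com/a/b', 'http://other.example/x') -> 'http://other.example/x'
#   absolute_location('http://example.com/a/b', '/x')                     -> 'http://example.com/x'
#   absolute_location('http://example.com/a/b', 'x')                      -> 'http://example.com/a/x'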
def kv_list(data):
''' Convert data into a list of key-value tuples '''
if data is None:
return None
if isinstance(data, Sequence):
return list(data)
if isinstance(data, Mapping):
return list(data.items())
raise TypeError('cannot form-urlencode body, expect list or dict')
def form_urlencoded(body):
''' Convert data into a form-urlencoded string '''
if isinstance(body, string_types):
return body
if isinstance(body, (Mapping, Sequence)):
result = []
        # Turn a list of lists into a list of tuples that urlencode accepts
for key, values in kv_list(body):
if isinstance(values, string_types) or not isinstance(values, (Mapping, Sequence)):
values = [values]
for value in values:
if value is not None:
result.append((to_text(key), to_text(value)))
return urlencode(result, doseq=True)
return body
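# Sketch of the conversions form_urlencoded performs (illustrative values; the key
# order of dict inputs is not guaranteed):
#   form_urlencoded('a=1&b=2')                 -> 'a=1&b=2'   (strings pass through)
#   form_urlencoded({'a': 1, 'b': ['x', 'y']}) -> 'a=1&b=x&b=y'
#   form_urlencoded([('a', 1), ('b', None)])   -> 'a=1'       (None values are dropped)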
def uri(module, url, dest, body, body_format, method, headers, socket_timeout):
    # if dest is set and is a directory, check whether we get redirected and
# set the filename from that url
redirected = False
redir_info = {}
r = {}
src = module.params['src']
if src:
try:
headers.update({
'Content-Length': os.stat(src).st_size
})
data = open(src, 'rb')
except OSError:
module.fail_json(msg='Unable to open source file %s' % src, elapsed=0)
else:
data = body
kwargs = {}
if dest is not None:
# Stash follow_redirects, in this block we don't want to follow
# we'll reset back to the supplied value soon
follow_redirects = module.params['follow_redirects']
module.params['follow_redirects'] = False
if os.path.isdir(dest):
# first check if we are redirected to a file download
_, redir_info = fetch_url(module, url, data=body,
headers=headers,
method=method,
timeout=socket_timeout, unix_socket=module.params['unix_socket'])
# if we are redirected, update the url with the location header,
# and update dest with the new url filename
if redir_info['status'] in (301, 302, 303, 307):
url = redir_info['location']
redirected = True
dest = os.path.join(dest, url_filename(url))
            # if the destination file already exists, only download if the remote file is newer
if os.path.exists(dest):
kwargs['last_mod_time'] = datetime.datetime.utcfromtimestamp(os.path.getmtime(dest))
# Reset follow_redirects back to the stashed value
module.params['follow_redirects'] = follow_redirects
resp, info = fetch_url(module, url, data=data, headers=headers,
method=method, timeout=socket_timeout, unix_socket=module.params['unix_socket'],
**kwargs)
try:
content = resp.read()
except AttributeError:
# there was no content, but the error read()
# may have been stored in the info as 'body'
content = info.pop('body', '')
if src:
# Try to close the open file handle
try:
data.close()
except Exception:
pass
r['redirected'] = redirected or info['url'] != url
r.update(redir_info)
r.update(info)
return r, content, dest
def main():
argument_spec = url_argument_spec()
argument_spec.update(
dest=dict(type='path'),
url_username=dict(type='str', aliases=['user']),
url_password=dict(type='str', aliases=['password'], no_log=True),
body=dict(type='raw'),
body_format=dict(type='str', default='raw', choices=['form-urlencoded', 'json', 'raw']),
src=dict(type='path'),
method=dict(type='str', default='GET'),
return_content=dict(type='bool', default=False),
follow_redirects=dict(type='str', default='safe', choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']),
creates=dict(type='path'),
removes=dict(type='path'),
status_code=dict(type='list', default=[200]),
timeout=dict(type='int', default=30),
headers=dict(type='dict', default={}),
unix_socket=dict(type='path'),
)
module = AnsibleModule(
argument_spec=argument_spec,
add_file_common_args=True,
mutually_exclusive=[['body', 'src']],
)
if module.params.get('thirsty'):
module.deprecate('The alias "thirsty" has been deprecated and will be removed, use "force" instead', version='2.13')
url = module.params['url']
body = module.params['body']
body_format = module.params['body_format'].lower()
method = module.params['method'].upper()
dest = module.params['dest']
return_content = module.params['return_content']
creates = module.params['creates']
removes = module.params['removes']
status_code = [int(x) for x in list(module.params['status_code'])]
socket_timeout = module.params['timeout']
dict_headers = module.params['headers']
if not re.match('^[A-Z]+$', method):
module.fail_json(msg="Parameter 'method' needs to be a single word in uppercase, like GET or POST.")
if body_format == 'json':
        # Encode the body unless it's already a string, in which case assume it is pre-formatted JSON
if not isinstance(body, string_types):
body = json.dumps(body)
if 'content-type' not in [header.lower() for header in dict_headers]:
dict_headers['Content-Type'] = 'application/json'
elif body_format == 'form-urlencoded':
if not isinstance(body, string_types):
try:
body = form_urlencoded(body)
except ValueError as e:
module.fail_json(msg='failed to parse body as form_urlencoded: %s' % to_native(e), elapsed=0)
if 'content-type' not in [header.lower() for header in dict_headers]:
dict_headers['Content-Type'] = 'application/x-www-form-urlencoded'
if creates is not None:
        # skip the request if 'creates' points at a file that already
        # exists. This allows idempotence of uri executions.
if os.path.exists(creates):
module.exit_json(stdout="skipped, since '%s' exists" % creates, changed=False)
if removes is not None:
        # skip the request if 'removes' points at a file that does not
        # exist. This allows idempotence of uri executions.
if not os.path.exists(removes):
module.exit_json(stdout="skipped, since '%s' does not exist" % removes, changed=False)
# Make the request
start = datetime.datetime.utcnow()
resp, content, dest = uri(module, url, dest, body, body_format, method,
dict_headers, socket_timeout)
resp['elapsed'] = (datetime.datetime.utcnow() - start).seconds
resp['status'] = int(resp['status'])
resp['changed'] = False
# Write the file out if requested
if dest is not None:
if resp['status'] in status_code and resp['status'] != 304:
write_file(module, url, dest, content, resp)
# allow file attribute changes
resp['changed'] = True
module.params['path'] = dest
file_args = module.load_file_common_arguments(module.params)
file_args['path'] = dest
resp['changed'] = module.set_fs_attributes_if_different(file_args, resp['changed'])
resp['path'] = dest
# Transmogrify the headers, replacing '-' with '_', since variables don't
# work with dashes.
# In python3, the headers are title cased. Lowercase them to be
# compatible with the python2 behaviour.
uresp = {}
for key, value in iteritems(resp):
ukey = key.replace("-", "_").lower()
uresp[ukey] = value
if 'location' in uresp:
uresp['location'] = absolute_location(url, uresp['location'])
# Default content_encoding to try
content_encoding = 'utf-8'
if 'content_type' in uresp:
# Handle multiple Content-Type headers
charsets = []
content_types = []
for value in uresp['content_type'].split(','):
ct, params = cgi.parse_header(value)
if ct not in content_types:
content_types.append(ct)
if 'charset' in params:
if params['charset'] not in charsets:
charsets.append(params['charset'])
if content_types:
content_type = content_types[0]
if len(content_types) > 1:
module.warn(
'Received multiple conflicting Content-Type values (%s), using %s' % (', '.join(content_types), content_type)
)
if charsets:
content_encoding = charsets[0]
if len(charsets) > 1:
module.warn(
'Received multiple conflicting charset values (%s), using %s' % (', '.join(charsets), content_encoding)
)
u_content = to_text(content, encoding=content_encoding)
if any(candidate in content_type for candidate in JSON_CANDIDATES):
try:
js = json.loads(u_content)
uresp['json'] = js
except Exception:
if PY2:
sys.exc_clear() # Avoid false positive traceback in fail_json() on Python 2
else:
u_content = to_text(content, encoding=content_encoding)
if resp['status'] not in status_code:
uresp['msg'] = 'Status code was %s and not %s: %s' % (resp['status'], status_code, uresp.get('msg', ''))
module.fail_json(content=u_content, **uresp)
elif return_content:
module.exit_json(content=u_content, **uresp)
else:
module.exit_json(**uresp)
if __name__ == '__main__':
main()
|
gpl-3.0
| -2,665,481,652,045,576,700 | 34.692635 | 129 | 0.620183 | false |
memeticlabs/mongokit
|
listembed-test.py
|
1
|
4479
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from mongokit import Document, Connection
class DescriptorsTestCase(unittest.TestCase):
def setUp(self):
self.connection = Connection()
self.col = self.connection['test']['mongokit']
def tearDown(self):
self.connection.drop_database('test')
def test_list_embed_dot_notation(self):
"""Attempt to set a default for a sub element using dot notation
Either this or test_list_embed_list_notation should pass
"""
class ListEmbed(Document):
use_dot_notation = True
structure = {
'list': [
{
'name': basestring,
'age': int
}
]
}
default_values = {
'list.name': 'default'
}
self.connection.register([ListEmbed])
doc = self.col.ListEmbed()
self.assertDictEqual(doc, {'list': []})
doc.list.append({'age': 23})
self.assertDictEqual(
doc, {
'list': [
{
'name': 'default',
'age': 23
}
]
}
)
def test_list_embed_list_notation(self):
"""Attempt to set a default for a sub element using list notation
Either this or test_list_embed_dot_notation should pass
"""
class ListEmbed(Document):
use_dot_notation = True
structure = {
'list': [
{
'name': basestring,
'age': int
}
]
}
default_values = {
'list': [
{
'name': 'default'
}
]
}
self.connection.register([ListEmbed])
doc = self.col.ListEmbed()
self.assertDictEqual(doc, {'list': []})
doc.list.append({'age': 23})
self.assertDictEqual(
doc, {
'list': [
{
'name': 'default',
'age': 23
}
]
}
)
def test_list_embed_non_required_fields(self):
"""Confirm all fields are not required"""
class ListEmbed(Document):
use_dot_notation = True
structure = {
'list': [
{
'name': basestring,
'age': int
}
]
}
self.connection.register([ListEmbed])
doc = self.col.ListEmbed()
self.assertDictEqual(doc, {'list': []})
doc.list.append({'age': 23})
self.assertDictEqual(
doc, {
'list': [
{
'age': 23
}
]
}
)
# Should validate fine
doc.validate()
def test_list_embed_required_fields_dot_notation(self):
"""Confirm list of object required field validation works"""
class ListEmbed(Document):
use_dot_notation = True
structure = {
'list': [
{
'name': basestring,
'age': int
}
]
}
required_fields = ['list.name']
self.connection.register([ListEmbed])
doc = self.col.ListEmbed()
self.assertDictEqual(doc, {'list': []})
doc.list = [{'name': 'bob'}]
self.assertDictEqual(
doc, {
'list': [
{
'name': 'bob'
}
]
}
)
# Should validate fine
doc.validate()
doc.list = [{'age': 23}]
self.assertDictEqual(
doc, {
'list': [
{
'age': 23
}
]
}
)
        try:
            doc.validate()
        except Exception:
            # expected to fail: 'list.name' is required but missing
            pass
        else:
            self.fail('Not a valid document')
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
| -1,479,203,696,639,042,800 | 22.698413 | 73 | 0.385577 | false |
TheWardoctor/Wardoctors-repo
|
script.module.uncoded/lib/resources/lib/modules/sources.py
|
1
|
56371
|
# -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys,re,json,urllib,urlparse,random,datetime,time
from resources.lib.modules import trakt
from resources.lib.modules import tvmaze
from resources.lib.modules import cache
from resources.lib.modules import control
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import workers
from resources.lib.modules import source_utils
from resources.lib.modules import log_utils
from resources.lib.modules import thexem
try: from sqlite3 import dbapi2 as database
except: from pysqlite2 import dbapi2 as database
try: import urlresolver
except: pass
try: import xbmc
except: pass
class sources:
def __init__(self):
self.getConstants()
self.sources = []
def play(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, meta, select):
try:
url = None
items = self.getSources(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered)
select = control.setting('hosts.mode') if select == None else select
title = tvshowtitle if not tvshowtitle == None else title
if control.window.getProperty('PseudoTVRunning') == 'True':
return control.resolve(int(sys.argv[1]), True, control.item(path=str(self.sourcesDirect(items))))
if len(items) > 0:
if select == '1' and 'plugin' in control.infoLabel('Container.PluginName'):
control.window.clearProperty(self.itemProperty)
control.window.setProperty(self.itemProperty, json.dumps(items))
control.window.clearProperty(self.metaProperty)
control.window.setProperty(self.metaProperty, meta)
control.sleep(200)
return control.execute('Container.Update(%s?action=addItem&title=%s)' % (sys.argv[0], urllib.quote_plus(title)))
elif select == '0' or select == '1':
url = self.sourcesDialog(items)
else:
url = self.sourcesDirect(items)
if url == None:
return self.errorForSources()
try: meta = json.loads(meta)
except: pass
from resources.lib.modules.player import player
player().run(title, year, season, episode, imdb, tvdb, url, meta)
except:
pass
def addItem(self, title):
control.playlist.clear()
items = control.window.getProperty(self.itemProperty)
items = json.loads(items)
if items == None or len(items) == 0: control.idle() ; sys.exit()
meta = control.window.getProperty(self.metaProperty)
meta = json.loads(meta)
# (Kodi bug?) [name,role] is incredibly slow on this directory, [name] is barely tolerable, so just nuke it for speed!
if 'cast' in meta: del(meta['cast'])
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
downloads = True if control.setting('downloads') == 'true' and not (control.setting('movie.download.path') == '' or control.setting('tv.download.path') == '') else False
systitle = sysname = urllib.quote_plus(title)
if 'tvshowtitle' in meta and 'season' in meta and 'episode' in meta:
sysname += urllib.quote_plus(' S%02dE%02d' % (int(meta['season']), int(meta['episode'])))
elif 'year' in meta:
sysname += urllib.quote_plus(' (%s)' % meta['year'])
poster = meta['poster3'] if 'poster3' in meta else '0'
if poster == '0': poster = meta['poster'] if 'poster' in meta else '0'
fanart = meta['fanart2'] if 'fanart2' in meta else '0'
if fanart == '0': fanart = meta['fanart'] if 'fanart' in meta else '0'
thumb = meta['thumb'] if 'thumb' in meta else '0'
if thumb == '0': thumb = poster
if thumb == '0': thumb = fanart
banner = meta['banner'] if 'banner' in meta else '0'
if banner == '0': banner = poster
if poster == '0': poster = control.addonPoster()
if banner == '0': banner = control.addonBanner()
if not control.setting('fanart') == 'true': fanart = '0'
if fanart == '0': fanart = control.addonFanart()
if thumb == '0': thumb = control.addonFanart()
sysimage = urllib.quote_plus(poster.encode('utf-8'))
downloadMenu = control.lang(32403).encode('utf-8')
for i in range(len(items)):
try:
label = items[i]['label']
syssource = urllib.quote_plus(json.dumps([items[i]]))
sysurl = '%s?action=playItem&title=%s&source=%s' % (sysaddon, systitle, syssource)
cm = []
if downloads == True:
cm.append((downloadMenu, 'RunPlugin(%s?action=download&name=%s&image=%s&source=%s)' % (sysaddon, sysname, sysimage, syssource)))
item = control.item(label=label)
item.setArt({'icon': thumb, 'thumb': thumb, 'poster': poster, 'banner': banner})
item.setProperty('Fanart_Image', fanart)
video_streaminfo = {'codec': 'h264'}
item.addStreamInfo('video', video_streaminfo)
item.addContextMenuItems(cm)
item.setInfo(type='Video', infoLabels = meta)
control.addItem(handle=syshandle, url=sysurl, listitem=item, isFolder=False)
except:
pass
control.content(syshandle, 'files')
control.directory(syshandle, cacheToDisc=True)
def playItem(self, title, source):
try:
meta = control.window.getProperty(self.metaProperty)
meta = json.loads(meta)
year = meta['year'] if 'year' in meta else None
season = meta['season'] if 'season' in meta else None
episode = meta['episode'] if 'episode' in meta else None
imdb = meta['imdb'] if 'imdb' in meta else None
tvdb = meta['tvdb'] if 'tvdb' in meta else None
next = [] ; prev = [] ; total = []
for i in range(1,1000):
try:
u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
if u in total: raise Exception()
total.append(u)
u = dict(urlparse.parse_qsl(u.replace('?','')))
u = json.loads(u['source'])[0]
next.append(u)
except:
break
for i in range(-1000,0)[::-1]:
try:
u = control.infoLabel('ListItem(%s).FolderPath' % str(i))
if u in total: raise Exception()
total.append(u)
u = dict(urlparse.parse_qsl(u.replace('?','')))
u = json.loads(u['source'])[0]
prev.append(u)
except:
break
items = json.loads(source)
items = [i for i in items+next+prev][:40]
header = control.addonInfo('name')
header2 = header.upper()
progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
progressDialog.create(header, '')
progressDialog.update(0)
block = None
for i in range(len(items)):
try:
try:
if progressDialog.iscanceled(): break
progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))
except:
progressDialog.update(int((100 / float(len(items))) * i), str(header2), str(items[i]['label']))
if items[i]['source'] == block: raise Exception()
w = workers.Thread(self.sourcesResolve, items[i])
w.start()
offset = 60 * 2 if items[i].get('source') in self.hostcapDict else 0
m = ''
for x in range(3600):
try:
if xbmc.abortRequested == True: return sys.exit()
if progressDialog.iscanceled(): return progressDialog.close()
except:
pass
k = control.condVisibility('Window.IsActive(virtualkeyboard)')
if k: m += '1'; m = m[-1]
if (w.is_alive() == False or x > 30 + offset) and not k: break
k = control.condVisibility('Window.IsActive(yesnoDialog)')
if k: m += '1'; m = m[-1]
if (w.is_alive() == False or x > 30 + offset) and not k: break
time.sleep(0.5)
for x in range(30):
try:
if xbmc.abortRequested == True: return sys.exit()
if progressDialog.iscanceled(): return progressDialog.close()
except:
pass
if m == '': break
if w.is_alive() == False: break
time.sleep(0.5)
if w.is_alive() == True: block = items[i]['source']
if self.url == None: raise Exception()
try: progressDialog.close()
except: pass
control.sleep(200)
control.execute('Dialog.Close(virtualkeyboard)')
control.execute('Dialog.Close(yesnoDialog)')
from resources.lib.modules.player import player
player().run(title, year, season, episode, imdb, tvdb, self.url, meta)
return self.url
except:
pass
try: progressDialog.close()
except: pass
self.errorForSources()
except:
pass
def getSources(self, title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, quality='HD', timeout=30):
progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
progressDialog.create(control.addonInfo('name'), '')
progressDialog.update(0)
self.prepareSources()
sourceDict = self.sourceDict
progressDialog.update(0, control.lang(32600).encode('utf-8'))
content = 'movie' if tvshowtitle == None else 'episode'
if content == 'movie':
sourceDict = [(i[0], i[1], getattr(i[1], 'movie', None)) for i in sourceDict]
genres = trakt.getGenre('movie', 'imdb', imdb)
else:
sourceDict = [(i[0], i[1], getattr(i[1], 'tvshow', None)) for i in sourceDict]
genres = trakt.getGenre('show', 'tvdb', tvdb)
sourceDict = [(i[0], i[1], i[2]) for i in sourceDict if not hasattr(i[1], 'genre_filter') or not i[1].genre_filter or any(x in i[1].genre_filter for x in genres)]
sourceDict = [(i[0], i[1]) for i in sourceDict if not i[2] == None]
language = self.getLanguage()
sourceDict = [(i[0], i[1], i[1].language) for i in sourceDict]
sourceDict = [(i[0], i[1]) for i in sourceDict if any(x in i[2] for x in language)]
try: sourceDict = [(i[0], i[1], control.setting('provider.' + i[0])) for i in sourceDict]
except: sourceDict = [(i[0], i[1], 'true') for i in sourceDict]
sourceDict = [(i[0], i[1]) for i in sourceDict if not i[2] == 'false']
sourceDict = [(i[0], i[1], i[1].priority) for i in sourceDict]
random.shuffle(sourceDict)
sourceDict = sorted(sourceDict, key=lambda i: i[2])
threads = []
if content == 'movie':
title = self.getTitle(title)
localtitle = self.getLocalTitle(title, imdb, tvdb, content)
aliases = self.getAliasTitles(imdb, localtitle, content)
for i in sourceDict: threads.append(workers.Thread(self.getMovieSource, title, localtitle, aliases, year, imdb, i[0], i[1]))
else:
tvshowtitle = self.getTitle(tvshowtitle)
localtvshowtitle = self.getLocalTitle(tvshowtitle, imdb, tvdb, content)
aliases = self.getAliasTitles(imdb, localtvshowtitle, content)
            #Disabled on 11/11/17 due to a hang. Should be checked in the future and possibly enabled again.
#season, episode = thexem.get_scene_episode_number(tvdb, season, episode)
for i in sourceDict: threads.append(workers.Thread(self.getEpisodeSource, title, year, imdb, tvdb, season, episode, tvshowtitle, localtvshowtitle, aliases, premiered, i[0], i[1]))
s = [i[0] + (i[1],) for i in zip(sourceDict, threads)]
s = [(i[3].getName(), i[0], i[2]) for i in s]
mainsourceDict = [i[0] for i in s if i[2] == 0]
sourcelabelDict = dict([(i[0], i[1].upper()) for i in s])
[i.start() for i in threads]
string1 = control.lang(32404).encode('utf-8')
string2 = control.lang(32405).encode('utf-8')
string3 = control.lang(32406).encode('utf-8')
string4 = control.lang(32601).encode('utf-8')
string5 = control.lang(32602).encode('utf-8')
string6 = control.lang(32606).encode('utf-8')
string7 = control.lang(32607).encode('utf-8')
try: timeout = int(control.setting('scrapers.timeout.1'))
except: pass
quality = control.setting('hosts.quality')
if quality == '': quality = '0'
line1 = line2 = line3 = ""
source_4k = d_source_4k = 0
source_1080 = d_source_1080 = 0
source_720 = d_source_720 = 0
source_sd = d_source_sd = 0
total = d_total = 0
debrid_list = debrid.debrid_resolvers
debrid_status = debrid.status()
total_format = '[COLOR %s][B]%s[/B][/COLOR]'
pdiag_format = ' 4K: %s | 1080p: %s | 720p: %s | SD: %s | %s: %s'.split('|')
pdiag_bg_format = '4K:%s(%s)|1080p:%s(%s)|720p:%s(%s)|SD:%s(%s)|T:%s(%s)'.split('|')
for i in range(0, 4 * timeout):
try:
if xbmc.abortRequested == True: return sys.exit()
try:
if progressDialog.iscanceled(): break
except:
pass
if len(self.sources) > 0:
if quality in ['0']:
source_4k = len([e for e in self.sources if e['quality'] == '4K' and e['debridonly'] == False])
source_1080 = len([e for e in self.sources if e['quality'] in ['1440p','1080p'] and e['debridonly'] == False])
source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and e['debridonly'] == False])
source_sd = len([e for e in self.sources if e['quality'] == 'SD' and e['debridonly'] == False])
elif quality in ['1']:
source_1080 = len([e for e in self.sources if e['quality'] in ['1440p','1080p'] and e['debridonly'] == False])
source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and e['debridonly'] == False])
source_sd = len([e for e in self.sources if e['quality'] == 'SD' and e['debridonly'] == False])
elif quality in ['2']:
source_1080 = len([e for e in self.sources if e['quality'] in ['1080p'] and e['debridonly'] == False])
source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and e['debridonly'] == False])
source_sd = len([e for e in self.sources if e['quality'] == 'SD' and e['debridonly'] == False])
elif quality in ['3']:
source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and e['debridonly'] == False])
source_sd = len([e for e in self.sources if e['quality'] == 'SD' and e['debridonly'] == False])
else:
source_sd = len([e for e in self.sources if e['quality'] == 'SD' and e['debridonly'] == False])
total = source_4k + source_1080 + source_720 + source_sd
if debrid_status:
if quality in ['0']:
for d in debrid_list:
d_source_4k = len([e for e in self.sources if e['quality'] == '4K' and d.valid_url('', e['source'])])
d_source_1080 = len([e for e in self.sources if e['quality'] in ['1440p','1080p'] and d.valid_url('', e['source'])])
d_source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and d.valid_url('', e['source'])])
d_source_sd = len([e for e in self.sources if e['quality'] == 'SD' and d.valid_url('', e['source'])])
elif quality in ['1']:
for d in debrid_list:
d_source_1080 = len([e for e in self.sources if e['quality'] in ['1440p','1080p'] and d.valid_url('', e['source'])])
d_source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and d.valid_url('', e['source'])])
d_source_sd = len([e for e in self.sources if e['quality'] == 'SD' and d.valid_url('', e['source'])])
elif quality in ['2']:
for d in debrid_list:
d_source_1080 = len([e for e in self.sources if e['quality'] in ['1080p'] and d.valid_url('', e['source'])])
d_source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and d.valid_url('', e['source'])])
d_source_sd = len([e for e in self.sources if e['quality'] == 'SD' and d.valid_url('', e['source'])])
elif quality in ['3']:
for d in debrid_list:
d_source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD'] and d.valid_url('', e['source'])])
d_source_sd = len([e for e in self.sources if e['quality'] == 'SD' and d.valid_url('', e['source'])])
else:
for d in debrid_list:
d_source_sd = len([e for e in self.sources if e['quality'] == 'SD' and d.valid_url('', e['source'])])
d_total = d_source_4k + d_source_1080 + d_source_720 + d_source_sd
if debrid_status:
d_4k_label = total_format % ('red', d_source_4k) if d_source_4k == 0 else total_format % ('lime', d_source_4k)
d_1080_label = total_format % ('red', d_source_1080) if d_source_1080 == 0 else total_format % ('lime', d_source_1080)
d_720_label = total_format % ('red', d_source_720) if d_source_720 == 0 else total_format % ('lime', d_source_720)
d_sd_label = total_format % ('red', d_source_sd) if d_source_sd == 0 else total_format % ('lime', d_source_sd)
d_total_label = total_format % ('red', d_total) if d_total == 0 else total_format % ('lime', d_total)
source_4k_label = total_format % ('red', source_4k) if source_4k == 0 else total_format % ('lime', source_4k)
source_1080_label = total_format % ('red', source_1080) if source_1080 == 0 else total_format % ('lime', source_1080)
source_720_label = total_format % ('red', source_720) if source_720 == 0 else total_format % ('lime', source_720)
source_sd_label = total_format % ('red', source_sd) if source_sd == 0 else total_format % ('lime', source_sd)
source_total_label = total_format % ('red', total) if total == 0 else total_format % ('lime', total)
if (i / 2) < timeout:
try:
mainleft = [sourcelabelDict[x.getName()] for x in threads if x.is_alive() == True and x.getName() in mainsourceDict]
info = [sourcelabelDict[x.getName()] for x in threads if x.is_alive() == True]
if i >= timeout and len(mainleft) == 0 and len(self.sources) >= 100 * len(info): break # improve responsiveness
if debrid_status:
if quality in ['0']:
if not progressDialog == control.progressDialogBG:
line1 = ('%s:' + '|'.join(pdiag_format)) % (string6, d_4k_label, d_1080_label, d_720_label, d_sd_label, str(string4), d_total_label)
line2 = ('%s:' + '|'.join(pdiag_format)) % (string7, source_4k_label, source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
else:
line1 = '|'.join(pdiag_bg_format[:-1]) % (source_4k_label, d_4k_label, source_1080_label, d_1080_label, source_720_label, d_720_label, source_sd_label, d_sd_label)
elif quality in ['1']:
if not progressDialog == control.progressDialogBG:
line1 = ('%s:' + '|'.join(pdiag_format[1:])) % (string6, d_1080_label, d_720_label, d_sd_label, str(string4), d_total_label)
line2 = ('%s:' + '|'.join(pdiag_format[1:])) % (string7, source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
else:
line1 = '|'.join(pdiag_bg_format[1:]) % (source_1080_label, d_1080_label, source_720_label, d_720_label, source_sd_label, d_sd_label, source_total_label, d_total_label)
elif quality in ['2']:
if not progressDialog == control.progressDialogBG:
line1 = ('%s:' + '|'.join(pdiag_format[1:])) % (string6, d_1080_label, d_720_label, d_sd_label, str(string4), d_total_label)
line2 = ('%s:' + '|'.join(pdiag_format[1:])) % (string7, source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
else:
line1 = '|'.join(pdiag_bg_format[1:]) % (source_1080_label, d_1080_label, source_720_label, d_720_label, source_sd_label, d_sd_label, source_total_label, d_total_label)
elif quality in ['3']:
if not progressDialog == control.progressDialogBG:
line1 = ('%s:' + '|'.join(pdiag_format[2:])) % (string6, d_720_label, d_sd_label, str(string4), d_total_label)
line2 = ('%s:' + '|'.join(pdiag_format[2:])) % (string7, source_720_label, source_sd_label, str(string4), source_total_label)
else:
line1 = '|'.join(pdiag_bg_format[2:]) % (source_720_label, d_720_label, source_sd_label, d_sd_label, source_total_label, d_total_label)
else:
if not progressDialog == control.progressDialogBG:
line1 = ('%s:' + '|'.join(pdiag_format[3:])) % (string6, d_sd_label, str(string4), d_total_label)
line2 = ('%s:' + '|'.join(pdiag_format[3:])) % (string7, source_sd_label, str(string4), source_total_label)
else:
line1 = '|'.join(pdiag_bg_format[3:]) % (source_sd_label, d_sd_label, source_total_label, d_total_label)
else:
if quality in ['0']:
line1 = '|'.join(pdiag_format) % (source_4k_label, source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
elif quality in ['1']:
line1 = '|'.join(pdiag_format[1:]) % (source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
elif quality in ['2']:
line1 = '|'.join(pdiag_format[1:]) % (source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
elif quality in ['3']:
line1 = '|'.join(pdiag_format[2:]) % (source_720_label, source_sd_label, str(string4), source_total_label)
else:
line1 = '|'.join(pdiag_format[3:]) % (source_sd_label, str(string4), source_total_label)
if debrid_status:
if len(info) > 6: line3 = string3 % (str(len(info)))
elif len(info) > 0: line3 = string3 % (', '.join(info))
else: break
percent = int(100 * float(i) / (2 * timeout) + 0.5)
if not progressDialog == control.progressDialogBG: progressDialog.update(max(1, percent), line1, line2, line3)
else: progressDialog.update(max(1, percent), line1, line3)
else:
if len(info) > 6: line2 = string3 % (str(len(info)))
elif len(info) > 0: line2 = string3 % (', '.join(info))
else: break
percent = int(100 * float(i) / (2 * timeout) + 0.5)
progressDialog.update(max(1, percent), line1, line2)
except Exception as e:
log_utils.log('Exception Raised: %s' % str(e), log_utils.LOGERROR)
else:
try:
mainleft = [sourcelabelDict[x.getName()] for x in threads if x.is_alive() == True and x.getName() in mainsourceDict]
info = mainleft
if debrid_status:
if len(info) > 6: line3 = 'Waiting for: %s' % (str(len(info)))
elif len(info) > 0: line3 = 'Waiting for: %s' % (', '.join(info))
else: break
percent = int(100 * float(i) / (2 * timeout) + 0.5) % 100
if not progressDialog == control.progressDialogBG: progressDialog.update(max(1, percent), line1, line2, line3)
else: progressDialog.update(max(1, percent), line1, line3)
else:
if len(info) > 6: line2 = 'Waiting for: %s' % (str(len(info)))
elif len(info) > 0: line2 = 'Waiting for: %s' % (', '.join(info))
else: break
percent = int(100 * float(i) / (2 * timeout) + 0.5) % 100
progressDialog.update(max(1, percent), line1, line2)
except:
break
time.sleep(0.5)
except:
pass
if control.addonInfo('id') == 'plugin.video.bennu':
try:
if progressDialog: progressDialog.update(100, control.lang(30726).encode('utf-8'), control.lang(30731).encode('utf-8'))
items = self.sourcesFilter()
if quality == 'RD': items = [i for i in items if i['debrid'] != '']
elif quality == 'SD': items = [i for i in items if i['quality'] == 'SD' and i['debrid'] == '']
elif quality == 'HD': items = [i for i in items if i['quality'] != 'SD']
if control.setting('bennu.dev.log') == 'true':
log_utils.log('Sources Returned: %s' % str(items), log_utils.LOGNOTICE)
try: progressDialog.close()
except: pass
if quality == 'AUTO':
u = self.sourcesDirect(items)
return u
else:
meta = '{"title": "%s", "year": "%s", "imdb": "%s"}' % (title, year, imdb)
'''control.window.clearProperty("plugin.video.bennu.container.items")
control.window.setProperty("plugin.video.bennu.container.items", json.dumps(items))
control.window.clearProperty("plugin.video.bennu.container.meta")
control.window.setProperty("plugin.video.bennu.container.meta", meta)'''
control.window.clearProperty(self.itemProperty)
control.window.setProperty(self.itemProperty, json.dumps(items))
control.window.clearProperty(self.metaProperty)
control.window.setProperty(self.metaProperty, meta)
control.sleep(200)
control.execute('Container.Update(%s?action=addItem&title=%s)' % (sys.argv[0], urllib.quote_plus(title)))
return "DIR"
except:
try: progressDialog.close()
except: pass
return
else:
try: progressDialog.close()
except: pass
self.sourcesFilter()
return self.sources
def prepareSources(self):
try:
control.makeFile(control.dataPath)
self.sourceFile = control.providercacheFile
dbcon = database.connect(self.sourceFile)
dbcur = dbcon.cursor()
dbcur.execute("CREATE TABLE IF NOT EXISTS rel_url (""source TEXT, ""imdb_id TEXT, ""season TEXT, ""episode TEXT, ""rel_url TEXT, ""UNIQUE(source, imdb_id, season, episode)"");")
dbcur.execute("CREATE TABLE IF NOT EXISTS rel_src (""source TEXT, ""imdb_id TEXT, ""season TEXT, ""episode TEXT, ""hosts TEXT, ""added TEXT, ""UNIQUE(source, imdb_id, season, episode)"");")
except:
pass
def getMovieSource(self, title, localtitle, aliases, year, imdb, source, call):
try:
dbcon = database.connect(self.sourceFile)
dbcur = dbcon.cursor()
except:
pass
''' Fix to stop items passed with a 0 IMDB id pulling old unrelated sources from the database. '''
if imdb == '0':
try:
dbcur.execute("DELETE FROM rel_src WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
dbcur.execute("DELETE FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
dbcon.commit()
except:
pass
''' END '''
try:
sources = []
dbcur.execute("SELECT * FROM rel_src WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
match = dbcur.fetchone()
t1 = int(re.sub('[^0-9]', '', str(match[5])))
t2 = int(datetime.datetime.now().strftime("%Y%m%d%H%M"))
update = abs(t2 - t1) > 60
if update == False:
sources = eval(match[4].encode('utf-8'))
return self.sources.extend(sources)
except:
pass
try:
url = None
dbcur.execute("SELECT * FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
url = dbcur.fetchone()
url = eval(url[4].encode('utf-8'))
except:
pass
try:
if url == None: url = call.movie(imdb, title, localtitle, aliases, year)
if url == None: raise Exception()
dbcur.execute("DELETE FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
dbcur.execute("INSERT INTO rel_url Values (?, ?, ?, ?, ?)", (source, imdb, '', '', repr(url)))
dbcon.commit()
except:
pass
try:
sources = []
sources = call.sources(url, self.hostDict, self.hostprDict)
if sources == None or sources == []: raise Exception()
sources = [json.loads(t) for t in set(json.dumps(d, sort_keys=True) for d in sources)]
for i in sources: i.update({'provider': source})
self.sources.extend(sources)
dbcur.execute("DELETE FROM rel_src WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
dbcur.execute("INSERT INTO rel_src Values (?, ?, ?, ?, ?, ?)", (source, imdb, '', '', repr(sources), datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
dbcon.commit()
except:
pass
def getEpisodeSource(self, title, year, imdb, tvdb, season, episode, tvshowtitle, localtvshowtitle, aliases, premiered, source, call):
try:
dbcon = database.connect(self.sourceFile)
dbcur = dbcon.cursor()
except:
pass
try:
sources = []
dbcur.execute("SELECT * FROM rel_src WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, season, episode))
match = dbcur.fetchone()
t1 = int(re.sub('[^0-9]', '', str(match[5])))
t2 = int(datetime.datetime.now().strftime("%Y%m%d%H%M"))
update = abs(t2 - t1) > 60
if update == False:
sources = eval(match[4].encode('utf-8'))
return self.sources.extend(sources)
except:
pass
try:
url = None
dbcur.execute("SELECT * FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
url = dbcur.fetchone()
url = eval(url[4].encode('utf-8'))
except:
pass
try:
if url == None: url = call.tvshow(imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year)
if url == None: raise Exception()
dbcur.execute("DELETE FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, '', ''))
dbcur.execute("INSERT INTO rel_url Values (?, ?, ?, ?, ?)", (source, imdb, '', '', repr(url)))
dbcon.commit()
except:
pass
try:
ep_url = None
dbcur.execute("SELECT * FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, season, episode))
ep_url = dbcur.fetchone()
ep_url = eval(ep_url[4].encode('utf-8'))
except:
pass
try:
if url == None: raise Exception()
if ep_url == None: ep_url = call.episode(url, imdb, tvdb, title, premiered, season, episode)
if ep_url == None: raise Exception()
dbcur.execute("DELETE FROM rel_url WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, season, episode))
dbcur.execute("INSERT INTO rel_url Values (?, ?, ?, ?, ?)", (source, imdb, season, episode, repr(ep_url)))
dbcon.commit()
except:
pass
try:
sources = []
sources = call.sources(ep_url, self.hostDict, self.hostprDict)
if sources == None or sources == []: raise Exception()
sources = [json.loads(t) for t in set(json.dumps(d, sort_keys=True) for d in sources)]
for i in sources: i.update({'provider': source})
self.sources.extend(sources)
dbcur.execute("DELETE FROM rel_src WHERE source = '%s' AND imdb_id = '%s' AND season = '%s' AND episode = '%s'" % (source, imdb, season, episode))
dbcur.execute("INSERT INTO rel_src Values (?, ?, ?, ?, ?, ?)", (source, imdb, season, episode, repr(sources), datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
dbcon.commit()
except:
pass
def alterSources(self, url, meta):
try:
if control.setting('hosts.mode') == '2': url += '&select=1'
else: url += '&select=2'
control.execute('RunPlugin(%s)' % url)
except:
pass
def clearSources(self):
try:
control.idle()
yes = control.yesnoDialog(control.lang(32407).encode('utf-8'), '', '')
if not yes: return
control.makeFile(control.dataPath)
dbcon = database.connect(control.providercacheFile)
dbcur = dbcon.cursor()
dbcur.execute("DROP TABLE IF EXISTS rel_src")
dbcur.execute("DROP TABLE IF EXISTS rel_url")
dbcur.execute("VACUUM")
dbcon.commit()
control.infoDialog(control.lang(32408).encode('utf-8'), sound=True, icon='INFO')
except:
pass
def sourcesFilter(self):
provider = control.setting('hosts.sort.provider')
if provider == '': provider = 'false'
quality = control.setting('hosts.quality')
if quality == '': quality = '0'
captcha = control.setting('hosts.captcha')
if captcha == '': captcha = 'true'
HEVC = control.setting('HEVC')
random.shuffle(self.sources)
if provider == 'true':
self.sources = sorted(self.sources, key=lambda k: k['provider'])
for i in self.sources:
if 'checkquality' in i and i['checkquality'] == True:
if not i['source'].lower() in self.hosthqDict and i['quality'] not in ['SD', 'SCR', 'CAM']: i.update({'quality': 'SD'})
local = [i for i in self.sources if 'local' in i and i['local'] == True]
for i in local: i.update({'language': self._getPrimaryLang() or 'en'})
self.sources = [i for i in self.sources if not i in local]
filter = []
filter += [i for i in self.sources if i['direct'] == True]
filter += [i for i in self.sources if i['direct'] == False]
self.sources = filter
filter = []
for d in debrid.debrid_resolvers:
valid_hoster = set([i['source'] for i in self.sources])
valid_hoster = [i for i in valid_hoster if d.valid_url('', i)]
filter += [dict(i.items() + [('debrid', d.name)]) for i in self.sources if i['source'] in valid_hoster]
filter += [i for i in self.sources if not i['source'].lower() in self.hostprDict and i['debridonly'] == False]
self.sources = filter
for i in range(len(self.sources)):
q = self.sources[i]['quality']
if q == 'HD': self.sources[i].update({'quality': '720p'})
filter = []
filter += local
if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4K' and 'debrid' in i]
if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4K' and not 'debrid' in i and 'memberonly' in i]
if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4K' and not 'debrid' in i and not 'memberonly' in i]
if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '1440p' and 'debrid' in i]
if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '1440p' and not 'debrid' in i and 'memberonly' in i]
if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '1440p' and not 'debrid' in i and not 'memberonly' in i]
if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and 'debrid' in i]
if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and not 'debrid' in i and 'memberonly' in i]
if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and not 'debrid' in i and not 'memberonly' in i]
if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == '720p' and 'debrid' in i]
if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == '720p' and not 'debrid' in i and 'memberonly' in i]
if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == '720p' and not 'debrid' in i and not 'memberonly' in i]
filter += [i for i in self.sources if i['quality'] in ['SD', 'SCR', 'CAM']]
self.sources = filter
if not captcha == 'true':
filter = [i for i in self.sources if i['source'].lower() in self.hostcapDict and not 'debrid' in i]
self.sources = [i for i in self.sources if not i in filter]
filter = [i for i in self.sources if i['source'].lower() in self.hostblockDict and not 'debrid' in i]
self.sources = [i for i in self.sources if not i in filter]
multi = [i['language'] for i in self.sources]
multi = [x for y,x in enumerate(multi) if x not in multi[:y]]
multi = True if len(multi) > 1 else False
if multi == True:
self.sources = [i for i in self.sources if not i['language'] == 'en'] + [i for i in self.sources if i['language'] == 'en']
self.sources = self.sources[:2000]
extra_info = control.setting('sources.extrainfo')
prem_identify = control.setting('prem.identify')
if prem_identify == '': prem_identify = 'blue'
prem_identify = self.getPremColor(prem_identify)
for i in range(len(self.sources)):
if extra_info == 'true': t = source_utils.getFileType(self.sources[i]['url'])
else: t = None
u = self.sources[i]['url']
p = self.sources[i]['provider']
q = self.sources[i]['quality']
s = self.sources[i]['source']
s = s.rsplit('.', 1)[0]
l = self.sources[i]['language']
try: f = (' | '.join(['[I]%s [/I]' % info.strip() for info in self.sources[i]['info'].split('|')]))
except: f = ''
try: d = self.sources[i]['debrid']
except: d = self.sources[i]['debrid'] = ''
if d.lower() == 'real-debrid': d = 'RD'
if not d == '': label = '%02d | [B]%s | %s[/B] | ' % (int(i+1), d, p)
else: label = '%02d | [B]%s[/B] | ' % (int(i+1), p)
if multi == True and not l == 'en': label += '[B]%s[/B] | ' % l
if t:
if q in ['4K', '1440p', '1080p', '720p']: label += '%s | [B][I]%s [/I][/B] | [I]%s[/I] | %s' % (s, q, t, f)
elif q == 'SD': label += '%s | %s | [I]%s[/I]' % (s, f, t)
else: label += '%s | %s | [I]%s [/I] | [I]%s[/I]' % (s, f, q, t)
else:
if q in ['4K', '1440p', '1080p', '720p']: label += '%s | [B][I]%s [/I][/B] | %s' % (s, q, f)
elif q == 'SD': label += '%s | %s' % (s, f)
else: label += '%s | %s | [I]%s [/I]' % (s, f, q)
label = label.replace('| 0 |', '|').replace(' | [I]0 [/I]', '')
label = re.sub('\[I\]\s+\[/I\]', ' ', label)
label = re.sub('\|\s+\|', '|', label)
label = re.sub('\|(?:\s+|)$', '', label)
if d:
if not prem_identify == 'nocolor':
self.sources[i]['label'] = ('[COLOR %s]' % (prem_identify)) + label.upper() + '[/COLOR]'
else: self.sources[i]['label'] = label.upper()
else: self.sources[i]['label'] = label.upper()
try:
if not HEVC == 'true': self.sources = [i for i in self.sources if not 'HEVC' in i['label']]
except: pass
self.sources = [i for i in self.sources if 'label' in i]
return self.sources
def sourcesResolve(self, item, info=False):
try:
self.url = None
u = url = item['url']
d = item['debrid'] ; direct = item['direct']
local = item.get('local', False)
provider = item['provider']
call = [i[1] for i in self.sourceDict if i[0] == provider][0]
u = url = call.resolve(url)
if url == None or (not '://' in str(url) and not local): raise Exception()
if not local:
url = url[8:] if url.startswith('stack:') else url
urls = []
for part in url.split(' , '):
u = part
if not d == '':
part = debrid.resolver(part, d)
elif not direct == True:
hmf = urlresolver.HostedMediaFile(url=u, include_disabled=True, include_universal=False)
if hmf.valid_url() == True: part = hmf.resolve()
urls.append(part)
url = 'stack://' + ' , '.join(urls) if len(urls) > 1 else urls[0]
if url == False or url == None: raise Exception()
ext = url.split('?')[0].split('&')[0].split('|')[0].rsplit('.')[-1].replace('/', '').lower()
if ext == 'rar': raise Exception()
try: headers = url.rsplit('|', 1)[1]
except: headers = ''
headers = urllib.quote_plus(headers).replace('%3D', '=') if ' ' in headers else headers
headers = dict(urlparse.parse_qsl(headers))
if url.startswith('http') and '.m3u8' in url:
result = client.request(url.split('|')[0], headers=headers, output='geturl', timeout='20')
if result == None: raise Exception()
elif url.startswith('http'):
result = client.request(url.split('|')[0], headers=headers, output='chunk', timeout='20')
if result == None: raise Exception()
self.url = url
return url
except:
if info == True: self.errorForSources()
return
def sourcesDialog(self, items):
try:
labels = [i['label'] for i in items]
select = control.selectDialog(labels)
if select == -1: return 'close://'
next = [y for x,y in enumerate(items) if x >= select]
prev = [y for x,y in enumerate(items) if x < select][::-1]
items = [items[select]]
items = [i for i in items+next+prev][:40]
header = control.addonInfo('name')
header2 = header.upper()
progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
progressDialog.create(header, '')
progressDialog.update(0)
block = None
for i in range(len(items)):
try:
if items[i]['source'] == block: raise Exception()
w = workers.Thread(self.sourcesResolve, items[i])
w.start()
try:
if progressDialog.iscanceled(): break
progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))
except:
progressDialog.update(int((100 / float(len(items))) * i), str(header2), str(items[i]['label']))
m = ''
for x in range(3600):
try:
if xbmc.abortRequested == True: return sys.exit()
if progressDialog.iscanceled(): return progressDialog.close()
except:
pass
k = control.condVisibility('Window.IsActive(virtualkeyboard)')
if k: m += '1'; m = m[-1]
if (w.is_alive() == False or x > 30) and not k: break
k = control.condVisibility('Window.IsActive(yesnoDialog)')
if k: m += '1'; m = m[-1]
if (w.is_alive() == False or x > 30) and not k: break
time.sleep(0.5)
for x in range(30):
try:
if xbmc.abortRequested == True: return sys.exit()
if progressDialog.iscanceled(): return progressDialog.close()
except:
pass
if m == '': break
if w.is_alive() == False: break
time.sleep(0.5)
if w.is_alive() == True: block = items[i]['source']
if self.url == None: raise Exception()
self.selectedSource = items[i]['label']
try: progressDialog.close()
except: pass
control.execute('Dialog.Close(virtualkeyboard)')
control.execute('Dialog.Close(yesnoDialog)')
return self.url
except:
pass
try: progressDialog.close()
except: pass
except Exception as e:
try: progressDialog.close()
except: pass
log_utils.log('Error %s' % str(e), log_utils.LOGNOTICE)
def sourcesDirect(self, items):
filter = [i for i in items if i['source'].lower() in self.hostcapDict and i['debrid'] == '']
items = [i for i in items if not i in filter]
filter = [i for i in items if i['source'].lower() in self.hostblockDict and i['debrid'] == '']
items = [i for i in items if not i in filter]
items = [i for i in items if ('autoplay' in i and i['autoplay'] == True) or not 'autoplay' in i]
if control.setting('autoplay.sd') == 'true':
items = [i for i in items if not i['quality'] in ['4K', '1440p', '1080p', 'HD']]
u = None
header = control.addonInfo('name')
header2 = header.upper()
try:
control.sleep(1000)
progressDialog = control.progressDialog if control.setting('progress.dialog') == '0' else control.progressDialogBG
progressDialog.create(header, '')
progressDialog.update(0)
except:
pass
for i in range(len(items)):
try:
if progressDialog.iscanceled(): break
progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']), str(' '))
except:
progressDialog.update(int((100 / float(len(items))) * i), str(header2), str(items[i]['label']))
try:
if xbmc.abortRequested == True: return sys.exit()
url = self.sourcesResolve(items[i])
if u == None: u = url
if not url == None: break
except:
pass
try: progressDialog.close()
except: pass
return u
def errorForSources(self):
control.infoDialog(control.lang(32401).encode('utf-8'), sound=False, icon='INFO')
def getLanguage(self):
langDict = {'English': ['en'], 'German': ['de'], 'German+English': ['de','en'], 'French': ['fr'], 'French+English': ['fr', 'en'], 'Portuguese': ['pt'], 'Portuguese+English': ['pt', 'en'], 'Polish': ['pl'], 'Polish+English': ['pl', 'en'], 'Korean': ['ko'], 'Korean+English': ['ko', 'en'], 'Russian': ['ru'], 'Russian+English': ['ru', 'en'], 'Spanish': ['es'], 'Spanish+English': ['es', 'en'], 'Greek': ['gr'], 'Italian': ['it'], 'Italian+English': ['it', 'en'], 'Greek+English': ['gr', 'en']}
name = control.setting('providers.lang')
return langDict.get(name, ['en'])
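        # e.g. a 'providers.lang' setting of 'German+English' yields ['de', 'en'];
        # unknown or empty settings fall back to ['en']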
def getLocalTitle(self, title, imdb, tvdb, content):
lang = self._getPrimaryLang()
if not lang:
return title
if content == 'movie':
t = trakt.getMovieTranslation(imdb, lang)
else:
t = tvmaze.tvMaze().getTVShowTranslation(tvdb, lang)
return t or title
def getAliasTitles(self, imdb, localtitle, content):
lang = self._getPrimaryLang()
try:
t = trakt.getMovieAliases(imdb) if content == 'movie' else trakt.getTVShowAliases(imdb)
t = [i for i in t if i.get('country', '').lower() in [lang, '', 'us'] and i.get('title', '').lower() != localtitle.lower()]
return t
except:
return []
def _getPrimaryLang(self):
langDict = {'English': 'en', 'German': 'de', 'German+English': 'de', 'French': 'fr', 'French+English': 'fr', 'Portuguese': 'pt', 'Portuguese+English': 'pt', 'Polish': 'pl', 'Polish+English': 'pl', 'Korean': 'ko', 'Korean+English': 'ko', 'Russian': 'ru', 'Russian+English': 'ru', 'Spanish': 'es', 'Spanish+English': 'es', 'Italian': 'it', 'Italian+English': 'it', 'Greek': 'gr', 'Greek+English': 'gr'}
name = control.setting('providers.lang')
lang = langDict.get(name)
return lang
def getTitle(self, title):
title = cleantitle.normalize(title)
return title
def getConstants(self):
self.itemProperty = 'plugin.video.covenant.container.items'
self.metaProperty = 'plugin.video.covenant.container.meta'
from resources.lib.sources import sources
self.sourceDict = sources()
try:
self.hostDict = urlresolver.relevant_resolvers(order_matters=True)
self.hostDict = [i.domains for i in self.hostDict if not '*' in i.domains]
self.hostDict = [i.lower() for i in reduce(lambda x, y: x+y, self.hostDict)]
self.hostDict = [x for y,x in enumerate(self.hostDict) if x not in self.hostDict[:y]]
except:
self.hostDict = []
self.hostprDict = ['1fichier.com', 'oboom.com', 'rapidgator.net', 'rg.to', 'uploaded.net', 'uploaded.to', 'ul.to', 'filefactory.com', 'nitroflare.com', 'turbobit.net', 'uploadrocket.net']
self.hostcapDict = ['hugefiles.net', 'kingfiles.net', 'openload.io', 'openload.co', 'oload.tv', 'thevideo.me', 'vidup.me', 'streamin.to', 'torba.se']
self.hosthqDict = ['gvideo', 'google.com', 'openload.io', 'openload.co', 'oload.tv', 'thevideo.me', 'rapidvideo.com', 'raptu.com', 'filez.tv', 'uptobox.com', 'uptobox.com', 'uptostream.com', 'xvidstage.com', 'streamango.com']
self.hostblockDict = []
def getPremColor(self, n):
if n == '0': n = 'blue'
elif n == '1': n = 'red'
elif n == '2': n = 'yellow'
elif n == '3': n = 'deeppink'
elif n == '4': n = 'cyan'
elif n == '5': n = 'lawngreen'
elif n == '6': n = 'gold'
elif n == '7': n = 'magenta'
elif n == '8': n = 'yellowgreen'
elif n == '9': n = 'nocolor'
        else: n = 'blue'
return n
|
apache-2.0
| 7,397,716,890,154,723,000 | 46.370588 | 499 | 0.509021 | false |
vgrem/Office365-REST-Python-Client
|
office365/runtime/queries/client_query.py
|
1
|
1772
|
class ClientQuery(object):
"""Client query"""
def __init__(self, context, binding_type=None, parameter_type=None, parameter_name=None, return_type=None):
"""
Base query
:type context: office365.runtime.client_runtime_context.ClientRuntimeContext
:type binding_type: office365.runtime.client_object.ClientObject or None
:type parameter_type: office365.runtime.client_object.ClientObject or ClientValue or dict or bytes or None
:type parameter_name: str or None
:type return_type: office365.runtime.client_object.ClientObject or office365.runtime.client_result.ClientResult
or office365.runtime.client_value.ClientValue or None
"""
self._context = context
self._binding_type = binding_type
self._parameter_type = parameter_type
self._parameter_name = parameter_name
self._return_type = return_type
def build_url(self):
return self._binding_type.resource_url
def build_request(self):
return self.context.build_single_request(self)
def execute_query(self):
self.context.execute_query()
return self.return_type
@property
def context(self):
return self._context
@property
def id(self):
return id(self)
@property
def binding_type(self):
return self._binding_type
@property
def parameter_name(self):
return self._parameter_name
@property
def parameter_type(self):
if isinstance(self._parameter_type, dict):
return {k: v for k, v in self._parameter_type.items() if v is not None}
else:
return self._parameter_type
@property
def return_type(self):
return self._return_type
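# Minimal usage sketch ('ctx' and 'web' are hypothetical placeholders for a
# ClientRuntimeContext and a ClientObject; this only illustrates the call order):
#
#   query = ClientQuery(ctx, binding_type=web, parameter_type={'Title': 'Docs'})
#   request = query.build_request()   # delegates to ctx.build_single_request(query)
#   result = query.execute_query()    # runs ctx.execute_query(), returns query.return_type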
|
mit
| -7,357,655,738,722,110,000 | 30.642857 | 120 | 0.649549 | false |
mago1chi/cTPR
|
parse_proc.py
|
1
|
3278
|
import os
import re
import cTPR
class Parser():
def __init__(self, fileName="tweet.txt"):
self.fileName = fileName
self.parsed_list = []
self.count_dic = {}
self.raw_list = []
def parse(self, tweet):
self.parsed_list = []
self.count_dic = {}
self.raw_list = []
filtered_tweet = self.filter_tweet(tweet)
f = open(self.fileName, 'w')
f.write(filtered_tweet)
f.close()
cmd = 'mecab ' + self.fileName
proc = os.popen(cmd)
result = proc.read()
proc.close()
result = re.sub(r'\n', '\t', result)
taggered_list = result.split('\t')
pos = 1
while pos <= len(taggered_list)-1:
term = ""
while cTPR.cTPR.detect_noise(taggered_list[pos]):
term += taggered_list[pos-1]
if pos < len(taggered_list)-1:
pos += 2
else:
break
if term != "":
if len(term) > 1:
self.parsed_list.append(term)
if not term in self.count_dic.keys():
self.count_dic[term] = 1
else:
self.count_dic[term] += 1
else:
self.parsed_list.append(-1)
self.raw_list.append(term)
      if pos == len(taggered_list)-1:
break
else:
self.parsed_list.append(-1)
self.raw_list.append(taggered_list[pos-1])
pos += 2
#for i in range(len(taggered_list))[1::2]:
# surface = taggered_list[i-1]
# feature = taggered_list[i]
#
# self.raw_list.append(surface)
#
# if cTPR.cTPR.detect_noise(surface, feature):
# self.parsed_list.append(surface)
#
# if not surface in self.count_dic.keys():
# self.count_dic[surface] = 1
# else:
# self.count_dic[surface] += 1
# else:
# self.parsed_list.append(-1)
@staticmethod
def filter_tweet(tweet):
        # Remove RT markers and hashtag symbols
tweet = re.sub(r'RT|rt|#', ' ', tweet)
        # Convert half-width spaces to full-width spaces
        # (prevents nouns separated by half-width spaces from being merged)
tweet = re.sub(r' ', ' ', tweet)
        # Remove URLs
tweet = re.sub(r"http[s]*://[a-zA-Z0-9./-_!*'();%s:@&=+$,%]+", ' ', tweet)
        # Remove punctuation symbols
tweet = re.sub(r'!|\?|!|?', ' ', tweet)
        # Remove kaomoji (emoticons)
match_tweet = '[0-9A-Za-zぁ-ヶ一-龠]'
non_tweet = '[^0-9A-Za-zぁ-ヶ一-龠]'
allow_tweet = '[ovっつ゜ニノ三二]'
hw_kana = '[ヲ-゚]'
open_branket = '[\(∩(]'
close_branket = '[\)∩)]'
        arround_face = '(?:' + non_tweet + '|' + allow_tweet + ')*'
        face = '(?!(?:' + match_tweet + '|' + hw_kana + '){3,}).{3,}'
face_char = arround_face + open_branket + face + close_branket + arround_face
tweet = re.sub(r"%s" % face_char, ' ', tweet)
        # Remove bracket symbols
tweet = re.sub(r"[()\[\]]", ' ', tweet)
        # Remove laugh marks ("w" runs)
tweet = re.sub(r"[wWwW]{2,}", ' ', tweet)
        # Remove meaningless runs of digits (the 6-7 digit pattern below)
tweet = re.sub(r"[0-9]{6,7}", ' ', tweet)
return tweet
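# Hedged example (added; the input tweet is invented for illustration):
# filter_tweet strips RT markers, hashtags, URLs, bare brackets and "w"
# laugh runs before the text is handed to MeCab, so something like
#   Parser.filter_tweet("RT #tag http://example.com (test) wwww")
# comes back as roughly "  tag   test  " with only taggable tokens left.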
|
gpl-2.0
| 7,673,179,145,309,621,000 | 24.033333 | 85 | 0.494008 | false |
fedora-infra/fmn
|
fmn/fasjson_client.py
|
1
|
2072
|
import logging
import requests
import requests.exceptions
from gssapi import Credentials, exceptions
from requests.compat import urlencode, urljoin
from requests_gssapi import HTTPSPNEGOAuth
log = logging.getLogger(__name__)
class Client(object):
"""
A fasjson client to make very specific requests to fasjson.
Necessary because the official fasjson-client library does not support
python2.
"""
def __init__(self, url, principal=None):
self.url = url
self.principal = principal
creds = None
try:
creds = Credentials(usage="initiate")
except exceptions.GSSError as e:
log.error("GSError. Unable to create credentials store.", e)
gssapi_auth = HTTPSPNEGOAuth(opportunistic_auth=True, creds=creds)
self.session = requests.Session()
self.session.auth = gssapi_auth
def search(self, email):
"""
A very limited search built to only serve fmn's requirement of
finding a user based on an email.
"""
# email must be an exact match in fasjson, so we will either have
# 1 result or empty result
search_string = "search/users" + "?" + urlencode({"email": email})
endpoint = urljoin(self.url, search_string)
return self.session.get(endpoint).json()
def get_user(self, username):
"""
Get a specific user based on their username
"""
url_string = "users/" + username + "/"
endpoint = urljoin(self.url, url_string)
return self.session.get(endpoint).json()
def list_all_entities(self, ent_name):
"""
Return all entities of a certain type. In fmn's case it is users.
"""
endpoint = urljoin(self.url, ent_name + "/")
next_page_url = endpoint + "?" + urlencode({"page_number": 1})
while next_page_url:
res = self.session.get(next_page_url).json()
for item in res["result"]:
yield item
next_page_url = res.get("page", {}).get("next_page")
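# Hedged usage sketch (added; the URL and handle() are hypothetical): the
# generator above follows fasjson's "next_page" links transparently, so a
# caller can simply iterate:
#   client = Client("https://fasjson.example.com/v1/")
#   for user in client.list_all_entities("users"):
#       handle(user)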
|
lgpl-2.1
| 297,541,774,851,337,700 | 31.888889 | 74 | 0.6139 | false |
dorianamouroux/snak
|
snak/commands/init.py
|
1
|
1780
|
import os
from collections import namedtuple
import click
from ..config import GlobalConfig
_, current_folder = os.path.split(os.getcwd())
UserInput = namedtuple('UserInput', ['name', 'version', 'author', 'description'])
class Init(object):
def __init__(self, filename=None):
self.config = GlobalConfig(filename)
self.default = UserInput(current_folder, '1.0.0', '', '')
if self.config.exists():
self.import_default_from_existing_config()
def import_default_from_existing_config(self):
click.echo('{} found ! Using it for default values.'\
.format(self.config.get_filename()))
self.config.load()
self.default = UserInput(self.config.get('name'),
self.config.get('version'),
self.config.get('author'),
self.config.get('description'))
def run(self):
user_input = self.prompt_information()
conf = self.build_conf(user_input)
click.echo(conf)
click.confirm('Is that correct ?', default=True, abort=True)
conf.write()
click.echo(conf.get_filename() + ' written !')
def prompt_information(self):
return UserInput(
click.prompt('Name', default=self.default.name),
click.prompt('Version', default=self.default.version),
click.prompt('Author', default=self.default.author),
click.prompt('Description', default=self.default.description))
def build_conf(self, user_input):
return self.config\
.set('name', user_input.name)\
.set('version', user_input.version)\
.set('author', user_input.author)\
.set('description', user_input.description)
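# Hedged usage sketch (added; the filename is an example): a typical run
# prompts for the four fields, echoes the resulting config and writes it
# once confirmed:
#   Init("snak.json").run()
# Defaults come from an existing config file when one is found, otherwise
# from the current folder name and version "1.0.0".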
|
mit
| 2,407,176,973,942,881,000 | 34.6 | 81 | 0.594944 | false |
blorgon9000/pyopus
|
demo/optimizer/de.py
|
1
|
1232
|
# Optimize SchwefelA function with differential evolution
# Collect cost function and plot progress
from pyopus.optimizer.de import DifferentialEvolution
from pyopus.problems import glbc
from pyopus.optimizer.base import Reporter, CostCollector, RandomDelay
import pyopus.wxmplplot as pyopl
from numpy import array, zeros, arange
from numpy.random import seed
from pyopus.parallel.cooperative import cOS
from pyopus.parallel.mpi import MPI
if __name__=='__main__':
cOS.setVM(MPI())
seed(0)
ndim=30
popSize=50
prob=glbc.SchwefelA(n=ndim)
slowProb=RandomDelay(prob, [0.001, 0.010])
opt=DifferentialEvolution(
slowProb, prob.xl, prob.xh, debug=0,
maxGen=1500, populationSize=popSize, w=0.5, pc=0.3
)
cc=CostCollector()
opt.installPlugin(cc)
opt.installPlugin(Reporter(onIterStep=1000))
opt.reset()
opt.run()
cc.finalize()
pyopl.init()
pyopl.close()
f1=pyopl.figure()
pyopl.lock(True)
if pyopl.alive(f1):
ax=f1.add_subplot(1,1,1)
ax.semilogy(arange(len(cc.fval))/popSize, cc.fval)
ax.set_xlabel('generations')
ax.set_ylabel('f')
ax.set_title('Progress of differential evolution')
ax.grid()
pyopl.lock(False)
print("x=%s f=%e" % (str(opt.x), opt.f))
pyopl.join()
cOS.finalize()
|
gpl-3.0
| -972,430,029,362,716,300 | 22.245283 | 70 | 0.729708 | false |
cwilhelm/django-teams
|
teams/admin.py
|
1
|
5240
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from models import *
class PersonAttributeInline(admin.TabularInline):
model = PersonAttribute
list_display = ('email', 'birthdate', 'height', 'weight')
class PlayerInline(admin.TabularInline):
model = Player
extra = 1
list_display = ('squad', 'person', 'number')
raw_id_fields = ('person',)
filter_horizontal = ('positions', )
class StaffInline(admin.TabularInline):
model = Staff
extra = 1
list_display = ('squad', 'person', 'function')
raw_id_fields = ('person',)
class ContactInline(admin.TabularInline):
model = Contact
extra = 1
list_display = ('person', 'value', 'sortorder')
raw_id_fields = ('person',)
class ResultInline(admin.TabularInline):
model = RemoteResult
extra = 1
list_display = ('name', )
class TeamAdmin(admin.ModelAdmin):
class Media:
js = ('/static/WYMEditor/jquery/jquery.js',
'/static/WYMEditor/wymeditor/jquery.wymeditor.pack.js',
'/static/WYMEditor/wymeditor/admin_textarea.js')
css = {
"all": ("/static/WYMEditor/wymeditor/skins/default/skin.css",)
}
fieldsets = (
(None, {
'fields': (
('name', 'slug'),
('sortorder'),
'lastsquad'
)
}),
)
prepopulated_fields = {'slug': ('name',)}
list_display = ('slug', 'name', 'sortorder')
admin.site.register(Team, TeamAdmin)
class SquadAdmin(admin.ModelAdmin):
class Media:
js = ('/static/WYMEditor/jquery/jquery.js',
'/static/WYMEditor/wymeditor/jquery.wymeditor.pack.js',
'/static/WYMEditor/wymeditor/admin_textarea.js')
css = {
"all": ("/static/WYMEditor/wymeditor/skins/default/skin.css",)
}
fieldsets = (
(None, {
'fields': (
('name', 'slug', 'team', 'season'),
('sortorder'),
('predecessor', 'successor'),
)
}),
)
inlines = (PlayerInline, StaffInline, ContactInline, ResultInline)
#filter_horizontal = ('images', 'calendars')
prepopulated_fields = {'slug': ('season', 'team', 'name')}
list_display = ('slug', 'name', 'team', 'season', 'sortorder')
admin.site.register(Squad, SquadAdmin)
class TransferUpdateAdmin(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
return False # To remove the 'Save and continue editing' button
admin.site.register(TransferUpdate, TransferUpdateAdmin)
class SquadCopyAdmin(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
return False # To remove the 'Save and continue editing' button
admin.site.register(SquadPlayerCopy, SquadCopyAdmin)
class PersonalSponsorAdmin(admin.ModelAdmin):
list_display = ('image', 'url', 'person')
admin.site.register(PersonalSponsor, PersonalSponsorAdmin)
class PersonAdmin(admin.ModelAdmin):
class Media:
js = ('/static/WYMEditor/jquery/jquery.js',
'/static/WYMEditor/wymeditor/jquery.wymeditor.pack.js',
'/static/WYMEditor/wymeditor/admin_textarea.js')
css = {
"all": ("/static/WYMEditor/wymeditor/skins/default/skin.css",)
}
prepopulated_fields = {'slug': ('first_name', 'last_name')}
fieldsets = (
(None, {
'fields': (
('first_name', 'last_name', 'slug'),
'sortorder',
)
}),
)
inlines = (PersonAttributeInline, PlayerInline, StaffInline)
search_fields = ('first_name', 'last_name')
list_display = ('slug', 'first_name', 'last_name', 'sortorder')
admin.site.register(Person, PersonAdmin)
class RemoteResultAdmin(admin.ModelAdmin):
list_display = ('name', )
prepopulated_fields = {'slug': ('name',)}
admin.site.register(RemoteResult, RemoteResultAdmin)
class DateAdmin(admin.ModelAdmin):
list_display = ('datum', 'name')
admin.site.register(Date, DateAdmin)
class TransferAdmin(admin.ModelAdmin):
raw_id_fields = ('person', )
list_display = ('person', 'old', 'oldextern', 'new', 'newextern')
admin.site.register(Transfer, TransferAdmin)
class ExternalTeamAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'url')
prepopulated_fields = {'slug': ('name',)}
admin.site.register(ExternalTeam, ExternalTeamAdmin)
class PositionAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Position, PositionAdmin)
class SeasonAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Season, SeasonAdmin)
class TeamImageAdmin(admin.ModelAdmin):
list_display = ('team', 'image', 'sort')
admin.site.register(TeamImage, TeamImageAdmin)
class SquadImageAdmin(admin.ModelAdmin):
list_display = ('squad', 'image', 'sort')
admin.site.register(SquadImage, SquadImageAdmin)
class PersonImageAdmin(admin.ModelAdmin):
list_display = ('person', 'image', 'sort')
admin.site.register(PersonImage, PersonImageAdmin)
|
bsd-3-clause
| 5,131,194,923,734,271,000 | 28.772727 | 74 | 0.627481 | false |
elimence/edx-platform
|
common/lib/capa/capa/util.py
|
1
|
3147
|
from calc import evaluator, UndefinedVariable
from cmath import isinf
#-----------------------------------------------------------------------------
#
# Utility functions used in CAPA responsetypes
def compare_with_tolerance(v1, v2, tol):
''' Compare v1 to v2 with maximum tolerance tol
tol is relative if it ends in %; otherwise, it is absolute
- v1 : student result (number)
- v2 : instructor result (number)
- tol : tolerance (string representing a number)
'''
relative = tol.endswith('%')
if relative:
tolerance_rel = evaluator(dict(), dict(), tol[:-1]) * 0.01
tolerance = tolerance_rel * max(abs(v1), abs(v2))
else:
tolerance = evaluator(dict(), dict(), tol)
if isinf(v1) or isinf(v2):
# If an input is infinite, we can end up with `abs(v1-v2)` and
# `tolerance` both equal to infinity. Then, below we would have
# `inf <= inf` which is a fail. Instead, compare directly.
return v1 == v2
else:
return abs(v1 - v2) <= tolerance
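# Worked example (added; numbers chosen for illustration): with tol='1%'
# the bound is relative to the larger magnitude, so
#   compare_with_tolerance(100.0, 100.9, '1%')  # True:  0.9 <= 0.01*100.9 = 1.009
#   compare_with_tolerance(100.0, 101.5, '1%')  # False: 1.5 >  1.015
# whereas tol='0.5' is an absolute bound independent of magnitude.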
def contextualize_text(text, context): # private
''' Takes a string with variables. E.g. $a+$b.
Does a substitution of those variables from the context '''
if not text:
return text
for key in sorted(context, lambda x, y: cmp(len(y), len(x))):
# TODO (vshnayder): This whole replacement thing is a big hack
# right now--context contains not just the vars defined in the
# program, but also e.g. a reference to the numpy module.
# Should be a separate dict of variables that should be
# replaced.
if '$' + key in text:
try:
s = str(context[key])
except UnicodeEncodeError:
s = context[key].encode('utf8', errors='ignore')
text = text.replace('$' + key, s)
return text
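# Hedged example (added): keys are substituted longest-first, so '$ab' is
# replaced before '$a' and the shorter key cannot clobber the longer one:
#   contextualize_text('$a + $ab', {'a': 1, 'ab': 2})  # -> '1 + 2'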
def convert_files_to_filenames(answers):
'''
Check for File objects in the dict of submitted answers,
convert File objects to their filename (string)
'''
new_answers = dict()
for answer_id in answers.keys():
answer = answers[answer_id]
# Files are stored as a list, even if one file
if is_list_of_files(answer):
new_answers[answer_id] = [f.name for f in answer]
else:
new_answers[answer_id] = answers[answer_id]
return new_answers
def is_list_of_files(files):
return isinstance(files, list) and all(is_file(f) for f in files)
def is_file(file_to_test):
'''
Duck typing to check if 'file_to_test' is a File object
'''
return all(hasattr(file_to_test, method) for method in ['read', 'name'])
def find_with_default(node, path, default):
"""
    Look for a child of node using path, and return its text if found.
Otherwise returns default.
Arguments:
node: lxml node
path: xpath search expression
default: value to return if nothing found
Returns:
node.find(path).text if the find succeeds, default otherwise.
"""
v = node.find(path)
if v is not None:
return v.text
else:
return default
|
agpl-3.0
| 1,461,137,611,740,416,500 | 30.787879 | 78 | 0.597394 | false |
cysuncn/python
|
spark/crm/PROC_O_CEN_CBOD_REACCACC.py
|
1
|
8459
|
#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_CEN_CBOD_REACCACC').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
#Dates needed for processing
etl_date = sys.argv[1]
#ETL date
V_DT = etl_date
#previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
#first day of the month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
#last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
#10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
O_RE_CBOD_REACCACC = sqlContext.read.parquet(hdfs+'/O_RE_CBOD_REACCACC/*')
O_RE_CBOD_REACCACC.registerTempTable("O_RE_CBOD_REACCACC")
#Task [12] 001-01::
V_STEP = V_STEP + 1
F_RE_CBOD_REACCACC = sqlContext.read.parquet(hdfs+'/F_RE_CBOD_REACCACC_BK/'+V_DT_LD+'.parquet/*')
F_RE_CBOD_REACCACC.registerTempTable("F_RE_CBOD_REACCACC")
sql = """
SELECT ETLDT AS ETLDT
,RE_DOC_TYP AS RE_DOC_TYP
,RE_DD_NO AS RE_DD_NO
,RE_ACCP_APPLN_ACCT_NO AS RE_ACCP_APPLN_ACCT_NO
,RE_DP_AMT AS RE_DP_AMT
,RE_RMT_AMT AS RE_RMT_AMT
,RE_DD_STS AS RE_DD_STS
,RE_DD_DUEDT AS RE_DD_DUEDT
,RE_CUST_NO AS RE_CUST_NO
,RE_BA_STS AS RE_BA_STS
,RE_OPUN_COD AS RE_OPUN_COD
,RE_TX_SEQ_NO AS RE_TX_SEQ_NO
,RE_DL_STS AS RE_DL_STS
,RE_DL_DT AS RE_DL_DT
,RE_DL_SVC AS RE_DL_SVC
,RE_LST_TX_DT AS RE_LST_TX_DT
,RE_ACCP_LN_AMT AS RE_ACCP_LN_AMT
,RE_CRLMT_NO AS RE_CRLMT_NO
,RE_DL_DUE_DT AS RE_DL_DUE_DT
,RE_ACCP_APPLN_NAME AS RE_ACCP_APPLN_NAME
,RE_PAYEE_AWBK_NAME AS RE_PAYEE_AWBK_NAME
,RE_DP_ACCT_NO AS RE_DP_ACCT_NO
,RE_PAYEE_AWBK_NO_FL AS RE_PAYEE_AWBK_NO_FL
,RE_PAYEE_AWBK_NAME_FL AS RE_PAYEE_AWBK_NAME_FL
,RE_HOLDER_NAME AS RE_HOLDER_NAME
,RE_HOLDER_ACCT_NO AS RE_HOLDER_ACCT_NO
,RE_SVC AS RE_SVC
,RE_APPL_BRH_STD AS RE_APPL_BRH_STD
,RE_SIG_DT AS RE_SIG_DT
,RE_GUAR_RATE AS RE_GUAR_RATE
,RE_BILL_BANK_NO AS RE_BILL_BANK_NO
,RE_BILL_TELLER_NO AS RE_BILL_TELLER_NO
,RE_PAY_TELLER_NO AS RE_PAY_TELLER_NO
,FR_ID AS FR_ID
,V_DT AS ODS_ST_DATE
,'CEN' AS ODS_SYS_ID
       FROM O_RE_CBOD_REACCACC A                       --bank acceptance bill master table
"""
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_RE_CBOD_REACCACC_INNTMP1 = sqlContext.sql(sql)
F_RE_CBOD_REACCACC_INNTMP1.registerTempTable("F_RE_CBOD_REACCACC_INNTMP1")
#F_RE_CBOD_REACCACC = sqlContext.read.parquet(hdfs+'/F_RE_CBOD_REACCACC_BK/'+V_DT_LD+'.parquet/*')
#F_RE_CBOD_REACCACC.registerTempTable("F_RE_CBOD_REACCACC")
sql = """
SELECT DST.ETLDT                                        --platform date:src.ETLDT
       ,DST.RE_DOC_TYP                                  --document type (DOC):src.RE_DOC_TYP
       ,DST.RE_DD_NO                                    --draft number:src.RE_DD_NO
       ,DST.RE_ACCP_APPLN_ACCT_NO                       --drawer's account number:src.RE_ACCP_APPLN_ACCT_NO
       ,DST.RE_DP_AMT                                   --margin deposit amount (dp):src.RE_DP_AMT
       ,DST.RE_RMT_AMT                                  --remittance amount:src.RE_RMT_AMT
       ,DST.RE_DD_STS                                   --bill status (DD):src.RE_DD_STS
       ,DST.RE_DD_DUEDT                                 --draft maturity date:src.RE_DD_DUEDT
       ,DST.RE_CUST_NO                                  --customer number:src.RE_CUST_NO
       ,DST.RE_BA_STS                                   --acceptance bill status:src.RE_BA_STS
       ,DST.RE_OPUN_COD                                 --business unit code:src.RE_OPUN_COD
       ,DST.RE_TX_SEQ_NO                                --transaction sequence number:src.RE_TX_SEQ_NO
       ,DST.RE_DL_STS                                   --loss report status:src.RE_DL_STS
       ,DST.RE_DL_DT                                    --loss report date:src.RE_DL_DT
       ,DST.RE_DL_SVC                                   --loss report service fee:src.RE_DL_SVC
       ,DST.RE_LST_TX_DT                                --last transaction date:src.RE_LST_TX_DT
       ,DST.RE_ACCP_LN_AMT                              --acceptance advance amount:src.RE_ACCP_LN_AMT
       ,DST.RE_CRLMT_NO                                 --credit limit number:src.RE_CRLMT_NO
       ,DST.RE_DL_DUE_DT                                --loss report stop-payment expiry date:src.RE_DL_DUE_DT
       ,DST.RE_ACCP_APPLN_NAME                          --drawer's name:src.RE_ACCP_APPLN_NAME
       ,DST.RE_PAYEE_AWBK_NAME                          --payee's opening bank name:src.RE_PAYEE_AWBK_NAME
       ,DST.RE_DP_ACCT_NO                               --margin deposit account number (dp):src.RE_DP_ACCT_NO
       ,DST.RE_PAYEE_AWBK_NO_FL                         --payee bank number:src.RE_PAYEE_AWBK_NO_FL
       ,DST.RE_PAYEE_AWBK_NAME_FL                       --payee's opening bank name FL:src.RE_PAYEE_AWBK_NAME_FL
       ,DST.RE_HOLDER_NAME                              --holder's name (60 chars):src.RE_HOLDER_NAME
       ,DST.RE_HOLDER_ACCT_NO                           --holder's account number:src.RE_HOLDER_ACCT_NO
       ,DST.RE_SVC                                      --service fee:src.RE_SVC
       ,DST.RE_APPL_BRH_STD                             --applying branch institution number:src.RE_APPL_BRH_STD
       ,DST.RE_SIG_DT                                   --issue date (contract creation):src.RE_SIG_DT
       ,DST.RE_GUAR_RATE                                --margin deposit interest rate:src.RE_GUAR_RATE
       ,DST.RE_BILL_BANK_NO                             --accepting bank number:src.RE_BILL_BANK_NO
       ,DST.RE_BILL_TELLER_NO                           --issuing teller number:src.RE_BILL_TELLER_NO
       ,DST.RE_PAY_TELLER_NO                            --paying teller number:src.RE_PAY_TELLER_NO
       ,DST.FR_ID                                       --legal entity code:src.FR_ID
       ,DST.ODS_ST_DATE                                 --system platform date:src.ODS_ST_DATE
       ,DST.ODS_SYS_ID                                  --system code:src.ODS_SYS_ID
FROM F_RE_CBOD_REACCACC DST
LEFT JOIN F_RE_CBOD_REACCACC_INNTMP1 SRC
ON SRC.RE_DD_NO = DST.RE_DD_NO
WHERE SRC.RE_DD_NO IS NULL """
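# Note (explanatory comment, added): the LEFT JOIN with "WHERE SRC.RE_DD_NO
# IS NULL" above is an anti-join that keeps only yesterday's rows absent
# from today's extract; unioning that remainder with INNTMP1 below rebuilds
# the full table as an upsert keyed on RE_DD_NO.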
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_RE_CBOD_REACCACC_INNTMP2 = sqlContext.sql(sql)
dfn="F_RE_CBOD_REACCACC/"+V_DT+".parquet"
UNION=F_RE_CBOD_REACCACC_INNTMP2.unionAll(F_RE_CBOD_REACCACC_INNTMP1)
F_RE_CBOD_REACCACC_INNTMP1.cache()
F_RE_CBOD_REACCACC_INNTMP2.cache()
nrowsi = F_RE_CBOD_REACCACC_INNTMP1.count()
nrowsa = F_RE_CBOD_REACCACC_INNTMP2.count()
UNION.write.save(path = hdfs + '/' + dfn, mode='overwrite')
F_RE_CBOD_REACCACC_INNTMP1.unpersist()
F_RE_CBOD_REACCACC_INNTMP2.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert F_RE_CBOD_REACCACC lines %d, all lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrowsi, nrowsa)
#ret = os.system("hdfs dfs -mv /"+dbname+"/F_RE_CBOD_REACCACC/"+V_DT_LD+".parquet /"+dbname+"/F_RE_CBOD_REACCACC_BK/")
#backup
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_RE_CBOD_REACCACC/"+V_DT_LD+".parquet ")
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_RE_CBOD_REACCACC_BK/"+V_DT+".parquet ")
ret = os.system("hdfs dfs -cp /"+dbname+"/F_RE_CBOD_REACCACC/"+V_DT+".parquet /"+dbname+"/F_RE_CBOD_REACCACC_BK/")
|
gpl-3.0
| 3,693,343,431,423,341,600 | 53.503401 | 193 | 0.486831 | false |
stevelle/glance
|
glance/common/exception.py
|
1
|
17727
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Glance exception subclasses"""
import six
import six.moves.urllib.parse as urlparse
from glance.i18n import _
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class RedirectException(Exception):
def __init__(self, url):
self.url = urlparse.urlparse(url)
class GlanceException(Exception):
"""
Base Glance Exception
To correctly use this class, inherit from it and define
a 'message' property. That message will get printf'd
with the keyword arguments provided to the constructor.
"""
message = _("An unknown exception occurred")
def __init__(self, message=None, *args, **kwargs):
if not message:
message = self.message
try:
if kwargs:
message = message % kwargs
except Exception:
if _FATAL_EXCEPTION_FORMAT_ERRORS:
raise
else:
# at least get the core message out if something happened
pass
self.msg = message
super(GlanceException, self).__init__(message)
def __unicode__(self):
# NOTE(flwang): By default, self.msg is an instance of Message, which
# can't be converted by str(). Based on the definition of
# __unicode__, it should return unicode always.
return six.text_type(self.msg)
class MissingCredentialError(GlanceException):
message = _("Missing required credential: %(required)s")
class BadAuthStrategy(GlanceException):
message = _("Incorrect auth strategy, expected \"%(expected)s\" but "
"received \"%(received)s\"")
class NotFound(GlanceException):
message = _("An object with the specified identifier was not found.")
class BadStoreUri(GlanceException):
message = _("The Store URI was malformed.")
class Duplicate(GlanceException):
message = _("An object with the same identifier already exists.")
class Conflict(GlanceException):
message = _("An object with the same identifier is currently being "
"operated on.")
class StorageQuotaFull(GlanceException):
message = _("The size of the data %(image_size)s will exceed the limit. "
"%(remaining)s bytes remaining.")
class AuthBadRequest(GlanceException):
message = _("Connect error/bad request to Auth service at URL %(url)s.")
class AuthUrlNotFound(GlanceException):
message = _("Auth service at URL %(url)s not found.")
class AuthorizationFailure(GlanceException):
message = _("Authorization failed.")
class NotAuthenticated(GlanceException):
message = _("You are not authenticated.")
class UploadException(GlanceException):
message = _('Image upload problem: %s')
class Forbidden(GlanceException):
message = _("You are not authorized to complete %(action)s action.")
class ForbiddenPublicImage(Forbidden):
message = _("You are not authorized to complete this action.")
class ProtectedImageDelete(Forbidden):
message = _("Image %(image_id)s is protected and cannot be deleted.")
class ProtectedMetadefNamespaceDelete(Forbidden):
message = _("Metadata definition namespace %(namespace)s is protected"
" and cannot be deleted.")
class ProtectedMetadefNamespacePropDelete(Forbidden):
message = _("Metadata definition property %(property_name)s is protected"
" and cannot be deleted.")
class ProtectedMetadefObjectDelete(Forbidden):
message = _("Metadata definition object %(object_name)s is protected"
" and cannot be deleted.")
class ProtectedMetadefResourceTypeAssociationDelete(Forbidden):
message = _("Metadata definition resource-type-association"
" %(resource_type)s is protected and cannot be deleted.")
class ProtectedMetadefResourceTypeSystemDelete(Forbidden):
message = _("Metadata definition resource-type %(resource_type_name)s is"
" a seeded-system type and cannot be deleted.")
class ProtectedMetadefTagDelete(Forbidden):
message = _("Metadata definition tag %(tag_name)s is protected"
" and cannot be deleted.")
class Invalid(GlanceException):
message = _("Data supplied was not valid.")
class InvalidSortKey(Invalid):
message = _("Sort key supplied was not valid.")
class InvalidSortDir(Invalid):
message = _("Sort direction supplied was not valid.")
class InvalidPropertyProtectionConfiguration(Invalid):
message = _("Invalid configuration in property protection file.")
class InvalidSwiftStoreConfiguration(Invalid):
message = _("Invalid configuration in glance-swift conf file.")
class InvalidFilterOperatorValue(Invalid):
message = _("Unable to filter using the specified operator.")
class InvalidFilterRangeValue(Invalid):
message = _("Unable to filter using the specified range.")
class InvalidOptionValue(Invalid):
message = _("Invalid value for option %(option)s: %(value)s")
class ReadonlyProperty(Forbidden):
message = _("Attribute '%(property)s' is read-only.")
class ReservedProperty(Forbidden):
message = _("Attribute '%(property)s' is reserved.")
class AuthorizationRedirect(GlanceException):
message = _("Redirecting to %(uri)s for authorization.")
class ClientConnectionError(GlanceException):
message = _("There was an error connecting to a server")
class ClientConfigurationError(GlanceException):
message = _("There was an error configuring the client.")
class MultipleChoices(GlanceException):
message = _("The request returned a 302 Multiple Choices. This generally "
"means that you have not included a version indicator in a "
"request URI.\n\nThe body of response returned:\n%(body)s")
class LimitExceeded(GlanceException):
message = _("The request returned a 413 Request Entity Too Large. This "
"generally means that rate limiting or a quota threshold was "
"breached.\n\nThe response body:\n%(body)s")
def __init__(self, *args, **kwargs):
self.retry_after = (int(kwargs['retry']) if kwargs.get('retry')
else None)
super(LimitExceeded, self).__init__(*args, **kwargs)
class ServiceUnavailable(GlanceException):
message = _("The request returned 503 Service Unavailable. This "
"generally occurs on service overload or other transient "
"outage.")
def __init__(self, *args, **kwargs):
self.retry_after = (int(kwargs['retry']) if kwargs.get('retry')
else None)
super(ServiceUnavailable, self).__init__(*args, **kwargs)
class ServerError(GlanceException):
message = _("The request returned 500 Internal Server Error.")
class UnexpectedStatus(GlanceException):
message = _("The request returned an unexpected status: %(status)s."
"\n\nThe response body:\n%(body)s")
class InvalidContentType(GlanceException):
message = _("Invalid content type %(content_type)s")
class BadRegistryConnectionConfiguration(GlanceException):
message = _("Registry was not configured correctly on API server. "
"Reason: %(reason)s")
class BadDriverConfiguration(GlanceException):
message = _("Driver %(driver_name)s could not be configured correctly. "
"Reason: %(reason)s")
class MaxRedirectsExceeded(GlanceException):
message = _("Maximum redirects (%(redirects)s) was exceeded.")
class InvalidRedirect(GlanceException):
message = _("Received invalid HTTP redirect.")
class NoServiceEndpoint(GlanceException):
message = _("Response from Keystone does not contain a Glance endpoint.")
class RegionAmbiguity(GlanceException):
message = _("Multiple 'image' service matches for region %(region)s. This "
"generally means that a region is required and you have not "
"supplied one.")
class WorkerCreationFailure(GlanceException):
message = _("Server worker creation failed: %(reason)s.")
class SchemaLoadError(GlanceException):
message = _("Unable to load schema: %(reason)s")
class InvalidObject(GlanceException):
message = _("Provided object does not match schema "
"'%(schema)s': %(reason)s")
class ImageSizeLimitExceeded(GlanceException):
message = _("The provided image is too large.")
class FailedToGetScrubberJobs(GlanceException):
message = _("Scrubber encountered an error while trying to fetch "
"scrub jobs.")
class ImageMemberLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"members for this image. Attempted: %(attempted)s, "
"Maximum: %(maximum)s")
class ImagePropertyLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"properties. Attempted: %(attempted)s, Maximum: %(maximum)s")
class ImageTagLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"tags. Attempted: %(attempted)s, Maximum: %(maximum)s")
class ImageLocationLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"locations. Attempted: %(attempted)s, Maximum: %(maximum)s")
class SIGHUPInterrupt(GlanceException):
message = _("System SIGHUP signal received.")
class RPCError(GlanceException):
message = _("%(cls)s exception was raised in the last rpc call: %(val)s")
class TaskException(GlanceException):
message = _("An unknown task exception occurred")
class BadTaskConfiguration(GlanceException):
message = _("Task was not configured properly")
class ImageNotFound(NotFound):
message = _("Image with the given id %(image_id)s was not found")
class TaskNotFound(TaskException, NotFound):
message = _("Task with the given id %(task_id)s was not found")
class InvalidTaskStatus(TaskException, Invalid):
message = _("Provided status of task is unsupported: %(status)s")
class InvalidTaskType(TaskException, Invalid):
message = _("Provided type of task is unsupported: %(type)s")
class InvalidTaskStatusTransition(TaskException, Invalid):
message = _("Status transition from %(cur_status)s to"
" %(new_status)s is not allowed")
class ImportTaskError(TaskException, Invalid):
message = _("An import task exception occurred")
class DuplicateLocation(Duplicate):
message = _("The location %(location)s already exists")
class InvalidParameterValue(Invalid):
message = _("Invalid value '%(value)s' for parameter '%(param)s': "
"%(extra_msg)s")
class InvalidImageStatusTransition(Invalid):
message = _("Image status transition from %(cur_status)s to"
" %(new_status)s is not allowed")
class MetadefDuplicateNamespace(Duplicate):
message = _("The metadata definition namespace=%(namespace_name)s"
" already exists.")
class MetadefDuplicateObject(Duplicate):
message = _("A metadata definition object with name=%(object_name)s"
" already exists in namespace=%(namespace_name)s.")
class MetadefDuplicateProperty(Duplicate):
message = _("A metadata definition property with name=%(property_name)s"
" already exists in namespace=%(namespace_name)s.")
class MetadefDuplicateResourceType(Duplicate):
message = _("A metadata definition resource-type with"
" name=%(resource_type_name)s already exists.")
class MetadefDuplicateResourceTypeAssociation(Duplicate):
message = _("The metadata definition resource-type association of"
" resource-type=%(resource_type_name)s to"
" namespace=%(namespace_name)s"
" already exists.")
class MetadefDuplicateTag(Duplicate):
message = _("A metadata tag with name=%(name)s"
" already exists in namespace=%(namespace_name)s."
" (Please note that metadata tag names are"
" case insensitive).")
class MetadefForbidden(Forbidden):
message = _("You are not authorized to complete this action.")
class MetadefIntegrityError(Forbidden):
message = _("The metadata definition %(record_type)s with"
" name=%(record_name)s not deleted."
" Other records still refer to it.")
class MetadefNamespaceNotFound(NotFound):
message = _("Metadata definition namespace=%(namespace_name)s"
" was not found.")
class MetadefObjectNotFound(NotFound):
message = _("The metadata definition object with"
" name=%(object_name)s was not found in"
" namespace=%(namespace_name)s.")
class MetadefPropertyNotFound(NotFound):
message = _("The metadata definition property with"
" name=%(property_name)s was not found in"
" namespace=%(namespace_name)s.")
class MetadefResourceTypeNotFound(NotFound):
message = _("The metadata definition resource-type with"
" name=%(resource_type_name)s, was not found.")
class MetadefResourceTypeAssociationNotFound(NotFound):
message = _("The metadata definition resource-type association of"
" resource-type=%(resource_type_name)s to"
" namespace=%(namespace_name)s,"
" was not found.")
class MetadefTagNotFound(NotFound):
message = _("The metadata definition tag with"
" name=%(name)s was not found in"
" namespace=%(namespace_name)s.")
class InvalidVersion(Invalid):
message = _("Version is invalid: %(reason)s")
class InvalidArtifactTypePropertyDefinition(Invalid):
message = _("Invalid property definition")
class InvalidArtifactTypeDefinition(Invalid):
message = _("Invalid type definition")
class InvalidArtifactPropertyValue(Invalid):
message = _("Property '%(name)s' may not have value '%(val)s': %(msg)s")
def __init__(self, message=None, *args, **kwargs):
super(InvalidArtifactPropertyValue, self).__init__(message, *args,
**kwargs)
self.name = kwargs.get('name')
self.value = kwargs.get('val')
class ArtifactNotFound(NotFound):
message = _("Artifact with id=%(id)s was not found")
class ArtifactForbidden(Forbidden):
message = _("Artifact with id=%(id)s is not accessible")
class ArtifactDuplicateNameTypeVersion(Duplicate):
message = _("Artifact with the specified type, name and version"
" already exists")
class InvalidArtifactStateTransition(Invalid):
message = _("Artifact cannot change state from %(source)s to %(target)s")
class ArtifactDuplicateDirectDependency(Duplicate):
message = _("Artifact with the specified type, name and version"
" already has the direct dependency=%(dep)s")
class ArtifactDuplicateTransitiveDependency(Duplicate):
message = _("Artifact with the specified type, name and version"
" already has the transitive dependency=%(dep)s")
class ArtifactCircularDependency(Invalid):
message = _("Artifact with a circular dependency can not be created")
class ArtifactUnsupportedPropertyOperator(Invalid):
message = _("Operator %(op)s is not supported")
class ArtifactUnsupportedShowLevel(Invalid):
message = _("Show level %(shl)s is not supported in this operation")
class ArtifactPropertyValueNotFound(NotFound):
message = _("Property's %(prop)s value has not been found")
class ArtifactInvalidProperty(Invalid):
message = _("Artifact has no property %(prop)s")
class ArtifactInvalidPropertyParameter(Invalid):
message = _("Cannot use this parameter with the operator %(op)s")
class ArtifactLoadError(GlanceException):
message = _("Cannot load artifact '%(name)s'")
class ArtifactNonMatchingTypeName(ArtifactLoadError):
message = _("Plugin name '%(plugin)s' should match "
"artifact typename '%(name)s'")
class ArtifactPluginNotFound(NotFound):
message = _("No plugin for '%(name)s' has been loaded")
class UnknownArtifactType(NotFound):
message = _("Artifact type with name '%(name)s' and version '%(version)s' "
"is not known")
class ArtifactInvalidStateTransition(Invalid):
message = _("Artifact state cannot be changed from %(curr)s to %(to)s")
class JsonPatchException(GlanceException):
message = _("Invalid jsonpatch request")
class InvalidJsonPatchBody(JsonPatchException):
message = _("The provided body %(body)s is invalid "
"under given schema: %(schema)s")
class InvalidJsonPatchPath(JsonPatchException):
message = _("The provided path '%(path)s' is invalid: %(explanation)s")
def __init__(self, message=None, *args, **kwargs):
self.explanation = kwargs.get("explanation")
super(InvalidJsonPatchPath, self).__init__(message, *args, **kwargs)
|
apache-2.0
| 6,056,371,063,913,521,000 | 30.430851 | 79 | 0.677328 | false |
calebtrahan/KujiIn_Python
|
backup/guitemplates/setgoaldialog.py
|
1
|
5432
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'setgoaldialog.ui'
#
# Created: Tue Dec 23 18:15:13 2014
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_setgoalsdialog(object):
def setupUi(self, setgoalsdialog):
setgoalsdialog.setObjectName(_fromUtf8("setgoalsdialog"))
setgoalsdialog.resize(434, 241)
self.setgoaldialogtopLabel = QtGui.QLabel(setgoalsdialog)
self.setgoaldialogtopLabel.setGeometry(QtCore.QRect(40, 30, 381, 16))
self.setgoaldialogtopLabel.setObjectName(_fromUtf8("setgoaldialogtopLabel"))
self.setgoaldialoggoalLabel = QtGui.QLabel(setgoalsdialog)
self.setgoaldialoggoalLabel.setGeometry(QtCore.QRect(130, 70, 59, 15))
self.setgoaldialoggoalLabel.setObjectName(_fromUtf8("setgoaldialoggoalLabel"))
self.horizontalLayoutWidget = QtGui.QWidget(setgoalsdialog)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(100, 90, 177, 41))
self.horizontalLayoutWidget.setObjectName(_fromUtf8("horizontalLayoutWidget"))
self.setgoalsdialoggoallayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
self.setgoalsdialoggoallayout.setMargin(0)
self.setgoalsdialoggoallayout.setObjectName(_fromUtf8("setgoalsdialoggoallayout"))
self.setgoaldialogvalue = QtGui.QSpinBox(self.horizontalLayoutWidget)
self.setgoaldialogvalue.setLayoutDirection(QtCore.Qt.RightToLeft)
self.setgoaldialogvalue.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.setgoaldialogvalue.setButtonSymbols(QtGui.QAbstractSpinBox.PlusMinus)
self.setgoaldialogvalue.setObjectName(_fromUtf8("setgoaldialogvalue"))
self.setgoalsdialoggoallayout.addWidget(self.setgoaldialogvalue)
self.setgoaldialoghrslabel = QtGui.QLabel(self.horizontalLayoutWidget)
self.setgoaldialoghrslabel.setObjectName(_fromUtf8("setgoaldialoghrslabel"))
self.setgoalsdialoggoallayout.addWidget(self.setgoaldialoghrslabel)
self.setgoaldialogDueDate = QtGui.QDateEdit(setgoalsdialog)
self.setgoaldialogDueDate.setGeometry(QtCore.QRect(220, 100, 110, 22))
self.setgoaldialogDueDate.setLayoutDirection(QtCore.Qt.RightToLeft)
self.setgoaldialogDueDate.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.setgoaldialogDueDate.setButtonSymbols(QtGui.QAbstractSpinBox.PlusMinus)
self.setgoaldialogDueDate.setDisplayFormat(_fromUtf8(""))
self.setgoaldialogDueDate.setObjectName(_fromUtf8("setgoaldialogDueDate"))
self.setgoalduedateLabel = QtGui.QLabel(setgoalsdialog)
self.setgoalduedateLabel.setGeometry(QtCore.QRect(240, 70, 61, 20))
self.setgoalduedateLabel.setObjectName(_fromUtf8("setgoalduedateLabel"))
self.horizontalLayoutWidget_2 = QtGui.QWidget(setgoalsdialog)
self.horizontalLayoutWidget_2.setGeometry(QtCore.QRect(90, 180, 334, 41))
self.horizontalLayoutWidget_2.setObjectName(_fromUtf8("horizontalLayoutWidget_2"))
self.setdialogbuttonslayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget_2)
self.setdialogbuttonslayout.setMargin(0)
self.setdialogbuttonslayout.setObjectName(_fromUtf8("setdialogbuttonslayout"))
self.pushButton = QtGui.QPushButton(self.horizontalLayoutWidget_2)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.setdialogbuttonslayout.addWidget(self.pushButton)
self.setgoaldialogAcceptButton = QtGui.QPushButton(self.horizontalLayoutWidget_2)
self.setgoaldialogAcceptButton.setObjectName(_fromUtf8("setgoaldialogAcceptButton"))
self.setdialogbuttonslayout.addWidget(self.setgoaldialogAcceptButton)
self.setgoaldialogCancelButton = QtGui.QPushButton(self.horizontalLayoutWidget_2)
self.setgoaldialogCancelButton.setObjectName(_fromUtf8("setgoaldialogCancelButton"))
self.setdialogbuttonslayout.addWidget(self.setgoaldialogCancelButton)
self.retranslateUi(setgoalsdialog)
QtCore.QMetaObject.connectSlotsByName(setgoalsdialog)
def retranslateUi(self, setgoalsdialog):
setgoalsdialog.setWindowTitle(_translate("setgoalsdialog", "Dialog", None))
self.setgoaldialogtopLabel.setText(_translate("setgoalsdialog", "You Are Currently At num Hours. Please Set A New Goal:", None))
self.setgoaldialoggoalLabel.setText(_translate("setgoalsdialog", "GOAL", None))
self.setgoaldialoghrslabel.setText(_translate("setgoalsdialog", "hrs", None))
self.setgoalduedateLabel.setText(_translate("setgoalsdialog", "Due Date", None))
self.pushButton.setText(_translate("setgoalsdialog", "VIEW CURRENT GOALS", None))
self.setgoaldialogAcceptButton.setText(_translate("setgoalsdialog", "ACCEPT", None))
self.setgoaldialogCancelButton.setText(_translate("setgoalsdialog", "CANCEL", None))
|
mit
| -2,910,544,525,462,350,300 | 60.033708 | 136 | 0.760677 | false |
preprocessed-connectomes-project/quality-assessment-protocol
|
scripts/qap_check_output_csv.py
|
1
|
1302
|
#!/usr/bin/env python
def main():
import os
import argparse
from qap.script_utils import check_csv_missing_subs, csv_to_pandas_df, \
write_inputs_dict_to_yaml_file, read_yml_file
from qap.qap_utils import raise_smart_exception
parser = argparse.ArgumentParser()
parser.add_argument("output_csv", type=str,
help="the main output directory of the QAP run "
"which contains the participant directories")
parser.add_argument("data_config", type=str,
help="the main output directory of the QAP run "
"which contains the participant directories")
parser.add_argument("data_type", type=str,
help="the main output directory of the QAP run "
"which contains the participant directories")
args = parser.parse_args()
csv_df = csv_to_pandas_df(args.output_csv)
data_dict = read_yml_file(args.data_config)
new_dict = check_csv_missing_subs(csv_df, data_dict, args.data_type)
if new_dict:
out_file = os.path.join(os.getcwd(),
"missing_%s_data.yml" % args.data_type)
write_inputs_dict_to_yaml_file(new_dict, out_file)
if __name__ == "__main__":
main()
|
bsd-3-clause
| 6,100,006,291,971,816,000 | 34.216216 | 76 | 0.596006 | false |
euklyd/PenguinBot3K
|
core/Webhook.py
|
1
|
3305
|
"""
Class Name : Webhook
Description:
Contributors:
- Patrick Hennessy
License:
Arcbot is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License v3; as published
by the Free Software Foundation
@webhook("/notify/git", type="POST")
def example(self, request):
request
sender
type
body
headers
"""
import socket
import threading
import sys
import time
class WebhookManager():
def __init__(self, core, port):
self.core = core
self.server = HTTPServer(port)
self.listening = False
def register(self, method, uri, callback):
pass
def unregister(self):
pass
class HTTPServer():
def __init__(self, port):
self.socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM
)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind( ('127.0.0.1', port) )
self.socket.listen(5)
self.socket.setblocking(0)
self.thread = None
def start(self):
self.listening = True
self.thread = threading.Thread(target=self.listen)
self.thread.start()
def stop(self):
self.listening = False
self.thread.join()
def listen(self):
while self.listening:
time.sleep(0.5)
try:
connection, address = self.socket.accept()
except socket.error as e:
# Resource timeout
if(e.errno == 11):
continue
requestData = connection.recv(1024)
request = self.parseRequest(requestData, connection)
print request
def parseRequest(self, requestData, connection):
request = {}
requestData = requestData.splitlines()
for index, line in enumerate(requestData):
if(index == 0):
try:
method, uri, version = line.split(" ")
request["method"] = method
request["uri"] = uri
request["version"] = version
except:
# When they send a malformed header
connection.send("HTTP/1.1 400 Bad Request")
connection.close()
# Check HTTP Method
if method not in ["POST", "GET", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE", "PATCH"]:
connection.send("HTTP/1.1 405 Method Not Allowed")
connection.close()
# Check HTTP version
if version not in ["HTTP/1.1", "HTTP/1.0"]:
connection.send("HTTP/1.1 505 HTTP Version Not Supported")
connection.close()
elif(line.startswith("Content-Type")):
request["contentType"] = line.split(" ")[1]
elif(line.startswith("Content-Length")):
request["contentLength"] = line.split(" ")[1]
else:
if(line != ""):
request["payload"] = line
return request
|
gpl-3.0
| -4,199,915,346,113,456,600 | 25.653226 | 103 | 0.508926 | false |
epssy/hue
|
apps/beeswax/src/beeswax/server/hive_server2_lib.py
|
1
|
34891
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import itertools
import re
from itertools import imap
from operator import itemgetter
from django.utils.translation import ugettext as _
from desktop.lib import thrift_util
from desktop.conf import get_ldap_password, LDAP_USERNAME
from desktop.conf import DEFAULT_USER
from hadoop import cluster
from TCLIService import TCLIService
from TCLIService.ttypes import TOpenSessionReq, TGetTablesReq, TFetchResultsReq,\
TStatusCode, TGetResultSetMetadataReq, TGetColumnsReq, TTypeId,\
TExecuteStatementReq, TGetOperationStatusReq, TFetchOrientation,\
TCloseSessionReq, TGetSchemasReq, TGetLogReq, TCancelOperationReq,\
TCloseOperationReq, TFetchResultsResp, TRowSet, TProtocolVersion
from beeswax import conf as beeswax_conf
from beeswax import hive_site
from beeswax.hive_site import hiveserver2_use_ssl
from beeswax.models import Session, HiveServerQueryHandle, HiveServerQueryHistory
from beeswax.server.dbms import Table, NoSuchObjectException, DataTable,\
QueryServerException
LOG = logging.getLogger(__name__)
IMPALA_RESULTSET_CACHE_SIZE = 'impala.resultset.cache.size'
DEFAULT_USER = DEFAULT_USER.get()
class HiveServerTable(Table):
"""
We get the table details from a DESCRIBE FORMATTED.
"""
def __init__(self, table_results, table_schema, desc_results, desc_schema):
if beeswax_conf.THRIFT_VERSION.get() >= 7:
if not table_results.columns:
raise NoSuchObjectException()
self.table = table_results.columns
else: # Deprecated. To remove in Hue 4.
if not table_results.rows:
raise NoSuchObjectException()
self.table = table_results.rows and table_results.rows[0] or ''
self.table_schema = table_schema
self.desc_results = desc_results
self.desc_schema = desc_schema
self.describe = HiveServerTTableSchema(self.desc_results, self.desc_schema).cols()
@property
def name(self):
return HiveServerTRow(self.table, self.table_schema).col('TABLE_NAME')
@property
def is_view(self):
return HiveServerTRow(self.table, self.table_schema).col('TABLE_TYPE') == 'VIEW'
@property
def partition_keys(self):
try:
return [PartitionKeyCompatible(row['col_name'], row['data_type'], row['comment']) for row in self._get_partition_column()]
except:
LOG.exception('failed to get partition keys')
return []
@property
def path_location(self):
try:
rows = self.describe
rows = [row for row in rows if row['col_name'].startswith('Location:')]
if rows:
return rows[0]['data_type']
except:
LOG.exception('failed to get path location')
return None
@property
def cols(self):
rows = self.describe
try:
col_row_index = 2
end_cols_index = map(itemgetter('col_name'), rows[col_row_index:]).index('')
return rows[col_row_index:][:end_cols_index] + self._get_partition_column()
except:
LOG.exception('failed to extract columns')
return rows
def _get_partition_column(self):
rows = self.describe
try:
col_row_index = map(itemgetter('col_name'), rows).index('# Partition Information') + 3
end_cols_index = map(itemgetter('col_name'), rows[col_row_index:]).index('')
return rows[col_row_index:][:end_cols_index]
except:
LOG.exception('failed to get partition column')
return []
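  # Explanatory note (added): DESCRIBE FORMATTED output starts the column
  # block two rows in and terminates each block with an empty col_name row,
  # which is why cols() and _get_partition_column() both scan for the ''
  # sentinel before slicing.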
@property
def comment(self):
return HiveServerTRow(self.table, self.table_schema).col('REMARKS')
@property
def properties(self):
rows = self.describe
col_row_index = 2
end_cols_index = map(itemgetter('col_name'), rows[col_row_index:]).index('')
return [{
'col_name': prop['col_name'].strip() if prop['col_name'] else prop['col_name'],
'data_type': prop['data_type'].strip() if prop['data_type'] else prop['data_type'],
'comment': prop['comment'].strip() if prop['comment'] else prop['comment']
} for prop in rows[col_row_index + end_cols_index + 1:]
]
@property
def stats(self):
rows = self.properties
col_row_index = map(itemgetter('col_name'), rows).index('Table Parameters:') + 1
end_cols_index = map(itemgetter('data_type'), rows[col_row_index:]).index(None)
return rows[col_row_index:][:end_cols_index]
class HiveServerTRowSet2:
def __init__(self, row_set, schema):
self.row_set = row_set
self.rows = row_set.rows
self.schema = schema
self.startRowOffset = row_set.startRowOffset
def is_empty(self):
return not self.row_set.columns or not HiveServerTColumnValue2(self.row_set.columns[0]).val
def cols(self, col_names):
cols_rows = []
rs = HiveServerTRow2(self.row_set.columns, self.schema)
cols = [rs.full_col(name) for name in col_names]
for cols_row in itertools.izip(*cols):
cols_rows.append(dict(itertools.izip(col_names, cols_row)))
return cols_rows
def __iter__(self):
return self
def next(self):
if self.row_set.columns:
return HiveServerTRow2(self.row_set.columns, self.schema)
else:
raise StopIteration
class HiveServerTRow2:
def __init__(self, cols, schema):
self.cols = cols
self.schema = schema
def col(self, colName):
pos = self._get_col_position(colName)
return HiveServerTColumnValue2(self.cols[pos]).val[0] # Return only first element
def full_col(self, colName):
pos = self._get_col_position(colName)
return HiveServerTColumnValue2(self.cols[pos]).val # Return the full column and its values
def _get_col_position(self, column_name):
return filter(lambda (i, col): col.columnName == column_name, enumerate(self.schema.columns))[0][0]
def fields(self):
try:
return [HiveServerTColumnValue2(field).val.pop(0) for field in self.cols]
except IndexError:
raise StopIteration
class HiveServerTColumnValue2:
def __init__(self, tcolumn_value):
self.column_value = tcolumn_value
@property
def val(self):
# Could directly get index from schema but would need to cache the schema
if self.column_value.stringVal:
return self._get_val(self.column_value.stringVal)
elif self.column_value.i16Val is not None:
return self._get_val(self.column_value.i16Val)
elif self.column_value.i32Val is not None:
return self._get_val(self.column_value.i32Val)
elif self.column_value.i64Val is not None:
return self._get_val(self.column_value.i64Val)
elif self.column_value.doubleVal is not None:
return self._get_val(self.column_value.doubleVal)
elif self.column_value.boolVal is not None:
return self._get_val(self.column_value.boolVal)
elif self.column_value.byteVal is not None:
return self._get_val(self.column_value.byteVal)
elif self.column_value.binaryVal is not None:
return self._get_val(self.column_value.binaryVal)
@classmethod
def _get_val(cls, column):
column.values = cls.set_nulls(column.values, column.nulls)
column.nulls = '' # Clear the null values for not re-marking again the column with nulls at the next call
return column.values
@classmethod
def mark_nulls(cls, values, bytestring):
mask = bytearray(bytestring)
for n in mask:
yield n & 0x01
yield n & 0x02
yield n & 0x04
yield n & 0x08
yield n & 0x10
yield n & 0x20
yield n & 0x40
yield n & 0x80
@classmethod
def set_nulls(cls, values, bytestring):
if bytestring == '' or re.match('^(\x00)+$', bytestring): # HS2 has just \x00 or '', Impala can have \x00\x00...
return values
else:
_values = [None if is_null else value for value, is_null in itertools.izip(values, cls.mark_nulls(values, bytestring))]
if len(values) != len(_values): # HS2 can have just \x00\x01 instead of \x00\x01\x00...
_values.extend(values[len(_values):])
return _values
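  # Hedged example (added; the bytestring is synthetic): HS2 packs null
  # flags eight rows per byte, least significant bit first, so '\x05'
  # (0b00000101) marks rows 0 and 2 as null:
  #   HiveServerTColumnValue2.set_nulls(['a', 'b', 'c'], '\x05')
  #   # -> [None, 'b', None]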
class HiveServerDataTable(DataTable):
def __init__(self, results, schema, operation_handle, query_server):
self.schema = schema and schema.schema
self.row_set = HiveServerTRowSet(results.results, schema)
self.operation_handle = operation_handle
if query_server['server_name'] == 'impala':
self.has_more = results.hasMoreRows
else:
self.has_more = not self.row_set.is_empty() # Should be results.hasMoreRows but always True in HS2
self.startRowOffset = self.row_set.startRowOffset # Always 0 in HS2
@property
def ready(self):
return True
def cols(self):
if self.schema:
return [HiveServerTColumnDesc(col) for col in self.schema.columns]
else:
return []
def rows(self):
for row in self.row_set:
yield row.fields()
class HiveServerTTableSchema:
def __init__(self, columns, schema):
self.columns = columns
self.schema = schema
def cols(self):
try:
return HiveServerTRowSet(self.columns, self.schema).cols(('col_name', 'data_type', 'comment'))
except:
LOG.exception('failed to get columns')
# Impala API is different
cols = HiveServerTRowSet(self.columns, self.schema).cols(('name', 'type', 'comment'))
for col in cols:
col['col_name'] = col.pop('name')
col['data_type'] = col.pop('type')
return cols
def col(self, colName):
pos = self._get_col_position(colName)
return HiveServerTColumnDesc(self.columns[pos]).val
def _get_col_position(self, column_name):
return filter(lambda (i, col): col.columnName == column_name, enumerate(self.schema.columns))[0][0]
if beeswax_conf.THRIFT_VERSION.get() >= 7:
HiveServerTRow = HiveServerTRow2
HiveServerTRowSet = HiveServerTRowSet2
else:
# Deprecated. To remove in Hue 4.
class HiveServerTRow:
def __init__(self, row, schema):
self.row = row
self.schema = schema
def col(self, colName):
pos = self._get_col_position(colName)
return HiveServerTColumnValue(self.row.colVals[pos]).val
def _get_col_position(self, column_name):
return filter(lambda (i, col): col.columnName == column_name, enumerate(self.schema.columns))[0][0]
def fields(self):
return [HiveServerTColumnValue(field).val for field in self.row.colVals]
class HiveServerTRowSet:
def __init__(self, row_set, schema):
self.row_set = row_set
self.rows = row_set.rows
self.schema = schema
self.startRowOffset = row_set.startRowOffset
def is_empty(self):
return len(self.rows) == 0
def cols(self, col_names):
cols_rows = []
for row in self.rows:
row = HiveServerTRow(row, self.schema)
cols = {}
for col_name in col_names:
cols[col_name] = row.col(col_name)
cols_rows.append(cols)
return cols_rows
def __iter__(self):
return self
def next(self):
if self.rows:
return HiveServerTRow(self.rows.pop(0), self.schema)
else:
raise StopIteration
class HiveServerTColumnValue:
def __init__(self, tcolumn_value):
self.column_value = tcolumn_value
@property
def val(self):
if self.column_value.boolVal is not None:
return self.column_value.boolVal.value
elif self.column_value.byteVal is not None:
return self.column_value.byteVal.value
elif self.column_value.i16Val is not None:
return self.column_value.i16Val.value
elif self.column_value.i32Val is not None:
return self.column_value.i32Val.value
elif self.column_value.i64Val is not None:
return self.column_value.i64Val.value
elif self.column_value.doubleVal is not None:
return self.column_value.doubleVal.value
elif self.column_value.stringVal is not None:
return self.column_value.stringVal.value
class HiveServerTColumnDesc:
def __init__(self, column):
self.column = column
@property
def name(self):
return self.column.columnName
@property
def comment(self):
return self.column.comment
@property
def type(self):
return self.get_type(self.column.typeDesc)
@classmethod
def get_type(self, typeDesc):
for ttype in typeDesc.types:
if ttype.primitiveEntry is not None:
return TTypeId._VALUES_TO_NAMES[ttype.primitiveEntry.type]
elif ttype.mapEntry is not None:
return ttype.mapEntry
elif ttype.unionEntry is not None:
return ttype.unionEntry
elif ttype.arrayEntry is not None:
return ttype.arrayEntry
elif ttype.structEntry is not None:
return ttype.structEntry
elif ttype.userDefinedTypeEntry is not None:
return ttype.userDefinedTypeEntry
class HiveServerClient:
HS2_MECHANISMS = {'KERBEROS': 'GSSAPI', 'NONE': 'PLAIN', 'NOSASL': 'NOSASL', 'LDAP': 'PLAIN'}
def __init__(self, query_server, user):
self.query_server = query_server
self.user = user
use_sasl, mechanism, kerberos_principal_short_name, impersonation_enabled, ldap_username, ldap_password = self.get_security()
LOG.info('use_sasl=%s, mechanism=%s, kerberos_principal_short_name=%s, impersonation_enabled=%s' % (
use_sasl, mechanism, kerberos_principal_short_name, impersonation_enabled))
self.use_sasl = use_sasl
self.kerberos_principal_short_name = kerberos_principal_short_name
self.impersonation_enabled = impersonation_enabled
if self.query_server['server_name'] == 'impala':
from impala import conf as impala_conf
ssl_enabled = impala_conf.SSL.ENABLED.get()
ca_certs = impala_conf.SSL.CACERTS.get()
keyfile = impala_conf.SSL.KEY.get()
certfile = impala_conf.SSL.CERT.get()
validate = impala_conf.SSL.VALIDATE.get()
timeout = impala_conf.SERVER_CONN_TIMEOUT.get()
else:
ssl_enabled = hiveserver2_use_ssl()
ca_certs = beeswax_conf.SSL.CACERTS.get()
keyfile = beeswax_conf.SSL.KEY.get()
certfile = beeswax_conf.SSL.CERT.get()
validate = beeswax_conf.SSL.VALIDATE.get()
timeout = beeswax_conf.SERVER_CONN_TIMEOUT.get()
if ldap_username:
username = ldap_username
password = ldap_password
else:
username = user.username
password = None
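    # Build the Thrift client with the transport (socket or HTTP), SASL and SSL settings resolved above.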
self._client = thrift_util.get_client(TCLIService.Client,
query_server['server_host'],
query_server['server_port'],
service_name=query_server['server_name'],
kerberos_principal=kerberos_principal_short_name,
use_sasl=use_sasl,
mechanism=mechanism,
username=username,
password=password,
timeout_seconds=timeout,
use_ssl=ssl_enabled,
ca_certs=ca_certs,
keyfile=keyfile,
certfile=certfile,
validate=validate,
transport_mode=query_server.get('transport_mode', 'socket'),
http_url=query_server.get('http_url', '')
)
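  # Resolves the effective security settings for this server: SASL usage, mechanism,
  # Kerberos principal, impersonation, and optional pass-through LDAP credentials.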
def get_security(self):
principal = self.query_server['principal']
impersonation_enabled = False
ldap_username = None
ldap_password = get_ldap_password()
if ldap_password is not None: # Pass-through LDAP authentication
ldap_username = LDAP_USERNAME.get()
if principal:
kerberos_principal_short_name = principal.split('/', 1)[0]
else:
kerberos_principal_short_name = None
if self.query_server['server_name'] == 'impala':
if ldap_password: # Force LDAP auth if ldap_password is provided
use_sasl = True
mechanism = HiveServerClient.HS2_MECHANISMS['NONE']
else:
cluster_conf = cluster.get_cluster_conf_for_job_submission()
use_sasl = cluster_conf is not None and cluster_conf.SECURITY_ENABLED.get()
mechanism = HiveServerClient.HS2_MECHANISMS['KERBEROS']
impersonation_enabled = self.query_server['impersonation_enabled']
else:
hive_mechanism = hive_site.get_hiveserver2_authentication()
if hive_mechanism not in HiveServerClient.HS2_MECHANISMS:
raise Exception(_('%s server authentication not supported. Valid are %s.') % (hive_mechanism, HiveServerClient.HS2_MECHANISMS.keys()))
use_sasl = hive_mechanism in ('KERBEROS', 'NONE', 'LDAP')
mechanism = HiveServerClient.HS2_MECHANISMS[hive_mechanism]
impersonation_enabled = hive_site.hiveserver2_impersonation_enabled()
return use_sasl, mechanism, kerberos_principal_short_name, impersonation_enabled, ldap_username, ldap_password
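  # Opens a server session and records its encoded handle in the Session model for reuse across calls.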
def open_session(self, user):
kwargs = {
'client_protocol': beeswax_conf.THRIFT_VERSION.get() - 1,
      'username': user.username, # With SASL or LDAP, the username comes from the authentication mechanism, since it depends on it.
'configuration': {},
}
if self.impersonation_enabled:
kwargs.update({'username': DEFAULT_USER})
if self.query_server['server_name'] == 'impala': # Only when Impala accepts it
kwargs['configuration'].update({'impala.doas.user': user.username})
    if self.query_server['server_name'] == 'beeswax': # Always set for Hive, regardless of impersonation
kwargs['configuration'].update({'hive.server2.proxy.user': user.username})
req = TOpenSessionReq(**kwargs)
res = self._client.OpenSession(req)
if res.status is not None and res.status.statusCode not in (TStatusCode.SUCCESS_STATUS,):
if hasattr(res.status, 'errorMessage') and res.status.errorMessage:
message = res.status.errorMessage
else:
message = ''
raise QueryServerException(Exception('Bad status for request %s:\n%s' % (req, res)), message=message)
sessionId = res.sessionHandle.sessionId
LOG.info('Opening session %s' % sessionId)
encoded_status, encoded_guid = HiveServerQueryHandle(secret=sessionId.secret, guid=sessionId.guid).get()
return Session.objects.create(owner=user,
application=self.query_server['server_name'],
status_code=res.status.statusCode,
secret=encoded_status,
guid=encoded_guid,
server_protocol_version=res.serverProtocolVersion)
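  # Wraps every RPC: attaches a session handle, retries once with a fresh session if the
  # server reports an expired or invalid session, and raises on any error status.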
def call(self, fn, req, status=TStatusCode.SUCCESS_STATUS):
session = Session.objects.get_session(self.user, self.query_server['server_name'])
if session is None:
session = self.open_session(self.user)
if hasattr(req, 'sessionHandle') and req.sessionHandle is None:
req.sessionHandle = session.get_handle()
res = fn(req)
# Not supported currently in HS2 and Impala: TStatusCode.INVALID_HANDLE_STATUS
if res.status.statusCode == TStatusCode.ERROR_STATUS and \
re.search('Invalid SessionHandle|Invalid session|Client session expired', res.status.errorMessage or '', re.I):
      LOG.info('Retrying %s with a new session because of %s' % (self.user, res))
session = self.open_session(self.user)
req.sessionHandle = session.get_handle()
      # fn.attr holds the name of the RPC to call; look the method up again on the client for the retry
res = getattr(self._client, fn.attr)(req)
if status is not None and res.status.statusCode not in (
TStatusCode.SUCCESS_STATUS, TStatusCode.SUCCESS_WITH_INFO_STATUS, TStatusCode.STILL_EXECUTING_STATUS):
if hasattr(res.status, 'errorMessage') and res.status.errorMessage:
message = res.status.errorMessage
else:
message = ''
raise QueryServerException(Exception('Bad status for request %s:\n%s' % (req, res)), message=message)
else:
return res
def close_session(self, sessionHandle):
req = TCloseSessionReq(sessionHandle=sessionHandle)
return self._client.CloseSession(req)
def get_databases(self):
# GetCatalogs() is not implemented in HS2
req = TGetSchemasReq()
res = self.call(self._client.GetSchemas, req)
results, schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT)
self.close_operation(res.operationHandle)
col = 'TABLE_SCHEM'
return HiveServerTRowSet(results.results, schema.schema).cols((col,))
def get_tables(self, database, table_names):
req = TGetTablesReq(schemaName=database, tableName=table_names)
res = self.call(self._client.GetTables, req)
results, schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=5000)
self.close_operation(res.operationHandle)
return HiveServerTRowSet(results.results, schema.schema).cols(('TABLE_NAME',))
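  # Combines two sources of metadata: the GetTables RPC and a DESCRIBE FORMATTED statement.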
def get_table(self, database, table_name):
req = TGetTablesReq(schemaName=database, tableName=table_name)
res = self.call(self._client.GetTables, req)
table_results, table_schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT)
self.close_operation(res.operationHandle)
query = 'DESCRIBE FORMATTED `%s`.`%s`' % (database, table_name)
(desc_results, desc_schema), operation_handle = self.execute_statement(query, max_rows=5000, orientation=TFetchOrientation.FETCH_NEXT)
self.close_operation(operation_handle)
return HiveServerTable(table_results.results, table_schema.schema, desc_results.results, desc_schema.schema)
def execute_query(self, query, max_rows=1000):
configuration = self._get_query_configuration(query)
return self.execute_query_statement(statement=query.query['query'], max_rows=max_rows, configuration=configuration)
  def execute_query_statement(self, statement, max_rows=1000, configuration=None, orientation=TFetchOrientation.FETCH_FIRST):
(results, schema), operation_handle = self.execute_statement(statement=statement, max_rows=max_rows, configuration=configuration, orientation=orientation)
return HiveServerDataTable(results, schema, operation_handle, self.query_server)
def execute_async_query(self, query, statement=0):
if statement == 0:
# Impala just has settings currently
if self.query_server['server_name'] == 'beeswax':
for resource in query.get_configuration_statements():
self.execute_statement(resource.strip())
configuration = {}
if self.query_server['server_name'] == 'impala' and self.query_server['querycache_rows'] > 0:
configuration[IMPALA_RESULTSET_CACHE_SIZE] = str(self.query_server['querycache_rows'])
# The query can override the default configuration
configuration.update(self._get_query_configuration(query))
query_statement = query.get_query_statement(statement)
return self.execute_async_statement(statement=query_statement, confOverlay=configuration)
  def execute_statement(self, statement, max_rows=1000, configuration=None, orientation=TFetchOrientation.FETCH_NEXT):
    # A shared mutable default dict would leak settings between calls; create a fresh one instead.
    configuration = configuration if configuration is not None else {}
    if self.query_server['server_name'] == 'impala' and self.query_server['QUERY_TIMEOUT_S'] > 0:
      configuration['QUERY_TIMEOUT_S'] = str(self.query_server['QUERY_TIMEOUT_S'])
req = TExecuteStatementReq(statement=statement.encode('utf-8'), confOverlay=configuration)
res = self.call(self._client.ExecuteStatement, req)
return self.fetch_result(res.operationHandle, max_rows=max_rows, orientation=orientation), res.operationHandle
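  # Submits the statement with runAsync=True and returns a query handle instead of waiting for rows.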
def execute_async_statement(self, statement, confOverlay):
if self.query_server['server_name'] == 'impala' and self.query_server['QUERY_TIMEOUT_S'] > 0:
confOverlay['QUERY_TIMEOUT_S'] = str(self.query_server['QUERY_TIMEOUT_S'])
req = TExecuteStatementReq(statement=statement.encode('utf-8'), confOverlay=confOverlay, runAsync=True)
res = self.call(self._client.ExecuteStatement, req)
return HiveServerQueryHandle(secret=res.operationHandle.operationId.secret,
guid=res.operationHandle.operationId.guid,
operation_type=res.operationHandle.operationType,
has_result_set=res.operationHandle.hasResultSet,
modified_row_count=res.operationHandle.modifiedRowCount)
def fetch_data(self, operation_handle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=1000):
    # Fetch until the result set comes back empty (working around a HS2 bug) instead of relying on hasMoreRows.
results, schema = self.fetch_result(operation_handle, orientation, max_rows)
return HiveServerDataTable(results, schema, operation_handle, self.query_server)
def cancel_operation(self, operation_handle):
req = TCancelOperationReq(operationHandle=operation_handle)
return self.call(self._client.CancelOperation, req)
def close_operation(self, operation_handle):
req = TCloseOperationReq(operationHandle=operation_handle)
return self.call(self._client.CloseOperation, req)
def get_columns(self, database, table):
req = TGetColumnsReq(schemaName=database, tableName=table)
res = self.call(self._client.GetColumns, req)
res, schema = self.fetch_result(res.operationHandle, orientation=TFetchOrientation.FETCH_NEXT)
self.close_operation(res.operationHandle)
return res, schema
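  # Fetches a batch of rows; the result-set schema is only retrieved on the first (FETCH_FIRST) call.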
def fetch_result(self, operation_handle, orientation=TFetchOrientation.FETCH_FIRST, max_rows=1000):
if operation_handle.hasResultSet:
fetch_req = TFetchResultsReq(operationHandle=operation_handle, orientation=orientation, maxRows=max_rows)
res = self.call(self._client.FetchResults, fetch_req)
else:
res = TFetchResultsResp(results=TRowSet(startRowOffset=0, rows=[], columns=[]))
    if operation_handle.hasResultSet and orientation == TFetchOrientation.FETCH_FIRST: # Only fetch the schema on the first call, which should be the one with start_over
meta_req = TGetResultSetMetadataReq(operationHandle=operation_handle)
schema = self.call(self._client.GetResultSetMetadata, meta_req)
else:
schema = None
return res, schema
def fetch_log(self, operation_handle, orientation=TFetchOrientation.FETCH_NEXT, max_rows=1000):
req = TFetchResultsReq(operationHandle=operation_handle, orientation=orientation, maxRows=max_rows, fetchType=1)
res = self.call(self._client.FetchResults, req)
if beeswax_conf.THRIFT_VERSION.get() >= 7:
lines = res.results.columns[0].stringVal.values
else:
lines = imap(lambda r: r.colVals[0].stringVal.value, res.results.rows)
return '\n'.join(lines)
def get_operation_status(self, operation_handle):
req = TGetOperationStatusReq(operationHandle=operation_handle)
return self.call(self._client.GetOperationStatus, req)
def explain(self, query):
query_statement = query.get_query_statement(0)
configuration = self._get_query_configuration(query)
return self.execute_query_statement(statement='EXPLAIN %s' % query_statement, configuration=configuration, orientation=TFetchOrientation.FETCH_NEXT)
def get_log(self, operation_handle):
try:
req = TGetLogReq(operationHandle=operation_handle)
res = self.call(self._client.GetLog, req)
return res.log
    except Exception:
LOG.exception('server does not support GetLog')
return 'Server does not support GetLog()'
def get_partitions(self, database, table_name, max_parts, reverse_sort=True):
table = self.get_table(database, table_name)
if max_parts is None or max_parts <= 0:
max_rows = 10000
else:
max_rows = 1000 if max_parts <= 250 else max_parts
partitionTable = self.execute_query_statement('SHOW PARTITIONS %s.%s' % (database, table_name), max_rows=max_rows)
    partitions = [PartitionValueCompatible(partition, table) for partition in partitionTable.rows()]
    if max_parts is not None and max_parts > 0:
      # Slicing with max_parts=None would raise a TypeError; only trim when a positive limit is given.
      partitions = partitions[-max_parts:]
if reverse_sort:
partitions.reverse()
return partitions
def _get_query_configuration(self, query):
return dict([(setting['key'], setting['value']) for setting in query.settings])
class HiveServerTableCompatible(HiveServerTable):
"""Same API as Beeswax"""
def __init__(self, hive_table):
self.table = hive_table.table
self.table_schema = hive_table.table_schema
self.desc_results = hive_table.desc_results
self.desc_schema = hive_table.desc_schema
self.describe = HiveServerTTableSchema(self.desc_results, self.desc_schema).cols()
@property
def cols(self):
return [
type('Col', (object,), {
'name': col.get('col_name', '').strip(),
'type': col.get('data_type', '').strip(),
'comment': col.get('comment', '').strip() if col.get('comment') else ''
}) for col in HiveServerTable.cols.fget(self)
]
class ResultCompatible:
def __init__(self, data_table):
self.data_table = data_table
self.rows = data_table.rows
self.has_more = data_table.has_more
self.start_row = data_table.startRowOffset
self.ready = True
@property
def columns(self):
return self.cols()
def cols(self):
return [col.name for col in self.data_table.cols()]
class PartitionKeyCompatible:
def __init__(self, name, type, comment):
self.name = name
self.type = type
self.comment = comment
def __eq__(self, other):
return isinstance(other, PartitionKeyCompatible) and \
self.name == other.name and \
self.type == other.type and \
self.comment == other.comment
def __repr__(self):
return 'PartitionKey(name:%s, type:%s, comment:%s)' % (self.name, self.type, self.comment)
class PartitionValueCompatible:
def __init__(self, partition, table):
# Parses: ['datehour=2013022516'] or ['month=2011-07/dt=2011-07-01/hr=12']
self.values = [val.split('=')[1] for part in partition for val in part.split('/')]
self.sd = type('Sd', (object,), {'location': '%s/%s' % (table.path_location, ','.join(partition)),})
class ExplainCompatible:
def __init__(self, data_table):
self.textual = '\n'.join([line[0] for line in data_table.rows()])
class ResultMetaCompatible:
def __init__(self):
self.in_tablename = True
class HiveServerClientCompatible(object):
"""Same API as Beeswax"""
def __init__(self, client):
self._client = client
self.user = client.user
self.query_server = client.query_server
def query(self, query, statement=0):
return self._client.execute_async_query(query, statement)
def get_state(self, handle):
operationHandle = handle.get_rpc_handle()
res = self._client.get_operation_status(operationHandle)
return HiveServerQueryHistory.STATE_MAP[res.operationState]
def get_operation_status(self, handle):
operationHandle = handle.get_rpc_handle()
return self._client.get_operation_status(operationHandle)
def use(self, query):
data = self._client.execute_query(query)
self._client.close_operation(data.operation_handle)
return data
def explain(self, query):
data_table = self._client.explain(query)
data = ExplainCompatible(data_table)
self._client.close_operation(data_table.operation_handle)
return data
def fetch(self, handle, start_over=False, max_rows=None):
operationHandle = handle.get_rpc_handle()
if max_rows is None:
max_rows = 1000
if start_over and not (self.query_server['server_name'] == 'impala' and self.query_server['querycache_rows'] == 0): # Backward compatibility for impala
orientation = TFetchOrientation.FETCH_FIRST
else:
orientation = TFetchOrientation.FETCH_NEXT
data_table = self._client.fetch_data(operationHandle, orientation=orientation, max_rows=max_rows)
return ResultCompatible(data_table)
def cancel_operation(self, handle):
operationHandle = handle.get_rpc_handle()
return self._client.cancel_operation(operationHandle)
def close(self, handle):
return self.close_operation(handle)
def close_operation(self, handle):
operationHandle = handle.get_rpc_handle()
return self._client.close_operation(operationHandle)
def close_session(self, session):
operationHandle = session.get_handle()
return self._client.close_session(operationHandle)
def dump_config(self):
return 'Does not exist in HS2'
def get_log(self, handle, start_over=True):
operationHandle = handle.get_rpc_handle()
if beeswax_conf.USE_GET_LOG_API.get() or self.query_server['server_name'] == 'impala':
return self._client.get_log(operationHandle)
else:
if start_over:
orientation = TFetchOrientation.FETCH_FIRST
else:
orientation = TFetchOrientation.FETCH_NEXT
return self._client.fetch_log(operationHandle, orientation=orientation, max_rows=-1)
def get_databases(self):
col = 'TABLE_SCHEM'
return [table[col] for table in self._client.get_databases()]
def get_tables(self, database, table_names):
tables = [table['TABLE_NAME'] for table in self._client.get_tables(database, table_names)]
tables.sort()
return tables
def get_table(self, database, table_name):
table = self._client.get_table(database, table_name)
return HiveServerTableCompatible(table)
def get_columns(self, database, table):
return self._client.get_columns(database, table)
def get_default_configuration(self, *args, **kwargs):
return {}
def get_results_metadata(self, handle):
# We just need to mock
return ResultMetaCompatible()
def create_database(self, name, description): raise NotImplementedError()
def get_database(self, *args, **kwargs): raise NotImplementedError()
def alter_table(self, dbname, tbl_name, new_tbl): raise NotImplementedError()
def open_session(self, user):
return self._client.open_session(user)
def add_partition(self, new_part): raise NotImplementedError()
def get_partition(self, *args, **kwargs): raise NotImplementedError()
def get_partitions(self, database, table_name, max_parts, reverse_sort=True):
return self._client.get_partitions(database, table_name, max_parts, reverse_sort)
def alter_partition(self, db_name, tbl_name, new_part): raise NotImplementedError()
|
apache-2.0
| -8,079,258,786,321,912,000 | 34.350557 | 158 | 0.680749 | false |
qstokkink/py-ipv8
|
ipv8/keyvault/keys.py
|
1
|
1041
|
import abc
from hashlib import sha1
class Key(metaclass=abc.ABCMeta):
"""
Interface for a public or private key.
"""
    @abc.abstractmethod
    def pub(self):
        """
        Return the public counterpart of this key.
        """
    @abc.abstractmethod
    def has_secret_key(self):
        """
        Whether this key holds private key material.
        """
    @abc.abstractmethod
    def key_to_bin(self):
        """
        Serialize this key to its binary representation.
        """
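    # The hash is always computed over the public key material, so a keypair
    # and its public half share the same identifier.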
def key_to_hash(self):
if self.has_secret_key():
return sha1(self.pub().key_to_bin()).digest()
return sha1(self.key_to_bin()).digest()
class PrivateKey(Key, metaclass=abc.ABCMeta):
"""
Interface for a private key.
"""
def has_secret_key(self):
return True
    @abc.abstractmethod
    def signature(self, msg):
        """
        Create a signature over the given message.
        """
class PublicKey(Key, metaclass=abc.ABCMeta):
"""
Interface for a public key.
"""
def pub(self):
return self
def has_secret_key(self):
return False
    @abc.abstractmethod
    def verify(self, signature, msg):
        """
        Check whether the given signature is valid for the given message.
        """
    @abc.abstractmethod
    def get_signature_length(self):
        """
        The length, in bytes, of signatures produced with this key type.
        """
|
lgpl-3.0
| 2,042,628,768,410,937,900 | 16.948276 | 57 | 0.592699 | false |
jonge-democraten/zues
|
appolo/models.py
|
1
|
1227
|
from django.db import models
class Locatie(models.Model):
def __str__(self):
return self.naam
naam = models.CharField(max_length=200)
lat = models.FloatField()
long = models.FloatField()
class Meta:
verbose_name_plural = 'locaties'
class Dag(models.Model):
def __str__(self):
return str(self.datum)
datum = models.DateField()
class Meta:
verbose_name_plural = 'dagen'
class Activiteit(models.Model):
def __str__(self):
return self.naam
naam = models.CharField(max_length=200)
begintijd = models.DateTimeField()
eindtijd = models.DateTimeField()
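    # PROTECT blocks deletion of a Dag or Locatie while activities still reference it.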
dag = models.ForeignKey(Dag, on_delete=models.PROTECT)
locatie = models.ForeignKey(Locatie, on_delete=models.PROTECT)
class Meta:
verbose_name_plural = 'activiteiten'
class Nieuwsitem(models.Model):
def __str__(self):
return self.titel
titel = models.CharField(max_length=200)
tekst = models.TextField()
class Meta:
verbose_name_plural = 'nieuwsitems'
class Hashtag(models.Model):
def __str__(self):
return self.tekst
tekst = models.CharField(max_length=200)
class Meta:
verbose_name_plural = 'hashtags'
|
mit
| 6,851,658,738,776,405,000 | 24.5625 | 66 | 0.647922 | false |