| text (string, 6–947k chars) | repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) |
|---|---|---|---|---|---|---|
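The table above is the schema of a dump of scored Python source files; each record below is the `text` column followed by its metadata row. A minimal sketch of loading such a dataset with the Hugging Face `datasets` library follows; the dataset name is a placeholder, not the real one:

# Hypothetical loader for a dataset with the schema above; the name
# "example/python-code-scored" is a placeholder, not an actual dataset.
from datasets import load_dataset

ds = load_dataset("example/python-code-scored", split="train")
for row in ds.filter(lambda r: r["score"] > 0.02):
    print(row["repo_name"], row["path"], row["size"], row["score"])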
import _plotly_utils.basevalidators
class NticksValidator(_plotly_utils.basevalidators.IntegerValidator):
def __init__(
self, plotly_name="nticks", parent_name="layout.ternary.baxis", **kwargs
):
super(NticksValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
min=kwargs.pop("min", 1),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/layout/ternary/baxis/_nticks.py | Python | mit | 459 | 0.002179 |
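A short usage sketch for the validator above (assuming plotly is installed; validate_coerce is the method plotly's base validators expose to check and coerce values):

# Sketch: exercising NticksValidator (assumes plotly is installed).
v = NticksValidator()
print(v.validate_coerce(5))  # -> 5, a valid integer
# v.validate_coerce(0)       # would raise ValueError, since min is 1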
from pydatastream import Datastream
import json
import datetime
import sys
import os.path
#hardcoded directories
dir_input = "input/"
dir_output = "output/"
#check that the login credentials and input file location are being passed in
numOfArgs = len(sys.argv) - 1
if numOfArgs != 3:
print "Please run this python script with username,password and input file location in that order respectively."
exit()
#Setup login credentials and input file location
username = str(sys.argv[1])
pw = str(sys.argv[2])
input_file_loc = dir_input + str(sys.argv[3])
#Ensure that the input file location exists
if ( not os.path.isfile(str(input_file_loc)) ):
print "The file " + str(input_file_loc) + " does not exist."
exit()
#login credentials to datastream
DWE = Datastream(username=username,password=pw)
#other info from datastream
info = DWE.system_info()
subscribed_sources = DWE.sources()
#replace missing data with NaNs
DWE.raise_on_error = False
#get all codes, groups, start dates from input file
with open(input_file_loc,'r') as input_file:
symbol_ref = json.load(input_file)
#download timestamp
download_date = {'Custom_Download_Date' : datetime.datetime.now().isoformat()}
#calculate time taken for entire process
time_taken = datetime.timedelta(0)
for desc,desc_value in symbol_ref.iteritems():
for group,group_value in desc_value.iteritems():
#create list for custom fields
custom_fields = list()
for code_key,code_value in group_value.iteritems():
for key,value in code_value.iteritems():
if(key == 'code'):
search_code = value
search_symbol = {'Custom_Ticker' : value}
if(key == 'start_date'):
start_date = value
if(key == 'custom_field'):
custom_fields[:] = []
custom_fields.append(value)
startTime = datetime.datetime.now()
#send request to retrieve the data from Datastream
req = DWE.fetch(str(search_code),custom_fields,date_from=str(start_date),only_data=False)
time_taken = time_taken + datetime.datetime.now() - startTime
#format date and convert to json
raw_json = req[0].to_json(date_format='iso')
raw_metadata = req[1].to_json()
#Data cleaning and processing
#remove the time component including the '.' char from the key values of datetime in the data
raw_json = raw_json.replace("T00:00:00.000Z","")
            #rename the metadata key "0" to "Custom_WS_Key"
raw_metadata = raw_metadata.replace("\"0\"","\"Custom_WS_Key\"")
#combine the data and the metadata about the code
allData_str = json.loads(raw_json)
metadata_str = json.loads(raw_metadata)
datastream_combined = {key : value for (key,value) in (allData_str.items() + metadata_str.items())}
#create symbol json string and append to data
data_with_symbol = {key : value for (key,value) in (search_symbol.items() + datastream_combined.items())}
#append group
group_code = {'Custom_Group' : group}
data_with_group = {key : value for (key,value) in (group_code.items() + data_with_symbol.items())}
#append category
category = {'Custom_Description' : desc}
data_with_category = {key : value for (key,value) in (category.items() + data_with_group.items())}
#append download timestamp
final_data = {key : value for (key,value) in (download_date.items() + data_with_category.items())}
final_data_json = json.dumps(final_data)
#decode to the right format for saving to disk
json_file = json.JSONDecoder().decode((final_data_json))
#save to json file on server
if(len(group_value) > 1):
filename = dir_output + desc + '_' + group + '_' + code_key + '.json'
else:
filename = dir_output + desc + '_' + group + '.json'
with open(filename,'w') as outfile:
json.dump(json_file,outfile,sort_keys=True)
print "time taken for " + str(sys.argv[3]) + " to be retrieved: " + str(time_taken)
| jinser/automate_pydatastream | getcustom.py | Python | mit | 3,917 | 0.038295 |
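The script above is Python 2 (print statements, iteritems, and merging dicts by concatenating .items() lists). For reference, a sketch of the Python 3 equivalent of that merge idiom:

# Python 3 sketch of the dict-merge idiom used repeatedly above:
# dict unpacking merges mappings, later keys overriding earlier ones.
symbol = {'Custom_Ticker': 'ABC'}
combined = {'Custom_Group': 'G1', 'values': [1, 2, 3]}
merged = {**symbol, **combined}  # Py2 equivalent: dict(symbol.items() + combined.items())
print(merged)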
import unittest
from .Weather_analyzer import is_not_number
class BtcPriceTestCase(unittest.TestCase):
def test_checking_of_input_in_form(self):
input = 46
        answer = is_not_number(input)  # 46 is a number, so is_not_number should return False
self.assertEqual(answer, False)
| AntonKuksov/Weather_analyzer | test_form.py | Python | gpl-3.0 | 300 | 0.006667 |
import datetime
from django.shortcuts import render_to_response, get_object_or_404, HttpResponse, HttpResponseRedirect, Http404
from django.template import RequestContext
from django.core.urlresolvers import reverse
from articles.models import Article
from taxonomy.models import TaxonomyMap
from core.views import update_online_users
@update_online_users
def index(request):
articles = Article.objects.all()[:10]
return render_to_response('articles/index.html', {'articles': articles}, context_instance = RequestContext(request))
@update_online_users
def category(request, category_id):
article_ids = TaxonomyMap.objects.filter(term__id = category_id, type__type = 'Category', content_type__model = 'article').values_list('object_id', flat = True)
category_title = TaxonomyMap.objects.filter(term__id = category_id, type__type = 'Category', content_type__model = 'article')[0].term.term
articles = Article.objects.filter(id__in = article_ids)
return render_to_response('articles/category.html', {'category_id': category_id, 'category_title': category_title, 'articles': articles}, context_instance = RequestContext(request))
@update_online_users
def details(request, title_slug):
article = get_object_or_404(Article, title_slug = title_slug)
return render_to_response('articles/details.html', {'article': article}, context_instance = RequestContext(request))
| Kami/sgrstats.com | sgrstats/articles/views.py | Python | apache-2.0 | 1,447 | 0.026261 |
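These views use the pre-Django-1.10 render_to_response/RequestContext API (and django.core.urlresolvers, later renamed django.urls). A sketch of the modern equivalent of the first view:

# Sketch: modern Django equivalent of the index view above.
from django.shortcuts import render

def index(request):
    articles = Article.objects.all()[:10]
    return render(request, 'articles/index.html', {'articles': articles})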
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2014 Sébastien Helleu <flashcode@flashtux.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Documentation generator for WeeChat: build include files with commands,
options, infos, infolists, hdata and completions for WeeChat core and
plugins.
Instructions to build the doc files yourself in the WeeChat directories
(replace all paths with your own path to WeeChat):
1. run WeeChat and load this script with the following command:
     /python load ~/src/weechat/doc/docgen.py
2. change the build path to your doc/ directory:
     /set plugins.var.python.docgen.path "~/src/weechat/doc"
3. run the docgen command:
     /docgen
Note: it is recommended to load only this script when building the docs.
Files are written to ~/src/weechat/doc/xx/autogen/ (where xx is the language).
"""
from __future__ import print_function
SCRIPT_NAME = 'docgen'
SCRIPT_AUTHOR = 'Sébastien Helleu <flashcode@flashtux.org>'
SCRIPT_VERSION = '0.1'
SCRIPT_LICENSE = 'GPL3'
SCRIPT_DESC = 'Documentation generator for WeeChat'
SCRIPT_COMMAND = 'docgen'
IMPORT_OK = True
try:
import weechat # pylint: disable=import-error
except ImportError:
print('This script must be run under WeeChat.')
print('Get WeeChat now at: http://weechat.org/')
IMPORT_OK = False
try:
import gettext
import hashlib
import os
import re
from collections import defaultdict
from operator import itemgetter
except ImportError as message:
print('Missing package(s) for {0}: {1}'.format(SCRIPT_NAME, message))
IMPORT_OK = False
# default path where doc files will be written (should be doc/ in sources
# package tree)
# path must have subdirectories with languages and autogen directory:
# path
# |-- en
# | |-- autogen
# |-- fr
# | |-- autogen
# ...
DEFAULT_PATH = '~/src/weechat/doc'
# list of locales for which we want to build doc files to include
LOCALE_LIST = ('en_US', 'fr_FR', 'it_IT', 'de_DE', 'ja_JP', 'pl_PL')
# all commands/options/... of the following plugins will produce a file;
# non-listed plugins are ignored
# value: "c" = plugin may have many commands
#        "o" = write config options for plugin
# if a plugin is listed without "c", it has only one command,
# /name (where "name" is the name of the plugin)
# Note: the core is treated as a plugin called "weechat"
PLUGIN_LIST = {
'sec': 'o',
'weechat': 'co',
'alias': '',
'aspell': 'o',
'charset': 'o',
'exec': 'o',
'fifo': 'o',
'irc': 'co',
'logger': 'o',
'relay': 'o',
'script': 'o',
'perl': '',
'python': '',
'ruby': '',
'lua': '',
'tcl': '',
'guile': '',
'trigger': 'o',
'xfer': 'co',
}
# options to ignore
IGNORE_OPTIONS = (
r'aspell\.dict\..*',
r'aspell\.option\..*',
r'charset\.decode\..*',
r'charset\.encode\..*',
r'irc\.msgbuffer\..*',
r'irc\.ctcp\..*',
r'irc\.ignore\..*',
r'irc\.server\..*',
r'jabber\.server\..*',
r'logger\.level\..*',
r'logger\.mask\..*',
r'relay\.port\..*',
r'trigger\.trigger\..*',
r'weechat\.palette\..*',
r'weechat\.proxy\..*',
r'weechat\.bar\..*',
r'weechat\.debug\..*',
r'weechat\.notify\..*',
)
# completions to ignore
IGNORE_COMPLETIONS_ITEMS = (
'docgen.*',
'jabber.*',
'weeget.*',
)
def get_commands():
"""
Get list of commands in a dict with 3 indexes: plugin, command, xxx.
"""
commands = defaultdict(lambda: defaultdict(defaultdict))
infolist = weechat.infolist_get('hook', '', 'command')
while weechat.infolist_next(infolist):
plugin = weechat.infolist_string(infolist, 'plugin_name') or 'weechat'
if plugin in PLUGIN_LIST:
command = weechat.infolist_string(infolist, 'command')
if command == plugin or 'c' in PLUGIN_LIST[plugin]:
for key in ('description', 'args', 'args_description',
'completion'):
commands[plugin][command][key] = \
weechat.infolist_string(infolist, key)
weechat.infolist_free(infolist)
return commands
def get_options():
"""
Get list of config options in a dict with 4 indexes: config,
section, option, xxx.
"""
options = \
defaultdict(lambda: defaultdict(lambda: defaultdict(defaultdict)))
infolist = weechat.infolist_get('option', '', '')
while weechat.infolist_next(infolist):
full_name = weechat.infolist_string(infolist, 'full_name')
if not re.search('|'.join(IGNORE_OPTIONS), full_name):
config = weechat.infolist_string(infolist, 'config_name')
if config in PLUGIN_LIST and 'o' in PLUGIN_LIST[config]:
section = weechat.infolist_string(infolist, 'section_name')
option = weechat.infolist_string(infolist, 'option_name')
for key in ('type', 'string_values', 'default_value',
'description'):
options[config][section][option][key] = \
weechat.infolist_string(infolist, key)
for key in ('min', 'max', 'null_value_allowed'):
options[config][section][option][key] = \
weechat.infolist_integer(infolist, key)
weechat.infolist_free(infolist)
return options
def get_infos():
"""
Get list of infos hooked by plugins in a dict with 3 indexes:
plugin, name, xxx.
"""
infos = defaultdict(lambda: defaultdict(defaultdict))
infolist = weechat.infolist_get('hook', '', 'info')
while weechat.infolist_next(infolist):
info_name = weechat.infolist_string(infolist, 'info_name')
plugin = weechat.infolist_string(infolist, 'plugin_name') or 'weechat'
for key in ('description', 'args_description'):
infos[plugin][info_name][key] = \
weechat.infolist_string(infolist, key)
weechat.infolist_free(infolist)
return infos
def get_infos_hashtable():
"""
Get list of infos (hashtable) hooked by plugins in a dict with 3
indexes: plugin, name, xxx.
"""
infos_hashtable = defaultdict(lambda: defaultdict(defaultdict))
infolist = weechat.infolist_get('hook', '', 'info_hashtable')
while weechat.infolist_next(infolist):
info_name = weechat.infolist_string(infolist, 'info_name')
plugin = weechat.infolist_string(infolist, 'plugin_name') or 'weechat'
for key in ('description', 'args_description', 'output_description'):
infos_hashtable[plugin][info_name][key] = \
weechat.infolist_string(infolist, key)
weechat.infolist_free(infolist)
return infos_hashtable
def get_infolists():
"""
Get list of infolists hooked by plugins in a dict with 3 indexes:
plugin, name, xxx.
"""
infolists = defaultdict(lambda: defaultdict(defaultdict))
infolist = weechat.infolist_get('hook', '', 'infolist')
while weechat.infolist_next(infolist):
infolist_name = weechat.infolist_string(infolist, 'infolist_name')
plugin = weechat.infolist_string(infolist, 'plugin_name') or 'weechat'
for key in ('description', 'pointer_description', 'args_description'):
infolists[plugin][infolist_name][key] = \
weechat.infolist_string(infolist, key)
weechat.infolist_free(infolist)
return infolists
# pylint: disable=too-many-locals
def get_hdata():
"""
Get list of hdata hooked by plugins in a dict with 3 indexes:
plugin, name, xxx.
"""
hdata = defaultdict(lambda: defaultdict(defaultdict))
infolist = weechat.infolist_get('hook', '', 'hdata')
while weechat.infolist_next(infolist):
hdata_name = weechat.infolist_string(infolist, 'hdata_name')
plugin = weechat.infolist_string(infolist, 'plugin_name') or 'weechat'
hdata[plugin][hdata_name]['description'] = \
weechat.infolist_string(infolist, 'description')
variables = ''
variables_update = ''
lists = ''
ptr_hdata = weechat.hdata_get(hdata_name)
if ptr_hdata:
hdata2 = []
string = weechat.hdata_get_string(ptr_hdata, 'var_keys_values')
if string:
for item in string.split(','):
key = item.split(':')[0]
var_offset = weechat.hdata_get_var_offset(ptr_hdata, key)
var_array_size = \
weechat.hdata_get_var_array_size_string(ptr_hdata, '',
key)
if var_array_size:
var_array_size = \
', array_size: "{0}"'.format(var_array_size)
var_hdata = weechat.hdata_get_var_hdata(ptr_hdata, key)
if var_hdata:
var_hdata = ', hdata: "{0}"'.format(var_hdata)
type_string = weechat.hdata_get_var_type_string(ptr_hdata,
key)
hdata2.append({
'offset': var_offset,
'text': '\'{0}\' ({1})'.format(key, type_string),
'textlong': '\'{0}\' ({1}{2}{3})'.format(
key, type_string, var_array_size, var_hdata),
'update': weechat.hdata_update(
ptr_hdata, '', {'__update_allowed': key}),
})
hdata2 = sorted(hdata2, key=itemgetter('offset'))
for item in hdata2:
variables += '*** {0}\n'.format(item['textlong'])
if item['update']:
variables_update += '*** {0}\n'.format(item['text'])
if weechat.hdata_update(ptr_hdata, '',
{'__create_allowed': ''}):
variables_update += '*** \'__create\'\n'
if weechat.hdata_update(ptr_hdata, '',
{'__delete_allowed': ''}):
variables_update += '*** \'__delete\'\n'
hdata[plugin][hdata_name]['vars'] = variables
hdata[plugin][hdata_name]['vars_update'] = variables_update
string = weechat.hdata_get_string(ptr_hdata, 'list_keys')
if string:
for item in sorted(string.split(',')):
lists += '*** \'{0}\'\n'.format(item)
hdata[plugin][hdata_name]['lists'] = lists
weechat.infolist_free(infolist)
return hdata
def get_completions():
"""
Get list of completions hooked by plugins in a dict with 3 indexes:
plugin, item, xxx.
"""
completions = defaultdict(lambda: defaultdict(defaultdict))
infolist = weechat.infolist_get('hook', '', 'completion')
while weechat.infolist_next(infolist):
completion_item = weechat.infolist_string(infolist, 'completion_item')
if not re.search('|'.join(IGNORE_COMPLETIONS_ITEMS), completion_item):
plugin = weechat.infolist_string(infolist, 'plugin_name') or \
'weechat'
completions[plugin][completion_item]['description'] = \
weechat.infolist_string(infolist, 'description')
weechat.infolist_free(infolist)
return completions
def get_url_options():
"""
Get list of completions hooked by plugins in a dict with 3 indexes:
plugin, item, xxx.
"""
url_options = []
infolist = weechat.infolist_get('url_options', '', '')
while weechat.infolist_next(infolist):
url_options.append({
'name': weechat.infolist_string(infolist, 'name').lower(),
'option': weechat.infolist_integer(infolist, 'option'),
'type': weechat.infolist_string(infolist, 'type'),
'constants': weechat.infolist_string(
infolist, 'constants').lower().replace(',', ', ')
})
weechat.infolist_free(infolist)
return url_options
def update_file(oldfile, newfile, num_files, num_files_updated, obj):
"""Update a doc file."""
try:
with open(oldfile, 'r') as _file:
shaold = hashlib.sha224(_file.read()).hexdigest()
except IOError:
shaold = ''
try:
with open(newfile, 'r') as _file:
shanew = hashlib.sha224(_file.read()).hexdigest()
except IOError:
shanew = ''
if shaold != shanew:
if os.path.exists(oldfile):
os.unlink(oldfile)
os.rename(newfile, oldfile)
num_files_updated['total1'] += 1
num_files_updated['total2'] += 1
num_files_updated[obj] += 1
else:
if os.path.exists(oldfile):
os.unlink(newfile)
num_files['total1'] += 1
num_files['total2'] += 1
num_files[obj] += 1
# pylint: disable=too-many-locals, too-many-branches, too-many-statements
def docgen_cmd_cb(data, buf, args):
"""Callback for /docgen command."""
if args:
locales = args.split(' ')
else:
locales = LOCALE_LIST
commands = get_commands()
options = get_options()
infos = get_infos()
infos_hashtable = get_infos_hashtable()
infolists = get_infolists()
hdata = get_hdata()
completions = get_completions()
url_options = get_url_options()
# get path and replace ~ by home if needed
path = weechat.config_get_plugin('path')
if path.startswith('~'):
path = os.environ['HOME'] + path[1:]
# write to doc files, by locale
num_files = defaultdict(int)
num_files_updated = defaultdict(int)
# pylint: disable=undefined-variable
translate = lambda s: (s and _(s)) or s
escape = lambda s: s.replace('|', '\\|')
for locale in locales:
for key in num_files:
if key != 'total2':
num_files[key] = 0
num_files_updated[key] = 0
trans = gettext.translation('weechat',
weechat.info_get('weechat_localedir', ''),
languages=[locale + '.UTF-8'],
fallback=True)
trans.install()
directory = path + '/' + locale[0:2] + '/autogen'
if not os.path.isdir(directory):
weechat.prnt('',
'{0}docgen error: directory "{1}" does not exist'
''.format(weechat.prefix('error'), directory))
continue
# write commands
for plugin in commands:
filename = directory + '/user/' + plugin + '_commands.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
for command in sorted(commands[plugin]):
_cmd = commands[plugin][command]
args = translate(_cmd['args'])
args_formats = args.split(' || ')
desc = translate(_cmd['description'])
args_desc = translate(_cmd['args_description'])
_file.write('[[command_{0}_{1}]]\n'.format(plugin, command))
_file.write('[command]*`{0}`* {1}::\n\n'.format(command, desc))
_file.write('----\n')
prefix = '/' + command + ' '
if args_formats != ['']:
for fmt in args_formats:
_file.write(prefix + fmt + '\n')
prefix = ' ' * len(prefix)
if args_desc:
_file.write('\n')
for line in args_desc.split('\n'):
_file.write(line + '\n')
_file.write('----\n\n')
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'commands')
# write config options
for config in options:
filename = directory + '/user/' + config + '_options.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
for section in sorted(options[config]):
for option in sorted(options[config][section]):
_opt = options[config][section][option]
opt_type = _opt['type']
string_values = _opt['string_values']
default_value = _opt['default_value']
opt_min = _opt['min']
opt_max = _opt['max']
null_value_allowed = _opt['null_value_allowed']
desc = translate(_opt['description'])
type_nls = translate(opt_type)
values = ''
if opt_type == 'boolean':
values = 'on, off'
elif opt_type == 'integer':
if string_values:
values = string_values.replace('|', ', ')
else:
values = '{0} .. {1}'.format(opt_min, opt_max)
elif opt_type == 'string':
if opt_max <= 0:
values = _('any string')
elif opt_max == 1:
values = _('any char')
elif opt_max > 1:
values = '{0} ({1}: {2})'.format(_('any string'),
_('max chars'),
opt_max)
else:
values = _('any string')
default_value = '"{0}"'.format(
default_value.replace('"', '\\"'))
elif opt_type == 'color':
values = _('a WeeChat color name (default, black, '
'(dark)gray, white, (light)red, '
'(light)green, brown, yellow, (light)blue, '
'(light)magenta, (light)cyan), a terminal '
'color number or an alias; attributes are '
'allowed before color (for text color '
'only, not background): \"*\" for bold, '
'\"!\" for reverse, \"/\" for italic, '
'\"_\" for underline')
_file.write('* [[option_{0}.{1}.{2}]] *{3}.{4}.{5}*\n'
''.format(config, section, option, config,
section, option))
_file.write('** {0}: `{1}`\n'.format(_('description'),
desc))
_file.write('** {0}: {1}\n'.format(_('type'), type_nls))
_file.write('** {0}: {1} ({2}: `{3}`)\n'
''.format(_('values'), values,
_('default value'), default_value))
if null_value_allowed:
_file.write('** {0}\n'
''.format(
_('undefined value allowed (null)')))
_file.write('\n')
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'options')
# write infos hooked
filename = directory + '/plugin_api/infos.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
_file.write('[width="100%",cols="^1,^2,6,6",options="header"]\n')
_file.write('|===\n')
_file.write('| {0} | {1} | {2} | {3}\n\n'
''.format(_('Plugin'), _('Name'), _('Description'),
_('Arguments')))
for plugin in sorted(infos):
for info in sorted(infos[plugin]):
_inf = infos[plugin][info]
desc = translate(_inf['description'])
args_desc = translate(_inf['args_description'] or '-')
_file.write('| {0} | {1} | {2} | {3}\n\n'
''.format(escape(plugin), escape(info),
escape(desc), escape(args_desc)))
_file.write('|===\n')
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'infos')
# write infos (hashtable) hooked
filename = directory + '/plugin_api/infos_hashtable.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
_file.write('[width="100%",cols="^1,^2,6,6,6",options="header"]\n')
_file.write('|===\n')
_file.write('| {0} | {1} | {2} | {3} | {4}\n\n'
''.format(_('Plugin'), _('Name'), _('Description'),
_('Hashtable (input)'), _('Hashtable (output)')))
for plugin in sorted(infos_hashtable):
for info in sorted(infos_hashtable[plugin]):
_inh = infos_hashtable[plugin][info]
desc = translate(_inh['description'])
args_desc = translate(_inh['args_description'])
output_desc = translate(_inh['output_description']) or '-'
_file.write('| {0} | {1} | {2} | {3} | {4}\n\n'
''.format(escape(plugin), escape(info),
escape(desc), escape(args_desc),
escape(output_desc)))
_file.write('|===\n')
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'infos_hashtable')
# write infolists hooked
filename = directory + '/plugin_api/infolists.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
_file.write('[width="100%",cols="^1,^2,5,5,5",options="header"]\n')
_file.write('|===\n')
_file.write('| {0} | {1} | {2} | {3} | {4}\n\n'
''.format(_('Plugin'), _('Name'), _('Description'),
_('Pointer'), _('Arguments')))
for plugin in sorted(infolists):
for infolist in sorted(infolists[plugin]):
_inl = infolists[plugin][infolist]
desc = translate(_inl['description'])
pointer_desc = translate(_inl['pointer_description']) or '-'
args_desc = translate(_inl['args_description']) or '-'
_file.write('| {0} | {1} | {2} | {3} | {4}\n\n'
''.format(escape(plugin), escape(infolist),
escape(desc), escape(pointer_desc),
escape(args_desc)))
_file.write('|===\n')
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'infolists')
# write hdata hooked
filename = directory + '/plugin_api/hdata.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
for plugin in sorted(hdata):
for hdata_name in sorted(hdata[plugin]):
_hda = hdata[plugin][hdata_name]
desc = translate(_hda['description'])
variables = _hda['vars']
variables_update = _hda['vars_update']
lists = _hda['lists']
_file.write('* \'{0}\': {1}\n'.format(escape(hdata_name),
escape(desc)))
_file.write('** {0}: {1}\n'.format(_('plugin'),
escape(plugin)))
_file.write('** {0}:\n{1}'.format(_('variables'),
escape(variables)))
if variables_update:
_file.write('** {0}:\n{1}'.format(
_('update allowed'),
escape(variables_update)))
if lists:
_file.write('** {0}:\n{1}'.format(_('lists'),
escape(lists)))
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'hdata')
# write completions hooked
filename = directory + '/plugin_api/completions.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
_file.write('[width="65%",cols="^1,^2,8",options="header"]\n')
_file.write('|===\n')
_file.write('| {0} | {1} | {2}\n\n'
''.format(_('Plugin'), _('Name'), _('Description')))
for plugin in sorted(completions):
for completion_item in sorted(completions[plugin]):
_cmp = completions[plugin][completion_item]
desc = translate(_cmp['description'])
_file.write('| {0} | {1} | {2}\n\n'
''.format(escape(plugin), escape(completion_item),
escape(desc)))
_file.write('|===\n')
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'completions')
# write url options
filename = directory + '/plugin_api/url_options.asciidoc'
tmpfilename = filename + '.tmp'
_file = open(tmpfilename, 'w')
_file.write('[width="100%",cols="2,^1,7",options="header"]\n')
_file.write('|===\n')
_file.write('| {0} | {1} | {2}\n\n'
''.format(_('Option'), _('Type'),
_('Constants') + ' ^(1)^'))
for option in url_options:
constants = option['constants']
if constants:
constants = ' ' + constants
_file.write('| {0} | {1} |{2}\n\n'
''.format(escape(option['name']),
escape(option['type']),
escape(constants)))
_file.write('|===\n')
_file.close()
update_file(filename, tmpfilename, num_files, num_files_updated,
'url_options')
# write counters
weechat.prnt('',
'docgen: {0}: {1:3d} files '
'({2:2d} cmd, {3:2d} opt, {4:2d} infos, '
'{5:2d} infos_hash, {6:2d} infolists, {7:2d} hdata, '
'{8:2d} complt)'
''.format(locale,
num_files['total1'],
num_files['commands'],
num_files['options'],
num_files['infos'],
num_files['infos_hashtable'],
num_files['infolists'],
num_files['hdata'],
num_files['completions']))
weechat.prnt('',
' '
'{0:3d} updated ({1:2d} cmd, {2:2d} opt, {3:2d} infos, '
'{4:2d} infos_hash, {5:2d} infolists, {6:2d} hdata, '
'{7:2d} complt)'
''.format(num_files_updated['total1'],
num_files_updated['commands'],
num_files_updated['options'],
num_files_updated['infos'],
num_files_updated['infos_hashtable'],
num_files_updated['infolists'],
num_files_updated['hdata'],
num_files_updated['completions']))
weechat.prnt('',
'docgen: total: {0} files, {1} updated'
''.format(num_files['total2'], num_files_updated['total2']))
return weechat.WEECHAT_RC_OK
def docgen_completion_cb(data, completion_item, buf, completion):
"""Callback for completion."""
for locale in LOCALE_LIST:
weechat.hook_completion_list_add(completion, locale, 0,
weechat.WEECHAT_LIST_POS_SORT)
return weechat.WEECHAT_RC_OK
if __name__ == '__main__' and IMPORT_OK:
if weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION,
SCRIPT_LICENSE, SCRIPT_DESC, '', ''):
weechat.hook_command(SCRIPT_COMMAND,
'Documentation generator.',
'[locales]',
'locales: list of locales to build (by default '
'build all locales)',
'%(docgen_locales)|%*',
'docgen_cmd_cb', '')
weechat.hook_completion('docgen_locales', 'locales for docgen',
'docgen_completion_cb', '')
if not weechat.config_is_set_plugin('path'):
weechat.config_set_plugin('path', DEFAULT_PATH)
| guns/weechat | doc/docgen.py | Python | gpl-3.0 | 29,781 | 0.000067 |
from six import iteritems
class Playlist:
is_folder = False
playlist_persistent_id = None
parent_persistent_id = None
distinguished_kind = None
playlist_id = None
def __init__(self, playListName=None):
self.name = playListName
self.tracks = []
def __iter__(self):
for attr, value in iteritems(self.__dict__):
yield attr, value
def ToDict(self):
return {key: value for (key, value) in self}
| liamks/pyitunes | libpytunes/Playlist.py | Python | mit | 470 | 0.002128 |
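Since __iter__ yields (attribute, value) pairs from the instance __dict__, ToDict() returns exactly the attributes set in __init__; a quick usage sketch:

# Usage sketch for the Playlist class above.
p = Playlist('Favourites')
p.tracks.append('track-id-1')
print(p.ToDict())  # {'name': 'Favourites', 'tracks': ['track-id-1']}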
#!/usr/bin/python
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: k5_novnc_console
short_description: Display the URL to the NoVNC Console
version_added: "1.0"
description:
    - Returns a URL to the noVNC console.
options:
server_name:
description:
- Name of the server.
required: true
default: None
k5_auth:
description:
- dict of k5_auth module output.
required: true
default: None
requirements:
- "python >= 2.6"
'''
EXAMPLES = '''
# Get novnc url
- k5_novnc_console:
server_name: test01
k5_auth: "{{ k5_auth_facts }}"
'''
RETURN = '''
k5_novnc_console_facts:
description: Dictionary describing the novnc details.
returned: On success when the server is found
type: dictionary
contains:
id:
description: Router ID.
type: string
sample: "474acfe5-be34-494c-b339-50f06aa143e4"
'''
import requests
import os
import json
from ansible.module_utils.basic import *
############## Common debug ###############
k5_debug = False
k5_debug_out = []
def k5_debug_get():
"""Return our debug list"""
return k5_debug_out
def k5_debug_clear():
    """Clear our debug list"""
    global k5_debug_out
    k5_debug_out = []
def k5_debug_add(s):
"""Add string to debug list if env K5_DEBUG is defined"""
if k5_debug:
k5_debug_out.append(s)
############## functions #############
def k5_get_endpoint(e,name):
"""Pull particular endpoint name from dict"""
return e['endpoints'][name]
def k5_get_server_facts(module, k5_facts):
"""Get server facts"""
endpoint = k5_facts['endpoints']['compute']
auth_token = k5_facts['auth_token']
session = requests.Session()
headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': auth_token }
url = endpoint + '/servers/detail'
k5_debug_add('endpoint: {0}'.format(endpoint))
k5_debug_add('REQ: {0}'.format(url))
k5_debug_add('headers: {0}'.format(headers))
try:
response = session.request('GET', url, headers=headers)
except requests.exceptions.RequestException as e:
module.fail_json(msg=e)
# we failed to get data
if response.status_code not in (200,):
module.fail_json(msg="RESP: HTTP Code:" + str(response.status_code) + " " + str(response.content), debug=k5_debug_out)
if 'servers' in response.json():
return response.json()
else:
module.fail_json(msg="Missing servers in response to server details request")
def k5_get_novnc_console(module):
"""Get novnc url"""
global k5_debug
k5_debug_clear()
if 'K5_DEBUG' in os.environ:
k5_debug = True
if 'auth_spec' in module.params['k5_auth']:
k5_facts = module.params['k5_auth']
else:
module.fail_json(msg="k5_auth_facts not found, have you run k5_auth?")
endpoint = k5_facts['endpoints']['compute']
auth_token = k5_facts['auth_token']
server_name = module.params['server_name']
# we need the server_id not server_name, so grab it
server_facts = k5_get_server_facts(module, k5_facts)
server_id = ''
for s in server_facts['servers']:
if s['name'] == server_name:
server_id = s['id']
break
if server_id == '':
if k5_debug:
module.exit_json(changed=False, msg="Server " + server_name + " not found", debug=k5_debug_out)
else:
module.exit_json(changed=False, msg="Server " + server_name + " not found")
k5_debug_add('auth_token: {0}'.format(auth_token))
k5_debug_add('server_name: {0}'.format(server_name))
session = requests.Session()
headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': auth_token }
url = endpoint + '/servers/' + server_id + '/action'
query_json = { 'os-getVNCConsole': {'type': 'novnc' }}
k5_debug_add('endpoint: {0}'.format(endpoint))
k5_debug_add('REQ: {0}'.format(url))
k5_debug_add('headers: {0}'.format(headers))
k5_debug_add('json: {0}'.format(query_json))
try:
response = session.request('POST', url, headers=headers, json=query_json)
except requests.exceptions.RequestException as e:
module.fail_json(msg=e)
# we failed to make a change
if response.status_code not in (200,):
module.fail_json(msg="RESP: HTTP Code:" + str(response.status_code) + " " + str(response.content), debug=k5_debug_out)
if k5_debug:
module.exit_json(changed=True, msg="Get URL Successful", k5_novnc_console_facts=response.json(), debug=k5_debug_out )
module.exit_json(changed=True, msg="Get URL Successful", k5_novnc_console_facts=response.json() )
######################################################################################
def main():
module = AnsibleModule( argument_spec=dict(
server_name = dict(required=True, default=None, type='str'),
k5_auth = dict(required=True, default=None, type='dict')
) )
k5_get_novnc_console(module)
######################################################################################
if __name__ == '__main__':
main()
| mohclips/k5-ansible-modules | k5_novnc_console.py | Python | gpl-3.0 | 5,289 | 0.008697 |
# -*- coding: utf-8 -*-
"""
The Yacas domain.
:copyright: Copyright 2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx import addnodes
from sphinx.roles import XRefRole
from sphinx.locale import l_, _
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.util.nodes import make_refnode
from sphinx.util.compat import Directive
from sphinx.util.docfields import Field, GroupedField, TypedField
# REs for Yacas signatures
yacas_sig_re = re.compile(
r'''^ (prefix|infix|postfix|bodied)? \s* # syntax
([\@a-zA-Z0-9'!*+-/^<>:=]+) \s* # thing name
(?: \((.*)\) # optional: arguments
)? $ # and nothing more
''', re.VERBOSE)
def _pseudo_parse_arglist(signode, arglist):
""""Parse" a list of arguments separated by commas.
Arguments can have "optional" annotations given by enclosing them in
brackets. Currently, this will split at any comma, even if it's inside a
string literal (e.g. default argument value).
"""
paramlist = addnodes.desc_parameterlist()
stack = [paramlist]
try:
for argument in arglist.split(','):
argument = argument.strip()
ends_open = ends_close = 0
while argument.startswith('['):
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
argument = argument[1:].strip()
while argument.startswith(']'):
stack.pop()
argument = argument[1:].strip()
while argument.endswith(']'):
ends_close += 1
argument = argument[:-1].strip()
while argument.endswith('['):
ends_open += 1
argument = argument[:-1].strip()
if argument:
stack[-1] += addnodes.desc_parameter(argument, argument)
while ends_open:
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
ends_open -= 1
while ends_close:
stack.pop()
ends_close -= 1
if len(stack) != 1:
raise IndexError
except IndexError:
# if there are too few or too many elements on the stack, just give up
# and treat the whole argument list as one argument, discarding the
# already partially populated paramlist node
signode += addnodes.desc_parameterlist()
signode[-1] += addnodes.desc_parameter(arglist, arglist)
else:
signode += paramlist
class YacasObject(ObjectDescription):
"""
Description of a general Yacas object.
"""
option_spec = {
'noindex': directives.flag,
'module': directives.unchanged,
'annotation': directives.unchanged,
}
doc_field_types = [
        Field('parameter', label=l_('Arguments'), names=('param',)),
Field('returnvalue', label=l_('Returns'), has_arg=False,
names=('returns', 'return')),
]
def get_signature_prefix(self, sig):
"""May return a prefix to put before the object name in the
signature.
"""
return ''
def needs_arglist(self):
"""May return true if an empty argument list is to be generated even if
the document contains none.
"""
return self.objtype == 'function'
def handle_signature(self, sig, signode):
"""Transform a Yacas signature into RST nodes.
Return (fully qualified name of the thing, classname if any).
If inside a class, the current class name is handled intelligently:
* it is stripped from the displayed name if present
* it is added to the full name (return value) if not present
"""
m = yacas_sig_re.match(sig)
if m is None:
raise ValueError
syntax, name, arglist = m.groups()
add_module = False
fullname = name
signode['fullname'] = fullname
sig_prefix = self.get_signature_prefix(sig)
if sig_prefix:
signode += addnodes.desc_annotation(sig_prefix, sig_prefix)
if add_module and self.env.config.add_module_names:
modname = self.options.get(
'module', self.env.temp_data.get('ys:module'))
if modname:
nodetext = modname + '.'
signode += addnodes.desc_addname(nodetext, nodetext)
anno = self.options.get('annotation')
if syntax == 'prefix':
signode += addnodes.desc_name(name, name)
signode += addnodes.desc_type(arglist, arglist)
return fullname, ''
if syntax == 'infix':
left, right = arglist.split(',')
left = left + ' '
right = ' ' + right
signode += addnodes.desc_type(left, left)
signode += addnodes.desc_name(name, name)
signode += addnodes.desc_type(right, right)
return fullname, ''
if syntax == 'postfix':
signode += addnodes.desc_type(arglist, arglist)
signode += addnodes.desc_name(name, name)
return fullname, ''
signode += addnodes.desc_name(name, name)
if not arglist:
if self.needs_arglist():
# for callables, add an empty parameter list
signode += addnodes.desc_parameterlist()
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, ''
if (syntax == 'bodied'):
body = arglist.split(',')[0]
arglist = str.join(',', arglist.split(',')[1:])
_pseudo_parse_arglist(signode, arglist)
if (syntax == 'bodied'):
signode += addnodes.desc_type(' ' + body, ' ' + body)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, ''
def get_index_text(self, modname, name):
"""Return the text for the index entry of the object."""
if self.objtype == 'function':
return _('%s()') % name[0]
elif self.objtype == 'data':
return _('%s') % name[0]
else:
return ''
def add_target_and_index(self, name_cls, sig, signode):
modname = self.options.get(
'module', self.env.temp_data.get('ys:module'))
fullname = (modname and modname + '.' or '') + name_cls[0]
# note target
if fullname not in self.state.document.ids:
signode['names'].append(fullname)
signode['ids'].append(fullname)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
objects = self.env.domaindata['ys']['objects']
if fullname in objects:
self.state_machine.reporter.warning(
'duplicate object description of %s, ' % fullname +
'other instance in ' +
self.env.doc2path(objects[fullname][0]) +
', use :noindex: for one of them',
line=self.lineno)
objects[fullname] = (self.env.docname, self.objtype)
indextext = self.get_index_text(modname, name_cls)
if indextext:
self.indexnode['entries'].append(('single', indextext,
fullname, '', None))
def before_content(self):
# needed for automatic qualification of members (reset in subclasses)
self.clsname_set = False
def after_content(self):
if self.clsname_set:
self.env.temp_data['yacas:class'] = None
class YacasXRefRole(XRefRole):
def process_link(self, env, refnode, has_explicit_title, title, target):
refnode['ys:module'] = env.temp_data.get('ys:module')
refnode['ys:class'] = env.temp_data.get('ys:class')
if not has_explicit_title:
title = title.lstrip('.') # only has a meaning for the target
target = target.lstrip('~') # only has a meaning for the title
# if the first character is a tilde, don't display the module/class
# parts of the contents
if title[0:1] == '~':
title = title[1:]
dot = title.rfind('.')
if dot != -1:
title = title[dot+1:]
# if the first character is a dot, search more specific namespaces first
# else search builtins first
if target[0:1] == '.':
target = target[1:]
refnode['refspecific'] = True
return title, target
class YacasDomain(Domain):
"""Yacas language domain."""
name = 'ys'
label = 'Yacas'
object_types = {
'function': ObjType(l_('function'), 'func', 'obj'),
'data': ObjType(l_('data'), 'data', 'obj'),
}
directives = {
        'function': YacasObject,  # YacasModulelevel
        'data': YacasObject,  # YacasModulelevel
}
roles = {
'data': YacasXRefRole(),
'func': YacasXRefRole(fix_parens=True),
'const': YacasXRefRole(),
}
initial_data = {
'objects': {}, # fullname -> docname, objtype
}
def clear_doc(self, docname):
for fullname, (fn, _) in list(self.data['objects'].items()):
if fn == docname:
del self.data['objects'][fullname]
def find_obj(self, env, modname, classname, name, type, searchmode=0):
"""Find a Yacas object for "name", perhaps using the given module
and/or classname. Returns a list of (name, object entry) tuples.
"""
# skip parens
if name[-2:] == '()':
name = name[:-2]
if not name:
return []
objects = self.data['objects']
matches = []
newname = None
if searchmode == 1:
objtypes = self.objtypes_for_role(type)
if objtypes is not None:
if modname and classname:
fullname = modname + '.' + classname + '.' + name
if fullname in objects and objects[fullname][1] in objtypes:
newname = fullname
if not newname:
if modname and modname + '.' + name in objects and \
objects[modname + '.' + name][1] in objtypes:
newname = modname + '.' + name
elif name in objects and objects[name][1] in objtypes:
newname = name
else:
# "fuzzy" searching mode
searchname = '.' + name
matches = [(oname, objects[oname]) for oname in objects
if oname.endswith(searchname)
and objects[oname][1] in objtypes]
else:
# NOTE: searching for exact match, object type is not considered
if name in objects:
newname = name
elif type == 'mod':
# only exact matches allowed for modules
return []
elif classname and classname + '.' + name in objects:
newname = classname + '.' + name
elif modname and modname + '.' + name in objects:
newname = modname + '.' + name
elif modname and classname and \
modname + '.' + classname + '.' + name in objects:
newname = modname + '.' + classname + '.' + name
# special case: builtin exceptions have module "exceptions" set
elif type == 'exc' and '.' not in name and \
'exceptions.' + name in objects:
newname = 'exceptions.' + name
# special case: object methods
elif type in ('func', 'meth') and '.' not in name and \
'object.' + name in objects:
newname = 'object.' + name
if newname is not None:
matches.append((newname, objects[newname]))
return matches
def resolve_xref(self, env, fromdocname, builder,
type, target, node, contnode):
modname = node.get('ys:module')
clsname = node.get('ys:class')
searchmode = node.hasattr('refspecific') and 1 or 0
matches = self.find_obj(env, modname, clsname, target,
type, searchmode)
if not matches:
return None
elif len(matches) > 1:
env.warn_node(
'more than one target found for cross-reference '
'%r: %s' % (target, ', '.join(match[0] for match in matches)),
node)
name, obj = matches[0]
if obj[1] == 'module':
# get additional info for modules
docname, synopsis, platform, deprecated = self.data['modules'][name]
assert docname == obj[0]
title = name
if synopsis:
title += ': ' + synopsis
if deprecated:
title += _(' (deprecated)')
if platform:
title += ' (' + platform + ')'
return make_refnode(builder, fromdocname, docname,
'module-' + name, contnode, title)
else:
return make_refnode(builder, fromdocname, obj[0], name,
contnode, name)
def get_objects(self):
for refname, (docname, type) in self.data['objects'].items():
yield (refname, refname, type, docname, refname, 1)
def setup(sphinx):
sphinx.add_domain(YacasDomain)
| martanoga/yacas | docs/util/yacasdomain.py | Python | lgpl-2.1 | 13,972 | 0.001288 |
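To illustrate what yacas_sig_re captures from a directive signature (a quick sketch; the three groups are syntax, name, and the raw argument list):

# Sketch: groups captured by yacas_sig_re from the module above.
print(yacas_sig_re.match("infix + (x, y)").groups())  # ('infix', '+', 'x, y')
print(yacas_sig_re.match("Sin(x)").groups())          # (None, 'Sin', 'x')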
import unittest
from datetime import timedelta, datetime
import sys
import json
sys.path.append("../../config")
sys.path.append("../../html")
import ghObjects
import ghObjectRecipe
class testObjects(unittest.TestCase):
def setUp(self):
        # nothing yet
self.test = "rad"
def test_spawnHTML(self):
# arrange
spawnName = "testspawn"
s = ghObjects.resourceSpawn()
s.spawnID = 42
s.spawnName = spawnName
s.spawnGalaxy = 1
s.resourceType = "wood_deciduous_yavin4"
s.resourceTypeName = "Yavinian Deciduous Wood"
s.containerType = "flora_structural"
s.stats.CR = 0
s.stats.CD = 0
s.stats.DR = 780
s.stats.FL = 0
s.stats.HR = 0
s.stats.MA = 560
s.stats.PE = 0
s.stats.OQ = 656
s.stats.SR = 450
s.stats.UT = 800
s.stats.ER = 0
s.percentStats.CR = None
s.percentStats.CD = None
s.percentStats.DR = 780.0/800
s.percentStats.FL = None
s.percentStats.HR = None
s.percentStats.MA = 160.0/400
s.percentStats.PE = None
s.percentStats.OQ = 656.0/1000
s.percentStats.SR = 150.0/400
s.percentStats.UT = 800.0/800
s.percentStats.ER = None
        s.entered = datetime.now() - timedelta(4)
s.enteredBy = "ioscode"
        s.verified = datetime.now() - timedelta(3)
s.verifiedBy = "tester"
s.unavailable = None
s.unavailableBy = None
s.maxWaypointConc = None
# act
mobileHTML = s.getMobileHTML("", 0, 0)
normalHTML = s.getHTML(0, "", "", 0, 0)
rowHTML = s.getRow(False)
invHTML = s.getInventoryObject()
spawnJSON = s.getJSON()
spawnJSON = "{ " + spawnJSON[:-2] + " }"
#assert
self.assertIn("ioscode", mobileHTML, "Username not in mobile HTML.")
self.assertIn("ioscode", normalHTML, "Username not in normal HTML.")
self.assertIn(spawnName, rowHTML, "No spawn name in row HTML.")
self.assertIn(spawnName, invHTML, "No spawn name in inventory HTML.")
try:
jsonObject = json.loads(spawnJSON)
jsonValid = True
except ValueError:
jsonValid = False
self.assertTrue(jsonValid, "Generated Spawn JSON output not valid.")
def test_recipeRender(self):
        # arrange
r = ghObjectRecipe.schematicRecipe()
r.recipeID = 1
r.schematicID = "armor_segment_composite_advanced"
r.recipeName = "Test Recipe"
i1 = ghObjectRecipe.recipeIngredient("steel_kiirium", "17895", "armor_layer_weld_tabs", 8, "0", "Kiirium Steel", 455, "stuff steel")
i2 = ghObjectRecipe.recipeIngredient("copper_polysteel", "13455", "segment_mounting_tabs", 5, "0", "Polysteel Copper", 877, "This is great")
r.recipeIngredients.append(i1)
r.recipeIngredients.append(i2)
# act
slotHTML = r.getIngredientSlots()
rowHTML = r.getRow()
# assert
self.assertIn("steel_kiirium", slotHTML, "Resource id not in slot html.")
self.assertIn("Test Recipe", rowHTML, "Title not in row html.")
self.assertIn("yellow", slotHTML, "Expected quality color not present in slot HTML.")
if __name__ == '__main__':
unittest.main()
| pwillworth/galaxyharvester | test/pyunit/testObjects.py | Python | gpl-3.0 | 2,899 | 0.029665 |
import json
import dnot
from mock import patch
import unittest2
class NotifierTest(unittest2.TestCase):
@patch("dnot.sns.connect_to_region")
def test_parameters_are_submitted(self, connect_to_region_mock):
topic = "abc"
region = "eu-west-2"
result_topic = "result"
stack_name = "stack1"
params = '{"key": "value"}'
notifier = dnot.Notifier(sns_region=region)
notifier.publish(sns_topic_arn=topic, stack_name=stack_name, result_topic=result_topic, params=params)
connect_to_region_mock.assert_called_with(region)
message = json.loads('{{"stackName": "{0}", "notificationARN": "{1}", "region": "eu-west-1", "params": {2}}}'
.format(stack_name, result_topic, params))
connect_to_region_mock.return_value.publish.assert_called_with(
topic=topic,
message=json.dumps(message))
| ImmobilienScout24/aws-deployment-notifier | src/unittest/python/notifier_tests.py | Python | apache-2.0 | 900 | 0.003333 |
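Per the test above, the publish contract is a JSON message carrying stackName, notificationARN, region, and params; a sketch of that payload using the test's illustrative values:

# Sketch of the SNS message body the Notifier publishes (per the test above).
import json

message = {
    "stackName": "stack1",
    "notificationARN": "result",
    "region": "eu-west-1",
    "params": {"key": "value"},
}
payload = json.dumps(message)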
from __future__ import unicode_literals
from django.db import migrations, models
import multiselectfield.db.fields
class Migration(migrations.Migration):
dependencies = [
('user', '0018_auto_20160922_1258'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='activity_cantons',
field=multiselectfield.db.fields.MultiSelectField(default='', verbose_name='Défi Vélo mobile', choices=[('BS', 'Basel-Stadt'), ('BE', 'Berne'), ('FR', 'Fribourg'), ('GE', 'Geneva'), ('LU', 'Lucerne'), ('NE', 'Neuchatel'), ('SG', 'St. Gallen'), ('VS', 'Valais'), ('VD', 'Vaud'), ('ZH', 'Zurich')], max_length=29),
preserve_default=False,
),
migrations.AlterField(
model_name='userprofile',
name='affiliation_canton',
field=models.CharField(verbose_name="Canton d'affiliation", choices=[('', '---------'), ('BS', 'Basel-Stadt'), ('BE', 'Berne'), ('FR', 'Fribourg'), ('GE', 'Geneva'), ('LU', 'Lucerne'), ('NE', 'Neuchatel'), ('SG', 'St. Gallen'), ('VS', 'Valais'), ('VD', 'Vaud'), ('ZH', 'Zurich')], max_length=2),
),
]
| defivelo/db | apps/user/migrations/0019_auto_20160922_1342.py | Python | agpl-3.0 | 1,161 | 0.001726 |
from __future__ import print_function
import inspect
import numpy as np
import theano
from ..layers.advanced_activations import LeakyReLU, PReLU
from ..layers.core import Dense, Merge, Dropout, Activation, Reshape, Flatten, RepeatVector, Layer
from ..layers.core import ActivityRegularization, TimeDistributedDense, AutoEncoder, MaxoutDense
from ..layers.embeddings import Embedding, WordContextProduct
from ..layers.noise import GaussianNoise, GaussianDropout
from ..layers.normalization import BatchNormalization
from ..layers.recurrent import SimpleRNN, SimpleDeepRNN, GRU, LSTM, JZS1, JZS2, JZS3
from ..layers import containers
from .. import regularizers
from .. import constraints
def container_from_config(layer_dict):
name = layer_dict.get('name')
hasParams = False
if name == 'Merge':
mode = layer_dict.get('mode')
layers = layer_dict.get('layers')
layer_list = []
for layer in layers:
init_layer = container_from_config(layer)
layer_list.append(init_layer)
merge_layer = Merge(layer_list, mode)
return merge_layer
elif name == 'Sequential':
layers = layer_dict.get('layers')
layer_list = []
for layer in layers:
init_layer = container_from_config(layer)
layer_list.append(init_layer)
seq_layer = containers.Sequential(layer_list)
return seq_layer
elif name == 'Graph':
graph_layer = containers.Graph()
inputs = layer_dict.get('input_config')
for input in inputs:
graph_layer.add_input(**input)
nodes = layer_dict.get('node_config')
for node in nodes:
layer = container_from_config(layer_dict['nodes'].get(node['name']))
node['layer'] = layer
graph_layer.add_node(**node)
outputs = layer_dict.get('output_config')
for output in outputs:
graph_layer.add_output(**output)
return graph_layer
else:
# The case in which layer_dict represents an "atomic" layer
layer_dict.pop('name')
if 'parameters' in layer_dict:
params = layer_dict.get('parameters')
layer_dict.pop('parameters')
hasParams = True
for k, v in layer_dict.items():
# For now, this can only happen for regularizers and constraints
if isinstance(v, dict):
vname = v.get('name')
v.pop('name')
if vname in [x for x, y in inspect.getmembers(constraints, predicate=inspect.isclass)]:
layer_dict[k] = constraints.get(vname, v)
if vname in [x for x, y in inspect.getmembers(regularizers, predicate=inspect.isclass)]:
layer_dict[k] = regularizers.get(vname, v)
base_layer = get_layer(name, layer_dict)
if hasParams:
shaped_params = []
for param in params:
data = np.asarray(param.get('data'))
shape = tuple(param.get('shape'))
shaped_params.append(data.reshape(shape))
base_layer.set_weights(shaped_params)
return base_layer
def print_layer_shapes(model, input_shapes):
"""
Utility function to print the shape of the output at each layer of a Model
Arguments:
model: instance of Model / Merge
input_shapes: dict (Graph), list of tuples (Merge) or tuple (Sequential)
"""
if model.__class__.__name__ in ['Sequential', 'Merge']:
# in this case input_shapes is a tuple, or a list [shape1, shape2]
if not isinstance(input_shapes[0], tuple):
input_shapes = [input_shapes]
inputs = model.get_input(train=False)
if not isinstance(inputs, list):
inputs = [inputs]
input_dummy = [np.zeros(shape, dtype=np.float32)
for shape in input_shapes]
layers = model.layers
elif model.__class__.__name__ == 'Graph':
# in this case input_shapes is a dictionary
inputs = [model.inputs[name].input
for name in model.input_order]
input_dummy = [np.zeros(input_shapes[name], dtype=np.float32)
for name in model.input_order]
layers = [model.nodes[c['name']] for c in model.node_config]
print("input shapes : ", input_shapes)
for l in layers:
shape_f = theano.function(inputs, l.get_output(train=False).shape,
on_unused_input='ignore')
out_shape = tuple(shape_f(*input_dummy))
config = l.get_config()
print('shape after %s: %s' % (config['name'], out_shape))
from .generic_utils import get_from_module
def get_layer(identifier, kwargs=None):
return get_from_module(identifier, globals(), 'layer', instantiate=True, kwargs=kwargs)
| Cadene/keras | keras/utils/layer_utils.py | Python | mit | 4,856 | 0.002265 |
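container_from_config rebuilds atomic layers by name through get_layer; a minimal sketch of that call (illustrative kwargs for this pre-1.0 Keras API):

# Sketch: instantiating a layer by name, as container_from_config does.
# {'p': 0.5} is the dropout probability in this era of the Keras API.
layer = get_layer('Dropout', {'p': 0.5})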
import bpy
from bpy.props import StringProperty, IntProperty, CollectionProperty, EnumProperty, BoolProperty, FloatProperty
from bpy.types import PropertyGroup, UIList, Operator, Panel
from bpy_extras.io_utils import ImportHelper
from .rman_ui_base import _RManPanelHeader
from ..rfb_utils import texture_utils
from ..rfb_utils import shadergraph_utils
from ..rfb_utils import scene_utils
from ..rfb_utils import object_utils
from ..rfb_utils.prefs_utils import get_pref
from ..rfb_logger import rfb_log
from ..rman_config import __RFB_CONFIG_DICT__ as rfb_config
from .. import rman_render
from rman_utils.txmanager import txparams
from rman_utils import txmanager as txmngr
from .. import rfb_icons
import os
import uuid
class TxFileItem(PropertyGroup):
"""UIList item representing a TxFile"""
name: StringProperty(
name="Name",
description="Image name",
default="")
tooltip: StringProperty(
name="tooltip",
description="Tool Tip",
default="")
nodeID: StringProperty(
name="nodeID",
description="Node ID (hidden)",
default="")
state: IntProperty(
name="state",
description="",
default=0
)
enable: BoolProperty(
name="enable",
description="Enable or disable this TxFileItem",
default=True
)
def colorspace_names(self, context):
items = []
items.append(('0', '', ''))
try:
mdict = texture_utils.get_txmanager().txmanager.color_manager.colorspace_names()
for nm in mdict:
items.append((nm, nm, ""))
except AttributeError:
pass
return items
ocioconvert: EnumProperty(
name="Color Space",
description="colorspace",
items=colorspace_names
)
txsettings = ['texture_type',
's_mode',
't_mode',
'texture_format',
'data_type',
'resize',
'ocioconvert']
items = []
for item in txparams.TX_TYPES:
items.append((item, item, ''))
texture_type: EnumProperty(
name="Texture Type",
items=items,
description="Texture Type",
default=txparams.TX_TYPE_REGULAR)
items = []
for item in txparams.TX_WRAP_MODES:
items.append((item, item, ''))
s_mode: EnumProperty(
name="S Wrap",
items=items,
default=txparams.TX_WRAP_MODE_PERIODIC)
t_mode: EnumProperty(
name="T Wrap",
items=items,
default=txparams.TX_WRAP_MODE_PERIODIC)
items = []
for item in txparams.TX_FORMATS:
items.append((item, item, ''))
texture_format: EnumProperty(
name="Format",
default=txparams.TX_FORMAT_PIXAR,
items=items,
description="Texture format")
items = []
items.append(('default', 'default', ''))
for item in txparams.TX_DATATYPES:
items.append((item, item, ''))
data_type: EnumProperty(
name="Data Type",
default=txparams.TX_DATATYPE_FLOAT,
items=items,
description="The data storage txmake uses")
items = []
for item in txparams.TX_RESIZES:
items.append((item, item, ''))
resize: EnumProperty(
name="Resize",
default=txparams.TX_RESIZE_UP_DASH,
items=items,
description="The type of resizing flag to pass to txmake")
bumpRough: EnumProperty(
name="Bump Rough",
default="-1",
items=(
("-1", "Off", ""),
("0", "Bump Map", ""),
("1", "Normal Map", "")
)
)
bumpRough_factor: FloatProperty(
name="Scale",
default=2.0
)
bumpRough_invert: BoolProperty(
name="Invert",
default=False
)
bumpRough_invertU: BoolProperty(
name="InvertU",
default=False
)
bumpRough_invertV: BoolProperty(
name="InvertV",
default=False
)
bumpRough_refit: BoolProperty(
name="Refit",
default=False
)
class PRMAN_UL_Renderman_txmanager_list(UIList):
"""RenderMan TxManager UIList."""
def draw_item(self, context, layout, data, item, icon, active_data,
active_propname, index):
icons_map = {txmngr.STATE_MISSING: 'ERROR',
txmngr.STATE_EXISTS: 'CHECKBOX_HLT',
txmngr.STATE_IS_TEX: 'TEXTURE',
txmngr.STATE_IN_QUEUE: 'PLUS',
txmngr.STATE_PROCESSING: 'TIME',
txmngr.STATE_ERROR: 'CANCEL',
txmngr.STATE_REPROCESS: 'TIME',
txmngr.STATE_UNKNOWN: 'CANCEL',
txmngr.STATE_INPUT_MISSING: 'ERROR'}
txfile = None
if item.nodeID != "":
txfile = texture_utils.get_txmanager().txmanager.get_txfile_from_id(item.nodeID)
if txfile:
custom_icon = icons_map[txfile.state]
else:
custom_icon = 'CANCEL'
if self.layout_type in {'DEFAULT', 'COMPACT'}:
layout.label(text=item.name, icon = custom_icon)
elif self.layout_type in {'GRID'}:
layout.alignment = 'CENTER'
layout.label(text="", icon = custom_icon)
class PRMAN_OT_Renderman_txmanager_parse_scene(Operator):
"""Parse scene for textures to add to to the txmanager"""
bl_idname = "rman_txmgr_list.parse_scene"
bl_label = "Parse Scene"
bl_description = "Parse the scene and look for textures that need converting."
def execute(self, context):
rman_txmgr_list = context.scene.rman_txmgr_list
texture_utils.parse_for_textures(context.scene)
texture_utils.get_txmanager().txmake_all(blocking=False)
bpy.ops.rman_txmgr_list.refresh('EXEC_DEFAULT')
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_reset_state(Operator):
"""Reset State"""
bl_idname = "rman_txmgr_list.reset_state"
bl_label = "Reset State"
bl_description = "All texture settings will be erased and the scene will be re-parsed. All manual edits will be lost."
def execute(self, context):
rman_txmgr_list = context.scene.rman_txmgr_list
rman_txmgr_list.clear()
texture_utils.get_txmanager().txmanager.reset()
texture_utils.parse_for_textures(context.scene)
texture_utils.get_txmanager().txmake_all(blocking=False)
texture_utils.get_txmanager().txmanager.reset_state()
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_clear_unused(Operator):
"""Clear Unused"""
bl_idname = "rman_txmgr_list.clear_unused"
bl_label = "Clear Unused"
bl_description = "Clear unused textures"
def execute(self, context):
rman_txmgr_list = context.scene.rman_txmgr_list
nodeIDs = list()
for item in rman_txmgr_list:
nodeID = item.nodeID
if item.nodeID != "":
txfile = texture_utils.get_txmanager().txmanager.get_txfile_from_id(item.nodeID)
if not txfile:
nodeIDs.append(nodeID)
continue
tokens = nodeID.split('|')
if len(tokens) < 3:
continue
node_name,param,ob_name = tokens
node, ob = scene_utils.find_node_by_name(node_name, ob_name)
if not node:
continue
if getattr(node, param) != item.name:
nodeIDs.append(nodeID)
for nodeID in nodeIDs:
bpy.ops.rman_txmgr_list.remove_texture('EXEC_DEFAULT', nodeID=nodeID)
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_pick_images(Operator, ImportHelper):
"""Pick images from a directory."""
bl_idname = "rman_txmgr_list.pick_images"
bl_label = "Pick Images"
bl_description = "Manually choose images on disk to convert."
filename: StringProperty(maxlen=1024)
directory: StringProperty(maxlen=1024)
files: CollectionProperty(type=bpy.types.PropertyGroup)
def execute(self, context):
rman_txmgr_list = context.scene.rman_txmgr_list
if len(self.files) > 0:
for f in self.files:
img = os.path.join(self.directory, f.name)
nodeID = str(uuid.uuid1())
texture_utils.get_txmanager().txmanager.add_texture(nodeID, img)
bpy.ops.rman_txmgr_list.add_texture('EXEC_DEFAULT', filepath=img, nodeID=nodeID)
texture_utils.get_txmanager().txmake_all(blocking=False)
texture_utils.get_txmanager().txmanager.save_state()
PRMAN_PT_Renderman_txmanager_list.refresh_panel(context)
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_clear_all_cache(Operator):
"""Clear RenderMan Texture cache"""
bl_idname = "rman_txmgr_list.clear_all_cache"
bl_label = "Flush Texture Cache"
bl_description = "Tell the core RenderMan to flush its texture cache."
def execute(self, context):
rr = rman_render.RmanRender.get_rman_render()
if rr.rman_interactive_running and rr.sg_scene:
texture_list = list()
for item in context.scene.rman_txmgr_list:
if item.nodeID != "":
output_texture = texture_utils.get_txmanager().get_output_tex_from_id(item.nodeID)
texture_list.append(output_texture)
if texture_list:
rr.rman_scene_sync.flush_texture_cache(texture_list)
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_reconvert_all(Operator):
"""Clear all .tex files and re-convert."""
bl_idname = "rman_txmgr_list.reconvert_all"
bl_label = "RE-Convert All"
bl_description = "Clear all .tex files for all input images and re-convert."
def execute(self, context):
texture_utils.get_txmanager().txmanager.delete_texture_files()
texture_utils.get_txmanager().txmake_all(blocking=False)
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_reconvert_selected(Operator):
"""Clear all .tex files and re-convert selected."""
bl_idname = "rman_txmgr_list.reconvert_selected"
bl_label = "RE-Convert Selected"
bl_description = "Clear all .tex files for selected image and re-convert"
def execute(self, context):
idx = context.scene.rman_txmgr_list_index
item = context.scene.rman_txmgr_list[idx]
txfile = None
txfile = texture_utils.get_txmanager().txmanager.get_txfile_from_id(item.nodeID)
if txfile:
rr = rman_render.RmanRender.get_rman_render()
txfile.delete_texture_files()
txfile.build_texture_dict()
if item.nodeID:
rr.rman_scene_sync.texture_updated(item.nodeID)
texture_utils.get_txmanager().txmake_all(blocking=False)
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_apply_preset(Operator):
"""Apply current settings to the selected texture."""
bl_idname = "rman_txmgr_list.apply_preset"
bl_label = "Apply preset"
bl_description = "Apply the current settings for this input image and re-convert."
def execute(self, context):
idx = context.scene.rman_txmgr_list_index
item = context.scene.rman_txmgr_list[idx]
txsettings = dict()
for attr in item.txsettings:
val = getattr(item, attr)
if attr == 'data_type' and val == 'default':
val = None
txsettings[attr] = val
        # bump rough (b2r) settings
bumprough = dict()
if item.bumpRough != "-1":
bumprough['normalmap'] = int(item.bumpRough)
bumprough['factor'] = item.bumpRough_factor
bumprough['invert'] = int(item.bumpRough_invert)
bumprough['invertU'] = int(item.bumpRough_invertU)
bumprough['invertV'] = int(item.bumpRough_invertV)
bumprough['refit'] = int(item.bumpRough_refit)
else:
bumprough = list()
txsettings['bumprough'] = bumprough
if txsettings:
txfile = None
txfile = texture_utils.get_txmanager().txmanager.get_txfile_from_id(item.nodeID)
if txfile:
txfile.params.from_dict(txsettings)
txfile.delete_texture_files()
txfile.build_texture_dict()
texture_utils.get_txmanager().txmake_all(blocking=False)
texture_utils.get_txmanager().txmanager.save_state()
# update any nodes with colorspace in it
tokens = item.nodeID.split('|')
if len(tokens) < 3:
return {'FINISHED'}
node_name,param,ob_name = tokens
prop_colorspace_name = '%s_colorspace' % param
try:
mdict = texture_utils.get_txmanager().txmanager.color_manager.colorspace_names()
val = 0
for i, nm in enumerate(mdict):
if nm == item.ocioconvert:
val = i+1
break
node, ob = scene_utils.find_node_by_name(node_name, ob_name)
if node:
node[prop_colorspace_name] = val
except AttributeError:
pass
return {'FINISHED'}
class PRMAN_OT_Renderman_txmanager_add_texture(Operator):
"""Add texture."""
bl_idname = "rman_txmgr_list.add_texture"
bl_label = "add_texture"
filepath: StringProperty()
nodeID: StringProperty()
def execute(self, context):
if self.nodeID == "":
return {'FINISHED'}
txfile = texture_utils.get_txmanager().txmanager.get_txfile_from_id(self.nodeID)
if not txfile:
return{'FINISHED'}
item = None
# check if nodeID already exists in the list
for idx, i in enumerate(context.scene.rman_txmgr_list):
if i.nodeID == self.nodeID:
item = i
break
if not item:
item = context.scene.rman_txmgr_list.add()
item.nodeID = self.nodeID
item.name = txfile.input_image
params = txfile.params
item.texture_type = params.texture_type
item.s_mode = params.s_mode
item.t_mode = params.t_mode
item.texture_format = params.texture_format
if params.data_type is not None:
item.data_type = params.data_type
item.resize = params.resize
item.state = txfile.state
if txfile.state == txmngr.STATE_IS_TEX:
item.enable = False
else:
item.enable = True
if params.ocioconvert:
item.ocioconvert = params.ocioconvert
if params.bumprough:
bumprough = params.bumprough_as_dict()
item.bumpRough = str(bumprough['normalmap'])
item.bumpRough_factor = float(bumprough['factor'])
            item.bumpRough_invert = bool(int(bumprough['invert']))
            item.bumpRough_invertU = bool(int(bumprough['invertU']))
            item.bumpRough_invertV = bool(int(bumprough['invertV']))
            item.bumpRough_refit = bool(int(bumprough['refit']))
else:
            item.bumpRough = "-1"
item.tooltip = '\nNode ID: ' + item.nodeID + "\n" + str(txfile)
# FIXME: should also add the nodes that this texture is referenced in
return{'FINISHED'}
class PRMAN_OT_Renderman_txmanager_refresh(Operator):
"""Refresh Texture Manager"""
bl_idname = "rman_txmgr_list.refresh"
bl_label = "refresh"
filepath: StringProperty()
nodeID: StringProperty()
def execute(self, context):
for item in context.scene.rman_txmgr_list:
txfile = texture_utils.get_txmanager().txmanager.get_txfile_from_id(item.nodeID)
if not txfile:
continue
item.name = txfile.input_image
params = txfile.params
item.texture_type = params.texture_type
item.s_mode = params.s_mode
item.t_mode = params.t_mode
            item.texture_format = params.texture_format
if params.data_type is not None:
item.data_type = params.data_type
item.resize = params.resize
item.state = txfile.state
if txfile.state == txmngr.STATE_IS_TEX:
item.enable = False
else:
item.enable = True
if params.ocioconvert:
item.ocioconvert = params.ocioconvert
if params.bumprough:
bumprough = params.bumprough_as_dict()
item.bumpRough = str(bumprough['normalmap'])
item.bumpRough_factor = float(bumprough['factor'])
item.bumpRough_invert = bool(int(bumprough['invert']))
item.bumpRough_invertU = bool(int(bumprough['invertU']))
item.bumpRough_invertV = bool(int(bumprough['invertV']))
item.bumpRough_refit = bool(int(bumprough['refit']))
else:
                item.bumpRough = "-1"
            item.tooltip = '\nNode ID: ' + item.nodeID + "\n" + str(txfile)
PRMAN_PT_Renderman_txmanager_list.refresh_panel(context)
return {'FINISHED'}
class PRMAN_OT_Renderman_txmanager_remove_texture(Operator):
bl_idname = "rman_txmgr_list.remove_texture"
bl_label = "remove texture"
nodeID: StringProperty()
def execute(self, context):
for i, item in enumerate(context.scene.rman_txmgr_list):
if item.nodeID == self.properties.nodeID:
context.scene.rman_txmgr_list.remove(i)
break
return{'FINISHED'}
class PRMAN_PT_Renderman_txmanager_list(_RManPanelHeader, Panel):
"""RenderMan Texture Manager Panel."""
bl_label = "RenderMan Texture Manager"
bl_idname = "PRMAN_PT_Renderman_txmanager_list"
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "scene"
nodeID: StringProperty()
@classmethod
def refresh_panel(cls, context):
for window in context.window_manager.windows:
for area in window.screen.areas:
for space in area.spaces:
if space.type == 'PROPERTIES':
for region in area.regions:
if region.type == 'WINDOW':
region.tag_redraw()
@classmethod
def draw_txmanager_layout(cls, context, layout):
scene = context.scene
row = layout.row()
txmanager = texture_utils.get_txmanager().txmanager
row.operator('rman_txmgr_list.parse_scene', text='Parse Scene')
row.operator("rman_txmgr_list.clear_unused", icon='GPBRUSH_ERASE_HARD')
row.operator('rman_txmgr_list.reset_state', text='Reset', icon='FILE_REFRESH')
row.operator('rman_txmgr_list.pick_images', text='Pick Images', icon='FILE_FOLDER')
row.operator('rman_txmgr_list.reconvert_all', text='Reconvert All')
if scene.rman_txmgr_list_index >= 0 and scene.rman_txmgr_list:
row = layout.row()
row.template_list("PRMAN_UL_Renderman_txmanager_list", "The_List", scene,
"rman_txmgr_list", scene, "rman_txmgr_list_index", item_dyntip_propname="tooltip")
if scene.rman_txmgr_list_index < len(scene.rman_txmgr_list):
item = scene.rman_txmgr_list[scene.rman_txmgr_list_index]
row = layout.row()
row.label(text='Texture Settings')
row = layout.row()
row.enabled = item.enable
row.prop(item, "texture_type")
row = layout.row()
row.enabled = item.enable
row.prop(item, "s_mode")
row.prop(item, "t_mode")
row = layout.row()
row.enabled = item.enable
row.prop(item, "texture_format")
row = layout.row()
row.enabled = item.enable
row.prop(item, "data_type")
row = layout.row()
row.enabled = item.enable
row.prop(item, "resize")
if item.ocioconvert != '0':
row = layout.row()
row.enabled = item.enable
row.prop(item, "ocioconvert")
dst = txmanager.color_manager.scene_colorspace_name
row.label(text='%s' % dst if dst else txmngr.NO_COLORSPACE)
                # bump rough (b2r) settings
row = layout.row()
row.enabled = item.enable
row.prop(item, "bumpRough")
if item.bumpRough != "-1":
row = layout.row()
row.enabled = item.enable
row.alignment = "RIGHT"
row.label(text="")
row.prop(item, "bumpRough_factor")
row.prop(item, "bumpRough_invert")
row.prop(item, "bumpRough_invertU")
row.prop(item, "bumpRough_invertV")
row.prop(item, "bumpRough_refit")
row = layout.row()
row.enabled = item.enable
row.alignment = 'RIGHT'
row.operator('rman_txmgr_list.reconvert_selected', text='Reconvert')
row.operator('rman_txmgr_list.apply_preset', text='Apply')
row = layout.row()
row.alignment='CENTER'
in_list = len(context.scene.rman_txmgr_list)
progress = 'All Converted'
qsize = txmanager.work_queue.qsize()
if qsize != 0:
progress = 'Converting... %d left to convert' % (qsize)
else:
t_size = txmanager.file_size()
# t_size in bytes
t_size /= 1024.0 * 1024.0
unit = 'MB'
if t_size > 1024:
t_size /= 1024.0
unit = 'GB'
progress = 'All Converted (Texture Disk Space: %.2f %s)' % (t_size, unit)
row.label(text=progress)
def draw(self, context):
layout = self.layout
if get_pref('rman_ui_framework') == 'QT':
try:
from . import rman_ui_txmanager_qt
if rman_ui_txmanager_qt.__QT_LOADED__:
rman_icon = rfb_icons.get_icon('rman_txmanager')
layout.operator("rman_txmgr_list.open_txmanager", icon_value=rman_icon.icon_id)
except:
PRMAN_PT_Renderman_txmanager_list.draw_txmanager_layout(context, layout)
else:
PRMAN_PT_Renderman_txmanager_list.draw_txmanager_layout(context, layout)
class PRMAN_OT_Renderman_open_txmanager(Operator):
bl_idname = "rman_txmgr_list.open_txmanager"
bl_label = "Open TxManager"
nodeID: StringProperty(default='')
def execute(self, context):
return{'FINISHED'}
def draw(self, context):
layout = self.layout
PRMAN_PT_Renderman_txmanager_list.draw_txmanager_layout(context, layout)
def cancel(self, context):
if self.event and self.event.type == 'LEFTMOUSE':
bpy.ops.rman_txmgr_list.open_txmanager('INVOKE_DEFAULT')
def __init__(self):
self.event = None
def invoke(self, context, event):
if self.properties.nodeID != '':
for i, item in enumerate(context.scene.rman_txmgr_list):
if item.nodeID == self.properties.nodeID:
context.scene.rman_txmgr_list_index = i
break
wm = context.window_manager
width = rfb_config['editor_preferences']['texture_manager']['width']
self.event = event
return wm.invoke_props_dialog(self, width=width)
def index_updated(self, context):
'''
When the index updates, make sure the texture settings
are in sync with the txmanager.
'''
idx = context.scene.rman_txmgr_list_index
if idx < 0:
return
item = context.scene.rman_txmgr_list[idx]
txfile = None
txfile = texture_utils.get_txmanager().txmanager.get_txfile_from_id(item.nodeID)
if txfile:
params = txfile.params
item.texture_type = params.texture_type
item.s_mode = params.s_mode
item.t_mode = params.t_mode
item.texture_format = params.texture_format
if params.data_type is not None:
item.data_type = params.data_type
item.resize = params.resize
if txfile.state == txmngr.STATE_IS_TEX:
item.enable = False
else:
item.enable = True
if params.ocioconvert:
item.ocioconvert = params.ocioconvert
if params.bumprough:
bumprough = params.bumprough_as_dict()
item.bumpRough = str(bumprough['normalmap'])
item.bumpRough_factor = float(bumprough['factor'])
item.bumpRough_invert = bool(int(bumprough['invert']))
item.bumpRough_invertU = bool(int(bumprough['invertU']))
item.bumpRough_invertV = bool(int(bumprough['invertV']))
item.bumpRough_refit = bool(int(bumprough['refit']))
else:
            item.bumpRough = "-1"
item.tooltip = '\nNode ID: ' + item.nodeID + "\n" + str(txfile)
classes = [
TxFileItem,
PRMAN_UL_Renderman_txmanager_list,
PRMAN_OT_Renderman_txmanager_parse_scene,
PRMAN_OT_Renderman_txmanager_reset_state,
PRMAN_OT_Renderman_txmanager_clear_unused,
PRMAN_OT_Renderman_txmanager_pick_images,
PRMAN_OT_Renderman_txmanager_clear_all_cache,
PRMAN_OT_Renderman_txmanager_reconvert_all,
PRMAN_OT_Renderman_txmanager_reconvert_selected,
PRMAN_OT_Renderman_txmanager_apply_preset,
PRMAN_OT_Renderman_txmanager_add_texture,
PRMAN_OT_Renderman_txmanager_refresh,
PRMAN_PT_Renderman_txmanager_list,
PRMAN_OT_Renderman_txmanager_remove_texture
]
def register():
if get_pref('rman_ui_framework') == 'QT':
try:
from . import rman_ui_txmanager_qt
rman_ui_txmanager_qt.register()
except:
bpy.utils.register_class(PRMAN_OT_Renderman_open_txmanager)
else:
bpy.utils.register_class(PRMAN_OT_Renderman_open_txmanager)
for cls in classes:
bpy.utils.register_class(cls)
bpy.types.Scene.rman_txmgr_list = CollectionProperty(type = TxFileItem)
bpy.types.Scene.rman_txmgr_list_index = IntProperty(name = "RenderMan Texture Manager",
default = 0, update=index_updated)
def unregister():
del bpy.types.Scene.rman_txmgr_list
del bpy.types.Scene.rman_txmgr_list_index
for cls in classes:
try:
bpy.utils.unregister_class(cls)
except RuntimeError:
rfb_log().debug('Could not unregister class: %s' % str(cls))
pass
try:
from . import rman_ui_txmanager_qt
rman_ui_txmanager_qt.unregister()
except:
pass
|
prman-pixar/RenderManForBlender
|
rman_ui/rman_ui_txmanager.py
|
Python
|
mit
| 27,755 | 0.00508 |
'''
Created on Feb 23, 2015
@author: rgroten
'''
import ConfigParser
import ssl
from datetime import datetime
from flask.globals import g
# Import NetApp API libraries
from NaElement import NaElement
from NaServer import NaServer
# from flask.globals import g
def connect():
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
# Legacy Python that doesn't verify HTTPS certificates by default
pass
else:
# Handle target environment that doesn't support HTTPS verification
ssl._create_default_https_context = _create_unverified_https_context
naHost = getConfigOption("NAHost")
user = getConfigOption("User")
password = getConfigOption("Password")
    s = NaServer(naHost, 1, 21)
s.set_server_type("FILER")
s.set_transport_type("HTTPS")
s.set_port(443)
s.set_style("LOGIN")
s.set_admin_user(user, password)
return s
def getConfigOption(option, section=None):
config = ConfigParser.ConfigParser()
config.read("config.ini")
# If section is not provided, first check if g.env is set and use that.
# Otherwise, set section to GENERAL
if not section:
try:
if g.env:
section = g.env
except:
section = "GENERAL"
return config.get(section, option)
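# A sketch of the config.ini layout getConfigOption() assumes; every name
# and value below is illustrative, not taken from a real deployment:
#
#   [GENERAL]
#   Environments = DEV,PROD
#   Debug = False
#
#   [DEV]
#   NAHost = filer-dev.example.com
#   User = admin
#   Password = secret
#   RFCRequired = False
#
# Inside a request where g.env == "DEV", getConfigOption("NAHost") reads
# the [DEV] section; outside a request context it falls back to [GENERAL].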
def executeCmd(cmd):
isDebug = getConfigOption("Debug")
    s = connect()
if (isDebug == 'True'):
print("Request Object: " + cmd.sprintf())
ret = s.invoke_elem(cmd)
if (ret.results_status() == "failed"):
print("Error: ")
print(ret.sprintf())
# Print object for debugging
if (isDebug == 'True'):
print( "Response Object: " + ret.sprintf())
return ret
def listVolumes():
isDebug = getConfigOption("Debug")
# Build command to list volumes
cmd = NaElement("volume-get-iter")
xi = NaElement("desired-attributes")
xi1 = NaElement("volume-attributes")
xi1.child_add(NaElement("volume-id-attributes"))
xi1.child_add(NaElement("volume-snapshot-attributes"))
xi1.child_add(NaElement("volume-space-attributes"))
xi2 = NaElement("volume-clone-attributes")
xi2.child_add(NaElement("volume-clone-parent-attributes"))
xi1.child_add(xi2)
xi.child_add(xi1)
cmd.child_add(xi)
cmd.child_add_string("max-records", "500")
ret = executeCmd(cmd)
# Remove volumes from list that contain filterStrings
filterString = getConfigOption("VolFilters")
filterList = filterString.replace(" ","").split(",")
filteredVolumes = NaElement("attributes-list")
for vol in ret.child_get("attributes-list").children_get():
volattrs = vol.child_get('volume-id-attributes')
if any(x in volattrs.child_get_string('name') for x in filterList):
if (isDebug == 'True'):
print "Skipping filtered vol : %s" % volattrs.child_get_string('name')
continue
if (isDebug == 'True'):
            print('Volume Name : %s' % volattrs.child_get_string('name'))
filteredVolumes.child_add(vol)
filteredRet = NaElement("results")
filteredRet.attr_set("status", "passed")
filteredRet.child_add(filteredVolumes)
if (isDebug == 'True'):
print "Number of volumes (after filtering): " + str(ret.child_get("attributes-list").children_get().__len__())
return filteredRet
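# The nested NaElements built above serialize to a ZAPI request roughly like
# the following (abbreviated sketch, not a verbatim wire capture):
#
#   <volume-get-iter>
#     <desired-attributes>
#       <volume-attributes>
#         <volume-id-attributes/>
#         <volume-snapshot-attributes/>
#         <volume-space-attributes/>
#         <volume-clone-attributes>
#           <volume-clone-parent-attributes/>
#         </volume-clone-attributes>
#       </volume-attributes>
#     </desired-attributes>
#     <max-records>500</max-records>
#   </volume-get-iter>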
def listSnapshots(volume):
cmd = NaElement('snapshot-list-info')
cmd.child_add_string('volume', volume)
ret = executeCmd(cmd)
return ret
def createSnapshot(volume, customname=None):
if customname:
snapshotName = customname
else:
# Create snapshot format name
snapshotName = "snap_" + volume + "_" + datetime.strftime(datetime.now(), "%Y%m%d%H%M%S")
cmd = NaElement('snapshot-create')
cmd.child_add_string("volume", volume)
cmd.child_add_string("snapshot", snapshotName)
return executeCmd(cmd)
def deleteSnapshot(volume, snapshot):
cmd = NaElement('snapshot-delete')
cmd.child_add_string("snapshot", snapshot)
cmd.child_add_string("volume", volume)
return executeCmd(cmd)
def restoreSnapshot(volume, snapshot):
cmd = NaElement('snapshot-restore-volume')
cmd.child_add_string("snapshot", snapshot)
cmd.child_add_string("volume", volume)
return executeCmd(cmd)
def renameSnapshot(volume, snapshot, newName):
cmd = NaElement('snapshot-rename')
cmd.child_add_string("current-name", snapshot)
cmd.child_add_string("volume", volume)
cmd.child_add_string("new-name", newName)
return executeCmd(cmd)
def createClone(parentVolume, volume):
cmd = NaElement('volume-clone-create')
cmd.child_add_string("parent-volume", parentVolume)
cmd.child_add_string("volume", volume)
# Feature disabled for now
debugret = NaElement("results")
debugret.attr_set("status", "failed")
debugret.attr_set("reason", "Creating clones not supported...yet!")
return debugret
def getEnvs():
envs = getConfigOption("Environments", "GENERAL").split(",")
envObjs = []
for env in envs:
try:
envObj = EnvObj(env)
envObjs.append(envObj)
except Exception as e:
print str(e)
print "Error: couldn't load options for environment: " + env
return envObjs
class EnvObj:
name = ""
rfcRequired = False
def __init__(self, envName):
self.get_env_properties(envName)
def get_env_properties(self, envName):
self.name = envName
self.rfcRequired = getConfigOption("RFCRequired", envName)
return self
def get_name(self):
return self.__name
def get_rfc_required(self):
        return self.__rfcRequired
def set_name(self, value):
self.__name = value
def set_rfc_required(self, value):
self.__rfcRequired = value
def del_name(self):
del self.__name
def del_rfc_required(self):
del self.__rfcRequired
name = property(get_name, set_name, del_name, "name's docstring")
rfcRequired = property(get_rfc_required, set_rfc_required, del_rfc_required, "rfcRequired's docstring")
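# Hedged usage sketch (assumes config.ini defines an Environments option in
# [GENERAL] plus one section per environment, as illustrated above):
#
#   for env in getEnvs():
#       print "%s (RFC required: %s)" % (env.name, env.rfcRequired)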
|
rgroten/NetApp-Snapshot-Manager
|
snapmgr/NaFunctions.py
|
Python
|
gpl-2.0
| 6,288 | 0.00493 |
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from selvbetjening.sadmin2 import menu
from selvbetjening.sadmin2.decorators import sadmin_prerequisites
from selvbetjening.sadmin2.forms import UserForm, PasswordForm
from selvbetjening.sadmin2.views.generic import generic_create_view
@sadmin_prerequisites
def user_change(request, user_pk):
user = get_object_or_404(get_user_model(), pk=user_pk)
context = {
'sadmin2_menu_main_active': 'userportal',
'sadmin2_breadcrumbs_active': 'user',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_user,
'sadmin2_menu_tab_active': 'user',
'user': user
}
return generic_create_view(request,
UserForm,
reverse('sadmin2:user', kwargs={'user_pk': user.pk}),
message_success=_('User updated'),
context=context,
instance=user)
@sadmin_prerequisites
def user_password(request, user_pk):
user = get_object_or_404(get_user_model(), pk=user_pk)
context = {
'sadmin2_menu_main_active': 'userportal',
'sadmin2_breadcrumbs_active': 'user_password',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_user,
'sadmin2_menu_tab_active': 'password',
'user': user
}
return generic_create_view(request,
PasswordForm,
redirect_success_url=reverse('sadmin2:user_password', kwargs={'user_pk': user.pk}),
message_success=_('Password updated'),
context=context,
instance=user)
|
animekita/selvbetjening
|
selvbetjening/sadmin2/views/user.py
|
Python
|
mit
| 1,851 | 0.001621 |
# -*- coding: utf-8 -*-
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.utils.translation import ugettext as _
from inviMarket.models import User
@login_required
def del_partner(request, partner_id):
"""
Delete the :model:`auth.User` passed by argument from the partners list.
**Context**
``message``
A string variable used to inform the user.
**Template:**
    :template:`inviMarket/message.html`
"""
user = request.user
partner = get_object_or_404(User.objects.select_related('profile'),
pk=partner_id)
message = _("Ther user is not your partner.")
if partner.profile.partners.filter(pk=user.id).exists():
partner.profile.partners.remove(user)
message = _("The partnership proposal has been rejected.")
user.notification_set.filter(code=20, sender=partner).delete()
if user.profile.partners.filter(pk=partner_id).exists():
user.profile.partners.remove(partner)
message = _("The user is no longer your partner.")
return render(request, 'message.html', {'message': message})
|
moiseshiraldo/inviMarket
|
inviMarket/views/del_partner.py
|
Python
|
agpl-3.0
| 1,187 | 0.001685 |
from typing import Iterable, Mapping, Optional
from lib import data
from ..channel import pyramid
from ..channel import wall
def filterMessage() -> Iterable[data.ChatCommand]:
return []
def commands() -> Mapping[str, Optional[data.ChatCommand]]:
if not hasattr(commands, 'commands'):
setattr(commands, 'commands', {
'!pyramid': pyramid.commandPyramid,
'!rpyramid': pyramid.commandRandomPyramid,
'!wall': wall.commandWall,
})
return getattr(commands, 'commands')
def commandsStartWith() -> Mapping[str, Optional[data.ChatCommand]]:
if not hasattr(commandsStartWith, 'commands'):
setattr(commandsStartWith, 'commands', {
'!pyramid-': pyramid.commandPyramidLong,
'!wall-': wall.commandWallLong,
})
return getattr(commandsStartWith, 'commands')
def processNoCommand() -> Iterable[data.ChatCommand]:
return []
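# Note: commands() and commandsStartWith() above cache their mapping as an
# attribute on the function object itself, so the dict is built only once.
# A minimal sketch of the same pattern, with illustrative names:
#
#   def lookup() -> Mapping[str, int]:
#       if not hasattr(lookup, 'cache'):
#           setattr(lookup, 'cache', {'a': 1})  # built on the first call only
#       return getattr(lookup, 'cache')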
|
MeGotsThis/BotGotsThis
|
pkg/spam/items/channel.py
|
Python
|
gpl-3.0
| 941 | 0 |
""" Class defining a production step """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
import json
from DIRAC import S_OK, S_ERROR
class ProductionStep(object):
"""Define the Production Step object"""
def __init__(self, **kwargs):
"""Simple constructor"""
# Default values for transformation step parameters
self.Name = ""
self.Description = "description"
self.LongDescription = "longDescription"
self.Type = "MCSimulation"
self.Plugin = "Standard"
self.AgentType = "Manual"
self.FileMask = ""
#########################################
self.ParentStep = None
self.Inputquery = None
self.Outputquery = None
self.GroupSize = 1
self.Body = "body"
def getAsDict(self):
"""It returns the Step description as a dictionary"""
prodStepDict = {}
prodStepDict["name"] = self.Name
prodStepDict["parentStep"] = []
# check the ParentStep format
if self.ParentStep:
if isinstance(self.ParentStep, list):
prodStepDict["parentStep"] = []
for parentStep in self.ParentStep: # pylint: disable=not-an-iterable
if not parentStep.Name:
return S_ERROR("Parent Step does not exist")
prodStepDict["parentStep"].append(parentStep.Name)
elif isinstance(self.ParentStep, ProductionStep):
if not self.ParentStep.Name:
return S_ERROR("Parent Step does not exist")
prodStepDict["parentStep"] = [self.ParentStep.Name]
else:
return S_ERROR("Invalid Parent Step")
prodStepDict["description"] = self.Description
prodStepDict["longDescription"] = self.LongDescription
prodStepDict["stepType"] = self.Type
prodStepDict["plugin"] = self.Plugin
prodStepDict["agentType"] = self.AgentType
prodStepDict["fileMask"] = self.FileMask
# Optional fields
prodStepDict["inputquery"] = json.dumps(self.Inputquery)
prodStepDict["outputquery"] = json.dumps(self.Outputquery)
prodStepDict["groupsize"] = self.GroupSize
prodStepDict["body"] = json.dumps(self.Body)
return S_OK(prodStepDict)
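# Hedged usage sketch; the step names and chaining below are illustrative,
# not taken from a real production definition:
#
#   simulation = ProductionStep()
#   simulation.Name = "Simulation"
#
#   reconstruction = ProductionStep()
#   reconstruction.Name = "Reconstruction"
#   reconstruction.ParentStep = simulation  # a single step or a list of steps
#
#   res = reconstruction.getAsDict()
#   if res["OK"]:
#       stepDict = res["Value"]  # here stepDict["parentStep"] == ["Simulation"]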
|
ic-hep/DIRAC
|
src/DIRAC/ProductionSystem/Client/ProductionStep.py
|
Python
|
gpl-3.0
| 2,408 | 0.000415 |
# coding=UTF8
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mydjangoapp.settings")
app = Celery('mydjangoapp')
CELERY_TIMEZONE = 'UTC'
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
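# With app.autodiscover_tasks() above, Celery scans every app in
# INSTALLED_APPS for a tasks module. A minimal, hypothetical example of such
# a module (the path mydjangoapp/someapp/tasks.py is an assumption):
#
#   from mydjangoapp.celeryconf import app
#
#   @app.task
#   def add(x, y):
#       return x + y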
|
wenxinwilliam/docker-django-celery
|
mydjangoapp/mydjangoapp/celeryconf.py
|
Python
|
mit
| 354 | 0.002825 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'billdetails.end_date'
db.alter_column(u'employee_billdetails', 'end_date', self.gf('django.db.models.fields.DateField')(null=True))
# Changing field 'billdetails.start_date'
db.alter_column(u'employee_billdetails', 'start_date', self.gf('django.db.models.fields.DateField')(null=True))
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'billdetails.end_date'
raise RuntimeError("Cannot reverse this migration. 'billdetails.end_date' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'billdetails.end_date'
db.alter_column(u'employee_billdetails', 'end_date', self.gf('django.db.models.fields.DateField')())
# User chose to not deal with backwards NULL issues for 'billdetails.start_date'
raise RuntimeError("Cannot reverse this migration. 'billdetails.start_date' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'billdetails.start_date'
db.alter_column(u'employee_billdetails', 'start_date', self.gf('django.db.models.fields.DateField')())
models = {
u'employee.billdetails': {
'Meta': {'object_name': 'billdetails'},
'bill_type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'emp_name': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['employee.Employee']"}),
'emp_proj': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['employee.Project']"}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'employee.employee': {
'Add1': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'Add2': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'City': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'Designation': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'Major_Subject': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'Meta': {'object_name': 'Employee'},
'Qualification': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'Skill_sets': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'Visa_Status': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'Zip_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
'bill': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {}),
'doj': ('django.db.models.fields.DateField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '50'}),
'exp': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '2'}),
'id': ('django.db.models.fields.IntegerField', [], {'max_length': '6', 'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'mobile': ('django.db.models.fields.IntegerField', [], {'max_length': '12'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'personal_email': ('django.db.models.fields.EmailField', [], {'max_length': '50', 'blank': 'True'}),
'proj': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['employee.Project']"}),
'start_date': ('django.db.models.fields.DateField', [], {'blank': 'True'})
},
u'employee.project': {
'Meta': {'object_name': 'Project'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['employee']
|
asm-technologies/management
|
employee/migrations/0006_auto__chg_field_billdetails_end_date__chg_field_billdetails_start_date.py
|
Python
|
mit
| 4,781 | 0.008576 |
# -*- test-case-name: twisted.test.test_newcred -*-
from twisted.internet import defer
from twisted.python import components, failure
from twisted.cred import error, credentials
class ICredentialsChecker(components.Interface):
"""I check sub-interfaces of ICredentials.
@cvar credentialInterfaces: A list of sub-interfaces of ICredentials which
specifies which I may check.
"""
def requestAvatarId(self, credentials):
"""
@param credentials: something which implements one of the interfaces in
self.credentialInterfaces.
@return: a Deferred which will fire a string which identifies an
avatar, an empty tuple to specify an authenticated anonymous user
(provided as checkers.ANONYMOUS) or fire a Failure(UnauthorizedLogin).
A note on anonymity - We do not want None as the value for anonymous
because it is too easy to accidentally return it. We do not want the
empty string, because it is too easy to mistype a password file. For
example, an .htpasswd file may contain the lines: ['hello:asdf',
'world:asdf', 'goodbye', ':world']. This misconfiguration will have an
ill effect in any case, but accidentally granting anonymous access is a
worse failure mode than simply granting access to an untypeable
username. We do not want an instance of 'object', because that would
create potential problems with persistence.
"""
ANONYMOUS = ()
class AllowAnonymousAccess:
__implements__ = ICredentialsChecker
credentialInterfaces = credentials.IAnonymous,
def requestAvatarId(self, credentials):
return defer.succeed(ANONYMOUS)
class InMemoryUsernamePasswordDatabaseDontUse:
credentialInterfaces = credentials.IUsernamePassword,
__implements__ = ICredentialsChecker
def __init__(self):
self.users = {}
def addUser(self, username, password):
self.users[username] = password
def _cbPasswordMatch(self, matched, username):
if matched:
return username
else:
return failure.Failure(error.UnauthorizedLogin())
def requestAvatarId(self, credentials):
if self.users.has_key(credentials.username):
return defer.maybeDeferred(
credentials.checkPassword,
self.users[credentials.username]).addCallback(
self._cbPasswordMatch, credentials.username)
else:
return defer.fail(error.UnauthorizedLogin())
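# Hedged usage sketch of a checker on its own, outside the full twisted.cred
# Portal machinery (the username and password are illustrative):
#
#   checker = InMemoryUsernamePasswordDatabaseDontUse()
#   checker.addUser('alice', 'secret')
#   d = checker.requestAvatarId(
#       credentials.UsernamePassword('alice', 'secret'))
#   d.addCallback(lambda avatarId: avatarId)  # fires with 'alice'
#   d.addErrback(lambda f: f.trap(error.UnauthorizedLogin))  # bad password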
|
fxia22/ASM_xf
|
PythonD/site_python/twisted/cred/checkers.py
|
Python
|
gpl-2.0
| 2,547 | 0.003141 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Worker.ping_response_dts'
db.add_column('job_runner_worker', 'ping_response_dts',
self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Worker.ping_response_dts'
db.delete_column('job_runner_worker', 'ping_response_dts')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'job_runner.job': {
'Meta': {'ordering': "('title',)", 'unique_together': "(('title', 'job_template'),)", 'object_name': 'Job'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'disable_enqueue_after_fails': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'fail_times': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.JobTemplate']"}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['job_runner.Job']"}),
'reschedule_interval': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'reschedule_interval_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'blank': 'True'}),
'reschedule_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '18', 'blank': 'True'}),
'script_content': ('django.db.models.fields.TextField', [], {}),
'script_content_partial': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'job_runner.jobtemplate': {
'Meta': {'ordering': "('title',)", 'object_name': 'JobTemplate'},
'auth_groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'body': ('django.db.models.fields.TextField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Worker']"})
},
'job_runner.killrequest': {
'Meta': {'object_name': 'KillRequest'},
'enqueue_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'execute_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'run': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Run']"}),
'schedule_dts': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'})
},
'job_runner.project': {
'Meta': {'ordering': "('title',)", 'object_name': 'Project'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'job_runner.rescheduleexclude': {
'Meta': {'object_name': 'RescheduleExclude'},
'end_time': ('django.db.models.fields.TimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Job']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'start_time': ('django.db.models.fields.TimeField', [], {})
},
'job_runner.run': {
'Meta': {'ordering': "('-return_dts', '-start_dts', '-enqueue_dts', 'schedule_dts')", 'object_name': 'Run'},
'enqueue_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_manual': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Job']"}),
'pid': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True'}),
'return_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'return_success': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'schedule_children': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'schedule_dts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'start_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'})
},
'job_runner.runlog': {
'Meta': {'ordering': "('-run',)", 'object_name': 'RunLog'},
'content': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'run': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'run_log'", 'unique': 'True', 'to': "orm['job_runner.Run']"})
},
'job_runner.worker': {
'Meta': {'ordering': "('title',)", 'object_name': 'Worker'},
'api_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'ping_response_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Project']"}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['job_runner']
|
spilgames/job-runner
|
job_runner/apps/job_runner/migrations/0013_auto__add_field_worker_ping_response_dts.py
|
Python
|
bsd-3-clause
| 9,629 | 0.007789 |
from django.conf.urls import url
from . import views
urlpatterns = [
# ex: /album/
url(r'^$', views.index, name='index'),
    # ex: /album/welcome/
url(r'^welcome/$', views.welcome, name='welcome'),
# ex: /album/create/
url(r'^create/$', views.create, name='create'),
# ex: /album/vietnam_2016/
url(r'^(?P<album_permalink>[\w_]+)/$', views.detail, name='detail'),
# ex: /album/vietnam_2016/settings
    url(r'^(?P<album_permalink>[\w_]+)/settings/$', views.settings, name='settings'),
]
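# Hedged examples of the URLs these patterns resolve to, assuming the project
# URLconf includes this module under the /album/ prefix shown in the comments
# above (the permalink value is illustrative):
#
#   reverse('detail', kwargs={'album_permalink': 'vietnam_2016'})
#       -> '/album/vietnam_2016/'
#   reverse('settings', kwargs={'album_permalink': 'vietnam_2016'})
#       -> '/album/vietnam_2016/settings/'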
|
AlexandreGuinaudeau/ClasseurPhoto
|
classeur_photo/classeur_photo/album/urls.py
|
Python
|
gpl-3.0
| 519 | 0.001927 |
# -*- coding: utf-8 -*-
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""mod_python->WSGI Framework"""
import sys
import os
import re
import cgi
import gc
import inspect
import socket
from fnmatch import fnmatch
from six.moves.urllib.parse import urlparse, urlunparse
from six import iteritems
from wsgiref.util import FileWrapper
from invenio.legacy.wsgi.utils import table
from invenio.utils.apache import \
HTTP_STATUS_MAP, SERVER_RETURN, OK, DONE, \
HTTP_NOT_FOUND, HTTP_INTERNAL_SERVER_ERROR
from invenio.config import CFG_WEBDIR, CFG_SITE_LANG, \
CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST, CFG_DEVEL_SITE, CFG_SITE_URL, \
CFG_SITE_SECURE_URL, CFG_WEBSTYLE_REVERSE_PROXY_IPS
from invenio.ext.logging import register_exception
from invenio.utils.datastructures import flatten_multidict
## TODO for future reimplementation of stream_file
#from invenio.legacy.bibdocfile.api import StreamFileException
from flask import request, after_this_request
## Magic regexp to search for usage of CFG_SITE_URL within src/href or
## any src usage of an external website
_RE_HTTPS_REPLACES = re.compile(r"\b((?:src\s*=|url\s*\()\s*[\"']?)http\://", re.I)
## Regexp to verify that the IP starts with a number (filter cases where 'unknown')
## It is faster to verify only the start (585 ns) compared with verifying
## the whole ip address - re.compile('^\d+\.\d+\.\d+\.\d+$') (1.01 µs)
_RE_IPADDRESS_START = re.compile("^\d+\.")
def _http_replace_func(match):
## src external_site -> CFG_SITE_SECURE_URL/sslredirect/external_site
return match.group(1) + CFG_SITE_SECURE_URL + '/sslredirect/'
_ESCAPED_CFG_SITE_URL = cgi.escape(CFG_SITE_URL, True)
_ESCAPED_CFG_SITE_SECURE_URL = cgi.escape(CFG_SITE_SECURE_URL, True)
def https_replace(html):
html = html.decode('utf-8').replace(_ESCAPED_CFG_SITE_URL,
_ESCAPED_CFG_SITE_SECURE_URL)
return _RE_HTTPS_REPLACES.sub(_http_replace_func, html)
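# Illustrative input/output for https_replace (the external URL is made up):
#
#   '<img src="http://external.example.org/logo.png">'
# becomes
#   '<img src="' + CFG_SITE_SECURE_URL + '/sslredirect/external.example.org/logo.png">'
# while literal occurrences of CFG_SITE_URL are rewritten to
# CFG_SITE_SECURE_URL directly.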
class InputProcessed(object):
"""
Auxiliary class used when reading input.
@see: <http://www.wsgi.org/wsgi/Specifications/handling_post_forms>.
"""
def read(self, *args):
raise EOFError('The wsgi.input stream has already been consumed')
readline = readlines = __iter__ = read
from werkzeug import (BaseResponse, ResponseStreamMixin,
CommonResponseDescriptorsMixin)
class Response(BaseResponse, ResponseStreamMixin,
CommonResponseDescriptorsMixin):
"""
Full featured response object implementing :class:`ResponseStreamMixin`
to add support for the `stream` property.
"""
class SimulatedModPythonRequest(object):
"""
mod_python like request object.
Minimum and cleaned implementation to make moving out of mod_python
easy.
@see: <http://www.modpython.org/live/current/doc-html/pyapi-mprequest.html>
"""
def __init__(self, environ, start_response):
self.response = Response()
self.__environ = environ
self.__start_response = start_response
self.__response_sent_p = False
self.__content_type_set_p = False
self.__buffer = ''
self.__low_level_headers = []
self.__filename = None
self.__disposition_type = None
self.__bytes_sent = 0
self.__allowed_methods = []
self.__cleanups = []
self.headers_out = {'Cache-Control': None}
#self.headers_out.update(dict(request.headers))
## See: <http://www.python.org/dev/peps/pep-0333/#the-write-callable>
self.__write = None
self.__write_error = False
self.__errors = environ['wsgi.errors']
self.__headers_in = table([])
self.__tainted = False
self.__is_https = self.__environ.get('wsgi.url_scheme') == 'https'
self.__replace_https = False
self.track_writings = False
self.__what_was_written = ""
self.__cookies_out = {}
self.g = {} ## global dictionary in case it's needed
for key, value in iteritems(environ):
if key.startswith('HTTP_'):
self.__headers_in[key[len('HTTP_'):].replace('_', '-')] = value
if environ.get('CONTENT_LENGTH'):
self.__headers_in['content-length'] = environ['CONTENT_LENGTH']
if environ.get('CONTENT_TYPE'):
self.__headers_in['content-type'] = environ['CONTENT_TYPE']
def get_wsgi_environ(self):
return self.__environ
def get_post_form(self):
""" Returns only POST form. """
self.__tainted = True
form = flatten_multidict(request.values)
if request.files:
for name, file_ in iteritems(request.files):
setattr(file_, 'file', file_.stream)
form[name] = file_
return form
def get_response_sent_p(self):
return self.__response_sent_p
def get_low_level_headers(self):
return self.__low_level_headers
def get_buffer(self):
return self.__buffer
def write(self, string, flush=1):
if isinstance(string, unicode):
self.__buffer += string.encode('utf8')
else:
self.__buffer += string
if flush:
self.flush()
def flush(self):
self.send_http_header()
if self.__buffer:
self.__bytes_sent += len(self.__buffer)
try:
if not self.__write_error:
if self.__replace_https:
self.__write(https_replace(self.__buffer))
else:
if self.__buffer:
self.__write(self.__buffer)
if self.track_writings:
if self.__replace_https:
self.__what_was_written += https_replace(self.__buffer)
else:
self.__what_was_written += self.__buffer
except IOError as err:
if "failed to write data" in str(err) or "client connection closed" in str(err):
## Let's just log this exception without alerting the admin:
register_exception(req=self)
self.__write_error = True ## This flag is there just
## to not report later other errors to the admin.
else:
raise
self.__buffer = ''
def set_content_type(self, content_type):
self.__content_type_set_p = True
self.response.content_type = content_type
if self.__is_https:
if content_type.startswith("text/html") or content_type.startswith("application/rss+xml"):
self.__replace_https = True
def get_content_type(self):
return self.response.content_type
def send_http_header(self):
for (k, v) in self.__low_level_headers:
self.response.headers[k] = v
for k, v in iteritems(self.headers_out):
self.response.headers[k] = v
self.__write = self.response.stream.write
def get_unparsed_uri(self):
return '?'.join([self.__environ['PATH_INFO'], self.__environ['QUERY_STRING']])
def get_uri(self):
return request.environ['PATH_INFO']
def get_full_uri(self):
if self.is_https():
return CFG_SITE_SECURE_URL + self.get_unparsed_uri()
else:
return CFG_SITE_URL + self.get_unparsed_uri()
def get_headers_in(self):
return request.headers
def get_subprocess_env(self):
return self.__environ
def add_common_vars(self):
pass
def get_args(self):
return request.environ['QUERY_STRING']
def get_remote_ip(self):
if 'X-FORWARDED-FOR' in self.__headers_in and \
self.__headers_in.get('X-FORWARDED-SERVER', '') == \
self.__headers_in.get('X-FORWARDED-HOST', '') == \
urlparse(CFG_SITE_URL)[1]:
# we are using proxy setup
if self.__environ.get('REMOTE_ADDR') in CFG_WEBSTYLE_REVERSE_PROXY_IPS:
# we trust this proxy
ip_list = self.__headers_in['X-FORWARDED-FOR'].split(',')
for ip in ip_list:
if _RE_IPADDRESS_START.match(ip):
return ip
# no IP has the correct format, return a default IP
return '10.0.0.10'
else:
# we don't trust this proxy
register_exception(prefix="You are running in a proxy configuration, but the " + \
"CFG_WEBSTYLE_REVERSE_PROXY_IPS variable does not contain " + \
"the IP of your proxy, thus the remote IP addresses of your " + \
"clients are not trusted. Please configure this variable.",
alert_admin=True)
return '10.0.0.11'
return request.remote_addr
def get_remote_host(self):
return request.environ.get('REMOTE_HOST', # apache
request.environ.get('HTTP_HOST',
'0.0.0.0')) # not found
def get_header_only(self):
return request.environ['REQUEST_METHOD'] == 'HEAD'
def set_status(self, status):
self.response.status_code = status
def get_status(self):
return self.response.status_code
def get_wsgi_status(self):
return '%s %s' % (self.response.status_code,
HTTP_STATUS_MAP.get(int(self.response.status_code),
'Explanation not available'))
def sendfile(self, path, offset=0, the_len=-1):
try:
self.send_http_header()
file_to_send = open(path)
file_to_send.seek(offset)
file_wrapper = FileWrapper(file_to_send)
count = 0
if the_len < 0:
for chunk in file_wrapper:
count += len(chunk)
self.__bytes_sent += len(chunk)
self.__write(chunk)
else:
for chunk in file_wrapper:
if the_len >= len(chunk):
the_len -= len(chunk)
count += len(chunk)
self.__bytes_sent += len(chunk)
self.__write(chunk)
else:
count += the_len
self.__bytes_sent += the_len
self.__write(chunk[:the_len])
break
except socket.error as e:
if e.errno == 54:
# Client disconnected, ignore
pass
else:
raise
except IOError as err:
if "failed to write data" in str(err) or "client connection closed" in str(err):
## Let's just log this exception without alerting the admin:
register_exception(req=self)
else:
raise
return self.__bytes_sent
def set_content_length(self, content_length):
if content_length is not None:
self.response.headers['content-length'] = str(content_length)
else:
del self.response.headers['content-length']
def is_https(self):
return self.__is_https
def get_method(self):
return request.environ['REQUEST_METHOD']
def get_hostname(self):
return request.environ.get('HTTP_HOST', '')
def set_filename(self, filename):
self.__filename = filename
if self.__disposition_type is None:
self.__disposition_type = 'inline'
self.response.headers['content-disposition'] = '%s; filename=%s' % (self.__disposition_type, self.__filename)
def set_encoding(self, encoding):
if encoding:
self.response.headers['content-encoding'] = str(encoding)
else:
del self.response.headers['content-encoding']
def get_bytes_sent(self):
return self.__bytes_sent
def log_error(self, message):
self.__errors.write(message.strip() + '\n')
def get_content_type_set_p(self):
return self.__content_type_set_p and \
bool(self.response.headers['content-type'])
def allow_methods(self, methods, reset=0):
if reset:
self.__allowed_methods = []
self.__allowed_methods += [method.upper().strip() for method in methods]
def get_allowed_methods(self):
return self.__allowed_methods
def readline(self, hint=None):
try:
return request.stream.readline(hint)
except TypeError:
## the hint param is not part of wsgi pep, although
## it's great to exploit it in when reading FORM
## with large files, in order to avoid filling up the memory
## Too bad it's not there :-(
return request.stream.readline()
def readlines(self, hint=None):
return request.stream.readlines(hint)
def read(self, hint=None):
return request.stream.read(hint)
def register_cleanup(self, callback, data=None):
@after_this_request
def f(response):
callback(data)
def get_cleanups(self):
return self.__cleanups
def get_referer(self):
return request.referrer
def get_what_was_written(self):
return self.__what_was_written
def __str__(self):
from pprint import pformat
out = ""
for key in dir(self):
try:
if not callable(getattr(self, key)) and not key.startswith("_SimulatedModPythonRequest") and not key.startswith('__'):
out += 'req.%s: %s\n' % (key, pformat(getattr(self, key)))
except:
pass
return out
def get_original_wsgi_environment(self):
"""
Return the original WSGI environment used to initialize this request
object.
@return: environ, start_response
@raise AssertionError: in case the environment has been altered, i.e.
either the input has been consumed or something has already been
written to the output.
"""
assert not self.__tainted, "The original WSGI environment is tainted since at least req.write or req.form has been used."
return self.__environ, self.__start_response
def get_environ(self):
return self.__environ
environ = property(get_environ)
content_type = property(get_content_type, set_content_type)
unparsed_uri = property(get_unparsed_uri)
uri = property(get_uri)
full_uri = property(get_full_uri)
headers_in = property(get_headers_in)
subprocess_env = property(get_subprocess_env)
args = property(get_args)
header_only = property(get_header_only)
status = property(get_status, set_status)
method = property(get_method)
hostname = property(get_hostname)
filename = property(fset=set_filename)
encoding = property(fset=set_encoding)
bytes_sent = property(get_bytes_sent)
content_type_set_p = property(get_content_type_set_p)
allowed_methods = property(get_allowed_methods)
response_sent_p = property(get_response_sent_p)
form = property(get_post_form)
remote_ip = property(get_remote_ip)
remote_host = property(get_remote_host)
referer = property(get_referer)
what_was_written = property(get_what_was_written)
def alert_admin_for_server_status_p(status, referer):
"""
Check the configuration variable
CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST to see if the exception should
be registered and the admin should be alerted.
"""
status = str(status)
for pattern in CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST:
pattern = pattern.lower()
must_have_referer = False
if pattern.endswith('r'):
## e.g. "404 r"
must_have_referer = True
pattern = pattern[:-1].strip() ## -> "404"
if fnmatch(status, pattern) and (not must_have_referer or referer):
return True
return False
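# Illustrative example (a sketch, not part of the original module): with
# CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST set to e.g. ['404 r', '5*'], a 404 is
# alerted only when a referer is present, while any 5xx status matches the
# fnmatch pattern '5*' unconditionally:
#
#     alert_admin_for_server_status_p(404, 'http://example.org/')  # -> True
#     alert_admin_for_server_status_p(404, None)                   # -> False
#     alert_admin_for_server_status_p(500, None)                   # -> True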
def application(environ, start_response, handler=None):
"""
Entry point for wsgi.
"""
## Needed for mod_wsgi, see: <http://code.google.com/p/modwsgi/wiki/ApplicationIssues>
req = SimulatedModPythonRequest(environ, start_response)
#print 'Starting mod_python simulation'
try:
if handler is None:
from invenio.ext.legacy.layout import invenio_handler
invenio_handler(req)
else:
handler(req)
req.flush()
## TODO for future reimplementation of stream_file
#except StreamFileException as e:
# return e.value
except SERVER_RETURN as status:
redirection, = status.args
from werkzeug.wrappers import BaseResponse
if isinstance(redirection, BaseResponse):
return redirection
status = int(str(status))
if status == 404:
from werkzeug.exceptions import NotFound
raise NotFound()
if status not in (OK, DONE):
req.status = status
req.headers_out['content-type'] = 'text/html'
admin_to_be_alerted = alert_admin_for_server_status_p(status,
req.headers_in.get('referer'))
if admin_to_be_alerted:
register_exception(req=req, alert_admin=True)
if not req.response_sent_p:
start_response(req.get_wsgi_status(), req.get_low_level_headers(), sys.exc_info())
map(req.write, generate_error_page(req, admin_to_be_alerted))
req.flush()
finally:
##for (callback, data) in req.get_cleanups():
## callback(data)
#if hasattr(req, '_session'):
# ## The session handler saves for caching a request_wrapper
# ## in req.
# ## This saves req as an attribute, creating a circular
# ## reference.
            # ## Since we have reached the end of the request handler
# ## we can safely drop the request_wrapper so to avoid
# ## memory leaks.
# delattr(req, '_session')
#if hasattr(req, '_user_info'):
# ## For the same reason we can delete the user_info.
# delattr(req, '_user_info')
## as suggested in
## <http://www.python.org/doc/2.3.5/lib/module-gc.html>
del gc.garbage[:]
return req.response
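# Illustrative sketch (an assumption, not part of the original module): a
# custom ``handler`` receives the SimulatedModPythonRequest and writes to it,
# bypassing the legacy Invenio layout; ``environ`` and ``start_response`` are
# the usual WSGI plumbing supplied by the server:
#
#     def hello(req):
#         req.content_type = 'text/plain'
#         req.write('hello')
#
#     response = application(environ, start_response, handler=hello)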
def generate_error_page(req, admin_was_alerted=True, page_already_started=False):
"""
Returns an iterable with the error page to be sent to the user browser.
"""
from invenio.legacy.webpage import page
from invenio.legacy import template
webstyle_templates = template.load('webstyle')
ln = req.form.get('ln', CFG_SITE_LANG)
if page_already_started:
return [webstyle_templates.tmpl_error_page(status=req.get_wsgi_status(), ln=ln, admin_was_alerted=admin_was_alerted)]
else:
return [page(title=req.get_wsgi_status(), body=webstyle_templates.tmpl_error_page(status=req.get_wsgi_status(), ln=ln, admin_was_alerted=admin_was_alerted), language=ln, req=req)]
def is_static_path(path):
"""
Returns True if path corresponds to an exsting file under CFG_WEBDIR.
@param path: the path.
@type path: string
@return: True if path corresponds to an exsting file under CFG_WEBDIR.
@rtype: bool
"""
path = os.path.abspath(CFG_WEBDIR + path)
if path.startswith(CFG_WEBDIR) and os.path.isfile(path):
return path
return None
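# Illustrative example (a sketch; the paths are assumptions): with CFG_WEBDIR
# set to '/opt/invenio/var/www', a request path '/img/site_logo.gif' returns
# '/opt/invenio/var/www/img/site_logo.gif' if that file exists, and None
# otherwise (including for '../' escapes that resolve outside CFG_WEBDIR).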
def is_mp_legacy_publisher_path(path):
"""
Checks path corresponds to an exsting Python file under CFG_WEBDIR.
@param path: the path.
@type path: string
@return: the path of the module to load and the function to call there.
@rtype: tuple
"""
from invenio.legacy.registry import webadmin
path = path.split('/')
module = ''
for index, component in enumerate(path):
if component.endswith('.py'):
possible_module = webadmin.get(module+component[:-3])
possible_handler = '/'.join(path[index + 1:]).strip()
if possible_handler.startswith('_'):
return None, None
if not possible_handler:
possible_handler = 'index'
if possible_module and os.path.exists(possible_module.__file__):
return (possible_module.__file__, possible_handler)
module = component + '/'
else:
return None, None
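# Illustrative example (a sketch; the names are assumptions): for a path like
# '/websearch/websearchadmin.py/index', the loop stops at the '.py'
# component, looks up the corresponding module in the webadmin registry, and
# returns (module_file, 'index'); handler names starting with '_' are
# rejected, and a missing handler part defaults to 'index'.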
def mp_legacy_publisher(req, possible_module, possible_handler):
"""
mod_python legacy publisher minimum implementation.
"""
from invenio.legacy.websession.session import get_session
from invenio.ext.legacy.handler import CFG_HAS_HTTPS_SUPPORT, CFG_FULL_HTTPS
if possible_module.endswith('.pyc'):
possible_module = possible_module[:-1]
the_module = open(possible_module).read()
module_globals = {}
exec(the_module, module_globals)
if possible_handler in module_globals and callable(module_globals[possible_handler]):
from invenio.ext.legacy.handler import _check_result
## req is the required first parameter of any handler
expected_args = list(inspect.getargspec(module_globals[possible_handler])[0])
if not expected_args or 'req' != expected_args[0]:
## req was not the first argument. Too bad!
raise SERVER_RETURN, HTTP_NOT_FOUND
        ## the req.form must be cast to dict because of Python 2.4 and earlier
## otherwise any object exposing the mapping interface can be
## used with the magic **
form = dict()
for key, value in req.form.items():
## FIXME: this is a backward compatibility workaround
## because most of the old administration web handler
## expect parameters to be of type str.
## When legacy publisher will be removed all this
## pain will go away anyway :-)
if isinstance(value, unicode):
form[key] = value.encode('utf8')
else:
## NOTE: this is a workaround for e.g. legacy webupload
## that is still using legacy publisher and expect to
## have a file (Field) instance instead of a string.
form[key] = value
if (CFG_FULL_HTTPS or CFG_HAS_HTTPS_SUPPORT and get_session(req).need_https) and not req.is_https():
from invenio.utils.url import redirect_to_url
# We need to isolate the part of the URI that is after
# CFG_SITE_URL, and append that to our CFG_SITE_SECURE_URL.
original_parts = urlparse(req.unparsed_uri)
plain_prefix_parts = urlparse(CFG_SITE_URL)
secure_prefix_parts = urlparse(CFG_SITE_SECURE_URL)
# Compute the new path
plain_path = original_parts[2]
plain_path = secure_prefix_parts[2] + \
plain_path[len(plain_prefix_parts[2]):]
# ...and recompose the complete URL
final_parts = list(secure_prefix_parts)
final_parts[2] = plain_path
final_parts[-3:] = original_parts[-3:]
target = urlunparse(final_parts)
redirect_to_url(req, target)
try:
return _check_result(req, module_globals[possible_handler](req, **form))
except TypeError as err:
if ("%s() got an unexpected keyword argument" % possible_handler) in str(err) or ('%s() takes at least' % possible_handler) in str(err):
inspected_args = inspect.getargspec(module_globals[possible_handler])
expected_args = list(inspected_args[0])
expected_defaults = list(inspected_args[3])
expected_args.reverse()
expected_defaults.reverse()
register_exception(req=req, prefix="Wrong GET parameter set in calling a legacy publisher handler for %s: expected_args=%s, found_args=%s" % (possible_handler, repr(expected_args), repr(req.form.keys())), alert_admin=CFG_DEVEL_SITE)
cleaned_form = {}
for index, arg in enumerate(expected_args):
if arg == 'req':
continue
if index < len(expected_defaults):
cleaned_form[arg] = form.get(arg, expected_defaults[index])
else:
cleaned_form[arg] = form.get(arg, None)
return _check_result(req, module_globals[possible_handler](req, **cleaned_form))
else:
raise
else:
raise SERVER_RETURN, HTTP_NOT_FOUND
|
egabancho/invenio
|
invenio/legacy/wsgi/__init__.py
|
Python
|
gpl-2.0
| 25,601 | 0.004336 |
import interact
class EvtInteract(interact.Interact):
def __init__(self):
self.events = []
def checkEventInteraction(self, events):
self.events = events
self.checkInteraction()
|
m4nolo/steering-all
|
src/interact/evtInteract.py
|
Python
|
mit
| 196 | 0.015306 |
from . import (
Application,
Category,
Course,
Designation,
Major,
Project,
Requirement,
User,
Year,
)
Application = Application.Application
Category = Category.Category
Course = Course.Course
Designation = Designation.Designation
Major = Major.Major
Project = Project.Project
Requirement = Requirement.Requirement
User = User.User
Year = Year.Year
|
BunsenMcDubbs/cs4400-project
|
app/models/__init__.py
|
Python
|
mit
| 386 | 0 |
"""empty message
Revision ID: 0047 add smtp
Revises: 0046 remove long description
Create Date: 2020-11-08 01:28:28.386704
"""
# revision identifiers, used by Alembic.
revision = '0047 add smtp'
down_revision = '0046 remove long description'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('email_providers', sa.Column('smtp_password', sa.String(), nullable=True))
op.add_column('email_providers', sa.Column('smtp_server', sa.String(), nullable=True))
op.add_column('email_providers', sa.Column('smtp_user', sa.String(), nullable=True))
op.add_column('email_providers', sa.Column('available', sa.Boolean(), nullable=True))
op.add_column('email_providers', sa.Column('created_at', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('email_providers', 'smtp_user')
op.drop_column('email_providers', 'smtp_server')
op.drop_column('email_providers', 'smtp_password')
op.drop_column('email_providers', 'available')
op.drop_column('email_providers', 'created_at')
# ### end Alembic commands ###
|
NewAcropolis/api
|
migrations/versions/0047.py
|
Python
|
mit
| 1,247 | 0.005613 |
# -*- coding: UTF-8 -*-
# YaBlog
# (c) Regis FLORET
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Regis FLORET BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
regisf/yablog
|
blog/templatetags/__init__.py
|
Python
|
bsd-3-clause
| 1,525 | 0.001311 |
import numpy as np
import os
import pandas as pd
import statsmodels.formula.api as smf
import sys
# @params: takes mobaid codes string
# @returns: list of mobaid strings
def splitCode(x):
if type(x) is str:
codes = x.split(',')
return codes
else:
return []
# @returns binary T/F if string code is in string/list x
def containsCode(code, x):
if code in x:
return 1
else:
return 0
# @param: takes char to be repeated c and number of repeats n
# @returns: a string with c repeated n times
def characterString(c, n):
r = ''
for i in range(n):
r = r + c
return r
# to debug lambda functions
def test(x):
print(x)
# combines boardings at the same stop
def combineRows(data):
# temp = data # debug
# print(temp.columns.values) # debug
# temp.drop('MobAids', 1 ,inplace=True) # debug
data = data.groupby(['ServiceDate','Run','ETA','DwellTime','Activity']).sum()
    # remove columns that contain only 0 data
    bool_column_df = data.apply(lambda x: (min(x) == 0) and (max(x) == 0))
    # print(bool_column_df.values) # debug
    columns = bool_column_df[bool_column_df.values].index.values
    # print(columns) # debug
data.drop(columns,1,inplace=True)
data.reset_index(inplace=True)
# print(data.columns.values) # debug
# print(data.equals(temp)) # debug
return(data)
# get data file from 1st argument
data = None
try:
data_path = os.path.join(os.pardir,'data',sys.argv[1])
data = pd.read_csv(data_path)
except IOError:
print('\n\tError: No file at ../data/' + sys.argv[1] + ' from ' + os.getcwd() + '\n')
quit()
except IndexError:
print('\n\tdwellTimeAnalysis.py takes a csv file from\n\n\t\tmain_repo\data\n\n\tassuming that the file is run in the Python_Scripts folder\n')
quit()
# gathers needed data
data.Activity = data.Activity.apply(lambda x: int(x))
# data = data.iloc(np.where((data.Activity == 0) | (data.Activity == 1)))
data = data[['ServiceDate','Run','ETA','DwellTime','Activity',
'MobAids']].loc[(data.Activity == 0) | (data.Activity == 1)]
allCodes = ['A','AM','AP','AR','BB','CA','CB','CI','CS','CT','H','H1','H2','HD','LI',
'MO','N','NR','OR','OX','PEL','PK','SA','SC','ST','SVC','U','V','V1','V2',
'WA','WG','WH','WK','WT','WX','0T']
data.MobAids = data.MobAids.apply(lambda x: splitCode(x))
# creates a column with binary values for each code
for code in allCodes:
data[code] = data.MobAids.apply(lambda x: containsCode(code, x))
# print(data) # debug
# Attempt to fix an error caused in the regression by this 0T
data.rename(columns={'0T' : 'OT'}, inplace=True)
# splits data into boading and deboarding
boardings = combineRows(data[data.Activity == 0])
# print(boardings) # debug
deboardings = combineRows(data[data.Activity == 1])
# for debugging
boardings.to_csv('../data/single_day_boardings.csv')
deboardings.to_csv('../data/single_day_deboardings.csv')
###################################################################
# Need to check with Matthew #
# ----------------------------- #
# Is the total dwell time for a stop included for each client row? #
# Or is the total dwell time divided among the client rows?        #
###################################################################
# regression for boarding dwell times
x = ' + '.join(boardings.columns.values[6:])
y = 'DwellTime'
reg_formula = y + ' ~ ' + x
# print reg_formula # debug
# boarding regression
lmb = smf.ols(formula=reg_formula, data=boardings).fit()
# deboarding regression
lmd = smf.ols(formula=reg_formula, data=deboardings).fit()
# writes data to file
orig_stdout = sys.stdout
output = open("../data/dwell_time_mobaid_regression.txt", 'w')
sys.stdout = output
top = characterString('#', 78) + '\n'
bottom = characterString('-', 78)
print top + characterString(' ', 34) + 'Boardings\n' + bottom
print lmb.summary()
print '\n\n' + top + characterString(' ', 33) + 'Deboardings\n' + bottom
print lmd.summary()
sys.stdout = orig_stdout
output.close()
#prints (debug purposes)
print top + characterString(' ', 34) + 'Boardings\n' + bottom
print lmb.summary()
print '\n\n' + top + characterString(' ', 33) + 'Deboardings\n' + bottom
print lmd.summary()
|
DSSG-paratransit/main_repo
|
Access_Analysis_Project/Scripts/dwellTimeAnalysis.py
|
Python
|
agpl-3.0
| 4,203 | 0.025696 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'template.ui'
#
# Created: Sun Sep 18 19:19:10 2016
# by: pyside2-uic running on PySide2 2.0.0~alpha0
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(694, 497)
self.gridLayout = QtWidgets.QGridLayout(Form)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName("gridLayout")
self.splitter = QtWidgets.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName("splitter")
self.layoutWidget = QtWidgets.QWidget(self.splitter)
self.layoutWidget.setObjectName("layoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.output = QtWidgets.QPlainTextEdit(self.layoutWidget)
font = QtGui.QFont()
font.setFamily("Monospace")
self.output.setFont(font)
self.output.setReadOnly(True)
self.output.setObjectName("output")
self.verticalLayout.addWidget(self.output)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.input = CmdInput(self.layoutWidget)
self.input.setObjectName("input")
self.horizontalLayout.addWidget(self.input)
self.historyBtn = QtWidgets.QPushButton(self.layoutWidget)
self.historyBtn.setCheckable(True)
self.historyBtn.setObjectName("historyBtn")
self.horizontalLayout.addWidget(self.historyBtn)
self.exceptionBtn = QtWidgets.QPushButton(self.layoutWidget)
self.exceptionBtn.setCheckable(True)
self.exceptionBtn.setObjectName("exceptionBtn")
self.horizontalLayout.addWidget(self.exceptionBtn)
self.verticalLayout.addLayout(self.horizontalLayout)
self.historyList = QtWidgets.QListWidget(self.splitter)
font = QtGui.QFont()
font.setFamily("Monospace")
self.historyList.setFont(font)
self.historyList.setObjectName("historyList")
self.exceptionGroup = QtWidgets.QGroupBox(self.splitter)
self.exceptionGroup.setObjectName("exceptionGroup")
self.gridLayout_2 = QtWidgets.QGridLayout(self.exceptionGroup)
self.gridLayout_2.setSpacing(0)
self.gridLayout_2.setContentsMargins(-1, 0, -1, 0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.clearExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
self.clearExceptionBtn.setEnabled(False)
self.clearExceptionBtn.setObjectName("clearExceptionBtn")
self.gridLayout_2.addWidget(self.clearExceptionBtn, 0, 6, 1, 1)
self.catchAllExceptionsBtn = QtWidgets.QPushButton(self.exceptionGroup)
self.catchAllExceptionsBtn.setCheckable(True)
self.catchAllExceptionsBtn.setObjectName("catchAllExceptionsBtn")
self.gridLayout_2.addWidget(self.catchAllExceptionsBtn, 0, 1, 1, 1)
self.catchNextExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
self.catchNextExceptionBtn.setCheckable(True)
self.catchNextExceptionBtn.setObjectName("catchNextExceptionBtn")
self.gridLayout_2.addWidget(self.catchNextExceptionBtn, 0, 0, 1, 1)
self.onlyUncaughtCheck = QtWidgets.QCheckBox(self.exceptionGroup)
self.onlyUncaughtCheck.setChecked(True)
self.onlyUncaughtCheck.setObjectName("onlyUncaughtCheck")
self.gridLayout_2.addWidget(self.onlyUncaughtCheck, 0, 4, 1, 1)
self.exceptionStackList = QtWidgets.QListWidget(self.exceptionGroup)
self.exceptionStackList.setAlternatingRowColors(True)
self.exceptionStackList.setObjectName("exceptionStackList")
self.gridLayout_2.addWidget(self.exceptionStackList, 2, 0, 1, 7)
self.runSelectedFrameCheck = QtWidgets.QCheckBox(self.exceptionGroup)
self.runSelectedFrameCheck.setChecked(True)
self.runSelectedFrameCheck.setObjectName("runSelectedFrameCheck")
self.gridLayout_2.addWidget(self.runSelectedFrameCheck, 3, 0, 1, 7)
self.exceptionInfoLabel = QtWidgets.QLabel(self.exceptionGroup)
self.exceptionInfoLabel.setObjectName("exceptionInfoLabel")
self.gridLayout_2.addWidget(self.exceptionInfoLabel, 1, 0, 1, 7)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem, 0, 5, 1, 1)
self.label = QtWidgets.QLabel(self.exceptionGroup)
self.label.setObjectName("label")
self.gridLayout_2.addWidget(self.label, 0, 2, 1, 1)
self.filterText = QtWidgets.QLineEdit(self.exceptionGroup)
self.filterText.setObjectName("filterText")
self.gridLayout_2.addWidget(self.filterText, 0, 3, 1, 1)
self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtWidgets.QApplication.translate("Form", "Console", None, -1))
self.historyBtn.setText(QtWidgets.QApplication.translate("Form", "History..", None, -1))
self.exceptionBtn.setText(QtWidgets.QApplication.translate("Form", "Exceptions..", None, -1))
self.exceptionGroup.setTitle(QtWidgets.QApplication.translate("Form", "Exception Handling", None, -1))
self.clearExceptionBtn.setText(QtWidgets.QApplication.translate("Form", "Clear Exception", None, -1))
self.catchAllExceptionsBtn.setText(QtWidgets.QApplication.translate("Form", "Show All Exceptions", None, -1))
self.catchNextExceptionBtn.setText(QtWidgets.QApplication.translate("Form", "Show Next Exception", None, -1))
self.onlyUncaughtCheck.setText(QtWidgets.QApplication.translate("Form", "Only Uncaught Exceptions", None, -1))
self.runSelectedFrameCheck.setText(QtWidgets.QApplication.translate("Form", "Run commands in selected stack frame", None, -1))
self.exceptionInfoLabel.setText(QtWidgets.QApplication.translate("Form", "Exception Info", None, -1))
self.label.setText(QtWidgets.QApplication.translate("Form", "Filter (regex):", None, -1))
from .CmdInput import CmdInput
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
esstoolkit/external/pyqtgraph/console/template_pyside2.py
|
Python
|
gpl-3.0
| 6,517 | 0.002302 |
#
#
#
import requests
from bs4 import BeautifulSoup
import re
import os
def all_links(URL,abs=False,session=None):
    '''Generator function yielding all links in a page.
    ARGS:
    URL -> url of the page
    abs -> (True) yields the raw 'href' of each <a> tag / (False) resolves each 'href' into a full link (WARNING: when False, javascript links in the page are skipped)
    session -> optional requests session to fetch the page with
    RETS:
    yields every link'''
if(session):
response=session.get(URL)
else:
response=requests.get(URL)
    mysoup=BeautifulSoup(response.text, 'html.parser')
for link in mysoup.find_all('a'):
        ret=link.get('href')
        if(ret is None):
            ## skip <a> tags that have no href attribute
            continue
if(abs):
yield ret
else:
if(ret[0:10]=="javascript"):
continue
if(ret[0]=='/'):
mat=re.match("(.+?\..+?\..{2,5})/",URL)
print(mat.group(1))
ret = mat.group(1) + ret
elif(ret[0] =='#'):
ret = URL + ret
elif(not re.match(".+?:.+",ret)):
ret = re.sub("/[^/]+$", "/"+ret , URL)
yield ret
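# Example usage (an illustrative sketch, not part of the original module;
# the URL is a placeholder):
#
#     for link in all_links('http://example.com/page.html'):
#         print(link)
#
# Relative hrefs such as 'other.html' are resolved against the page URL,
# while abs=True yields each href attribute untouched.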
def save_file(URL,session=None,dir="",replace=False,max_size=None,altname=None,chunksize=2048):
    '''Saves a file from the web to disk.
    ARGS:
    URL -> URL of the file to be downloaded
    session -> requests session if the file is only available in a session (typically login/auth/etc)
    dir -> directory of the saved file; can be either relative to the script or an absolute path. example: "archive/" saves files in a folder named archive
    replace -> if the file exists (True) replace it / (False) skip
    max_size -> max size of the file in Bytes; if the size exceeds this, the download will be aborted
    altname -> name of the saved file (if None: will attempt to retrieve the name from the server; if that fails: will attempt to parse the last part of the URL into a file name; if that fails: will name the file 'undefined')
    chunksize -> size of each chunk for writing to disk in Bytes (A.K.A. buffer size), default is 2KB
    RETS:
    True -> File already exists
    Number -> Bytes written to disk
    False -> Download failed (max_size exceeded)
    '''
if(altname==None):
if(session):
dlh = session.head(URL)
else:
dlh= requests.head(URL)
if (dlh.status_code != 200):
raise Exception(dlh.status_code)
try:
fileheader=dlh.headers['Content-Disposition']
mat=re.search('filename="(.*)"',fileheader)
filename=mat.group(1)
except:
mat2=re.search("/([^/]+?)$",URL)
if(mat2):
filename=mat2.group(1)
else:
filename='undefined'
else:
filename=altname
if (dir!="" and not os.path.exists(dir)):
os.makedirs(dir)
path=dir+filename
if(replace==False and os.path.exists(path)) :
return True
else:
if(session):
dl = session.get(URL, stream=True)
else:
dl = requests.get(URL, stream=True)
if (dl.status_code != 200):
raise Exception(dl.status_code)
with open(path, 'wb') as f:
for i,chunk in enumerate(dl.iter_content(chunksize)):
f.write(chunk)
if(max_size and f.tell()>max_size):
dl.close()
break;
else:
return f.tell()
return False
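# Example usage (an illustrative sketch; the URL and folder are placeholders):
#
#     written = save_file('http://example.com/archive.zip',
#                         dir='downloads/', max_size=10 * 1024 * 1024)
#     if written is True:
#         print('already on disk')
#     elif written is False:
#         print('aborted: larger than max_size')
#     else:
#         print('%d bytes written' % written)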
|
RRostami/Spiderpy
|
spiderpy/core.py
|
Python
|
gpl-3.0
| 2,986 | 0.045211 |
from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from datetime import datetime, timedelta
from airflow.operators import PythonOperator
from airflow.hooks import RedisHook
from airflow.models import Variable
from airflow.hooks import MemcacheHook
from etl_tasks_functions import get_time
from etl_tasks_functions import subtract_time
from subdags.utilization_utility import calculate_wimax_utilization
from subdags.utilization_utility import calculate_cambium_ss_utilization
from subdags.utilization_utility import calculate_radwin5k_ss_utilization
from subdags.utilization_utility import calculate_radwin5k_bs_utilization
from subdags.utilization_utility import calculate_radwin5kjet_ss_utilization
from subdags.utilization_utility import calculate_radwin5kjet_bs_utilization
from subdags.utilization_utility import calculate_radwin5k_bs_and_ss_dyn_tl_kpi
from subdags.utilization_utility import calculate_backhaul_utilization
from subdags.utilization_utility import calculate_ptp_utilization
from subdags.utilization_utility import calculate_mrotek_utilization
from subdags.utilization_utility import backtrack_x_min
from subdags.utilization_utility import get_severity_values
from subdags.utilization_utility import calculate_age
from subdags.utilization_utility import calculate_severity
from airflow.operators import MySqlLoaderOperator
import logging
import itertools
import socket
import random
import traceback
import time
from pprint import pprint
default_args = {
'owner': 'wireless',
'depends_on_past': False,
'start_date': datetime.now() - timedelta(minutes=2),
'email': ['vipulsharma144@gmail.com'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=1),
'provide_context': True,
'catchup': False,
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
}
redis_hook_util_10 = RedisHook(redis_conn_id="redis_hook_util_10")
memc_con_cluster = MemcacheHook(memc_cnx_id = 'memc_cnx')
vrfprv_memc_con = MemcacheHook(memc_cnx_id = 'vrfprv_memc_cnx')
pub_memc_con = MemcacheHook(memc_cnx_id = 'pub_memc_cnx')
redis_hook_static_5 = RedisHook(redis_conn_id="redis_hook_5")
INSERT_HEADER = "INSERT INTO %s.performance_utilization"
INSERT_TAIL = """
(machine_name,current_value,service_name,avg_value,max_value,age,min_value,site_name,data_source,critical_threshold,device_name,severity,sys_timestamp,ip_address,warning_threshold,check_timestamp,refer )
values
(%(machine_name)s,%(current_value)s,%(service_name)s,%(avg_value)s,%(max_value)s,%(age)s,%(min_value)s,%(site_name)s,%(data_source)s,%(critical_threshold)s,%(device_name)s,%(severity)s,%(sys_timestamp)s,%(ip_address)s,%(warning_threshold)s,%(check_timestamp)s,%(refer)s)
"""
UPDATE_HEADER = "INSERT INTO %s.performance_utilizationstatus"
UPDATE_TAIL = """
(machine_name,current_value,service_name,avg_value,max_value,age,min_value,site_name,data_source,critical_threshold,device_name,severity,sys_timestamp,ip_address,warning_threshold,check_timestamp,refer )
values
(%(machine_name)s,%(current_value)s,%(service_name)s,%(avg_value)s,%(max_value)s,%(age)s,%(min_value)s,%(site_name)s,%(data_source)s,%(critical_threshold)s,%(device_name)s,%(severity)s,%(sys_timestamp)s,%(ip_address)s,%(warning_threshold)s,%(check_timestamp)s,%(refer)s)
ON DUPLICATE KEY UPDATE machine_name = VALUES(machine_name),current_value = VALUES(current_value),age=VALUES(age),site_name=VALUES(site_name),critical_threshold=VALUES(critical_threshold),severity=VALUES(severity),sys_timestamp=VALUES(sys_timestamp),ip_address=VALUES(ip_address),warning_threshold=VALUES(warning_threshold),check_timestamp=VALUES(check_timestamp),refer=VALUES(refer)
"""
ERROR_DICT ={404:'Device not found yet',405:'No SS Connected to BS-BS is not skipped'}
ERROR_FOR_DEVICE_OMITTED = [404]
kpi_rules = eval(Variable.get("kpi_rules"))
DEBUG = False
sv_to_ds_mapping = {}
#O7_CALC_Q = "calculation_q"
O7_CALC_Q = "poller_queue"
down_and_unresponsive_devices = eval(redis_hook_static_5.get("current_down_devices_all"))
def process_utilization_kpi(
parent_dag_name,
child_dag_name,
start_date,
schedule_interval,
celery_queue,
ss_tech_sites,
hostnames_ss_per_site,
ss_name,
utilization_attributes,
config_sites): #here config site is list of all sites in system_config var
utilization_kpi_subdag_dag = DAG(
dag_id="%s.%s"%(parent_dag_name, child_dag_name),
schedule_interval=schedule_interval,
start_date=start_date,
)
for service in utilization_attributes:
sv_to_ds_mapping [service.get("service_name")] ={"data_source":service.get("data_source"),"sector_type":service.get("sector_type")}
def get_calculated_ss_data():
ss_data = redis_hook_util_10.rget("calculated_ss_utilization_kpi")
combined_site_data = {}
for site_data in ss_data:
site_data = eval(site_data)
combined_site_data.update(site_data)
return combined_site_data
#To create SS dict
def format_data(**kwargs):
device_type = kwargs.get("params").get("technology")
utilization_attributes = kwargs.get("params").get("attributes")
machine_name = kwargs.get("params").get("machine_name")
ss_kpi_dict = {
'site_name': 'unknown' ,
'device_name': 'unknown',
'service_name': 'unknown',
'ip_address': 'unknown',
'severity': 'unknown',
'age': 'unknown',
'data_source': 'unknown',
'current_value': 'unknown',
'warning_threshold': 'unknown',
'critical_threshold': 'unknown',
'check_timestamp': 'unknown',
'sys_timestamp': 'unknown' ,
'refer':'unknown',
'min_value':'unknown',
'max_value':'unknown',
'avg_value':'unknown',
'machine_name':'unknown'
}
ss_data =redis_hook_util_10.rget("calculated_utilization_%s_%s"%(device_type,machine_name))
cur_processing_time = backtrack_x_min(time.time(),300) + 120 # this is used to rewind the time to previous multiple of 5 value so that kpi can be shown accordingly
ss_devices_list = []
for ss_device in ss_data:
ss_device = eval(ss_device)
hostname = ss_device.get('hostname')
for service in ss_device.get('services'):
data_source = sv_to_ds_mapping.get(service).get("data_source")
pmp_type = sv_to_ds_mapping.get(service).get("sector_type")
thresholds = get_severity_values(service)
ss_kpi_dict['critical_threshold']=thresholds[0]
ss_kpi_dict['data_source']=data_source
ss_kpi_dict['site_name']=ss_device.get('site')
#TODO: ok and unknown are only 2 sev for ss we can incluudethis in rules later
ss_kpi_dict['service_name']= service
ss_kpi_dict['machine_name']= machine_name
ss_kpi_dict['check_timestamp']=cur_processing_time
ss_kpi_dict['device_name']=ss_device.get('hostname')
ss_kpi_dict['sys_timestamp']=cur_processing_time
ss_kpi_dict['refer']=ss_device.get("%s_sector"%(pmp_type))
ss_kpi_dict['ip_address']=ss_device.get('ipaddress')
ss_kpi_dict['warning_threshold']= thresholds[1]
if not isinstance(ss_device.get(service),dict):
                    # clamp the current value to 100 if it exceeds it
                    cur_value = ss_device.get(service)
                    try:
                        if isinstance(cur_value, float) and cur_value > 100.00:
                            cur_value = 100
                    except Exception:
                        logging.error("Exception while handling above 100 entries")
ss_kpi_dict['severity']= calculate_severity(service,ss_device.get(service))
ss_kpi_dict['age']= calculate_age(hostname,ss_kpi_dict['severity'],ss_device.get('device_type'),cur_processing_time,service)
ss_kpi_dict['current_value']=cur_value
ss_kpi_dict['avg_value']=cur_value
ss_kpi_dict['min_value']=cur_value
ss_kpi_dict['max_value']=cur_value
if ss_kpi_dict['current_value'] != None:
ss_devices_list.append(ss_kpi_dict.copy())
else:
for data_source in ss_device.get(service):
ds_values = ss_device.get(service)
curr_value= ss_device.get(service).get(data_source)
if isinstance(curr_value,str):
try:
curr_value=float(curr_value)
if isinstance(curr_value,float):
if curr_value > 100.00:
curr_value=100
except Exception:
logging.error("Unable to convert to float")
else:
if curr_value > 100.00:
curr_value=100
ss_kpi_dict['data_source']=data_source
ss_kpi_dict['severity']= calculate_severity(service,ds_values.get(data_source))
ss_kpi_dict['age']= calculate_age(hostname,ss_kpi_dict['severity'],ss_device.get('device_type'),cur_processing_time,service)
ss_kpi_dict['current_value'] = curr_value
ss_kpi_dict['avg_value']=curr_value
ss_kpi_dict['min_value']=curr_value
ss_kpi_dict['max_value']=curr_value
if ss_kpi_dict['current_value'] != None:
ss_devices_list.append(ss_kpi_dict.copy())
try:
if len(ss_devices_list) > 0:
redis_hook_util_10.rpush("formatted_util_%s_%s"%(device_type,machine_name),ss_devices_list)
else:
logging.info("No %s device found in %s after formatting "%(device_type,machine_name))
except Exception:
logging.error("Unable to push formatted SS data to redis")
def get_required_data_ss(**kwargs):
site_name = kwargs.get("params").get("site_name")
device_type = kwargs.get("params").get("technology")
utilization_attributes = kwargs.get("params").get("attributes")
if "vrfprv" in site_name:
memc_con = vrfprv_memc_con
elif "pub" in site_name:
memc_con = pub_memc_con
else:
memc_con = memc_con_cluster
ss_data_dict = {}
all_ss_data = []
if site_name not in hostnames_ss_per_site.keys():
logging.warning("No SS devices found for %s"%(site_name))
return 1
for hostnames_dict in hostnames_ss_per_site.get(site_name):
host_name = hostnames_dict.get("hostname")
ip_address = hostnames_dict.get("ip_address")
ss_data_dict['hostname'] = host_name
ss_data_dict['ipaddress'] = ip_address
ss_data_dict['site_name'] = site_name
if host_name not in down_and_unresponsive_devices:
for service in utilization_attributes:
ss_data_dict[service.get('service_name')] = memc_con.get(service.get('utilization_key')%(host_name))
all_ss_data.append(ss_data_dict.copy())
if len(all_ss_data) == 0:
logging.info("No data Fetched ! Aborting Successfully")
return 0
try:
#redis_hook_util_10.rpush("%s_%s"%(device_type,site_name),all_ss_data)
print "++++++++++++"
print site_name.split("_")[0]
redis_hook_util_10.rpush("%s_%s"%(device_type,site_name.split("_")[0]),all_ss_data)
except Exception:
logging.warning("Unable to insert ss data into redis")
#pprint(all_ss_data)
def calculate_utilization_data_ss(**kwargs):
machine_name = kwargs.get("params").get("machine_name")
device_type = kwargs.get("params").get("technology")
utilization_attributes = kwargs.get("params").get("attributes")
devices_data_dict = redis_hook_util_10.rget("%s_%s"%(device_type,machine_name))
if len(devices_data_dict) == 0:
logging.info("No Data found for ss %s "%(machine_name))
return 1
ss_data = []
for devices in devices_data_dict:
devices = eval(devices)
site_name = devices.get("site_name")
devices['site'] = site_name
devices['device_type'] = device_type
for service_attributes in utilization_attributes: #loop for the all the configured services
service = service_attributes.get('service_name')
if service_attributes.get('isKpi'):
if 'services' in devices.keys() and devices.get('services') != None:
devices.get('services').append(service)
elif service and devices.get('services') == None:
devices['services'] = [service]
else:
devices['services'] = []
if service_attributes.get('isKpi'):
utilization_type = service_attributes.get("utilization_type")
capacity = None
if "capacity" in service_attributes.keys():
capacity = service_attributes.get("capacity")
try:
formula = kpi_rules.get(service).get('formula')
devices[service] = eval(formula)
except Exception:
print "Exception in calculating data"
pass
else:
continue
#ip_ul_mapper[devices.get('ipaddress')] = devices
ss_data.append(devices.copy())
#ss_utilization_list.append(ip_ul_mapper.copy())
key="calculated_utilization_%s_%s"%(device_type,machine_name)
redis_hook_util_10.rpush(key,ss_data)
print "Setting ....."
print "calculated_utilization_%s_%s"%(device_type,machine_name)
#redis_hook_util_10.rpush("calculated_ss_utilization_kpi",ss_utilization_list)
def aggregate_utilization_data(*args,**kwargs):
print "Aggregating Data"
machine_name = kwargs.get("params").get("machine_name")
device_type = kwargs.get("params").get("technology")
#device_type = kwargs.get("params").get("device_type")
formatted_data=redis_hook_util_10.rget("formatted_util_%s_%s"%(device_type,machine_name))
machine_data = []
for site_data in formatted_data:
machine_data.append(eval(site_data))
redis_hook_util_10.set("aggregated_utilization_%s_%s"%(machine_name,device_type),str(machine_data))
machine_names = set([site.split("_")[0] for site in ss_tech_sites])
config_machines = set([site.split("_")[0] for site in config_sites])
aggregate_dependency_ss = {}
aggregate_dependency_bs = {}
calculate_task_list={}
format_task_list={}
#TODo Remove this if ss >> bs task
# calculate_utilization_lost_ss_bs_task = PythonOperator(
# task_id = "calculate_bs_utilization_lost_ss",
# provide_context=True,
# python_callable=calculate_utilization_data_bs,
# params={"lost_n_found":True},
# dag=utilization_kpi_subdag_dag
# )
for each_machine_name in machine_names:
if each_machine_name in config_machines:
aggregate_utilization_data_ss_task = PythonOperator(
task_id = "aggregate_utilization_ss_%s"%each_machine_name,
provide_context=True,
python_callable=aggregate_utilization_data,
params={"machine_name":each_machine_name,"technology":ss_name},
dag=utilization_kpi_subdag_dag,
queue = O7_CALC_Q,
trigger_rule = 'all_done'
)
aggregate_dependency_ss[each_machine_name] = aggregate_utilization_data_ss_task
calculate_utilization_data_ss_task = PythonOperator(
task_id = "calculate_ss_utilization_kpi_of_%s"%each_machine_name,
provide_context=True,
trigger_rule = 'all_done',
python_callable=calculate_utilization_data_ss,
params={"machine_name":each_machine_name,"technology":ss_name,'attributes':utilization_attributes},
dag=utilization_kpi_subdag_dag,
queue = O7_CALC_Q,
)
format_data_ss_task = PythonOperator(
task_id = "format_data_of_ss_%s"%each_machine_name,
provide_context=True,
python_callable=format_data,
trigger_rule = 'all_done',
params={"machine_name":each_machine_name,"technology":ss_name,'attributes':utilization_attributes},
dag=utilization_kpi_subdag_dag,
queue = celery_queue,
)
calculate_task_list[each_machine_name] = calculate_utilization_data_ss_task
calculate_utilization_data_ss_task >> format_data_ss_task
format_data_ss_task >> aggregate_utilization_data_ss_task
            # we have to build these queries via string interpolation; this is unsafe
INSERT_QUERY = INSERT_HEADER%("nocout_"+each_machine_name) + INSERT_TAIL
UPDATE_QUERY = UPDATE_HEADER%("nocout_"+each_machine_name) + UPDATE_TAIL
INSERT_QUERY = INSERT_QUERY.replace('\n','')
UPDATE_QUERY = UPDATE_QUERY.replace('\n','')
#ss_name == Device_type
if not DEBUG:
insert_data_in_mysql = MySqlLoaderOperator(
task_id ="upload_data_%s"%(each_machine_name),
dag=utilization_kpi_subdag_dag,
query=INSERT_QUERY,
#data="",
redis_key="aggregated_utilization_%s_%s"%(each_machine_name,ss_name),
redis_conn_id = "redis_hook_util_10",
mysql_conn_id='mysql_uat',
queue = O7_CALC_Q,
trigger_rule = 'all_done'
)
update_data_in_mysql = MySqlLoaderOperator(
task_id ="update_data_%s"%(each_machine_name),
query=UPDATE_QUERY ,
#data="",
redis_key="aggregated_utilization_%s_%s"%(each_machine_name,ss_name),
redis_conn_id = "redis_hook_util_10",
mysql_conn_id='mysql_uat',
dag=utilization_kpi_subdag_dag,
queue = O7_CALC_Q,
trigger_rule = 'all_done'
)
update_data_in_mysql << aggregate_utilization_data_ss_task
insert_data_in_mysql << aggregate_utilization_data_ss_task
db_list=[]
for each_site_name in ss_tech_sites:
if each_site_name in config_sites:
machine = each_site_name.split("_")[0]
get_required_data_ss_task = PythonOperator(
task_id = "get_utilization_data_of_ss_%s"%each_site_name,
provide_context=True,
trigger_rule = 'all_done',
python_callable=get_required_data_ss,
params={"site_name":each_site_name,"technology":ss_name,'attributes':utilization_attributes},
dag=utilization_kpi_subdag_dag,
queue = celery_queue
)
get_required_data_ss_task >> calculate_task_list.get(machine)
#calculate_utilization_data_ss_task >> format_data_ss_task
#calculate_utilization_data_ss_task >> calculate_utilization_data_bs_task
# try:
# aggregate_dependency_ss[machine_name] << format_data_ss_task
# except:
# logging.info("Site Not Found %s"%(machine_name))
# pass
else:
logging.info("Skipping %s"%(each_site_name))
return utilization_kpi_subdag_dag
|
vipul-tm/DAG
|
dags-ttpl/subdags/utilization_kpi_subdag.py
|
Python
|
bsd-3-clause
| 17,641 | 0.03832 |
'''
Various tools to interface with pyGSTi for running GST experiments.
Created on May 16, 2018
Original Author: Guilhem Ribeill
Copyright 2018 Raytheon BBN Technologies
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from .PulsePrimitives import *
from .Cliffords import *
from .BasicSequences.helpers import create_cal_seqs
from .Compiler import compile_to_hardware
from itertools import chain
from functools import reduce
from random import choices
PYGSTI_PRESENT = False
try:
from pygsti.objects.circuit import Circuit
PYGSTI_PRESENT = True
except:
pass
#Default mapping from pyGSTi naming convention to QGL gates.
gst_gate_map = {"Gx": X90,
"Gy": Y90,
"Gi": Id}
def gst_map_1Q(gst_list, qubit, qgl_map=gst_gate_map, append_meas=True):
"""
Helper function that takes an arbitrarily nested list of pygsti gatestrings
and converts them into QGL sequences, keeping the same nesting of lists.
Inputs:
gst_list: GateString to convert, or possibly nested list of pyGSTi GateStrings.
qubit: QGL qubit to apply the sequence to
        qgl_map: Dictionary that maps a pyGSTi "Gx" string to a QGL pulse
append_meas: Append a measurement to each sequence.
Returns:
QGL sequences, preserving the input list nesting (as a generator)
"""
if isinstance(gst_list, Circuit):
gst_list = [gst_list]
for item in gst_list:
if isinstance(item, Circuit):
mapped = map(lambda x: qgl_map[str(x)](qubit), item.tup)
if append_meas:
yield list(chain(mapped, [MEAS(qubit)]))
else:
yield list(mapped)
elif isinstance(item, list):
yield list(gst_map_1Q(item, qubit, qgl_map=qgl_map, append_meas=append_meas))
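# Illustrative usage (a sketch, not part of the original module; ``q1`` is a
# hypothetical QGL qubit and ``circuits`` a list of pyGSTi Circuits):
#
#     seqs = list(gst_map_1Q(circuits, q1))
#     # each pyGSTi label is mapped through gst_gate_map, e.g. a circuit
#     # with labels ('Gx', 'Gy') becomes [X90(q1), Y90(q1), MEAS(q1)]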
def gst_map_2Q(gst_list, qubits, qgl_map=None, append_meas=False):
"""
Helper function that takes an arbitrarily nested list of pygsti gatestrings
and converts them into QGL sequences, keeping the same nesting of lists.
Inputs:
gst_list: GateString to convert, or possibly nested list of pyGSTi GateStrings.
        qubits: the QGL qubits to apply the sequence to
        qgl_map: Dictionary that maps a pyGSTi "Gx" string to a QGL pulse
append_meas: Append a measurement to each sequence.
Returns:
QGL sequences, preserving the input list nesting (as a generator)
"""
    if isinstance(gst_list, Circuit):
        gst_list = [gst_list]
    for item in gst_list:
        if isinstance(item, Circuit):
            mapped = map(lambda x: qgl_map[str(x)], item.tup)
            if append_meas:
                yield list(chain(mapped, [reduce(lambda x,y: x*y, map(MEAS, qubits))]))
            else:
                yield list(mapped)
        elif isinstance(item, list):
            yield list(gst_map_2Q(item, qubits, qgl_map=qgl_map, append_meas=append_meas))
def create_gst_sequence_from_pygsti(gst_list, qubit, gate_map=gst_gate_map):
""" Returns list of QGL sequences from a pyGSTi GateString list. See gst_map_1Q.
The return value is a list of sequences that can be complied by QGL.
"""
return list(gst_map_1Q(gst_list, qubit, qgl_map=gate_map, append_meas=True))
def pygsti_to_cliffords(gst_seq):
#Map from GST convention to cliffords
cliff_map = {"{}": 0,
"Gi": 1,
"Gx": 2,
"Gy": 5}
#convert to dictionary of lambdas for compatibility with gst_map_1Q
lambda_map = {k: lambda x, v=v: v for k, v in cliff_map.items()}
return list(gst_map_1Q(gst_seq, None, qgl_map=lambda_map,
append_meas=False))
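# Illustrative note (an assumption about the label order): with the mapping
# above, a pyGSTi circuit with labels ('Gx', 'Gy', 'Gi') converts to the
# clifford index list [2, 5, 1].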
def pauli_rand_clifford_circuit(gst_seq):
def seqreduce(s):
if not s:
return 0
else:
return reduce(lambda x,y: clifford_multiply(x,y), s)
def inv_cliff(c):
return inverse_clifford(clifford_mat(c, 1))
c_ps = [0, 2, 5, 8]
c_seqs = pygsti_to_cliffords(gst_seq)
r_seqs = []
for seq in c_seqs:
if not seq:
r_seqs.append([])
else:
rand_pauli = choices(c_ps, k=len(seq))
inter = 0
bare = 0
rseq = []
for j in range(len(seq)):
inter = clifford_multiply(clifford_multiply(inter, rand_pauli[j]), seq[j])
bare = clifford_multiply(bare, seq[j])
rseq.append(clifford_multiply(rand_pauli[j], seq[j]))
recovery = clifford_multiply(inv_cliff(inter), bare)
rseq[-1] = clifford_multiply(rseq[-1], recovery)
r_seqs.append(rseq)
all_ok = all((r == i for r, i in zip(map(seqreduce, r_seqs), map(seqreduce, c_seqs))))
assert all_ok, "Something went wrong when Pauli-frame randomizing!"
return r_seqs
def SingleQubitCliffordGST(qubit, pygsti_seq, pulse_library="Standard", randomized=False, num_cals=100, diac_compiled=True):
pulse_library = pulse_library.upper()
# QGL pulse libraries handle the Id pulse differently. In the standard
# case, the Id is of finite length equal to all the other one-pulse
# elements of the library. In the Atomic and DiAtomic cases, the ID is
# of length 0 by default. In GST, we need access to both types of the ID
# gate with the first experiment in any GST experiment equal to {} =
# Id(length = 0). All other Id gates in the sequence should be of finite
# length. So we'll modify the Clifford indexing here to make Id(length=0)
# the first element in the library and Id(length=length) the second.
if pulse_library == "STANDARD":
#clifford_pulse = lambda x: clifford_seq(x, qubit)
clifford_pulse = [clifford_seq(i, qubit) for i in range(24)]
clifford_pulse.insert(0, Id(qubit, length=0.0))
elif pulse_library == "DIAC":
#clifford_pulse = lambda x: DiAC(qubit, x, diac_compiled)
clifford_pulse = [AC(qubit, i, diac_compiled) for i in range(24)]
clifford_pulse.insert(1, Id(qubit))
elif pulse_library == "AC":
#clifford_pulse = lambda x: AC(qubit, x)
clifford_pulse = [AC(qubit, i) for i in range(24)]
clifford_pulse.insert(1, Id(qubit))
raise ValueError("Pulse library must be one of 'standard', 'diac', or 'ac'. Got {} instead".format(pulse_library))
if randomized:
seqs = pauli_rand_clifford_circuit(pygsti_seq)
else:
seqs = pygsti_to_cliffords(pygsti_seq)
qgl_seqs = []
for seq in seqs:
qgl_seqs.append([clifford_pulse[c] for c in seq])
qgl_seqs[-1].append(MEAS(qubit))
if num_cals != 0:
qgl_seqs += create_cal_seqs((qubit, ), abs(num_cals))
metafile = compile_to_hardware(qgl_seqs, 'GST/GST')
return metafile
|
BBN-Q/QGL
|
QGL/GSTTools.py
|
Python
|
apache-2.0
| 7,229 | 0.004288 |
"""
Based on http://vaig.be/2009/03/getting-client-os-in-django.html
"""
import re
def client_os(user_agent):
'''
Context processor for Django that provides operating system
information base on HTTP user agent.
A user agent looks like (line break added):
"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.6) \
Gecko/2009020409 Iceweasel/3.0.6 (Debian-3.0.6-1)"
'''
# Mozilla/5.0
    regex = r'(?P<application_name>\w+)/(?P<application_version>[\d\.]+)'
    regex += r' \('
    # X11
    regex += r'(?P<compatibility_flag>\w+)'
    regex += '; '
    # U
    if "U;" in user_agent or "MSIE" in user_agent: # some UA strings leave out the U;
        regex += r'(?P<version_token>[\w .]+)'
        regex += '; '
    # Linux i686
    regex += r'(?P<platform_token>[\w ._]+)'
    # anything else
    regex += '; .*'
result = re.match(regex, user_agent)
if result:
result_dict = result.groupdict()
full_platform = result_dict['platform_token']
platform_values = full_platform.split(' ')
if platform_values[0] in ('Windows', 'Linux', 'Mac'):
platform = platform_values[0]
elif platform_values[1] in ('Mac',):
# Mac is given as "PPC Mac" or "Intel Mac"
platform = platform_values[1]
else:
platform = None
else:
# Total hack to avoid dealing with regex nightmares
if 'mac' in user_agent.lower():
full_platform = "Intel Mac 10.6"
platform = 'Mac'
elif 'windows' in user_agent.lower():
full_platform = "Windows"
platform = 'Windows'
else:
full_platform = None
platform = None
return {
'full_platform': full_platform,
'platform': platform,
}
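# Example (an illustrative sketch; the user agent is the one quoted in the
# docstring above):
#
#     ua = ('Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.6) '
#           'Gecko/2009020409 Iceweasel/3.0.6 (Debian-3.0.6-1)')
#     client_os(ua)  # -> {'full_platform': 'Linux i686', 'platform': 'Linux'}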
|
Alwnikrotikz/marinemap
|
lingcod/common/uaparser/clientos.py
|
Python
|
bsd-3-clause
| 1,804 | 0.007206 |
import os
class Config(object):
SPOTIPY_REDIRECT_URI = os.environ['SPOTIPY_REDIRECT_URI']
SPOTIPY_CLIENT_ID = os.environ['SPOTIPY_CLIENT_ID']
SPOTIPY_CLIENT_SECRET = os.environ['SPOTIPY_CLIENT_SECRET']
SPOTIFY_ACCESS_SCOPE = 'playlist-modify-public playlist-modify-private playlist-read-private user-library-read'
###########
# Options #
###########
# TRACKS_PER_ARTIST #
# Number of tracks per artist to add to the playlist.
# I recommend 5 or less. Max is 10.
TRACKS_PER_ARTIST = 3
# COLLATE #
# By default, the playlist will be ordered like:
# - ARTIST A TRACK 1
# - ARTIST A TRACK 2
# - ARTIST A TRACK 3
# - ARTIST A TRACK 4
# - ARTIST A TRACK 5
# - ARTIST B TRACK 1
# - ARTIST B TRACK 2
# - ARTIST B TRACK 3
# ...
# if COLLATE is set to True, it will instead be ordered like so:
# - ARTIST A TRACK 1
# - ARTIST B TRACK 1
# - ARTIST C TRACK 1
# ...
# - ARTIST Z TRACK 1
# - ARTIST A TRACK 2
# - ARTIST B TRACK 2
# ...
COLLATE = False
# PUBLIC #
# Default False. Set True to make your generated playlist public.
PUBLIC = False
|
jzimbel/artist-expander
|
config.py
|
Python
|
mit
| 1,176 | 0.001701 |
"""
This migration script adds a user actions table to Galaxy.
"""
from sqlalchemy import *
from migrate import *
import datetime
now = datetime.datetime.utcnow
import logging
log = logging.getLogger( __name__ )
metadata = MetaData()
def display_migration_details():
print ""
print "This migration script adds a user actions table to Galaxy."
print ""
# New table to store user actions.
UserAction_table = Table( "user_action", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
Column( "action", Unicode( 255 ) ),
Column( "context", Unicode( 512 ) ),
Column( "params", Unicode( 1024 ) ) )
def upgrade(migrate_engine):
metadata.bind = migrate_engine
display_migration_details()
metadata.reflect()
try:
UserAction_table.create()
except Exception, e:
print str(e)
log.debug( "Creating user_action table failed: %s" % str( e ) )
def downgrade(migrate_engine):
metadata.bind = migrate_engine
metadata.reflect()
try:
UserAction_table.drop()
except Exception, e:
print str(e)
log.debug( "Dropping user_action table failed: %s" % str( e ) )
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/galaxy/model/migrate/versions/0029_user_actions.py
|
Python
|
gpl-3.0
| 1,371 | 0.030635 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import tornado.ioloop
try:
import WebApp
except (ImportError, ImportWarning):
import entire as WebApp
if __name__ == "__main__":
ip = os.environ['OPENSHIFT_DIY_IP']
port = int(os.environ['OPENSHIFT_DIY_PORT'])
WebApp.application.listen(port, ip)
tornado.ioloop.IOLoop.instance().start()
|
swoiow/iabe-tool
|
openshift.py
|
Python
|
mit
| 370 | 0.005405 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import shutil
import time
import subprocess
import numpy as np
from .phonopy_conf_creator import PhonopyConfCreator
from vasp.poscar import Poscar
from autotools import symlink_force
class PhononCalculator(object):
def __init__(self,
directory_data="./",
poscar_filename="POSCAR",
poscar_average_filename=None,
is_average_mass=False,
dim_sqs=None,
is_primitive=False,
is_band=True,
is_partial_dos=False,
is_tetrahedron=False,
is_tprop=False,
mesh=None,
nac=None):
if dim_sqs is None:
dim_sqs = np.array([1, 1, 1])
if mesh is None:
mesh = np.array([1, 1, 1])
self._variables = None
self._home = os.path.expanduser("~")
self._phonopy = subprocess.check_output(["which", "phonopy"]).strip()
print("phonopy_path:", self._phonopy)
self._directory_data = directory_data
self._poscar_filename = poscar_filename
self._poscar_average_filename = poscar_average_filename
self._is_average_mass = is_average_mass
self.set_dim_sqs(dim_sqs)
self._is_band = is_band
self.set_is_tetrahedron(is_tetrahedron)
self.set_is_partial_dos(is_partial_dos)
self.set_is_tprop(is_tprop)
self._is_primitive = is_primitive
self._mesh = np.array(mesh)
self._nac = nac
def set_dim_sqs(self, dim_sqs):
self._dim_sqs = dim_sqs
def set_is_tetrahedron(self, is_tetrahedron):
self._is_tetrahedron = is_tetrahedron
def set_is_partial_dos(self, is_partial_dos):
self._is_partial_dos = is_partial_dos
def set_is_tprop(self, is_tprop):
self._is_tprop = is_tprop
def set_mesh(self, mesh):
self._mesh = mesh
def set_variables(self, variables):
self._variables = variables
def run(self):
self.copy_files()
self.create_phonopy_conf()
conf_files = self.gather_conf_files()
for conf_file in conf_files:
self.run_phonopy(conf_file)
def copy_files(self):
dir_data = self._directory_data
symlink_force(os.path.join(dir_data, 'writefc.conf'), 'writefc.conf')
symlink_force(os.path.join(dir_data, 'POSCAR'), 'POSCAR')
symlink_force(os.path.join(dir_data, 'POSCAR_ideal'), 'POSCAR_ideal')
symlink_force(os.path.join(dir_data, 'FORCE_CONSTANTS'), 'FORCE_CONSTANTS')
def create_phonopy_conf(self):
directory_data = self._directory_data
dim_sqs = self._dim_sqs
variables = self._variables
mesh = self._mesh.copy()
print("directory_data:", directory_data)
print("mesh:", mesh)
spg_number = self.create_spg_number()
# Get band path for the specific space group
phonopy_conf_creator = PhonopyConfCreator(
spg_number,
mesh=mesh,
tmax=3000,
dim_sqs=dim_sqs,
is_average_mass=self._is_average_mass,
is_primitive=self._is_primitive,
band_points=101,
poscar_name="POSCAR", # For getting the chemical symbols
magmom_line=None,
variables=variables,
nac=self._nac,
)
phonopy_conf_creator.run()
def create_spg_number(self):
"""
spg_number is used to determine the primitive axis and band paths.
"""
if self._poscar_average_filename is not None:
poscar_filename = self._poscar_average_filename
else:
poscar_filename = self._poscar_filename
print('SPG number is searched from {}'.format(poscar_filename))
spg_number = Poscar(poscar_filename).get_symmetry_dataset()["number"]
print("spg_number:", spg_number)
return spg_number
def gather_conf_files(self):
conf_files = [
"dos_smearing.conf",
]
if self._is_band:
conf_files.append("band.conf")
if self._is_tetrahedron:
conf_files.append("dos_tetrahedron.conf")
if self._is_partial_dos:
conf_files.append("partial_dos_smearing.conf")
if self._is_tetrahedron and self._is_partial_dos:
conf_files.append("partial_dos_tetrahedron.conf")
if self._is_tprop:
conf_files.append("tprop.conf")
return conf_files
def run_phonopy(self, conf_file):
root = os.getcwd()
home = self._home
phonopy = self._phonopy
print("=" * 80)
print(conf_file)
print("=" * 80)
dir_name = conf_file.replace(".conf", "_calc")
log_file = conf_file.replace(".conf", ".log")
if os.path.exists(dir_name):
shutil.rmtree(dir_name)
os.mkdir(dir_name)
os.chdir(dir_name)
for fn in [conf_file, "POSCAR", "FORCE_CONSTANTS", "BORN"]:
if os.path.exists(os.path.join("..", fn)):
os.symlink("../" + fn, fn)
if os.path.exists(log_file):
os.remove(log_file)
time1 = time.time()
with open(log_file, "w") as f:
subprocess.call(
[phonopy, conf_file, "-v"],
stdout=f,
)
time2 = time.time()
dtime = time2 - time1
print("Time for calc.: {:12.6f} s".format(dtime))
if conf_file == "tprop.conf":
subprocess.call(
["python", home + "/script/python/phonopy_tprop_arranger.py"]
)
os.chdir(root)
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--datadir",
default="..",
type=str,
help="Data directory")
parser.add_argument("--tetrahedron",
action="store_true",
help="Calculate using tetrahedron method.")
parser.add_argument("--partial_dos",
action="store_true",
help="Calculate partial DOS.")
parser.add_argument("--tprop",
action="store_true",
help="Calculate thermal properties.")
args = parser.parse_args()
phonon_analyzer = PhononCalculator(
directory_data=args.datadir,
is_tetrahedron=args.tetrahedron,
is_partial_dos=args.partial_dos,
is_tprop=args.tprop,
)
phonon_analyzer.run()
if __name__ == "__main__":
main()
|
yuzie007/ph_analysis
|
ph_analysis/phonon_calculator.py
|
Python
|
mit
| 6,728 | 0.000297 |
# coding: utf-8
import sqlalchemy as sa
import pandas as pd
from niamoto.data_providers.base_occurrence_provider import \
BaseOccurrenceProvider
from niamoto.exceptions import MalformedDataSourceError
class SQLOccurrenceProvider(BaseOccurrenceProvider):
"""
    SQL occurrence provider. Instantiated with a SQL query that must return
AT LEAST the following columns:
id -> The provider's identifier for the occurrence.
taxon_id -> The provider's taxon id for the occurrence.
x -> The longitude of the occurrence (WGS84).
y -> The latitude of the occurrence (WGS84).
    All the remaining columns will be stored as properties.
"""
REQUIRED_COLUMNS = set(['id', 'taxon_id', 'x', 'y'])
def __init__(self, data_provider, occurrence_sql):
super(SQLOccurrenceProvider, self).__init__(data_provider)
self.occurrence_sql = occurrence_sql
def get_provider_occurrence_dataframe(self):
connection = sa.create_engine(self.data_provider.db_url).connect()
df = pd.read_sql(self.occurrence_sql, connection, index_col='id')
        cols = set(list(df.columns) + ['id'])
inter = cols.intersection(self.REQUIRED_COLUMNS)
if not inter == self.REQUIRED_COLUMNS:
m = "The queried data does not contains the required columns " \
"('id', 'taxon_id', 'x', 'y'), " \
"queried data has: {}".format(cols)
raise MalformedDataSourceError(m)
if len(df) == 0:
return df
property_cols = cols.difference(self.REQUIRED_COLUMNS)
if len(property_cols) > 0:
properties = df[list(property_cols)].apply(
lambda x: x.to_json(),
axis=1
)
else:
properties = '{}'
df.drop(property_cols, axis=1, inplace=True)
df['properties'] = properties
location = df[['x', 'y']].apply(
lambda x: "SRID=4326;POINT({} {})".format(x['x'], x['y']),
axis=1
)
df['location'] = location
df.drop(['x', 'y'], axis=1, inplace=True)
return df
|
dimitri-justeau/niamoto-core
|
niamoto/data_providers/sql_provider/sql_occurrence_provider.py
|
Python
|
gpl-3.0
| 2,139 | 0 |
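A hedged usage sketch for SQLOccurrenceProvider; the data-provider stand-in and the query below are hypothetical (the real provider object comes from niamoto, and BaseOccurrenceProvider may use more of it than the db_url attribute shown here):

# Sketch only: "StubDataProvider" is hypothetical; the query must return at
# least the columns id, taxon_id, x and y (WGS84 lon/lat).
class StubDataProvider(object):
    db_url = "postgresql://user:password@localhost/niamoto"

sql = ("SELECT occ_id AS id, taxon_id, lon AS x, lat AS y, height "
       "FROM occurrences")
provider = SQLOccurrenceProvider(StubDataProvider(), sql)
df = provider.get_provider_occurrence_dataframe()
# df now carries an EWKT 'location' column and a JSON 'properties' column.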
# -*- coding: utf-8 -*-
"""
The :mod:`AquaponicsModeler.model` module contains all components to be used in
models.
All model components are classes that should inherit from
:class:`BaseModelClass`. So far there are two groups of component types:
containers and pumps.
:class:`Containers <Container>` are components that contain water and have water
flowing in or out. They need to have another component before them in the
model, so water can flow from one container to the other.
As :class:`Containers <Container>` always need a source of water, the first
component in the model is a :class:`Pump`. There are several types of pumps,
but they all assume an infinite water source that they can pump from, and they
pump into a :class:`Container`.
"""
import logging
import collections
import copy
from PyElectronics.timers import AStable555
log = logging.getLogger("aquaponics.model")
class _PARAM_TYPES:
"""Constant holding the different parameter types."""
MODEL = 'Model Component Parameter'
INTEGER = 'Integer Parameter'
FLOAT = 'Float Parameter'
TEXT = 'Text Parameter'
class BaseModelClass(object):
"""
A base class for the model that other objects inherit from.
The BaseModelClass doesn't implement much except for general methods to
get the parameters for a component and to manage the state while stepping
through the model. The state is the main variable manipulated by the model.
For :class:`Pump` it contains the on/off state, while for
:class:`Containers <Container>` it contains the water volume of the
container.
"""
_PARAMS = collections.OrderedDict()
def __init__(self):
self.state = None
def __str__(self):
return self.__class__.__name__
def get_state(self):
"""
Get the current contents of this container.
Returns:
float: current state value
"""
return self.state
@classmethod
def getParameters(cls):
"""
Return the model parameters.
Returns:
collections.OrderedDict: The parameters for this class.
"""
log.debug('Getting parameters for class %s: %s' % (cls, cls._PARAMS))
return cls._PARAMS
def step(self):
"""Step into the next iteration of the model."""
raise NotImplementedError("Please implement a step instance method")
class SimpleContainer(BaseModelClass):
"""
A container in the aquaponics loop.
Each container is a container/tank/basin/growbed/etc containing a volume
of water, with possibly water flowing out into the next component and
flowing into it from the previous container in the loop.
    The inflow speed of each container is determined by the outflow speed of
    the previous container. A SimpleContainer drains at its fixed outflow
    speed at all times; threshold-gated draining is implemented by the
    Container subclass below.
"""
_PARAMS = {
'previous': (_PARAM_TYPES.MODEL, 'previous'),
'outflow': (_PARAM_TYPES.FLOAT, 'outflow (l/min)'),
'start_content': (_PARAM_TYPES.INTEGER, 'start content (l)')
}
def __init__(self, previous, outflow, start_content=0):
"""
Args:
previous (Container): The previous Container in the chain.
outflow (float): The outflow speed of this container.
start_content (int): The starting contents of the container.
"""
self.previous = previous
self.outflow = outflow
self.state = self.start_content = start_content
def get_current_outflow_speed(self):
"""
Determine the current flow speed of water from this container.
Returns:
float: The current outflow speed.
"""
return self.outflow
def get_current_inflow_speed(self):
"""
Determine the current speed of water flowing into this container.
This is determined by the outflow speed of the previous container.
Returns:
float: The current inflow speed.
"""
return self.previous.get_current_outflow_speed()
def step(self, time=10):
"""
Go through the next step of the simulation of this container.
Args:
time (int): The length of the next step in seconds.
"""
inflow = self.get_current_inflow_speed()
outflow = self.get_current_outflow_speed()
self.state += time / 60 * inflow - time / 60 * outflow
class Container(SimpleContainer):
    """
    A container that only starts draining once its contents reach the dump
    threshold.
    """
    _PARAMS = copy.deepcopy(SimpleContainer._PARAMS)
    _PARAMS['threshold'] = (_PARAM_TYPES.INTEGER, 'dump threshold (l)')
def __init__(self, previous, outflow, threshold, start_content=0):
"""
Args:
previous (Container): The previous Container in the chain.
outflow (float): The outflow speed of this container.
threshold (int): The threshold contents after which the container
outflow speed starts.
start_content (int): The starting contents of the container.
"""
self.previous = previous
self.outflow = outflow
self.threshold = threshold
self.state = self.start_content = start_content
def get_current_outflow_speed(self):
"""
Determine the current flow speed of water from this container.
Returns:
float: The current outflow speed.
"""
if self.state >= self.threshold:
return self.outflow
else:
return 0
class FloodDrainContainer(Container):
"""
This :class:`Container` will drain fully when the threshold has been
reached.
In other respects it works like other :class:`Containers <Container>` but
for the way it drains. A container with a U-siphon or bell siphon at the
end will only start draining when the waterlevel has reached a maximum.
When that happens, suction makes sure that all water is drained from the
container at the speed specified in outflow.
"""
def __init__(self, *args, **kwargs):
super(FloodDrainContainer, self).__init__(*args, **kwargs)
self.flooding = False
def get_current_outflow_speed(self):
"""
Return the current outlflow speed.
Outflow starts when self.threshold has been reached and will continue
at self.outflow speed until the container is empty.
Returns:
float: The outflow speed of this :class:`Container`
"""
if (self.flooding is True and self.state > 0)\
or self.state >= self.threshold:
self.flooding = True
return self.outflow
else:
self.flooding = False
return 0
class Pump(BaseModelClass):
"""
A general Pump object.
It pumps water into the system (from an unlimited source) and has a
constant outflow speed. It doesn't have contents (unlike containers for
instance). The state attribute contains the on (1) or off (0) state
of the pump, which is also what is plotted in the resulting graphs.
"""
_PARAMS = {
'outflow': (_PARAM_TYPES.FLOAT, 'outflow (l/min)'),
}
def __init__(self, outflow):
"""
Args:
outflow (float): The speed at which the pump pumps.
"""
self.outflow = outflow
self.state = 1
def get_current_outflow_speed(self):
"""
Return the pump speed of this pump.
Returns:
float: The outflow speed of this pump in L/min.
"""
return self.outflow
def step(self, time=10):
"""
Go through the next step of the pump state and return that state.
Args:
time (int): The time in seconds for which the pump state should be
returned.
Returns:
int: The state of the pump. 1=on 0=off.
"""
return self.state
class WaterSource(BaseModelClass):
"""
A general Water Source object.
    Water flows at a constant speed from a source (a spring or similar).
It doesn't have contents (unlike containers for instance).
"""
_PARAMS = {
'outflow': (_PARAM_TYPES.FLOAT, 'outflow (l/min)'),
}
def __init__(self, outflow):
"""
Args:
outflow (float): The speed at which the watersource flows.
"""
self.outflow = outflow
self.state = None
def get_current_outflow_speed(self):
"""
        Return the flow speed of this water source.
Returns:
float: The outflow speed of this source in L/min.
"""
return self.outflow
def step(self, time=10):
"""
Go through the next step of the source.
Args:
            time (int): The length of the next step in seconds.
"""
return
class TimedPump(Pump):
"""
A pump like the Pump object.
This pump has timing parameters which periodically switch it on and off.
This way the outflow speed of the pump is controlled. If it is on, it
equals the outflow speed parameter, else it is 0.
"""
_PARAMS = copy.deepcopy(Pump._PARAMS)
_PARAMS['ontime'] = (_PARAM_TYPES.FLOAT, 'on time (min)')
_PARAMS['offtime'] = (_PARAM_TYPES.FLOAT, 'off time (min)')
def __init__(self, ontime, offtime, outflow):
"""
Args:
ontime (float): The time in minutes the pump spends pumping.
offtime (float): The time in minutes the pump is off.
outflow (float): The speed at which the pump pumps in L/min.
"""
self.ontime = ontime * 60
self.offtime = offtime * 60
self.outflow = outflow
self.time_since_switch = 0
self.state = 1
def get_current_outflow_speed(self):
"""
Return the current outflow (pump) speed.
It is determined by a timed switch that toggles the pump on and off.
Returns:
float: The outflow speed in L/min
"""
log.debug("state %i, time since switch %i, ontime %i, offtime %i" %
(self.state, self.time_since_switch, self.ontime,
self.offtime))
if self.state == 1 and self.time_since_switch < self.ontime:
outflow = self.outflow
elif self.state == 0 and self.time_since_switch >= self.offtime:
outflow = self.outflow
elif self.state == 0 and self.time_since_switch < self.offtime:
outflow = 0
elif self.state == 1 and self.time_since_switch >= self.ontime:
outflow = 0
logging.debug("Returning outflow %0.2f" % outflow)
return outflow
def step(self, time=10):
"""
        Advance the on/off state of the pump by one time step.
        Args:
            time (int): The length of the next step in seconds.
"""
if (self.state == 0 and self.time_since_switch >= self.offtime) or\
(self.state == 1 and self.time_since_switch >= self.ontime):
log.debug("Switching pump state to %i " % (self.state ^ 1))
self.state = self.state ^ 1
self.time_since_switch = 0
else:
log.debug("Keeping pump state at %i " % self.state)
self.time_since_switch += time
log.debug("Pump at state %i for %i sec" %
(self.state, self.time_since_switch))
class Timed555Pump(TimedPump):
"""
A pump like the :class:`TimedPump` object.
This pump gets resistor and capacitor values as input parameters instead of
the actual ontime and offtime. This object assumes a 555 timer circuit in
    astable mode is used to switch the pump on and off. A relay is used for
    the actual switching, which is on when the timer is high.
    The resistor and capacitor values of the timer determine the on and off
    times.
"""
_PARAMS = copy.deepcopy(Pump._PARAMS)
_PARAMS['r1'] = (_PARAM_TYPES.FLOAT, 'Resistor 1 value (KOhm)')
_PARAMS['r2'] = (_PARAM_TYPES.FLOAT, 'Resistor 2 value (KOhm)')
_PARAMS['c'] = (_PARAM_TYPES.INTEGER, 'The capacitor value (uF)')
def __init__(self, r1, r2, c, outflow):
"""
Args:
            r1 (float): The value in kOhm of resistor 1 for the 555 timer.
            r2 (float): The value in kOhm of resistor 2 for the 555 timer.
c (int): The value of the capacitor in uF for the 555 timer
outflow (float): The speed at which the pump pumps in L/min.
"""
self.c = c
self.r1 = r1
self.r2 = r2
ontime = AStable555.timeHigh(r1, r2, c)
offtime = AStable555.timeLow(r2, c)
log.debug("Got ontime %i" % ontime)
log.debug("Got offtime %i" % offtime)
self.ontime = ontime
self.offtime = offtime
self.outflow = outflow
self.time_since_switch = 0
self.state = 1
class InvTimed555Pump(TimedPump):
"""
An inverted version of the :class:`Timed555Pump` object.
It works very similar, but the relay is inverted. The normally-off side
of the relay is used to switch the pump off when the timer is high.
"""
_PARAMS = copy.deepcopy(Pump._PARAMS)
_PARAMS['r1'] = (_PARAM_TYPES.FLOAT, 'Resistor 1 value (KOhm)')
_PARAMS['r2'] = (_PARAM_TYPES.FLOAT, 'Resistor 2 value (KOhm)')
_PARAMS['c'] = (_PARAM_TYPES.INTEGER, 'The capacitor value (uF)')
def __init__(self, r1, r2, c, outflow):
"""
Args:
            r1 (float): The value in kOhm of resistor 1 for the 555 timer.
            r2 (float): The value in kOhm of resistor 2 for the 555 timer.
c (int): The value of the capacitor in uF for the 555 timer
outflow (float): The speed at which the pump pumps in L/min.
"""
self.c = c
self.r1 = r1
self.r2 = r2
ontime = AStable555.timeLow(r2, c)
offtime = AStable555.timeHigh(r1, r2, c)
log.debug("Got ontime %i" % ontime)
log.debug("Got offtime %i" % offtime)
self.ontime = ontime
self.offtime = offtime
self.outflow = outflow
self.time_since_switch = 0
self.state = 1
def get_components():
"""
Get all available component types.
Returns:
list: Return a list of all component classes.
"""
return [Container, FloodDrainContainer, Pump, TimedPump, Timed555Pump,
InvTimed555Pump, WaterSource, SimpleContainer]
__all__ = ['BaseModelClass', 'Container', 'FloodDrainContainer', 'Pump',
'TimedPump', 'Timed555Pump', 'InvTimed555Pump', 'WaterSource',
'SimpleContainer', 'get_components', '_PARAM_TYPES']
|
dolfandringa/AquaponicsModeler
|
AquaponicsModeler/model.py
|
Python
|
gpl-3.0
| 14,950 | 0.000401 |
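A minimal simulation sketch wiring the components above together; the import works only if PyElectronics is installed (model.py imports it at the top), and the parameter values are arbitrary:

# Sketch only: a constant Pump feeding a threshold-gated Container,
# stepped for one hour in 10-second increments.
from AquaponicsModeler.model import Pump, Container

pump = Pump(outflow=2.0)                      # pumps 2 l/min
tank = Container(previous=pump, outflow=4.0,  # drains 4 l/min once full
                 threshold=20, start_content=0)
for _ in range(360):                          # 360 * 10 s = 1 hour
    pump.step(time=10)
    tank.step(time=10)
print(tank.get_state())                       # water volume in liters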
print ("How old are you?",)
age = input()
print ("How tall are you?",)
height = input()
print ("How much do you weigh?",)
weight = input()
print ("So, you are %r years old, %r tall and %r heavy." %(age, height, weight))
|
SunWalter/Hard
|
ex11.py
|
Python
|
apache-2.0
| 231 | 0.025974 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Hacky way to make sure imports work'''
from os.path import abspath, dirname, realpath, join
import sys
# This allows imports to work, even if sim_game is not in python path:
package_location = abspath(join(dirname(realpath(__file__)) , ".."))
sys.path.insert(0, package_location)
|
olehermanse/sim_game
|
tests/path_fix.py
|
Python
|
mit
| 331 | 0.003021 |
# -*- Mode: Python; python-indent-offset: 4 -*-
#
# Time-stamp: <2017-06-03 11:36:32 alex>
#
# --------------------------------------------------------------------
# PiProbe
# Copyright (C) 2016-2017 Alexandre Chauvin Hameau <ach@meta-x.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------
"""
database package, redis and test modules
"""
from . import dbRedis
from . import dbTest
|
achauvinhameau/netProbe
|
py-net-probe/database/__init__.py
|
Python
|
gpl-3.0
| 1,035 | 0 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import json
import datetime
import mimetypes
import os
import frappe
from frappe import _
import frappe.model.document
import frappe.utils
import frappe.sessions
import werkzeug.utils
from werkzeug.local import LocalProxy
from werkzeug.wsgi import wrap_file
from werkzeug.wrappers import Response
from werkzeug.exceptions import NotFound, Forbidden
def report_error(status_code):
if (status_code!=404 or frappe.conf.logging) and not frappe.local.flags.disable_traceback:
frappe.errprint(frappe.utils.get_traceback())
response = build_response("json")
response.status_code = status_code
return response
def build_response(response_type=None):
if "docs" in frappe.local.response and not frappe.local.response.docs:
del frappe.local.response["docs"]
response_type_map = {
'csv': as_csv,
'download': as_raw,
'json': as_json,
'page': as_page,
'redirect': redirect
}
return response_type_map[frappe.response.get('type') or response_type]()
def as_csv():
response = Response()
response.headers[b"Content-Type"] = b"text/csv; charset: utf-8"
response.headers[b"Content-Disposition"] = ("attachment; filename=\"%s.csv\"" % frappe.response['doctype'].replace(' ', '_')).encode("utf-8")
response.data = frappe.response['result']
return response
def as_raw():
response = Response()
response.headers[b"Content-Type"] = frappe.response.get("content_type") or mimetypes.guess_type(frappe.response['filename'])[0] or b"application/unknown"
response.headers[b"Content-Disposition"] = ("filename=\"%s\"" % frappe.response['filename'].replace(' ', '_')).encode("utf-8")
response.data = frappe.response['filecontent']
return response
def as_json():
make_logs()
response = Response()
if frappe.local.response.http_status_code:
response.status_code = frappe.local.response['http_status_code']
del frappe.local.response['http_status_code']
response.headers[b"Content-Type"] = b"application/json; charset: utf-8"
response.data = json.dumps(frappe.local.response, default=json_handler, separators=(',',':'))
return response
def make_logs(response=None):
"""make strings for msgprint and errprint"""
if not response:
response = frappe.local.response
if frappe.error_log:
# frappe.response['exc'] = json.dumps("\n".join([cstr(d) for d in frappe.error_log]))
response['exc'] = json.dumps([frappe.utils.cstr(d) for d in frappe.local.error_log])
if frappe.local.message_log:
response['_server_messages'] = json.dumps([frappe.utils.cstr(d) for
d in frappe.local.message_log])
if frappe.debug_log and frappe.conf.get("logging") or False:
response['_debug_messages'] = json.dumps(frappe.local.debug_log)
def json_handler(obj):
"""serialize non-serializable data for json"""
# serialize date
if isinstance(obj, (datetime.date, datetime.timedelta, datetime.datetime)):
return unicode(obj)
elif isinstance(obj, LocalProxy):
return unicode(obj)
elif isinstance(obj, frappe.model.document.BaseDocument):
doc = obj.as_dict(no_nulls=True)
return doc
else:
raise TypeError, """Object of type %s with value of %s is not JSON serializable""" % \
(type(obj), repr(obj))
def as_page():
"""print web page"""
from frappe.website.render import render
return render(frappe.response['page_name'], http_status_code=frappe.response.get("http_status_code"))
def redirect():
return werkzeug.utils.redirect(frappe.response.location)
def download_backup(path):
try:
frappe.only_for(("System Manager", "Administrator"))
except frappe.PermissionError:
raise Forbidden(_("You need to be logged in and have System Manager Role to be able to access backups."))
return send_private_file(path)
def send_private_file(path):
path = os.path.join(frappe.local.conf.get('private_path', 'private'), path.strip("/"))
if frappe.local.request.headers.get('X-Use-X-Accel-Redirect'):
path = '/' + path
response = Response()
response.headers[b'X-Accel-Redirect'] = path
else:
filename = os.path.basename(path)
filepath = frappe.utils.get_site_path(path)
try:
f = open(filepath, 'rb')
except IOError:
raise NotFound
response = Response(wrap_file(frappe.local.request.environ, f))
response.headers.add(b'Content-Disposition', 'attachment', filename=filename.encode("utf-8"))
response.headers[b'Content-Type'] = mimetypes.guess_type(filename)[0] or b'application/octet-stream'
return response
def handle_session_stopped():
response = Response("""<html>
<body style="background-color: #EEE;">
<h3 style="width: 900px; background-color: #FFF; border: 2px solid #AAA; padding: 20px; font-family: Arial; margin: 20px auto">
Updating.
We will be back in a few moments...
</h3>
</body>
</html>""")
response.status_code = 503
response.content_type = 'text/html'
return response
|
gangadharkadam/saloon_frappe
|
frappe/utils/response.py
|
Python
|
mit
| 4,943 | 0.025086 |
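A small illustration of json_handler from the module above; it targets the same Python 2 era as the file (json_handler calls unicode), and the payload is made up:

# Sketch only (Python 2): json_handler lets json.dumps serialize dates
# that the stdlib json module would otherwise reject.
import json
import datetime
from frappe.utils.response import json_handler

payload = {"created": datetime.datetime(2015, 1, 1, 12, 0)}
print json.dumps(payload, default=json_handler)
# -> {"created": "2015-01-01 12:00:00"}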
from goto_file2 import foo
foo
|
indianajohn/ycmd
|
ycmd/tests/python/testdata/goto_file1.py
|
Python
|
gpl-3.0
| 31 | 0 |
"""
Tests for `pyleset` module.
"""
import pytest
from pyleset import pyleset
class TestPyleset(object):
@classmethod
def setup_class(cls):
pass
def test_something(self):
pass
@classmethod
def teardown_class(cls):
pass
|
westurner/pyleset
|
test/test_pyleset.py
|
Python
|
mit
| 267 | 0.003745 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import unittest
import jsbeautifier
class TestJSBeautifier(unittest.TestCase):
def test_unescape(self):
# Test cases contributed by <chrisjshull on GitHub.com>
test_fragment = self.decodesto
bt = self.bt
bt('"\\\\s"'); # == "\\s" in the js source
bt("'\\\\s'"); # == '\\s' in the js source
bt("'\\\\\\s'"); # == '\\\s' in the js source
bt("'\\s'"); # == '\s' in the js source
bt('"•"');
bt('"—"');
bt('"\\x41\\x42\\x43\\x01"', '"\\x41\\x42\\x43\\x01"');
bt('"\\u2022"', '"\\u2022"');
bt('a = /\s+/')
#bt('a = /\\x41/','a = /A/')
bt('"\\u2022";a = /\s+/;"\\x41\\x42\\x43\\x01".match(/\\x41/);','"\\u2022";\na = /\s+/;\n"\\x41\\x42\\x43\\x01".match(/\\x41/);')
bt('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"', '"\\x22\\x27", \'\\x22\\x27\', "\\x5c", \'\\x5c\', "\\xff and \\xzz", "unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"');
self.options.unescape_strings = True
bt('"\\x41\\x42\\x43\\x01"', '"ABC\\x01"');
bt('"\\u2022"', '"\\u2022"');
bt('a = /\s+/')
bt('"\\u2022";a = /\s+/;"\\x41\\x42\\x43\\x01".match(/\\x41/);','"\\u2022";\na = /\s+/;\n"ABC\\x01".match(/\\x41/);')
bt('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"', '"\\"\'", \'"\\\'\', "\\\\", \'\\\\\', "\\xff and \\xzz", "unicode \\u0000 \\" \' \\\\ \\uffff \\uzzzz"');
self.options.unescape_strings = False
def test_beautifier(self):
test_fragment = self.decodesto
bt = self.bt
bt('');
bt('return .5');
test_fragment(' return .5');
bt('a = 1', 'a = 1');
bt('a=1', 'a = 1');
bt("a();\n\nb();", "a();\n\nb();");
bt('var a = 1 var b = 2', "var a = 1\nvar b = 2");
bt('var a=1, b=c[d], e=6;', 'var a = 1,\n b = c[d],\n e = 6;');
bt('a = " 12345 "');
bt("a = ' 12345 '");
bt('if (a == 1) b = 2;', "if (a == 1) b = 2;");
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}");
bt('if(1||2);', 'if (1 || 2);');
bt('(a==1)||(b==2)', '(a == 1) || (b == 2)');
bt('var a = 1 if (2) 3;', "var a = 1\nif (2) 3;");
bt('a = a + 1');
bt('a = a == 1');
bt('/12345[^678]*9+/.match(a)');
bt('a /= 5');
bt('a = 0.5 * 3');
bt('a *= 10.55');
bt('a < .5');
bt('a <= .5');
bt('a<.5', 'a < .5');
bt('a<=.5', 'a <= .5');
bt('a = 0xff;');
bt('a=0xff+4', 'a = 0xff + 4');
bt('a = [1, 2, 3, 4]');
bt('F*(g/=f)*g+b', 'F * (g /= f) * g + b');
bt('a.b({c:d})', "a.b({\n c: d\n})");
bt('a.b\n(\n{\nc:\nd\n}\n)', "a.b({\n c: d\n})");
bt('a=!b', 'a = !b');
bt('a?b:c', 'a ? b : c');
bt('a?1:2', 'a ? 1 : 2');
bt('a?(b):c', 'a ? (b) : c');
bt('x={a:1,b:w=="foo"?x:y,c:z}', 'x = {\n a: 1,\n b: w == "foo" ? x : y,\n c: z\n}');
bt('x=a?b?c?d:e:f:g;', 'x = a ? b ? c ? d : e : f : g;');
bt('x=a?b?c?d:{e1:1,e2:2}:f:g;', 'x = a ? b ? c ? d : {\n e1: 1,\n e2: 2\n} : f : g;');
bt('function void(void) {}');
bt('if(!a)foo();', 'if (!a) foo();');
bt('a=~a', 'a = ~a');
bt('a;/*comment*/b;', "a; /*comment*/\nb;");
bt('a;/* comment */b;', "a; /* comment */\nb;");
test_fragment('a;/*\ncomment\n*/b;', "a;\n/*\ncomment\n*/\nb;"); # simple comments don't get touched at all
bt('a;/**\n* javadoc\n*/b;', "a;\n/**\n * javadoc\n */\nb;");
test_fragment('a;/**\n\nno javadoc\n*/b;', "a;\n/**\n\nno javadoc\n*/\nb;");
bt('a;/*\n* javadoc\n*/b;', "a;\n/*\n * javadoc\n */\nb;"); # comment blocks detected and reindented even w/o javadoc starter
bt('if(a)break;', "if (a) break;");
bt('if(a){break}', "if (a) {\n break\n}");
bt('if((a))foo();', 'if ((a)) foo();');
bt('for(var i=0;;) a', 'for (var i = 0;;) a');
bt('for(var i=0;;)\na', 'for (var i = 0;;)\n a');
bt('a++;', 'a++;');
bt('for(;;i++)a()', 'for (;; i++) a()');
bt('for(;;i++)\na()', 'for (;; i++)\n a()');
bt('for(;;++i)a', 'for (;; ++i) a');
bt('return(1)', 'return (1)');
bt('try{a();}catch(b){c();}finally{d();}', "try {\n a();\n} catch (b) {\n c();\n} finally {\n d();\n}");
bt('(xx)()'); # magic function call
bt('a[1]()'); # another magic function call
bt('if(a){b();}else if(c) foo();', "if (a) {\n b();\n} else if (c) foo();");
bt('switch(x) {case 0: case 1: a(); break; default: break}', "switch (x) {\n case 0:\n case 1:\n a();\n break;\n default:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}', 'switch (x) {\n case -1:\n break;\n case !y:\n break;\n}');
bt('a !== b');
bt('if (a) b(); else c();', "if (a) b();\nelse c();");
bt("// comment\n(function something() {})"); # typical greasemonkey start
bt("{\n\n x();\n\n}"); # was: duplicating newlines
bt('if (a in b) foo();');
bt('var a, b;');
# bt('var a, b');
bt('{a:1, b:2}', "{\n a: 1,\n b: 2\n}");
bt('a={1:[-1],2:[+1]}', 'a = {\n 1: [-1],\n 2: [+1]\n}');
bt('var l = {\'a\':\'1\', \'b\':\'2\'}', "var l = {\n 'a': '1',\n 'b': '2'\n}");
bt('if (template.user[n] in bk) foo();');
bt('{{}/z/}', "{\n {}\n /z/\n}");
bt('return 45', "return 45");
bt('If[1]', "If[1]");
bt('Then[1]', "Then[1]");
bt('a = 1e10', "a = 1e10");
bt('a = 1.3e10', "a = 1.3e10");
bt('a = 1.3e-10', "a = 1.3e-10");
bt('a = -1.3e-10', "a = -1.3e-10");
bt('a = 1e-10', "a = 1e-10");
bt('a = e - 10', "a = e - 10");
bt('a = 11-10', "a = 11 - 10");
bt("a = 1;// comment", "a = 1; // comment");
bt("a = 1; // comment", "a = 1; // comment");
bt("a = 1;\n // comment", "a = 1;\n// comment");
bt('a = [-1, -1, -1]');
# The exact formatting these should have is open for discussion, but they are at least reasonable
bt('a = [ // comment\n -1, -1, -1\n]');
bt('var a = [ // comment\n -1, -1, -1\n]');
bt('a = [ // comment\n -1, // comment\n -1, -1\n]');
bt('var a = [ // comment\n -1, // comment\n -1, -1\n]');
bt('o = [{a:b},{c:d}]', 'o = [{\n a: b\n}, {\n c: d\n}]');
bt("if (a) {\n do();\n}"); # was: extra space appended
bt("if (a) {\n// comment\n}else{\n// comment\n}", "if (a) {\n // comment\n} else {\n // comment\n}"); # if/else statement with empty body
bt("if (a) {\n// comment\n// comment\n}", "if (a) {\n // comment\n // comment\n}"); # multiple comments indentation
bt("if (a) b() else c();", "if (a) b()\nelse c();");
bt("if (a) b() else if c() d();", "if (a) b()\nelse if c() d();");
bt("{}");
bt("{\n\n}");
bt("do { a(); } while ( 1 );", "do {\n a();\n} while (1);");
bt("do {} while (1);");
bt("do {\n} while (1);", "do {} while (1);");
bt("do {\n\n} while (1);");
bt("var a = x(a, b, c)");
bt("delete x if (a) b();", "delete x\nif (a) b();");
bt("delete x[x] if (a) b();", "delete x[x]\nif (a) b();");
bt("for(var a=1,b=2)d", "for (var a = 1, b = 2) d");
bt("for(var a=1,b=2,c=3) d", "for (var a = 1, b = 2, c = 3) d");
bt("for(var a=1,b=2,c=3;d<3;d++)\ne", "for (var a = 1, b = 2, c = 3; d < 3; d++)\n e");
bt("function x(){(a||b).c()}", "function x() {\n (a || b).c()\n}");
bt("function x(){return - 1}", "function x() {\n return -1\n}");
bt("function x(){return ! a}", "function x() {\n return !a\n}");
# a common snippet in jQuery plugins
bt("settings = $.extend({},defaults,settings);", "settings = $.extend({}, defaults, settings);");
bt('{xxx;}()', '{\n xxx;\n}()');
bt("a = 'a'\nb = 'b'");
bt("a = /reg/exp");
bt("a = /reg/");
bt('/abc/.test()');
bt('/abc/i.test()');
bt("{/abc/i.test()}", "{\n /abc/i.test()\n}");
bt('var x=(a)/a;', 'var x = (a) / a;');
bt('x != -1', 'x != -1');
bt('for (; s-->0;)t', 'for (; s-- > 0;) t');
bt('for (; s++>0;)u', 'for (; s++ > 0;) u');
bt('a = s++>s--;', 'a = s++ > s--;');
bt('a = s++>--s;', 'a = s++ > --s;');
bt('{x=#1=[]}', '{\n x = #1=[]\n}');
bt('{a:#1={}}', '{\n a: #1={}\n}');
bt('{a:#1#}', '{\n a: #1#\n}');
test_fragment('"incomplete-string');
test_fragment("'incomplete-string");
test_fragment('/incomplete-regex');
test_fragment('{a:1},{a:2}', '{\n a: 1\n}, {\n a: 2\n}');
test_fragment('var ary=[{a:1}, {a:2}];', 'var ary = [{\n a: 1\n}, {\n a: 2\n}];');
test_fragment('{a:#1', '{\n a: #1'); # incomplete
test_fragment('{a:#', '{\n a: #'); # incomplete
test_fragment('}}}', '}\n}\n}'); # incomplete
test_fragment('<!--\nvoid();\n// -->', '<!--\nvoid();\n// -->');
test_fragment('a=/regexp', 'a = /regexp'); # incomplete regexp
bt('{a:#1=[],b:#1#,c:#999999#}', '{\n a: #1=[],\n b: #1#,\n c: #999999#\n}');
bt("a = 1e+2");
bt("a = 1e-2");
bt("do{x()}while(a>1)", "do {\n x()\n} while (a > 1)");
bt("x(); /reg/exp.match(something)", "x();\n/reg/exp.match(something)");
test_fragment("something();(", "something();\n(");
test_fragment("#!she/bangs, she bangs\nf=1", "#!she/bangs, she bangs\n\nf = 1");
test_fragment("#!she/bangs, she bangs\n\nf=1", "#!she/bangs, she bangs\n\nf = 1");
test_fragment("#!she/bangs, she bangs\n\n/* comment */", "#!she/bangs, she bangs\n\n/* comment */");
test_fragment("#!she/bangs, she bangs\n\n\n/* comment */", "#!she/bangs, she bangs\n\n\n/* comment */");
test_fragment("#", "#");
test_fragment("#!", "#!");
bt("function namespace::something()");
test_fragment("<!--\nsomething();\n-->", "<!--\nsomething();\n-->");
test_fragment("<!--\nif(i<0){bla();}\n-->", "<!--\nif (i < 0) {\n bla();\n}\n-->");
bt('{foo();--bar;}', '{\n foo();\n --bar;\n}');
bt('{foo();++bar;}', '{\n foo();\n ++bar;\n}');
bt('{--bar;}', '{\n --bar;\n}');
bt('{++bar;}', '{\n ++bar;\n}');
# Handling of newlines around unary ++ and -- operators
bt('{foo\n++bar;}', '{\n foo\n ++bar;\n}');
bt('{foo++\nbar;}', '{\n foo++\n bar;\n}');
# This is invalid, but harder to guard against. Issue #203.
bt('{foo\n++\nbar;}', '{\n foo\n ++\n bar;\n}');
# regexps
bt('a(/abc\\/\\/def/);b()', "a(/abc\\/\\/def/);\nb()");
bt('a(/a[b\\[\\]c]d/);b()', "a(/a[b\\[\\]c]d/);\nb()");
test_fragment('a(/a[b\\[', "a(/a[b\\["); # incomplete char class
# allow unescaped / in char classes
bt('a(/[a/b]/);b()', "a(/[a/b]/);\nb()");
bt('a=[[1,2],[4,5],[7,8]]', "a = [\n [1, 2],\n [4, 5],\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n function() {},\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n [7, 8]\n]");
bt('a=[b,c,function(){},function(){},d]',
"a = [b, c,\n function() {},\n function() {},\n d\n]");
bt('a=[a[1],b[4],c[d[7]]]', "a = [a[1], b[4], c[d[7]]]");
bt('[1,2,[3,4,[5,6],7],8]', "[1, 2, [3, 4, [5, 6], 7], 8]");
bt('[[["1","2"],["3","4"]],[["5","6","7"],["8","9","0"]],[["1","2","3"],["4","5","6","7"],["8","9","0"]]]',
'[\n [\n ["1", "2"],\n ["3", "4"]\n ],\n [\n ["5", "6", "7"],\n ["8", "9", "0"]\n ],\n [\n ["1", "2", "3"],\n ["4", "5", "6", "7"],\n ["8", "9", "0"]\n ]\n]');
bt('{[x()[0]];indent;}', '{\n [x()[0]];\n indent;\n}');
bt('return ++i', 'return ++i');
bt('return !!x', 'return !!x');
bt('return !x', 'return !x');
bt('return [1,2]', 'return [1, 2]');
bt('return;', 'return;');
bt('return\nfunc', 'return\nfunc');
bt('catch(e)', 'catch (e)');
bt('var a=1,b={foo:2,bar:3},{baz:4,wham:5},c=4;',
'var a = 1,\n b = {\n foo: 2,\n bar: 3\n }, {\n baz: 4,\n wham: 5\n }, c = 4;');
bt('var a=1,b={foo:2,bar:3},{baz:4,wham:5},\nc=4;',
'var a = 1,\n b = {\n foo: 2,\n bar: 3\n }, {\n baz: 4,\n wham: 5\n },\n c = 4;');
# inline comment
bt('function x(/*int*/ start, /*string*/ foo)', 'function x( /*int*/ start, /*string*/ foo)');
# javadoc comment
bt('/**\n* foo\n*/', '/**\n * foo\n */');
bt('{\n/**\n* foo\n*/\n}', '{\n /**\n * foo\n */\n}');
bt('var a,b,c=1,d,e,f=2;', 'var a, b, c = 1,\n d, e, f = 2;');
bt('var a,b,c=[],d,e,f=2;', 'var a, b, c = [],\n d, e, f = 2;');
bt('function() {\n var a, b, c, d, e = [],\n f;\n}');
bt('do/regexp/;\nwhile(1);', 'do /regexp/;\nwhile (1);'); # hmmm
bt('var a = a,\na;\nb = {\nb\n}', 'var a = a,\n a;\nb = {\n b\n}');
bt('var a = a,\n /* c */\n b;');
bt('var a = a,\n // c\n b;');
bt('foo.("bar");'); # weird element referencing
bt('if (a) a()\nelse b()\nnewline()');
bt('if (a) a()\nnewline()');
bt('a=typeof(x)', 'a = typeof(x)');
bt('var a = function() {\n return null;\n},\n b = false;');
bt('var a = function() {\n func1()\n}');
bt('var a = function() {\n func1()\n}\nvar b = function() {\n func2()\n}');
self.options.jslint_happy = True
bt('x();\n\nfunction(){}', 'x();\n\nfunction () {}');
bt('function () {\n var a, b, c, d, e = [],\n f;\n}');
bt('switch(x) {case 0: case 1: a(); break; default: break}',
"switch (x) {\ncase 0:\ncase 1:\n a();\n break;\ndefault:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}',
'switch (x) {\ncase -1:\n break;\ncase !y:\n break;\n}');
test_fragment("// comment 1\n(function()", "// comment 1\n(function ()"); # typical greasemonkey start
bt('var o1=$.extend(a);function(){alert(x);}', 'var o1 = $.extend(a);\n\nfunction () {\n alert(x);\n}');
bt('a=typeof(x)', 'a = typeof (x)');
self.options.jslint_happy = False
bt('switch(x) {case 0: case 1: a(); break; default: break}',
"switch (x) {\n case 0:\n case 1:\n a();\n break;\n default:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}',
'switch (x) {\n case -1:\n break;\n case !y:\n break;\n}');
test_fragment("// comment 2\n(function()", "// comment 2\n(function()"); # typical greasemonkey start
bt("var a2, b2, c2, d2 = 0, c = function() {}, d = '';", "var a2, b2, c2, d2 = 0,\n c = function() {}, d = '';");
bt("var a2, b2, c2, d2 = 0, c = function() {},\nd = '';", "var a2, b2, c2, d2 = 0,\n c = function() {},\n d = '';");
bt('var o2=$.extend(a);function(){alert(x);}', 'var o2 = $.extend(a);\n\nfunction() {\n alert(x);\n}');
bt('{"x":[{"a":1,"b":3},7,8,8,8,8,{"b":99},{"a":11}]}', '{\n "x": [{\n "a": 1,\n "b": 3\n },\n 7, 8, 8, 8, 8, {\n "b": 99\n }, {\n "a": 11\n }\n ]\n}');
bt('{"1":{"1a":"1b"},"2"}', '{\n "1": {\n "1a": "1b"\n },\n "2"\n}');
bt('{a:{a:b},c}', '{\n a: {\n a: b\n },\n c\n}');
bt('{[y[a]];keep_indent;}', '{\n [y[a]];\n keep_indent;\n}');
bt('if (x) {y} else { if (x) {y}}', 'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}');
bt('if (foo) one()\ntwo()\nthree()');
bt('if (1 + foo() && bar(baz()) / 2) one()\ntwo()\nthree()');
bt('if (1 + foo() && bar(baz()) / 2) one();\ntwo();\nthree();');
self.options.indent_size = 1;
self.options.indent_char = ' ';
bt('{ one_char() }', "{\n one_char()\n}");
bt('var a,b=1,c=2', 'var a, b = 1,\n c = 2');
self.options.indent_size = 4;
self.options.indent_char = ' ';
bt('{ one_char() }', "{\n one_char()\n}");
self.options.indent_size = 1;
self.options.indent_char = "\t";
bt('{ one_char() }', "{\n\tone_char()\n}");
bt('x = a ? b : c; x;', 'x = a ? b : c;\nx;');
self.options.indent_size = 4;
self.options.indent_char = ' ';
self.options.preserve_newlines = False;
bt('var\na=dont_preserve_newlines;', 'var a = dont_preserve_newlines;');
# make sure the blank line between function definitions stays
# even when preserve_newlines = False
bt('function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}');
bt('function foo() {\n return 1;\n}\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
);
bt('function foo() {\n return 1;\n}\n\n\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
);
self.options.preserve_newlines = True;
bt('var\na=do_preserve_newlines;', 'var\na = do_preserve_newlines;')
bt('// a\n// b\n\n// c\n// d')
bt('if (foo) // comment\n{\n bar();\n}')
self.options.keep_array_indentation = False;
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f'\n]");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i'\n]");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i'\n]");
bt('var x = [{}\n]', 'var x = [{}]');
bt('var x = [{foo:bar}\n]', 'var x = [{\n foo: bar\n}]');
bt("a = ['something',\n 'completely',\n 'different'];\nif (x);",
"a = ['something',\n 'completely',\n 'different'\n];\nif (x);");
bt("a = ['a','b','c']", "a = ['a', 'b', 'c']");
bt("a = ['a', 'b','c']", "a = ['a', 'b', 'c']");
bt("x = [{'a':0}]",
"x = [{\n 'a': 0\n}]");
bt('{a([[a1]], {b;});}',
'{\n a([\n [a1]\n ], {\n b;\n });\n}');
bt("a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();",
"a();\n[\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n].toString();");
bt("function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}",
"function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}");
self.options.keep_array_indentation = True;
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f']");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']");
bt('var x = [{}\n]', 'var x = [{}\n]');
bt('var x = [{foo:bar}\n]', 'var x = [{\n foo: bar\n }\n]');
bt("a = ['something',\n 'completely',\n 'different'];\nif (x);");
bt("a = ['a','b','c']", "a = ['a', 'b', 'c']");
bt("a = ['a', 'b','c']", "a = ['a', 'b', 'c']");
bt("x = [{'a':0}]",
"x = [{\n 'a': 0\n}]");
bt('{a([[a1]], {b;});}',
'{\n a([[a1]], {\n b;\n });\n}');
bt("a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();",
"a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();");
bt("function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}",
"function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}");
self.options.keep_array_indentation = False;
bt('a = //comment\n/regex/;');
test_fragment('/*\n * X\n */');
test_fragment('/*\r\n * X\r\n */', '/*\n * X\n */');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n} else {\n c;\n}');
bt('var a = new function();');
test_fragment('new function');
self.options.brace_style = 'expand';
bt('//case 1\nif (a == 1)\n{}\n//case 2\nelse if (a == 2)\n{}');
bt('if(1){2}else{3}', "if (1)\n{\n 2\n}\nelse\n{\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try\n{\n a();\n}\ncatch (b)\n{\n c();\n}\ncatch (d)\n{}\nfinally\n{\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a)\n{\n b();\n}\nelse if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a)\n{\n // comment\n}\nelse\n{\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x)\n{\n y\n}\nelse\n{\n if (x)\n {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a)\n{\n b;\n}\nelse\n{\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo)\n {\n bar();\n }');
bt('if (foo)\n{}\nelse /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a)\n{\n b;\n}\nelse\n{\n c;\n}');
test_fragment('if (foo) {', 'if (foo)\n{');
test_fragment('foo {', 'foo\n{');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return;\n{');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x()\n{\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a()\n {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo(\n {\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i)\n{\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i)\n{\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = 'collapse';
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}');
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n} catch (b) {\n c();\n} catch (d) {} finally {\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n} else if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n} else {\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n} else {\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }');
bt('if (foo) {} else /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n} else {\n c;\n}');
test_fragment('if (foo) {', 'if (foo) {');
test_fragment('foo {', 'foo {');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return; {');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a() {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = "end-expand";
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}');
bt('if(1){2}else{3}', "if (1) {\n 2\n}\nelse {\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n}\ncatch (b) {\n c();\n}\ncatch (d) {}\nfinally {\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n}\nelse if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n}\nelse {\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n}\nelse {\n if (x) {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n}\nelse {\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }');
bt('if (foo) {}\nelse /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n}\nelse {\n c;\n}');
test_fragment('if (foo) {', 'if (foo) {');
test_fragment('foo {', 'foo {');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return; {');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a() {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = 'collapse';
        bt('a = <?= external() ?> ;'); # not the most perfect thing in the world, but you're the weirdo beautifying php mix-ins with a javascript beautifier
bt('a = <%= external() %> ;');
test_fragment('roo = {\n /*\n ****\n FOO\n ****\n */\n BAR: 0\n};');
test_fragment("if (zz) {\n // ....\n}\n(function");
self.options.preserve_newlines = True;
bt('var a = 42; // foo\n\nvar b;')
bt('var a = 42; // foo\n\n\nvar b;')
bt("var a = 'foo' +\n 'bar';");
bt("var a = \"foo\" +\n \"bar\";");
bt('"foo""bar""baz"', '"foo"\n"bar"\n"baz"')
bt("'foo''bar''baz'", "'foo'\n'bar'\n'baz'")
bt("{\n get foo() {}\n}")
bt("{\n var a = get\n foo();\n}")
bt("{\n set foo() {}\n}")
bt("{\n var a = set\n foo();\n}")
bt("var x = {\n get function()\n}")
bt("var x = {\n set function()\n}")
bt("var x = set\n\nfunction() {}", "var x = set\n\n function() {}")
bt('<!-- foo\nbar();\n-->')
bt('<!-- dont crash')
bt('for () /abc/.test()')
bt('if (k) /aaa/m.test(v) && l();')
bt('switch (true) {\n case /swf/i.test(foo):\n bar();\n}')
bt('createdAt = {\n type: Date,\n default: Date.now\n}')
bt('switch (createdAt) {\n case a:\n Date,\n default:\n Date.now\n}')
bt('return function();')
bt('var a = function();')
bt('var a = 5 + function();')
bt('{\n foo // something\n ,\n bar // something\n baz\n}')
bt('function a(a) {} function b(b) {} function c(c) {}', 'function a(a) {}\n\nfunction b(b) {}\n\nfunction c(c) {}')
bt('3.*7;', '3. * 7;')
bt('import foo.*;', 'import foo.*;') # actionscript's import
test_fragment('function f(a: a, b: b)') # actionscript
bt('foo(a, function() {})');
bt('foo(a, /regex/)');
bt('/* foo */\n"x"');
self.options.break_chained_methods = False
self.options.preserve_newlines = False
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat);\nfoo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat)\nfoo.bar().baz().cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this.something = foo.bar().baz().cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this.something.xxx = foo.moo.bar()');
self.options.break_chained_methods = False
self.options.preserve_newlines = True
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat);\nfoo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat)\nfoo.bar().baz().cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this\n .something = foo.bar()\n .baz().cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this\n .something\n .xxx = foo.moo\n .bar()');
self.options.break_chained_methods = True
self.options.preserve_newlines = False
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat);\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat)\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this.something = foo.bar()\n .baz()\n .cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this.something.xxx = foo.moo.bar()');
self.options.break_chained_methods = True
self.options.preserve_newlines = True
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat);\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat)\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this\n .something = foo.bar()\n .baz()\n .cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this\n .something\n .xxx = foo.moo\n .bar()');
self.options.break_chained_methods = False
self.options.preserve_newlines = False
self.options.preserve_newlines = False
self.options.wrap_line_length = 0
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();')
self.options.wrap_line_length = 70
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 40
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat &&\n' +
' "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 41
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 45
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('{\n' +
' foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
'}',
# expected #
'{\n' +
' foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();\n' +
'}');
self.options.preserve_newlines = True
self.options.wrap_line_length = 0
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 70
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 40
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat &&\n' +
' "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 41
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 45
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('{\n' +
' foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
'}',
# expected #
'{\n' +
' foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
'}');
self.options.wrap_line_length = 0
self.options.preserve_newlines = False
bt('if (foo) // comment\n bar();');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n /asdf/;');
bt('this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');',
'this.oa = new OAuth(_requestToken, _accessToken, consumer_key);');
bt('foo = {\n x: y, // #44\n w: z // #44\n}');
bt('switch (x) {\n case "a":\n // comment on newline\n break;\n case "b": // comment on same line\n break;\n}');
# these aren't ready yet.
#bt('if (foo) // comment\n bar() /*i*/ + baz() /*j\n*/ + asdf();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\nelse\na();',
'if (foo)\n if (bar)\n if (baz) whee();\n else a();');
bt('if (foo)\nbar();\nelse\ncar();',
'if (foo) bar();\nelse car();');
bt('if (foo) if (bar) if (baz);\na();',
'if (foo)\n if (bar)\n if (baz);\na();');
bt('if (foo) if (bar) if (baz) whee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if (foo) a()\nif (bar) if (baz) whee();\na();',
'if (foo) a()\nif (bar)\n if (baz) whee();\na();');
bt('if (foo);\nif (bar) if (baz) whee();\na();',
'if (foo);\nif (bar)\n if (baz) whee();\na();');
bt('if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];',
'if (options)\n'+
' for (var p in options) this[p] = options[p];');
bt('if (options) for (var p in options) this[p] = options[p];',
'if (options)\n for (var p in options) this[p] = options[p];');
bt('if (options) do q(); while (b());',
'if (options)\n do q(); while (b());');
bt('if (options) while (b()) q();',
'if (options)\n while (b()) q();');
bt('if (options) do while (b()) q(); while (a());',
'if (options)\n do\n while (b()) q(); while (a());');
bt('function f(a, b, c,\nd, e) {}',
'function f(a, b, c, d, e) {}');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('function f(a,b) {if(a) b()}\n\n\n\nfunction g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
        # This is not valid syntax, but we still want to behave reasonably and avoid side effects
bt('(if(a) b())(if(a) b())',
'(\n if (a) b())(\n if (a) b())');
bt('(if(a) b())\n\n\n(if(a) b())',
'(\n if (a) b())\n(\n if (a) b())');
bt("if\n(a)\nb();", "if (a) b();");
bt('var a =\nfoo', 'var a = foo');
bt('var a = {\n"a":1,\n"b":2}', "var a = {\n \"a\": 1,\n \"b\": 2\n}");
bt("var a = {\n'a':1,\n'b':2}", "var a = {\n 'a': 1,\n 'b': 2\n}");
bt('var a = /*i*/ "b";');
bt('var a = /*i*/\n"b";', 'var a = /*i*/ "b";');
bt('var a = /*i*/\nb;', 'var a = /*i*/ b;');
bt('{\n\n\n"x"\n}', '{\n "x"\n}');
bt('if(a &&\nb\n||\nc\n||d\n&&\ne) e = f', 'if (a && b || c || d && e) e = f');
bt('if(a &&\n(b\n||\nc\n||d)\n&&\ne) e = f', 'if (a && (b || c || d) && e) e = f');
test_fragment('\n\n"x"', '"x"');
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\nb = 2;');
self.options.preserve_newlines = True
bt('if (foo) // comment\n bar();');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n /asdf/;');
bt('this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');');
bt('foo = {\n x: y, // #44\n w: z // #44\n}');
bt('switch (x) {\n case "a":\n // comment on newline\n break;\n case "b": // comment on same line\n break;\n}');
# these aren't ready yet.
# bt('if (foo) // comment\n bar() /*i*/ + baz() /*j\n*/ + asdf();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\na();',
'if (foo)\n if (bar)\n if (baz)\n whee();\na();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\nelse\na();',
'if (foo)\n if (bar)\n if (baz)\n whee();\n else\n a();');
bt('if (foo) bar();\nelse\ncar();',
'if (foo) bar();\nelse\n car();');
bt('if (foo) if (bar) if (baz);\na();',
'if (foo)\n if (bar)\n if (baz);\na();');
bt('if (foo) if (bar) if (baz) whee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if (foo) a()\nif (bar) if (baz) whee();\na();',
'if (foo) a()\nif (bar)\n if (baz) whee();\na();');
bt('if (foo);\nif (bar) if (baz) whee();\na();',
'if (foo);\nif (bar)\n if (baz) whee();\na();');
bt('if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];');
bt('if (options) for (var p in options) this[p] = options[p];',
'if (options)\n for (var p in options) this[p] = options[p];');
bt('if (options) do q(); while (b());',
'if (options)\n do q(); while (b());');
bt('if (options) do; while (b());',
'if (options)\n do; while (b());');
bt('if (options) while (b()) q();',
'if (options)\n while (b()) q();');
bt('if (options) do while (b()) q(); while (a());',
'if (options)\n do\n while (b()) q(); while (a());');
bt('function f(a, b, c,\nd, e) {}',
'function f(a, b, c,\n d, e) {}');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('function f(a,b) {if(a) b()}\n\n\n\nfunction g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\n\n\nfunction g(a, b) {\n if (!a) b()\n}');
        # This is not valid syntax, but we still want to behave reasonably and avoid side effects
bt('(if(a) b())(if(a) b())',
'(\n if (a) b())(\n if (a) b())');
bt('(if(a) b())\n\n\n(if(a) b())',
'(\n if (a) b())\n\n\n(\n if (a) b())');
bt("if\n(a)\nb();", "if (a)\n b();");
bt('var a =\nfoo', 'var a =\n foo');
bt('var a = {\n"a":1,\n"b":2}', "var a = {\n \"a\": 1,\n \"b\": 2\n}");
bt("var a = {\n'a':1,\n'b':2}", "var a = {\n 'a': 1,\n 'b': 2\n}");
bt('var a = /*i*/ "b";');
bt('var a = /*i*/\n"b";', 'var a = /*i*/\n "b";');
bt('var a = /*i*/\nb;', 'var a = /*i*/\n b;');
bt('{\n\n\n"x"\n}', '{\n\n\n "x"\n}');
bt('if(a &&\nb\n||\nc\n||d\n&&\ne) e = f', 'if (a &&\n b ||\n c || d &&\n e) e = f');
bt('if(a &&\n(b\n||\nc\n||d)\n&&\ne) e = f', 'if (a &&\n (b ||\n c || d) &&\n e) e = f');
test_fragment('\n\n"x"', '"x"');
        # this behavior differs between js and python: it defaults to unlimited in js, 10 in python
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\n\n\n\n\n\n\n\n\n\nb = 2;');
self.options.max_preserve_newlines = 8;
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\n\n\n\n\n\n\n\nb = 2;');
# Test the option to have spaces within parens
self.options.space_in_paren = False
bt('if(p) foo(a,b)', 'if (p) foo(a, b)');
bt('try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
'try {\n while (true) {\n willThrow()\n }\n} catch (result) switch (result) {\n case 1:\n ++result\n}');
bt('((e/((a+(b)*c)-d))^2)*5;', '((e / ((a + (b) * c) - d)) ^ 2) * 5;');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('a=[];',
'a = [];');
bt('a=[b,c,d];',
'a = [b, c, d];');
bt('a= f[b];',
'a = f[b];');
self.options.space_in_paren = True
bt('if(p) foo(a,b)', 'if ( p ) foo( a, b )');
bt('try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
'try {\n while ( true ) {\n willThrow( )\n }\n} catch ( result ) switch ( result ) {\n case 1:\n ++result\n}');
bt('((e/((a+(b)*c)-d))^2)*5;', '( ( e / ( ( a + ( b ) * c ) - d ) ) ^ 2 ) * 5;');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f( a, b ) {\n if ( a ) b( )\n}\n\nfunction g( a, b ) {\n if ( !a ) b( )\n}');
bt('a=[ ];',
'a = [ ];');
bt('a=[b,c,d];',
'a = [ b, c, d ];');
bt('a= f[b];',
'a = f[ b ];');
self.options.space_in_paren = False
        # Test that e4x literals are passed through when the e4x option is enabled
bt('xml=<a b="c"><d/><e>\n foo</e>x</a>;', 'xml = < a b = "c" > < d / > < e >\n foo < /e>x</a > ;');
self.options.e4x = True
bt('xml=<a b="c"><d/><e>\n foo</e>x</a>;', 'xml = <a b="c"><d/><e>\n foo</e>x</a>;');
bt('<a b=\'This is a quoted "c".\'/>', '<a b=\'This is a quoted "c".\'/>');
bt('<a b="This is a quoted \'c\'."/>', '<a b="This is a quoted \'c\'."/>');
bt('<a b="A quote \' inside string."/>', '<a b="A quote \' inside string."/>');
bt('<a b=\'A quote " inside string.\'/>', '<a b=\'A quote " inside string.\'/>');
bt('<a b=\'Some """ quotes "" inside string.\'/>', '<a b=\'Some """ quotes "" inside string.\'/>');
# Handles inline expressions
bt('xml=<{a} b="c"><d/><e v={z}>\n foo</e>x</{a}>;', 'xml = <{a} b="c"><d/><e v={z}>\n foo</e>x</{a}>;');
# Handles CDATA
bt('xml=<a b="c"><![CDATA[d/>\n</a></{}]]></a>;', 'xml = <a b="c"><![CDATA[d/>\n</a></{}]]></a>;');
bt('xml=<![CDATA[]]>;', 'xml = <![CDATA[]]>;');
bt('xml=<![CDATA[ b="c"><d/><e v={z}>\n foo</e>x/]]>;', 'xml = <![CDATA[ b="c"><d/><e v={z}>\n foo</e>x/]]>;');
# Handles messed up tags, as long as it isn't the same name
# as the root tag. Also handles tags of same name as root tag
# as long as nesting matches.
bt('xml=<a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;',
'xml = <a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;');
# If xml is not terminated, the remainder of the file is treated
# as part of the xml-literal (passed through unaltered)
test_fragment('xml=<a></b>\nc<b;', 'xml = <a></b>\nc<b;');
self.options.e4x = False
# START tests for issue 241
bt('obj\n' +
' .last({\n' +
' foo: 1,\n' +
' bar: 2\n' +
' });\n' +
'var test = 1;');
bt('obj\n' +
' .last(a, function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;');
bt('obj.first()\n' +
' .second()\n' +
' .last(function(err, response) {\n' +
' console.log(err);\n' +
' });');
# END tests for issue 241
# START tests for issue 268 and 275
bt('obj.last(a, function() {\n' +
' var test;\n' +
'});\n' +
'var test = 1;');
bt('obj.last(a,\n' +
' function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;');
bt('(function() {if (!window.FOO) window.FOO || (window.FOO = function() {var b = {bar: "zort"};});})();',
'(function() {\n' +
' if (!window.FOO) window.FOO || (window.FOO = function() {\n' +
' var b = {\n' +
' bar: "zort"\n' +
' };\n' +
' });\n' +
'})();');
# END tests for issue 268 and 275
# START tests for issue 281
bt('define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
'], function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
' onClick: lang.hitch(this, function() {\n' +
' new Deferred().then(lang.hitch(this, function() {\n' +
' this.salary * 0.25;\n' +
' }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
'});');
bt('define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
' ],\n' +
' function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
' onClick: lang.hitch(this, function() {\n' +
' new Deferred().then(lang.hitch(this, function() {\n' +
' this.salary * 0.25;\n' +
' }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
' });');
# END tests for issue 281
        # This is what I think these should look like, related to #256;
        # we don't have the ability yet
#bt('var a=1,b={bang:2},c=3;',
# 'var a = 1,\n b = {\n bang: 2\n },\n c = 3;');
#bt('var a={bing:1},b=2,c=3;',
# 'var a = {\n bing: 1\n },\n b = 2,\n c = 3;');
def decodesto(self, input, expectation=None):
self.assertEqual(
jsbeautifier.beautify(input, self.options), expectation or input)
# if the expected is different from input, run it again
# expected output should be unchanged when run twice.
        if expectation is not None:
self.assertEqual(
jsbeautifier.beautify(expectation, self.options), expectation)
def wrap(self, text):
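        # indent every line of the snippet by four spaces so bt() can re-test it nested inside a block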
return self.wrapregex.sub(' \\1', text)
def bt(self, input, expectation=None):
expectation = expectation or input
self.decodesto(input, expectation)
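        # additionally re-run the test wrapped in a block to verify behavior one indent level deeper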
if self.options.indent_size == 4 and input:
wrapped_input = '{\n%s\nfoo=bar;}' % self.wrap(input)
wrapped_expect = '{\n%s\n foo = bar;\n}' % self.wrap(expectation)
self.decodesto(wrapped_input, wrapped_expect)
@classmethod
def setUpClass(cls):
options = jsbeautifier.default_options()
options.indent_size = 4
options.indent_char = ' '
options.preserve_newlines = True
options.jslint_happy = False
options.keep_array_indentation = False
options.brace_style = 'collapse'
options.indent_level = 0
options.break_chained_methods = False
cls.options = options
cls.wrapregex = re.compile('^(.+)$', re.MULTILINE)
if __name__ == '__main__':
unittest.main()
|
JT5D/Alfred-Popclip-Sublime
|
Sublime Text 2/JsFormat/libs/jsbeautifier/tests/testjsbeautifier.py
|
Python
|
gpl-2.0
| 64,176 | 0.013246 |
from tests.package.test_python import TestPythonPackageBase
class TestPythonPy2Subprocess32(TestPythonPackageBase):
__test__ = True
config = TestPythonPackageBase.config + \
"""
BR2_PACKAGE_PYTHON=y
BR2_PACKAGE_PYTHON_SUBPROCESS32=y
"""
sample_scripts = ["tests/package/sample_python_subprocess32.py"]
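    # A minimal sketch (an assumption -- the referenced script is not shown
    # here) of what tests/package/sample_python_subprocess32.py might contain:
    #
    #     import subprocess32
    #     assert subprocess32.call(["true"]) == 0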
|
masahir0y/buildroot-yamada
|
support/testing/tests/package/test_python_subprocess32.py
|
Python
|
gpl-2.0
| 348 | 0 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import pytest
from llnl.util.filesystem import touch, working_dir
import spack.repo
import spack.config
from spack.spec import Spec
from spack.version import ver
from spack.util.executable import which
pytestmark = pytest.mark.skipif(
not which('svn'), reason='requires subversion to be installed')
@pytest.mark.parametrize("type_of_test", ['default', 'rev0'])
@pytest.mark.parametrize("secure", [True, False])
def test_fetch(
type_of_test,
secure,
mock_svn_repository,
config,
mutable_mock_packages
):
"""Tries to:
1. Fetch the repo using a fetch strategy constructed with
supplied args (they depend on type_of_test).
2. Check if the test_file is in the checked out repository.
3. Assert that the repository is at the revision supplied.
4. Add and remove some files, then reset the repo, and
ensure it's all there again.
"""
# Retrieve the right test parameters
t = mock_svn_repository.checks[type_of_test]
h = mock_svn_repository.hash
# Construct the package under test
spec = Spec('svn-test')
spec.concretize()
pkg = spack.repo.get(spec)
pkg.versions[ver('svn')] = t.args
# Enter the stage directory and check some properties
with pkg.stage:
with spack.config.override('config:verify_ssl', secure):
pkg.do_stage()
with working_dir(pkg.stage.source_path):
assert h() == t.revision
file_path = os.path.join(pkg.stage.source_path, t.file)
assert os.path.isdir(pkg.stage.source_path)
assert os.path.isfile(file_path)
os.unlink(file_path)
assert not os.path.isfile(file_path)
untracked_file = 'foobarbaz'
touch(untracked_file)
assert os.path.isfile(untracked_file)
pkg.do_restage()
assert not os.path.isfile(untracked_file)
assert os.path.isdir(pkg.stage.source_path)
assert os.path.isfile(file_path)
assert h() == t.revision
|
tmerrick1/spack
|
lib/spack/spack/test/svn_fetch.py
|
Python
|
lgpl-2.1
| 3,303 | 0 |
import logging
from anubis.model import builtin
from anubis.model import domain
from anubis.util import argmethod
_logger = logging.getLogger(__name__)
def wrap(method):
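    # run() executes the wrapped method once per domain (built-in and user);
    # both the method and a '<name>_all' variant are registered via argmethod below.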
async def run():
_logger.info('Built in domains')
for ddoc in builtin.DOMAINS:
_logger.info('Domain: {0}'.format(ddoc['_id']))
await method(ddoc['_id'])
_logger.info('User domains')
ddocs = domain.get_multi(fields={'_id': 1})
async for ddoc in ddocs:
_logger.info('Domain: {0}'.format(ddoc['_id']))
await method(ddoc['_id'])
if method.__module__ == '__main__':
argmethod._methods[method.__name__] = method
        argmethod._methods[method.__name__ + '_all'] = run  # register the coroutine function itself, not a coroutine object
return method
|
KawashiroNitori/Anubis
|
anubis/util/domainjob.py
|
Python
|
gpl-3.0
| 765 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import partialdate.fields
class Migration(migrations.Migration):
dependencies = [
('genealogio', '0023_auto_20160303_2105'),
]
operations = [
migrations.AlterField(
model_name='event',
name='date',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Datum', blank=True),
),
migrations.AlterField(
model_name='family',
name='end_date',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Enddatum', blank=True),
),
migrations.AlterField(
model_name='family',
name='start_date',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Anfangsdatum', blank=True),
),
migrations.AlterField(
model_name='person',
name='datebirth',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Geburtsdatum', blank=True),
),
migrations.AlterField(
model_name='person',
name='datedeath',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Todesdatum', blank=True),
),
migrations.AlterField(
model_name='personplace',
name='end',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Ende', blank=True),
),
migrations.AlterField(
model_name='personplace',
name='start',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Beginn', blank=True),
),
migrations.AlterField(
model_name='timelineitem',
name='description',
field=models.TextField(default='', help_text='Wird beim pdf-Export verwendet, kann als ReST formattiert werden, mit Links auf Objekte der Datenbank (siehe Dokumentation).', verbose_name='Beschreibung', blank=True),
),
migrations.AlterField(
model_name='timelineitem',
name='end_date',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich); kann freibleiben', verbose_name='Enddatum', blank=True),
),
migrations.AlterField(
model_name='timelineitem',
name='start_date',
field=partialdate.fields.PartialDateField(default='', help_text='Datum im Format JJJJ-MM-TT (Teilangaben m\xf6glich)', verbose_name='Startdatum', blank=True),
),
]
|
ugoertz/django-familio
|
genealogio/migrations/0024_auto_20160316_2039.py
|
Python
|
bsd-3-clause
| 3,111 | 0.003214 |
import os, sys
# to read dependencies from the ./lib directory
script_dir = os.path.dirname( os.path.realpath(__file__) )
sys.path.insert(0, script_dir + os.sep + "lib")
import logging, boto3, json, random
# for dynamodb filter queries
from boto3.dynamodb.conditions import Key, Attr
# setup log level to DEBUG
log = logging.getLogger()
log.setLevel(logging.DEBUG)
# initialize DynamoDB client
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table(os.environ['DYNAMO_TABLE'])
# During the day, players point at each other to assign blame for the night's murder
def handler(event, context):
return response( {"Message": "Welcome to the Serverless Workshop fully powered by AWS Lambda elastic cloud computing service"}, event)
def response(body, event, code=200):
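    # API Gateway (Lambda proxy) invocations include 'resource' and 'httpMethod';
    # wrap the body in a proxy response for those, otherwise return it as-is.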
if 'resource' in event and 'httpMethod' in event:
return {
'statusCode': code,
'headers': {},
'body': json.dumps(body, indent=4, separators=(',', ':'))
}
return body
|
akranga/mafia-serverless
|
game/day.py
|
Python
|
apache-2.0
| 979 | 0.015322 |
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Import related utilities and helper functions.
"""
import sys
import traceback
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition('.')
__import__(mod_str)
try:
return getattr(sys.modules[mod_str], class_str)
except AttributeError:
raise ImportError('Class %s cannot be found (%s)' %
(class_str,
traceback.format_exception(*sys.exc_info())))
def import_object(import_str, *args, **kwargs):
"""Import a class and return an instance of it."""
return import_class(import_str)(*args, **kwargs)
def import_object_ns(name_space, import_str, *args, **kwargs):
"""Tries to import object from default namespace.
    Imports a class and returns an instance of it, first by trying
    to find the class in the default namespace, then falling back to
a full path if not found in the default namespace.
"""
import_value = "%s.%s" % (name_space, import_str)
try:
return import_class(import_value)(*args, **kwargs)
except ImportError:
return import_class(import_str)(*args, **kwargs)
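# Hypothetical usage (module and class names below are illustrative only):
#
#     driver = import_object_ns('myapp.drivers', 'FakeDriver')
#     # tries myapp.drivers.FakeDriver first, then FakeDriver as a full path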
def import_module(import_str):
"""Import a module."""
__import__(import_str)
return sys.modules[import_str]
def import_versioned_module(version, submodule=None):
module = 'os_net_config.v%s' % version
if submodule:
module = '.'.join((module, submodule))
return import_module(module)
def try_import(import_str, default=None):
"""Try to import a module and if it fails return default."""
try:
return import_module(import_str)
except ImportError:
return default
|
fredericlepied/os-net-config
|
os_net_config/openstack/common/importutils.py
|
Python
|
apache-2.0
| 2,368 | 0 |
import numpy as np
import scipy.cluster.hierarchy as hr
import scipy.spatial as spa
import clustering
import matplotlib.pyplot as plt
from sklearn.cluster import AgglomerativeClustering
import filter
class textMiningEac:
def __init__(self,k,N,low,high=0):
self.k = k
        # Read data from a file [temporary]
#data = np.genfromtxt('iris.data',delimiter=',')
#temp= spa.distance.pdist(data,'euclidean')
#self.D = spa.distance.squareform(temp)
self.D,self.tweets,self.words,self.freq = filter.filtertweets()
        # Compute the co-association matrix
self.loadEAC(N,low,high)
def loadEAC(self,N,low,high=0):
"""
        Regenerates the co-association matrix
"""
m,n = self.D.shape
coasocMatrix = clustering.EAC(self.D,N,low,high)
print(coasocMatrix)
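        # Convert co-association frequencies to distances: pairs grouped
        # together in many of the N clusterings end up with a distance near 0.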
self.EAC_D = np.ones(n) - coasocMatrix
def startPAM(self):
"""
        Runs PAM on the EAC distance matrix
"""
(a,b,self.labels) = clustering.PAM(self.EAC_D, self.k,True)
return self.labels
def startHierarchical(self):
"""
        Runs hierarchical clustering on the EAC distance matrix
"""
z = AgglomerativeClustering(n_clusters=self.k, linkage='ward').fit(self.EAC_D)
self.labels = z.labels_
return self.labels
def getClustersTweets(self):
"""
        Builds tweet clusters based on the occurrence frequency of the words
"""
labelsTweets = np.zeros(len(self.tweets),dtype=np.int)
for i in range(len(self.tweets)):
acum = np.zeros(2)
for j in range(len(self.labels)):
                # If the word appears in the tweet
if(self.words[j] in self.tweets[i]):
                    # Accumulate the frequency in the accumulator slot of that word's cluster
acum[self.labels[j]] += self.freq[j]
            # Assign the cluster with the highest accumulated value
labelsTweets[i] = np.argmax(acum)
lista = labelsTweets.tolist()
try:
saveFile = open('clustered.csv','w')
for i in range(len(self.tweets)):
saveFile.write(str(lista[i])+': '+' '.join(self.tweets[i])+'\n')
saveFile.close()
except Exception as e:
print("error: {0}".format(e))
return labelsTweets
def getPrecisionIris(self):
"""
        Test method.
        Computes an accuracy score. Not reliable.
"""
        # Read the original clusters
originalClusters = np.genfromtxt('orCL.data',delimiter=',',dtype=None)
results ={}
j=0
for i in range(50,151,50):
            # Find the most frequent cluster
            unique, counts = np.unique(self.labels[i-50:i], return_counts=True)
print(unique)
print(counts)
maxvalue = np.amax(counts)
results[j]=maxvalue/50
j=j+1
print("Setosa= " + '%.2f' % results[0] + "\nVersicolor= " + '%.2f' % results[1] + "\nVirginica= " + '%.2f' % results[2])
def getSilhouette(self):
"""
        Plots the silhouette
"""
clustering.Silhouette(self.D,self.labels,self.k)
|
fbr1/textmining-eac
|
main.py
|
Python
|
mit
| 3,609 | 0.019119 |
import tornado.web
from datetime import date
from sqlalchemy.orm.exc import NoResultFound
from pyprint.handler import BaseHandler
from pyprint.models import User, Link, Post
class SignInHandler(BaseHandler):
def get(self):
return self.background_render('login.html')
def post(self):
username = self.get_argument('username', None)
password = self.get_argument('password', None)
if username and password:
try:
user = self.orm.query(User).filter(User.username == username).one()
except NoResultFound:
return self.redirect('/login')
if user.check(password):
self.set_secure_cookie('username', user.username)
self.redirect('/kamisama/posts')
return self.redirect('/login')
class ManagePostHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
posts = self.orm.query(Post.title, Post.id).order_by(Post.id.desc()).all()
self.background_render('posts.html', posts=posts)
@tornado.web.authenticated
def post(self):
action = self.get_argument('action', None)
if action == 'del':
post_id = self.get_argument('id', 0)
if post_id:
post = self.orm.query(Post).filter(Post.id == post_id).one()
self.orm.delete(post)
self.orm.commit()
class AddPostHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
self.background_render('add_post.html', post=None)
@tornado.web.authenticated
def post(self):
title = self.get_argument('title', None)
content = self.get_argument('content', None)
tags = self.get_argument('tags', '').strip().split(',')
if not title or not content:
return self.redirect('/kamisama/posts/add')
post = self.orm.query(Post.title).filter(Post.title == title).all()
if post:
            return self.write('<script>alert("Title already exists");window.history.go(-1);</script>')
self.orm.add(Post(title=title, content=content, created_time=date.today()))
self.orm.commit()
return self.redirect('/kamisama/posts')
class AddLinkHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
links = self.orm.query(Link).all()
self.background_render('links.html', links=links)
@tornado.web.authenticated
def post(self):
action = self.get_argument('action', None)
if action == 'add':
name = self.get_argument('name', '')
url = self.get_argument('url', '')
if not name or not url:
return self.redirect('/kamisama/links')
self.orm.add(Link(name=name, url=url))
self.orm.commit()
return self.redirect('/kamisama/links')
elif action == 'del':
link_id = self.get_argument('id', 0)
if link_id:
link = self.orm.query(Link).filter(Link.id == link_id).one()
self.orm.delete(link)
self.orm.commit()
|
RicterZ/pyprint
|
pyprint/views/background.py
|
Python
|
mit
| 3,116 | 0.001284 |
import datetime
def suffix(d):
return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
def custom_strftime(format, t):
return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))
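# e.g. custom_strftime("%a {S} %B", datetime.date(2018, 3, 24)) -> 'Sat 24th March'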
print "Welcome to GenerateUpdateLines, the nation's favourite automatic update line generator."
start = int(raw_input("Enter initial day number: "))
stop = int(raw_input("Enter final day number: "))
t0 = datetime.date(2018, 3, 24)
for d in range(start, stop+1):
date = t0 + datetime.timedelta(d-1)
print "| "+str(d)+" | "+custom_strftime("%a {S} %B", date)+" | | |"
# from datetime import datetime as dt
#
# def suffix(d):
# return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
#
# def custom_strftime(format, t):
# return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))
#
# print custom_strftime('%B {S}, %Y', dt.now())
|
ArthurStart/arthurstart.github.io
|
GenerateUpdateLines.py
|
Python
|
mit
| 889 | 0.013498 |
# -*- coding: utf-8 -*-
"""
This module contains the implementation of a tab widget specialised to
show code editor tabs.
"""
import logging
import os
from pyqode.core.dialogs.unsaved_files import DlgUnsavedFiles
from pyqode.core.modes.filewatcher import FileWatcherMode
from pyqode.core.widgets.tab_bar import TabBar
from pyqode.qt import QtCore, QtWidgets
from pyqode.qt.QtWidgets import QTabBar, QTabWidget
def _logger():
return logging.getLogger(__name__)
class TabWidget(QTabWidget):
"""
QTabWidget specialised to hold CodeEdit instances (or any other
    object that has the same interface).
It ensures that there is only one open editor tab for a specific file path,
it adds a few utility methods to quickly manipulate the current editor
widget. It will automatically rename tabs that share the same base filename
to include their distinctive parent directory.
It handles tab close requests automatically and show a dialog box when
a dirty tab widget is being closed. It also adds a convenience QTabBar
with a "close", "close others" and "close all" menu. (You can add custom
actions by using the addAction and addSeparator methods).
It exposes a variety of signal and slots for a better integration with
your applications( dirty_changed, save_current, save_all, close_all,
close_current, close_others).
    .. deprecated: starting from version 2.4, this widget is considered
deprecated. You should use
:class:`pyqode.core.widgets.SplittableTabWidget` instead. It will be
removed in version 2.6.
"""
#: Signal emitted when a tab dirty flag changed
dirty_changed = QtCore.Signal(bool)
#: Signal emitted when the last tab has been closed
last_tab_closed = QtCore.Signal()
#: Signal emitted when a tab has been closed
tab_closed = QtCore.Signal(QtWidgets.QWidget)
@property
def active_editor(self):
"""
Returns the current editor widget or None if the current tab widget is
not a subclass of CodeEdit or if there is no open tab.
"""
return self._current
def __init__(self, parent):
QtWidgets.QTabWidget.__init__(self, parent)
self._current = None
self.currentChanged.connect(self._on_current_changed)
self.tabCloseRequested.connect(self._on_tab_close_requested)
tab_bar = TabBar(self)
tab_bar.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
tab_bar.customContextMenuRequested.connect(self._show_tab_context_menu)
self.setTabBar(tab_bar)
self.tab_bar = tab_bar
self._context_mnu = QtWidgets.QMenu()
for name, slot in [('Close', self.close),
('Close others', self.close_others),
('Close all', self.close_all)]:
qaction = QtWidgets.QAction(name, self)
qaction.triggered.connect(slot)
self._context_mnu.addAction(qaction)
self.addAction(qaction)
# keep a list of widgets (to avoid PyQt bug where
# the C++ class loose the wrapped obj type).
self._widgets = []
@QtCore.Slot()
def close(self):
"""
Closes the active editor
"""
self.tabCloseRequested.emit(self.currentIndex())
@QtCore.Slot()
def close_others(self):
"""
        Closes every editor tab except the current one.
"""
current_widget = self.currentWidget()
self._try_close_dirty_tabs(exept=current_widget)
i = 0
while self.count() > 1:
widget = self.widget(i)
if widget != current_widget:
self.removeTab(i)
else:
i = 1
@QtCore.Slot()
def close_all(self):
"""
Closes all editors
"""
if self._try_close_dirty_tabs():
while self.count():
widget = self.widget(0)
self.removeTab(0)
self.tab_closed.emit(widget)
return True
return False
def _ensure_unique_name(self, code_edit, name):
if name is not None:
code_edit._tab_name = name
else:
code_edit._tab_name = code_edit.file.name
file_name = code_edit.file.name
if self._name_exists(file_name):
file_name = self._rename_duplicate_tabs(
code_edit, code_edit.file.name, code_edit.file.path)
code_edit._tab_name = file_name
@QtCore.Slot()
def save_current(self, path=None):
"""
Save current editor content. Leave file to None to erase the previous
file content. If the current editor's file_path is None and path
is None, the function will call
``QtWidgets.QFileDialog.getSaveFileName`` to get a valid save filename.
:param path: path of the file to save, leave it None to overwrite
existing file.
"""
try:
if not path and not self._current.file.path:
path, filter = QtWidgets.QFileDialog.getSaveFileName(
self, 'Choose destination path')
if not path:
return False
old_path = self._current.file.path
code_edit = self._current
self._save_editor(code_edit, path)
path = code_edit.file.path
# path (and icon) may have changed
if path and old_path != path:
self._ensure_unique_name(code_edit, code_edit.file.name)
self.setTabText(self.currentIndex(), code_edit._tab_name)
ext = os.path.splitext(path)[1]
old_ext = os.path.splitext(old_path)[1]
if ext != old_ext or not old_path:
icon = QtWidgets.QFileIconProvider().icon(
QtCore.QFileInfo(code_edit.file.path))
self.setTabIcon(self.currentIndex(), icon)
return True
except AttributeError: # not an editor widget
pass
return False
@QtCore.Slot()
def save_all(self):
"""
Save all editors.
"""
initial_index = self.currentIndex()
for i in range(self.count()):
try:
self.setCurrentIndex(i)
self.save_current()
except AttributeError:
pass
self.setCurrentIndex(initial_index)
def addAction(self, action):
"""
Adds an action to the TabBar context menu
:param action: QAction to append
"""
self._context_mnu.addAction(action)
def add_separator(self):
"""
Adds a separator to the TabBar context menu.
:returns The separator action.
"""
return self._context_mnu.addSeparator()
def index_from_filename(self, path):
"""
Checks if the path is already open in an editor tab.
:param path: path to check
:returns: The tab index if found or -1
"""
if path:
for i in range(self.count()):
widget = self.widget(i)
try:
if widget.file.path == path:
return i
except AttributeError:
pass # not an editor widget
return -1
@staticmethod
def _del_code_edit(code_edit):
try:
code_edit.close()
code_edit.delete()
except AttributeError:
pass
del code_edit
def add_code_edit(self, code_edit, name=None):
"""
Adds a code edit tab, sets its text as the editor.file.name and
sets it as the active tab.
The widget is only added if there is no other editor tab open with the
same filename, else the already open tab is set as current.
If the widget file path is empty, i.e. this is a new document that has
not been saved to disk, you may provided a formatted string
such as 'New document %d.txt' for the document name. The int format
will be automatically replaced by the number of new documents
(e.g. 'New document 1.txt' then 'New document 2.txt' and so on).
If you prefer to use your own code to manage the file names, just
ensure that the names are unique.
:param code_edit: The code editor widget tab to append
:type code_edit: pyqode.core.api.CodeEdit
:param name: Name of the tab. Will use code_edit.file.name if None is
supplied. Default is None. If this is a new document, you should
either pass a unique name or a formatted string (with a '%d'
format)
:return: Tab index
"""
# new empty editor widget (no path set)
if code_edit.file.path == '':
cnt = 0
for i in range(self.count()):
tab = self.widget(i)
if tab.file.path.startswith(name[:name.find('%')]):
cnt += 1
name %= (cnt + 1)
code_edit.file._path = name
index = self.index_from_filename(code_edit.file.path)
if index != -1:
# already open, just show it
self.setCurrentIndex(index)
# no need to keep this instance
self._del_code_edit(code_edit)
return -1
self._ensure_unique_name(code_edit, name)
index = self.addTab(code_edit, code_edit.file.icon,
code_edit._tab_name)
self.setCurrentIndex(index)
self.setTabText(index, code_edit._tab_name)
try:
            code_edit.setFocus(True)
except TypeError:
# PySide
code_edit.setFocus()
try:
file_watcher = code_edit.modes.get(FileWatcherMode)
except (KeyError, AttributeError):
# not installed
pass
else:
file_watcher.file_deleted.connect(self._on_file_deleted)
return index
def addTab(self, elem, icon, name):
"""
Extends QTabWidget.addTab to keep an internal list of added tabs.
:param elem: tab widget
:param icon: tab icon
:param name: tab name
"""
self._widgets.append(elem)
return super(TabWidget, self).addTab(elem, icon, name)
def _name_exists(self, name):
"""
Checks if we already have an opened tab with the same name.
"""
for i in range(self.count()):
if self.tabText(i) == name:
return True
return False
def _save_editor(self, code_edit, path=None):
if not path:
path = code_edit.file.path
if not os.path.exists(path):
path, status = QtWidgets.QFileDialog.getSaveFileName(
self, 'Save as (%s)' % code_edit.file.path)
if path:
try:
code_edit.file.save(path)
except AttributeError:
# not a code edit, try with a save method
code_edit.save(path)
def _rename_duplicate_tabs(self, current, name, path):
"""
Rename tabs whose title is the same as the name
"""
for i in range(self.count()):
if self.widget(i)._tab_name == name and self.widget(i) != current:
file_path = self.widget(i).file.path
if file_path:
parent_dir = os.path.split(os.path.abspath(
os.path.join(file_path, os.pardir)))[1]
new_name = os.path.join(parent_dir, name)
self.setTabText(i, new_name)
self.widget(i)._tab_name = new_name
break
if path:
parent_dir = os.path.split(os.path.abspath(
os.path.join(path, os.pardir)))[1]
return os.path.join(parent_dir, name)
else:
return name
def _on_current_changed(self, index):
if index != -1:
widget = self.widget(index)
else:
widget = None
if self._current:
# needed if the user set save_on_focus_out to True which change
# the dirty flag
self._on_dirty_changed(self._current.dirty)
self._current = widget
try:
if self._current:
self._current.dirty_changed.connect(self._on_dirty_changed)
self._on_dirty_changed(self._current.dirty)
self._current.setFocus()
except AttributeError:
pass # not an editor widget
def removeTab(self, index):
"""
Removes tab at index ``index``.
This method will emits tab_closed for the removed tab.
:param index: index of the tab to remove.
"""
widget = self.widget(index)
try:
self._widgets.remove(widget)
except ValueError:
pass
self.tab_closed.emit(widget)
self._del_code_edit(widget)
QTabWidget.removeTab(self, index)
if widget == self._current:
self._current = None
def _on_tab_close_requested(self, index):
widget = self.widget(index)
try:
if not widget.dirty:
self.removeTab(index)
else:
dlg = DlgUnsavedFiles(
self, files=[widget.file.path if widget.file.path else
widget._tab_name])
if dlg.exec_() == dlg.Accepted:
if not dlg.discarded:
self._save_editor(widget)
self.removeTab(index)
except AttributeError:
_logger().warning('Failed to close tab %d', index)
if self.count() == 0:
self.last_tab_closed.emit()
def _show_tab_context_menu(self, position):
self._context_mnu.popup(self.mapToGlobal(position))
def _try_close_dirty_tabs(self, exept=None):
"""
Tries to close dirty tabs. Uses DlgUnsavedFiles to ask the user
what he wants to do.
"""
widgets, filenames = self._collect_dirty_tabs(exept=exept)
if not len(filenames):
return True
dlg = DlgUnsavedFiles(self, files=filenames)
if dlg.exec_() == dlg.Accepted:
if not dlg.discarded:
for item in dlg.listWidget.selectedItems():
filename = item.text()
widget = None
for widget in widgets:
if widget.file.path == filename:
break
if widget != exept:
self._save_editor(widget)
self.removeTab(self.indexOf(widget))
return True
return False
def _collect_dirty_tabs(self, exept=None):
"""
Collects the list of dirty tabs
"""
widgets = []
filenames = []
for i in range(self.count()):
widget = self.widget(i)
try:
if widget.dirty and widget != exept:
widgets.append(widget)
filenames.append(widget.file.path)
except AttributeError:
pass
return widgets, filenames
def _on_dirty_changed(self, dirty):
"""
        Adds a star in front of a dirty tab and emits dirty_changed.
"""
try:
title = self._current._tab_name
index = self.indexOf(self._current)
if dirty:
self.setTabText(index, "* " + title)
else:
self.setTabText(index, title)
except AttributeError:
pass
self.dirty_changed.emit(dirty)
def closeEvent(self, event):
# On close, we try to close dirty tabs and only process the close
# event if all dirty tabs were closed by the user.
if not self.close_all():
event.ignore()
else:
event.accept()
def _on_file_deleted(self, editor):
"""
Removes deleted files from the tab widget.
        :param editor: CodeEdit to remove
"""
self.removeTab(self.indexOf(editor))
|
jmwright/cadquery-x
|
gui/libs/pyqode/core/widgets/tabs.py
|
Python
|
lgpl-3.0
| 16,288 | 0 |
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the volume backup RPC API.
"""
from oslo_config import cfg
from oslo_log import log as logging
from jacket import rpc
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class BackupAPI(rpc.RPCAPI):
"""Client side of the volume rpc API.
API version history:
1.0 - Initial version.
1.1 - Changed methods to accept backup objects instead of IDs.
1.2 - A version that got in by mistake (without breaking anything).
1.3 - Dummy version bump to mark start of having storage-backup service
decoupled from storage-volume.
... Mitaka supports messaging 1.3. Any changes to existing methods in
1.x after this point should be done so that they can handle version cap
set to 1.3.
2.0 - Remove 1.x compatibility
"""
RPC_API_VERSION = '1.3'
TOPIC = CONF.backup_topic
BINARY = 'storage-backup'
def _compat_ver(self, current, legacy):
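        # Prefer the newer RPC version when the client can send it; otherwise
        # fall back to the legacy version (e.g. during rolling upgrades).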
if self.client.can_send_version(current):
return current
else:
return legacy
def create_backup(self, ctxt, backup):
LOG.debug("create_backup in rpcapi backup_id %s", backup.id)
version = self._compat_ver('2.0', '1.1')
cctxt = self.client.prepare(server=backup.host, version=version)
cctxt.cast(ctxt, 'create_backup', backup=backup)
def restore_backup(self, ctxt, volume_host, backup, volume_id):
LOG.debug("restore_backup in rpcapi backup_id %s", backup.id)
version = self._compat_ver('2.0', '1.1')
cctxt = self.client.prepare(server=volume_host, version=version)
cctxt.cast(ctxt, 'restore_backup', backup=backup,
volume_id=volume_id)
def delete_backup(self, ctxt, backup):
LOG.debug("delete_backup rpcapi backup_id %s", backup.id)
version = self._compat_ver('2.0', '1.1')
cctxt = self.client.prepare(server=backup.host, version=version)
cctxt.cast(ctxt, 'delete_backup', backup=backup)
def export_record(self, ctxt, backup):
LOG.debug("export_record in rpcapi backup_id %(id)s "
"on host %(host)s.",
{'id': backup.id,
'host': backup.host})
version = self._compat_ver('2.0', '1.1')
cctxt = self.client.prepare(server=backup.host, version=version)
return cctxt.call(ctxt, 'export_record', backup=backup)
def import_record(self,
ctxt,
host,
backup,
backup_service,
backup_url,
backup_hosts):
LOG.debug("import_record rpcapi backup id %(id)s "
"on host %(host)s for backup_url %(url)s.",
{'id': backup.id,
'host': host,
'url': backup_url})
version = self._compat_ver('2.0', '1.1')
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'import_record',
backup=backup,
backup_service=backup_service,
backup_url=backup_url,
backup_hosts=backup_hosts)
def reset_status(self, ctxt, backup, status):
LOG.debug("reset_status in rpcapi backup_id %(id)s "
"on host %(host)s.",
{'id': backup.id,
'host': backup.host})
version = self._compat_ver('2.0', '1.1')
cctxt = self.client.prepare(server=backup.host, version=version)
return cctxt.cast(ctxt, 'reset_status', backup=backup, status=status)
def check_support_to_force_delete(self, ctxt, host):
LOG.debug("Check if backup driver supports force delete "
"on host %(host)s.", {'host': host})
version = self._compat_ver('2.0', '1.1')
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'check_support_to_force_delete')
|
HybridF5/jacket
|
jacket/storage/backup/rpcapi.py
|
Python
|
apache-2.0
| 4,652 | 0 |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import p2p_primary_path_
class p2p_primary_path(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/lsps/constrained-path/tunnels/tunnel/p2p-tunnel-attributes/p2p-primary-path. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Primary paths associated with the LSP
"""
__slots__ = ("_path_helper", "_extmethods", "__p2p_primary_path")
_yang_name = "p2p-primary-path"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__p2p_primary_path = YANGDynClass(
base=YANGListType(
"name",
p2p_primary_path_.p2p_primary_path,
yang_name="p2p-primary-path",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="name",
extensions=None,
),
is_container="list",
yang_name="p2p-primary-path",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"mpls",
"lsps",
"constrained-path",
"tunnels",
"tunnel",
"p2p-tunnel-attributes",
"p2p-primary-path",
]
def _get_p2p_primary_path(self):
"""
Getter method for p2p_primary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path (list)
YANG Description: List of p2p primary paths for a tunnel
"""
return self.__p2p_primary_path
def _set_p2p_primary_path(self, v, load=False):
"""
Setter method for p2p_primary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_p2p_primary_path is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_p2p_primary_path() directly.
YANG Description: List of p2p primary paths for a tunnel
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
"name",
p2p_primary_path_.p2p_primary_path,
yang_name="p2p-primary-path",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="name",
extensions=None,
),
is_container="list",
yang_name="p2p-primary-path",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """p2p_primary_path must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType("name",p2p_primary_path_.p2p_primary_path, yang_name="p2p-primary-path", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="p2p-primary-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)""",
}
)
self.__p2p_primary_path = t
if hasattr(self, "_set"):
self._set()
def _unset_p2p_primary_path(self):
self.__p2p_primary_path = YANGDynClass(
base=YANGListType(
"name",
p2p_primary_path_.p2p_primary_path,
yang_name="p2p-primary-path",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="name",
extensions=None,
),
is_container="list",
yang_name="p2p-primary-path",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=True,
)
p2p_primary_path = __builtin__.property(
_get_p2p_primary_path, _set_p2p_primary_path
)
_pyangbind_elements = OrderedDict([("p2p_primary_path", p2p_primary_path)])
from . import p2p_primary_path_
class p2p_primary_path(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/mpls/lsps/constrained-path/tunnels/tunnel/p2p-tunnel-attributes/p2p-primary-path. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Primary paths associated with the LSP
"""
__slots__ = ("_path_helper", "_extmethods", "__p2p_primary_path")
_yang_name = "p2p-primary-path"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__p2p_primary_path = YANGDynClass(
base=YANGListType(
"name",
p2p_primary_path_.p2p_primary_path,
yang_name="p2p-primary-path",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="name",
extensions=None,
),
is_container="list",
yang_name="p2p-primary-path",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"mpls",
"lsps",
"constrained-path",
"tunnels",
"tunnel",
"p2p-tunnel-attributes",
"p2p-primary-path",
]
def _get_p2p_primary_path(self):
"""
Getter method for p2p_primary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path (list)
YANG Description: List of p2p primary paths for a tunnel
"""
return self.__p2p_primary_path
def _set_p2p_primary_path(self, v, load=False):
"""
Setter method for p2p_primary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_p2p_primary_path is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_p2p_primary_path() directly.
YANG Description: List of p2p primary paths for a tunnel
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGListType(
"name",
p2p_primary_path_.p2p_primary_path,
yang_name="p2p-primary-path",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="name",
extensions=None,
),
is_container="list",
yang_name="p2p-primary-path",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """p2p_primary_path must be of a type compatible with list""",
"defined-type": "list",
"generated-type": """YANGDynClass(base=YANGListType("name",p2p_primary_path_.p2p_primary_path, yang_name="p2p-primary-path", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="p2p-primary-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=True)""",
}
)
self.__p2p_primary_path = t
if hasattr(self, "_set"):
self._set()
def _unset_p2p_primary_path(self):
self.__p2p_primary_path = YANGDynClass(
base=YANGListType(
"name",
p2p_primary_path_.p2p_primary_path,
yang_name="p2p-primary-path",
parent=self,
is_container="list",
user_ordered=False,
path_helper=self._path_helper,
yang_keys="name",
extensions=None,
),
is_container="list",
yang_name="p2p-primary-path",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="list",
is_config=True,
)
p2p_primary_path = __builtin__.property(
_get_p2p_primary_path, _set_p2p_primary_path
)
_pyangbind_elements = OrderedDict([("p2p_primary_path", p2p_primary_path)])
|
napalm-automation/napalm-yang
|
napalm_yang/models/openconfig/network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/__init__.py
|
Python
|
apache-2.0
| 14,694 | 0.001021 |
"""coop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from rest_framework import routers
from guide.views import area_map, ArtificialProblemViewSet, NaturalProblemViewSet, ProblemImageViewSet, AreaViewSet, SectorViewSet
from members.views import UserViewSet,MemberViewSet
admin.site.site_header='Galway Climbing Co-op admin'
admin.site.site_title='Galway Climbing Co-op admin'
#admin.site.index_title='Galway Climbing Co-op admin'
# django rest framework url routers for viewsets
router = routers.DefaultRouter()
router.register(r'artificialproblems',ArtificialProblemViewSet)
router.register(r'naturalproblems',NaturalProblemViewSet)
router.register(r'problemimages',ProblemImageViewSet)
router.register(r'users',UserViewSet)
router.register(r'members',MemberViewSet)
router.register(r'areas',AreaViewSet)
router.register(r'sectors',SectorViewSet)
urlpatterns = [
    url(r'^api/', include(router.urls)),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^admin/', admin.site.urls),
url(r'^$',area_map,{'area_id':1}),
url(r'^guide/', include('guide.urls',namespace="guide")),
url(r'^home/', include('homepage.urls',namespace="homepage")),
url(r'^members/auth/', include('members.urls')),
# note that the (customised) templates for the auth views are in [BASE_DIR]/templates/registration
url(r'^members/', include('members.urls',namespace="members")),
] + static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
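# Usage sketch (hedged): with the DefaultRouter registrations above the API is
# served under /api/, e.g. /api/areas/ (list) and /api/areas/<pk>/ (detail).
# DRF derives the reversible route names from each viewset's queryset model,
# so assuming AreaViewSet is backed by an Area model:
#
#   from django.core.urlresolvers import reverse   # Django 1.10-era import
#   reverse('area-list')                           # -> '/api/areas/'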
|
jalibras/coop
|
coop/coop/urls.py
|
Python
|
apache-2.0
| 2,258 | 0.0124 |
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **Functionality related to shake data files.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'tim@linfiniti.com'
__version__ = '0.5.0'
__date__ = '30/07/2012'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import os
import shutil
from datetime import datetime
from zipfile import ZipFile
# The logger is initialised in utils.py by init
import logging
LOGGER = logging.getLogger('InaSAFE')
from rt_exceptions import (
EventUndefinedError,
EventIdError,
NetworkError,
EventValidationError,
InvalidInputZipError,
ExtractionError)
from ftp_client import FtpClient
from utils import shakemap_zip_dir, shakemap_extract_dir
class ShakeData:
"""A class for retrieving, reading, converting and extracting
data from shakefiles.
Shake files are provided on an ftp server. There are two files for every
event:
* an 'inp' file
* an 'out' file
These files are provided on the ftp server as zip files. For example:
* `ftp://118.97.83.243/20110413170148.inp.zip`_
* `ftp://118.97.83.243/20110413170148.out.zip`_
There are numerous files provided within these two zip files, but there
is only really one that we are interested in:
* grid.xml - which contains all the metadata pertaining to the event
The remaining files are fetched for completeness and possibly use in the
future.
This class provides a high level interface for retrieving this data and
then extracting various by products from it.
"""
def __init__(self, event=None, host='118.97.83.243'):
"""Constructor for the ShakeData class
:param event: (Optional) a string representing the event id
that this raster is associated with. e.g. 20110413170148.
**If no event id is supplied, a query will be made to the
ftp server, and the latest event id assigned.**
:param host: (Optional) a string representing the ip address
or host name of the server from which the data should be
retrieved. It assumes that the data is in the root directory.
Defaults to 118.97.83.243
:returns: None
:raises: None
"""
self.event_id = event
self.host = host
# private Shake event instance associated with this shake dataset
self._shakeEvent = None
if self.event_id is None:
try:
self.get_latest_event_id()
except NetworkError:
raise
else:
# If we fetched it above using get_latest_event_id we assume it is
# already validated.
try:
self.validate_event()
except EventValidationError:
raise
# If event_id is still None after all the above, moan....
if self.event_id is None:
message = ('No id was passed to the constructor and the '
'latest id could not be retrieved from the server.')
LOGGER.exception('ShakeData initialisation failed')
raise EventIdError(message)
def get_latest_event_id(self):
"""Query the ftp server and determine the latest event id.
:return: A string containing a valid event id.
:raises: NetworkError
"""
ftp_client = FtpClient()
try:
ftp_client_list = ftp_client.get_listing()
ftp_client_list.sort(key=lambda x: x.lower())
except NetworkError:
raise
now = datetime.now()
now = int(
'%04d%02d%02d%02d%02d%02d' % (
now.year, now.month, now.day, now.hour, now.minute, now.second
))
event_id = now + 1
while int(event_id) > now:
if len(ftp_client_list) < 1:
raise EventIdError('Latest Event Id could not be obtained')
event_id = ftp_client_list.pop().split('/')[-1].split('.')[0]
if event_id is None:
raise EventIdError('Latest Event Id could not be obtained')
self.event_id = event_id
def is_on_server(self):
"""Check the event associated with this instance exists on the server.
:return: True if valid, False if not
:raises: NetworkError
"""
input_file_name, output_file_name = self.file_names()
file_list = [input_file_name, output_file_name]
ftp_client = FtpClient()
return ftp_client.has_files(file_list)
def file_names(self):
"""Return file names for the inp and out files based on the event id.
e.g. 20120726022003.inp.zip, 20120726022003.out.zip
:return: Tuple Consisting of inp and out local cache
paths.
:rtype: tuple (str, str)
:raises: None
"""
input_file_name = '%s.inp.zip' % self.event_id
output_file_name = '%s.out.zip' % self.event_id
return input_file_name, output_file_name
def cache_paths(self):
"""Return the paths to the inp and out files as expected locally.
:return: Tuple consisting of inp and out local cache paths.
:rtype: tuple (str, str)
:raises: None
"""
input_file_name, output_file_name = self.file_names()
input_file_path = os.path.join(shakemap_zip_dir(), input_file_name)
output_file_path = os.path.join(shakemap_zip_dir(), output_file_name)
return input_file_path, output_file_path
def is_cached(self):
"""Check the event associated with this instance exists in cache.
:return: True if locally cached, False if not
:raises: None
"""
input_file_path, output_file_path = self.cache_paths()
if os.path.exists(input_file_path) and \
os.path.exists(output_file_path):
# TODO: we should actually try to unpack them for deeper validation
return True
else:
LOGGER.debug('%s is not cached' % input_file_path)
LOGGER.debug('%s is not cached' % output_file_path)
return False
def validate_event(self):
"""Check that the event associated with this instance exists either
in the local event cache, or on the remote ftp site.
:return: True if valid, False if not
:raises: NetworkError
"""
# First check local cache
if self.is_cached():
return True
else:
return self.is_on_server()
#noinspection PyMethodMayBeStatic
def _fetch_file(self, event_file, retries=3):
"""Private helper to fetch a file from the ftp site.
e.g. for event 20110413170148 this file would be fetched::
ftp://118.97.83.243/20110413170148.inp.zip
and this local file created::
/tmp/realtime/20110413170148.inp.zip
.. note:: If a cached copy of the file exits, the path to the cache
copy will simply be returned without invoking any network requests.
:param event_file: Filename on server e.g.20110413170148.inp.zip
:type event_file: str
:param retries: Number of reattempts that should be made in
in case of network error etc.
:type retries: int
:return: A string for the dataset path on the local storage system.
:rtype: str
:raises: EventUndefinedError, NetworkError
"""
# Return the cache copy if it exists
local_path = os.path.join(shakemap_zip_dir(), event_file)
if os.path.exists(local_path):
return local_path
#Otherwise try to fetch it using ftp
for counter in range(retries):
last_error = None
try:
client = FtpClient()
client.get_file(event_file, local_path)
except NetworkError, e:
last_error = e
except:
LOGGER.exception(
'Could not fetch shake event from server %s'
% event_file)
raise
if last_error is None:
return local_path
LOGGER.info('Fetching failed, attempt %s' % counter)
LOGGER.exception('Could not fetch shake event from server %s'
% event_file)
raise Exception('Could not fetch shake event from server %s'
% event_file)
def fetch_input(self):
"""Fetch the input file for the event id associated with this class
e.g. for event 20110413170148 this file would be fetched::
ftp://118.97.83.243/20110413170148.inp.zip
and this local file created::
/tmp/realtime/20110413170148.inp.zip
:return: A string for the 'inp' dataset path on the local storage
system.
:raises: EventUndefinedError, NetworkError
"""
if self.event_id is None:
raise EventUndefinedError('Event is none')
event_file = '%s.inp.zip' % self.event_id
try:
return self._fetch_file(event_file)
except (EventUndefinedError, NetworkError):
raise
def fetch_output(self):
"""Fetch the output file for the event id associated with this class.
e.g. for event 20110413170148 this file would be fetched::
ftp://118.97.83.243/20110413170148.out.zip
and this local file created::
/tmp/realtime/20110413170148.out.zip
:return: A string for the 'out' dataset path on the local storage
system.
:raises: EventUndefinedError, NetworkError
"""
if self.event_id is None:
raise EventUndefinedError('Event is none')
event_file = '%s.out.zip' % self.event_id
try:
return self._fetch_file(event_file)
except (EventUndefinedError, NetworkError):
raise
def fetch_event(self):
"""Fetch both the input and output shake data from the server for
the event id associated with this class.
:return: A two tuple where the first item is the inp dataset path and
the second the out dataset path on the local storage system.
:raises: EventUndefinedError, NetworkError
"""
if self.event_id is None:
raise EventUndefinedError('Event is none')
try:
input_file = self.fetch_input()
output_file = self.fetch_output()
except (EventUndefinedError, NetworkError):
raise
return input_file, output_file
def extract(self, force_flag=False):
"""Extract the zipped resources. The two zips associated with this
shakemap will be extracted to e.g.
:file:`/tmp/inasafe/realtime/shakemaps-extracted/20120726022003`
After extraction the complete path will appear something like this:
:file:`/tmp/inasafe/realtime/shakemaps-extracted/
20120726022003/usr/local/smap/data/20120726022003`
with input and output directories appearing beneath that.
This method will then move the grid.xml file up to the root of
the extract dir and recursively remove the extracted dirs.
After this final step, the following file will be present:
:file:`/tmp/inasafe/realtime/shakemaps-extracted/
20120726022003/grid.xml`
If the zips have not already been retrieved from the ftp server,
they will be fetched first automatically.
If the zips have previously been extracted, the extract dir will
be completely removed and the dataset re-extracted.
.. note:: You should not store any of your own working data in the
extract dir - it should be treated as transient.
.. note:: the grid.xml also contains MMI point data that
we care about and will extract as a matrix (MMI in the 5th column).
:param force_flag: (Optional) Whether to force re-extraction. If the
files were previously extracted, you can force them to be
extracted again. If False, grid.xml local file is used if
it is cached. Default False.
:return: a string containing the grid.xml paths e.g.::
myGridXml = myShakeData.extract()
print myGridXml
/tmp/inasafe/realtime/shakemaps-extracted/20120726022003/grid.xml
:raises: InvalidInputZipError, InvalidOutputZipError
"""
final_grid_xml_file = os.path.join(self.extract_dir(), 'grid.xml')
if force_flag:
self.remove_extracted_files()
elif os.path.exists(final_grid_xml_file):
return final_grid_xml_file
event_input, event_output = self.fetch_event()
input_zip = ZipFile(event_input)
output_zip = ZipFile(event_output)
expected_grid_xml_file = (
'usr/local/smap/data/%s/output/grid.xml' %
self.event_id)
output_name_list = output_zip.namelist()
if expected_grid_xml_file not in output_name_list:
raise InvalidInputZipError(
'The output zip does not contain an '
'%s file.' % expected_grid_xml_file)
extract_dir = self.extract_dir()
input_zip.extractall(extract_dir)
output_zip.extractall(extract_dir)
# move the file we care about to the top of the extract dir
shutil.copyfile(os.path.join(self.extract_dir(),
expected_grid_xml_file),
final_grid_xml_file)
# Get rid of all the other extracted stuff
user_dir = os.path.join(self.extract_dir(), 'usr')
if os.path.isdir(user_dir):
shutil.rmtree(user_dir)
if not os.path.exists(final_grid_xml_file):
raise ExtractionError('Error copying grid.xml')
return final_grid_xml_file
def extract_dir(self):
"""A helper method to get the path to the extracted datasets.
:return: A string representing the absolute local filesystem path to
the unzipped shake event dir. e.g.
:file:`/tmp/inasafe/realtime/shakemaps-extracted/20120726022003`
        :raises: Any exceptions will be propagated
"""
return os.path.join(shakemap_extract_dir(), self.event_id)
def remove_extracted_files(self):
"""Tidy up the filesystem by removing all extracted files
for the given event instance.
        :return: None
        :raises: Any error e.g. a file permission error will be raised.
"""
extracted_dir = self.extract_dir()
if os.path.isdir(extracted_dir):
shutil.rmtree(extracted_dir)
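if __name__ == '__main__':
    # Hedged usage sketch, not part of the original module: fetch and unpack
    # the latest event (requires network access to the configured ftp host).
    shake_data = ShakeData()
    grid_xml = shake_data.extract()
    print grid_xml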
|
opengeogroep/inasafe
|
realtime/shake_data.py
|
Python
|
gpl-3.0
| 15,213 | 0.000394 |
#***************************************************************
#* Name: LMS7002_DCCAL.py
#* Purpose: Class implementing LMS7002 DCCAL functions
#* Author: Lime Microsystems ()
#* Created: 2017-02-10
#* Copyright: Lime Microsystems (limemicro.com)
#* License:
#**************************************************************
from LMS7002_base import *
class LMS7002_DCCAL(LMS7002_base):
__slots__ = [] # Used to generate error on typos
def __init__(self, chip):
self.chip = chip
self.channel = None
self.prefix = "DCCAL_"
#
# DCCAL_CFG (0x05C0)
#
# DCMODE
@property
def DCMODE(self):
"""
Get the value of DCMODE
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'DCMODE')
else:
raise ValueError("Bitfield DCMODE is not supported on chip version "+str(self.chip.chipID))
@DCMODE.setter
def DCMODE(self, value):
"""
Set the value of DCMODE
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1, 'MANUAL', 'AUTO']:
raise ValueError("Value must be [0,1,'MANUAL','AUTO']")
if value==0 or value=='MANUAL':
val = 0
else:
val = 1
self._writeReg('CFG', 'DCMODE', val)
else:
raise ValueError("Bitfield DCMODE is not supported on chip version "+str(self.chip.chipID))
# PD_DCDAC_RXB
@property
def PD_DCDAC_RXB(self):
"""
Get the value of PD_DCDAC_RXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCDAC_RXB')
else:
raise ValueError("Bitfield PD_DCDAC_RXB is not supported on chip version "+str(self.chip.chipID))
@PD_DCDAC_RXB.setter
def PD_DCDAC_RXB(self, value):
"""
Set the value of PD_DCDAC_RXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCDAC_RXB', value)
else:
raise ValueError("Bitfield PD_DCDAC_RXB is not supported on chip version "+str(self.chip.chipID))
# PD_DCDAC_RXA
@property
def PD_DCDAC_RXA(self):
"""
Get the value of PD_DCDAC_RXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCDAC_RXA')
else:
raise ValueError("Bitfield PD_DCDAC_RXA is not supported on chip version "+str(self.chip.chipID))
@PD_DCDAC_RXA.setter
def PD_DCDAC_RXA(self, value):
"""
Set the value of PD_DCDAC_RXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCDAC_RXA', value)
else:
raise ValueError("Bitfield PD_DCDAC_RXA is not supported on chip version "+str(self.chip.chipID))
# PD_DCDAC_TXB
@property
def PD_DCDAC_TXB(self):
"""
Get the value of PD_DCDAC_TXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCDAC_TXB')
else:
raise ValueError("Bitfield PD_DCDAC_TXB is not supported on chip version "+str(self.chip.chipID))
@PD_DCDAC_TXB.setter
def PD_DCDAC_TXB(self, value):
"""
Set the value of PD_DCDAC_TXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCDAC_TXB', value)
else:
raise ValueError("Bitfield PD_DCDAC_TXB is not supported on chip version "+str(self.chip.chipID))
# PD_DCDAC_TXA
@property
def PD_DCDAC_TXA(self):
"""
Get the value of PD_DCDAC_TXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCDAC_TXA')
else:
raise ValueError("Bitfield PD_DCDAC_TXA is not supported on chip version "+str(self.chip.chipID))
@PD_DCDAC_TXA.setter
def PD_DCDAC_TXA(self, value):
"""
Set the value of PD_DCDAC_TXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCDAC_TXA', value)
else:
raise ValueError("Bitfield PD_DCDAC_TXA is not supported on chip version "+str(self.chip.chipID))
# PD_DCCMP_RXB
@property
def PD_DCCMP_RXB(self):
"""
Get the value of PD_DCCMP_RXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCCMP_RXB')
else:
raise ValueError("Bitfield PD_DCCMP_RXB is not supported on chip version "+str(self.chip.chipID))
@PD_DCCMP_RXB.setter
def PD_DCCMP_RXB(self, value):
"""
Set the value of PD_DCCMP_RXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCCMP_RXB', value)
else:
raise ValueError("Bitfield PD_DCCMP_RXB is not supported on chip version "+str(self.chip.chipID))
# PD_DCCMP_RXA
@property
def PD_DCCMP_RXA(self):
"""
Get the value of PD_DCCMP_RXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCCMP_RXA')
else:
raise ValueError("Bitfield PD_DCCMP_RXA is not supported on chip version "+str(self.chip.chipID))
@PD_DCCMP_RXA.setter
def PD_DCCMP_RXA(self, value):
"""
Set the value of PD_DCCMP_RXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCCMP_RXA', value)
else:
raise ValueError("Bitfield PD_DCCMP_RXA is not supported on chip version "+str(self.chip.chipID))
# PD_DCCMP_TXB
@property
def PD_DCCMP_TXB(self):
"""
Get the value of PD_DCCMP_TXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCCMP_TXB')
else:
raise ValueError("Bitfield PD_DCCMP_TXB is not supported on chip version "+str(self.chip.chipID))
@PD_DCCMP_TXB.setter
def PD_DCCMP_TXB(self, value):
"""
Set the value of PD_DCCMP_TXB
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCCMP_TXB', value)
else:
raise ValueError("Bitfield PD_DCCMP_TXB is not supported on chip version "+str(self.chip.chipID))
# PD_DCCMP_TXA
@property
def PD_DCCMP_TXA(self):
"""
Get the value of PD_DCCMP_TXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG', 'PD_DCCMP_TXA')
else:
raise ValueError("Bitfield PD_DCCMP_TXA is not supported on chip version "+str(self.chip.chipID))
@PD_DCCMP_TXA.setter
def PD_DCCMP_TXA(self, value):
"""
Set the value of PD_DCCMP_TXA
"""
if self.chip.chipID == self.chip.chipIDMR3:
if value not in [0, 1]:
raise ValueError("Value must be [0,1]")
self._writeReg('CFG', 'PD_DCCMP_TXA', value)
else:
raise ValueError("Bitfield PD_DCCMP_TXA is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_STAT (0x05C1)
#
# DCCAL_CALSTATUS<7:0>
@property
def DCCAL_CALSTATUS(self):
"""
Get the value of DCCAL_CALSTATUS<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('STAT', 'DCCAL_CALSTATUS<7:0>')
else:
raise ValueError("Bitfield DCCAL_CALSTATUS<7:0> is not supported on chip version "+str(self.chip.chipID))
@DCCAL_CALSTATUS.setter
def DCCAL_CALSTATUS(self, value):
"""
Set the value of DCCAL_CALSTATUS<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('STAT', 'DCCAL_CALSTATUS<7:0>', value)
else:
raise ValueError("Bitfield DCCAL_CALSTATUS<7:0> is not supported on chip version "+str(self.chip.chipID))
# DCCAL_CMPSTATUS<7:0>
@property
def DCCAL_CMPSTATUS(self):
"""
Get the value of DCCAL_CMPSTATUS<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('STAT', 'DCCAL_CMPSTATUS<7:0>')
else:
raise ValueError("Bitfield DCCAL_CMPSTATUS<7:0> is not supported on chip version "+str(self.chip.chipID))
@DCCAL_CMPSTATUS.setter
def DCCAL_CMPSTATUS(self, value):
"""
Set the value of DCCAL_CMPSTATUS<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('STAT', 'DCCAL_CMPSTATUS<7:0>', value)
else:
raise ValueError("Bitfield DCCAL_CMPSTATUS<7:0> is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_CFG2 (0x05C2)
#
# DCCAL_CMPCFG<7:0>
@property
def DCCAL_CMPCFG(self):
"""
Get the value of DCCAL_CMPCFG<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG2', 'DCCAL_CMPCFG<7:0>')
else:
raise ValueError("Bitfield DCCAL_CMPCFG<7:0> is not supported on chip version "+str(self.chip.chipID))
@DCCAL_CMPCFG.setter
def DCCAL_CMPCFG(self, value):
"""
Set the value of DCCAL_CMPCFG<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG2', 'DCCAL_CMPCFG<7:0>', value)
else:
raise ValueError("Bitfield DCCAL_CMPCFG<7:0> is not supported on chip version "+str(self.chip.chipID))
# DCCAL_START<7:0>
@property
def DCCAL_START(self):
"""
Get the value of DCCAL_START<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CFG2', 'DCCAL_START<7:0>')
else:
raise ValueError("Bitfield DCCAL_START<7:0> is not supported on chip version "+str(self.chip.chipID))
@DCCAL_START.setter
def DCCAL_START(self, value):
"""
Set the value of DCCAL_START<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CFG2', 'DCCAL_START<7:0>', value)
else:
raise ValueError("Bitfield DCCAL_START<7:0> is not supported on chip version "+str(self.chip.chipID))
def startRXBQ(self):
"""
Starts RXBQ calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1<<7
def startRXBI(self):
"""
Starts RXBI calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1<<6
def startRXAQ(self):
"""
Starts RXAQ calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1<<5
def startRXAI(self):
"""
Starts RXAI calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1<<4
def startTXBQ(self):
"""
Starts TXBQ calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1<<3
def startTXBI(self):
"""
Starts TXBI calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1<<2
def startTXAQ(self):
"""
Starts TXAQ calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1<<1
def startTXAI(self):
"""
Starts TXAI calibration.
"""
self.DCCAL_START = 0
self.DCCAL_START = 1
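    # Hedged caller-side sketch (not part of the generated register map): the
    # helpers above pulse a single DCCAL_START bit (TXAI=bit0 ... RXBQ=bit7).
    # Assuming DCCAL_CALSTATUS reports a per-channel done flag in the same bit
    # order, a manual calibration could be driven roughly like this:
    #
    #   dccal.DCMODE = 'AUTO'
    #   dccal.startTXAI()
    #   while not (dccal.DCCAL_CALSTATUS & 0x01):
    #       pass  # poll until the TXAI done bit reads back set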
#
# DCCAL_TXAI (0x05C3)
#
@property
def DC_TXAI(self):
"""
Get the value of DC_TXAI
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('TXAI', 'DCRD_TXAI', 0)
self._writeReg('TXAI', 'DCRD_TXAI', 1)
val = self._readReg('TXAI', 'DC_TXAI<10:0>')
return self.signMagnitudeToInt(val, 11)
else:
raise ValueError("Bitfield DC_TXAI is not supported on chip version "+str(self.chip.chipID))
@DC_TXAI.setter
def DC_TXAI(self, value):
"""
Set the value of DC_TXAI
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-1024<= value <=1024):
raise ValueError("Value must be [-1024..1024]")
val = self.intToSignMagnitude(value, 11)
self._writeReg('TXAI', 'DC_TXAI<10:0>', val)
self._writeReg('TXAI', 'DCWR_TXAI', 0)
self._writeReg('TXAI', 'DCWR_TXAI', 1)
else:
raise ValueError("Bitfield TXAI is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_TXAQ (0x05C4)
#
@property
def DC_TXAQ(self):
"""
Get the value of DC_TXAQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('TXAQ', 'DCRD_TXAQ', 0)
self._writeReg('TXAQ', 'DCRD_TXAQ', 1)
val = self._readReg('TXAQ', 'DC_TXAQ<10:0>')
return self.signMagnitudeToInt(val, 11)
else:
raise ValueError("Bitfield DC_TXAQ is not supported on chip version "+str(self.chip.chipID))
@DC_TXAQ.setter
def DC_TXAQ(self, value):
"""
Set the value of DC_TXAQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-1024<= value <=1024):
raise ValueError("Value must be [-1024..1024]")
val = self.intToSignMagnitude(value, 11)
self._writeReg('TXAQ', 'DC_TXAQ<10:0>', val)
self._writeReg('TXAQ', 'DCWR_TXAQ', 0)
self._writeReg('TXAQ', 'DCWR_TXAQ', 1)
else:
raise ValueError("Bitfield TXAQ is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_TXBI (0x05C5)
#
@property
def DC_TXBI(self):
"""
Get the value of DC_TXBI
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('TXBI', 'DCRD_TXBI', 0)
self._writeReg('TXBI', 'DCRD_TXBI', 1)
val = self._readReg('TXBI', 'DC_TXBI<10:0>')
return self.signMagnitudeToInt(val, 11)
else:
raise ValueError("Bitfield DC_TXBI is not supported on chip version "+str(self.chip.chipID))
@DC_TXBI.setter
def DC_TXBI(self, value):
"""
Set the value of DC_TXBI
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-1024<= value <=1024):
raise ValueError("Value must be [-1024..1024]")
val = self.intToSignMagnitude(value, 11)
self._writeReg('TXBI', 'DC_TXBI<10:0>', val)
self._writeReg('TXBI', 'DCWR_TXBI', 0)
self._writeReg('TXBI', 'DCWR_TXBI', 1)
else:
raise ValueError("Bitfield TXBI is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_TXBQ (0x05C6)
#
@property
def DC_TXBQ(self):
"""
Get the value of DC_TXBQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('TXBQ', 'DCRD_TXBQ', 0)
self._writeReg('TXBQ', 'DCRD_TXBQ', 1)
val = self._readReg('TXBQ', 'DC_TXBQ<10:0>')
return self.signMagnitudeToInt(val, 11)
else:
raise ValueError("Bitfield DC_TXBQ is not supported on chip version "+str(self.chip.chipID))
@DC_TXBQ.setter
def DC_TXBQ(self, value):
"""
Set the value of DC_TXBQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-1024<= value <=1024):
raise ValueError("Value must be [-1024..1024]")
val = self.intToSignMagnitude(value, 11)
self._writeReg('TXBQ', 'DC_TXBQ<10:0>', val)
self._writeReg('TXBQ', 'DCWR_TXBQ', 0)
self._writeReg('TXBQ', 'DCWR_TXBQ', 1)
else:
raise ValueError("Bitfield TXBQ is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_RXAI (0x05C7)
#
@property
def DC_RXAI(self):
"""
Get the value of DC_RXAI
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('RXAI', 'DCRD_RXAI', 0)
self._writeReg('RXAI', 'DCRD_RXAI', 1)
val = self._readReg('RXAI', 'DC_RXAI<6:0>')
return self.signMagnitudeToInt(val, 7)
else:
raise ValueError("Bitfield DC_RXAI is not supported on chip version "+str(self.chip.chipID))
@DC_RXAI.setter
def DC_RXAI(self, value):
"""
Set the value of DC_RXAI
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-63<= value <=63):
raise ValueError("Value must be [-63..63]")
val = self.intToSignMagnitude(value, 7)
self._writeReg('RXAI', 'DC_RXAI<6:0>', val)
self._writeReg('RXAI', 'DCWR_RXAI', 0)
self._writeReg('RXAI', 'DCWR_RXAI', 1)
else:
raise ValueError("Bitfield RXAI is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_RXAQ (0x05C8)
#
@property
def DC_RXAQ(self):
"""
Get the value of DC_RXAQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('RXAQ', 'DCRD_RXAQ', 0)
self._writeReg('RXAQ', 'DCRD_RXAQ', 1)
val = self._readReg('RXAQ', 'DC_RXAQ<6:0>')
return self.signMagnitudeToInt(val, 7)
else:
raise ValueError("Bitfield DC_RXAQ is not supported on chip version "+str(self.chip.chipID))
@DC_RXAQ.setter
def DC_RXAQ(self, value):
"""
Set the value of DC_RXAQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-63<= value <=63):
raise ValueError("Value must be [-63..63]")
val = self.intToSignMagnitude(value, 7)
self._writeReg('RXAQ', 'DC_RXAQ<6:0>', val)
self._writeReg('RXAQ', 'DCWR_RXAQ', 0)
self._writeReg('RXAQ', 'DCWR_RXAQ', 1)
else:
raise ValueError("Bitfield RXAQ is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_RXBI (0x05C9)
#
@property
def DC_RXBI(self):
"""
Get the value of DC_RXBI
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('RXBI', 'DCRD_RXBI', 0)
self._writeReg('RXBI', 'DCRD_RXBI', 1)
val = self._readReg('RXBI', 'DC_RXBI<6:0>')
return self.signMagnitudeToInt(val, 7)
else:
raise ValueError("Bitfield DC_RXBI is not supported on chip version "+str(self.chip.chipID))
@DC_RXBI.setter
def DC_RXBI(self, value):
"""
Set the value of DC_RXBI
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-63<= value <=63):
raise ValueError("Value must be [-63..63]")
val = self.intToSignMagnitude(value, 7)
self._writeReg('RXBI', 'DC_RXBI<6:0>', val)
self._writeReg('RXBI', 'DCWR_RXBI', 0)
self._writeReg('RXBI', 'DCWR_RXBI', 1)
else:
raise ValueError("Bitfield RXBI is not supported on chip version "+str(self.chip.chipID))
#
# DCCAL_RXBQ (0x05CA)
#
@property
def DC_RXBQ(self):
"""
Get the value of DC_RXBQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
self._writeReg('RXBQ', 'DCRD_RXBQ', 0)
self._writeReg('RXBQ', 'DCRD_RXBQ', 1)
val = self._readReg('RXBQ', 'DC_RXBQ<6:0>')
return self.signMagnitudeToInt(val, 7)
else:
raise ValueError("Bitfield DC_RXBQ is not supported on chip version "+str(self.chip.chipID))
@DC_RXBQ.setter
def DC_RXBQ(self, value):
"""
Set the value of DC_RXBQ
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(-63<= value <=63):
raise ValueError("Value must be [-63..63]")
val = self.intToSignMagnitude(value, 7)
self._writeReg('RXBQ', 'DC_RXBQ<6:0>', val)
self._writeReg('RXBQ', 'DCWR_RXBQ', 0)
self._writeReg('RXBQ', 'DCWR_RXBQ', 1)
else:
raise ValueError("Bitfield RXBQ is not supported on chip version "+str(self.chip.chipID))
# DC_RXCDIV<7:0>
@property
def DC_RXCDIV(self):
"""
Get the value of DC_RXCDIV<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CLKDIV', 'DC_RXCDIV<7:0>')
else:
raise ValueError("Bitfield DC_RXCDIV<7:0> is not supported on chip version "+str(self.chip.chipID))
@DC_RXCDIV.setter
def DC_RXCDIV(self, value):
"""
Set the value of DC_RXCDIV<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CLKDIV', 'DC_RXCDIV<7:0>', value)
else:
raise ValueError("Bitfield DC_RXCDIV<7:0> is not supported on chip version "+str(self.chip.chipID))
# DC_TXCDIV<7:0>
@property
def DC_TXCDIV(self):
"""
Get the value of DC_TXCDIV<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('CLKDIV', 'DC_TXCDIV<7:0>')
else:
raise ValueError("Bitfield DC_TXCDIV<7:0> is not supported on chip version "+str(self.chip.chipID))
@DC_TXCDIV.setter
def DC_TXCDIV(self, value):
"""
Set the value of DC_TXCDIV<7:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0<= value <=255):
raise ValueError("Value must be [0..255]")
self._writeReg('CLKDIV', 'DC_TXCDIV<7:0>', value)
else:
raise ValueError("Bitfield DC_TXCDIV<7:0> is not supported on chip version "+str(self.chip.chipID))
# HYSCMP_RXB<2:0>
@property
def HYSCMP_RXB(self):
"""
Get the value of HYSCMP_RXB<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('HYSTCFG', 'HYSCMP_RXB<2:0>')
else:
raise ValueError("Bitfield HYSCMP_RXB<2:0> is not supported on chip version "+str(self.chip.chipID))
@HYSCMP_RXB.setter
def HYSCMP_RXB(self, value):
"""
Set the value of HYSCMP_RXB<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0 <= value <= 7):
raise ValueError("Value must be [0..7]")
self._writeReg('HYSTCFG', 'HYSCMP_RXB<2:0>', value)
else:
raise ValueError("Bitfield HYSCMP_RXB<2:0> is not supported on chip version "+str(self.chip.chipID))
# HYSCMP_RXA<2:0>
@property
def HYSCMP_RXA(self):
"""
Get the value of HYSCMP_RXA<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('HYSTCFG', 'HYSCMP_RXA<2:0>')
else:
raise ValueError("Bitfield HYSCMP_RXA<2:0> is not supported on chip version "+str(self.chip.chipID))
@HYSCMP_RXA.setter
def HYSCMP_RXA(self, value):
"""
Set the value of HYSCMP_RXA<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0 <= value <= 7):
raise ValueError("Value must be [0..7]")
self._writeReg('HYSTCFG', 'HYSCMP_RXA<2:0>', value)
else:
raise ValueError("Bitfield HYSCMP_RXA<2:0> is not supported on chip version "+str(self.chip.chipID))
# HYSCMP_TXB<2:0>
@property
def HYSCMP_TXB(self):
"""
Get the value of HYSCMP_TXB<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('HYSTCFG', 'HYSCMP_TXB<2:0>')
else:
raise ValueError("Bitfield HYSCMP_TXB<2:0> is not supported on chip version "+str(self.chip.chipID))
@HYSCMP_TXB.setter
def HYSCMP_TXB(self, value):
"""
Set the value of HYSCMP_TXB<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0 <= value <= 7):
raise ValueError("Value must be [0..7]")
self._writeReg('HYSTCFG', 'HYSCMP_TXB<2:0>', value)
else:
raise ValueError("Bitfield HYSCMP_TXB<2:0> is not supported on chip version "+str(self.chip.chipID))
# HYSCMP_TXA<2:0>
@property
def HYSCMP_TXA(self):
"""
Get the value of HYSCMP_TXA<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
return self._readReg('HYSTCFG', 'HYSCMP_TXA<2:0>')
else:
raise ValueError("Bitfield HYSCMP_TXA<2:0> is not supported on chip version "+str(self.chip.chipID))
@HYSCMP_TXA.setter
def HYSCMP_TXA(self, value):
"""
Set the value of HYSCMP_TXA<2:0>
"""
if self.chip.chipID == self.chip.chipIDMR3:
if not(0 <= value <= 7):
raise ValueError("Value must be [0..7]")
self._writeReg('HYSTCFG', 'HYSCMP_TXA<2:0>', value)
else:
raise ValueError("Bitfield HYSCMP_TXA<2:0> is not supported on chip version "+str(self.chip.chipID))
|
bvacaliuc/pyrasdr
|
plugins/pyLMS7002M/pyLMS7002M/LMS7002_DCCAL.py
|
Python
|
gpl-3.0
| 27,053 | 0.007467 |
"""
Grab screen data from OMERO based on Screen ID
"""
import csv
import multiprocessing
import progressbar
import signal
import sys
import time
from argparse import ArgumentParser
import omeroidr.connect as connect
from omeroidr.data import Data
parser = ArgumentParser(prog='OMERO screen data downloader')
parser.add_argument('-i', '--id', help='Id of the screen')
parser.add_argument('-o', '--output', required=False, default='omero.tab', help='Path to the tab separated output file')
parser.add_argument('-s', '--server', required=False, default='http://idr-demo.openmicroscopy.org', help='Base url for OMERO server')
parser.add_argument('-u', '--user', required=False, help='OMERO Username')
parser.add_argument('-w', '--password', required=False, help='OMERO Password')
pargs = parser.parse_args()
# list of well metadata
wells_data = []
# initialize the progress bar
widgets = [progressbar.Percentage(), " ", progressbar.Bar(), " ", progressbar.ETA()]
pbar = progressbar.ProgressBar(widgets=widgets)
def init_worker():
"""
Initialise multiprocessing pool
"""
signal.signal(signal.SIGINT, signal.SIG_IGN)
def well_details_callback(well):
"""
Callback from apply_async. Used to update progress bar
:param well: Well metadata object
"""
pbar.update(pbar.previous_value + 1)
# append well the wells data list
wells_data.append(well)
def main():
# login
session = connect.connect_to_omero(pargs.server, pargs.user, pargs.password)
# init data
omero_data = Data(session, pargs.server)
# get wells for screen
print('loading plates...')
wells = omero_data.get_wells(pargs.id)
print('Retrieving annotations...')
# get all annotations
# using a pool of processes
p = multiprocessing.Pool(multiprocessing.cpu_count(), init_worker)
pbar.max_value = len(wells)
pbar.start()
for well in wells:
p.apply_async(omero_data.get_well_details, args=(well,), callback=well_details_callback)
try:
# wait 10 seconds, this allows for the capture of the KeyboardInterrupt exception
time.sleep(10)
except KeyboardInterrupt:
p.terminate()
p.join()
        connect.disconnect(session, pargs.server)
print('exiting...')
sys.exit(0)
finally:
p.close()
p.join()
pbar.finish()
# sort results by id
wells_sorted = sorted(wells_data, key=lambda k: k['id'])
print('Writing flat file...')
# build a dict of all keys which will form the header row of the flat file
    # this is necessary as the metadata key-value pairs might not be uniform across the dataset
columns = set()
for well in wells_sorted:
columns |= set(well.keys())
# write to a tab delimited file
with open(pargs.output, 'w') as output:
w = csv.DictWriter(output, columns, delimiter='\t', lineterminator='\n')
w.writeheader()
w.writerows(wells_sorted)
output.close()
connect.disconnect(session, pargs.server)
print('Metadata fetch complete')
if __name__ == '__main__':
main()
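# Example invocation (hedged; screen id and credentials are illustrative):
#   python fetch_omero_data.py -i 102 -o screen_102.tab \
#       -s http://idr-demo.openmicroscopy.org -u demo -w secret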
|
zegami/omero-idr-fetch
|
fetch_omero_data.py
|
Python
|
mit
| 3,129 | 0.003196 |
# Copyright (c) 2003-2014 CORE Security Technologies
#
# This software is provided under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# $Id: lsad.py 1106 2014-01-19 14:17:01Z bethus@gmail.com $
#
# Author: Alberto Solino
#
# Description:
# [MS-LSAD] Interface implementation
#
# Best way to learn how to use these calls is to grab the protocol standard
# so you understand what the call does, and then read the test case located
# at https://code.google.com/p/impacket/source/browse/#svn%2Ftrunk%2Fimpacket%2Ftestcases%2FSMB-RPC
#
# Some calls have helper functions, which makes it even easier to use.
# They are located at the end of this file.
# Helper functions start with "h"<name of the call>.
# There are test cases for them too.
#
from impacket.dcerpc.v5 import ndr
from impacket.dcerpc.v5.ndr import NDRCALL, NDR, NDRSTRUCT, NDRENUM, NDRUNION, NDRPOINTER, NDRUniConformantArray, NDRUniConformantVaryingArray
from impacket.dcerpc.v5.dtypes import *
from impacket import nt_errors
from impacket.uuid import uuidtup_to_bin
from impacket.dcerpc.v5.enum import Enum
MSRPC_UUID_LSAD = uuidtup_to_bin(('12345778-1234-ABCD-EF00-0123456789AB','0.0'))
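# A hedged usage sketch (hLsarOpenPolicy2 is one of the "h" helper functions
# defined at the end of this module; server name and access rights below are
# illustrative only):
#
#   from impacket.dcerpc.v5 import transport
#   rpctransport = transport.DCERPCTransportFactory(r'ncacn_np:SERVER[\pipe\lsarpc]')
#   dce = rpctransport.get_dce_rpc()
#   dce.connect()
#   dce.bind(MSRPC_UUID_LSAD)
#   resp = hLsarOpenPolicy2(dce, access=POLICY_LOOKUP_NAMES)
#   policyHandle = resp['PolicyHandle']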
class DCERPCSessionError(Exception):
def __init__( self, packet = None, error_code = None):
Exception.__init__(self)
self.packet = packet
if packet is not None:
self.error_code = packet['ErrorCode']
else:
self.error_code = error_code
def get_error_code( self ):
return self.error_code
def get_packet( self ):
return self.packet
def __str__( self ):
key = self.error_code
if (nt_errors.ERROR_MESSAGES.has_key(key)):
error_msg_short = nt_errors.ERROR_MESSAGES[key][0]
error_msg_verbose = nt_errors.ERROR_MESSAGES[key][1]
return 'LSAD SessionError: code: 0x%x - %s - %s' % (self.error_code, error_msg_short, error_msg_verbose)
else:
return 'LSAD SessionError: unknown error code: 0x%x' % (self.error_code)
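# Hedged sketch: dce.request() raises DCERPCSessionError whenever the server
# answers with a non-zero ErrorCode, so callers typically branch on the NT
# status code, e.g.:
#
#   try:
#       resp = dce.request(request)
#   except DCERPCSessionError, e:
#       if e.get_error_code() == 0xc0000022:  # STATUS_ACCESS_DENIED
#           raise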
################################################################################
# CONSTANTS
################################################################################
# 2.2.1.1.2 ACCESS_MASK for Policy Objects
POLICY_VIEW_LOCAL_INFORMATION = 0x00000001
POLICY_VIEW_AUDIT_INFORMATION = 0x00000002
POLICY_GET_PRIVATE_INFORMATION = 0x00000004
POLICY_TRUST_ADMIN = 0x00000008
POLICY_CREATE_ACCOUNT = 0x00000010
POLICY_CREATE_SECRET = 0x00000020
POLICY_CREATE_PRIVILEGE = 0x00000040
POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080
POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100
POLICY_AUDIT_LOG_ADMIN = 0x00000200
POLICY_SERVER_ADMIN = 0x00000400
POLICY_LOOKUP_NAMES = 0x00000800
POLICY_NOTIFICATION = 0x00001000
# 2.2.1.1.3 ACCESS_MASK for Account Objects
ACCOUNT_VIEW = 0x00000001
ACCOUNT_ADJUST_PRIVILEGES = 0x00000002
ACCOUNT_ADJUST_QUOTAS = 0x00000004
ACCOUNT_ADJUST_SYSTEM_ACCESS = 0x00000008
# 2.2.1.1.4 ACCESS_MASK for Secret Objects
SECRET_SET_VALUE = 0x00000001
SECRET_QUERY_VALUE = 0x00000002
# 2.2.1.1.5 ACCESS_MASK for Trusted Domain Objects
TRUSTED_QUERY_DOMAIN_NAME = 0x00000001
TRUSTED_QUERY_CONTROLLERS = 0x00000002
TRUSTED_SET_CONTROLLERS = 0x00000004
TRUSTED_QUERY_POSIX = 0x00000008
TRUSTED_SET_POSIX = 0x00000010
TRUSTED_SET_AUTH = 0x00000020
TRUSTED_QUERY_AUTH = 0x00000040
# 2.2.1.2 POLICY_SYSTEM_ACCESS_MODE
POLICY_MODE_INTERACTIVE = 0x00000001
POLICY_MODE_NETWORK = 0x00000002
POLICY_MODE_BATCH = 0x00000004
POLICY_MODE_SERVICE = 0x00000010
POLICY_MODE_DENY_INTERACTIVE = 0x00000040
POLICY_MODE_DENY_NETWORK = 0x00000080
POLICY_MODE_DENY_BATCH = 0x00000100
POLICY_MODE_DENY_SERVICE = 0x00000200
POLICY_MODE_REMOTE_INTERACTIVE = 0x00000400
POLICY_MODE_DENY_REMOTE_INTERACTIVE = 0x00000800
POLICY_MODE_ALL = 0x00000FF7
POLICY_MODE_ALL_NT4 = 0x00000037
# 2.2.4.4 LSAPR_POLICY_AUDIT_EVENTS_INFO
# EventAuditingOptions
POLICY_AUDIT_EVENT_UNCHANGED = 0x00000000
POLICY_AUDIT_EVENT_NONE = 0x00000004
POLICY_AUDIT_EVENT_SUCCESS = 0x00000001
POLICY_AUDIT_EVENT_FAILURE = 0x00000002
# 2.2.4.19 POLICY_DOMAIN_KERBEROS_TICKET_INFO
# AuthenticationOptions
POLICY_KERBEROS_VALIDATE_CLIENT = 0x00000080
# 2.2.7.21 LSA_FOREST_TRUST_RECORD
# Flags
LSA_TLN_DISABLED_NEW = 0x00000001
LSA_TLN_DISABLED_ADMIN = 0x00000002
LSA_TLN_DISABLED_CONFLICT = 0x00000004
LSA_SID_DISABLED_ADMIN = 0x00000001
LSA_SID_DISABLED_CONFLICT = 0x00000002
LSA_NB_DISABLED_ADMIN = 0x00000004
LSA_NB_DISABLED_CONFLICT = 0x00000008
LSA_FTRECORD_DISABLED_REASONS = 0x0000FFFF
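# Worked example (hedged): the ACCESS_MASK values above combine with bitwise
# OR when requesting a handle, e.g.:
#
#   desiredAccess = POLICY_LOOKUP_NAMES | POLICY_SERVER_ADMIN   # == 0x00000C00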
################################################################################
# STRUCTURES
################################################################################
# 2.2.2.1 LSAPR_HANDLE
class LSAPR_HANDLE(NDR):
align = 1
structure = (
('Data','20s=""'),
)
# 2.2.2.3 LSA_UNICODE_STRING
LSA_UNICODE_STRING = RPC_UNICODE_STRING
# 2.2.3.1 STRING
class STRING(NDRSTRUCT):
commonHdr = (
('MaximumLength','<H=len(Data)-12'),
('Length','<H=len(Data)-12'),
('ReferentID','<L=0xff'),
)
commonHdr64 = (
('MaximumLength','<H=len(Data)-24'),
('Length','<H=len(Data)-24'),
('ReferentID','<Q=0xff'),
)
referent = (
('Data',STR),
)
def dump(self, msg = None, indent = 0):
if msg is None: msg = self.__class__.__name__
ind = ' '*indent
if msg != '':
print "%s" % (msg),
# Here just print the data
print " %r" % (self['Data']),
def __setitem__(self, key, value):
if key == 'Data':
self.fields['MaximumLength'] = None
self.fields['Length'] = None
self.data = None # force recompute
return NDR.__setitem__(self, key, value)
# 2.2.3.2 LSAPR_ACL
class LSAPR_ACL(NDRSTRUCT):
structure = (
('AclRevision', UCHAR),
('Sbz1', UCHAR),
('AclSize', USHORT),
('Dummy1',NDRUniConformantArray),
)
# 2.2.3.4 LSAPR_SECURITY_DESCRIPTOR
LSAPR_SECURITY_DESCRIPTOR = SECURITY_DESCRIPTOR
class PLSAPR_SECURITY_DESCRIPTOR(NDRPOINTER):
referent = (
('Data', LSAPR_SECURITY_DESCRIPTOR),
)
# 2.2.3.5 SECURITY_IMPERSONATION_LEVEL
class SECURITY_IMPERSONATION_LEVEL(NDRENUM):
class enumItems(Enum):
SecurityAnonymous = 0
SecurityIdentification = 1
SecurityImpersonation = 2
SecurityDelegation = 3
# 2.2.3.6 SECURITY_CONTEXT_TRACKING_MODE
SECURITY_CONTEXT_TRACKING_MODE = UCHAR
# 2.2.3.7 SECURITY_QUALITY_OF_SERVICE
class SECURITY_QUALITY_OF_SERVICE(NDRSTRUCT):
structure = (
('Length', DWORD),
('ImpersonationLevel', SECURITY_IMPERSONATION_LEVEL),
('ContextTrackingMode', SECURITY_CONTEXT_TRACKING_MODE),
('EffectiveOnly', UCHAR),
)
class PSECURITY_QUALITY_OF_SERVICE(NDRPOINTER):
referent = (
('Data', SECURITY_QUALITY_OF_SERVICE),
)
# 2.2.2.4 LSAPR_OBJECT_ATTRIBUTES
class LSAPR_OBJECT_ATTRIBUTES(NDRSTRUCT):
structure = (
('Length', DWORD),
('RootDirectory', LPWSTR),
('ObjectName', LPWSTR),
('Attributes', DWORD),
('SecurityDescriptor', PLSAPR_SECURITY_DESCRIPTOR),
('SecurityQualityOfService', PSECURITY_QUALITY_OF_SERVICE),
)
# 2.2.2.5 LSAPR_SR_SECURITY_DESCRIPTOR
class LSAPR_SR_SECURITY_DESCRIPTOR(NDRSTRUCT):
structure = (
('Length', DWORD),
('SecurityDescriptor', LPBYTE),
)
class PLSAPR_SR_SECURITY_DESCRIPTOR(NDRPOINTER):
referent = (
('Data', LSAPR_SR_SECURITY_DESCRIPTOR),
)
# 2.2.3.3 SECURITY_DESCRIPTOR_CONTROL
SECURITY_DESCRIPTOR_CONTROL = ULONG
# 2.2.4.1 POLICY_INFORMATION_CLASS
class POLICY_INFORMATION_CLASS(NDRENUM):
class enumItems(Enum):
PolicyAuditLogInformation = 1
PolicyAuditEventsInformation = 2
PolicyPrimaryDomainInformation = 3
PolicyPdAccountInformation = 4
PolicyAccountDomainInformation = 5
PolicyLsaServerRoleInformation = 6
PolicyReplicaSourceInformation = 7
PolicyInformationNotUsedOnWire = 8
PolicyModificationInformation = 9
PolicyAuditFullSetInformation = 10
PolicyAuditFullQueryInformation = 11
PolicyDnsDomainInformation = 12
PolicyDnsDomainInformationInt = 13
PolicyLocalAccountDomainInformation = 14
PolicyLastEntry = 15
# 2.2.4.3 POLICY_AUDIT_LOG_INFO
class POLICY_AUDIT_LOG_INFO(NDRSTRUCT):
structure = (
('AuditLogPercentFull', DWORD),
('MaximumLogSize', DWORD),
('AuditRetentionPeriod', LARGE_INTEGER),
('AuditLogFullShutdownInProgress', UCHAR),
('TimeToShutdown', LARGE_INTEGER),
('NextAuditRecordId', DWORD),
)
# 2.2.4.4 LSAPR_POLICY_AUDIT_EVENTS_INFO
class DWORD_ARRAY(NDRUniConformantArray):
item = DWORD
class PDWORD_ARRAY(NDRPOINTER):
referent = (
('Data', DWORD_ARRAY),
)
class LSAPR_POLICY_AUDIT_EVENTS_INFO(NDRSTRUCT):
structure = (
('AuditingMode', UCHAR),
('EventAuditingOptions', PDWORD_ARRAY),
('MaximumAuditEventCount', DWORD),
)
# 2.2.4.5 LSAPR_POLICY_PRIMARY_DOM_INFO
class LSAPR_POLICY_PRIMARY_DOM_INFO(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
('Sid', PRPC_SID),
)
# 2.2.4.6 LSAPR_POLICY_ACCOUNT_DOM_INFO
class LSAPR_POLICY_ACCOUNT_DOM_INFO(NDRSTRUCT):
structure = (
('DomainName', RPC_UNICODE_STRING),
('DomainSid', PRPC_SID),
)
# 2.2.4.7 LSAPR_POLICY_PD_ACCOUNT_INFO
class LSAPR_POLICY_PD_ACCOUNT_INFO(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
)
# 2.2.4.8 POLICY_LSA_SERVER_ROLE
class POLICY_LSA_SERVER_ROLE(NDRENUM):
class enumItems(Enum):
PolicyServerRoleBackup = 2
PolicyServerRolePrimary = 3
# 2.2.4.9 POLICY_LSA_SERVER_ROLE_INFO
class POLICY_LSA_SERVER_ROLE_INFO(NDRSTRUCT):
structure = (
('LsaServerRole', POLICY_LSA_SERVER_ROLE),
)
# 2.2.4.10 LSAPR_POLICY_REPLICA_SRCE_INFO
class LSAPR_POLICY_REPLICA_SRCE_INFO(NDRSTRUCT):
structure = (
('ReplicaSource', RPC_UNICODE_STRING),
('ReplicaAccountName', RPC_UNICODE_STRING),
)
# 2.2.4.11 POLICY_MODIFICATION_INFO
class POLICY_MODIFICATION_INFO(NDRSTRUCT):
structure = (
('ModifiedId', LARGE_INTEGER),
('DatabaseCreationTime', LARGE_INTEGER),
)
# 2.2.4.12 POLICY_AUDIT_FULL_SET_INFO
class POLICY_AUDIT_FULL_SET_INFO(NDRSTRUCT):
structure = (
('ShutDownOnFull', UCHAR),
)
# 2.2.4.13 POLICY_AUDIT_FULL_QUERY_INFO
class POLICY_AUDIT_FULL_QUERY_INFO(NDRSTRUCT):
structure = (
('ShutDownOnFull', UCHAR),
('LogIsFull', UCHAR),
)
# 2.2.4.14 LSAPR_POLICY_DNS_DOMAIN_INFO
class LSAPR_POLICY_DNS_DOMAIN_INFO(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
('DnsDomainName', RPC_UNICODE_STRING),
('DnsForestName', RPC_UNICODE_STRING),
('DomainGuid', GUID),
('Sid', PRPC_SID),
)
# 2.2.4.2 LSAPR_POLICY_INFORMATION
class LSAPR_POLICY_INFORMATION(NDRUNION):
union = {
POLICY_INFORMATION_CLASS.PolicyAuditLogInformation : ('PolicyAuditLogInfo', POLICY_AUDIT_LOG_INFO),
POLICY_INFORMATION_CLASS.PolicyAuditEventsInformation : ('PolicyAuditEventsInfo', LSAPR_POLICY_AUDIT_EVENTS_INFO),
POLICY_INFORMATION_CLASS.PolicyPrimaryDomainInformation : ('PolicyPrimaryDomainInfo', LSAPR_POLICY_PRIMARY_DOM_INFO),
POLICY_INFORMATION_CLASS.PolicyAccountDomainInformation : ('PolicyAccountDomainInfo', LSAPR_POLICY_ACCOUNT_DOM_INFO),
POLICY_INFORMATION_CLASS.PolicyPdAccountInformation : ('PolicyPdAccountInfo', LSAPR_POLICY_PD_ACCOUNT_INFO),
POLICY_INFORMATION_CLASS.PolicyLsaServerRoleInformation : ('PolicyServerRoleInfo', POLICY_LSA_SERVER_ROLE_INFO),
POLICY_INFORMATION_CLASS.PolicyReplicaSourceInformation : ('PolicyReplicaSourceInfo', LSAPR_POLICY_REPLICA_SRCE_INFO),
POLICY_INFORMATION_CLASS.PolicyModificationInformation : ('PolicyModificationInfo', POLICY_MODIFICATION_INFO),
POLICY_INFORMATION_CLASS.PolicyAuditFullSetInformation : ('PolicyAuditFullSetInfo', POLICY_AUDIT_FULL_SET_INFO),
POLICY_INFORMATION_CLASS.PolicyAuditFullQueryInformation : ('PolicyAuditFullQueryInfo', POLICY_AUDIT_FULL_QUERY_INFO),
POLICY_INFORMATION_CLASS.PolicyDnsDomainInformation : ('PolicyDnsDomainInfo', LSAPR_POLICY_DNS_DOMAIN_INFO),
POLICY_INFORMATION_CLASS.PolicyDnsDomainInformationInt : ('PolicyDnsDomainInfoInt', LSAPR_POLICY_DNS_DOMAIN_INFO),
POLICY_INFORMATION_CLASS.PolicyLocalAccountDomainInformation: ('PolicyLocalAccountDomainInfo', LSAPR_POLICY_ACCOUNT_DOM_INFO),
}
class PLSAPR_POLICY_INFORMATION(NDRPOINTER):
referent = (
('Data', LSAPR_POLICY_INFORMATION),
)
# 2.2.4.15 POLICY_DOMAIN_INFORMATION_CLASS
class POLICY_DOMAIN_INFORMATION_CLASS(NDRENUM):
class enumItems(Enum):
PolicyDomainQualityOfServiceInformation = 1
PolicyDomainEfsInformation = 2
PolicyDomainKerberosTicketInformation = 3
# 2.2.4.17 POLICY_DOMAIN_QUALITY_OF_SERVICE_INFO
class POLICY_DOMAIN_QUALITY_OF_SERVICE_INFO(NDRSTRUCT):
structure = (
('QualityOfService', DWORD),
)
# 2.2.4.18 LSAPR_POLICY_DOMAIN_EFS_INFO
class LSAPR_POLICY_DOMAIN_EFS_INFO(NDRSTRUCT):
structure = (
('InfoLength', DWORD),
('EfsBlob', LPBYTE),
)
# 2.2.4.19 POLICY_DOMAIN_KERBEROS_TICKET_INFO
class POLICY_DOMAIN_KERBEROS_TICKET_INFO(NDRSTRUCT):
structure = (
('AuthenticationOptions', DWORD),
('MaxServiceTicketAge', LARGE_INTEGER),
('MaxTicketAge', LARGE_INTEGER),
('MaxRenewAge', LARGE_INTEGER),
('MaxClockSkew', LARGE_INTEGER),
('Reserved', LARGE_INTEGER),
)
# 2.2.4.16 LSAPR_POLICY_DOMAIN_INFORMATION
class LSAPR_POLICY_DOMAIN_INFORMATION(NDRUNION):
union = {
POLICY_DOMAIN_INFORMATION_CLASS.PolicyDomainQualityOfServiceInformation : ('PolicyDomainQualityOfServiceInfo', POLICY_DOMAIN_QUALITY_OF_SERVICE_INFO ),
POLICY_DOMAIN_INFORMATION_CLASS.PolicyDomainEfsInformation : ('PolicyDomainEfsInfo', LSAPR_POLICY_DOMAIN_EFS_INFO),
POLICY_DOMAIN_INFORMATION_CLASS.PolicyDomainKerberosTicketInformation : ('PolicyDomainKerbTicketInfo', POLICY_DOMAIN_KERBEROS_TICKET_INFO),
}
class PLSAPR_POLICY_DOMAIN_INFORMATION(NDRPOINTER):
referent = (
('Data', LSAPR_POLICY_DOMAIN_INFORMATION),
)
# 2.2.4.20 POLICY_AUDIT_EVENT_TYPE
class POLICY_AUDIT_EVENT_TYPE(NDRENUM):
class enumItems(Enum):
AuditCategorySystem = 0
AuditCategoryLogon = 1
AuditCategoryObjectAccess = 2
AuditCategoryPrivilegeUse = 3
AuditCategoryDetailedTracking = 4
AuditCategoryPolicyChange = 5
AuditCategoryAccountManagement = 6
AuditCategoryDirectoryServiceAccess = 7
AuditCategoryAccountLogon = 8
# 2.2.5.1 LSAPR_ACCOUNT_INFORMATION
class LSAPR_ACCOUNT_INFORMATION(NDRSTRUCT):
structure = (
('Sid', PRPC_SID),
)
# 2.2.5.2 LSAPR_ACCOUNT_ENUM_BUFFER
class LSAPR_ACCOUNT_INFORMATION_ARRAY(NDRUniConformantArray):
item = LSAPR_ACCOUNT_INFORMATION
class PLSAPR_ACCOUNT_INFORMATION_ARRAY(NDRPOINTER):
referent = (
('Data', LSAPR_ACCOUNT_INFORMATION_ARRAY),
)
class LSAPR_ACCOUNT_ENUM_BUFFER(NDRSTRUCT):
structure = (
('EntriesRead', ULONG),
('Information', PLSAPR_ACCOUNT_INFORMATION_ARRAY),
)
# 2.2.5.3 LSAPR_USER_RIGHT_SET
class RPC_UNICODE_STRING_ARRAY(NDRUniConformantArray):
item = RPC_UNICODE_STRING
class PRPC_UNICODE_STRING_ARRAY(NDRPOINTER):
referent = (
('Data', RPC_UNICODE_STRING_ARRAY),
)
class LSAPR_USER_RIGHT_SET(NDRSTRUCT):
structure = (
('EntriesRead', ULONG),
('UserRights', PRPC_UNICODE_STRING_ARRAY),
)
# 2.2.5.4 LSAPR_LUID_AND_ATTRIBUTES
class LSAPR_LUID_AND_ATTRIBUTES(NDRSTRUCT):
structure = (
('Luid', LUID),
('Attributes', ULONG),
)
# 2.2.5.5 LSAPR_PRIVILEGE_SET
class LSAPR_LUID_AND_ATTRIBUTES_ARRAY(NDRUniConformantArray):
item = LSAPR_LUID_AND_ATTRIBUTES
class LSAPR_PRIVILEGE_SET(NDRSTRUCT):
structure = (
('PrivilegeCount', ULONG),
('Control', ULONG),
('Privilege', LSAPR_LUID_AND_ATTRIBUTES_ARRAY),
)
class PLSAPR_PRIVILEGE_SET(NDRPOINTER):
referent = (
('Data', LSAPR_PRIVILEGE_SET),
)
# 2.2.6.1 LSAPR_CR_CIPHER_VALUE
class PCHAR_ARRAY(NDRPOINTER):
referent = (
('Data', NDRUniConformantVaryingArray),
)
class LSAPR_CR_CIPHER_VALUE(NDRSTRUCT):
structure = (
('Length', LONG),
('MaximumLength', LONG),
('Buffer', PCHAR_ARRAY),
)
class PLSAPR_CR_CIPHER_VALUE(NDRPOINTER):
referent = (
('Data', LSAPR_CR_CIPHER_VALUE),
)
class PPLSAPR_CR_CIPHER_VALUE(NDRPOINTER):
referent = (
('Data', PLSAPR_CR_CIPHER_VALUE),
)
# 2.2.7.1 LSAPR_TRUST_INFORMATION
class LSAPR_TRUST_INFORMATION(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
('Sid', PRPC_SID),
)
# 2.2.7.2 TRUSTED_INFORMATION_CLASS
class TRUSTED_INFORMATION_CLASS(NDRENUM):
class enumItems(Enum):
TrustedDomainNameInformation = 1
TrustedControllersInformation = 2
TrustedPosixOffsetInformation = 3
TrustedPasswordInformation = 4
TrustedDomainInformationBasic = 5
TrustedDomainInformationEx = 6
TrustedDomainAuthInformation = 7
TrustedDomainFullInformation = 8
TrustedDomainAuthInformationInternal = 9
TrustedDomainFullInformationInternal = 10
TrustedDomainInformationEx2Internal = 11
TrustedDomainFullInformation2Internal = 12
TrustedDomainSupportedEncryptionTypes = 13
# 2.2.7.4 LSAPR_TRUSTED_DOMAIN_NAME_INFO
class LSAPR_TRUSTED_DOMAIN_NAME_INFO(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
)
# 2.2.7.5 LSAPR_TRUSTED_CONTROLLERS_INFO
class LSAPR_TRUSTED_CONTROLLERS_INFO(NDRSTRUCT):
structure = (
('Entries', ULONG),
('Names', PRPC_UNICODE_STRING_ARRAY),
)
# 2.2.7.6 TRUSTED_POSIX_OFFSET_INFO
class TRUSTED_POSIX_OFFSET_INFO(NDRSTRUCT):
structure = (
('Offset', ULONG),
)
# 2.2.7.7 LSAPR_TRUSTED_PASSWORD_INFO
class LSAPR_TRUSTED_PASSWORD_INFO(NDRSTRUCT):
structure = (
('Password', PLSAPR_CR_CIPHER_VALUE),
('OldPassword', PLSAPR_CR_CIPHER_VALUE),
)
# 2.2.7.8 LSAPR_TRUSTED_DOMAIN_INFORMATION_BASIC
LSAPR_TRUSTED_DOMAIN_INFORMATION_BASIC = LSAPR_TRUST_INFORMATION
# 2.2.7.9 LSAPR_TRUSTED_DOMAIN_INFORMATION_EX
class LSAPR_TRUSTED_DOMAIN_INFORMATION_EX(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
('FlatName', RPC_UNICODE_STRING),
('Sid', PRPC_SID),
('TrustDirection', ULONG),
('TrustType', ULONG),
('TrustAttributes', ULONG),
)
# 2.2.7.10 LSAPR_TRUSTED_DOMAIN_INFORMATION_EX2
class LSAPR_TRUSTED_DOMAIN_INFORMATION_EX2(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
('FlatName', RPC_UNICODE_STRING),
('Sid', PRPC_SID),
('TrustDirection', ULONG),
('TrustType', ULONG),
('TrustAttributes', ULONG),
('ForestTrustLength', ULONG),
('ForestTrustInfo', LPBYTE),
)
# 2.2.7.17 LSAPR_AUTH_INFORMATION
class LSAPR_AUTH_INFORMATION(NDRSTRUCT):
structure = (
('LastUpdateTime', LARGE_INTEGER),
('AuthType', ULONG),
('AuthInfoLength', ULONG),
('AuthInfo', LPBYTE),
)
class PLSAPR_AUTH_INFORMATION(NDRPOINTER):
referent = (
('Data', LSAPR_AUTH_INFORMATION),
)
# 2.2.7.11 LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION
class LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION(NDRSTRUCT):
structure = (
('IncomingAuthInfos', ULONG),
('IncomingAuthenticationInformation', PLSAPR_AUTH_INFORMATION),
('IncomingPreviousAuthenticationInformation', PLSAPR_AUTH_INFORMATION),
('OutgoingAuthInfos', ULONG),
('OutgoingAuthenticationInformation', PLSAPR_AUTH_INFORMATION),
('OutgoingPreviousAuthenticationInformation', PLSAPR_AUTH_INFORMATION),
)
# 2.2.7.16 LSAPR_TRUSTED_DOMAIN_AUTH_BLOB
class LSAPR_TRUSTED_DOMAIN_AUTH_BLOB(NDRSTRUCT):
structure = (
('AuthSize', ULONG),
('AuthBlob', LPBYTE),
)
# 2.2.7.12 LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION_INTERNAL
class LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION_INTERNAL(NDRSTRUCT):
structure = (
('AuthBlob', LSAPR_TRUSTED_DOMAIN_AUTH_BLOB),
)
# 2.2.7.13 LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION
class LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION(NDRSTRUCT):
structure = (
('Information', LSAPR_TRUSTED_DOMAIN_INFORMATION_EX),
('PosixOffset', TRUSTED_POSIX_OFFSET_INFO),
('AuthInformation', LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION),
)
# 2.2.7.14 LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION_INTERNAL
class LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION_INTERNAL(NDRSTRUCT):
structure = (
('Information', LSAPR_TRUSTED_DOMAIN_INFORMATION_EX),
('PosixOffset', TRUSTED_POSIX_OFFSET_INFO),
('AuthInformation', LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION_INTERNAL),
)
# 2.2.7.15 LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION2
class LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION2(NDRSTRUCT):
structure = (
('Information', LSAPR_TRUSTED_DOMAIN_INFORMATION_EX),
('PosixOffset', TRUSTED_POSIX_OFFSET_INFO),
('AuthInformation', LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION),
)
# 2.2.7.18 TRUSTED_DOMAIN_SUPPORTED_ENCRYPTION_TYPES
class TRUSTED_DOMAIN_SUPPORTED_ENCRYPTION_TYPES(NDRSTRUCT):
structure = (
('SupportedEncryptionTypes', ULONG),
)
# 2.2.7.3 LSAPR_TRUSTED_DOMAIN_INFO
class LSAPR_TRUSTED_DOMAIN_INFO(NDRUNION):
union = {
TRUSTED_INFORMATION_CLASS.TrustedDomainNameInformation : ('TrustedDomainNameInfo', LSAPR_TRUSTED_DOMAIN_NAME_INFO ),
TRUSTED_INFORMATION_CLASS.TrustedControllersInformation : ('TrustedControllersInfo', LSAPR_TRUSTED_CONTROLLERS_INFO),
TRUSTED_INFORMATION_CLASS.TrustedPosixOffsetInformation : ('TrustedPosixOffsetInfo', TRUSTED_POSIX_OFFSET_INFO),
TRUSTED_INFORMATION_CLASS.TrustedPasswordInformation : ('TrustedPasswordInfo', LSAPR_TRUSTED_PASSWORD_INFO ),
TRUSTED_INFORMATION_CLASS.TrustedDomainInformationBasic : ('TrustedDomainInfoBasic', LSAPR_TRUSTED_DOMAIN_INFORMATION_BASIC),
TRUSTED_INFORMATION_CLASS.TrustedDomainInformationEx : ('TrustedDomainInfoEx', LSAPR_TRUSTED_DOMAIN_INFORMATION_EX),
TRUSTED_INFORMATION_CLASS.TrustedDomainAuthInformation : ('TrustedAuthInfo', LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION),
TRUSTED_INFORMATION_CLASS.TrustedDomainFullInformation : ('TrustedFullInfo', LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION),
TRUSTED_INFORMATION_CLASS.TrustedDomainAuthInformationInternal : ('TrustedAuthInfoInternal', LSAPR_TRUSTED_DOMAIN_AUTH_INFORMATION_INTERNAL),
TRUSTED_INFORMATION_CLASS.TrustedDomainFullInformationInternal : ('TrustedFullInfoInternal', LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION_INTERNAL),
TRUSTED_INFORMATION_CLASS.TrustedDomainInformationEx2Internal : ('TrustedDomainInfoEx2', LSAPR_TRUSTED_DOMAIN_INFORMATION_EX2),
TRUSTED_INFORMATION_CLASS.TrustedDomainFullInformation2Internal : ('TrustedFullInfo2', LSAPR_TRUSTED_DOMAIN_FULL_INFORMATION2),
TRUSTED_INFORMATION_CLASS.TrustedDomainSupportedEncryptionTypes : ('TrustedDomainSETs', TRUSTED_DOMAIN_SUPPORTED_ENCRYPTION_TYPES),
}
# 2.2.7.19 LSAPR_TRUSTED_ENUM_BUFFER
class LSAPR_TRUST_INFORMATION_ARRAY(NDRUniConformantArray):
item = LSAPR_TRUST_INFORMATION
class PLSAPR_TRUST_INFORMATION_ARRAY(NDRPOINTER):
referent = (
('Data', LSAPR_TRUST_INFORMATION_ARRAY),
)
class LSAPR_TRUSTED_ENUM_BUFFER(NDRSTRUCT):
structure = (
('Entries', ULONG),
('Information', PLSAPR_TRUST_INFORMATION_ARRAY),
)
# 2.2.7.20 LSAPR_TRUSTED_ENUM_BUFFER_EX
class LSAPR_TRUSTED_DOMAIN_INFORMATION_EX_ARRAY(NDRUniConformantArray):
item = LSAPR_TRUSTED_DOMAIN_INFORMATION_EX
class PLSAPR_TRUSTED_DOMAIN_INFORMATION_EX_ARRAY(NDRPOINTER):
referent = (
('Data', LSAPR_TRUSTED_DOMAIN_INFORMATION_EX_ARRAY),
)
class LSAPR_TRUSTED_ENUM_BUFFER_EX(NDRSTRUCT):
structure = (
('Entries', ULONG),
('EnumerationBuffer', PLSAPR_TRUSTED_DOMAIN_INFORMATION_EX_ARRAY),
)
# 2.2.7.22 LSA_FOREST_TRUST_RECORD_TYPE
class LSA_FOREST_TRUST_RECORD_TYPE(NDRENUM):
class enumItems(Enum):
ForestTrustTopLevelName = 0
ForestTrustTopLevelNameEx = 1
ForestTrustDomainInfo = 2
# 2.2.7.24 LSA_FOREST_TRUST_DOMAIN_INFO
class LSA_FOREST_TRUST_DOMAIN_INFO(NDRSTRUCT):
structure = (
('Sid', PRPC_SID),
('DnsName', LSA_UNICODE_STRING),
('NetbiosName', LSA_UNICODE_STRING),
)
# 2.2.7.21 LSA_FOREST_TRUST_RECORD
class LSA_FOREST_TRUST_DATA_UNION(NDRUNION):
union = {
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelName : ('TopLevelName', LSA_UNICODE_STRING ),
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelNameEx : ('TopLevelName', LSA_UNICODE_STRING),
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustDomainInfo : ('DomainInfo', LSA_FOREST_TRUST_DOMAIN_INFO),
}
class LSA_FOREST_TRUST_RECORD(NDRSTRUCT):
structure = (
('Flags', ULONG),
('ForestTrustType', LSA_FOREST_TRUST_RECORD_TYPE),
('Time', LARGE_INTEGER),
('ForestTrustData', LSA_FOREST_TRUST_DATA_UNION),
)
class PLSA_FOREST_TRUST_RECORD(NDRPOINTER):
referent = (
('Data', LSA_FOREST_TRUST_RECORD),
)
# 2.2.7.23 LSA_FOREST_TRUST_BINARY_DATA
class LSA_FOREST_TRUST_BINARY_DATA(NDRSTRUCT):
structure = (
('Length', ULONG),
('Buffer', LPBYTE),
)
# 2.2.7.25 LSA_FOREST_TRUST_INFORMATION
class LSA_FOREST_TRUST_RECORD_ARRAY(NDRUniConformantArray):
item = PLSA_FOREST_TRUST_RECORD
class PLSA_FOREST_TRUST_RECORD_ARRAY(NDRPOINTER):
referent = (
('Data', LSA_FOREST_TRUST_RECORD_ARRAY),
)
class LSA_FOREST_TRUST_INFORMATION(NDRSTRUCT):
structure = (
('RecordCount', ULONG),
('Entries', PLSA_FOREST_TRUST_RECORD_ARRAY),
)
class PLSA_FOREST_TRUST_INFORMATION(NDRPOINTER):
referent = (
('Data', LSA_FOREST_TRUST_INFORMATION),
)
# 2.2.7.26 LSA_FOREST_TRUST_COLLISION_RECORD_TYPE
class LSA_FOREST_TRUST_COLLISION_RECORD_TYPE(NDRENUM):
class enumItems(Enum):
CollisionTdo = 0
CollisionXref = 1
CollisionOther = 2
# 2.2.7.27 LSA_FOREST_TRUST_COLLISION_RECORD
class LSA_FOREST_TRUST_COLLISION_RECORD(NDRSTRUCT):
structure = (
('Index', ULONG),
('Type', LSA_FOREST_TRUST_COLLISION_RECORD_TYPE),
('Flags', ULONG),
('Name', LSA_UNICODE_STRING),
)
# 2.2.8.1 LSAPR_POLICY_PRIVILEGE_DEF
class LSAPR_POLICY_PRIVILEGE_DEF(NDRSTRUCT):
structure = (
('Name', RPC_UNICODE_STRING),
('LocalValue', LUID),
)
# 2.2.8.2 LSAPR_PRIVILEGE_ENUM_BUFFER
class LSAPR_POLICY_PRIVILEGE_DEF_ARRAY(NDRUniConformantArray):
item = LSAPR_POLICY_PRIVILEGE_DEF
class PLSAPR_POLICY_PRIVILEGE_DEF_ARRAY(NDRPOINTER):
referent = (
('Data', LSAPR_POLICY_PRIVILEGE_DEF_ARRAY),
)
class LSAPR_PRIVILEGE_ENUM_BUFFER(NDRSTRUCT):
structure = (
('Entries', ULONG),
('Privileges', PLSAPR_POLICY_PRIVILEGE_DEF_ARRAY),
)
################################################################################
# RPC CALLS
################################################################################
# 3.1.4.4.1 LsarOpenPolicy2 (Opnum 44)
class LsarOpenPolicy2(NDRCALL):
opnum = 44
structure = (
('SystemName', LPWSTR),
('ObjectAttributes',LSAPR_OBJECT_ATTRIBUTES),
('DesiredAccess',ACCESS_MASK),
)
class LsarOpenPolicy2Response(NDRCALL):
structure = (
('PolicyHandle',LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.4.2 LsarOpenPolicy (Opnum 6)
class LsarOpenPolicy(NDRCALL):
opnum = 6
structure = (
('SystemName', LPWSTR),
('ObjectAttributes',LSAPR_OBJECT_ATTRIBUTES),
('DesiredAccess',ACCESS_MASK),
)
class LsarOpenPolicyResponse(NDRCALL):
structure = (
('PolicyHandle',LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.4.3 LsarQueryInformationPolicy2 (Opnum 46)
class LsarQueryInformationPolicy2(NDRCALL):
opnum = 46
structure = (
('PolicyHandle', LSAPR_HANDLE),
('InformationClass',POLICY_INFORMATION_CLASS),
)
class LsarQueryInformationPolicy2Response(NDRCALL):
structure = (
('PolicyInformation',PLSAPR_POLICY_INFORMATION),
('ErrorCode', NTSTATUS),
)
# 3.1.4.4.4 LsarQueryInformationPolicy (Opnum 7)
class LsarQueryInformationPolicy(NDRCALL):
opnum = 7
structure = (
('PolicyHandle', LSAPR_HANDLE),
('InformationClass',POLICY_INFORMATION_CLASS),
)
class LsarQueryInformationPolicyResponse(NDRCALL):
structure = (
('PolicyInformation',PLSAPR_POLICY_INFORMATION),
('ErrorCode', NTSTATUS),
)
# 3.1.4.4.5 LsarSetInformationPolicy2 (Opnum 47)
class LsarSetInformationPolicy2(NDRCALL):
opnum = 47
structure = (
('PolicyHandle', LSAPR_HANDLE),
('InformationClass',POLICY_INFORMATION_CLASS),
('PolicyInformation',LSAPR_POLICY_INFORMATION),
)
class LsarSetInformationPolicy2Response(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.4.6 LsarSetInformationPolicy (Opnum 8)
class LsarSetInformationPolicy(NDRCALL):
opnum = 8
structure = (
('PolicyHandle', LSAPR_HANDLE),
('InformationClass',POLICY_INFORMATION_CLASS),
('PolicyInformation',LSAPR_POLICY_INFORMATION),
)
class LsarSetInformationPolicyResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.4.7 LsarQueryDomainInformationPolicy (Opnum 53)
class LsarQueryDomainInformationPolicy(NDRCALL):
opnum = 53
structure = (
('PolicyHandle', LSAPR_HANDLE),
('InformationClass',POLICY_DOMAIN_INFORMATION_CLASS),
)
class LsarQueryDomainInformationPolicyResponse(NDRCALL):
structure = (
('PolicyDomainInformation',PLSAPR_POLICY_DOMAIN_INFORMATION),
('ErrorCode', NTSTATUS),
)
# 3.1.4.4.8 LsarSetDomainInformationPolicy (Opnum 54)
# 3.1.4.5.1 LsarCreateAccount (Opnum 10)
class LsarCreateAccount(NDRCALL):
opnum = 10
structure = (
('PolicyHandle', LSAPR_HANDLE),
('AccountSid',RPC_SID),
('DesiredAccess',ACCESS_MASK),
)
class LsarCreateAccountResponse(NDRCALL):
structure = (
('AccountHandle',LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.2 LsarEnumerateAccounts (Opnum 11)
class LsarEnumerateAccounts(NDRCALL):
opnum = 11
structure = (
('PolicyHandle', LSAPR_HANDLE),
('EnumerationContext',ULONG),
('PreferedMaximumLength',ULONG),
)
class LsarEnumerateAccountsResponse(NDRCALL):
structure = (
('EnumerationContext',ULONG),
('EnumerationBuffer',LSAPR_ACCOUNT_ENUM_BUFFER),
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.3 LsarOpenAccount (Opnum 17)
class LsarOpenAccount(NDRCALL):
opnum = 17
structure = (
('PolicyHandle', LSAPR_HANDLE),
('AccountSid',RPC_SID),
('DesiredAccess',ACCESS_MASK),
)
class LsarOpenAccountResponse(NDRCALL):
structure = (
('AccountHandle',LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.4 LsarEnumeratePrivilegesAccount (Opnum 18)
class LsarEnumeratePrivilegesAccount(NDRCALL):
opnum = 18
structure = (
('AccountHandle', LSAPR_HANDLE),
)
class LsarEnumeratePrivilegesAccountResponse(NDRCALL):
structure = (
('Privileges',PLSAPR_PRIVILEGE_SET),
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.5 LsarAddPrivilegesToAccount (Opnum 19)
class LsarAddPrivilegesToAccount(NDRCALL):
opnum = 19
structure = (
('AccountHandle', LSAPR_HANDLE),
('Privileges', LSAPR_PRIVILEGE_SET),
)
class LsarAddPrivilegesToAccountResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.6 LsarRemovePrivilegesFromAccount (Opnum 20)
class LsarRemovePrivilegesFromAccount(NDRCALL):
opnum = 20
structure = (
('AccountHandle', LSAPR_HANDLE),
('AllPrivileges', UCHAR),
('Privileges', PLSAPR_PRIVILEGE_SET),
)
class LsarRemovePrivilegesFromAccountResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.7 LsarGetSystemAccessAccount (Opnum 23)
class LsarGetSystemAccessAccount(NDRCALL):
opnum = 23
structure = (
('AccountHandle', LSAPR_HANDLE),
)
class LsarGetSystemAccessAccountResponse(NDRCALL):
structure = (
('SystemAccess', ULONG),
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.8 LsarSetSystemAccessAccount (Opnum 24)
class LsarSetSystemAccessAccount(NDRCALL):
opnum = 24
structure = (
('AccountHandle', LSAPR_HANDLE),
('SystemAccess', ULONG),
)
class LsarSetSystemAccessAccountResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.9 LsarEnumerateAccountsWithUserRight (Opnum 35)
class LsarEnumerateAccountsWithUserRight(NDRCALL):
opnum = 35
structure = (
('PolicyHandle', LSAPR_HANDLE),
('UserRight', PRPC_UNICODE_STRING),
)
class LsarEnumerateAccountsWithUserRightResponse(NDRCALL):
structure = (
('EnumerationBuffer',LSAPR_ACCOUNT_ENUM_BUFFER),
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.10 LsarEnumerateAccountRights (Opnum 36)
class LsarEnumerateAccountRights(NDRCALL):
opnum = 36
structure = (
('PolicyHandle', LSAPR_HANDLE),
('AccountSid', RPC_SID),
)
class LsarEnumerateAccountRightsResponse(NDRCALL):
structure = (
('UserRights',LSAPR_USER_RIGHT_SET),
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.11 LsarAddAccountRights (Opnum 37)
class LsarAddAccountRights(NDRCALL):
opnum = 37
structure = (
('PolicyHandle', LSAPR_HANDLE),
('AccountSid', RPC_SID),
('UserRights',LSAPR_USER_RIGHT_SET),
)
class LsarAddAccountRightsResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.5.12 LsarRemoveAccountRights (Opnum 38)
class LsarRemoveAccountRights(NDRCALL):
opnum = 38
structure = (
('PolicyHandle', LSAPR_HANDLE),
('AccountSid', RPC_SID),
('AllRights', UCHAR),
('UserRights',LSAPR_USER_RIGHT_SET),
)
class LsarRemoveAccountRightsResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.6.1 LsarCreateSecret (Opnum 16)
class LsarCreateSecret(NDRCALL):
opnum = 16
structure = (
('PolicyHandle', LSAPR_HANDLE),
('SecretName', RPC_UNICODE_STRING),
('DesiredAccess', ACCESS_MASK),
)
class LsarCreateSecretResponse(NDRCALL):
structure = (
('SecretHandle', LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.6.2 LsarOpenSecret (Opnum 28)
class LsarOpenSecret(NDRCALL):
opnum = 28
structure = (
('PolicyHandle', LSAPR_HANDLE),
('SecretName', RPC_UNICODE_STRING),
('DesiredAccess', ACCESS_MASK),
)
class LsarOpenSecretResponse(NDRCALL):
structure = (
('SecretHandle', LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.6.3 LsarSetSecret (Opnum 29)
class LsarSetSecret(NDRCALL):
opnum = 29
structure = (
('SecretHandle', LSAPR_HANDLE),
('EncryptedCurrentValue', PLSAPR_CR_CIPHER_VALUE),
('EncryptedOldValue', PLSAPR_CR_CIPHER_VALUE),
)
class LsarSetSecretResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.6.4 LsarQuerySecret (Opnum 30)
class LsarQuerySecret(NDRCALL):
opnum = 30
structure = (
('SecretHandle', LSAPR_HANDLE),
('EncryptedCurrentValue', PPLSAPR_CR_CIPHER_VALUE),
('CurrentValueSetTime', PLARGE_INTEGER),
('EncryptedOldValue', PPLSAPR_CR_CIPHER_VALUE),
('OldValueSetTime', PLARGE_INTEGER),
)
class LsarQuerySecretResponse(NDRCALL):
structure = (
('EncryptedCurrentValue', PPLSAPR_CR_CIPHER_VALUE),
('CurrentValueSetTime', PLARGE_INTEGER),
('EncryptedOldValue', PPLSAPR_CR_CIPHER_VALUE),
('OldValueSetTime', PLARGE_INTEGER),
('ErrorCode', NTSTATUS),
)
# 3.1.4.6.5 LsarStorePrivateData (Opnum 42)
class LsarStorePrivateData(NDRCALL):
opnum = 42
structure = (
('PolicyHandle', LSAPR_HANDLE),
('KeyName', RPC_UNICODE_STRING),
('EncryptedData', PLSAPR_CR_CIPHER_VALUE),
)
class LsarStorePrivateDataResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.6.6 LsarRetrievePrivateData (Opnum 43)
class LsarRetrievePrivateData(NDRCALL):
opnum = 43
structure = (
('PolicyHandle', LSAPR_HANDLE),
('KeyName', RPC_UNICODE_STRING),
('EncryptedData', PLSAPR_CR_CIPHER_VALUE),
)
class LsarRetrievePrivateDataResponse(NDRCALL):
structure = (
('EncryptedData', PLSAPR_CR_CIPHER_VALUE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.7.1 LsarOpenTrustedDomain (Opnum 25)
# 3.1.4.7.1 LsarQueryInfoTrustedDomain (Opnum 26)
# 3.1.4.7.2 LsarQueryTrustedDomainInfo (Opnum 39)
# 3.1.4.7.3 LsarSetTrustedDomainInfo (Opnum 40)
# 3.1.4.7.4 LsarDeleteTrustedDomain (Opnum 41)
# 3.1.4.7.5 LsarQueryTrustedDomainInfoByName (Opnum 48)
# 3.1.4.7.6 LsarSetTrustedDomainInfoByName (Opnum 49)
# 3.1.4.7.7 LsarEnumerateTrustedDomainsEx (Opnum 50)
class LsarEnumerateTrustedDomainsEx(NDRCALL):
opnum = 50
structure = (
('PolicyHandle', LSAPR_HANDLE),
('EnumerationContext', ULONG),
('PreferedMaximumLength', ULONG),
)
class LsarEnumerateTrustedDomainsExResponse(NDRCALL):
structure = (
('EnumerationContext', ULONG),
('EnumerationBuffer',LSAPR_TRUSTED_ENUM_BUFFER_EX),
('ErrorCode', NTSTATUS),
)
# 3.1.4.7.8 LsarEnumerateTrustedDomains (Opnum 13)
class LsarEnumerateTrustedDomains(NDRCALL):
opnum = 13
structure = (
('PolicyHandle', LSAPR_HANDLE),
('EnumerationContext', ULONG),
('PreferedMaximumLength', ULONG),
)
class LsarEnumerateTrustedDomainsResponse(NDRCALL):
structure = (
('EnumerationContext', ULONG),
        ('EnumerationBuffer',LSAPR_TRUSTED_ENUM_BUFFER),
('ErrorCode', NTSTATUS),
)
# 3.1.4.7.9 LsarOpenTrustedDomainByName (Opnum 55)
# 3.1.4.7.10 LsarCreateTrustedDomainEx2 (Opnum 59)
# 3.1.4.7.11 LsarCreateTrustedDomainEx (Opnum 51)
# 3.1.4.7.12 LsarCreateTrustedDomain (Opnum 12)
# 3.1.4.7.14 LsarSetInformationTrustedDomain (Opnum 27)
# 3.1.4.7.15 LsarQueryForestTrustInformation (Opnum 73)
class LsarQueryForestTrustInformation(NDRCALL):
opnum = 73
structure = (
('PolicyHandle', LSAPR_HANDLE),
('TrustedDomainName', LSA_UNICODE_STRING),
('HighestRecordType', LSA_FOREST_TRUST_RECORD_TYPE),
)
class LsarQueryForestTrustInformationResponse(NDRCALL):
structure = (
('ForestTrustInfo', PLSA_FOREST_TRUST_INFORMATION),
('ErrorCode', NTSTATUS),
)
# 3.1.4.7.16 LsarSetForestTrustInformation (Opnum 74)
# 3.1.4.8.1 LsarEnumeratePrivileges (Opnum 2)
class LsarEnumeratePrivileges(NDRCALL):
opnum = 2
structure = (
('PolicyHandle', LSAPR_HANDLE),
('EnumerationContext', ULONG),
('PreferedMaximumLength', ULONG),
)
class LsarEnumeratePrivilegesResponse(NDRCALL):
structure = (
('EnumerationContext', ULONG),
('EnumerationBuffer', LSAPR_PRIVILEGE_ENUM_BUFFER),
('ErrorCode', NTSTATUS),
)
# 3.1.4.8.2 LsarLookupPrivilegeValue (Opnum 31)
class LsarLookupPrivilegeValue(NDRCALL):
opnum = 31
structure = (
('PolicyHandle', LSAPR_HANDLE),
('Name', RPC_UNICODE_STRING),
)
class LsarLookupPrivilegeValueResponse(NDRCALL):
structure = (
('Value', LUID),
('ErrorCode', NTSTATUS),
)
# 3.1.4.8.3 LsarLookupPrivilegeName (Opnum 32)
class LsarLookupPrivilegeName(NDRCALL):
opnum = 32
structure = (
('PolicyHandle', LSAPR_HANDLE),
('Value', LUID),
)
class LsarLookupPrivilegeNameResponse(NDRCALL):
structure = (
('Name', PRPC_UNICODE_STRING),
('ErrorCode', NTSTATUS),
)
# 3.1.4.8.4 LsarLookupPrivilegeDisplayName (Opnum 33)
class LsarLookupPrivilegeDisplayName(NDRCALL):
opnum = 33
structure = (
('PolicyHandle', LSAPR_HANDLE),
('Name', RPC_UNICODE_STRING),
('ClientLanguage', USHORT),
('ClientSystemDefaultLanguage', USHORT),
)
class LsarLookupPrivilegeDisplayNameResponse(NDRCALL):
structure = (
('Name', PRPC_UNICODE_STRING),
('LanguageReturned', UCHAR),
('ErrorCode', NTSTATUS),
)
# 3.1.4.9.1 LsarQuerySecurityObject (Opnum 3)
class LsarQuerySecurityObject(NDRCALL):
opnum = 3
structure = (
('PolicyHandle', LSAPR_HANDLE),
('SecurityInformation', SECURITY_INFORMATION),
)
class LsarQuerySecurityObjectResponse(NDRCALL):
structure = (
('SecurityDescriptor', PLSAPR_SR_SECURITY_DESCRIPTOR),
('ErrorCode', NTSTATUS),
)
# 3.1.4.9.2 LsarSetSecurityObject (Opnum 4)
class LsarSetSecurityObject(NDRCALL):
opnum = 4
structure = (
('PolicyHandle', LSAPR_HANDLE),
('SecurityInformation', SECURITY_INFORMATION),
('SecurityDescriptor', LSAPR_SR_SECURITY_DESCRIPTOR),
)
class LsarSetSecurityObjectResponse(NDRCALL):
structure = (
('ErrorCode', NTSTATUS),
)
# 3.1.4.9.3 LsarDeleteObject (Opnum 34)
class LsarDeleteObject(NDRCALL):
opnum = 34
structure = (
('ObjectHandle', LSAPR_HANDLE),
)
class LsarDeleteObjectResponse(NDRCALL):
structure = (
('ObjectHandle', LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
# 3.1.4.9.4 LsarClose (Opnum 0)
class LsarClose(NDRCALL):
opnum = 0
structure = (
('ObjectHandle', LSAPR_HANDLE),
)
class LsarCloseResponse(NDRCALL):
structure = (
('ObjectHandle', LSAPR_HANDLE),
('ErrorCode', NTSTATUS),
)
################################################################################
# OPNUMs and their corresponding structures
################################################################################
OPNUMS = {
0 : (LsarClose, LsarCloseResponse),
2 : (LsarEnumeratePrivileges, LsarEnumeratePrivilegesResponse),
3 : (LsarQuerySecurityObject, LsarQuerySecurityObjectResponse),
4 : (LsarSetSecurityObject, LsarSetSecurityObjectResponse),
6 : (LsarOpenPolicy, LsarOpenPolicyResponse),
7 : (LsarQueryInformationPolicy, LsarQueryInformationPolicyResponse),
8 : (LsarSetInformationPolicy, LsarSetInformationPolicyResponse),
10 : (LsarCreateAccount, LsarCreateAccountResponse),
11 : (LsarEnumerateAccounts, LsarEnumerateAccountsResponse),
#12 : (LsarCreateTrustedDomain, LsarCreateTrustedDomainResponse),
13 : (LsarEnumerateTrustedDomains, LsarEnumerateTrustedDomainsResponse),
16 : (LsarCreateSecret, LsarCreateSecretResponse),
17 : (LsarOpenAccount, LsarOpenAccountResponse),
18 : (LsarEnumeratePrivilegesAccount, LsarEnumeratePrivilegesAccountResponse),
19 : (LsarAddPrivilegesToAccount, LsarAddPrivilegesToAccountResponse),
20 : (LsarRemovePrivilegesFromAccount, LsarRemovePrivilegesFromAccountResponse),
23 : (LsarGetSystemAccessAccount, LsarGetSystemAccessAccountResponse),
24 : (LsarSetSystemAccessAccount, LsarSetSystemAccessAccountResponse),
#25 : (LsarOpenTrustedDomain, LsarOpenTrustedDomainResponse),
#26 : (LsarQueryInfoTrustedDomain, LsarQueryInfoTrustedDomainResponse),
#27 : (LsarSetInformationTrustedDomain, LsarSetInformationTrustedDomainResponse),
28 : (LsarOpenSecret, LsarOpenSecretResponse),
29 : (LsarSetSecret, LsarSetSecretResponse),
30 : (LsarQuerySecret, LsarQuerySecretResponse),
31 : (LsarLookupPrivilegeValue, LsarLookupPrivilegeValueResponse),
32 : (LsarLookupPrivilegeName, LsarLookupPrivilegeNameResponse),
33 : (LsarLookupPrivilegeDisplayName, LsarLookupPrivilegeDisplayNameResponse),
34 : (LsarDeleteObject, LsarDeleteObjectResponse),
35 : (LsarEnumerateAccountsWithUserRight, LsarEnumerateAccountsWithUserRightResponse),
36 : (LsarEnumerateAccountRights, LsarEnumerateAccountRightsResponse),
37 : (LsarAddAccountRights, LsarAddAccountRightsResponse),
38 : (LsarRemoveAccountRights, LsarRemoveAccountRightsResponse),
#39 : (LsarQueryTrustedDomainInfo, LsarQueryTrustedDomainInfoResponse),
#40 : (LsarSetTrustedDomainInfo, LsarSetTrustedDomainInfoResponse),
#41 : (LsarDeleteTrustedDomain, LsarDeleteTrustedDomainResponse),
42 : (LsarStorePrivateData, LsarStorePrivateDataResponse),
43 : (LsarRetrievePrivateData, LsarRetrievePrivateDataResponse),
44 : (LsarOpenPolicy2, LsarOpenPolicy2Response),
46 : (LsarQueryInformationPolicy2, LsarQueryInformationPolicy2Response),
47 : (LsarSetInformationPolicy2, LsarSetInformationPolicy2Response),
#48 : (LsarQueryTrustedDomainInfoByName, LsarQueryTrustedDomainInfoByNameResponse),
#49 : (LsarSetTrustedDomainInfoByName, LsarSetTrustedDomainInfoByNameResponse),
50 : (LsarEnumerateTrustedDomainsEx, LsarEnumerateTrustedDomainsExResponse),
#51 : (LsarCreateTrustedDomainEx, LsarCreateTrustedDomainExResponse),
53 : (LsarQueryDomainInformationPolicy, LsarQueryDomainInformationPolicyResponse),
#54 : (LsarSetDomainInformationPolicy, LsarSetDomainInformationPolicyResponse),
#55 : (LsarOpenTrustedDomainByName, LsarOpenTrustedDomainByNameResponse),
#59 : (LsarCreateTrustedDomainEx2, LsarCreateTrustedDomainEx2Response),
#73 : (LsarQueryForestTrustInformation, LsarQueryForestTrustInformationResponse),
#74 : (LsarSetForestTrustInformation, LsarSetForestTrustInformationResponse),
}
################################################################################
# HELPER FUNCTIONS
################################################################################
def hLsarOpenPolicy2(dce, desiredAccess = MAXIMUM_ALLOWED):
request = LsarOpenPolicy2()
request['SystemName'] = NULL
request['ObjectAttributes']['RootDirectory'] = NULL
request['ObjectAttributes']['ObjectName'] = NULL
request['ObjectAttributes']['SecurityDescriptor'] = NULL
request['ObjectAttributes']['SecurityQualityOfService'] = NULL
request['DesiredAccess'] = desiredAccess
return dce.request(request)
def hLsarOpenPolicy(dce, desiredAccess = MAXIMUM_ALLOWED):
request = LsarOpenPolicy()
request['SystemName'] = NULL
request['ObjectAttributes']['RootDirectory'] = NULL
request['ObjectAttributes']['ObjectName'] = NULL
request['ObjectAttributes']['SecurityDescriptor'] = NULL
request['ObjectAttributes']['SecurityQualityOfService'] = NULL
request['DesiredAccess'] = desiredAccess
return dce.request(request)
def hLsarQueryInformationPolicy2(dce, policyHandle, informationClass):
request = LsarQueryInformationPolicy2()
request['PolicyHandle'] = policyHandle
request['InformationClass'] = informationClass
return dce.request(request)
def hLsarQueryInformationPolicy(dce, policyHandle, informationClass):
request = LsarQueryInformationPolicy()
request['PolicyHandle'] = policyHandle
request['InformationClass'] = informationClass
return dce.request(request)
def hLsarQueryDomainInformationPolicy(dce, policyHandle, informationClass):
    request = LsarQueryDomainInformationPolicy()
request['PolicyHandle'] = policyHandle
request['InformationClass'] = informationClass
return dce.request(request)
def hLsarEnumerateAccounts(dce, policyHandle, preferedMaximumLength=0xffffffff):
request = LsarEnumerateAccounts()
request['PolicyHandle'] = policyHandle
request['PreferedMaximumLength'] = preferedMaximumLength
return dce.request(request)
def hLsarEnumerateAccountsWithUserRight(dce, policyHandle, UserRight):
request = LsarEnumerateAccountsWithUserRight()
request['PolicyHandle'] = policyHandle
request['UserRight'] = UserRight
return dce.request(request)
def hLsarEnumerateTrustedDomainsEx(dce, policyHandle, enumerationContext=0, preferedMaximumLength=0xffffffff):
request = LsarEnumerateTrustedDomainsEx()
request['PolicyHandle'] = policyHandle
request['EnumerationContext'] = enumerationContext
request['PreferedMaximumLength'] = preferedMaximumLength
return dce.request(request)
def hLsarEnumerateTrustedDomains(dce, policyHandle, enumerationContext=0, preferedMaximumLength=0xffffffff):
request = LsarEnumerateTrustedDomains()
request['PolicyHandle'] = policyHandle
request['EnumerationContext'] = enumerationContext
request['PreferedMaximumLength'] = preferedMaximumLength
return dce.request(request)
def hLsarOpenAccount(dce, policyHandle, accountSid, desiredAccess=MAXIMUM_ALLOWED):
request = LsarOpenAccount()
request['PolicyHandle'] = policyHandle
request['AccountSid'].fromCanonical(accountSid)
request['DesiredAccess'] = desiredAccess
return dce.request(request)
def hLsarClose(dce, objectHandle):
request = LsarClose()
request['ObjectHandle'] = objectHandle
return dce.request(request)
def hLsarCreateAccount(dce, policyHandle, accountSid, desiredAccess=MAXIMUM_ALLOWED):
request = LsarCreateAccount()
request['PolicyHandle'] = policyHandle
request['AccountSid'].fromCanonical(accountSid)
request['DesiredAccess'] = desiredAccess
return dce.request(request)
def hLsarDeleteObject(dce, objectHandle):
request = LsarDeleteObject()
request['ObjectHandle'] = objectHandle
return dce.request(request)
def hLsarEnumeratePrivilegesAccount(dce, accountHandle):
request = LsarEnumeratePrivilegesAccount()
request['AccountHandle'] = accountHandle
return dce.request(request)
def hLsarGetSystemAccessAccount(dce, accountHandle):
request = LsarGetSystemAccessAccount()
request['AccountHandle'] = accountHandle
return dce.request(request)
def hLsarSetSystemAccessAccount(dce, accountHandle, systemAccess):
request = LsarSetSystemAccessAccount()
request['AccountHandle'] = accountHandle
request['SystemAccess'] = systemAccess
return dce.request(request)
def hLsarAddPrivilegesToAccount(dce, accountHandle, privileges):
request = LsarAddPrivilegesToAccount()
request['AccountHandle'] = accountHandle
request['Privileges']['PrivilegeCount'] = len(privileges)
request['Privileges']['Control'] = 0
for priv in privileges:
request['Privileges']['Privilege'].append(priv)
return dce.request(request)
def hLsarRemovePrivilegesFromAccount(dce, accountHandle, privileges, allPrivileges = False):
request = LsarRemovePrivilegesFromAccount()
request['AccountHandle'] = accountHandle
request['Privileges']['Control'] = 0
if privileges != NULL:
request['Privileges']['PrivilegeCount'] = len(privileges)
for priv in privileges:
request['Privileges']['Privilege'].append(priv)
else:
request['Privileges']['PrivilegeCount'] = NULL
request['AllPrivileges'] = allPrivileges
return dce.request(request)
def hLsarEnumerateAccountRights(dce, policyHandle, accountSid):
request = LsarEnumerateAccountRights()
request['PolicyHandle'] = policyHandle
request['AccountSid'].fromCanonical(accountSid)
return dce.request(request)
def hLsarAddAccountRights(dce, policyHandle, accountSid, userRights):
request = LsarAddAccountRights()
request['PolicyHandle'] = policyHandle
request['AccountSid'].fromCanonical(accountSid)
request['UserRights']['EntriesRead'] = len(userRights)
for userRight in userRights:
right = RPC_UNICODE_STRING()
right['Data'] = userRight
request['UserRights']['UserRights'].append(right)
return dce.request(request)
def hLsarRemoveAccountRights(dce, policyHandle, accountSid, userRights):
request = LsarRemoveAccountRights()
request['PolicyHandle'] = policyHandle
request['AccountSid'].fromCanonical(accountSid)
request['UserRights']['EntriesRead'] = len(userRights)
for userRight in userRights:
right = RPC_UNICODE_STRING()
right['Data'] = userRight
request['UserRights']['UserRights'].append(right)
return dce.request(request)
def hLsarCreateSecret(dce, policyHandle, secretName, desiredAccess=MAXIMUM_ALLOWED):
request = LsarCreateSecret()
request['PolicyHandle'] = policyHandle
request['SecretName'] = secretName
request['DesiredAccess'] = desiredAccess
return dce.request(request)
def hLsarOpenSecret(dce, policyHandle, secretName, desiredAccess=MAXIMUM_ALLOWED):
request = LsarOpenSecret()
request['PolicyHandle'] = policyHandle
request['SecretName'] = secretName
request['DesiredAccess'] = desiredAccess
return dce.request(request)
def hLsarSetSecret(dce, secretHandle, encryptedCurrentValue, encryptedOldValue):
    request = LsarSetSecret()
request['SecretHandle'] = secretHandle
if encryptedCurrentValue != NULL:
request['EncryptedCurrentValue']['Length'] = len(encryptedCurrentValue)
request['EncryptedCurrentValue']['MaximumLength'] = len(encryptedCurrentValue)
request['EncryptedCurrentValue']['Buffer'] = list(encryptedCurrentValue)
else:
        request['EncryptedCurrentValue'] = NULL
if encryptedOldValue != NULL:
request['EncryptedOldValue']['Length'] = len(encryptedOldValue)
request['EncryptedOldValue']['MaximumLength'] = len(encryptedOldValue)
request['EncryptedOldValue']['Buffer'] = list(encryptedOldValue)
else:
        request['EncryptedOldValue'] = NULL
return dce.request(request)
def hLsarQuerySecret(dce, secretHandle):
request = LsarQuerySecret()
request['SecretHandle'] = secretHandle
request['EncryptedCurrentValue']['Buffer'] = NULL
request['EncryptedOldValue']['Buffer'] = NULL
request['OldValueSetTime'] = NULL
return dce.request(request)
def hLsarRetrievePrivateData(dce, policyHandle, keyName):
request = LsarRetrievePrivateData()
request['PolicyHandle'] = policyHandle
request['KeyName'] = keyName
retVal = dce.request(request)
return ''.join(retVal['EncryptedData']['Buffer'])
def hLsarStorePrivateData(dce, policyHandle, keyName, encryptedData):
request = LsarStorePrivateData()
request['PolicyHandle'] = policyHandle
request['KeyName'] = keyName
if encryptedData != NULL:
request['EncryptedData']['Length'] = len(encryptedData)
request['EncryptedData']['MaximumLength'] = len(encryptedData)
request['EncryptedData']['Buffer'] = list(encryptedData)
else:
request['EncryptedData'] = NULL
return dce.request(request)
def hLsarEnumeratePrivileges(dce, policyHandle, enumerationContext = 0, preferedMaximumLength = 0xffffffff):
request = LsarEnumeratePrivileges()
request['PolicyHandle'] = policyHandle
request['EnumerationContext'] = enumerationContext
request['PreferedMaximumLength'] = preferedMaximumLength
return dce.request(request)
def hLsarLookupPrivilegeValue(dce, policyHandle, name):
request = LsarLookupPrivilegeValue()
request['PolicyHandle'] = policyHandle
request['Name'] = name
return dce.request(request)
def hLsarLookupPrivilegeName(dce, policyHandle, luid):
request = LsarLookupPrivilegeName()
request['PolicyHandle'] = policyHandle
request['Value'] = luid
return dce.request(request)
def hLsarQuerySecurityObject(dce, policyHandle, securityInformation = OWNER_SECURITY_INFORMATION):
request = LsarQuerySecurityObject()
request['PolicyHandle'] = policyHandle
request['SecurityInformation'] = securityInformation
retVal = dce.request(request)
return ''.join(retVal['SecurityDescriptor']['SecurityDescriptor'])
def hLsarSetSecurityObject(dce, policyHandle, securityInformation, securityDescriptor):
request = LsarSetSecurityObject()
request['PolicyHandle'] = policyHandle
request['SecurityInformation'] = securityInformation
request['SecurityDescriptor']['Length'] = len(securityDescriptor)
request['SecurityDescriptor']['SecurityDescriptor'] = list(securityDescriptor)
return dce.request(request)
def hLsarSetInformationPolicy2(dce, policyHandle, informationClass, policyInformation):
request = LsarSetInformationPolicy2()
request['PolicyHandle'] = policyHandle
request['InformationClass'] = informationClass
request['PolicyInformation'] = policyInformation
return dce.request(request)
def hLsarSetInformationPolicy(dce, policyHandle, informationClass, policyInformation):
request = LsarSetInformationPolicy()
request['PolicyHandle'] = policyHandle
request['InformationClass'] = informationClass
request['PolicyInformation'] = policyInformation
return dce.request(request)
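#-------------------------------------------------------------------------------
# Illustrative usage (a sketch, not part of the original interface definitions):
# driving the helper functions above over an SMB named pipe. The host,
# credentials and pipe below are placeholders; the transport calls follow
# impacket's DCERPCTransportFactory API, and MSRPC_UUID_LSAD / MAXIMUM_ALLOWED
# are defined earlier in this module.
if __name__ == '__main__':
    from impacket.dcerpc.v5 import transport
    stringBinding = r'ncacn_np:192.168.1.10[\pipe\lsarpc]'   # hypothetical target
    rpctransport = transport.DCERPCTransportFactory(stringBinding)
    rpctransport.set_credentials('user', 'Passw0rd!')        # hypothetical credentials
    dce = rpctransport.get_dce_rpc()
    dce.connect()
    dce.bind(MSRPC_UUID_LSAD)
    resp = hLsarOpenPolicy2(dce, MAXIMUM_ALLOWED)
    policyHandle = resp['PolicyHandle']
    # first page of account SIDs, then clean up
    resp = hLsarEnumerateAccounts(dce, policyHandle)
    print resp['EnumerationBuffer']['Entries']
    hLsarClose(dce, policyHandle)
    dce.disconnect()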
|
hecchi777/S3-SlaacSecuritySolution
|
impacket-0.9.11/impacket/dcerpc/v5/lsad.py
|
Python
|
apache-2.0
| 57,687 | 0.0121 |
for i in range(0, 53):
filepath = '/Users/tunder/Dropbox/PythonScripts/requests/pbs/fic' + str(i) + '.pbs'
with open(filepath, mode='w', encoding = 'utf-8') as file:
file.write('#!/bin/bash\n')
file.write('#PBS -l walltime=10:00:00\n')
file.write('#PBS -l nodes=1:ppn=12\n')
file.write('#PBS -N Fiction' + str(i) + '\n')
file.write('#PBS -q ichass\n')
file.write('#PBS -m be\n')
file.write('cd $PBS_O_WORKDIR\n')
file.write('python3 extract.py -idfile /projects/ichass/usesofscale/hathimeta/pre20cslices/slice' + str(i) + '.txt -g fic -v -sub -rh' + '\n')
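# Companion sketch (not in the original script): submit every file written
# above in one pass. Assumes the standard PBS 'qsub' client is on PATH and
# reuses the same hard-coded directory as the loop.
import subprocess

def submit_all(n=53, prefix='/Users/tunder/Dropbox/PythonScripts/requests/pbs/fic'):
    for i in range(n):
        subprocess.call(['qsub', prefix + str(i) + '.pbs'])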
|
tedunderwood/GenreProject
|
python/extract/GenerateExtractPBS.py
|
Python
|
mit
| 630 | 0.009524 |
#
# Collective Knowledge (artifact description (reproducibility, ACM meta, etc))
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net
#
cfg={} # Will be updated by CK (meta description of this module)
work={} # Will be updated by CK (temporal data)
ck=None # Will be updated by CK (initialized CK kernel)
# Local settings
##############################################################################
# Initialize module
def init(i):
"""
Input: {}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
return {'return':0}
##############################################################################
def recursive_repos(i):
import os
repo=i['repo']
repo_deps=i.get('repo_deps',[])
level=i.get('level','')
ilevel=i.get('ilevel',0)
if ilevel>8:
# Somewhere got into loop - quit
# ck.out('Warning: you have a cyclic dependency in your repositories ...')
return {'return':0, 'repo_deps':repo_deps}
# Load repo
r=ck.access({'action':'load',
'module_uoa':cfg['module_deps']['repo'],
'data_uoa':repo})
if r['return']>0: return r
d=r['dict']
# Note that sometimes we update .ckr.json while CK keeps old deps cached
p=d.get('path','')
p1=os.path.join(p, ck.cfg['repo_file'])
if os.path.isfile(p1):
r=ck.load_json_file({'json_file':p1})
if r['return']==0:
d=r['dict'].get('dict',{})
rd=d.get('repo_deps',{})
# print (level+repo)
for q in rd:
drepo=q['repo_uoa']
if drepo!=repo:
repo_deps.append(drepo)
r=recursive_repos({'repo':drepo, 'repo_deps':repo_deps, 'level':level+' ', 'ilevel':ilevel+1})
if r['return']>0: return r
return {'return':0, 'repo_deps':repo_deps}
##############################################################################
# prepare artifact snapshot
def snapshot(i):
"""
Input: {
repo - which repo to snapshot with all deps
(file_name) - customize name ("ck-artifacts-" by default)
(no_deps) - if 'yes', do not process repo dependencies (useful for results repo accompanying main repos)
(copy_repos) - if 'yes', copy repositories instead of zipping
(date) - use this date (YYYYMMDD) instead of current one
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
import os
import platform
import zipfile
import shutil
o=i.get('out','')
repo=i.get('repo','')
if repo=='':
return {'return':1, 'error': '"repo" to snapshot is not defined'}
no_deps=i.get('no_deps','')=='yes'
copy_repos=i.get('copy_repos','')=='yes'
force_clean=i.get('force_clean','')=='yes'
# Preparing tmp directory where to zip repos and add scripts ...
curdir0=os.getcwd()
# ptmp=os.path.join(curdir0, 'tmp')
import tempfile
ptmp=os.path.join(tempfile.gettempdir(),'tmp-snapshot')
if o=='con':
ck.out('Temp directory: '+ptmp)
ck.out('')
if os.path.isdir(ptmp) and force_clean:
shutil.rmtree(ptmp, onerror=ck.rm_read_only)
if os.path.isdir(ptmp):
r=ck.inp({'text':'Directory "'+ptmp+'" exists. Delete (Y/n)?'})
if r['return']>0: return r
ck.out('')
x=r['string'].strip().lower()
if x=='' or x=='y' or x=='yes':
r=ck.delete_directory({'path':ptmp})
if r['return']>0: return r
if not os.path.isdir(ptmp):
os.makedirs(ptmp)
os.chdir(ptmp)
curdir=os.getcwd()
# Checking repo deps
final_repo_deps=[]
if not no_deps:
if o=='con':
ck.out('Checking dependencies on other repos ...')
r=recursive_repos({'repo':repo})
if r['return']>0: return r
# Removing redundant
for q in reversed(r['repo_deps']):
if q not in final_repo_deps:
final_repo_deps.append(q)
if repo not in final_repo_deps:
final_repo_deps.append(repo)
if o=='con':
ck.out('')
for q in final_repo_deps:
ck.out(' * '+q)
ck.out('')
ck.out('Collecting revisions, can take some time ...')
ck.out('')
r=ck.reload_repo_cache({}) # Ignore errors
pp=[]
pp2={}
il=0
path_to_main_repo=''
for xrepo in final_repo_deps:
# Reload repo to get UID
r=ck.access({'action':'load',
'module_uoa':cfg['module_deps']['repo'],
'data_uoa':xrepo})
if r['return']>0: return r
ruid=r['data_uid']
if ruid not in ck.cache_repo_info:
return {'return':1, 'error':'"'+q+'" repo is not in cache - strange!'}
# Get repo info
qq=ck.cache_repo_info[ruid]
d=qq['dict']
p=d.get('path','')
if xrepo==repo:
path_to_main_repo=p
t=d.get('shared','')
duoa=qq['data_uoa']
if t!='':
if len(duoa)>il: il=len(duoa)
url=d.get('url','')
branch=''
checkout=''
if os.path.isdir(p):
# Detect status
pc=os.getcwd()
os.chdir(p)
# Get current branch
r=ck.run_and_get_stdout({'cmd':['git','rev-parse','--abbrev-ref','HEAD']})
if r['return']==0 and r['return_code']==0:
branch=r['stdout'].strip()
# Get current checkout
r=ck.run_and_get_stdout({'cmd':['git','rev-parse','--short','HEAD']})
if r['return']==0 and r['return_code']==0:
checkout=r['stdout'].strip()
os.chdir(pc)
x={'branch':branch, 'checkout':checkout, 'path':p, 'type':t, 'url':url, 'data_uoa':duoa}
else:
x={'path':p, 'type':t, 'data_uoa':duoa}
pp.append(x)
pp2[duoa]=x
if copy_repos:
pu=os.path.join(ptmp,'CK')
if not os.path.isdir(pu):
os.mkdir(pu)
pu1=os.path.join(pu,xrepo)
if o=='con':
ck.out(' * Copying repo '+xrepo+' ...')
shutil.copytree(p,pu1,ignore=shutil.ignore_patterns('*.pyc', 'tmp', 'tmp*', '__pycache__'))
# Copying Readme if exists
fr='README.md'
pr1=os.path.join(path_to_main_repo, fr)
if os.path.isfile(pr1):
pr2=os.path.join(ptmp, fr)
if os.path.isfile(pr2):
os.remove(pr2)
shutil.copy(pr1,pr2)
# Print
if o=='con':
ck.out('')
for q in pp:
name=q['data_uoa']
x=' * '+name+' '*(il-len(name))
branch=q.get('branch','')
checkout=q.get('checkout','')
url=q.get('url','')
if branch!='' or checkout!='' or url!='':
x+=' ( '+branch+' ; '+checkout+' ; '+url+' )'
ck.out(x)
os.chdir(curdir)
# Archiving
if o=='con':
ck.out('')
ck.out('Archiving ...')
# Add some dirs and files to ignore
for q in ['__pycache__', 'tmp', 'module.pyc', 'customize.pyc']:
if q not in ck.cfg['ignore_directories_when_archive_repo']:
ck.cfg['ignore_directories_when_archive_repo'].append(q)
# Get current date in YYYYMMDD
date=i.get('date','')
if date=='':
r=ck.get_current_date_time({})
if r['return']>0: return r
a=r['array']
a1=str(a['date_year'])
a2=str(a['date_month'])
a2='0'*(2-len(a2))+a2
a3=str(a['date_day'])
a3='0'*(2-len(a3))+a3
date=a1+a2+a3
date=date.strip()
if not copy_repos:
zips=[]
for repo in final_repo_deps:
if o=='con':
ck.out('')
ck.out(' * '+repo)
ck.out('')
an='ckr-'+repo
if pp2[repo].get('branch','')!='':
an+='--'+pp2[repo]['branch']
if pp2[repo].get('checkout','')!='':
an+='--'+pp2[repo]['checkout']
an+='.zip'
zips.append(an)
r=ck.access({'action':'zip',
'module_uoa':cfg['module_deps']['repo'],
'data_uoa':repo,
'archive_name':an,
'overwrite':'yes',
'out':o})
if r['return']>0: return r
# Print sequence of adding CK repos (for self-sustainable virtual CK artifact)
if o=='con':
ck.out('')
for z in zips:
ck.out('ck add repo --zip='+z)
# Cloning CK master
if o=='con':
ck.out('')
ck.out('Cloning latest CK version ...')
ck.out('')
os.system('git clone https://github.com/ctuning/ck ck-master')
# Prepare scripts
if o=='con':
ck.out('')
ck.out('Preparing scripts ...')
for tp in ['win','linux']:
f1=cfg['bat_prepare_virtual_ck']
f2=cfg['bat_start_virtual_ck']
if tp=='win':
f1+='.bat'
f2+='.bat'
f3='\\'
f4='%~dp0'+f3
s='set PATH='+f4+'ck-master\\bin;%PATH%\n'
s+='set PYTHONPATH='+f4+'ck-master;%PYTHONPATH%\n'
s+='\n'
s+='set CK_REPOS='+f4+'CK\n'
s+='set CK_TOOLS='+f4+'CK-TOOLS\n'
s+='\n'
s1=s+'mkdir %CK_REPOS%\n'
s1+='mkdir %CK_TOOLS%\n'
s1+='\n'
s2=s+'rem uncomment next line to install tools to CK env entries rather than CK_TOOLS directory\n'
s2+='rem ck set kernel var.install_to_env=yes\n'
s2+='\n'
s2+='call ck ls repo\n\n'
s2+='cmd\n'
s3='call '
else:
f1+='.sh'
f2+='.sh'
f3='/'
f4='$PWD'+f3
s='#! /bin/bash\n'
s+='\n'
s+='export PATH='+f4+'ck-master/bin:$PATH\n'
s+='export PYTHONPATH='+f4+'ck-master:$PYTHONPATH\n'
s+='\n'
s+='export CK_REPOS='+f4+'CK\n'
s+='export CK_TOOLS='+f4+'CK-TOOLS\n'
s+='\n'
s1=s+'mkdir ${CK_REPOS}\n'
s1+='mkdir ${CK_TOOLS}\n'
s1+='\n'
s2=s+'# uncomment next line to install tools to CK env entries rather than CK_TOOLS directory\n'
s2+='# ck set kernel var.install_to_env=yes\n'
s2+='\n'
s2+='ck ls repo\n\n'
s2+='bash\n'
s3=''
# importing repos
if copy_repos:
for repo in final_repo_deps:
s1+=s3+'ck import repo --quiet --path='+f4+'CK'+f3+repo+'\n'
else:
for z in zips:
s1+=s3+'ck add repo --zip='+z+'\n'
# Recording scripts
r=ck.save_text_file({'text_file':f1, 'string':s1})
if r['return']>0: return r
r=ck.save_text_file({'text_file':f2, 'string':s2})
if r['return']>0: return r
# If non-Windows, set 755
if tp!='win':
os.system('chmod 755 '+f1)
os.system('chmod 755 '+f2)
# Generating final zip pack
fn=i.get('file_name','')
if fn=='': fn='ck-artifacts-'
fname=fn+date+'.zip'
# Write archive
os.chdir(ptmp)
if o=='con':
ck.out('')
ck.out('Recording '+fname+' ...')
r=ck.list_all_files({'path':'.', 'all':'yes'})
if r['return']>0: return r
flx=r['list']
try:
f=open(os.path.join(curdir0,fname), 'wb')
z=zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED)
for fn in flx:
z.write(fn, fn, zipfile.ZIP_DEFLATED)
# ck-install.json
z.close()
f.close()
except Exception as e:
return {'return':1, 'error':'failed to prepare CK artifact collections ('+format(e)+')'}
os.chdir(curdir0)
if os.path.isdir(ptmp) and force_clean:
shutil.rmtree(ptmp, onerror=ck.rm_read_only)
return {'return':0}
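##############################################################################
# Illustrative invocation (a sketch, not part of the module): calling the
# snapshot action above through an initialized CK kernel from Python. The repo
# name is a placeholder; the equivalent CLI would be
# `ck snapshot artifact --repo=<repo>`.
def example_snapshot_call(ck_kernel, repo_name='my-repo'):
    return ck_kernel.access({'action': 'snapshot',
                             'module_uoa': 'artifact',
                             'repo': repo_name,
                             'copy_repos': 'yes',
                             'out': 'con'})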
|
ctuning/ck-env
|
module/artifact/module.py
|
Python
|
bsd-3-clause
| 12,275 | 0.037882 |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.IEC61970.Core.IdentifiedObject import IdentifiedObject
class ErpInventory(IdentifiedObject):
"""Utility inventory-related information about an item or part (and not for description of the item and its attributes). It is used by ERP applications to enable the synchronization of Inventory data that exists on separate Item Master databases. This data is not the master data that describes the attributes of the item such as dimensions, weight, or unit of measure - it describes the item as it exists at a specific location.Utility inventory-related information about an item or part (and not for description of the item and its attributes). It is used by ERP applications to enable the synchronization of Inventory data that exists on separate Item Master databases. This data is not the master data that describes the attributes of the item such as dimensions, weight, or unit of measure - it describes the item as it exists at a specific location.
"""
def __init__(self, Asset=None, status=None, *args, **kw_args):
"""Initialises a new 'ErpInventory' instance.
@param Asset:
@param status:
"""
self._Asset = None
self.Asset = Asset
self.status = status
super(ErpInventory, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["Asset", "status"]
_many_refs = []
def getAsset(self):
return self._Asset
def setAsset(self, value):
if self._Asset is not None:
self._Asset._ErpInventory = None
self._Asset = value
if self._Asset is not None:
self._Asset.ErpInventory = None
self._Asset._ErpInventory = self
Asset = property(getAsset, setAsset)
status = None
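# Illustrative sketch (not part of the generated CIM model): setAsset() keeps
# the Asset<->ErpInventory association consistent in both directions. The
# stand-in class below only mimics the two attributes the setter touches; any
# real CIM15 Asset would behave the same way here.
if __name__ == '__main__':
    class _FakeAsset(object):
        ErpInventory = None
        _ErpInventory = None

    asset = _FakeAsset()
    inv = ErpInventory(Asset=asset)
    assert inv.Asset is asset
    assert asset._ErpInventory is inv   # back-reference maintained by setAsset()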
|
rwl/PyCIM
|
CIM15/IEC61970/Informative/InfERPSupport/ErpInventory.py
|
Python
|
mit
| 2,914 | 0.001373 |
# -*- coding: utf-8 -*-#
"""
Basic Twitter Authentication
requirements: Python 2.5+ tweepy (easy_install tweepy | pip install tweepy)
"""
__author__ = 'Bernie Hogan'
__version__= '1.0'
import string
import codecs
import os
import pickle
import copy
import sys
import json
import webbrowser
import tweepy
from tweepy import Cursor
import twitterhelpers as th
def getFollowerCount(api, screen_name="BarackObama"):
user = api.get_user(screen_name)
return user.followers_count
def getFollowingCount(api, screen_name="BarackObama"):
    user = api.get_user(screen_name)
    # debug output left over from development:
    # print user
    # print dir(user)
    return user.friends_count
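def getFollowerNames(api, screen_name="BarackObama", limit=10):
    # Illustrative sketch using the (otherwise unused) Cursor import above:
    # page through followers and collect up to `limit` screen names, following
    # tweepy's Cursor API from the same era as this script.
    names = []
    for follower in Cursor(api.followers, screen_name=screen_name).items(limit):
        names.append(follower.screen_name)
    return names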
if __name__=='__main__':
CONSUMER_KEY = th.CONSUMER_KEY
CONSUMER_SECRET = th.CONSUMER_SECRET
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
ACCESS_TOKEN_SECRET = th.ACCESS_TOKEN_SECRET
ACCESS_TOKEN = th.ACCESS_TOKEN
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
print "Now you have received an access token."
print "Or rather, your account has authorized this application to use the twitter api."
print "You have this many hits to the API left this hour: "
# print json.dumps(api.rate_limit_status(), indent = 1) #['remaining_hits']
print getFollowerCount(api, "blurky")
print getFollowingCount(api, "blurky")
|
oxfordinternetinstitute/scriptingcourse
|
DSR-Week 2/wk02_twitter_test.py
|
Python
|
gpl-3.0
| 1,313 | 0.033511 |
import json
from urllib import request
import pymongo
connection = pymongo.MongoClient('mongodb://localhost')
db = connection.reddit
stories = db.stories
# stories.drop()
# req = request.Request('http://www.reddit.com/r/technology/.json')
# req.add_header('User-agent', 'Mozilla/5.0')
# reddit_page = request.urlopen(req)
#
# parsed_reddit = json.loads(reddit_page.read().decode())
#
# print('Adding reddit posts')
# for item in parsed_reddit['data']['children']:
# stories.insert_one(item['data'])
#
# print('Finished adding reddit posts')
def find():
print('Keyword search started')
query = {'title': {'$regex': 'apple|google', '$options': 'i'}}
projection = {'title': 1, '_id': 0}
try:
cursor = stories.find(query, projection)
except Exception as e:
print('Unexpected error', type(e), e)
for post in cursor:
print(post)
find()
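def find_with_count():
    # Variant sketch (not in the original script): same keyword query, but
    # report the number of matches first. count_documents() assumes
    # pymongo 3.7+; older drivers exposed cursor.count() instead.
    query = {'title': {'$regex': 'apple|google', '$options': 'i'}}
    print('Matches:', stories.count_documents(query))
    for post in stories.find(query, {'title': 1, '_id': 0}):
        print(post)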
|
dossorio/python-blog
|
reddit-data-extractor.py
|
Python
|
mit
| 891 | 0.001122 |
# Copyright 2012-2013 Eric Ptak - trouch.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from webiopi.utils.types import toint
from webiopi.devices.spi import SPI
from webiopi.devices.analog import DAC
class MCP492X(SPI, DAC):
def __init__(self, chip, channelCount, vref):
SPI.__init__(self, toint(chip), 0, 8, 10000000)
DAC.__init__(self, channelCount, 12, float(vref))
self.buffered=False
self.gain=False
self.shutdown=False
self.values = [0 for i in range(channelCount)]
def __str__(self):
return "MCP492%d(chip=%d)" % (self._analogCount, self.chip)
def __analogRead__(self, channel, diff=False):
return self.values[channel]
def __analogWrite__(self, channel, value):
d = bytearray(2)
d[0] = 0
d[0] |= (channel & 0x01) << 7
d[0] |= (self.buffered & 0x01) << 6
d[0] |= (not self.gain & 0x01) << 5
d[0] |= (not self.shutdown & 0x01) << 4
d[0] |= (value >> 8) & 0x0F
d[1] = value & 0xFF
self.writeBytes(d)
self.values[channel] = value
class MCP4921(MCP492X):
def __init__(self, chip=0, vref=3.3):
        MCP492X.__init__(self, chip, 1, vref)
class MCP4922(MCP492X):
def __init__(self, chip=0, vref=3.3):
        MCP492X.__init__(self, chip, 2, vref)
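# Quick, hardware-free sanity check (a sketch, not part of the driver) of the
# command-word layout packed in __analogWrite__ above: channel, buffer, gain
# and shutdown flags plus a 12-bit code packed into two bytes, per the MCP492x
# datasheet layout this driver assumes.
if __name__ == '__main__':
    def _pack_word(channel, value, buffered=False, gain=False, shutdown=False):
        d = bytearray(2)
        d[0]  = (channel & 0x01) << 7
        d[0] |= (buffered & 0x01) << 6
        d[0] |= ((not gain) & 0x01) << 5
        d[0] |= ((not shutdown) & 0x01) << 4
        d[0] |= (value >> 8) & 0x0F
        d[1]  = value & 0xFF
        return d
    assert _pack_word(0, 0xFFF) == bytearray(b'\x3F\xFF')
    assert _pack_word(1, 0x000) == bytearray(b'\xB0\x00')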
|
thortex/rpi3-webiopi
|
webiopi_0.7.1/python/webiopi/devices/analog/mcp492X.py
|
Python
|
apache-2.0
| 1,847 | 0.005414 |
from multicorn import ForeignDataWrapper
from cassandra_provider import CassandraProvider
from properties import ISDEBUG
import properties
import schema_importer
import time
import logger  # assumed sibling module backing the logger.log() calls below
class CassandraFDW(ForeignDataWrapper):
def __init__(self, options, columns):
super(CassandraFDW, self).__init__(options, columns)
self.init_options = options
self.init_columns = columns
self.cassandra_provider = None
self.concurency_level = int(options.get('modify_concurency', properties.DEFAULT_CONCURENCY_LEVEL))
self.per_transaction_connection = options.get('per_transaction_connection', properties.PER_TRANSACTION_CONNECTION) == 'True'
self.modify_items = []
def build_cassandra_provider(self):
if self.cassandra_provider == None:
self.cassandra_provider = CassandraProvider(self.init_options, self.init_columns)
@classmethod
def import_schema(self, schema, srv_options, options, restriction_type, restricts):
return schema_importer.import_schema(schema, srv_options, options, restriction_type, restricts)
def insert(self, new_values):
if self.concurency_level > 1:
self.modify_items.append(('insert', new_values))
if len(self.modify_items) >= properties.BATCH_MODIFY_THRESHOLD:
self.end_modify()
return new_values
else:
return self.cassandra_provider.insert(new_values)
def delete(self, rowid):
if self.concurency_level > 1:
self.modify_items.append(('delete', rowid))
if len(self.modify_items) >= properties.BATCH_MODIFY_THRESHOLD:
self.end_modify()
return { }
else:
return self.cassandra_provider.delete(rowid)
def update(self, rowid, new_values):
if ISDEBUG:
logger.log(u"requested update {0}".format(new_values))
self.insert(new_values)
return new_values
def execute(self, quals, columns, sort_keys=None):
self.scan_start_time = time.time()
return self.cassandra_provider.execute(quals, columns, sort_keys)
def can_sort(self, sort_keys):
return []
def begin(self, serializable):
self.build_cassandra_provider()
if ISDEBUG:
logger.log("begin: {0}".format(serializable))
def commit(self):
if ISDEBUG:
logger.log("commit")
if self.per_transaction_connection:
self.close_cass_connection()
pass
def close_cass_connection(self):
if self.cassandra_provider != None:
self.cassandra_provider.close()
self.cassandra_provider = None
def end_modify(self):
try:
mod_len = len(self.modify_items)
if mod_len > 0:
if ISDEBUG:
logger.log("end modify")
logger.log("modify concurrency level: {0}".format(self.concurency_level))
self.cassandra_provider.execute_modify_items(self.modify_items, self.concurency_level)
finally:
self.modify_items = []
pass
def explain(self, quals, columns, sortkeys=None, verbose=False):
return self.cassandra_provider.build_select_stmt(quals, columns, self.cassandra_provider.allow_filtering, verbose)
def end_scan(self):
if ISDEBUG:
logger.log("end_scan. Total time: {0} ms".format((time.time() - self.scan_start_time) * 1000))
pass
def pre_commit(self):
if ISDEBUG:
logger.log("pre commit")
pass
def rollback(self):
if ISDEBUG:
logger.log("rollback")
pass
def sub_begin(self, level):
if ISDEBUG:
logger.log("sub begin {0}".format(level))
pass
def sub_commit(self, level):
if ISDEBUG:
logger.log("sub commit {0}".format(level))
pass
def sub_rollback(self, level):
if ISDEBUG:
logger.log("sub rollback {0}".format(level))
pass
@property
def rowid_column(self):
return self.cassandra_provider.get_row_id_column()
def get_rel_size(self, quals, columns):
return self.cassandra_provider.get_rel_size(quals, columns)
def get_path_keys(self):
self.scan_start_time = time.time()
return self.cassandra_provider.get_path_keys()
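################################################################################
# Standalone sketch (no Cassandra or multicorn required) of the write-batching
# pattern used by insert()/delete()/end_modify() above: mutations queue up
# until a threshold is reached, then flush as one batch. All names below are
# local to this illustration.
if __name__ == '__main__':
    class _BatchingWriter(object):
        def __init__(self, flush, threshold=2):
            self.items, self.flush, self.threshold = [], flush, threshold
        def add(self, op, payload):
            self.items.append((op, payload))
            if len(self.items) >= self.threshold:
                self.end_modify()
        def end_modify(self):
            try:
                if self.items:
                    self.flush(list(self.items))
            finally:
                self.items = []

    flushed = []
    w = _BatchingWriter(flushed.append)
    w.add('insert', {'id': 1})
    w.add('insert', {'id': 2})   # hits the threshold -> first flush
    w.add('delete', {'id': 1})
    w.end_modify()               # explicit flush, as end_modify() does above
    assert len(flushed) == 2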
|
rankactive/cassandra-fdw
|
cassandra-fdw/__init__.py
|
Python
|
mit
| 4,389 | 0.00319 |
#! /usr/bin/env python
import requests, json
from os.path import expanduser
from coinbase.wallet.client import Client
home = expanduser('~')
client = Client('YOUR_API_KEY', 'YOUR_API_SECRET')
accounts = client.get_accounts()
print accounts['data'][0]['balance']
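# Optional formatting sketch (same client object as above): in this version of
# the coinbase client each balance is a mapping with 'amount' and 'currency'
# fields -- treat that layout as an assumption of this example.
balance = accounts['data'][0]['balance']
print '%s %s' % (balance['amount'], balance['currency'])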
|
lightningvapes/conky-ethereum-ticker-with-graph-chart
|
cb_balance_grabber.py
|
Python
|
gpl-3.0
| 267 | 0.011236 |
import struct
import numpy
import io
import pickle
import pyctrl.packet as packet
def testA():
# test A
assert packet.pack('A','C') == b'AC'
assert packet.pack('A','B') == b'AB'
assert packet.pack('A','C') != b'AB'
assert packet.unpack_stream(io.BytesIO(b'AC')) == ('A', 'C')
assert packet.unpack_stream(io.BytesIO(b'AB')) == ('A', 'B')
assert packet.unpack_stream(io.BytesIO(b'AB')) != ('A', 'C')
def testC():
# test C
assert packet.pack('C','C') == b'CC'
assert packet.pack('C','B') == b'CB'
assert packet.pack('C','C') != b'CB'
assert packet.unpack_stream(io.BytesIO(b'CC')) == ('C', 'C')
assert packet.unpack_stream(io.BytesIO(b'CB')) == ('C', 'B')
assert packet.unpack_stream(io.BytesIO(b'CB')) != ('C', 'C')
def testS():
# test S
assert packet.pack('S','abc') == struct.pack('<cI3s', b'S', 3, b'abc')
assert packet.pack('S','abcd') != struct.pack('<cI3s', b'S', 3, b'abc')
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cI3s', b'S', 3, b'abc'))) == ('S', 'abc')
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cI3s', b'S', 3, b'abc'))) != ('S', 'abcd')
def testIFD():
# test I
assert packet.pack('I',3) == struct.pack('<ci', b'I', 3)
assert packet.pack('I',3) != struct.pack('<ci', b'I', 4)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<ci', b'I', 3))) == ('I', 3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<ci', b'I', 4))) != ('I', 3)
# test F
assert packet.pack('F',3.3) == struct.pack('<cf', b'F', 3.3)
assert packet.pack('F',3.3) != struct.pack('<cf', b'F', 4.3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cf', b'F', numpy.float32(3.3)))) == ('F', numpy.float32(3.3))
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cf', b'F', 4.3))) != ('F', 3.3)
# test D
assert packet.pack('D',3.3) == struct.pack('<cd', b'D', 3.3)
assert packet.pack('D',3.3) != struct.pack('<cd', b'D', 4.3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cd', b'D', 3.3))) == ('D', 3.3)
assert packet.unpack_stream(
io.BytesIO(struct.pack('<cd', b'D', 4.3))) != ('D', 3.3)
def testV():
# test VI
vector = numpy.array((1,2,3), int)
assert packet.pack('V',vector) == struct.pack('<ccIiii', b'V', b'I', 3, 1, 2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIiii', b'V', b'I', 3, 1, 2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
vector = numpy.array((1,-2,3), int)
assert packet.pack('V',vector) == struct.pack('<ccIiii', b'V', b'I', 3, 1, -2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIiii', b'V', b'I', 3, 1, -2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
# test VF
vector = numpy.array((1.3,-2,3), numpy.float32)
assert packet.pack('V',vector) == struct.pack('<ccIfff', b'V', b'F', 3, 1.3, -2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIfff', b'V', b'F', 3, 1.3, -2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
# test VD
vector = numpy.array((1.3,-2,3), float)
assert packet.pack('V',vector) == struct.pack('<ccIddd', b'V', b'D', 3, 1.3, -2, 3)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<ccIddd', b'V', b'D', 3, 1.3, -2, 3)))
assert type == 'V'
assert numpy.all(rvector == vector)
def testM():
# test MI
vector = numpy.array(((1,2,3), (3,4,5)), int)
assert packet.pack('M',vector) == struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, 2, 3, 3, 4, 5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, 2, 3, 3, 4, 5)))
assert type == 'M'
assert numpy.all(rvector == vector)
vector = numpy.array(((1,-2,3), (3,4,-5)), int)
assert packet.pack('M',vector) == struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, -2, 3, 3, 4, -5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIiiiiii', b'M', 2, b'V', b'I', 6, 1, -2, 3, 3, 4, -5)))
assert type == 'M'
assert numpy.all(rvector == vector)
# test MF
vector = numpy.array(((1.3,-2,3), (0,-1,2.5)), numpy.float32)
assert packet.pack('M',vector) == struct.pack('<cIccIffffff', b'M', 2, b'V', b'F', 6, 1.3, -2, 3, 0, -1, 2.5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIffffff', b'M', 2, b'V', b'F', 6, 1.3, -2, 3, 0, -1, 2.5)))
assert type == 'M'
assert numpy.all(rvector == vector)
# test MD
    vector = numpy.array(((1.3,-2,3), (0,-1,2.5)), float)
assert packet.pack('M',vector) == struct.pack('<cIccIdddddd', b'M', 2, b'V', b'D', 6, 1.3, -2, 3, 0, -1, 2.5)
(type, rvector) = packet.unpack_stream(
io.BytesIO(struct.pack('<cIccIdddddd', b'M', 2, b'V', b'D', 6, 1.3, -2, 3, 0, -1, 2.5)))
assert type == 'M'
assert numpy.all(rvector == vector)
def testP():
    vector = numpy.array(((1.3,-2,3), (0,-1,2.5)), float)
string = packet.pack('P', vector)
(type, rvector) = packet.unpack_stream(io.BytesIO(string))
assert type == 'P'
assert numpy.all(rvector == vector)
def testKR():
args = { 'a': 1, 'b': 2 }
string = packet.pack('K', args)
(type, rargs) = packet.unpack_stream(io.BytesIO(string))
assert type == 'K'
assert (args == rargs)
args = ('a', 1, 'b', 2)
string = packet.pack('R', args)
(type, rargs) = packet.unpack_stream(io.BytesIO(string))
assert type == 'R'
assert (args == rargs)
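# A sketch, not part of the original suite: it re-derives the container
# layout the assertions above already encode (a one-byte type code, a
# one-byte element type for vectors, then a little-endian uint32 count
# followed by the payload).
def _sketch_vector_layout():
    import struct
    values = (1, 2, 3)
    raw = struct.pack('<ccI%di' % len(values), b'V', b'I', len(values), *values)
    assert raw[:1] == b'V' and raw[1:2] == b'I'
    assert struct.unpack('<I', raw[2:6])[0] == len(values)
    assert struct.unpack('<%di' % len(values), raw[6:]) == values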
if __name__ == "__main__":
testA()
testC()
testS()
testIFD()
testV()
testM()
testP()
testKR()
|
mcdeoliveira/ctrl
|
test/test_packet.py
|
Python
|
apache-2.0
| 5,810 | 0.012909 |
"""
A Tkinter based backend for piddle.
Perry A. Stoll
Created: February 15, 1999
Requires PIL for rotated string support.
Known Problems:
- Doesn't handle the interactive commands yet.
- PIL based canvas inherits lack of underlining strings from piddlePIL
You can find the latest version of this file:
via http://piddle.sourceforge.net
"""
# we depend on PIL for rotated strings so watch for changes in PIL
import Tkinter, tkFont
tk = Tkinter
import rdkit.sping.pid
__version__ = "0.3"
__date__ = "April 8, 1999"
__author__ = "Perry Stoll, perry.stoll@mail.com "
# fixups by chris lee, cwlee@artsci.wustl.edu
# $Id$
# - added drawImage scaling support
# - shifted baseline y parameter in drawString to work around font metric
# shift due to Tkinter's Canvas text_item object
# - fixed argument names so that argument keywords agreed with piddle.py (passes discipline.py)
#
#
# ToDo: for TKCanvas
# make sure that fontHeight() is returning an appropriate measure. Where is this info?
#
# $Log: pidTK.py,v $
# Revision 1.1 2002/07/12 18:34:47 glandrum
# added
#
# Revision 1.6 2000/11/03 00:56:57 clee
# fixed sizing error in TKCanvas
#
# Revision 1.5 2000/11/03 00:25:37 clee
# removed reference to "BaseTKCanvas" (should just use TKCanvas as default)
#
# Revision 1.4 2000/10/29 19:35:31 clee
# eliminated BaseTKCanvas in favor of straightforward "TKCanvas" name
#
# Revision 1.3 2000/10/29 01:57:41 clee
# - added scrollbar support to both TKCanvas and TKCanvasPIL
# - added getTKCanvas() access method to TKCanvasPIL
#
# Revision 1.2 2000/10/15 00:47:17 clee
# commit before continuing after getting pil to work as package
#
# Revision 1.1.1.1 2000/09/27 03:53:15 clee
# Simple Platform Independent Graphics
#
# Revision 1.6 2000/04/06 01:55:34 pmagwene
# - TKCanvas now uses multiple inheritance from Tkinter.Canvas and piddle.Canvas
# * for the most part works much like a normal Tkinter.Canvas object
# - TKCanvas draws rotated strings using PIL image, other objects using normal Tk calls
# - Minor fixes to FontManager and TKCanvas so can specify root window other than Tk()
# - Removed Quit/Clear buttons from default canvas
#
# Revision 1.5 2000/03/12 07:07:42 clee
# sync with 1_x
#
# Revision 1.4 2000/02/26 23:12:42 clee
# turn off compression by default on piddlePDF
# add doc string to new pil-based piddleTK
#
# Revision 1.3 2000/02/26 21:23:19 clee
# update that makes PIL based TKCanvas the default Canvas for TK.
# Updated piddletest.py. Also, added clear() method to piddlePIL's
# canvas; it clears to "white". Is this correct behavior? Not well
# specified in current documents.
#
class FontManager:
__alt_faces = {"serif": "Times", "sansserif": "Helvetica", "monospaced": "Courier"}
def __init__(self, master):
self.master = master
self.font_cache = {}
# the main interface
def stringWidth(self, s, font):
tkfont = self.piddleToTkFont(font)
return tkfont.measure(s)
def fontHeight(self, font):
tkfont = self.piddleToTkFont(font)
return self._tkfontHeight(tkfont)
def fontAscent(self, font):
tkfont = self.piddleToTkFont(font)
return self._tkfontAscent(tkfont)
def fontDescent(self, font):
tkfont = self.piddleToTkFont(font)
return self._tkfontDescent(tkfont)
def getTkFontString(self, font):
"""Return a string suitable to pass as the -font option to
to a Tk widget based on the piddle-style FONT"""
tkfont = self.piddleToTkFont(font)
# XXX: should just return the internal tk font name?
# return str(tkfont)
return ('-family %(family)s -size %(size)s '
'-weight %(weight)s -slant %(slant)s '
'-underline %(underline)s' % tkfont.config())
def getTkFontName(self, font):
"""Return a the name associated with the piddle-style FONT"""
tkfont = self.piddleToTkFont(font)
return str(tkfont)
def piddleToTkFont(self, font):
"""Return a tkFont instance based on the pid-style FONT"""
if font is None:
return ''
#default 12 pt, "Times", non-bold, non-italic
size = 12
family = "Times"
weight = "normal"
slant = "roman"
underline = "false"
if font.face:
# check if the user specified a generic face type
      # like serif or monospaced. The check is case-insensitive.
f = font.face.lower()
if f in self.__alt_faces:
family = self.__alt_faces[f]
else:
family = font.face
size = font.size or 12
if font.bold:
weight = "bold"
if font.italic:
slant = "italic"
if font.underline:
underline = 'true'
# ugh... is there a better way to do this?
key = (family, size, weight, slant, underline)
# check if we've already seen this font.
if key in self.font_cache:
# yep, don't bother creating a new one. just fetch it.
font = self.font_cache[key]
else:
# nope, let's create a new tk font.
      # this way we will return info about the actual font
      # selected by Tk, which may be different from what we ask
      # for if it's not available.
font = tkFont.Font(self.master, family=family, size=size, weight=weight, slant=slant,
underline=underline)
self.font_cache[(family, size, weight, slant, underline)] = font
return font
  def _tkfontHeight(self, tkfont):
    # Tk's "linespace" metric is the full line height (ascent + descent);
    # fontHeight() above relies on this helper.
    return tkfont.metrics("linespace")
  def _tkfontAscent(self, tkfont):
    return tkfont.metrics("ascent")
  def _tkfontDescent(self, tkfont):
    return tkfont.metrics("descent")
class TKCanvas(tk.Canvas, rdkit.sping.pid.Canvas):
__TRANSPARENT = '' # transparent for Tk color
def __init__(self,
size=(300, 300),
name="sping.TK",
master=None,
scrollingViewPortSize=None, # a 2-tuple to define the size of the viewport
**kw):
"""This canvas allows you to add a tk.Canvas with a sping API for drawing.
    To add scrollbars, the simplest method is to set 'scrollingViewPortSize'
    to a tuple that describes the width and height of the visible portion
    of the canvas on screen. This sets scrollregion=(0,0, size[0], size[1]).
    Then you can add scrollbars as you would to any tk.Canvas.
    Note, because this is a subclass of tk.Canvas, you can use the normal keywords
    to specify a tk.Canvas with scrollbars; however, you should then be careful to
    set the "scrollregion" option to the same size as the 'size' passed to __init__.
    Tkinter's scrollregion option essentially makes 'size' ignored. """
    rdkit.sping.pid.Canvas.__init__(self, size=size, name=name)
if scrollingViewPortSize: # turn on ability to scroll
kw["scrollregion"] = (0, 0, size[0], size[1])
kw["height"] = scrollingViewPortSize[0]
kw["width"] = scrollingViewPortSize[1]
else:
kw["width"] = size[0]
kw["height"] = size[1]
    tk.Canvas.__init__(self, master, **kw)  # use kw to pass other tk.Canvas options
self.config(background="white")
self.width, self.height = size
self._font_manager = FontManager(self)
self._configure()
self._item_ids = []
self._images = []
def _configure(self):
pass
def _display(self):
self.flush()
self.mainloop()
def _quit(self):
self.quit()
# Hmmm...the postscript generated by this causes my Ghostscript to barf...
def _to_ps_file(self, filename):
self.postscript(file=filename)
def isInteractive(self):
return 0
def onOver(self, event):
pass
def onClick(self, event):
pass
def onKey(self, event):
pass
def flush(self):
tk.Canvas.update(self)
def clear(self):
map(self.delete, self._item_ids)
self._item_ids = []
def _colorToTkColor(self, c):
return "#%02X%02X%02X" % (int(c.red * 255), int(c.green * 255), int(c.blue * 255))
def _getTkColor(self, color, defaultColor):
if color is None:
color = defaultColor
if color is rdkit.sping.pid.transparent:
color = self.__TRANSPARENT
else:
color = self._colorToTkColor(color)
return color
def drawLine(self, x1, y1, x2, y2, color=None, width=None):
color = self._getTkColor(color, self.defaultLineColor)
if width is None:
width = self.defaultLineWidth
new_item = self.create_line(x1, y1, x2, y2, fill=color, width=width)
self._item_ids.append(new_item)
# NYI: curve with fill
#def drawCurve(self, x1, y1, x2, y2, x3, y3, x4, y4,
# edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
#
def stringWidth(self, s, font=None):
return self._font_manager.stringWidth(s, font or self.defaultFont)
def fontAscent(self, font=None):
return self._font_manager.fontAscent(font or self.defaultFont)
def fontDescent(self, font=None):
return self._font_manager.fontDescent(font or self.defaultFont)
def drawString(self, s, x, y, font=None, color=None, angle=None):
if angle:
try:
self._drawRotatedString(s, x, y, font, color, angle)
return
except ImportError:
print("PIL not available. Using unrotated strings.")
# fudge factor for TK on linux (at least)
# strings are being drawn using create_text in canvas
y = y - self.fontHeight(font) * .28 # empirical
#y = y - self.fontDescent(font)
color = self._getTkColor(color, self.defaultLineColor)
font = self._font_manager.getTkFontString(font or self.defaultFont)
new_item = self.create_text(x, y, text=s, font=font, fill=color, anchor=Tkinter.W)
self._item_ids.append(new_item)
def _drawRotatedString(self, s, x, y, font=None, color=None, angle=0):
# we depend on PIL for rotated strings so watch for changes in PIL
try:
import rdkit.sping.PIL.pidPIL
from PIL import Image, ImageTk
pp = rdkit.sping.PIL.pidPIL
except ImportError:
raise ImportError("Rotated strings only possible with PIL support")
pilCan = pp.PILCanvas(size=(self.width, self.height))
pilCan.defaultFont = self.defaultFont
pilCan.defaultLineColor = self.defaultLineColor
if '\n' in s or '\r' in s:
self.drawMultiLineString(s, x, y, font, color, angle)
return
if not font:
font = pilCan.defaultFont
if not color:
color = self.defaultLineColor
if color == rdkit.sping.pid.transparent:
return
# draw into an offscreen Image
    tempsize = int(pilCan.stringWidth(s, font) * 1.2)  # PIL image sizes must be ints
tempimg = Image.new('RGB', (tempsize, tempsize), (0, 0, 0))
txtimg = Image.new('RGB', (tempsize, tempsize), (255, 255, 255))
from PIL import ImageDraw
temppen = ImageDraw.ImageDraw(tempimg)
temppen.setink((255, 255, 255))
pilfont = pp._pilFont(font)
if not pilfont:
raise ValueError("Bad font: %s" % font)
temppen.setfont(pilfont)
pos = [4, int(tempsize / 2 - pilCan.fontAscent(font)) - pilCan.fontDescent(font)]
temppen.text(pos, s)
pos[1] = int(tempsize / 2)
# rotate
if angle:
from math import pi, sin, cos
tempimg = tempimg.rotate(angle, Image.BILINEAR)
temppen = ImageDraw.ImageDraw(tempimg)
radians = -angle * pi / 180.0
r = tempsize / 2 - pos[0]
pos[0] = int(tempsize / 2 - r * cos(radians))
pos[1] = int(pos[1] - r * sin(radians))
###temppen.rectangle( (pos[0],pos[1],pos[0]+2,pos[1]+2) ) # PATCH for debugging
# colorize, and copy it in
mask = tempimg.convert('L').point(lambda c: c)
temppen.setink((color.red * 255, color.green * 255, color.blue * 255))
temppen.setfill(1)
temppen.rectangle((0, 0, tempsize, tempsize))
txtimg.paste(tempimg, (0, 0), mask)
##Based on code posted by John Michelson in the PIL SIG
transp = txtimg.convert("RGBA")
source = transp.split()
R, G, B, A = 0, 1, 2, 3
mask = transp.point(lambda i: i < 255 and 255) # use white as transparent
source[A].paste(mask)
transp = Image.merge(transp.mode, source) # build a new multiband image
self.drawImage(transp, x - pos[0], y - pos[1])
def drawRect(self, x1, y1, x2, y2, edgeColor=None, edgeWidth=None, fillColor=None):
fillColor = self._getTkColor(fillColor, self.defaultFillColor)
edgeColor = self._getTkColor(edgeColor, self.defaultLineColor)
if edgeWidth is None:
edgeWidth = self.defaultLineWidth
new_item = self.create_rectangle(x1, y1, x2, y2, fill=fillColor, width=edgeWidth,
outline=edgeColor)
self._item_ids.append(new_item)
# NYI:
#def drawRoundRect(self, x1,y1, x2,y2, rx=5, ry=5,
# edgeColor=None, edgeWidth=None, fillColor=None):
def drawEllipse(self, x1, y1, x2, y2, edgeColor=None, edgeWidth=None, fillColor=None):
fillColor = self._getTkColor(fillColor, self.defaultFillColor)
edgeColor = self._getTkColor(edgeColor, self.defaultLineColor)
if edgeWidth is None:
edgeWidth = self.defaultLineWidth
new_item = self.create_oval(x1, y1, x2, y2, fill=fillColor, outline=edgeColor, width=edgeWidth)
self._item_ids.append(new_item)
def drawArc(self, x1, y1, x2, y2, startAng=0, extent=360, edgeColor=None, edgeWidth=None,
fillColor=None):
fillColor = self._getTkColor(fillColor, self.defaultFillColor)
edgeColor = self._getTkColor(edgeColor, self.defaultLineColor)
if edgeWidth is None:
edgeWidth = self.defaultLineWidth
new_item = self.create_arc(x1, y1, x2, y2, start=startAng, extent=extent, fill=fillColor,
width=edgeWidth, outline=edgeColor)
self._item_ids.append(new_item)
def drawPolygon(self, pointlist, edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
fillColor = self._getTkColor(fillColor, self.defaultFillColor)
edgeColor = self._getTkColor(edgeColor, self.defaultLineColor)
if edgeWidth is None:
edgeWidth = self.defaultLineWidth
if closed:
# draw a closed shape
new_item = self.create_polygon(pointlist, fill=fillColor, width=edgeWidth, outline=edgeColor)
else:
if fillColor == self.__TRANSPARENT:
# draw open-ended set of lines
d = {'fill': edgeColor, 'width': edgeWidth}
        new_item = self.create_line(*pointlist, **d)
else:
# open filled shape.
# draw it twice:
# once as a polygon with no edge outline with the fill color
# and once as an open set of lines of the appropriate color
new_item = self.create_polygon(pointlist, fill=fillColor, outline=self.__TRANSPARENT)
self._item_ids.append(new_item)
d = {'fill': edgeColor, 'width': edgeWidth}
        new_item = self.create_line(*pointlist, **d)
self._item_ids.append(new_item)
#def drawFigure(self, partList,
# edgeColor=None, edgeWidth=None, fillColor=None):
# use default implementation
def drawImage(self, image, x1, y1, x2=None, y2=None):
try:
from PIL import ImageTk
except ImportError:
      raise NotImplementedError('drawImage - requires the ImageTk module')
w, h = image.size
if not x2:
x2 = w + x1
if not y2:
y2 = h + y1
if (w != x2 - x1) or (h != y2 - y1): # need to scale image
myimage = image.resize((x2 - x1, y2 - y1))
else:
myimage = image
# unless I keep a copy of this PhotoImage, it seems to be garbage collected
# and the image is removed from the display after this function. weird
itk = ImageTk.PhotoImage(myimage, master=self)
new_item = self.create_image(x1, y1, image=itk, anchor=Tkinter.NW)
self._item_ids.append(new_item)
self._images.append(itk)
try:
import rdkit.sping.PIL
class TKCanvasPIL(rdkit.sping.PIL.PILCanvas):
"""This canvas maintains a PILCanvas as its backbuffer. Drawing calls
are made to the backbuffer and flush() sends the image to the screen
using TKCanvas.
You can also save what is displayed to a file in any of the formats
supported by PIL"""
def __init__(self, size=(300, 300), name='TKCanvas', master=None, **kw):
rdkit.sping.PIL.PILCanvas.__init__(self, size=size, name=name)
      self._tkcanvas = TKCanvas(size, name, master, **kw)
def flush(self):
rdkit.sping.PIL.PILCanvas.flush(self) # call inherited one first
self._tkcanvas.drawImage(self._image, 0, 0) # self._image should be a PIL image
self._tkcanvas.flush()
def getTKCanvas(self):
return self._tkcanvas
except ImportError:
raise ImportError("TKCanvasPIL requires sping PIL Canvas, PIL may not be installed")
|
rvianello/rdkit
|
rdkit/sping/TK/pidTK.py
|
Python
|
bsd-3-clause
| 16,537 | 0.010099 |
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import logging
import re
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
from kamaki.clients import astakos, cyclades
from kamaki.clients import ClientError
from kamaki.cli.config import Config as KamakiConfig
from fokia.utils import patch_certs
from fokia.cluster_error_constants import *
from Crypto.PublicKey import RSA
from base64 import b64encode
storage_templates = ['drdb', 'ext_vlmc']
class Provisioner:
"""
provisions virtual machines on ~okeanos
"""
def __init__(self, auth_token, cloud_name=None):
if auth_token is None and cloud_name is not None:
# Load .kamakirc configuration
logger.info("Retrieving .kamakirc configuration")
self.config = KamakiConfig()
patch_certs(self.config.get('global', 'ca_certs'))
cloud_section = self.config._sections['cloud'].get(cloud_name)
if not cloud_section:
message = "Cloud '%s' was not found in you .kamakirc configuration file. " \
"Currently you have availablie in your configuration these clouds: %s"
raise KeyError(message % (cloud_name, self.config._sections['cloud'].keys()))
# Get the authentication url and token
auth_url, auth_token = cloud_section['url'], cloud_section['token']
else:
auth_url = "https://accounts.okeanos.grnet.gr/identity/v2.0"
logger.info("Initiating Astakos Client")
self.astakos = astakos.AstakosClient(auth_url, auth_token)
logger.info("Retrieving cyclades endpoint url")
compute_url = self.astakos.get_endpoint_url(
cyclades.CycladesComputeClient.service_type)
logger.info("Initiating Cyclades client")
self.cyclades = cyclades.CycladesComputeClient(compute_url, auth_token)
# Create the network client
networkURL = self.astakos.get_endpoint_url(
cyclades.CycladesNetworkClient.service_type)
self.network_client = cyclades.CycladesNetworkClient(networkURL, auth_token)
# Constants
self.Bytes_to_GB = 1024 * 1024 * 1024
self.Bytes_to_MB = 1024 * 1024
self.master = None
self.ips = None
self.slaves = None
self.vpn = None
self.subnet = None
self.private_key = None
self.image_id = 'c6f5adce-21ad-4ce3-8591-acfe7eb73c02'
"""
FIND RESOURCES
"""
def find_flavor(self, **kwargs):
"""
:param kwargs: should contains the keys that specify the specs
:return: first flavor objects that matches the specs criteria
"""
# Set all the default parameters
kwargs.setdefault("vcpus", 1)
kwargs.setdefault("ram", 1024)
kwargs.setdefault("disk", 40)
kwargs.setdefault("SNF:allow_create", True)
logger.info("Retrieving flavor")
for flavor in self.cyclades.list_flavors(detail=True):
if all([kwargs[key] == flavor[key]
for key in set(flavor.keys()).intersection(kwargs.keys())]):
return flavor
return None
def find_image(self, **kwargs):
"""
        :param kwargs: must contain 'image_name', the name to filter images by
        :return: first image object whose name contains the given image_name
"""
image_name = kwargs['image_name']
logger.info("Retrieving image")
for image in self.cyclades.list_images(detail=True):
if image_name in image['name']:
return image
return None
def find_project_id(self, **kwargs):
"""
:param kwargs: name, state, owner and mode to filter project by
:return: first project_id that matches the project name
"""
filter = {
'name': kwargs.get("project_name"),
'state': kwargs.get("project_state"),
'owner': kwargs.get("project_owner"),
'mode': kwargs.get("project_mode"),
}
logger.info("Retrieving project")
return self.astakos.get_projects(**filter)[0]
"""
CREATE RESOURCES
"""
def create_lambda_cluster(self, vm_name, wait=True, **kwargs):
"""
:param vm_name: hostname of the master
:param kwargs: contains specifications of the vms.
:return: dictionary object with the nodes of the cluster if it was successfully created
"""
quotas = self.get_quotas()
vcpus = kwargs['slaves'] * kwargs['vcpus_slave'] + kwargs['vcpus_master']
ram = kwargs['slaves'] * kwargs['ram_slave'] + kwargs['ram_master']
disk = kwargs['slaves'] * kwargs['disk_slave'] + kwargs['disk_master']
project_id = self.find_project_id(**kwargs)['id']
cluster_size = kwargs['slaves'] + 1
response = self.check_all_resources(quotas, cluster_size=cluster_size,
vcpus=vcpus,
ram=ram,
disk=disk,
ip_allocation=kwargs['ip_allocation'],
network_request=kwargs['network_request'],
project_name=kwargs['project_name'])
if response:
# Check flavors for master and slaves
master_flavor = self.find_flavor(vcpus=kwargs['vcpus_master'],
ram=kwargs['ram_master'],
disk=kwargs['disk_master'])
if not master_flavor:
msg = 'This flavor does not allow create.'
raise ClientError(msg, error_flavor_list)
slave_flavor = self.find_flavor(vcpus=kwargs['vcpus_slave'],
ram=kwargs['ram_slave'],
disk=kwargs['disk_slave'])
if not slave_flavor:
msg = 'This flavor does not allow create.'
raise ClientError(msg, error_flavor_list)
# Get ssh keys
key = RSA.generate(2048)
self.private_key = key.exportKey('PEM')
pub_key = key.publickey().exportKey('OpenSSH') + ' root'
public = dict(contents=b64encode(pub_key),
path='/root/.ssh/id_rsa.pub',
                          owner='root', group='root', mode=0o600)
authorized = dict(contents=b64encode(pub_key),
path='/root/.ssh/authorized_keys',
                              owner='root', group='root', mode=0o600)
private = dict(contents=b64encode(self.private_key),
path='/root/.ssh/id_rsa',
                           owner='root', group='root', mode=0o600)
master_personality = [authorized, public, private]
slave_personality = [authorized]
# Create private network for cluster
self.vpn = self.create_vpn('lambda-vpn', project_id=project_id)
vpn_id = self.vpn['id']
self.create_private_subnet(vpn_id)
master_ip = None
slave_ips = [None] * kwargs['slaves']
# reserve ip
if kwargs['ip_allocation'] in ["master", "all"]:
master_ip = self.reserve_ip(project_id=project_id)
if kwargs['ip_allocation'] == "all":
slave_ips = [self.reserve_ip(project_id=project_id)
for i in range(kwargs['slaves'])]
self.ips = [ip for ip in [master_ip] + slave_ips if ip]
self.master = self.create_vm(vm_name=vm_name, ip=master_ip,
net_id=vpn_id,
flavor=master_flavor,
personality=master_personality,
**kwargs)
# Create slaves
self.slaves = list()
for i in range(kwargs['slaves']):
slave_name = 'lambda-node' + str(i + 1)
slave = self.create_vm(vm_name=slave_name,
ip=slave_ips[i],
net_id=vpn_id,
flavor=slave_flavor,
personality=slave_personality,
**kwargs)
self.slaves.append(slave)
# Wait for VMs to complete being built
if wait:
self.cyclades.wait_server(server_id=self.master['id'])
for slave in self.slaves:
self.cyclades.wait_server(slave['id'])
# Create cluster dictionary object
inventory = {
"master": self.master,
"slaves": self.slaves
}
return inventory
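    # Usage sketch (not part of the class): the kwargs below mirror the keys
    # read by create_lambda_cluster; the token and project name are
    # placeholders, not real credentials.
    #
    #   provisioner = Provisioner(auth_token="<token>")
    #   cluster = provisioner.create_lambda_cluster(
    #       "lambda-master", slaves=2,
    #       vcpus_master=4, vcpus_slave=2,
    #       ram_master=4096, ram_slave=2048,
    #       disk_master=40, disk_slave=20,
    #       ip_allocation="master", network_request=1,
    #       project_name="lambda.grnet.gr")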
def create_vm(self, vm_name=None, image_id=None,
ip=None, personality=None, flavor=None, **kwargs):
"""
:param vm_name: Name of the virtual machine to create
:param image_id: image id if you want another image than the default
:param kwargs: passed to the functions called for detail options
:return:
"""
flavor_id = flavor['id']
# Get image
        if image_id is None:
image_id = self.image_id
else:
image_id = self.find_image(**kwargs)['id']
project_id = self.find_project_id(**kwargs)['id']
networks = list()
if ip:
ip_obj = dict()
ip_obj['uuid'] = ip['floating_network_id']
ip_obj['fixed_ip'] = ip['floating_ip_address']
networks.append(ip_obj)
networks.append({'uuid': kwargs['net_id']})
        if personality is None:
personality = []
try:
okeanos_response = self.cyclades.create_server(name=vm_name,
flavor_id=flavor_id,
image_id=image_id,
project_id=project_id,
networks=networks,
personality=personality)
except ClientError as ex:
raise ex
return okeanos_response
def create_vpn(self, network_name, project_id):
"""
Creates a virtual private network
:param network_name: name of the network
:return: the virtual network object
"""
try:
# Create vpn with custom type and the name given as argument
vpn = self.network_client.create_network(
type=self.network_client.network_types[1],
name=network_name,
project_id=project_id)
return vpn
except ClientError as ex:
raise ex
def reserve_ip(self, project_id):
"""
Reserve ip
        :return: the ip object if successful
"""
# list_float_ips = self.network_client.list_floatingips()
# for ip in list_float_ips:
# if ip['instance_id'] is None and ip['port_id'] is None and ip not in ips:
# return ip
try:
ip = self.network_client.create_floatingip(project_id=project_id)
return ip
except ClientError as ex:
raise ex
def create_private_subnet(self, net_id, cidr='192.168.0.0/24', gateway_ip='192.168.0.1'):
"""
        Creates a private subnet and connects it to this network
:param net_id: id of the network
        :return: the id of the subnet if successful
"""
try:
subnet = self.network_client.create_subnet(net_id, cidr,
gateway_ip=gateway_ip,
enable_dhcp=True)
self.subnet = subnet
return subnet['id']
except ClientError as ex:
raise ex
def connect_vm(self, vm_id, net_id):
"""
Connects the vm with this id to the network with the net_id
:param vm_id: id of the vm
:param net_id: id of the network
        :return: True if successful
"""
try:
port = self.network_client.create_port(network_id=net_id,
device_id=vm_id)
return True
except ClientError as ex:
raise ex
def attach_authorized_ip(self, ip, vm_id):
"""
Attach the authorized ip with this id to the vm
        :param ip: the authorized (floating) ip object to attach
        :param vm_id: id of the vm
        :return: True if successful
"""
try:
port = self.network_client.create_port(network_id=ip['floating_network_id'],
device_id=vm_id,
fixed_ips=[dict(
ip_address=ip['floating_ip_address']), ])
return True
except ClientError as ex:
raise ex
"""
DELETE RESOURCES
"""
def delete_lambda_cluster(self, details):
"""
Delete a lambda cluster
:param details: details of the cluster we want to delete
        :return: True if successful
"""
# Delete every node
nodes = details['nodes']
for node in nodes:
if (not self.delete_vm(node)):
                msg = 'Error deleting node with id %s' % node
raise ClientError(msg, error_fatal)
# Wait to complete deleting VMs
for node in nodes:
self.cyclades.wait_server(server_id=node, current_status='ACTIVE')
# Delete vpn
vpn = details['vpn']
if (not self.delete_vpn(vpn)):
            msg = 'Error deleting vpn with id %s' % vpn
raise ClientError(msg, error_fatal)
def delete_vm(self, vm_id):
"""
Delete a vm
:param vm_id: id of the vm we want to delete
        :return: True if successful
"""
try:
self.cyclades.delete_server(vm_id)
return True
except ClientError as ex:
raise ex
def delete_vpn(self, net_id):
"""
Delete a virtual private network
:param net_id: id of the network we want to delete
        :return: True if successful
"""
try:
self.network_client.delete_network(net_id)
return True
except ClientError as ex:
raise ex
"""
GET RESOURCES
"""
def get_cluster_details(self):
"""
:returns: dictionary of basic details for the cluster
"""
details = dict()
nodes = dict()
master = dict()
master['id'] = self.master['id']
master['name'] = self.master['name']
master['adminPass'] = self.master['adminPass']
nodes['master'] = master
slaves = list()
for slave in self.slaves:
slave_obj = dict()
slave_obj['id'] = slave['id']
slave_obj['name'] = slave['name']
name = slave_obj['name']
slaves.append(slave_obj)
nodes['slaves'] = slaves
details['nodes'] = nodes
vpn = dict()
vpn['id'] = self.vpn['id']
vpn['type'] = self.vpn['type']
details['vpn'] = vpn
details['ips'] = self.ips
ips_list = list()
for ip in self.ips:
ip_obj = dict()
ip_obj['floating_network_id'] = ip['floating_network_id']
ip_obj['floating_ip_address'] = ip['floating_ip_address']
ip_obj['id'] = ip['id']
ips_list.append(ip_obj)
details['ips'] = ips_list
subnet = dict()
subnet['id'] = self.subnet['id']
subnet['cidr'] = self.subnet['cidr']
subnet['gateway_ip'] = self.subnet['gateway_ip']
details['subnet'] = subnet
return details
def get_private_key(self):
"""
:returns: Private key of master
"""
return self.private_key
def get_quotas(self, **kwargs):
"""
Get the user quotas for the defined project.
:return: user quotas object
"""
return self.astakos.get_quotas()
def get_server_info(self, server_id):
"""
"""
return self.cyclades.get_server_details(server_id=server_id)
def get_server_authorized_ip(self, server_id):
"""
:param server_id: id of the server
:returns: the authorized ip of the server if it has one,else None
"""
addresses = self.get_server_info(server_id=server_id)['addresses']
for key in list(addresses.keys()):
ip = addresses[key][0]['addr']
if '192.168.0' not in ip and not re.search('[a-zA-Z]', ip):
return ip
return None
def get_server_private_ip(self, server_id):
"""
:param server_id: id of the server
:returns: the private ip of the server if it has one,else None
"""
addresses = self.get_server_info(server_id=server_id)['addresses']
for key in list(addresses.keys()):
ip = addresses[key][0]['addr']
if '192.168.0' in ip:
return ip
return None
"""
CHECK RESOURCES
"""
def check_all_resources(self, quotas, **kwargs):
"""
        Checks the user's quota for every requested resource.
        Returns True if everything is available; raises ClientError otherwise.
        :param quotas: quotas object as returned by get_quotas()
        :param kwargs: cluster_size, vcpus, ram, disk, ip_allocation,
                       network_request and project_name
"""
project_id = self.find_project_id(**kwargs)['id']
# Check for VMs
pending_vm = quotas[project_id]['cyclades.vm']['project_pending']
limit_vm = quotas[project_id]['cyclades.vm']['project_limit']
usage_vm = quotas[project_id]['cyclades.vm']['project_usage']
available_vm = limit_vm - usage_vm - pending_vm
if available_vm < kwargs['cluster_size']:
msg = 'Cyclades VMs out of limit'
raise ClientError(msg, error_quotas_cluster_size)
# Check for CPUs
pending_cpu = quotas[project_id]['cyclades.cpu']['project_pending']
limit_cpu = quotas[project_id]['cyclades.cpu']['project_limit']
usage_cpu = quotas[project_id]['cyclades.cpu']['project_usage']
available_cpu = limit_cpu - usage_cpu - pending_cpu
if available_cpu < kwargs['vcpus']:
msg = 'Cyclades cpu out of limit'
raise ClientError(msg, error_quotas_cpu)
# Check for RAM
pending_ram = quotas[project_id]['cyclades.ram']['project_pending']
limit_ram = quotas[project_id]['cyclades.ram']['project_limit']
usage_ram = quotas[project_id]['cyclades.ram']['project_usage']
available_ram = (limit_ram - usage_ram - pending_ram) / self.Bytes_to_MB
if available_ram < kwargs['ram']:
msg = 'Cyclades ram out of limit'
raise ClientError(msg, error_quotas_ram)
# Check for Disk space
        pending_cd = quotas[project_id]['cyclades.disk']['project_pending']
limit_cd = quotas[project_id]['cyclades.disk']['project_limit']
usage_cd = quotas[project_id]['cyclades.disk']['project_usage']
available_cyclades_disk_GB = (limit_cd - usage_cd - pending_cd) / self.Bytes_to_GB
if available_cyclades_disk_GB < kwargs['disk']:
msg = 'Cyclades disk out of limit'
raise ClientError(msg, error_quotas_cyclades_disk)
# Check for authorized IPs
list_float_ips = self.network_client.list_floatingips()
pending_ips = quotas[project_id]['cyclades.floating_ip']['project_pending']
limit_ips = quotas[project_id]['cyclades.floating_ip']['project_limit']
usage_ips = quotas[project_id]['cyclades.floating_ip']['project_usage']
available_ips = limit_ips - usage_ips - pending_ips
# TODO: figure out how to handle unassigned floating ips
# for d in list_float_ips:
# if d['instance_id'] is None and d['port_id'] is None:
# available_ips += 1
if (kwargs['ip_allocation'] == "master" and available_ips < 1) or \
(kwargs['ip_allocation'] == "all" and available_ips < kwargs['cluster_size']):
msg = 'authorized IPs out of limit'
raise ClientError(msg, error_get_ip)
# Check for networks
pending_net = quotas[project_id]['cyclades.network.private']['project_pending']
limit_net = quotas[project_id]['cyclades.network.private']['project_limit']
usage_net = quotas[project_id]['cyclades.network.private']['project_usage']
available_networks = limit_net - usage_net - pending_net
if available_networks < kwargs['network_request']:
msg = 'Private Network out of limit'
raise ClientError(msg, error_get_network_quota)
return True
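# Sketch (assumes quota entries keep the dict shape used above) of the
# arithmetic repeated for every resource in check_all_resources:
# available = project_limit - project_usage - project_pending.
def _available(quota_entry):
    return (quota_entry['project_limit']
            - quota_entry['project_usage']
            - quota_entry['project_pending'])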
|
themiszamani/okeanos-LoD
|
core/fokia/provisioner.py
|
Python
|
agpl-3.0
| 21,373 | 0.001544 |
#!/usr/bin/python
import sys
import urllib2
RAINX_STAT_KEYS = [
("rainx.reqpersec", "total_reqpersec"),
("rainx.reqputpersec", "put_reqpersec"),
("rainx.reqgetpersec", "get_reqpersec"),
("rainx.avreqtime", "total_avreqtime"),
("rainx.avputreqtime", "put_avreqtime"),
("rainx.avgetreqtime", "get_avreqtime"),
]
def parse_info(stream):
data = {}
for line in stream.readlines():
parts = line.split()
if len(parts) > 1:
# try to cast value to int or float
try:
value = int(parts[1])
except ValueError:
try:
value = float(parts[1])
except ValueError:
value = parts[1]
data[parts[0]] = value
else:
data[parts[0]] = None
return data
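# Example of what parse_info produces (a sketch; the /stat endpoint emits
# "key value" lines, and values are cast to int or float when possible):
#   "rainx.reqpersec 12\nrainx.avreqtime 0.5\n"
#   -> {"rainx.reqpersec": 12, "rainx.avreqtime": 0.5}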
def get_stat_lines(url, stat_keys):
stream = urllib2.urlopen(url)
data = parse_info(stream)
stream.close()
stats = [("stat.%s = %s" % (k[1], str(data[k[0]])))
for k in stat_keys if k[0] in data]
return stats
def main(args):
ip_port = args[1].split("|")[2]
stats_url = "http://%s/stat" % ip_port
for stat in get_stat_lines(stats_url, RAINX_STAT_KEYS):
print stat
if __name__ == "__main__":
main(sys.argv)
|
redcurrant/redcurrant
|
svc-monitor/contrib/rainx-monitor.py
|
Python
|
lgpl-3.0
| 1,285 | 0.010117 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-28 20:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='PipelineRelease',
fields=[
('pipeline_id', models.AutoField(db_column='PIPELINE_ID', primary_key=True, serialize=False)),
('description', models.TextField(blank=True, db_column='DESCRIPTION', null=True)),
('changes', models.TextField(db_column='CHANGES')),
('release_version', models.CharField(db_column='RELEASE_VERSION', max_length=20)),
('release_date', models.DateField(db_column='RELEASE_DATE')),
],
options={
'db_table': 'PIPELINE_RELEASE',
},
),
migrations.CreateModel(
name='PipelineTool',
fields=[
('tool_id', models.SmallIntegerField(db_column='TOOL_ID', primary_key=True, serialize=False)),
('tool_name', models.CharField(db_column='TOOL_NAME', max_length=30)),
('description', models.TextField(db_column='DESCRIPTION')),
('web_link', models.CharField(blank=True, db_column='WEB_LINK', max_length=500, null=True)),
('version', models.CharField(db_column='VERSION', max_length=30)),
('exe_command', models.CharField(db_column='EXE_COMMAND', max_length=500)),
('installation_dir', models.CharField(blank=True, db_column='INSTALLATION_DIR', max_length=200, null=True)),
('configuration_file', models.TextField(blank=True, db_column='CONFIGURATION_FILE', null=True)),
('notes', models.TextField(blank=True, db_column='NOTES', null=True)),
],
options={
'db_table': 'PIPELINE_TOOL',
},
),
migrations.CreateModel(
name='PipelineReleaseTool',
fields=[
('pipeline', models.ForeignKey(db_column='PIPELINE_ID', on_delete=django.db.models.deletion.DO_NOTHING, primary_key=True, serialize=False, to='emgapi.PipelineRelease')),
('tool', models.ForeignKey(db_column='TOOL_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.PipelineTool')),
('tool_group_id', models.DecimalField(db_column='TOOL_GROUP_ID', decimal_places=3, max_digits=6)),
('how_tool_used_desc', models.TextField(db_column='HOW_TOOL_USED_DESC')),
],
options={
'db_table': 'PIPELINE_RELEASE_TOOL',
},
),
migrations.CreateModel(
name='AnalysisStatus',
fields=[
('analysis_status_id', models.AutoField(db_column='ANALYSIS_STATUS_ID', primary_key=True, serialize=False)),
('analysis_status', models.CharField(db_column='ANALYSIS_STATUS', max_length=25)),
],
options={
'db_table': 'ANALYSIS_STATUS',
},
),
migrations.CreateModel(
name='BiomeHierarchyTree',
fields=[
('biome_id', models.SmallIntegerField(db_column='BIOME_ID', primary_key=True, serialize=False)),
('biome_name', models.CharField(db_column='BIOME_NAME', max_length=60)),
('lft', models.SmallIntegerField(db_column='LFT')),
('rgt', models.SmallIntegerField(db_column='RGT')),
('depth', models.IntegerField(db_column='DEPTH')),
('lineage', models.CharField(db_column='LINEAGE', max_length=500)),
],
options={
'db_table': 'BIOME_HIERARCHY_TREE',
},
),
migrations.CreateModel(
name='Publication',
fields=[
('pub_id', models.AutoField(db_column='PUB_ID', primary_key=True, serialize=False)),
('authors', models.CharField(blank=True, db_column='AUTHORS', max_length=4000, null=True)),
('doi', models.CharField(blank=True, db_column='DOI', max_length=1500, null=True)),
('isbn', models.CharField(blank=True, db_column='ISBN', max_length=100, null=True)),
('iso_journal', models.CharField(blank=True, db_column='ISO_JOURNAL', max_length=255, null=True)),
('issue', models.CharField(blank=True, db_column='ISSUE', max_length=55, null=True)),
('medline_journal', models.CharField(blank=True, db_column='MEDLINE_JOURNAL', max_length=255, null=True)),
('pub_abstract', models.TextField(blank=True, db_column='PUB_ABSTRACT', null=True)),
('pubmed_central_id', models.IntegerField(blank=True, db_column='PUBMED_CENTRAL_ID', null=True)),
('pubmed_id', models.IntegerField(blank=True, db_column='PUBMED_ID', null=True)),
('pub_title', models.CharField(db_column='PUB_TITLE', max_length=740)),
('raw_pages', models.CharField(blank=True, db_column='RAW_PAGES', max_length=30, null=True)),
('url', models.CharField(blank=True, db_column='URL', max_length=740, null=True)),
('volume', models.CharField(blank=True, db_column='VOLUME', max_length=55, null=True)),
('published_year', models.SmallIntegerField(blank=True, db_column='PUBLISHED_YEAR', null=True)),
('pub_type', models.CharField(blank=True, db_column='PUB_TYPE', max_length=150, null=True)),
],
options={
'db_table': 'PUBLICATION',
},
),
migrations.CreateModel(
name='Study',
fields=[
('study_id', models.AutoField(db_column='STUDY_ID', primary_key=True, serialize=False)),
('centre_name', models.CharField(blank=True, db_column='CENTRE_NAME', max_length=255, null=True)),
('experimental_factor', models.CharField(blank=True, db_column='EXPERIMENTAL_FACTOR', max_length=255, null=True)),
('is_public', models.IntegerField(blank=True, db_column='IS_PUBLIC', null=True)),
('ncbi_project_id', models.IntegerField(blank=True, db_column='NCBI_PROJECT_ID', null=True)),
('public_release_date', models.DateField(blank=True, db_column='PUBLIC_RELEASE_DATE', null=True)),
('study_abstract', models.TextField(blank=True, db_column='STUDY_ABSTRACT', null=True)),
('ext_study_id', models.CharField(db_column='EXT_STUDY_ID', max_length=18)),
('study_name', models.CharField(blank=True, db_column='STUDY_NAME', max_length=255, null=True)),
('study_status', models.CharField(blank=True, db_column='STUDY_STATUS', max_length=30, null=True)),
('data_origination', models.CharField(blank=True, db_column='DATA_ORIGINATION', max_length=20, null=True)),
('author_email', models.CharField(blank=True, db_column='AUTHOR_EMAIL', max_length=100, null=True)),
('author_name', models.CharField(blank=True, db_column='AUTHOR_NAME', max_length=100, null=True)),
('last_update', models.DateTimeField(db_column='LAST_UPDATE')),
('submission_account_id', models.CharField(blank=True, db_column='SUBMISSION_ACCOUNT_ID', max_length=15, null=True)),
('result_directory', models.CharField(blank=True, db_column='RESULT_DIRECTORY', max_length=100, null=True)),
('first_created', models.DateTimeField(db_column='FIRST_CREATED')),
('project_id', models.CharField(blank=True, db_column='PROJECT_ID', max_length=18, null=True)),
('biome', models.ForeignKey(db_column='BIOME_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.BiomeHierarchyTree')),
# ('publications', models.ManyToManyField(through='emgapi.StudyPublication', to='emgapi.Publication')),
],
options={
'db_table': 'STUDY',
},
),
migrations.CreateModel(
name='StudyPublication',
fields=[
('study', models.ForeignKey(db_column='STUDY_ID', on_delete=django.db.models.deletion.DO_NOTHING, primary_key=True, serialize=False, to='emgapi.Study')),
('pub', models.ForeignKey(db_column='PUB_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.Publication')),
],
options={
'db_table': 'STUDY_PUBLICATION',
},
),
migrations.CreateModel(
name='Sample',
fields=[
('sample_id', models.AutoField(db_column='SAMPLE_ID', primary_key=True, serialize=False)),
('analysis_completed', models.DateField(blank=True, db_column='ANALYSIS_COMPLETED', null=True)),
('collection_date', models.DateField(blank=True, db_column='COLLECTION_DATE', null=True)),
('geo_loc_name', models.CharField(blank=True, db_column='GEO_LOC_NAME', max_length=255, null=True)),
('is_public', models.IntegerField(blank=True, db_column='IS_PUBLIC', null=True)),
('metadata_received', models.DateTimeField(blank=True, db_column='METADATA_RECEIVED', null=True)),
('sample_desc', models.TextField(blank=True, db_column='SAMPLE_DESC', null=True)),
('sequencedata_archived', models.DateTimeField(blank=True, db_column='SEQUENCEDATA_ARCHIVED', null=True)),
('sequencedata_received', models.DateTimeField(blank=True, db_column='SEQUENCEDATA_RECEIVED', null=True)),
('environment_biome', models.CharField(blank=True, db_column='ENVIRONMENT_BIOME', max_length=255, null=True)),
('environment_feature', models.CharField(blank=True, db_column='ENVIRONMENT_FEATURE', max_length=255, null=True)),
('environment_material', models.CharField(blank=True, db_column='ENVIRONMENT_MATERIAL', max_length=255, null=True)),
('study', models.ForeignKey(db_column='STUDY_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.Study')),
('sample_name', models.CharField(blank=True, db_column='SAMPLE_NAME', max_length=255, null=True)),
('sample_alias', models.CharField(blank=True, db_column='SAMPLE_ALIAS', max_length=255, null=True)),
('host_tax_id', models.IntegerField(blank=True, db_column='HOST_TAX_ID', null=True)),
('ext_sample_id', models.CharField(blank=True, db_column='EXT_SAMPLE_ID', max_length=15, null=True)),
('species', models.CharField(blank=True, db_column='SPECIES', max_length=255, null=True)),
('latitude', models.DecimalField(blank=True, db_column='LATITUDE', decimal_places=4, max_digits=7, null=True)),
('longitude', models.DecimalField(blank=True, db_column='LONGITUDE', decimal_places=4, max_digits=7, null=True)),
('last_update', models.DateTimeField(db_column='LAST_UPDATE')),
('submission_account_id', models.CharField(blank=True, db_column='SUBMISSION_ACCOUNT_ID', max_length=15, null=True)),
('biome', models.ForeignKey(db_column='BIOME_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.BiomeHierarchyTree')),
],
options={
'db_table': 'SAMPLE',
},
),
migrations.CreateModel(
name='SamplePublication',
fields=[
('sample', models.ForeignKey(db_column='SAMPLE_ID', on_delete=django.db.models.deletion.DO_NOTHING, primary_key=True, serialize=False, to='emgapi.Sample')),
('pub', models.ForeignKey(db_column='PUB_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.Publication')),
],
options={
'db_table': 'SAMPLE_PUBLICATION',
},
),
migrations.CreateModel(
name='ExperimentType',
fields=[
('experiment_type_id', models.AutoField(db_column='EXPERIMENT_TYPE_ID', primary_key=True, serialize=False)),
('experiment_type', models.CharField(db_column='EXPERIMENT_TYPE', max_length=30)),
],
options={
'db_table': 'EXPERIMENT_TYPE',
},
),
migrations.CreateModel(
name='Run',
fields=[
('accession', models.CharField(db_column='EXTERNAL_RUN_IDS', max_length=100, primary_key=True, serialize=False)),
('run_status_id', models.IntegerField(blank=True, db_column='RUN_STATUS_ID', null=True)),
('instrument_platform', models.CharField(blank=True, db_column='INSTRUMENT_PLATFORM', max_length=50, null=True)),
('instrument_model', models.CharField(blank=True, db_column='INSTRUMENT_MODEL', max_length=50, null=True)),
],
options={
'db_table': 'ANALYSIS_JOB',
'ordering': ('accession',),
'managed': False,
},
),
migrations.CreateModel(
name='AnalysisJob',
fields=[
('job_id', models.BigAutoField(db_column='JOB_ID', primary_key=True, serialize=False)),
('job_operator', models.CharField(db_column='JOB_OPERATOR', max_length=15)),
('pipeline', models.ForeignKey(db_column='PIPELINE_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.PipelineRelease')),
('submit_time', models.DateTimeField(db_column='SUBMIT_TIME')),
('complete_time', models.DateTimeField(blank=True, db_column='COMPLETE_TIME', null=True)),
('analysis_status', models.ForeignKey(db_column='ANALYSIS_STATUS_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.AnalysisStatus')),
('re_run_count', models.IntegerField(blank=True, db_column='RE_RUN_COUNT', null=True)),
('input_file_name', models.CharField(db_column='INPUT_FILE_NAME', max_length=50)),
('result_directory', models.CharField(db_column='RESULT_DIRECTORY', max_length=100)),
('sample', models.ForeignKey(db_column='SAMPLE_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.Sample')),
('external_run_ids', models.CharField(blank=True, db_column='EXTERNAL_RUN_IDS', max_length=100, null=True)),
('is_production_run', models.TextField(blank=True, db_column='IS_PRODUCTION_RUN', null=True)),
('experiment_type', models.ForeignKey(db_column='EXPERIMENT_TYPE_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.ExperimentType')),
('run_status_id', models.IntegerField(blank=True, db_column='RUN_STATUS_ID', null=True)),
('instrument_platform', models.CharField(blank=True, db_column='INSTRUMENT_PLATFORM', max_length=50, null=True)),
('instrument_model', models.CharField(blank=True, db_column='INSTRUMENT_MODEL', max_length=50, null=True)),
],
options={
'db_table': 'ANALYSIS_JOB',
},
),
migrations.CreateModel(
name='StudyErrorType',
fields=[
('error_id', models.IntegerField(db_column='ERROR_ID', primary_key=True, serialize=False)),
('error_type', models.CharField(db_column='ERROR_TYPE', max_length=50)),
('description', models.TextField(db_column='DESCRIPTION')),
],
options={
'db_table': 'STUDY_ERROR_TYPE',
'managed': False,
},
),
migrations.CreateModel(
name='BlacklistedStudy',
fields=[
('ext_study_id', models.CharField(db_column='EXT_STUDY_ID', max_length=18, primary_key=True, serialize=False)),
('error_type', models.ForeignKey(db_column='ERROR_TYPE_ID', on_delete=django.db.models.deletion.DO_NOTHING, to='emgapi.StudyErrorType')),
('analyzer', models.CharField(db_column='ANALYZER', max_length=15)),
('pipeline_id', models.IntegerField(blank=True, db_column='PIPELINE_ID', null=True)),
('date_blacklisted', models.DateField(db_column='DATE_BLACKLISTED')),
('comment', models.TextField(blank=True, db_column='COMMENT', null=True)),
],
options={
'db_table': 'BLACKLISTED_STUDY',
'managed': False,
},
),
migrations.CreateModel(
name='VariableNames',
fields=[
('var_id', models.SmallIntegerField(db_column='VAR_ID', primary_key=True, serialize=False)),
('var_name', models.CharField(db_column='VAR_NAME', max_length=50, unique=True)),
('definition', models.TextField(blank=True, db_column='DEFINITION', null=True)),
('value_syntax', models.CharField(blank=True, db_column='VALUE_SYNTAX', max_length=250, null=True)),
('alias', models.CharField(blank=True, db_column='ALIAS', max_length=30, null=True)),
('authority', models.CharField(blank=True, db_column='AUTHORITY', max_length=30, null=True)),
('sra_xml_attribute', models.CharField(blank=True, db_column='SRA_XML_ATTRIBUTE', max_length=30, null=True)),
('required_for_mimarks_complianc', models.CharField(blank=True, db_column='REQUIRED_FOR_MIMARKS_COMPLIANC', max_length=1, null=True)),
('required_for_mims_compliance', models.CharField(blank=True, db_column='REQUIRED_FOR_MIMS_COMPLIANCE', max_length=1, null=True)),
('gsc_env_packages', models.CharField(blank=True, db_column='GSC_ENV_PACKAGES', max_length=250, null=True)),
('comments', models.CharField(blank=True, db_column='COMMENTS', max_length=250, null=True)),
],
options={
'db_table': 'VARIABLE_NAMES',
},
),
migrations.CreateModel(
name='GscCvCv',
fields=[
('var_val_cv', models.CharField(db_column='VAR_VAL_CV', max_length=60, primary_key=True, serialize=False)),
('var_name', models.ForeignKey(blank=True, db_column='VAR_NAME', null=True, on_delete=django.db.models.deletion.CASCADE, to='emgapi.VariableNames')),
],
options={
'db_table': 'GSC_CV_CV',
},
),
migrations.CreateModel(
name='SampleAnn',
fields=[
('sample', models.ForeignKey(db_column='SAMPLE_ID', on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='metadata', serialize=False, to='emgapi.Sample')),
('var_val_cv', models.ForeignKey(blank=True, db_column='VAR_VAL_CV', null=True, on_delete=django.db.models.deletion.CASCADE, to='emgapi.GscCvCv')),
('units', models.CharField(blank=True, db_column='UNITS', max_length=25, null=True)),
('var', models.ForeignKey(db_column='VAR_ID', on_delete=django.db.models.deletion.CASCADE, to='emgapi.VariableNames')),
('var_val_ucv', models.CharField(blank=True, db_column='VAR_VAL_UCV', max_length=4000, null=True)),
],
options={
'db_table': 'SAMPLE_ANN',
},
),
]
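    # Note (commentary, not emitted by Django's makemigrations): models declared
    # with 'managed': False (Run, StudyErrorType, BlacklistedStudy) only update
    # migration state; Django will not create or drop their tables, which fits a
    # pre-existing EMG schema addressed via explicit db_table/db_column names.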
|
EBI-Metagenomics/emgapi
|
emgapi/migrations/0001_initial.py
|
Python
|
apache-2.0
| 19,658 | 0.006511 |
"""
WSGI config for test_project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/stable/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_project.settings")
application = get_wsgi_application()
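# Any WSGI server can import this callable; for example (a typical
# invocation, not mandated by the project):
#   gunicorn test_project.wsgi:application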
|
mgrouchy/django-stronghold
|
test_project/test_project/wsgi.py
|
Python
|
mit
| 402 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-27 19:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posts', '0005_post_author'),
]
operations = [
migrations.CreateModel(
name='PostImage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('alt_text', models.CharField(blank=True, max_length=96, null=True)),
('image', models.ImageField(upload_to='')),
],
),
migrations.AddField(
model_name='post',
name='images',
field=models.ManyToManyField(related_name='posts', to='posts.PostImage'),
),
]
|
jokuf/hack-blog
|
posts/migrations/0006_auto_20170327_1906.py
|
Python
|
mit
| 843 | 0.003559 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
from tests import TestCase, mkstemp
from .helper import temp_filename
from quodlibet.util.config import Config, Error, ConfigProxy
class TConfig(TestCase):
def test_set_default_only(self):
conf = Config()
self.assertRaises(Error, conf.set, "foo", "bar", 1)
conf.defaults.add_section("foo")
conf.set("foo", "bar", 1)
def test_options(self):
conf = Config()
self.assertRaises(Error, conf.options, "foo")
conf.defaults.add_section("foo")
self.assertEqual(conf.options("foo"), [])
conf.defaults.set("foo", "bar", 1)
conf.defaults.set("foo", "blah", 1)
conf.set("foo", "blah", 1)
conf.set("foo", "quux", 1)
self.assertEqual(conf.options("foo"), ['blah', 'quux', 'bar'])
conf.defaults.clear()
def test_options_no_default(self):
conf = Config()
conf.add_section("foo")
self.assertEqual(conf.options("foo"), [])
def test_has_section(self):
conf = Config()
self.assertFalse(conf.has_section("foo"))
conf.defaults.add_section("foo")
self.assertTrue(conf.has_section("foo"))
conf.add_section("foo")
conf.defaults.clear()
self.assertTrue(conf.has_section("foo"))
conf.clear()
self.assertFalse(conf.has_section("foo"))
def test_read_garbage_file(self):
conf = Config()
garbage = b"\xf1=\xab\xac"
fd, filename = mkstemp()
os.close(fd)
with open(filename, "wb") as f:
f.write(garbage)
self.assertRaises(Error, conf.read, filename)
os.remove(filename)
def test_set(self):
conf = Config()
conf.add_section("foo")
conf.set("foo", "bar", 1)
self.failUnlessEqual(conf.get("foo", "bar"), "1")
self.failUnlessEqual(conf.getint("foo", "bar"), 1)
def test_setbytes(self):
conf = Config()
conf.add_section("foo")
conf.setbytes("foo", "bar", b"\xff\xff")
assert conf.getbytes("foo", "bar") == b"\xff\xff"
def test_getbytes(self):
conf = Config()
assert conf.getbytes("foo", "bar", b"\xff") == b"\xff"
def test_reset(self):
conf = Config()
conf.defaults.add_section("player")
conf.defaults.set("player", "backend", "blah")
conf.set("player", "backend", "foo")
self.assertEqual(conf.get("player", "backend"), "foo")
conf.reset("player", "backend")
conf.defaults.set("player", "backend", "blah_new")
self.assertEqual(conf.get("player", "backend"), "blah_new")
def test_reset_no_section(self):
conf = Config()
conf.defaults.add_section("player")
conf.defaults.set("player", "backend", "blah")
conf.reset("player", "backend")
assert conf.get("player", "backend") == "blah"
def test_initial_after_set(self):
conf = Config()
conf.add_section("player")
conf.set("player", "backend", "orig")
conf.defaults.add_section("player")
conf.defaults.set("player", "backend", "initial")
self.assertEqual(conf.get("player", "backend"), "orig")
self.assertEqual(conf.defaults.get("player", "backend"), "initial")
conf.reset("player", "backend")
self.assertEqual(conf.get("player", "backend"), "initial")
def test_get_fallback_default(self):
conf = Config()
conf.defaults.add_section("get")
self.assertRaises(Error, conf.get, "get", "bar")
conf.defaults.set("get", "bar", 1)
self.assertEqual(conf.get("get", "bar"), "1")
conf.defaults.add_section("getboolean")
self.assertRaises(Error, conf.getboolean, "getboolean", "bar")
conf.defaults.set("getboolean", "bar", True)
self.assertEqual(conf.getboolean("getboolean", "bar"), True)
conf.defaults.add_section("getfloat")
self.assertRaises(Error, conf.getfloat, "getfloat", "bar")
conf.defaults.set("getfloat", "bar", 1.0)
self.assertEqual(conf.getfloat("getfloat", "bar"), 1.0)
conf.defaults.add_section("getint")
self.assertRaises(Error, conf.getint, "getint", "bar")
conf.defaults.set("getint", "bar", 42)
self.assertEqual(conf.getint("getint", "bar"), 42)
conf.defaults.add_section("getlist")
self.assertRaises(Error, conf.getlist, "getlist", "bar")
conf.defaults.setlist("getlist", "bar", [1, 2, 3])
self.assertEqual(conf.getlist("getlist", "bar"), ["1", "2", "3"])
def test_get(self):
conf = Config()
conf.add_section("foo")
conf.set("foo", "int", "1")
conf.set("foo", "float", "1.25")
conf.set("foo", "str", "foobar")
conf.set("foo", "bool", "True")
self.failUnlessEqual(conf.getint("foo", "int"), 1)
self.failUnlessEqual(conf.getfloat("foo", "float"), 1.25)
self.failUnlessEqual(conf.get("foo", "str"), "foobar")
self.failUnlessEqual(conf.getboolean("foo", "bool"), True)
def test_get_invalid_data(self):
conf = Config()
conf.add_section("foo")
conf.set("foo", "bla", "xx;,,;\n\n\naa")
self.assertTrue(conf.getboolean("foo", "bla", True))
self.assertEqual(conf.getint("foo", "bla", 42), 42)
self.assertEqual(conf.getfloat("foo", "bla", 1.5), 1.5)
self.assertEqual(conf.getstringlist("foo", "bla", ["baz"]), ["baz"])
def test_getint_float(self):
conf = Config()
conf.add_section("foo")
conf.set("foo", "float", "1.25")
self.assertEqual(conf.getint("foo", "float"), 1)
def test_get_default(self):
conf = Config()
conf.add_section("foo")
self.failUnlessEqual(conf.getboolean("foo", "nothing", True), True)
self.failUnlessEqual(conf.getint("foo", "nothing", 42), 42)
self.failUnlessEqual(conf.getfloat("foo", "nothing", 42.42), 42.42)
self.failUnlessEqual(conf.get("foo", "nothing", "foo"), "foo")
def test_stringlist_simple(self):
conf = Config()
conf.add_section("foo")
self.failIf(conf.get("foo", "bar", None))
vals = ["one", "two", "three"]
conf.setstringlist("foo", "bar", vals)
self.failUnlessEqual(conf.getstringlist("foo", "bar"), vals)
def test_stringlist_mixed(self):
conf = Config()
conf.add_section("foo")
self.failIf(conf.get("foo", "bar", None))
conf.setstringlist("foo", "bar", ["one", 2])
self.failUnlessEqual(conf.getstringlist("foo", "bar"), ["one", "2"])
def test_stringlist_quoting(self):
conf = Config()
conf.add_section("foo")
self.failIf(conf.get("foo", "bar", None))
vals = ["foo's gold", "bar, \"best\" 'ever'",
u"le goût d'œufs à Noël"]
conf.setstringlist("foo", "bar", vals)
self.failUnlessEqual(conf.getstringlist("foo", "bar"), vals)
def test_stringlist_spaces(self):
conf = Config()
conf.add_section("foo")
vals = [" ", " ", " \t ", " \n \n"]
conf.setstringlist("foo", "bar", vals)
self.failUnlessEqual(conf.getstringlist("foo", "bar"), vals)
def test_stringlist_invalid_encoding(self):
conf = Config()
conf.add_section("foo")
conf.setbytes("foo", "bar", b"\xff\xff\xff\xff\xff\xff")
def test_getlist(self):
conf = Config()
conf.add_section("foo")
self.assertEqual(conf.getlist("foo", "bar", ["arg"]), ["arg"])
conf.set("foo", "bar", "abc,fo:o\\,bar")
self.assertEqual(conf.getlist("foo", "bar"), ["abc", "fo:o,bar"])
self.assertEqual(conf.getlist("foo", "bar", sep=":"),
["abc,fo", "o\\,bar"])
conf.set("foo", "bar", "")
self.assertEqual(conf.getlist("foo", "bar"), [""])
def test_setlist(self):
conf = Config()
conf.add_section("foo")
conf.setlist("foo", "bar", [" a", ",", "c"])
self.assertEqual(conf.getlist("foo", "bar"), [" a", ",", "c"])
self.assertEqual(conf.get("foo", "bar"), " a,\\,,c")
conf.setlist("foo", "bar", [" a", ",", "c"], sep=":")
self.assertEqual(conf.get("foo", "bar"), " a:,:c")
def test_versioning_disabled(self):
# we don't pass a version, so versioning is disabled
conf = Config()
self.assertRaises(Error, conf.get_version)
with temp_filename() as filename:
conf.read(filename)
self.assertRaises(Error, conf.register_upgrade_function, lambda: None)
def test_versioning_upgrade_func(self):
called = []
with temp_filename() as filename:
conf = Config(version=0)
def func(*args):
called.append(args)
conf.register_upgrade_function(func)
self.assertRaises(Error, conf.get_version)
conf.read(filename)
self.assertEqual(conf.get_version(), -1)
conf.register_upgrade_function(func)
self.assertEqual([(conf, -1, 0), (conf, -1, 0)], called)
def test_versioning(self):
with temp_filename() as filename:
conf = Config(version=41)
conf.add_section("foo")
conf.set("foo", "bar", "quux")
conf.write(filename)
self.assertRaises(Error, conf.get_version)
# old was 41, we have 42, so upgrade
def func(config, old, new):
if old < 42:
config.set("foo", "bar", "nope")
conf = Config(version=42)
conf.register_upgrade_function(func)
conf.read(filename)
self.assertEqual(conf.get_version(), 41)
self.assertEqual(conf.get("foo", "bar"), "nope")
# write doesn't change version
conf.write(filename)
self.assertEqual(conf.get_version(), 41)
# but if we load again, it does
conf.read(filename)
self.assertEqual(conf.get_version(), 42)
def test_upgrade_first_read(self):
# don't run upgrade funcs if there is no config file yet
with temp_filename() as filename:
pass
conf = Config(version=41)
def func(*args):
            self.fail("upgrade function must not run on first read")
conf.register_upgrade_function(func)
conf.read(filename)
class TConfigProxy(TestCase):
def setUp(self):
conf = Config()
conf.defaults.add_section("somesection")
self.proxy = ConfigProxy(conf, "somesection")
def test_getters_setters(self):
self.proxy.set("foo", "bar")
self.assertEqual(self.proxy.get("foo"), "bar")
self.proxy.set("foo", 1.5)
self.assertEqual(self.proxy.getfloat("foo"), 1.5)
self.proxy.set("foo", 15)
self.assertEqual(self.proxy.getint("foo"), 15)
self.proxy.set("foo", False)
self.assertEqual(self.proxy.getboolean("foo"), False)
self.proxy.setbytes("foo", b"\xff")
assert self.proxy.getbytes("foo") == b"\xff"
def test_default(self):
self.assertEqual(self.proxy.get("foo", "quux"), "quux")
def test_get_initial(self):
self.proxy.defaults.set("a", 3.0)
self.assertEqual(self.proxy.defaults.get("a"), "3.0")
def test_initial_and_reset(self):
self.proxy.defaults.set("bla", "baz")
self.assertEqual(self.proxy.get("bla"), "baz")
self.proxy.set("bla", "nope")
self.assertEqual(self.proxy.get("bla"), "nope")
self.proxy.reset("bla")
self.assertEqual(self.proxy.get("bla"), "baz")
|
ptitjes/quodlibet
|
tests/test_util_config.py
|
Python
|
gpl-2.0
| 11,883 | 0 |
#!/usr/bin/env python
# encoding: utf-8
"""
update/disease.py
Update the disease terms in database
Created by Måns Magnusson on 2017-04-03.
Copyright (c) 2017 __MoonsoInc__. All rights reserved.
"""
import logging
import os
import click
from flask.cli import current_app, with_appcontext
from scout.constants import UPDATE_DISEASES_RESOURCES
from scout.load.hpo import load_disease_terms
from scout.server.extensions import store
from scout.utils.handle import get_file_handle
from scout.utils.scout_requests import (
    fetch_hpo_to_genes_to_disease,
    fetch_mim_files,
)
LOG = logging.getLogger(__name__)
def _check_resources(resources):
    """Check that resource file lines contain valid data
Args:
resources(dict): resource names as keys and resource file lines as values
"""
for resname, lines in resources.items():
        if not lines or not lines[0].startswith("#"):
LOG.error(f"Resource file '{resname}' doesn't contain valid data.")
raise click.Abort()
def _fetch_downloaded_resources(resources, downloads_folder):
    """Populate resource lines if a resource exists in the downloads folder
Args:
resources(dict):
downloads_folder(str): path to downloaded files or demo version of these files
"""
for resname, filenames in UPDATE_DISEASES_RESOURCES.items():
for filename in filenames:
resource_path = os.path.join(downloads_folder, filename)
resource_exists = os.path.isfile(resource_path)
if resource_exists:
resources[resname] = get_file_handle(resource_path).readlines()
if resname not in resources:
LOG.error(f"Resource file '{resname}' was not found in provided downloads folder.")
raise click.Abort()
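# For reference, UPDATE_DISEASES_RESOURCES is assumed to map resource names to
# candidate file names, roughly (illustrative values, not the actual constant):
# {"genemap_lines": ["genemap2.txt"], "hpo_gene_lines": ["phenotype_to_genes.txt"]}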
@click.command("diseases", short_help="Update disease terms")
@click.option(
"-f",
"--downloads-folder",
type=click.Path(exists=True, dir_okay=True, readable=True),
help="specify path to folder where files necessary to update diseases are pre-downloaded",
)
@click.option(
"--api-key",
help="Download resources using an OMIM api key (required only if downloads folder is NOT specified)",
)
@with_appcontext
def diseases(downloads_folder, api_key):
    """
    Update disease terms in the mongo database. Use pre-downloaded resource files
    (phenotype_to_genes and genemap2) or download them from OMIM.
    Downloading from OMIM requires a valid OMIM API key; pre-downloaded files do not.
    """
adapter = store
api_key = api_key or current_app.config.get("OMIM_API_KEY")
resources = {}
if downloads_folder:
api_key = None
        # Fetch required resource lines after making sure they are present in the downloads folder and contain valid data
_fetch_downloaded_resources(resources, downloads_folder)
else:
# Download resources
if not api_key:
            LOG.warning("Please provide an OMIM API key to load the OMIM gene panel")
raise click.Abort()
try:
mim_files = fetch_mim_files(api_key, genemap2=True)
resources["genemap_lines"] = mim_files["genemap2"]
resources["hpo_gene_lines"] = fetch_hpo_to_genes_to_disease()
except Exception as err:
LOG.warning(err)
raise click.Abort()
_check_resources(resources)
LOG.info("Dropping DiseaseTerms")
adapter.disease_term_collection.delete_many({})
LOG.debug("DiseaseTerms dropped")
load_disease_terms(
adapter=adapter,
genemap_lines=resources["genemap_lines"],
hpo_disease_lines=resources["hpo_gene_lines"],
)
LOG.info("Successfully loaded all disease terms")
|
Clinical-Genomics/scout
|
scout/commands/update/disease.py
|
Python
|
bsd-3-clause
| 3,712 | 0.002156 |
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')p9u&kcu@_(8u&-%4(m9!&4*82sx97zyl-!i#m9kic2lycj%0)'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'demografia.apps.DemografiaConfig',
'dal',
'dal_select2',
'suit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
#'input_mask',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ROOT_URLCONF = 'comunidad.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'comunidad.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'HOST': '127.0.0.1',
'NAME': 'comunidad',
'PASSWORD': '123456',
'PORT': '5432',
'USER': 'postgres',
'SCHEMAS': 'public,demografia'
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'es'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
SUIT_CONFIG = {
# header
'ADMIN_NAME': 'comunidad',
'HEADER_DATE_FORMAT': 'l, j. F Y',
'HEADER_TIME_FORMAT': 'H:i',
# forms
'SHOW_REQUIRED_ASTERISK': True, # Default True
'CONFIRM_UNSAVED_CHANGES': True, # Default True
# menu
'SEARCH_URL': '/admin/auth/user/',
'MENU_ICONS': {
'sites': 'icon-leaf',
'auth': 'icon-lock',
},
# 'MENU_OPEN_FIRST_CHILD': True, # Default True
'MENU_EXCLUDE': ('demografia.miembrohogar',),
# 'MENU': (
# 'sites',
# {'app': 'auth', 'icon':'icon-lock', 'models': ('user', 'group')},
# {'label': 'Settings', 'icon':'icon-cog', 'models': ('auth.user', 'auth.group')},
# {'label': 'Support', 'icon':'icon-question-sign', 'url': '/support/'},
# ),
# misc
'LIST_PER_PAGE': 20
}
LOGIN_URL = 'login'
LOGOUT_URL = 'logout'
LOGIN_REDIRECT_URL = 'index'
CACHE_BACKEND = 'simple:///'
AUTH_PROFILE_MODULE = "demografia.persona"
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
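# Note (assumption): django-debug-toolbar only renders for client addresses
# listed in INTERNAL_IPS, which this settings module would still need to define:
# INTERNAL_IPS = ['127.0.0.1']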
|
gvizquel/comunidad
|
comunidad/settings.py
|
Python
|
gpl-3.0
| 4,931 | 0.001825 |
# coding=utf-8
# dictionary value -> 7-segment data
Font = {
0: 0b00111111, # (48) 0
1: 0b00000110, # (49) 1
2: 0b01011011, # (50) 2
3: 0b01001111, # (51) 3
4: 0b01100110, # (52) 4
5: 0b01101101, # (53) 5
6: 0b01111101, # (54) 6
7: 0b00100111, # (55) 7
8: 0b01111111, # (56) 8
9: 0b01101111, # (57) 9
}
# build array10 and array100 of numbers such that
# i/16 = array10[i]/10 + array100[i&7]/100 (approximately)
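# e.g. i = 3: 3/16 = 0.1875, so array10[3] = int(1.875) = 1,
# array100[3] = int(18.75) % 10 = 8, and 1/10 + 8/100 = 0.18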
array10 = []
array100 = []
for i in range(16):
f = i/16.0
if i < 8:
array100.append(int(f * 100) % 10)
array10.append(int(f * 10))
print(array10)
print(array100)
# check
for i in range(16):
print("%d -> %s%s" % (i, array10[i], array100[i & 7]))
# print the C arrays
print("const uint8_t digit[16] = {" + ",".join(str(Font[i % 10]+128) for i in range(16)) + "};")
print("const uint8_t array10[16] = {" + ",".join(str(Font[array10[i]]) for i in range(16)) + "};")
print("const uint8_t array100[8] = {" + ",".join(str(Font[array100[i]]) for i in range(8)) + "};")
# check
for i in range(256):
# if i&15:
print("%s%d.%d%d%d%d" % ("1" if ((i >> 4) > 9) else " ", (i >> 4) % 10, array10[i & 15], array100[i & 7],
array100[i & 3], array100[(i << 1) & 3]))
# else:
# print("%d.%d%d%d%d" % (i >> 4, 0, 0, 0, 0))
|
thilaire/missionBoard
|
src/AVR/genTableCountDown.py
|
Python
|
gpl-3.0
| 1,264 | 0.019778 |
# -*- coding: utf-8 -*-
from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
try:
from django.http import StreamingHttpResponse as HttpResponse
except ImportError:
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from sse import Sse
class BaseSseView(View):
"""
This is a base class for sse streaming.
"""
def get_last_id(self):
if "HTTP_LAST_EVENT_ID" in self.request.META:
return self.request.META['HTTP_LAST_EVENT_ID']
return None
def _iterator(self):
        for _ in self.iterator():
for bufferitem in self.sse:
yield bufferitem
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
self.sse = Sse()
self.request = request
self.args = args
self.kwargs = kwargs
response = HttpResponse(self._iterator(), content_type="text/event-stream")
response['Cache-Control'] = 'no-cache'
response['Software'] = 'django-sse'
return response
def iterator(self):
"""
This is a source of stream.
Must use ``yield`` statement to flush
content from sse object to the client.
Example:
def iterator(self):
counter = 0
while True:
self.sse.add_message('foo', 'bar')
self.sse.add_message('bar', 'foo')
yield
"""
raise NotImplementedError
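# Illustrative sketch (hypothetical subclass, not part of this module):
# push a counter event roughly once per second until the client disconnects.
class ExampleCounterSseView(BaseSseView):
    def iterator(self):
        import itertools
        import time
        for counter in itertools.count():
            self.sse.add_message('counter', str(counter))
            yield
            time.sleep(1)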
|
raspberrywhite/django-sse
|
django_sse/views.py
|
Python
|
bsd-3-clause
| 1,543 | 0.000648 |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider
from mpi4py import MPI
import sys
from cplpy import CPL
#initialise MPI and CPL
comm = MPI.COMM_WORLD
CPL = CPL()
MD_COMM = CPL.init(CPL.CFD_REALM)
nprocs_realm = MD_COMM.Get_size()
## Parameters of the cpu topology (cartesian grid)
npxyz = np.array([1, 1, 1], order='F', dtype=np.int32)
NProcs = np.prod(npxyz)
print('Number of arguments: {}, arguments: {}'.format(len(sys.argv), sys.argv))
if len(sys.argv) > 1:
g = float(sys.argv[1])
else:
g = 9.81
xyzL = np.array([1.5000000000000000E-003,
1.5000000000000000E-003,
2.5000000000000001E-003], order='F', dtype=np.float64)
xyz_orig = np.array([0.0, 0.0, 0.0], order='F', dtype=np.float64)
ncxyz = np.array([8, 8, 8], order='F', dtype=np.int32)
if (nprocs_realm != NProcs):
    print("Non-coherent number of processes in CFD ", nprocs_realm,
          " not equal to ", npxyz[0], " X ", npxyz[1], " X ", npxyz[2])
    comm.Abort(errorcode=1)
#Setup coupled simulation
cart_comm = MD_COMM.Create_cart([npxyz[0], npxyz[1], npxyz[2]])
CPL.setup_cfd(cart_comm, xyzL, xyz_orig, ncxyz)
#Get constraint region
cnst_limits = CPL.get_cnst_limits();
cnst_portion = CPL.my_proc_portion(cnst_limits)
[cnst_ncxl, cnst_ncyl, cnst_nczl] = CPL.get_no_cells(cnst_portion)
#Get overlap region
olap_limits = CPL.get_olap_limits()
BC_limits = np.array([olap_limits[0], olap_limits[1],
olap_limits[2], olap_limits[3],
olap_limits[4], olap_limits[5]], dtype=np.int32)
BC_portion = CPL.my_proc_portion(BC_limits)
[BC_ncxl, BC_ncyl, BC_nczl] = CPL.get_no_cells(BC_portion)
#Allocate send and recv arrays
recv_array = np.zeros((4, BC_ncxl, BC_ncyl, BC_nczl), order='F', dtype=np.float64)
send_array = np.zeros((9, cnst_ncxl, cnst_ncyl, cnst_nczl), order='F', dtype=np.float64)
Nsteps = 21
for time in range(Nsteps):
# send data to update
send_array[2,:,:,:] = -5.9490638385009208e-08*g*np.sin(2.*np.pi*time/Nsteps)
CPL.send(send_array, cnst_portion)
# recv data and plot
recv_array, ierr = CPL.recv(recv_array, BC_portion)
print(time)
CPL.finalize()
MPI.Finalize()
|
Crompulence/cpl-library
|
test/lammps/single/no_wall/time_varying_force/CFD_single_ball.py
|
Python
|
gpl-3.0
| 2,218 | 0.008566 |
# coding: utf-8
#
# Copyright 2010 Alexandre Fiori
# based on the original Tornado by Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""HTTP utility code shared by clients and servers."""
from __future__ import absolute_import, division, with_statement
import urllib
import re
from twisted.python import log
from cyclone.util import b, ObjectDict
class HTTPHeaders(dict):
"""A dictionary that maintains Http-Header-Case for all keys.
Supports multiple values per key via a pair of new methods,
add() and get_list(). The regular dictionary interface returns a single
value per key, with multiple values joined by a comma.
>>> h = HTTPHeaders({"content-type": "text/html"})
>>> h.keys()
['Content-Type']
>>> h["Content-Type"]
'text/html'
>>> h.add("Set-Cookie", "A=B")
>>> h.add("Set-Cookie", "C=D")
>>> h["set-cookie"]
'A=B,C=D'
>>> h.get_list("set-cookie")
['A=B', 'C=D']
>>> for (k,v) in sorted(h.get_all()):
... print '%s: %s' % (k,v)
...
Content-Type: text/html
Set-Cookie: A=B
Set-Cookie: C=D
"""
def __init__(self, *args, **kwargs):
# Don't pass args or kwargs to dict.__init__, as it will bypass
# our __setitem__
dict.__init__(self)
self._as_list = {}
self._last_key = None
if (len(args) == 1 and len(kwargs) == 0 and
isinstance(args[0], HTTPHeaders)):
# Copy constructor
for k, v in args[0].get_all():
self.add(k, v)
else:
# Dict-style initialization
self.update(*args, **kwargs)
# new public methods
def add(self, name, value):
"""Adds a new value for the given key."""
norm_name = HTTPHeaders._normalize_name(name)
self._last_key = norm_name
if norm_name in self:
# bypass our override of __setitem__ since it modifies _as_list
dict.__setitem__(self, norm_name, self[norm_name] + ',' + value)
self._as_list[norm_name].append(value)
else:
self[norm_name] = value
def get_list(self, name):
"""Returns all values for the given header as a list."""
norm_name = HTTPHeaders._normalize_name(name)
return self._as_list.get(norm_name, [])
def get_all(self):
"""Returns an iterable of all (name, value) pairs.
If a header has multiple values, multiple pairs will be
returned with the same name.
"""
        for name, values in self._as_list.iteritems():
            for value in values:
                yield (name, value)
def parse_line(self, line):
"""Updates the dictionary with a single header line.
>>> h = HTTPHeaders()
>>> h.parse_line("Content-Type: text/html")
>>> h.get('content-type')
'text/html'
"""
if line[0].isspace():
# continuation of a multi-line header
new_part = ' ' + line.lstrip()
self._as_list[self._last_key][-1] += new_part
dict.__setitem__(self, self._last_key,
self[self._last_key] + new_part)
else:
name, value = line.split(":", 1)
self.add(name, value.strip())
@classmethod
def parse(cls, headers):
"""Returns a dictionary from HTTP header text.
>>> h = HTTPHeaders.parse(
"Content-Type: text/html\\r\\nContent-Length: 42\\r\\n")
>>> sorted(h.iteritems())
[('Content-Length', '42'), ('Content-Type', 'text/html')]
"""
h = cls()
for line in headers.splitlines():
if line:
h.parse_line(line)
return h
# dict implementation overrides
def __setitem__(self, name, value):
norm_name = HTTPHeaders._normalize_name(name)
dict.__setitem__(self, norm_name, value)
self._as_list[norm_name] = [value]
def __getitem__(self, name):
return dict.__getitem__(self, HTTPHeaders._normalize_name(name))
def __delitem__(self, name):
norm_name = HTTPHeaders._normalize_name(name)
dict.__delitem__(self, norm_name)
del self._as_list[norm_name]
def __contains__(self, name):
norm_name = HTTPHeaders._normalize_name(name)
return dict.__contains__(self, norm_name)
def get(self, name, default=None):
return dict.get(self, HTTPHeaders._normalize_name(name), default)
def update(self, *args, **kwargs):
# dict.update bypasses our __setitem__
for k, v in dict(*args, **kwargs).iteritems():
self[k] = v
def copy(self):
# default implementation returns dict(self), not the subclass
return HTTPHeaders(self)
_NORMALIZED_HEADER_RE = \
re.compile(r'^[A-Z0-9][a-z0-9]*(-[A-Z0-9][a-z0-9]*)*$')
_normalized_headers = {}
@staticmethod
def _normalize_name(name):
"""Converts a name to Http-Header-Case.
>>> HTTPHeaders._normalize_name("coNtent-TYPE")
'Content-Type'
"""
try:
return HTTPHeaders._normalized_headers[name]
except KeyError:
if HTTPHeaders._NORMALIZED_HEADER_RE.match(name):
normalized = name
else:
normalized = "-".join(
[w.capitalize() for w in name.split("-")])
HTTPHeaders._normalized_headers[name] = normalized
return normalized
def url_concat(url, args):
"""Concatenate url and argument dictionary regardless of whether
url has existing query parameters.
>>> url_concat("http://example.com/foo?a=b", dict(c="d"))
'http://example.com/foo?a=b&c=d'
"""
if not args:
return url
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
return url + urllib.urlencode(args)
class HTTPFile(ObjectDict):
"""Represents an HTTP file. For backwards compatibility, its instance
attributes are also accessible as dictionary keys.
:ivar filename:
:ivar body:
:ivar content_type: The content_type comes from the provided HTTP header
and should not be trusted outright given that it can be easily forged.
"""
pass
def parse_multipart_form_data(boundary, data, arguments, files):
"""Parses a multipart/form-data body.
The boundary and data parameters are both byte strings.
The dictionaries given in the arguments and files parameters
will be updated with the contents of the body.
"""
# The standard allows for the boundary to be quoted in the header,
# although it's rare (it happens at least for google app engine
# xmpp). I think we're also supposed to handle backslash-escapes
# here but I'll save that until we see a client that uses them
# in the wild.
if boundary.startswith(b('"')) and boundary.endswith(b('"')):
boundary = boundary[1:-1]
final_boundary_index = data.rfind(b("--") + boundary + b("--"))
if final_boundary_index == -1:
log.msg("Invalid multipart/form-data: no final boundary")
return
parts = data[:final_boundary_index].split(b("--") + boundary + b("\r\n"))
for part in parts:
if not part:
continue
eoh = part.find(b("\r\n\r\n"))
if eoh == -1:
log.msg("multipart/form-data missing headers")
continue
headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
disp_header = headers.get("Content-Disposition", "")
disposition, disp_params = _parse_header(disp_header)
if disposition != "form-data" or not part.endswith(b("\r\n")):
log.msg("Invalid multipart/form-data")
continue
value = part[eoh + 4:-2]
if not disp_params.get("name"):
log.msg("multipart/form-data value missing name")
continue
name = disp_params["name"]
if disp_params.get("filename"):
ctype = headers.get("Content-Type", "application/unknown")
files.setdefault(name, []).append(HTTPFile(
filename=disp_params["filename"], body=value,
content_type=ctype))
else:
arguments.setdefault(name, []).append(value)
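# Illustrative sketch (hypothetical data) of how the parser above behaves;
# the helper name and payload are invented for this example only.
def _example_multipart_parse():
    arguments, files = {}, {}
    body = (b("--frontier\r\n") +
            b("Content-Disposition: form-data; name=\"field\"\r\n\r\n") +
            b("value\r\n--frontier--"))
    parse_multipart_form_data(b("frontier"), body, arguments, files)
    # the single form field ends up in arguments, keyed by its name
    assert arguments == {"field": [b("value")]}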
# _parseparam and _parse_header are copied and modified from python2.7's cgi.py
# The original 2.7 version of this code did not correctly support some
# combinations of semicolons and double quotes.
def _parseparam(s):
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
yield f.strip()
s = s[end:]
def _parse_header(line):
"""Parse a Content-type like header.
Return the main content-type and a dictionary of options.
"""
parts = _parseparam(';' + line)
key = parts.next()
pdict = {}
for p in parts:
i = p.find('=')
if i >= 0:
name = p[:i].strip().lower()
value = p[i + 1:].strip()
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
value = value.replace('\\\\', '\\').replace('\\"', '"')
pdict[name] = value
return key, pdict
def doctests():
import doctest
return doctest.DocTestSuite()
|
shirk3y/cyclone
|
cyclone/httputil.py
|
Python
|
apache-2.0
| 9,933 | 0.000101 |
#!/usr/bin/env python
# ===============================================================================
# Copyright (c) 2014 Geoscience Australia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither Geoscience Australia nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ===============================================================================
from datetime import datetime, timedelta
__author__ = "Simon Oldfield"
import argparse
import gdal
import numpy
from gdalconst import GA_ReadOnly, GA_Update
import logging
import os
import resource
from datacube.api.model import DatasetType, Satellite, get_bands, dataset_type_database
from datacube.api.query import list_tiles_as_list
from datacube.api.utils import PqaMask, get_dataset_metadata, get_dataset_data, get_dataset_data_with_pq, empty_array
from datacube.api.utils import NDV, UINT16_MAX
from datacube.api.workflow import writeable_dir
from datacube.config import Config
from enum import Enum
_log = logging.getLogger()
def satellite_arg(s):
if s in Satellite._member_names_:
return Satellite[s]
raise argparse.ArgumentTypeError("{0} is not a supported satellite".format(s))
def pqa_mask_arg(s):
if s in PqaMask._member_names_:
return PqaMask[s]
raise argparse.ArgumentTypeError("{0} is not a supported PQA mask".format(s))
def dataset_type_arg(s):
if s in DatasetType._member_names_:
return DatasetType[s]
raise argparse.ArgumentTypeError("{0} is not a supported dataset type".format(s))
def summary_method_arg(s):
if s in TimeSeriesSummaryMethod._member_names_:
return TimeSeriesSummaryMethod[s]
raise argparse.ArgumentTypeError("{0} is not a supported summary method".format(s))
class TimeSeriesSummaryMethod(Enum):
__order__ = "YOUNGEST_PIXEL OLDEST_PIXEL MEDOID_PIXEL COUNT MIN MAX MEAN MEDIAN MEDIAN_NON_INTERPOLATED SUM STANDARD_DEVIATION VARIANCE PERCENTILE"
YOUNGEST_PIXEL = 1
OLDEST_PIXEL = 2
MEDOID_PIXEL = 3
COUNT = 4
MIN = 5
MAX = 6
MEAN = 7
MEDIAN = 8
MEDIAN_NON_INTERPOLATED = 9
SUM = 10
STANDARD_DEVIATION = 11
VARIANCE = 12
PERCENTILE = 13
class SummariseDatasetTimeSeriesWorkflow(object):
application_name = None
x = None
y = None
acq_min = None
acq_max = None
process_min = None
process_max = None
ingest_min = None
ingest_max = None
satellites = None
apply_pqa_filter = None
pqa_mask = None
dataset_type = None
output_directory = None
overwrite = None
list_only = None
summary_method = None
chunk_size_x = None
chunk_size_y = None
def __init__(self, application_name):
self.application_name = application_name
def parse_arguments(self):
parser = argparse.ArgumentParser(prog=__name__, description=self.application_name)
group = parser.add_mutually_exclusive_group()
group.add_argument("--quiet", help="Less output", action="store_const", dest="log_level", const=logging.WARN)
group.add_argument("--verbose", help="More output", action="store_const", dest="log_level", const=logging.DEBUG)
parser.set_defaults(log_level=logging.INFO)
parser.add_argument("--x", help="X grid reference", action="store", dest="x", type=int, choices=range(110, 155+1), required=True, metavar="[110 - 155]")
parser.add_argument("--y", help="Y grid reference", action="store", dest="y", type=int, choices=range(-45, -10+1), required=True, metavar="[-45 - -10]")
parser.add_argument("--acq-min", help="Acquisition Date (YYYY or YYYY-MM or YYYY-MM-DD)", action="store", dest="acq_min", type=str, required=True)
parser.add_argument("--acq-max", help="Acquisition Date (YYYY or YYYY-MM or YYYY-MM-DD)", action="store", dest="acq_max", type=str, required=True)
# parser.add_argument("--process-min", help="Process Date", action="store", dest="process_min", type=str)
# parser.add_argument("--process-max", help="Process Date", action="store", dest="process_max", type=str)
#
# parser.add_argument("--ingest-min", help="Ingest Date", action="store", dest="ingest_min", type=str)
# parser.add_argument("--ingest-max", help="Ingest Date", action="store", dest="ingest_max", type=str)
parser.add_argument("--satellite", help="The satellite(s) to include", action="store", dest="satellite",
type=satellite_arg, nargs="+", choices=Satellite, default=[Satellite.LS5, Satellite.LS7], metavar=" ".join([s.name for s in Satellite]))
parser.add_argument("--apply-pqa", help="Apply PQA mask", action="store_true", dest="apply_pqa", default=False)
parser.add_argument("--pqa-mask", help="The PQA mask to apply", action="store", dest="pqa_mask",
type=pqa_mask_arg, nargs="+", choices=PqaMask, default=[PqaMask.PQ_MASK_CLEAR], metavar=" ".join([s.name for s in PqaMask]))
supported_dataset_types = dataset_type_database
parser.add_argument("--dataset-type", help="The types of dataset to retrieve", action="store",
dest="dataset_type",
type=dataset_type_arg,
#nargs="+",
choices=supported_dataset_types, default=DatasetType.ARG25, metavar=" ".join([s.name for s in supported_dataset_types]))
parser.add_argument("--output-directory", help="Output directory", action="store", dest="output_directory",
type=writeable_dir, required=True)
parser.add_argument("--overwrite", help="Over write existing output file", action="store_true", dest="overwrite", default=False)
parser.add_argument("--list-only", help="List the datasets that would be retrieved rather than retrieving them", action="store_true", dest="list_only", default=False)
supported_summary_methods = [
TimeSeriesSummaryMethod.YOUNGEST_PIXEL,
TimeSeriesSummaryMethod.OLDEST_PIXEL,
# TimeSeriesSummaryMethod.MEDOID_PIXEL,
TimeSeriesSummaryMethod.COUNT,
TimeSeriesSummaryMethod.MIN,
TimeSeriesSummaryMethod.MAX,
TimeSeriesSummaryMethod.MEAN,
TimeSeriesSummaryMethod.MEDIAN,
TimeSeriesSummaryMethod.MEDIAN_NON_INTERPOLATED,
TimeSeriesSummaryMethod.SUM,
TimeSeriesSummaryMethod.STANDARD_DEVIATION,
TimeSeriesSummaryMethod.VARIANCE,
TimeSeriesSummaryMethod.PERCENTILE]
parser.add_argument("--summary-method", help="The summary method to apply", action="store",
dest="summary_method",
type=summary_method_arg,
#nargs="+",
choices=supported_summary_methods, required=True, metavar=" ".join([s.name for s in supported_summary_methods]))
        parser.add_argument("--chunk-size-x", help="Number of X pixels to process at once", action="store", dest="chunk_size_x", type=int, choices=range(1, 4000+1), default=4000, metavar="[1 - 4000]")
        parser.add_argument("--chunk-size-y", help="Number of Y pixels to process at once", action="store", dest="chunk_size_y", type=int, choices=range(1, 4000+1), default=4000, metavar="[1 - 4000]")
args = parser.parse_args()
_log.setLevel(args.log_level)
self.x = args.x
self.y = args.y
def parse_date_min(s):
from datetime import datetime
if s:
if len(s) == len("YYYY"):
return datetime.strptime(s, "%Y").date()
elif len(s) == len("YYYY-MM"):
return datetime.strptime(s, "%Y-%m").date()
elif len(s) == len("YYYY-MM-DD"):
return datetime.strptime(s, "%Y-%m-%d").date()
return None
def parse_date_max(s):
from datetime import datetime
import calendar
if s:
if len(s) == len("YYYY"):
d = datetime.strptime(s, "%Y").date()
d = d.replace(month=12, day=31)
return d
elif len(s) == len("YYYY-MM"):
d = datetime.strptime(s, "%Y-%m").date()
first, last = calendar.monthrange(d.year, d.month)
d = d.replace(day=last)
return d
elif len(s) == len("YYYY-MM-DD"):
d = datetime.strptime(s, "%Y-%m-%d").date()
return d
return None
self.acq_min = parse_date_min(args.acq_min)
self.acq_max = parse_date_max(args.acq_max)
# self.process_min = parse_date_min(args.process_min)
# self.process_max = parse_date_max(args.process_max)
#
# self.ingest_min = parse_date_min(args.ingest_min)
# self.ingest_max = parse_date_max(args.ingest_max)
self.satellites = args.satellite
self.apply_pqa_filter = args.apply_pqa
self.pqa_mask = args.pqa_mask
self.dataset_type = args.dataset_type
self.output_directory = args.output_directory
self.overwrite = args.overwrite
self.list_only = args.list_only
self.summary_method = args.summary_method
self.chunk_size_x = args.chunk_size_x
self.chunk_size_y = args.chunk_size_y
_log.info("""
x = {x:03d}
y = {y:04d}
acq = {acq_min} to {acq_max}
process = {process_min} to {process_max}
ingest = {ingest_min} to {ingest_max}
satellites = {satellites}
apply PQA filter = {apply_pqa_filter}
PQA mask = {pqa_mask}
datasets to retrieve = {dataset_type}
output directory = {output}
over write existing = {overwrite}
list only = {list_only}
summary method = {summary_method}
chunk size = {chunk_size_x:4d} x {chunk_size_y:4d} pixels
""".format(x=self.x, y=self.y,
acq_min=self.acq_min, acq_max=self.acq_max,
process_min=self.process_min, process_max=self.process_max,
ingest_min=self.ingest_min, ingest_max=self.ingest_max,
satellites=self.satellites,
apply_pqa_filter=self.apply_pqa_filter, pqa_mask=self.pqa_mask,
dataset_type=decode_dataset_type(self.dataset_type),
output=self.output_directory,
overwrite=self.overwrite,
list_only=self.list_only,
summary_method=self.summary_method,
chunk_size_x=self.chunk_size_x,
chunk_size_y=self.chunk_size_y))
def run(self):
self.parse_arguments()
config = Config(os.path.expanduser("~/.datacube/config"))
_log.debug(config.to_str())
path = self.get_output_filename(self.dataset_type)
_log.info("Output file is [%s]", path)
if os.path.exists(path):
if self.overwrite:
_log.info("Removing existing output file [%s]", path)
os.remove(path)
else:
_log.error("Output file [%s] exists", path)
raise Exception("Output file [%s] already exists" % path)
# TODO
bands = get_bands(self.dataset_type, self.satellites[0])
# TODO once WOFS is in the cube
tiles = list_tiles_as_list(x=[self.x], y=[self.y], acq_min=self.acq_min, acq_max=self.acq_max,
satellites=[satellite for satellite in self.satellites],
dataset_types=[self.dataset_type],
database=config.get_db_database(),
user=config.get_db_username(),
password=config.get_db_password(),
host=config.get_db_host(), port=config.get_db_port())
raster = None
metadata = None
# TODO - PQ is UNIT16 (others are INT16) and so -999 NDV doesn't work
        ndv = UINT16_MAX if self.dataset_type == DatasetType.PQ25 else NDV
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
import itertools
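        # walk the 4000 x 4000 pixel tile in chunk_size_x by chunk_size_y blocks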
for x, y in itertools.product(range(0, 4000, self.chunk_size_x), range(0, 4000, self.chunk_size_y)):
_log.info("About to read data chunk ({xmin:4d},{ymin:4d}) to ({xmax:4d},{ymax:4d})".format(xmin=x, ymin=y, xmax=x+self.chunk_size_x-1, ymax=y+self.chunk_size_y-1))
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
stack = dict()
for tile in tiles:
if self.list_only:
_log.info("Would summarise dataset [%s]", tile.datasets[self.dataset_type].path)
continue
pqa = None
_log.debug("Reading dataset [%s]", tile.datasets[self.dataset_type].path)
if not metadata:
metadata = get_dataset_metadata(tile.datasets[self.dataset_type])
# Apply PQA if specified
if self.apply_pqa_filter:
data = get_dataset_data_with_pq(tile.datasets[self.dataset_type], tile.datasets[DatasetType.PQ25], bands=bands, x=x, y=y, x_size=self.chunk_size_x, y_size=self.chunk_size_y, pq_masks=self.pqa_mask, ndv=ndv)
else:
data = get_dataset_data(tile.datasets[self.dataset_type], bands=bands, x=x, y=y, x_size=self.chunk_size_x, y_size=self.chunk_size_y)
for band in bands:
if band in stack:
stack[band].append(data[band])
else:
stack[band] = [data[band]]
_log.debug("data[%s] has shape [%s] and MB [%s]", band.name, numpy.shape(data[band]), data[band].nbytes/1000/1000)
_log.debug("stack[%s] has [%s] elements", band.name, len(stack[band]))
# Apply summary method
_log.info("Finished reading {count} datasets for chunk ({xmin:4d},{ymin:4d}) to ({xmax:4d},{ymax:4d}) - about to summarise them".format(count=len(tiles), xmin=x, ymin=y, xmax=x+self.chunk_size_x-1, ymax=y+self.chunk_size_y-1))
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
masked_stack = dict()
for band in bands:
masked_stack[band] = numpy.ma.masked_equal(stack[band], ndv)
_log.debug("masked_stack[%s] is %s", band.name, masked_stack[band])
_log.debug("masked stack[%s] has shape [%s] and MB [%s]", band.name, numpy.shape(masked_stack[band]), masked_stack[band].nbytes/1000/1000)
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
if self.summary_method == TimeSeriesSummaryMethod.MIN:
masked_summary = numpy.min(masked_stack[band], axis=0)
elif self.summary_method == TimeSeriesSummaryMethod.MAX:
masked_summary = numpy.max(masked_stack[band], axis=0)
elif self.summary_method == TimeSeriesSummaryMethod.MEAN:
masked_summary = numpy.mean(masked_stack[band], axis=0)
elif self.summary_method == TimeSeriesSummaryMethod.MEDIAN:
masked_summary = numpy.median(masked_stack[band], axis=0)
                # aka the 50th percentile, selected without interpolation
                elif self.summary_method == TimeSeriesSummaryMethod.MEDIAN_NON_INTERPOLATED:
                    masked_sorted = numpy.ma.sort(masked_stack[band], axis=0)
                    masked_percentile_index = numpy.ma.floor(numpy.ma.count(masked_sorted, axis=0) * 0.5).astype(numpy.int16)
masked_summary = numpy.ma.choose(masked_percentile_index, masked_sorted)
elif self.summary_method == TimeSeriesSummaryMethod.COUNT:
# TODO Need to artificially create masked array here since it is being expected/filled below!!!
masked_summary = numpy.ma.masked_equal(masked_stack[band].count(axis=0), ndv)
elif self.summary_method == TimeSeriesSummaryMethod.SUM:
masked_summary = numpy.sum(masked_stack[band], axis=0)
elif self.summary_method == TimeSeriesSummaryMethod.STANDARD_DEVIATION:
masked_summary = numpy.std(masked_stack[band], axis=0)
elif self.summary_method == TimeSeriesSummaryMethod.VARIANCE:
masked_summary = numpy.var(masked_stack[band], axis=0)
# currently 95th percentile
elif self.summary_method == TimeSeriesSummaryMethod.PERCENTILE:
masked_sorted = numpy.ma.sort(masked_stack[band], axis=0)
masked_percentile_index = numpy.ma.floor(numpy.ma.count(masked_sorted, axis=0) * 0.95).astype(numpy.int16)
masked_summary = numpy.ma.choose(masked_percentile_index, masked_sorted)
elif self.summary_method == TimeSeriesSummaryMethod.YOUNGEST_PIXEL:
# TODO the fact that this is band at a time might be problematic. We really should be considering
# all bands at once (that is what the landsat_mosaic logic did). If PQA is being applied then
# it's probably all good but if not then we might get odd results....
                    masked_summary = empty_array(shape=(self.chunk_size_y, self.chunk_size_x), dtype=numpy.int16, ndv=ndv)
# Note the reversed as the stack is created oldest first
for d in reversed(stack[band]):
masked_summary = numpy.where(masked_summary == ndv, d, masked_summary)
                        # If the summary no longer contains any no-data values then we can stop
if not numpy.any(masked_summary == ndv):
break
# TODO Need to artificially create masked array here since it is being expected/filled below!!!
masked_summary = numpy.ma.masked_equal(masked_summary, ndv)
elif self.summary_method == TimeSeriesSummaryMethod.OLDEST_PIXEL:
# TODO the fact that this is band at a time might be problematic. We really should be considering
# all bands at once (that is what the landsat_mosaic logic did). If PQA is being applied then
# it's probably all good but if not then we might get odd results....
                    masked_summary = empty_array(shape=(self.chunk_size_y, self.chunk_size_x), dtype=numpy.int16, ndv=ndv)
# Note the NOT reversed as the stack is created oldest first
for d in stack[band]:
masked_summary = numpy.where(masked_summary == ndv, d, masked_summary)
                        # If the summary no longer contains any no-data values then we can stop
if not numpy.any(masked_summary == ndv):
break
# TODO Need to artificially create masked array here since it is being expected/filled below!!!
masked_summary = numpy.ma.masked_equal(masked_summary, ndv)
masked_stack[band] = None
_log.debug("NONE-ing masked stack[%s]", band.name)
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
_log.debug("masked summary is [%s]", masked_summary)
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
# Create the output file
if not os.path.exists(path):
_log.info("Creating raster [%s]", path)
driver = gdal.GetDriverByName("GTiff")
assert driver
raster = driver.Create(path, metadata.shape[0], metadata.shape[1], len(bands), gdal.GDT_Int16)
assert raster
raster.SetGeoTransform(metadata.transform)
raster.SetProjection(metadata.projection)
for b in bands:
raster.GetRasterBand(b.value).SetNoDataValue(ndv)
_log.info("Writing band [%s] data to raster [%s]", band.name, path)
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
raster.GetRasterBand(band.value).WriteArray(masked_summary.filled(ndv), xoff=x, yoff=y)
raster.GetRasterBand(band.value).ComputeStatistics(True)
raster.FlushCache()
masked_summary = None
_log.debug("NONE-ing the masked summary")
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
stack = None
_log.debug("Just NONE-ed the stack")
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
raster = None
_log.debug("Just NONE'd the raster")
_log.debug("Current MAX RSS usage is [%d] MB", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
_log.info("Memory usage was [%d MB]", resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024)
_log.info("CPU time used [%s]", timedelta(seconds=int(resource.getrusage(resource.RUSAGE_SELF).ru_utime)))
def get_output_filename(self, dataset_type):
if dataset_type == DatasetType.WATER:
            return os.path.join(self.output_directory,
                                "LS_WOFS_SUMMARY_{x:03d}_{y:04d}_{acq_min}_{acq_max}.tif".format(x=self.x,
                                                                                                 y=self.y,
                                                                                                 acq_min=self.acq_min,
                                                                                                 acq_max=self.acq_max))
satellite_str = ""
if Satellite.LS5 in self.satellites or Satellite.LS7 in self.satellites or Satellite.LS8 in self.satellites:
satellite_str += "LS"
if Satellite.LS5 in self.satellites:
satellite_str += "5"
if Satellite.LS7 in self.satellites:
satellite_str += "7"
if Satellite.LS8 in self.satellites:
satellite_str += "8"
dataset_str = ""
if dataset_type == DatasetType.ARG25:
dataset_str += "NBAR"
elif dataset_type == DatasetType.PQ25:
dataset_str += "PQA"
elif dataset_type == DatasetType.FC25:
dataset_str += "FC"
elif dataset_type == DatasetType.WATER:
dataset_str += "WOFS"
if self.apply_pqa_filter and dataset_type != DatasetType.PQ25:
dataset_str += "_WITH_PQA"
return os.path.join(self.output_directory,
"{satellite}_{dataset}_SUMMARY_{x:03d}_{y:04d}_{acq_min}_{acq_max}.tif".format(
satellite=satellite_str, dataset=dataset_str, x=self.x,
y=self.y,
acq_min=self.acq_min,
acq_max=self.acq_max))
def decode_dataset_type(dataset_type):
return {DatasetType.ARG25: "Surface Reflectance",
DatasetType.PQ25: "Pixel Quality",
DatasetType.FC25: "Fractional Cover",
DatasetType.WATER: "WOFS Woffle",
DatasetType.NDVI: "NDVI",
DatasetType.EVI: "EVI",
DatasetType.NBR: "Normalised Burn Ratio"}[dataset_type]
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
SummariseDatasetTimeSeriesWorkflow("Summarise Dataset Time Series").run()
|
alex-ip/agdc
|
api-examples/source/main/python/tool/summarise_dataset_time_series.py
|
Python
|
bsd-3-clause
| 25,656 | 0.004249 |
"""LTI integration tests"""
import json
import urllib
from collections import OrderedDict
import mock
import oauthlib
from django.conf import settings
from django.core.urlresolvers import reverse
from nose.plugins.attrib import attr
from courseware.tests import BaseTestXmodule
from courseware.views.views import get_course_lti_endpoints
from openedx.core.lib.url_utils import quote_slashes
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.x_module import STUDENT_VIEW
@attr(shard=1)
class TestLTI(BaseTestXmodule):
"""
Integration test for lti xmodule.
It checks overall code, by assuring that context that goes to template is correct.
As part of that, checks oauth signature generation by mocking signing function
of `oauthlib` library.
"""
CATEGORY = "lti"
def setUp(self):
"""
Mock oauth1 signing of requests library for testing.
"""
super(TestLTI, self).setUp()
mocked_nonce = u'135685044251684026041377608307'
mocked_timestamp = u'1234567890'
mocked_signature_after_sign = u'my_signature%3D'
mocked_decoded_signature = u'my_signature='
# Note: this course_id is actually a course_key
context_id = self.item_descriptor.course_id.to_deprecated_string()
user_id = unicode(self.item_descriptor.xmodule_runtime.anonymous_student_id)
hostname = self.item_descriptor.xmodule_runtime.hostname
resource_link_id = unicode(urllib.quote('{}-{}'.format(hostname, self.item_descriptor.location.html_id())))
sourcedId = "{context}:{resource_link}:{user_id}".format(
context=urllib.quote(context_id),
resource_link=resource_link_id,
user_id=user_id
)
self.correct_headers = {
u'user_id': user_id,
u'oauth_callback': u'about:blank',
u'launch_presentation_return_url': '',
u'lti_message_type': u'basic-lti-launch-request',
u'lti_version': 'LTI-1p0',
u'roles': u'Student',
u'context_id': context_id,
u'resource_link_id': resource_link_id,
u'lis_result_sourcedid': sourcedId,
u'oauth_nonce': mocked_nonce,
u'oauth_timestamp': mocked_timestamp,
u'oauth_consumer_key': u'',
u'oauth_signature_method': u'HMAC-SHA1',
u'oauth_version': u'1.0',
u'oauth_signature': mocked_decoded_signature
}
saved_sign = oauthlib.oauth1.Client.sign
self.expected_context = {
'display_name': self.item_descriptor.display_name,
'input_fields': self.correct_headers,
'element_class': self.item_descriptor.category,
'element_id': self.item_descriptor.location.html_id(),
'launch_url': u'http://www.example.com', # default value
'open_in_a_new_page': True,
'form_url': self.item_descriptor.xmodule_runtime.handler_url(self.item_descriptor,
'preview_handler').rstrip('/?'),
'hide_launch': False,
'has_score': False,
'module_score': None,
'comment': u'',
'weight': 1.0,
'ask_to_send_username': self.item_descriptor.ask_to_send_username,
'ask_to_send_email': self.item_descriptor.ask_to_send_email,
'description': self.item_descriptor.description,
'button_text': self.item_descriptor.button_text,
'accept_grades_past_due': self.item_descriptor.accept_grades_past_due,
}
def mocked_sign(self, *args, **kwargs):
"""
Mocked oauth1 sign function.
"""
# self is <oauthlib.oauth1.rfc5849.Client object> here:
__, headers, __ = saved_sign(self, *args, **kwargs)
# we should replace nonce, timestamp and signed_signature in headers:
old = headers[u'Authorization']
old_parsed = OrderedDict([param.strip().replace('"', '').split('=') for param in old.split(',')])
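            # splitting on '=' keeps the auth scheme prefix on the first key,
            # hence the u'OAuth oauth_nonce' key below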
old_parsed[u'OAuth oauth_nonce'] = mocked_nonce
old_parsed[u'oauth_timestamp'] = mocked_timestamp
old_parsed[u'oauth_signature'] = mocked_signature_after_sign
headers[u'Authorization'] = ', '.join([k + '="' + v + '"' for k, v in old_parsed.items()])
return None, headers, None
patcher = mock.patch.object(oauthlib.oauth1.Client, "sign", mocked_sign)
patcher.start()
self.addCleanup(patcher.stop)
def test_lti_constructor(self):
generated_content = self.item_descriptor.render(STUDENT_VIEW).content
expected_content = self.runtime.render_template('lti.html', self.expected_context)
self.assertEqual(generated_content, expected_content)
def test_lti_preview_handler(self):
generated_content = self.item_descriptor.preview_handler(None, None).body
expected_content = self.runtime.render_template('lti_form.html', self.expected_context)
self.assertEqual(generated_content, expected_content)
@attr(shard=1)
class TestLTIModuleListing(SharedModuleStoreTestCase):
"""
a test for the rest endpoint that lists LTI modules in a course
"""
# arbitrary constant
COURSE_SLUG = "100"
COURSE_NAME = "test_course"
@classmethod
def setUpClass(cls):
super(TestLTIModuleListing, cls).setUpClass()
cls.course = CourseFactory.create(display_name=cls.COURSE_NAME, number=cls.COURSE_SLUG)
cls.chapter1 = ItemFactory.create(
parent_location=cls.course.location,
display_name="chapter1",
category='chapter')
cls.section1 = ItemFactory.create(
parent_location=cls.chapter1.location,
display_name="section1",
category='sequential')
cls.chapter2 = ItemFactory.create(
parent_location=cls.course.location,
display_name="chapter2",
category='chapter')
cls.section2 = ItemFactory.create(
parent_location=cls.chapter2.location,
display_name="section2",
category='sequential')
# creates one draft and one published lti module, in different sections
cls.lti_published = ItemFactory.create(
parent_location=cls.section1.location,
display_name="lti published",
category="lti",
location=cls.course.id.make_usage_key('lti', 'lti_published'),
)
cls.lti_draft = ItemFactory.create(
parent_location=cls.section2.location,
display_name="lti draft",
category="lti",
location=cls.course.id.make_usage_key('lti', 'lti_draft'),
publish_item=False,
)
def expected_handler_url(self, handler):
"""convenience method to get the reversed handler urls"""
return "https://{}{}".format(settings.SITE_NAME, reverse(
'courseware.module_render.handle_xblock_callback_noauth',
args=[
self.course.id.to_deprecated_string(),
quote_slashes(unicode(self.lti_published.scope_ids.usage_id.to_deprecated_string()).encode('utf-8')),
handler
]
))
def test_lti_rest_bad_course(self):
"""Tests what happens when the lti listing rest endpoint gets a bad course_id"""
bad_ids = [u"sf", u"dne/dne/dne", u"fo/ey/\\u5305"]
for bad_course_id in bad_ids:
lti_rest_endpoints_url = 'courses/{}/lti_rest_endpoints/'.format(bad_course_id)
response = self.client.get(lti_rest_endpoints_url)
self.assertEqual(404, response.status_code)
def test_lti_rest_listing(self):
"""tests that the draft lti module is part of the endpoint response"""
request = mock.Mock()
request.method = 'GET'
response = get_course_lti_endpoints(request, course_id=self.course.id.to_deprecated_string())
self.assertEqual(200, response.status_code)
self.assertEqual('application/json', response['Content-Type'])
expected = {
"lti_1_1_result_service_xml_endpoint": self.expected_handler_url('grade_handler'),
"lti_2_0_result_service_json_endpoint":
self.expected_handler_url('lti_2_0_result_rest_handler') + "/user/{anon_user_id}",
"display_name": self.lti_published.display_name,
}
self.assertEqual([expected], json.loads(response.content))
def test_lti_rest_non_get(self):
"""tests that the endpoint returns 404 when hit with NON-get"""
DISALLOWED_METHODS = ("POST", "PUT", "DELETE", "HEAD", "OPTIONS") # pylint: disable=invalid-name
for method in DISALLOWED_METHODS:
request = mock.Mock()
request.method = method
response = get_course_lti_endpoints(request, self.course.id.to_deprecated_string())
self.assertEqual(405, response.status_code)
|
miptliot/edx-platform
|
lms/djangoapps/courseware/tests/test_lti_integration.py
|
Python
|
agpl-3.0
| 9,169 | 0.002508 |
from xblock.fragment import Fragment
from xmodule.x_module import XModule
from xmodule.seq_module import SequenceDescriptor
from xmodule.progress import Progress
from pkg_resources import resource_string
# HACK: This shouldn't be hard-coded to two types
# OBSOLETE: This obsoletes 'type'
class_priority = ['video', 'problem']
class VerticalFields(object):
has_children = True
class VerticalModule(VerticalFields, XModule):
''' Layout module for laying out submodules vertically.'''
def student_view(self, context):
fragment = Fragment()
contents = []
for child in self.get_display_items():
rendered_child = child.render('student_view', context)
fragment.add_frag_resources(rendered_child)
contents.append({
'id': child.id,
'content': rendered_child.content
})
fragment.add_content(self.system.render_template('vert_module.html', {
'items': contents
}))
return fragment
def mobi_student_view(self, context):
fragment = Fragment()
contents = []
for child in self.get_display_items():
rendered_child = child.render('mobi_student_view', context)
fragment.add_frag_resources(rendered_child)
contents.append({
'id': child.id,
'content': rendered_child.content
})
fragment.add_content(self.system.render_template('vert_module.html', {
'items': contents
}))
return fragment
def get_progress(self):
# TODO: Cache progress or children array?
children = self.get_children()
progresses = [child.get_progress() for child in children]
progress = reduce(Progress.add_counts, progresses, None)
return progress
def get_icon_class(self):
child_classes = set(child.get_icon_class() for child in self.get_children())
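        # the last matching entry in class_priority wins; fall back to 'other'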
new_class = 'other'
for c in class_priority:
if c in child_classes:
new_class = c
return new_class
class VerticalDescriptor(VerticalFields, SequenceDescriptor):
module_class = VerticalModule
js = {'coffee': [resource_string(__name__, 'js/src/vertical/edit.coffee')]}
js_module_name = "VerticalDescriptor"
# TODO (victor): Does this need its own definition_to_xml method? Otherwise it looks
# like verticals will get exported as sequentials...
|
XiaodunServerGroup/ddyedx
|
common/lib/xmodule/xmodule/vertical_module.py
|
Python
|
agpl-3.0
| 2,477 | 0.002019 |
# coding=utf-8
# Copyright 2021 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import random
import unittest
import numpy as np
from transformers import is_speech_available
from transformers.testing_utils import require_torch, require_torchaudio
from ..test_sequence_feature_extraction_common import SequenceFeatureExtractionTestMixin
if is_speech_available():
from transformers import Speech2TextFeatureExtractor
global_rng = random.Random()
def floats_list(shape, scale=1.0, rng=None, name=None):
    """Creates a nested list of random floats in [0, scale)"""
    if rng is None:
        rng = global_rng
    values = []
    for _ in range(shape[0]):
values.append([])
for _ in range(shape[1]):
values[-1].append(rng.random() * scale)
return values
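# e.g. floats_list((2, 3)) returns two lists of three random floats in [0.0, 1.0)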
@require_torch
@require_torchaudio
class Speech2TextFeatureExtractionTester(unittest.TestCase):
def __init__(
self,
parent,
batch_size=7,
min_seq_length=400,
max_seq_length=2000,
feature_size=24,
num_mel_bins=24,
padding_value=0.0,
sampling_rate=16_000,
return_attention_mask=True,
do_normalize=True,
):
self.parent = parent
self.batch_size = batch_size
self.min_seq_length = min_seq_length
self.max_seq_length = max_seq_length
self.seq_length_diff = (self.max_seq_length - self.min_seq_length) // (self.batch_size - 1)
self.feature_size = feature_size
self.num_mel_bins = num_mel_bins
self.padding_value = padding_value
self.sampling_rate = sampling_rate
self.return_attention_mask = return_attention_mask
self.do_normalize = do_normalize
def prepare_feat_extract_dict(self):
return {
"feature_size": self.feature_size,
"num_mel_bins": self.num_mel_bins,
"padding_value": self.padding_value,
"sampling_rate": self.sampling_rate,
"return_attention_mask": self.return_attention_mask,
"do_normalize": self.do_normalize,
}
def prepare_inputs_for_common(self, equal_length=False, numpify=False):
def _flatten(list_of_lists):
return list(itertools.chain(*list_of_lists))
if equal_length:
speech_inputs = [floats_list((self.max_seq_length, self.feature_size)) for _ in range(self.batch_size)]
else:
# make sure that inputs increase in size
speech_inputs = [
floats_list((x, self.feature_size))
for x in range(self.min_seq_length, self.max_seq_length, self.seq_length_diff)
]
if numpify:
speech_inputs = [np.asarray(x) for x in speech_inputs]
return speech_inputs
@require_torch
@require_torchaudio
class Speech2TextFeatureExtractionTest(SequenceFeatureExtractionTestMixin, unittest.TestCase):
feature_extraction_class = Speech2TextFeatureExtractor if is_speech_available() else None
def setUp(self):
self.feat_extract_tester = Speech2TextFeatureExtractionTester(self)
def _check_zero_mean_unit_variance(self, input_vector):
self.assertTrue(np.all(np.mean(input_vector, axis=0) < 1e-3))
self.assertTrue(np.all(np.abs(np.var(input_vector, axis=0) - 1) < 1e-3))
def test_call(self):
        # Tests that all calls route through encode_plus and batch_encode_plus
feature_extractor = self.feature_extraction_class(**self.feat_extract_tester.prepare_feat_extract_dict())
# create three inputs of length 800, 1000, and 1200
speech_inputs = [floats_list((1, x))[0] for x in range(800, 1400, 200)]
np_speech_inputs = [np.asarray(speech_input) for speech_input in speech_inputs]
# Test feature size
input_features = feature_extractor(np_speech_inputs, padding=True, return_tensors="np").input_features
self.assertTrue(input_features.ndim == 3)
self.assertTrue(input_features.shape[-1] == feature_extractor.feature_size)
# Test not batched input
encoded_sequences_1 = feature_extractor(speech_inputs[0], return_tensors="np").input_features
encoded_sequences_2 = feature_extractor(np_speech_inputs[0], return_tensors="np").input_features
self.assertTrue(np.allclose(encoded_sequences_1, encoded_sequences_2, atol=1e-3))
# Test batched
encoded_sequences_1 = feature_extractor(speech_inputs, return_tensors="np").input_features
encoded_sequences_2 = feature_extractor(np_speech_inputs, return_tensors="np").input_features
for enc_seq_1, enc_seq_2 in zip(encoded_sequences_1, encoded_sequences_2):
self.assertTrue(np.allclose(enc_seq_1, enc_seq_2, atol=1e-3))
def test_cepstral_mean_and_variance_normalization(self):
feature_extractor = self.feature_extraction_class(**self.feat_extract_tester.prepare_feat_extract_dict())
speech_inputs = [floats_list((1, x))[0] for x in range(800, 1400, 200)]
paddings = ["longest", "max_length", "do_not_pad"]
max_lengths = [None, 16, None]
for max_length, padding in zip(max_lengths, paddings):
inputs = feature_extractor(
speech_inputs, padding=padding, max_length=max_length, return_attention_mask=True
)
input_features = inputs.input_features
attention_mask = inputs.attention_mask
fbank_feat_lengths = [np.sum(x) for x in attention_mask]
self._check_zero_mean_unit_variance(input_features[0][: fbank_feat_lengths[0]])
self._check_zero_mean_unit_variance(input_features[1][: fbank_feat_lengths[1]])
self._check_zero_mean_unit_variance(input_features[2][: fbank_feat_lengths[2]])
def test_cepstral_mean_and_variance_normalization_np(self):
feature_extractor = self.feature_extraction_class(**self.feat_extract_tester.prepare_feat_extract_dict())
speech_inputs = [floats_list((1, x))[0] for x in range(800, 1400, 200)]
paddings = ["longest", "max_length", "do_not_pad"]
max_lengths = [None, 16, None]
for max_length, padding in zip(max_lengths, paddings):
inputs = feature_extractor(
speech_inputs, max_length=max_length, padding=padding, return_tensors="np", return_attention_mask=True
)
input_features = inputs.input_features
attention_mask = inputs.attention_mask
fbank_feat_lengths = [np.sum(x) for x in attention_mask]
self._check_zero_mean_unit_variance(input_features[0][: fbank_feat_lengths[0]])
self.assertTrue(input_features[0][fbank_feat_lengths[0] :].sum() < 1e-6)
self._check_zero_mean_unit_variance(input_features[1][: fbank_feat_lengths[1]])
            self.assertTrue(input_features[1][fbank_feat_lengths[1] :].sum() < 1e-6)
self._check_zero_mean_unit_variance(input_features[2][: fbank_feat_lengths[2]])
def test_cepstral_mean_and_variance_normalization_trunc_max_length(self):
feature_extractor = self.feature_extraction_class(**self.feat_extract_tester.prepare_feat_extract_dict())
speech_inputs = [floats_list((1, x))[0] for x in range(800, 1400, 200)]
inputs = feature_extractor(
speech_inputs,
padding="max_length",
max_length=4,
truncation=True,
return_tensors="np",
return_attention_mask=True,
)
input_features = inputs.input_features
attention_mask = inputs.attention_mask
fbank_feat_lengths = np.sum(attention_mask == 1, axis=1)
self._check_zero_mean_unit_variance(input_features[0, : fbank_feat_lengths[0]])
self._check_zero_mean_unit_variance(input_features[1])
self._check_zero_mean_unit_variance(input_features[2])
def test_cepstral_mean_and_variance_normalization_trunc_longest(self):
feature_extractor = self.feature_extraction_class(**self.feat_extract_tester.prepare_feat_extract_dict())
speech_inputs = [floats_list((1, x))[0] for x in range(800, 1400, 200)]
inputs = feature_extractor(
speech_inputs,
padding="longest",
max_length=4,
truncation=True,
return_tensors="np",
return_attention_mask=True,
)
input_features = inputs.input_features
attention_mask = inputs.attention_mask
fbank_feat_lengths = np.sum(attention_mask == 1, axis=1)
self._check_zero_mean_unit_variance(input_features[0, : fbank_feat_lengths[0]])
self._check_zero_mean_unit_variance(input_features[1, : fbank_feat_lengths[1]])
self._check_zero_mean_unit_variance(input_features[2])
# make sure that if max_length < longest -> then pad to max_length
self.assertEqual(input_features.shape, (3, 4, 24))
speech_inputs = [floats_list((1, x))[0] for x in range(800, 1400, 200)]
inputs = feature_extractor(
speech_inputs,
padding="longest",
max_length=16,
truncation=True,
return_tensors="np",
return_attention_mask=True,
)
input_features = inputs.input_features
attention_mask = inputs.attention_mask
fbank_feat_lengths = np.sum(attention_mask == 1, axis=1)
self._check_zero_mean_unit_variance(input_features[0, : fbank_feat_lengths[0]])
self._check_zero_mean_unit_variance(input_features[1, : fbank_feat_lengths[1]])
self._check_zero_mean_unit_variance(input_features[2])
# make sure that if max_length < longest -> then pad to max_length
self.assertEqual(input_features.shape, (3, 6, 24))
def test_double_precision_pad(self):
import torch
feature_extractor = self.feature_extraction_class(**self.feat_extract_tester.prepare_feat_extract_dict())
np_speech_inputs = np.random.rand(100, 32).astype(np.float64)
py_speech_inputs = np_speech_inputs.tolist()
for inputs in [py_speech_inputs, np_speech_inputs]:
np_processed = feature_extractor.pad([{"input_features": inputs}], return_tensors="np")
self.assertTrue(np_processed.input_features.dtype == np.float32)
pt_processed = feature_extractor.pad([{"input_features": inputs}], return_tensors="pt")
self.assertTrue(pt_processed.input_features.dtype == torch.float32)
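# A minimal usage sketch (editor's addition; assumes torch and torchaudio are
# installed so that Speech2TextFeatureExtractor is importable):
#
#   extractor = Speech2TextFeatureExtractor(feature_size=24, num_mel_bins=24)
#   feats = extractor(floats_list((1, 800))[0], sampling_rate=16000,
#                     return_tensors="np")
#   feats.input_features.shape  # (1, num_frames, 24)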
|
huggingface/transformers
|
tests/speech_to_text/test_feature_extraction_speech_to_text.py
|
Python
|
apache-2.0
| 10,984 | 0.003733 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    # use r'^' (not r'^$') so the included launch.urls remain reachable
    url(r'^', include('launch.urls', namespace="launch", app_name="launch")),
url(r'^admin/', include(admin.site.urls)),
)
|
eltonsantos/dom
|
dom/urls.py
|
Python
|
mit
| 263 | 0.003802 |
#!/usr/bin/env python
import pyflag.IO as IO
import pyflag.Registry as Registry
Registry.Init()
import pyflag.FileSystem as FileSystem
from pyflag.FileSystem import DBFS
case = "demo"
## This gives us a handle to the VFS
fsfd = Registry.FILESYSTEMS.fs['DBFS'](case)
## WE just open a file in the VFS:
#fd=fsfd.open(inode="Itest|S1/2")
## And read it
#print fd.read()
|
naototty/pyflag
|
tests/init.py
|
Python
|
gpl-2.0
| 364 | 0.019231 |
"""
Provides the base class for all quadrature rules.
"""
import numpy as np
import copy
class QuadRule(object):
"""
Provides an abstract base class for all quadrature rules.
Parameters
----------
order : int
The polynomial order up to which the quadrature should be exact
"""
def __init__(self, order, dimension):
self._order = order
self._dimension = dimension
self._points = [None] * (dimension + 1)
self._weights = [None] * (dimension + 1)
self._set_data()
def _set_data(self):
"""
Sets the quadrature points and weights.
"""
raise NotImplementedError()
@property
def order(self):
return self._order
@property
def dimension(self):
return self._dimension
@property
def points(self):
return copy.deepcopy(self._points)
@property
def weights(self):
return copy.deepcopy(self._weights)
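# A sketch of a concrete subclass (editor's addition, not part of pysofe): a
# one-point midpoint rule on the reference interval [0, 1], exact for
# polynomials up to order 1. The convention that points and weights are
# stored per entity dimension is inferred from the constructor above.
class MidpointRule(QuadRule):
    def __init__(self):
        super(MidpointRule, self).__init__(order=1, dimension=1)
    def _set_data(self):
        # a single quadrature point at the interval midpoint, full weight
        self._points[1] = np.array([[0.5]])
        self._weights[1] = np.array([1.0])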
|
pysofe/pysofe
|
pysofe/quadrature/base.py
|
Python
|
bsd-3-clause
| 977 | 0.002047 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for BatchDeleteIntents
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_generated_dialogflow_v2_Intents_BatchDeleteIntents_sync]
from google.cloud import dialogflow_v2
def sample_batch_delete_intents():
# Create a client
client = dialogflow_v2.IntentsClient()
# Initialize request argument(s)
intents = dialogflow_v2.Intent()
intents.display_name = "display_name_value"
request = dialogflow_v2.BatchDeleteIntentsRequest(
parent="parent_value",
intents=intents,
)
# Make the request
operation = client.batch_delete_intents(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
# [END dialogflow_generated_dialogflow_v2_Intents_BatchDeleteIntents_sync]
|
googleapis/python-dialogflow
|
samples/generated_samples/dialogflow_generated_dialogflow_v2_intents_batch_delete_intents_sync.py
|
Python
|
apache-2.0
| 1,696 | 0.00059 |
# django
default_app_config = "zentral.contrib.okta.apps.ZentralOktaAppConfig"
|
zentralopensource/zentral
|
zentral/contrib/okta/__init__.py
|
Python
|
apache-2.0
| 79 | 0 |
import os
import ConfigParser
import click
from base64 import b64encode
import requests
import json
class B3Notify(object):
"""
Build status notifier for bitbucket server
"""
def __init__(self, home='~/.b3notifyrc'):
self.home = home
self.verbose = True
self.config = {}
self.build_url = ''
self.key = ''
self.name = ''
self.commit = ''
self.auth = ''
def read_configuration(self, profile='default'):
config = ConfigParser.ConfigParser()
config.read([
os.path.expanduser('~/.b3notifyrc'),
'.b3notifyrc',
os.path.expanduser('{0}'.format(self.home)),
])
self.url = config.get(profile, 'url').strip("'")
self.username = config.get(profile, 'username').strip("'")
self.password = config.get(profile, 'password').strip("'")
self.auth = '{0}'.format(
b64encode('{0}:{1}'.format(self.username, self.password))
)
@property
def headers(self):
return {
'Content-Type': 'application/json',
'Authorization': 'Basic {0}'.format(self.auth)
}
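    # Editor's note: for credentials "user:pass" this yields the header
    # 'Authorization: Basic dXNlcjpwYXNz'.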
def notify(
self, commit, build_url, build_key, build_name,
build_state='FAIL'):
data = {
# <INPROGRESS|SUCCESSFUL|FAILED>",
'state': build_state,
'key': build_key,
'name': build_name,
'url': build_url
}
self.commit_url = '{0}{1}'.format(self.url, commit)
response = requests.post(
self.commit_url,
headers=self.headers,
data=json.dumps(data))
return response
@click.command()
@click.option(
'--config-file', envvar='CONFIG_FILE', default='.',
help='Location to find configuration file')
@click.option(
'--profile', default='default',
help='Profile to use for credentials')
@click.option(
'--host', '-h',
help='Server URL')
@click.option(
'--verbose', '-v', is_flag=True,
help='Enable verbose mode')
@click.option(
'--success', '-s', is_flag=True, default=False,
help='Notify build success')
@click.option(
'--fail', '-f', is_flag=True, default=False,
help='Notify build failure')
@click.option(
'--progress', '-p', is_flag=True, default=False,
help='Notify inprogress build')
@click.option(
'--commit', '-c', envvar='GIT_COMMIT',
help='Hash value of the commit')
@click.option(
'--build-url', '-b', envvar='BUILD_URL',
help='Current build url')
@click.option(
'--key', '-k', envvar='BUILD_TAG',
help='Build key')
@click.option(
'--name', '-n', envvar='BUILD_DISPLAY_NAME',
help='Build name')
@click.option(
'--auth', '-a', envvar='BUILD_AUTH', required=False,
help='Base64 encoded string of username:password')
def cli(
config_file, profile, host, verbose, success, fail, progress,
commit, build_url, key, name, auth):
"""
Build status notifier for bitbucket server
"""
build_state = 'INPROGRESS'
notify = B3Notify(config_file)
notify.read_configuration(profile=profile)
notify.verbose = verbose
if host is not None:
notify.url = host
if auth is not None:
notify.auth = auth
if success is True:
build_state = 'SUCCESSFUL'
if fail is True:
build_state = 'FAILED'
response = notify.notify(
commit=commit,
build_url=build_url,
build_key=key,
build_name=name,
build_state=build_state)
print response.status_code, response.text
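# Example invocation (editor's sketch; all values are hypothetical):
#
#   python b3notify.py --profile default --success \
#       --commit 3fa1b2c --build-url http://ci.example.com/job/demo/42/ \
#       --key jenkins-demo-42 --name '#42'
#
# This POSTs {"state": "SUCCESSFUL", "key": ..., "name": ..., "url": ...} as
# JSON to <server url><commit hash>, authenticating with the credentials
# read from ~/.b3notifyrc.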
|
xydinesh/b3notify
|
b3notify.py
|
Python
|
apache-2.0
| 3,613 | 0 |
# import the libraries that you need
import requests
import csv
# make a GET request to the OneSearch X-Service API
response = requests.get('http://onesearch.cuny.edu/PrimoWebServices'
'/xservice/search/brief?'
'&institution=KB'
'&query=any,contains,obama'
'&query=facet_rtype,exact,books'
'&loc=adaptor,primo_central_multiple_fe'
'&loc=local,scope:(KB,AL,CUNY_BEPRESS)'
'&json=true')
# take the JSON from the response
# and store it in a variable called alldata
alldata = response.json()
# drill down into a smaller subset of the json
# and print this smaller bit of json
somedata = alldata['SEGMENTS']['JAGROOT']['RESULT']['FACETLIST']['FACET']\
[1]['FACET_VALUES']
print(somedata)
# open a file called mycsv.csv, then loop through the data
# and write to that file
with open('mycsv.csv', 'wb') as f:
writer = csv.writer(f)
for x in somedata:
writer.writerow([x['@KEY'], x['@VALUE']])
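# Each row of mycsv.csv holds one facet entry as "key,value" (a facet label
# and its hit count); the exact values depend on the live OneSearch index.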
|
MarkEEaton/api-workshop
|
4-json-to-csv.py
|
Python
|
mit
| 1,093 | 0.000915 |
from __future__ import print_function, division
import numpy as np
class Tuning():
"""
    Equal temperament tuning - allows converting between frequency and pitch.
    - unit pitch space
        - continuous, unbounded
        - 1.0 ~ one octave
    - step pitch space
        - continuous, unbounded
        - N steps ~ one octave
        - unit pitch space * N
    - unit pitch class space
        - continuous, bounded [0, 1.0)
        - unit pitch space % 1.0
    - step pitch class space
        - continuous, bounded [0, N)
        - unit step pitch space % N
- integer step pitch space
- discrete, unbounded
- floor(step pitch space)
- integer step pitch class space
- discrete, bounded {0, 1, .. N - 1}
- floor(step pitch class space)
"""
def __init__(self, base_freq=440, steps_per_octave=12, octave_ratio=2):
self.base_freq = base_freq
self.steps_per_octave = steps_per_octave
self.octave_ratio = octave_ratio
def pitch_to_freq(self, pitch):
factor = self.pitch_to_relative_freq(pitch)
return factor * self.base_freq
def freq_to_pitch(self, freq):
rel_freq = freq / self.base_freq
if self.octave_ratio == 2:
p = np.log2(rel_freq)
else:
            p = np.log(rel_freq) / np.log(self.octave_ratio)  # log base = octave ratio
return p * self.steps_per_octave
def pitch_to_relative_freq(self, pitch):
return pow(self.octave_ratio, pitch / self.steps_per_octave)
class PitchQuantizer():
def __init__(self, tuning, bin_division=1):
self.tuning = tuning
self.bin_division = bin_division
def quantize(self, freqs):
"""
Quantizes frequencies to nearest pitch bins (with optional division of
bins).
"""
return np.round(self.tuning.freq_to_pitch(freqs) * self.bin_division) / self.bin_division
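# A short worked example (editor's addition, not in the original module):
if __name__ == "__main__":
    t = Tuning()  # A4 = 440 Hz, 12 steps per octave, octave ratio 2
    print(t.pitch_to_freq(12))     # one octave above the base -> 880.0
    print(t.freq_to_pitch(880.0))  # -> 12.0
    q = PitchQuantizer(t, bin_division=2)
    # 452 Hz is ~0.47 semitones above A4, so it quantizes to the 0.5 bin
    print(q.quantize(np.array([452.0])))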
|
bzamecnik/tfr
|
tfr/tuning.py
|
Python
|
mit
| 1,850 | 0.001622 |
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class WebGLConformanceExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('gl-enable-vertex-attrib.html',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
# Fails everywhere.
self.Skip('conformance/glsl/misc/large-loop-compile.html',
bug=322764)
self.Skip('conformance/textures/texture-size-limit.html',
bug=322789)
# Windows failures.
self.Fail('conformance/ogles/GL/atan/atan_001_to_008.html',
['win'], bug=322794)
self.Fail('conformance/ogles/GL/atan/atan_009_to_012.html',
['win'], bug=322794)
self.Skip('conformance/ogles/GL/control_flow/control_flow_001_to_008.html',
['win'], bug=322795)
# Windows/Intel failures
self.Fail('conformance/textures/texture-size.html',
['win', 'intel'], bug=121139)
self.Fail('conformance/rendering/gl-scissor-test.html',
['win', 'intel'], bug=314997)
# Windows/AMD failures
self.Fail('conformance/rendering/more-than-65536-indices.html',
['win', 'amd'], bug=314997)
# Windows 7/Intel failures
self.Fail('conformance/context/context-lost-restored.html',
['win7', 'intel'])
self.Fail('conformance/context/premultiplyalpha-test.html',
['win7', 'intel'])
self.Fail('conformance/extensions/oes-texture-float-with-image-data.html',
['win7', 'intel'])
self.Fail('conformance/extensions/oes-texture-float.html',
['win7', 'intel'])
self.Fail('conformance/limits/gl-min-attribs.html',
['win7', 'intel'])
self.Fail('conformance/limits/gl-max-texture-dimensions.html',
['win7', 'intel'])
self.Fail('conformance/limits/gl-min-textures.html',
['win7', 'intel'])
self.Fail('conformance/limits/gl-min-uniforms.html',
['win7', 'intel'])
self.Fail('conformance/rendering/gl-clear.html',
['win7', 'intel'])
self.Fail('conformance/textures/copy-tex-image-and-sub-image-2d.html',
['win7', 'intel'])
self.Fail('conformance/textures/gl-teximage.html',
['win7', 'intel'])
self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-array-buffer-view.html',
['win7', 'intel'])
self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data.html',
['win7', 'intel'])
self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgb565.html',
['win7', 'intel'])
self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba4444.html',
['win7', 'intel'])
self.Fail('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba5551.html',
['win7', 'intel'])
self.Fail('conformance/textures/tex-image-with-format-and-type.html',
['win7', 'intel'])
self.Fail('conformance/textures/tex-sub-image-2d.html',
['win7', 'intel'])
self.Fail('conformance/textures/texparameter-test.html',
['win7', 'intel'])
self.Fail('conformance/textures/texture-active-bind-2.html',
['win7', 'intel'])
self.Fail('conformance/textures/texture-active-bind.html',
['win7', 'intel'])
self.Fail('conformance/textures/texture-complete.html',
['win7', 'intel'])
self.Fail('conformance/textures/texture-formats-test.html',
['win7', 'intel'])
self.Fail('conformance/textures/texture-mips.html',
['win7', 'intel'])
self.Fail('conformance/textures/texture-npot.html',
['win7', 'intel'])
self.Fail('conformance/textures/texture-size-cube-maps.html',
['win7', 'intel'])
self.Fail('conformance/context/context-attribute-preserve-drawing-buffer.html',
['win7', 'intel'], bug=322770)
# Mac failures.
self.Fail('conformance/glsl/misc/shaders-with-varyings.html',
['mac'], bug=322760)
self.Fail('conformance/context/context-attribute-preserve-drawing-buffer.html',
['mac'], bug=322770)
self.Skip('conformance/ogles/GL/control_flow/control_flow_001_to_008.html',
['mac'], bug=322795)
# Mac/Intel failures
self.Fail('conformance/rendering/gl-scissor-test.html',
['mac', 'intel'], bug=314997)
# The following two tests hang the WindowServer.
self.Skip('conformance/canvas/drawingbuffer-static-canvas-test.html',
['mac', 'intel'], bug=303915)
self.Skip('conformance/canvas/drawingbuffer-test.html',
['mac', 'intel'], bug=303915)
# The following three tests only fail.
# Radar 13499677
self.Fail(
'conformance/glsl/functions/glsl-function-smoothstep-gentype.html',
['mac', 'intel'], bug=225642)
# Radar 13499466
self.Fail('conformance/limits/gl-max-texture-dimensions.html',
['mac', 'intel'], bug=225642)
# Radar 13499623
self.Fail('conformance/textures/texture-size.html',
['mac', 'intel'], bug=225642)
self.Skip('conformance/ogles/GL/control_flow/control_flow_009_to_010.html',
['mac', 'intel'], bug=322795)
self.Fail('conformance/ogles/GL/operators/operators_009_to_016.html',
['mac', 'intel'], bug=322795)
# Mac/Intel failures on 10.7
self.Skip('conformance/glsl/functions/glsl-function-asin.html',
['lion', 'intel'])
self.Skip('conformance/glsl/functions/glsl-function-dot.html',
['lion', 'intel'])
self.Skip('conformance/glsl/functions/glsl-function-faceforward.html',
['lion', 'intel'])
self.Skip('conformance/glsl/functions/glsl-function-length.html',
['lion', 'intel'])
self.Skip('conformance/glsl/functions/glsl-function-normalize.html',
['lion', 'intel'])
self.Skip('conformance/glsl/functions/glsl-function-reflect.html',
['lion', 'intel'])
self.Skip(
'conformance/glsl/functions/glsl-function-smoothstep-gentype.html',
['lion', 'intel'])
self.Skip('conformance/limits/gl-max-texture-dimensions.html',
['lion', 'intel'])
self.Skip('conformance/rendering/line-loop-tri-fan.html',
['lion', 'intel'])
self.Skip('conformance/ogles/GL/control_flow/control_flow_009_to_010.html',
['lion'], bug=322795)
self.Skip('conformance/ogles/GL/dot/dot_001_to_006.html',
['lion', 'intel'], bug=323736)
self.Skip('conformance/ogles/GL/faceforward/faceforward_001_to_006.html',
['lion', 'intel'], bug=323736)
self.Skip('conformance/ogles/GL/length/length_001_to_006.html',
['lion', 'intel'], bug=323736)
self.Skip('conformance/ogles/GL/normalize/normalize_001_to_006.html',
['lion', 'intel'], bug=323736)
self.Skip('conformance/ogles/GL/reflect/reflect_001_to_006.html',
['lion', 'intel'], bug=323736)
self.Skip('conformance/ogles/GL/refract/refract_001_to_006.html',
['lion', 'intel'], bug=323736)
self.Skip('conformance/ogles/GL/tan/tan_001_to_006.html',
['lion', 'intel'], bug=323736)
# Mac/ATI failures
self.Skip('conformance/extensions/oes-texture-float-with-image-data.html',
['mac', 'amd'], bug=308328)
self.Skip('conformance/rendering/gl-clear.html',
['mac', 'amd'], bug=308328)
self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-array-buffer-view.html',
['mac', 'amd'], bug=308328)
self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data.html',
['mac', 'amd'], bug=308328)
self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgb565.html',
['mac', 'amd'], bug=308328)
self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba4444.html',
['mac', 'amd'], bug=308328)
self.Skip('conformance/textures/tex-image-and-sub-image-2d-with-image-data-rgba5551.html',
['mac', 'amd'], bug=308328)
self.Fail('conformance/canvas/drawingbuffer-test.html',
['mac', 'amd'], bug=314997)
# Linux/NVIDIA failures
self.Fail('conformance/glsl/misc/empty_main.vert.html',
['linux', ('nvidia', 0x1040)], bug=325884)
self.Fail('conformance/glsl/misc/gl_position_unset.vert.html',
['linux', ('nvidia', 0x1040)], bug=325884)
self.Fail('conformance/uniforms/uniform-location.html',
['linux', ('nvidia', 0x1040)], bug=325884)
# Android failures
self.Fail('conformance/textures/texture-npot-video.html',
['android'], bug=306485)
    # The following test is very slow and therefore times out on the Android bot.
self.Skip('conformance/rendering/multisample-corruption.html',
['android'])
self.Fail('conformance/canvas/drawingbuffer-test.html',
['android'], bug=314997)
self.Fail('conformance/glsl/misc/empty_main.vert.html',
['android'], bug=315976)
self.Fail('conformance/glsl/misc/gl_position_unset.vert.html',
['android'], bug=315976)
# Skip slow tests.
self.Skip('conformance/context/context-creation-and-destruction.html',
bug=322689)
self.Skip('conformance/rendering/multisample-corruption.html',
bug=322689)
|
qtekfun/htcDesire820Kernel
|
external/chromium_org/content/test/gpu/gpu_tests/webgl_conformance_expectations.py
|
Python
|
gpl-2.0
| 9,671 | 0.009616 |
'''
Pixie: FreeBSD virtualization guest configuration client
Copyright (C) 2011 The Hotel Communication Network inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import threading, Queue as queue, time, subprocess, shlex, datetime
import urllib, tarfile, os, shutil, tempfile, pwd
import cherrypy
from cherrypy.process import wspbus, plugins
from pixie.lib.jails import EzJail
from pixie.lib.interfaces import NetInterfaces
class SetupTask(object):
def __init__(self, puck, queue):
self.queue = queue
self._puck = puck
self.vm = puck.getVM()
def run(self):
raise NotImplementedError("`run` must be defined.")
def log(self, msg):
now = datetime.datetime.now()
cherrypy.log("%s %s" % (self.__class__.__name__, msg))
tpl = "%s\t%s\t%s"
date_format = "%Y-%m-%d %H:%M:%S"
cls = self.__class__.__name__
self.queue.put(tpl % (now.strftime(date_format), cls, msg))
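# A sketch of the smallest possible concrete task (editor's addition, not
# part of Puck): subclasses implement run() and return True on success.
class NoopTask(SetupTask):
    def run(self):
        self.log("nothing to do")
        return True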
class RcReader(object):
def _has_line(self, lines, line_start):
for line in lines:
if line.startswith(line_start):
return True
return False
def _get_rc_content(self):
rc = None
try:
with open('/etc/rc.conf', 'r') as f:
rc = f.readlines()
except IOError:
pass
if not rc:
raise RuntimeError("File `/etc/rc.conf` is empty!")
return rc
class EZJailTask(SetupTask, RcReader):
'''
Setups ezjail in the virtual machine.
'''
def run(self):
try:
self.log("Enabling EZJail.")
self._enable_ezjail()
self.log("Installing EZJail")
EzJail().install(cherrypy.config.get('setup_plugin.ftp_mirror'))
except (IOError, OSError) as e:
self.log("Error while installing ezjail: %s" % e)
return False
return True
def _enable_ezjail(self):
rc = self._get_rc_content()
if self._has_line(rc, 'ezjail_enable'):
self.log("EZJail is already enabled.")
return
self.log("Adding to rc: `%s`" % 'ezjail_enable="YES"')
'''if we get here, it means ezjail_enable is not in rc.conf'''
with open('/etc/rc.conf', 'a') as f:
f.write("ezjail_enable=\"YES\"\n")
class SSHTask(SetupTask):
'''Create the base user `puck` and add the authorized ssh keys'''
def run(self):
self._setup_ssh()
return True
def _setup_ssh(self):
if not self.vm.keys:
self.log("No keys to install.");
return True
#@TODO Could be moved to config values instead of hardcoded.
user = 'puck'
try:
pwd.getpwnam(user)
except KeyError as e:
cmd = 'pw user add %s -m -G wheel' % user
self.log("Adding user. Executing `%s`" % cmd)
subprocess.Popen(shlex.split(str(cmd))).wait()
user_pwd = pwd.getpwnam(user)
path = '/home/%s/.ssh' % user
authorized_file = "%s/authorized_keys" % path
if not os.path.exists(path):
os.mkdir(path)
os.chown(path, user_pwd.pw_uid, user_pwd.pw_gid)
with open(authorized_file, 'a') as f:
for key in self.vm.keys:
self.log("Writing key `%s`" % key)
f.write('%s\n' % self.vm.keys[key]['key'])
os.chmod(authorized_file, 0400)
os.chown(authorized_file, user_pwd.pw_uid, user_pwd.pw_gid)
os.chmod(path, 0700)
os.chmod('/home/%s' % user, 0700)
class FirewallSetupTask(SetupTask, RcReader):
def run(self):
        # TODO Move this to a configuration value from puck. Not high priority
pf_conf = '/etc/pf.rules.conf'
rc_conf = '/etc/rc.conf'
self.setup_rc(rc_conf, pf_conf)
self.setup_pf_conf(pf_conf)
self.launch_pf()
return True
def launch_pf(self):
        # Stop pf first in case it is already running, then start it again.
commands = ['/etc/rc.d/pf stop', '/etc/rc.d/pf start']
for command in commands:
self.log("Executing: `%s`" % command)
subprocess.Popen(shlex.split(str(command))).wait()
def setup_pf_conf(self, pf_conf):
rules = self.vm.firewall
if not rules:
self.log("No firewall to write.")
return False
self.log("Writing firewall rules at `%s`." % pf_conf)
with open(pf_conf, 'w') as f:
f.write(rules.replace('\r\n', '\n').replace('\r', '\n'))
f.flush()
def setup_rc(self, rc_conf, pf_conf):
#TODO Move this to a configuration value. Not high priority.
rc_items = {
'pf_enable' : 'YES',
'pf_rules' : pf_conf,
'pflog_enable' : 'YES',
'gateway_enable' : 'YES'
}
rc_present = []
rc = self._get_rc_content()
for line in rc:
for k in rc_items:
if line.startswith(k):
rc_present.append(k)
break
missing = set(rc_items.keys()) - set(rc_present)
tpl = 'Adding to rc: `%s="%s"`'
[self.log(tpl % (k, rc_items[k])) for k in missing]
template = '%s="%s"\n'
with open(rc_conf, 'a') as f:
[f.write(template % (k,rc_items[k])) for k in missing]
f.flush()
class InterfacesSetupTask(SetupTask, RcReader):
'''Configures network interfaces for the jails.'''
def run(self):
(netaddrs, missing) = self._get_missing_netaddrs()
self._add_missing_netaddrs(missing)
self._add_missing_rc(netaddrs)
return True
def _add_missing_rc(self, netaddrs):
rc_addresses = []
rc = self._get_rc_content()
alias_count = self._calculate_alias_count(rc_addresses, rc)
with open('/etc/rc.conf', 'a') as f:
for netaddr in netaddrs:
if self._add_rc_ip(rc_addresses, f, alias_count, netaddr):
alias_count += 1
def _add_missing_netaddrs(self, netaddrs):
for netaddr in netaddrs:
self.log("Registering new ip address `%s`" % netaddr['ip'])
self._add_addr(netaddr['ip'], netaddr['netmask'])
def _get_missing_netaddrs(self):
interfaces = NetInterfaces.getInterfaces()
missing = []
netaddrs = []
for jail in self.vm.jails:
netaddr = {'ip': jail.ip, 'netmask': jail.netmask}
netaddrs.append(netaddr)
if not interfaces.has_key(jail.ip):
missing.append(netaddr)
return (netaddrs, missing)
def _calculate_alias_count(self, addresses, rc):
alias_count = 0
for line in rc:
if line.startswith('ifconfig_%s_alias' % self.vm.interface):
alias_count += 1
addresses.append(line)
return alias_count
def _add_addr(self, ip, netmask):
cmd = "ifconfig %s alias %s netmask %s"
command = cmd % (self.vm.interface, ip, netmask)
self.log('executing: `%s`' % command)
subprocess.Popen(shlex.split(str(command))).wait()
def _add_rc_ip(self, rc_addresses, file, alias_count, netaddr):
for item in rc_addresses:
if item.find(netaddr['ip']) > 0:
self.log("rc already knows about ip `%s`" % netaddr['ip'])
return False
self.log("Registering new rc value for ip `%s`" % netaddr['ip'])
template = 'ifconfig_%s_alias%s="inet %s netmask %s"'
line = "%s\n" % template
values = (
self.vm.interface, alias_count, netaddr['ip'], netaddr['netmask']
)
file.write(line % values)
file.flush()
return True
class HypervisorSetupTask(SetupTask, RcReader):
'''
Setups a few hypervisor settings such as Shared Memory/IPC
'''
def run(self):
self._add_rc_settings()
self._add_sysctl_settings()
self._set_hostname()
return True
def _set_hostname(self):
self.log("Replacing hostname in /etc/rc.conf")
(fh, abspath) = tempfile.mkstemp()
tmp = open(abspath, 'w')
with open('/etc/rc.conf', 'r') as f:
for line in f:
if not line.startswith('hostname'):
tmp.write(line)
continue
tmp.write('hostname="%s"\n' % self.vm.name)
tmp.close()
os.close(fh)
os.remove('/etc/rc.conf')
shutil.move(abspath, '/etc/rc.conf')
os.chmod('/etc/rc.conf', 0644)
cmd = str('hostname %s' % self.vm.name)
self.log('Executing: `%s`' % cmd)
subprocess.Popen(shlex.split(cmd)).wait()
def _add_sysctl_settings(self):
sysvipc = cherrypy.config.get('hypervisor.jail_sysvipc_allow')
ipc_setting = 'security.jail.sysvipc_allowed'
self.log("Configuring sysctl")
with open('/etc/sysctl.conf', 'r') as f:
sysctl = f.readlines()
if sysvipc:
cmd = str("sysctl %s=1" % ipc_setting)
self.log('Executing: `%s`' % cmd)
subprocess.Popen(shlex.split(cmd)).wait()
if self._has_line(sysctl, ipc_setting):
self.log('SysV IPC already configured in sysctl.conf')
return
template = '%s=%s\n'
data = template % (ipc_setting, 1)
self.log('Adding to sysctl.conf: `%s`' % data)
with open('/etc/sysctl.conf', 'a') as f:
f.write(data)
def _add_rc_settings(self):
items = [
'jail_sysvipc_allow',
'syslogd_flags'
]
rc = self._get_rc_content()
# settings will contain items to be added to rc
settings = {}
for i in items:
value = cherrypy.config.get('hypervisor.%s' % i)
if not value:
continue
if self._has_line(rc, i):
continue
self.log('Adding to rc: `%s="%s"`' % (i, value))
settings[i] = value
# settings now contains items to be added
template = '%s="%s"\n'
with open('/etc/rc.conf', 'a') as f:
[f.write(template % (k, settings[k])) for k in settings]
f.flush()
class EZJailSetupTask(SetupTask):
'''
Setups ezjail in the virtual machine
'''
def run(self):
base_dir = cherrypy.config.get('setup_plugin.jail_dir')
dst_dir = '%s/flavours' % base_dir
if not os.path.isdir(dst_dir):
try:
self.log("Creating folder `%s`." % dst_dir)
os.makedirs(dst_dir)
except OSError as e:
self.log('Could not create folder `%s`' % dst_dir)
return False
# Holds the temporary file list
tmpfiles = self._retrieveFlavours()
if not tmpfiles:
self.log('No flavours downloaded.')
return False
# Verify and extract the flavour tarball
for file in tmpfiles:
# Verify
if not tarfile.is_tarfile(file['tmp_file']):
msg = "File `%s` is not a tarfile."
self.log(msg % file['tmp_file'])
return False
self.log('Extracting `%s`' % file['tmp_file'])
# Extraction
try:
with tarfile.open(file['tmp_file'], mode='r:*') as t:
                    '''Will raise KeyError if the member does not exist.'''
                    if not t.getmember(file['type']).isdir():
                        msg = "Tar member `%s` is not a folder."
raise tarfile.ExtractError(msg % file['type'])
t.extractall("%s/" % dst_dir)
except (IOError, KeyError, tarfile.ExtractError) as e:
msg = "File `%s` could not be extracted. Reason: %s"
self.log(msg % (file['tmp_file'], e))
# Remove the temporary tarball
try:
os.unlink(file['tmp_file'])
            except OSError as e:
msg = "Error while removing file `%s`: %s"
self.log(msg % (file['tmp_file'], e))
return True
def _retrieveFlavours(self):
'''Retrieve the tarball for each flavours'''
tmpfiles = []
jail_dir = cherrypy.config.get('setup_plugin.jail_dir')
for jail in self.vm.jails:
(handle, tmpname) = tempfile.mkstemp(dir=jail_dir)
self.log("Fetching flavour `%s` at `%s`" % (jail.name, jail.url))
try:
(filename, headers) = urllib.urlretrieve(jail.url, tmpname)
except (urllib.ContentTooShortError, IOError) as e:
msg = "Error while retrieving jail `%s`: %s"
self.log(msg % (jail.name, e))
return False
tmpfiles.append({'type': jail.jail_type, 'tmp_file': filename})
self.log("Jail `%s` downloaded at `%s`" % (jail.name, filename))
return tmpfiles
class JailConfigTask(SetupTask):
'''
Handles jails configuration
'''
def run(self):
jail_dir = cherrypy.config.get('setup_plugin.jail_dir')
flavour_dir = "%s/flavours" % jail_dir
for jail in self.vm.jails:
self.log("Configuring jail `%s`." % jail.jail_type)
path = "%s/%s" % (flavour_dir, jail.jail_type)
authorized_key_file = "%s/installdata/authorized_keys" % path
resolv_file = "%s/etc/resolv.conf" % path
yum_file = "%s/installdata/yum_repo" % path
rc_file = "%s/etc/rc.conf" % path
host_file = "%s/etc/hosts" % path
# Create /installdata and /etc folder.
for p in ['%s/installdata', '%s/etc']:
if not os.path.exists(p % path):
os.mkdir(p % path)
# Verify the flavours exists.
exists = os.path.exists(path)
is_dir = os.path.isdir(path)
if not exists or not is_dir:
msg = "Flavour `%s` directory is missing in `%s."
self.log(msg % (jail.jail_type, flavour_dir))
return False
msg = "Retrieving yum repository for environment `%s`."
self.log(msg % self.vm.environment)
yum_repo = self._puck.getYumRepo(self.vm.environment)
self.log("Writing ssh keys.")
if not self._writeKeys(jail, authorized_key_file):
return False
self.log("Copying resolv.conf.")
if not self._writeResolvConf(jail, resolv_file):
return False
self.log("Updating jail hostname to `%s-%s`" % (self.vm.name, jail.jail_type))
if not self._update_hostname(jail, rc_file, host_file):
return False
self.log("Writing yum repository.")
if not self._writeYumRepoConf(yum_repo, yum_file):
return False
self.log("Creating jail.")
if not self._createJail(jail):
return False
return True
def _writeKeys(self, jail, authorized_key_file):
'''Write authorized keys'''
try:
with open(authorized_key_file, 'w') as f:
for key in self.vm.keys.values():
f.write("%s\n" % key['key'])
except IOError as e:
msg = "Error while writing authorized keys to jail `%s`: %s"
self.log(msg % (jail.jail_type, e))
return False
return True
def _update_hostname(self, jail, rc_file, host_file):
hostname = "%s-%s" % (self.vm.name, jail.jail_type)
self.log("Replacing hostname in %s" % rc_file)
(fh, abspath) = tempfile.mkstemp()
has_hostname = False
tmp = open(abspath, 'w')
with open(rc_file, 'r') as f:
for line in f:
if not line.startswith('hostname'):
tmp.write(line)
continue
tmp.write('hostname="%s"\n' % hostname)
has_hostname = True
if not has_hostname:
tmp.write('hostname="%s"\n' % hostname)
tmp.close()
os.close(fh)
os.remove(rc_file)
shutil.move(abspath, rc_file)
os.chmod(rc_file, 0644)
self.log("Adding new hostname in %s" % host_file)
(fh, abspath) = tempfile.mkstemp()
has_hostname = False
tmp = open(abspath, 'w')
with open(host_file, 'r') as f:
for line in f:
if not line.startswith('127.0.0.1'):
tmp.write(line)
continue
tmp.write('%s %s\n' % (line.replace('\n', ''), hostname))
tmp.close()
os.close(fh)
os.remove(host_file)
shutil.move(abspath, host_file)
os.chmod(host_file, 0644)
return True
def _writeResolvConf(self, jail, resolv_file):
'''Copy resolv.conf'''
try:
shutil.copyfile('/etc/resolv.conf', resolv_file)
except IOError as e:
self.log("Error while copying host resolv file: %s" % e)
return False
return True
def _writeYumRepoConf(self, yum_repo, yum_file):
'''Setup yum repo.d file ezjail will use.'''
try:
with open(yum_file, 'w') as f:
f.write(yum_repo['data'])
except (KeyError, IOError) as e:
self.log("Error while writing YUM repo data: %s" % e)
return False
return True
def _createJail(self, jail):
'''Create the jail'''
try:
jail.create()
except OSError as e:
msg = "Error while installing jail `%s`: %s"
self.log(msg % (jail.jail_type, e))
return False
return True
class JailStartupTask(SetupTask):
'''
Handles starting each jail.
'''
def run(self):
# Start each jail
for jail in self.vm.jails:
self.log("Starting jail `%s`" % jail.jail_type)
try:
status = jail.start()
except OSError as e:
self.log("Could not start jail `%s`: %s" % (jail.jail_type, e))
return False
self.log("Jail status: %s" % status)
self.log("Jail `%s` started" % jail.jail_type)
if not jail.status():
self.log("Jail `%s` is not running!" % jail.jail_type)
return False
return True
class SetupWorkerThread(threading.Thread):
"""
Thread class with a stop() method. The thread itself has to check
regularly for the stopped() condition.
"""
def __init__(self, bus, queue, outqueue, puck):
super(self.__class__, self).__init__()
self._stop = threading.Event()
self.running = threading.Event()
self.successful = False
self.completed = False
self._queue = queue
self._bus = bus
self._outqueue = outqueue
self._puck = puck
def stop(self):
self._stop.set()
def stopped(self):
return self._stop.isSet()
def _step(self):
'''
Run a task
@raise RuntimeError when the task failed to complete
'''
# This will probably need to be wrapped in a try/catch.
task = self._queue.get(True, 10)(self._puck, self._outqueue)
loginfo = (self.__class__.__name__, task.__class__.__name__)
task.log('Starting')
if not task.run():
raise RuntimeError("%s error while running task `%s`" % loginfo)
task.log('Completed')
self._queue.task_done()
def run(self):
if self.completed:
self._bus.log("%s had already been run." % self.__class__.__name__)
return False
if self.running.isSet():
self._bus.log("%s is already running." % self.__class__.__name__)
return False
self.running.set()
self._bus.log("%s started." % self.__class__.__name__)
try:
while not self.stopped():
self._step()
except RuntimeError as err:
self._bus.log(str(err))
self._empty_queue()
self._puck.getVM().status = 'setup_failed'
self._puck.updateStatus()
            self.successful = False
self.completed = True
return False
except queue.Empty:
pass
self.completed = True
        self.successful = True
self._puck.getVM().status = 'setup_complete'
self._puck.updateStatus()
self._outqueue.put("%s finished." % self.__class__.__name__)
def _empty_queue(self):
while not self._queue.empty():
try:
self._queue.get(False)
except queue.Empty:
return
class SetupPlugin(plugins.SimplePlugin):
'''
Handles tasks related to virtual machine setup.
The plugin launches a separate thread to asynchronously execute the tasks.
'''
def __init__(self, puck, bus, freq=30.0):
plugins.SimplePlugin.__init__(self, bus)
self.freq = freq
self._puck = puck
self._queue = queue.Queue()
self._workerQueue = queue.Queue()
self.worker = None
self.statuses = []
def start(self):
self.bus.log('Starting up setup tasks')
self.bus.subscribe('setup', self.switch)
start.priority = 70
def stop(self):
self.bus.log('Stopping down setup task.')
        self._setup_stop()
def switch(self, *args, **kwargs):
'''
This is the task switchboard. Depending on the parameters received,
it will execute the appropriate action.
'''
        if 'action' not in kwargs:
            self.bus.log("Parameter `action` is missing.")
return
# Default task
def default(**kwargs):
return
return {
'start': self._setup_start,
'stop': self._setup_stop,
'status': self._setup_status,
'clear': self._clear_status
}.get(kwargs['action'], default)()
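    # Editor's note (assumed wiring): other components reach this switchboard
    # by publishing on the 'setup' channel, e.g.
    #
    #   cherrypy.engine.publish('setup', action='start')
    #   statuses, running = cherrypy.engine.publish('setup', action='status')[0]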
def _clear_status(self, **kwargs):
'''Clear the status list'''
del(self.statuses[:])
def _setup_stop(self, **kwargs):
self.bus.log("Received stop request.")
if self.worker and self.worker.isAlive():
self.worker.stop()
def _start_worker(self):
self.worker = SetupWorkerThread(
bus=self.bus, queue = self._queue,
outqueue = self._workerQueue, puck = self._puck
)
self.worker.start()
def _setup_start(self, **kwargs):
self.bus.log("Received start request.")
# Start the worker if it is not running.
if not self.worker:
self._start_worker()
if not self.worker.is_alive() and not self.worker.successful:
self._start_worker()
# @TODO: Persistence of the list when failure occurs.
# or a state machine instead of a queue.
for task in cherrypy.config.get('setup_plugin.tasks'):
self._queue.put(task)
def _setup_status(self, **kwargs):
'''
Returns the current log queue and if the setup is running or not.
'''
status = self._readQueue(self._workerQueue)
while status:
self.statuses.append(status)
status = self._readQueue(self._workerQueue)
if not self.worker or not self.worker.isAlive():
return (self.statuses, False)
return (self.statuses, True)
def _readQueue(self, q, blocking = True, timeout = 0.2):
'''
Wraps code to read from a queue, including exception handling.
'''
try:
item = q.get(blocking, timeout)
except queue.Empty:
return None
return item
|
masom/Puck
|
client/pixie/lib/setup_plugin.py
|
Python
|
lgpl-3.0
| 24,567 | 0.001628 |